diff --git a/swh/web/api/apidoc.py b/swh/web/api/apidoc.py index a13e7279..96d859ae 100644 --- a/swh/web/api/apidoc.py +++ b/swh/web/api/apidoc.py @@ -1,366 +1,367 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import docutils.nodes import docutils.parsers.rst import docutils.utils import functools import os import re import textwrap from functools import wraps from rest_framework.decorators import api_view from swh.web.common.utils import parse_rst from swh.web.api.apiurls import APIUrls from swh.web.api.apiresponse import make_api_response, error_response class _HTTPDomainDocVisitor(docutils.nodes.NodeVisitor): """ docutils visitor for walking on a parsed rst document containing sphinx httpdomain roles. Its purpose is to extract relevant info regarding swh api endpoints (for instance url arguments) from their docstring written using sphinx httpdomain. """ # httpdomain roles we want to parse (based on sphinxcontrib.httpdomain 1.6) parameter_roles = ('param', 'parameter', 'arg', 'argument') response_json_object_roles = ('resjsonobj', 'resjson', '>jsonobj', '>json') response_json_array_roles = ('resjsonarr', '>jsonarr') query_parameter_roles = ('queryparameter', 'queryparam', 'qparam', 'query') request_header_roles = ('header', 'resheader', 'responseheader') status_code_roles = ('statuscode', 'status', 'code') def __init__(self, document, urls, data): super().__init__(document) self.urls = urls self.url_idx = 0 self.data = data self.args_set = set() self.params_set = set() self.returns_set = set() self.status_codes_set = set() self.reqheaders_set = set() self.resheaders_set = set() self.field_list_visited = False def process_paragraph(self, par): """ Process extracted paragraph text before display. 
Cleanup document model markups and transform the paragraph into a valid raw rst string (as the apidoc documentation transform rst to html when rendering). """ par = par.replace('\n', ' ') # keep emphasized, strong and literal text par = par.replace('', '*') par = par.replace('', '*') par = par.replace('', '**') par = par.replace('', '**') par = par.replace('', '``') par = par.replace('', '``') # remove parsed document markups par = re.sub('<[^<]+?>', '', par) # api urls cleanup to generate valid links afterwards par = re.sub('\(\w+\)', '', par) # noqa par = re.sub('\[.*\]', '', par) # noqa par = par.replace('//', '/') # transform references to api endpoints into valid rst links par = re.sub(':http:get:`(.*)`', r'`<\1>`_', par) # transform references to some elements into bold text par = re.sub(':http:header:`(.*)`', r'**\1**', par) par = re.sub(':func:`(.*)`', r'**\1**', par) return par def visit_field_list(self, node): """ Visit parsed rst field lists to extract relevant info regarding api endpoint. 
""" self.field_list_visited = True for child in node.traverse(): # get the parsed field name if isinstance(child, docutils.nodes.field_name): field_name = child.astext() # parse field text elif isinstance(child, docutils.nodes.paragraph): text = self.process_paragraph(str(child)) field_data = field_name.split(' ') # Parameters if field_data[0] in self.parameter_roles: if field_data[2] not in self.args_set: self.data['args'].append({'name': field_data[2], 'type': field_data[1], 'doc': text}) self.args_set.add(field_data[2]) # Query Parameters if field_data[0] in self.query_parameter_roles: if field_data[2] not in self.params_set: self.data['params'].append({'name': field_data[2], 'type': field_data[1], 'doc': text}) self.params_set.add(field_data[2]) # Response type if field_data[0] in self.response_json_array_roles or \ field_data[0] in self.response_json_object_roles: # array if field_data[0] in self.response_json_array_roles: self.data['return_type'] = 'array' # object else: self.data['return_type'] = 'object' # returned object field if field_data[2] not in self.returns_set: self.data['returns'].append({'name': field_data[2], 'type': field_data[1], 'doc': text}) self.returns_set.add(field_data[2]) # Status Codes if field_data[0] in self.status_code_roles: if field_data[1] not in self.status_codes_set: self.data['status_codes'].append({'code': field_data[1], # noqa 'doc': text}) self.status_codes_set.add(field_data[1]) # Request Headers if field_data[0] in self.request_header_roles: if field_data[1] not in self.reqheaders_set: self.data['reqheaders'].append({'name': field_data[1], 'doc': text}) self.reqheaders_set.add(field_data[1]) # Response Headers if field_data[0] in self.response_header_roles: if field_data[1] not in self.resheaders_set: resheader = {'name': field_data[1], 'doc': text} self.data['resheaders'].append(resheader) self.resheaders_set.add(field_data[1]) if resheader['name'] == 'Content-Type' and \ resheader['doc'] == 'application/octet-stream': 
self.data['return_type'] = 'octet stream' def visit_paragraph(self, node): """ Visit relevant paragraphs to parse """ # only parsed top level paragraphs if isinstance(node.parent, docutils.nodes.block_quote): text = self.process_paragraph(str(node)) # endpoint description if not text.startswith('**') and self.data['description'] != text: self.data['description'] += '\n\n' if self.data['description'] else '' # noqa self.data['description'] += text # http methods elif text.startswith('**Allowed HTTP Methods:**'): text = text.replace('**Allowed HTTP Methods:**', '') http_methods = text.strip().split(',') http_methods = [m[m.find('`')+1:-1].upper() for m in http_methods] self.data['urls'].append({'rule': self.urls[self.url_idx], 'methods': http_methods}) self.url_idx += 1 def visit_literal_block(self, node): """ Visit literal blocks """ text = node.astext() # literal block in endpoint description if not self.field_list_visited: self.data['description'] += \ ':\n\n%s\n' % textwrap.indent(text, '\t') # extract example url if ':swh_web_api:' in text: self.data['examples'].append( '/api/1/' + re.sub('.*`(.*)`.*', r'\1', text)) def visit_bullet_list(self, node): # bullet list in endpoint description if not self.field_list_visited: self.data['description'] += '\n\n' for child in node.traverse(): # process list item if isinstance(child, docutils.nodes.paragraph): line_text = self.process_paragraph(str(child)) self.data['description'] += '\t* %s\n' % line_text def unknown_visit(self, node): pass def depart_document(self, node): """ End of parsing extra processing """ default_methods = ['GET', 'HEAD', 'OPTIONS'] # ensure urls info is present and set default http methods if not self.data['urls']: for url in self.urls: self.data['urls'].append({'rule': url, 'methods': default_methods}) def unknown_departure(self, node): pass def _parse_httpdomain_doc(doc, data): doc_lines = doc.split('\n') doc_lines_filtered = [] urls = [] # httpdomain is a sphinx extension that is unknown to 
docutils but # fortunately we can still parse its directives' content, # so remove lines with httpdomain directives before executing the # rst parser from docutils for doc_line in doc_lines: if '.. http' not in doc_line: doc_lines_filtered.append(doc_line) else: url = doc_line[doc_line.find('/'):] # emphasize url arguments for html rendering url = re.sub(r'\((\w+)\)', r' **\(\1\)** ', url) urls.append(url) # parse the rst docstring and do not print system messages about # unknown httpdomain roles document = parse_rst('\n'.join(doc_lines_filtered), report_level=5) # remove the system_message nodes from the parsed document for node in document.traverse(docutils.nodes.system_message): node.parent.remove(node) # visit the document nodes to extract relevant endpoint info visitor = _HTTPDomainDocVisitor(document, urls, data) document.walkabout(visitor) class APIDocException(Exception): """ Custom exception to signal errors in the use of the APIDoc decorators """ def api_doc(route, noargs=False, need_params=False, tags=[], handle_response=False, api_version='1'): """ Decorate an API function to register it in the API doc route index and create the corresponding DRF route. Args: route (str): documentation page's route noargs (boolean): set to True if the route has no arguments, and its result should be displayed anytime its documentation is requested. Default to False need_params (boolean): specify the route requires query parameters otherwise errors will occur. It enables to avoid displaying the invalid response in its HTML documentation. Default to False. tags (list): Further information on api endpoints. 
Two values are possibly expected: * hidden: remove the entry points from the listing * upcoming: display the entry point but it is not followable handle_response (boolean): indicate if the decorated function takes care of creating the HTTP response or delegates that task to the apiresponse module api_version (str): api version string """ urlpattern = '^' + api_version + route + '$' tags = set(tags) # @api_doc() Decorator call def decorator(f): # If the route is not hidden, add it to the index if 'hidden' not in tags: doc_data = get_doc_data(f, route, noargs) doc_desc = doc_data['description'] first_dot_pos = doc_desc.find('.') APIUrls.add_route(route, doc_desc[:first_dot_pos+1], tags=tags) # If the decorated route has arguments, we create a specific # documentation view if not noargs: @api_view(['GET', 'HEAD']) @wraps(f) def doc_view(request): doc_data = get_doc_data(f, route, noargs) return make_api_response(request, None, doc_data) - view_name = 'api-%s' % route[1:-1].replace('/', '-') + view_name = 'api-%s-%s' % \ + (api_version, route[1:-1].replace('/', '-')) APIUrls.add_url_pattern(urlpattern, doc_view, view_name) @wraps(f) def documented_view(request, **kwargs): doc_data = get_doc_data(f, route, noargs) try: response = f(request, **kwargs) except Exception as exc: if request.accepted_media_type == 'text/html' and \ need_params and not request.query_params: response = None else: return error_response(request, exc, doc_data) if handle_response: return response else: return make_api_response(request, response, doc_data) return documented_view return decorator @functools.lru_cache(maxsize=32) def get_doc_data(f, route, noargs): """ Build documentation data for the decorated api endpoint function """ data = { 'description': '', 'response_data': None, 'urls': [], 'args': [], 'params': [], 'resheaders': [], 'reqheaders': [], 'return_type': '', 'returns': [], 'status_codes': [], 'examples': [], 'route': route, 'noargs': noargs } if not f.__doc__: raise 
APIDocException('apidoc: expected a docstring' ' for function %s' % (f.__name__,)) # use raw docstring as endpoint documentation if sphinx # httpdomain is not used if '.. http' not in f.__doc__: data['description'] = f.__doc__ # else parse the sphinx httpdomain docstring with docutils # (except when building the swh-web documentation through autodoc # sphinx extension, not needed and raise errors with sphinx >= 1.7) elif 'SWH_WEB_DOC_BUILD' not in os.environ: _parse_httpdomain_doc(f.__doc__, data) # process returned object info for nicer html display returns_list = '' for ret in data['returns']: returns_list += '\t* **%s (%s)**: %s\n' %\ (ret['name'], ret['type'], ret['doc']) data['returns_list'] = returns_list return data diff --git a/swh/web/api/apiurls.py b/swh/web/api/apiurls.py index 452d8a89..60634fa6 100644 --- a/swh/web/api/apiurls.py +++ b/swh/web/api/apiurls.py @@ -1,85 +1,85 @@ # Copyright (C) 2017-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import functools from rest_framework.decorators import api_view from swh.web.common.urlsindex import UrlsIndex from swh.web.common import throttling class APIUrls(UrlsIndex): """ Class to manage API documentation URLs. - Indexes all routes documented using apidoc's decorators. 
- Tracks endpoint/request processing method relationships for use in generating related urls in API documentation """ _apidoc_routes = {} _method_endpoints = {} scope = 'api' @classmethod def get_app_endpoints(cls): return cls._apidoc_routes @classmethod def add_route(cls, route, docstring, **kwargs): """ Add a route to the self-documenting API reference """ - route_view_name = 'api-%s' % route[1:-1].replace('/', '-') + route_view_name = 'api-1-%s' % route[1:-1].replace('/', '-') if route not in cls._apidoc_routes: d = {'docstring': docstring, 'route_view_name': route_view_name} for k, v in kwargs.items(): d[k] = v cls._apidoc_routes[route] = d def api_route(url_pattern=None, view_name=None, methods=['GET', 'HEAD', 'OPTIONS'], throttle_scope='swh_api', api_version='1', checksum_args=None): """ Decorator to ease the registration of an API endpoint using the Django REST Framework. Args: url_pattern: the url pattern used by DRF to identify the API route view_name: the name of the API view associated to the route used to reverse the url methods: array of HTTP methods supported by the API route """ url_pattern = '^' + api_version + url_pattern + '$' def decorator(f): # create a DRF view from the wrapped function @api_view(methods) @throttling.throttle_scope(throttle_scope) @functools.wraps(f) def api_view_f(*args, **kwargs): return f(*args, **kwargs) # small hacks for correctly generating API endpoints index doc api_view_f.__name__ = f.__name__ api_view_f.http_method_names = methods # register the route and its view in the endpoints index APIUrls.add_url_pattern(url_pattern, api_view_f, view_name) if checksum_args: APIUrls.add_redirect_for_checksum_args(view_name, [url_pattern], checksum_args) return f return decorator diff --git a/swh/web/api/utils.py b/swh/web/api/utils.py index 5c66d6fd..b84a1049 100644 --- a/swh/web/api/utils.py +++ b/swh/web/api/utils.py @@ -1,211 +1,211 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the 
top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.web.common.utils import reverse from swh.web.common.query import parse_hash def filter_field_keys(data, field_keys): """Given an object instance (directory or list), and a csv field keys to filter on. Return the object instance with filtered keys. Note: Returns obj as is if it's an instance of types not in (dictionary, list) Args: - data: one object (dictionary, list...) to filter. - field_keys: csv or set of keys to filter the object on Returns: obj filtered on field_keys """ if isinstance(data, map): return map(lambda x: filter_field_keys(x, field_keys), data) if isinstance(data, list): return [filter_field_keys(x, field_keys) for x in data] if isinstance(data, dict): return {k: v for (k, v) in data.items() if k in field_keys} return data def person_to_string(person): """Map a person (person, committer, tagger, etc...) to a string. """ return ''.join([person['name'], ' <', person['email'], '>']) def enrich_object(object): """Enrich an object (revision, release) with link to the 'target' of type 'target_type'. Args: object: An object with target and target_type keys (e.g. 
release, revision) Returns: Object enriched with target_url pointing to the right swh.web.ui.api urls for the pointing object (revision, release, content, directory) """ obj = object.copy() if 'target' in obj and 'target_type' in obj: if obj['target_type'] in ('revision', 'release', 'directory'): obj['target_url'] = \ - reverse('api-%s' % obj['target_type'], + reverse('api-1-%s' % obj['target_type'], url_args={'sha1_git': obj['target']}) elif obj['target_type'] == 'content': obj['target_url'] = \ - reverse('api-content', + reverse('api-1-content', url_args={'q': 'sha1_git:' + obj['target']}) elif obj['target_type'] == 'snapshot': obj['target_url'] = \ - reverse('api-snapshot', + reverse('api-1-snapshot', url_args={'snapshot_id': obj['target']}) if 'author' in obj: author = obj['author'] - obj['author_url'] = reverse('api-person', + obj['author_url'] = reverse('api-1-person', url_args={'person_id': author['id']}) return obj enrich_release = enrich_object def enrich_directory(directory, context_url=None): """Enrich directory with url to content or directory. 
""" if 'type' in directory: target_type = directory['type'] target = directory['target'] if target_type == 'file': - directory['target_url'] = \ - reverse('api-content', url_args={'q': 'sha1_git:%s' % target}) + directory['target_url'] = reverse( + 'api-1-content', url_args={'q': 'sha1_git:%s' % target}) if context_url: directory['file_url'] = context_url + directory['name'] + '/' elif target_type == 'dir': - directory['target_url'] = reverse('api-directory', - url_args={'sha1_git': target}) + directory['target_url'] = reverse( + 'api-1-directory', url_args={'sha1_git': target}) if context_url: directory['dir_url'] = context_url + directory['name'] + '/' else: - directory['target_url'] = reverse('api-revision', - url_args={'sha1_git': target}) + directory['target_url'] = reverse( + 'api-1-revision', url_args={'sha1_git': target}) if context_url: directory['rev_url'] = context_url + directory['name'] + '/' return directory def enrich_metadata_endpoint(content): """Enrich metadata endpoint with link to the upper metadata endpoint. 
""" c = content.copy() - c['content_url'] = reverse('api-content', + c['content_url'] = reverse('api-1-content', url_args={'q': 'sha1:%s' % c['id']}) return c def enrich_content(content, top_url=False, query_string=None): """Enrich content with links to: - data_url: its raw data - filetype_url: its filetype information - language_url: its programming language information - license_url: its licensing information Args: content: dict of data associated to a swh content object top_url: whether or not to include the content url in the enriched data query_string: optional query string of type ':' used when requesting the content, it acts as a hint for picking the same hash method when computing the url listed above Returns: An enriched content dict filled with additional urls """ checksums = content if 'checksums' in content: checksums = content['checksums'] hash_algo = 'sha1' if query_string: hash_algo = parse_hash(query_string)[0] if hash_algo in checksums: q = '%s:%s' % (hash_algo, checksums[hash_algo]) if top_url: - content['content_url'] = reverse('api-content', url_args={'q': q}) - content['data_url'] = reverse('api-content-raw', url_args={'q': q}) - content['filetype_url'] = reverse('api-content-filetype', - url_args={'q': q}) - content['language_url'] = reverse('api-content-language', - url_args={'q': q}) - content['license_url'] = reverse('api-content-license', - url_args={'q': q}) + content['content_url'] = reverse( + 'api-1-content', url_args={'q': q}) + content['data_url'] = reverse('api-1-content-raw', url_args={'q': q}) + content['filetype_url'] = reverse( + 'api-1-content-filetype', url_args={'q': q}) + content['language_url'] = reverse( + 'api-1-content-language', url_args={'q': q}) + content['license_url'] = reverse( + 'api-1-content-license', url_args={'q': q}) return content def enrich_revision(revision): """Enrich revision with links where it makes sense (directory, parents). Keep track of the navigation breadcrumbs if they are specified. 
Args: revision: the revision as a dict """ - revision['url'] = reverse('api-revision', + revision['url'] = reverse('api-1-revision', url_args={'sha1_git': revision['id']}) - revision['history_url'] = reverse('api-revision-log', + revision['history_url'] = reverse('api-1-revision-log', url_args={'sha1_git': revision['id']}) if 'author' in revision: author = revision['author'] - revision['author_url'] = reverse('api-person', + revision['author_url'] = reverse('api-1-person', url_args={'person_id': author['id']}) if 'committer' in revision: committer = revision['committer'] - revision['committer_url'] = \ - reverse('api-person', url_args={'person_id': committer['id']}) + revision['committer_url'] = reverse( + 'api-1-person', url_args={'person_id': committer['id']}) if 'directory' in revision: - revision['directory_url'] = \ - reverse('api-directory', - url_args={'sha1_git': revision['directory']}) + revision['directory_url'] = reverse( + 'api-1-directory', url_args={'sha1_git': revision['directory']}) if 'parents' in revision: parents = [] for parent in revision['parents']: parents.append({ 'id': parent, - 'url': reverse('api-revision', url_args={'sha1_git': parent}) + 'url': reverse('api-1-revision', url_args={'sha1_git': parent}) }) revision['parents'] = parents if 'children' in revision: children = [] for child in revision['children']: - children.append(reverse('api-revision', - url_args={'sha1_git': child})) + children.append(reverse( + 'api-1-revision', url_args={'sha1_git': child})) revision['children_urls'] = children if 'message_decoding_failed' in revision: revision['message_url'] = \ - reverse('api-revision-raw-message', + reverse('api-1-revision-raw-message', url_args={'sha1_git': revision['id']}) return revision diff --git a/swh/web/api/views/content.py b/swh/web/api/views/content.py index 661cc103..c632991e 100644 --- a/swh/web/api/views/content.py +++ b/swh/web/api/views/content.py @@ -1,381 +1,382 @@ # Copyright (C) 2015-2018 The Software Heritage 
developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import functools from django.http import HttpResponse from swh.web.common import service from swh.web.common.utils import reverse from swh.web.common.exc import NotFoundExc from swh.web.api.apidoc import api_doc from swh.web.api import utils from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup @api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/filetype/', - 'api-content-filetype', checksum_args=['q']) + 'api-1-content-filetype', checksum_args=['q']) @api_doc('/content/filetype/') def api_content_filetype(request, q): """ .. http:get:: /api/1/content/[(hash_type):](hash)/filetype/ Get information about the detected MIME type of a content object. :param string hash_type: optional parameter specifying which hashing algorithm has been used to compute the content checksum. It can be either ``sha1``, ``sha1_git``, ``sha256`` or ``blake2s256``. If that parameter is not provided, it is assumed that the hashing algorithm used is `sha1`. :param string hash: hexadecimal representation of the checksum value computed with the specified hashing algorithm. 
:>json object content_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/` for getting information about the content :>json string encoding: the detected content encoding :>json string id: the **sha1** identifier of the content :>json string mimetype: the detected MIME type of the content :>json object tool: information about the tool used to detect the content filetype :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **hash_type** or **hash** has been provided :statuscode 404: requested content can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`content/sha1:dc2830a9e72f23c1dfebef4413003221baa5fb62/filetype/` """ # noqa return api_lookup( service.lookup_content_filetype, q, notfound_msg='No filetype information found for content {}.'.format(q), enrich_fn=utils.enrich_metadata_endpoint) @api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/language/', - 'api-content-language', checksum_args=['q']) + 'api-1-content-language', checksum_args=['q']) @api_doc('/content/language/') def api_content_language(request, q): """ .. http:get:: /api/1/content/[(hash_type):](hash)/language/ Get information about the programming language used in a content object. Note: this endpoint currently returns no data. :param string hash_type: optional parameter specifying which hashing algorithm has been used to compute the content checksum. It can be either ``sha1``, ``sha1_git``, ``sha256`` or ``blake2s256``. If that parameter is not provided, it is assumed that the hashing algorithm used is ``sha1``. :param string hash: hexadecimal representation of the checksum value computed with the specified hashing algorithm. 
:>json object content_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/` for getting information about the content :>json string id: the **sha1** identifier of the content :>json string lang: the detected programming language if any :>json object tool: information about the tool used to detect the programming language :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **hash_type** or **hash** has been provided :statuscode 404: requested content can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`content/sha1:dc2830a9e72f23c1dfebef4413003221baa5fb62/language/` """ # noqa return api_lookup( service.lookup_content_language, q, notfound_msg='No language information found for content {}.'.format(q), enrich_fn=utils.enrich_metadata_endpoint) @api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/license/', - 'api-content-license', checksum_args=['q']) + 'api-1-content-license', checksum_args=['q']) @api_doc('/content/license/') def api_content_license(request, q): """ .. http:get:: /api/1/content/[(hash_type):](hash)/license/ Get information about the license of a content object. :param string hash_type: optional parameter specifying which hashing algorithm has been used to compute the content checksum. It can be either ``sha1``, ``sha1_git``, ``sha256`` or ``blake2s256``. If that parameter is not provided, it is assumed that the hashing algorithm used is ``sha1``. :param string hash: hexadecimal representation of the checksum value computed with the specified hashing algorithm. 
:>json object content_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/` for getting information about the content :>json string id: the **sha1** identifier of the content :>json array licenses: array of strings containing the detected license names if any :>json object tool: information about the tool used to detect the license :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **hash_type** or **hash** has been provided :statuscode 404: requested content can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`content/sha1:dc2830a9e72f23c1dfebef4413003221baa5fb62/license/` """ # noqa return api_lookup( service.lookup_content_license, q, notfound_msg='No license information found for content {}.'.format(q), enrich_fn=utils.enrich_metadata_endpoint) -@api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/ctags/', 'api-content-ctags') +@api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/ctags/', + 'api-1-content-ctags') @api_doc('/content/ctags/', tags=['hidden']) def api_content_ctags(request, q): """ Get information about all `Ctags `_-style symbols defined in a content object. """ return api_lookup( service.lookup_content_ctags, q, notfound_msg='No ctags symbol found for content {}.'.format(q), enrich_fn=utils.enrich_metadata_endpoint) -@api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/raw/', 'api-content-raw', +@api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/raw/', 'api-1-content-raw', checksum_args=['q']) @api_doc('/content/raw/', handle_response=True) def api_content_raw(request, q): """ .. http:get:: /api/1/content/[(hash_type):](hash)/raw/ Get the raw content of a content object (aka a "blob"), as a byte sequence. 
:param string hash_type: optional parameter specifying which hashing algorithm has been used to compute the content checksum. It can be either ``sha1``, ``sha1_git``, ``sha256`` or ``blake2s256``. If that parameter is not provided, it is assumed that the hashing algorithm used is ``sha1``. :param string hash: hexadecimal representation of the checksum value computed with the specified hashing algorithm. :query string filename: if provided, the downloaded content will get that filename :resheader Content-Type: application/octet-stream **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **hash_type** or **hash** has been provided :statuscode 404: requested content can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`content/sha1:dc2830a9e72f23c1dfebef4413003221baa5fb62/raw/` """ # noqa def generate(content): yield content['data'] content_raw = service.lookup_content_raw(q) if not content_raw: raise NotFoundExc('Content %s is not found.' % q) filename = request.query_params.get('filename') if not filename: filename = 'content_%s_raw' % q.replace(':', '_') response = HttpResponse(generate(content_raw), content_type='application/octet-stream') response['Content-disposition'] = 'attachment; filename=%s' % filename return response -@api_route(r'/content/symbol/(?P.+)/', 'api-content-symbol') +@api_route(r'/content/symbol/(?P.+)/', 'api-1-content-symbol') @api_doc('/content/symbol/', tags=['hidden']) def api_content_symbol(request, q=None): """Search content objects by `Ctags `_-style symbol (e.g., function name, data type, method, ...). 
""" result = {} last_sha1 = request.query_params.get('last_sha1', None) per_page = int(request.query_params.get('per_page', '10')) def lookup_exp(exp, last_sha1=last_sha1, per_page=per_page): exp = list(service.lookup_expression(exp, last_sha1, per_page)) return exp if exp else None symbols = api_lookup( lookup_exp, q, notfound_msg="No indexed raw content match expression '{}'.".format(q), enrich_fn=functools.partial(utils.enrich_content, top_url=True)) if symbols: nb_symbols = len(symbols) if nb_symbols == per_page: query_params = {} new_last_sha1 = symbols[-1]['sha1'] query_params['last_sha1'] = new_last_sha1 if request.query_params.get('per_page'): query_params['per_page'] = per_page result['headers'] = { - 'link-next': reverse('api-content-symbol', url_args={'q': q}, + 'link-next': reverse('api-1-content-symbol', url_args={'q': q}, query_params=query_params) } result.update({ 'results': symbols }) return result -@api_route(r'/content/known/search/', 'api-content-known', methods=['POST']) -@api_route(r'/content/known/(?P(?!search).*)/', 'api-content-known') +@api_route(r'/content/known/search/', 'api-1-content-known', methods=['POST']) +@api_route(r'/content/known/(?P(?!search).*)/', 'api-1-content-known') @api_doc('/content/known/', tags=['hidden']) def api_check_content_known(request, q=None): """ .. http:get:: /api/1/content/known/(sha1)[,(sha1), ...,(sha1)]/ Check whether some content(s) (aka "blob(s)") is present in the archive based on its **sha1** checksum. :param string sha1: hexadecimal representation of the **sha1** checksum value for the content to check existence. Multiple values can be provided separated by ','. 
:reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json array search_res: array holding the search result for each provided **sha1** :>json object search_stats: some statistics regarding the number of **sha1** provided and the percentage of those found in the archive **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **sha1** has been provided **Example:** .. parsed-literal:: :swh_web_api:`content/known/dc2830a9e72f23c1dfebef4413003221baa5fb62,0c3f19cb47ebfbe643fb19fa94c874d18fa62d12/` """ # noqa response = {'search_res': None, 'search_stats': None} search_stats = {'nbfiles': 0, 'pct': 0} search_res = None queries = [] # GET: Many hash separated values request if q: hashes = q.split(',') for v in hashes: queries.append({'filename': None, 'sha1': v}) # POST: Many hash requests in post form submission elif request.method == 'POST': data = request.data # Remove potential inputs with no associated value for k, v in data.items(): if v is not None: if k == 'q' and len(v) > 0: queries.append({'filename': None, 'sha1': v}) elif v != '': queries.append({'filename': k, 'sha1': v}) if queries: lookup = service.lookup_multiple_hashes(queries) result = [] nb_queries = len(queries) for el in lookup: res_d = {'sha1': el['sha1'], 'found': el['found']} if 'filename' in el and el['filename']: res_d['filename'] = el['filename'] result.append(res_d) search_res = result nbfound = len([x for x in lookup if x['found']]) search_stats['nbfiles'] = nb_queries search_stats['pct'] = (nbfound / nb_queries) * 100 response['search_res'] = search_res response['search_stats'] = search_stats return response -@api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/', 'api-content', +@api_route(r'/content/(?P[0-9a-z_:]*[0-9a-f]+)/', 'api-1-content', checksum_args=['q']) 
@api_doc('/content/') def api_content_metadata(request, q): """ .. http:get:: /api/1/content/[(hash_type):](hash)/ Get information about a content (aka a "blob") object. In the archive, a content object is identified based on checksum values computed using various hashing algorithms. :param string hash_type: optional parameter specifying which hashing algorithm has been used to compute the content checksum. It can be either ``sha1``, ``sha1_git``, ``sha256`` or ``blake2s256``. If that parameter is not provided, it is assumed that the hashing algorithm used is ``sha1``. :param string hash: hexadecimal representation of the checksum value computed with the specified hashing algorithm. :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json object checksums: object holding the computed checksum values for the requested content :>json string data_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/raw/` for downloading the content raw bytes :>json string filetype_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/filetype/` for getting information about the content MIME type :>json string language_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/language/` for getting information about the programming language used in the content :>json number length: length of the content in bytes :>json string license_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/license/` for getting information about the license of the content **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **hash_type** or **hash** has been provided :statuscode 404: requested content can not be found in the archive **Example:** .. 
parsed-literal:: curl -i :swh_web_api:`content/sha1_git:fe95a46679d128ff167b7c55df5d02356c5a1ae1/` """ # noqa return api_lookup( service.lookup_content, q, notfound_msg='Content with {} not found.'.format(q), enrich_fn=functools.partial(utils.enrich_content, query_string=q)) diff --git a/swh/web/api/views/directory.py b/swh/web/api/views/directory.py index a5273542..682fd099 100644 --- a/swh/web/api/views/directory.py +++ b/swh/web/api/views/directory.py @@ -1,77 +1,77 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.web.common import service from swh.web.api import utils from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -@api_route(r'/directory/(?P[0-9a-f]+)/', 'api-directory', +@api_route(r'/directory/(?P[0-9a-f]+)/', 'api-1-directory', checksum_args=['sha1_git']) @api_route(r'/directory/(?P[0-9a-f]+)/(?P.+)/', - 'api-directory', + 'api-1-directory', checksum_args=['sha1_git']) @api_doc('/directory/') def api_directory(request, sha1_git, path=None): """ .. http:get:: /api/1/directory/(sha1_git)/[(path)/] Get information about directory objects. Directories are identified by **sha1** checksums, compatible with Git directory identifiers. See :func:`swh.model.identifiers.directory_identifier` in our data model module for details about how they are computed. When given only a directory identifier, this endpoint returns information about the directory itself, returning its content (usually a list of directory entries). When given a directory identifier and a path, this endpoint returns information about the directory entry pointed by the relative path, starting path resolution from the given directory. 
:param string sha1_git: hexadecimal representation of the directory **sha1_git** identifier :param string path: optional parameter to get information about the directory entry pointed by that relative path :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>jsonarr object checksums: object holding the computed checksum values for a directory entry (only for file entries) :>jsonarr string dir_id: **sha1_git** identifier of the requested directory :>jsonarr number length: length of a directory entry in bytes (only for file entries) :>jsonarr string name: the directory entry name :>jsonarr number perms: permissions for the directory entry :>jsonarr string target: **sha1_git** identifier of the directory entry :>jsonarr string target_url: link to :http:get:`/api/1/content/[(hash_type):](hash)/` or :http:get:`/api/1/directory/(sha1_git)/[(path)/]` depending on the directory entry type :>jsonarr string type: the type of the directory entry, can be either ``dir``, ``file`` or ``rev`` **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **hash_type** or **hash** has been provided :statuscode 404: requested directory can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`directory/977fc4b98c0e85816348cebd3b12026407c368b6/` """ # noqa if path: error_msg_path = ('Entry with path %s relative to directory ' 'with sha1_git %s not found.') % (path, sha1_git) return api_lookup( service.lookup_directory_with_path, sha1_git, path, notfound_msg=error_msg_path, enrich_fn=utils.enrich_directory) else: error_msg_nopath = 'Directory with sha1_git %s not found.' 
% sha1_git return api_lookup( service.lookup_directory, sha1_git, notfound_msg=error_msg_nopath, enrich_fn=utils.enrich_directory) diff --git a/swh/web/api/views/identifiers.py b/swh/web/api/views/identifiers.py index 06896827..f6cf790b 100644 --- a/swh/web/api/views/identifiers.py +++ b/swh/web/api/views/identifiers.py @@ -1,77 +1,77 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.model.identifiers import ( CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT ) from swh.web.common import service from swh.web.common.utils import resolve_swh_persistent_id from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route @api_route(r'/resolve/(?P.*)/', - 'api-resolve-swh-pid') + 'api-1-resolve-swh-pid') @api_doc('/resolve/') def api_resolve_swh_pid(request, swh_id): """ .. http:get:: /api/1/resolve/(swh_id)/ Resolve a Software Heritage persistent identifier. Try to resolve a provided `persistent identifier `_ into an url for browsing the pointed archive object. If the provided identifier is valid, the existence of the object in the archive will also be checked. 
:param string swh_id: a Software Heritage persistent identifier :>json string browse_url: the url for browsing the pointed object :>json object metadata: object holding optional parts of the persistent identifier :>json string namespace: the persistent identifier namespace :>json string object_id: the hash identifier of the pointed object :>json string object_type: the type of the pointed object :>json number scheme_version: the scheme version of the persistent identifier :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid persistent identifier has been provided :statuscode 404: the pointed object does not exist in the archive **Example:** .. parsed-literal:: :swh_web_api:`resolve/swh:1:rev:96db9023b881d7cd9f379b0c154650d6c108e9a3;origin=https://github.com/openssl/openssl/` """ # noqa # try to resolve the provided pid swh_id_resolved = resolve_swh_persistent_id(swh_id) # id is well-formed, now check that the pointed # object is present in the archive, NotFoundExc # will be raised otherwise swh_id_parsed = swh_id_resolved['swh_id_parsed'] object_type = swh_id_parsed.object_type object_id = swh_id_parsed.object_id if object_type == CONTENT: service.lookup_content('sha1_git:%s' % object_id) elif object_type == DIRECTORY: service.lookup_directory(object_id) elif object_type == RELEASE: service.lookup_release(object_id) elif object_type == REVISION: service.lookup_revision(object_id) elif object_type == SNAPSHOT: service.lookup_snapshot(object_id) # id is well-formed and the pointed object exists swh_id_data = swh_id_parsed._asdict() swh_id_data['browse_url'] = swh_id_resolved['browse_url'] return swh_id_data diff --git a/swh/web/api/views/origin.py b/swh/web/api/views/origin.py index 
edbd56b5..51b3a7c1 100644 --- a/swh/web/api/views/origin.py +++ b/swh/web/api/views/origin.py @@ -1,434 +1,435 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from distutils.util import strtobool from functools import partial from swh.web.common import service from swh.web.common.exc import BadInputExc from swh.web.common.origin_visits import get_origin_visits from swh.web.common.utils import reverse from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup def _enrich_origin(origin): if 'id' in origin: o = origin.copy() - o['origin_visits_url'] = \ - reverse('api-origin-visits', url_args={'origin_id': origin['id']}) + o['origin_visits_url'] = reverse( + 'api-1-origin-visits', url_args={'origin_id': origin['id']}) return o return origin def _enrich_origin_visit(origin_visit, *, with_origin_url, with_origin_visit_url): ov = origin_visit.copy() if with_origin_url: - ov['origin_url'] = reverse('api-origin', + ov['origin_url'] = reverse('api-1-origin', url_args={'origin_id': ov['origin']}) if with_origin_visit_url: - ov['origin_visit_url'] = reverse('api-origin-visit', + ov['origin_visit_url'] = reverse('api-1-origin-visit', url_args={'origin_id': ov['origin'], 'visit_id': ov['visit']}) snapshot = ov['snapshot'] if snapshot: - ov['snapshot_url'] = reverse('api-snapshot', + ov['snapshot_url'] = reverse('api-1-snapshot', url_args={'snapshot_id': snapshot}) else: ov['snapshot_url'] = None return ov -@api_route(r'/origins/', 'api-origins') +@api_route(r'/origins/', 'api-1-origins') @api_doc('/origins/', noargs=True) def api_origins(request): """ .. http:get:: /api/1/origins/ Get list of archived software origins. Origins are sorted by ids before returning them. 
:query int origin_from: The first origin id that will be included in returned results (default to 1) :query int origin_count: The maximum number of origins to return (default to 100, can not exceed 10000) :>jsonarr number id: the origin unique identifier :>jsonarr string origin_visits_url: link to in order to get information about the visits for that origin :>jsonarr string type: the type of software origin (possible values are ``git``, ``svn``, ``hg``, ``deb``, ``pypi``, ``ftp`` or ``deposit``) :>jsonarr string url: the origin canonical url :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :resheader Link: indicates that a subsequent or previous result page are available and contains the urls pointing to them **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error **Example:** .. parsed-literal:: :swh_web_api:`origins?origin_from=50000&origin_count=500` """ # noqa origin_from = int(request.query_params.get('origin_from', '1')) origin_count = int(request.query_params.get('origin_count', '100')) origin_count = min(origin_count, 10000) results = api_lookup( service.lookup_origins, origin_from, origin_count+1, enrich_fn=_enrich_origin) response = {'results': results, 'headers': {}} if len(results) > origin_count: origin_from = results.pop()['id'] response['headers']['link-next'] = reverse( - 'api-origins', query_params={'origin_from': origin_from, - 'origin_count': origin_count}) + 'api-1-origins', + query_params={'origin_from': origin_from, + 'origin_count': origin_count}) return response -@api_route(r'/origin/(?P[0-9]+)/', 'api-origin') +@api_route(r'/origin/(?P[0-9]+)/', 'api-1-origin') @api_route(r'/origin/(?P[a-z]+)/url/(?P.+)/', - 'api-origin') + 'api-1-origin') @api_doc('/origin/') def api_origin(request, origin_id=None, origin_type=None, origin_url=None): 
""" .. http:get:: /api/1/origin/(origin_id)/ Get information about a software origin. :param int origin_id: a software origin identifier :>json number id: the origin unique identifier :>json string origin_visits_url: link to in order to get information about the visits for that origin :>json string type: the type of software origin (possible values are ``git``, ``svn``, ``hg``, ``deb``, ``pypi``, ``ftp`` or ``deposit``) :>json string url: the origin canonical url :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`origin/1/` .. http:get:: /api/1/origin/(origin_type)/url/(origin_url)/ Get information about a software origin. :param string origin_type: the origin type (possible values are ``git``, ``svn``, ``hg``, ``deb``, ``pypi``, ``ftp`` or ``deposit``) :param string origin_url: the origin url :>json number id: the origin unique identifier :>json string origin_visits_url: link to in order to get information about the visits for that origin :>json string type: the type of software origin :>json string url: the origin canonical url :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Example:** .. 
parsed-literal:: :swh_web_api:`origin/git/url/https://github.com/python/cpython/` """ # noqa ori_dict = { 'id': int(origin_id) if origin_id else None, 'type': origin_type, 'url': origin_url } ori_dict = {k: v for k, v in ori_dict.items() if ori_dict[k]} if 'id' in ori_dict: error_msg = 'Origin with id %s not found.' % ori_dict['id'] else: error_msg = 'Origin with type %s and URL %s not found' % ( ori_dict['type'], ori_dict['url']) return api_lookup( service.lookup_origin, ori_dict, notfound_msg=error_msg, enrich_fn=_enrich_origin) @api_route(r'/origin/search/(?P.+)/', - 'api-origin-search') + 'api-1-origin-search') @api_doc('/origin/search/') def api_origin_search(request, url_pattern): """ .. http:get:: /api/1/origin/search/(url_pattern)/ Search for software origins whose urls contain a provided string pattern or match a provided regular expression. The search is performed in a case insensitive way. :param string url_pattern: a string pattern or a regular expression :query int offset: the number of found origins to skip before returning results :query int limit: the maximum number of found origins to return :query boolean regexp: if true, consider provided pattern as a regular expression and search origins whose urls match it :query boolean with_visit: if true, only return origins with at least one visit by Software heritage :>jsonarr number id: the origin unique identifier :>jsonarr string origin_visits_url: link to in order to get information about the visits for that origin :>jsonarr string type: the type of software origin :>jsonarr string url: the origin canonical url :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error **Example:** .. 
parsed-literal:: :swh_web_api:`origin/search/python/?limit=2` """ # noqa result = {} offset = int(request.query_params.get('offset', '0')) limit = int(request.query_params.get('limit', '70')) regexp = request.query_params.get('regexp', 'false') with_visit = request.query_params.get('with_visit', 'false') results = api_lookup(service.search_origin, url_pattern, offset, limit, bool(strtobool(regexp)), bool(strtobool(with_visit)), enrich_fn=_enrich_origin) nb_results = len(results) if nb_results == limit: query_params = {} query_params['offset'] = offset + limit query_params['limit'] = limit query_params['regexp'] = regexp result['headers'] = { - 'link-next': reverse('api-origin-search', + 'link-next': reverse('api-1-origin-search', url_args={'url_pattern': url_pattern}, query_params=query_params) } result.update({ 'results': results }) return result @api_route(r'/origin/metadata-search/', - 'api-origin-metadata-search') + 'api-1-origin-metadata-search') @api_doc('/origin/metadata-search/', noargs=True, need_params=True) def api_origin_metadata_search(request): """ .. http:get:: /api/1/origin/metadata-search/ Search for software origins whose metadata (expressed as a JSON-LD/CodeMeta dictionary) match the provided criteria. For now, only full-text search on this dictionary is supported. :query str fulltext: a string that will be matched against origin metadata; results are ranked and ordered starting with the best ones. 
:query int limit: the maximum number of found origins to return (bounded to 100) :>jsonarr number origin_id: the origin unique identifier :>jsonarr dict metadata: metadata of the origin (as a JSON-LD/CodeMeta dictionary) :>jsonarr string from_revision: the revision used to extract these metadata (the current HEAD or one of the former HEADs) :>jsonarr dict tool: the tool used to extract these metadata :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error **Example:** .. parsed-literal:: :swh_web_api:`origin/metadata-search/?limit=2&fulltext=Jane%20Doe` """ # noqa fulltext = request.query_params.get('fulltext', None) limit = min(int(request.query_params.get('limit', '70')), 100) if not fulltext: content = '"fulltext" must be provided and non-empty.' raise BadInputExc(content) results = api_lookup(service.search_origin_metadata, fulltext, limit) return { 'results': results, } -@api_route(r'/origin/(?P[0-9]+)/visits/', 'api-origin-visits') +@api_route(r'/origin/(?P[0-9]+)/visits/', 'api-1-origin-visits') @api_doc('/origin/visits/') def api_origin_visits(request, origin_id): """ .. http:get:: /api/1/origin/(origin_id)/visits/ Get information about all visits of a software origin. Visits are returned sorted in descending order according to their date. 
:param int origin_id: a software origin identifier :query int per_page: specify the number of visits to list, for pagination purposes :query int last_visit: visit to start listing from, for pagination purposes :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :resheader Link: indicates that a subsequent result page is available and contains the url pointing to it :>jsonarr string date: ISO representation of the visit date (in UTC) :>jsonarr number id: the unique identifier of the origin :>jsonarr string origin_visit_url: link to :http:get:`/api/1/origin/(origin_id)/visit/(visit_id)/` in order to get information about the visit :>jsonarr string snapshot: the snapshot identifier of the visit :>jsonarr string snapshot_url: link to :http:get:`/api/1/snapshot/(snapshot_id)/` in order to get information about the snapshot of the visit :>jsonarr string status: status of the visit (either **full**, **partial** or **ongoing**) :>jsonarr number visit: the unique identifier of the visit **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 404: requested origin can not be found in the archive **Example:** .. 
parsed-literal:: :swh_web_api:`origin/1/visits/` """ # noqa result = {} origin_id = int(origin_id) per_page = int(request.query_params.get('per_page', '10')) last_visit = request.query_params.get('last_visit') if last_visit: last_visit = int(last_visit) def _lookup_origin_visits( origin_id, last_visit=last_visit, per_page=per_page): all_visits = get_origin_visits({'id': origin_id}) all_visits.reverse() visits = [] if not last_visit: visits = all_visits[:per_page] else: for i, v in enumerate(all_visits): if v['visit'] == last_visit: visits = all_visits[i+1:i+1+per_page] break for v in visits: yield v results = api_lookup(_lookup_origin_visits, origin_id, notfound_msg='No origin {} found'.format(origin_id), enrich_fn=partial(_enrich_origin_visit, with_origin_url=False, with_origin_visit_url=True)) if results: nb_results = len(results) if nb_results == per_page: new_last_visit = results[-1]['visit'] query_params = {} query_params['last_visit'] = new_last_visit if request.query_params.get('per_page'): query_params['per_page'] = per_page result['headers'] = { - 'link-next': reverse('api-origin-visits', + 'link-next': reverse('api-1-origin-visits', url_args={'origin_id': origin_id}, query_params=query_params) } result.update({ 'results': results }) return result @api_route(r'/origin/(?P[0-9]+)/visit/(?P[0-9]+)/', - 'api-origin-visit') + 'api-1-origin-visit') @api_doc('/origin/visit/') def api_origin_visit(request, origin_id, visit_id): """ .. http:get:: /api/1/origin/(origin_id)/visit/(visit_id)/ Get information about a specific visit of a software origin. 
:param int origin_id: a software origin identifier :param int visit_id: a visit identifier :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json string date: ISO representation of the visit date (in UTC) :>json number origin: the origin unique identifier :>json string origin_url: link to get information about the origin :>jsonarr string snapshot: the snapshot identifier of the visit :>jsonarr string snapshot_url: link to :http:get:`/api/1/snapshot/(snapshot_id)/` in order to get information about the snapshot of the visit :>json string status: status of the visit (either **full**, **partial** or **ongoing**) :>json number visit: the unique identifier of the visit **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 404: requested origin or visit can not be found in the archive **Example:** .. 
parsed-literal:: :swh_web_api:`origin/1500/visit/1/` """ # noqa return api_lookup( service.lookup_origin_visit, int(origin_id), int(visit_id), notfound_msg=('No visit {} for origin {} found' .format(visit_id, origin_id)), enrich_fn=partial(_enrich_origin_visit, with_origin_url=True, with_origin_visit_url=False)) diff --git a/swh/web/api/views/origin_save.py b/swh/web/api/views/origin_save.py index 54b63eb6..ecc7eb06 100644 --- a/swh/web/api/views/origin_save.py +++ b/swh/web/api/views/origin_save.py @@ -1,84 +1,84 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.views.decorators.cache import never_cache from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.common.origin_save import ( create_save_origin_request, get_save_origin_requests ) @api_route(r'/origin/save/(?P.+)/url/(?P.+)/', - 'api-save-origin', methods=['GET', 'POST'], + 'api-1-save-origin', methods=['GET', 'POST'], throttle_scope='swh_save_origin') @never_cache @api_doc('/origin/save/') def api_save_origin(request, origin_type, origin_url): """ .. http:get:: /api/1/origin/save/(origin_type)/url/(origin_url)/ .. http:post:: /api/1/origin/save/(origin_type)/url/(origin_url)/ Request the saving of a software origin into the archive or check the status of previously created save requests. That endpoint enables to create a saving task for a software origin through a POST request. 
Depending on the provided origin url, the save request can either be: * immediately **accepted**, for well known code hosting providers like for instance GitHub or GitLab * **rejected**, in case the url is blacklisted by Software Heritage * **put in pending state** until a manual check is done in order to determine if it can be loaded or not Once a saving request has been accepted, its associated saving task status can then be checked through a GET request on the same url. Returned status can either be: * **not created**: no saving task has been created * **not yet scheduled**: saving task has been created but its execution has not yet been scheduled * **scheduled**: the task execution has been scheduled * **succeed**: the saving task has been successfully executed * **failed**: the saving task has been executed but it failed When issuing a POST request an object will be returned while a GET request will return an array of objects (as multiple save requests might have been submitted for the same origin). 
:param string origin_type: the type of origin to save (currently the supported types are ``git``, ``hg`` and ``svn``) :param string origin_url: the url of the origin to save :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json string origin_url: the url of the origin to save :>json string origin_type: the type of the origin to save :>json string save_request_date: the date (in iso format) the save request was issued :>json string save_request_status: the status of the save request, either **accepted**, **rejected** or **pending** :>json string save_task_status: the status of the origin saving task, either **not created**, **not yet scheduled**, **scheduled**, **succeed** or **failed** **Allowed HTTP Methods:** :http:method:`get`, :http:method:`post`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid origin type or url has been provided :statuscode 403: the provided origin url is blacklisted :statuscode 404: no save requests have been found for a given origin """ # noqa if request.method == 'POST': sor = create_save_origin_request(origin_type, origin_url) del sor['id'] else: sor = get_save_origin_requests(origin_type, origin_url) for s in sor: del s['id'] # noqa return sor diff --git a/swh/web/api/views/person.py b/swh/web/api/views/person.py index 628ef7a9..1aa79a55 100644 --- a/swh/web/api/views/person.py +++ b/swh/web/api/views/person.py @@ -1,44 +1,44 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.web.common import service from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.api.views.utils import 
api_lookup -@api_route(r'/person/(?P[0-9]+)/', 'api-person') +@api_route(r'/person/(?P[0-9]+)/', 'api-1-person') @api_doc('/person/') def api_person(request, person_id): """ .. http:get:: /api/1/person/(person_id)/ Get information about a person in the archive. :param int person_id: a person identifier :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json string email: the email of the person :>json string fullname: the full name of the person: combination of its name and email :>json number id: the unique identifier of the person :>json string name: the name of the person **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 404: requested person can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`person/8275/` """ # noqa return api_lookup( service.lookup_person, person_id, notfound_msg='Person with id {} not found.'.format(person_id)) diff --git a/swh/web/api/views/release.py b/swh/web/api/views/release.py index 46301edf..b8f0d1b1 100644 --- a/swh/web/api/views/release.py +++ b/swh/web/api/views/release.py @@ -1,59 +1,59 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.web.common import service from swh.web.api import utils from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -@api_route(r'/release/(?P[0-9a-f]+)/', 'api-release', +@api_route(r'/release/(?P[0-9a-f]+)/', 'api-1-release', checksum_args=['sha1_git']) @api_doc('/release/') def api_release(request, sha1_git): """ .. 
http:get:: /api/1/release/(sha1_git)/ Get information about a release in the archive. Releases are identified by **sha1** checksums, compatible with Git tag identifiers. See :func:`swh.model.identifiers.release_identifier` in our data model module for details about how they are computed. :param string sha1_git: hexadecimal representation of the release **sha1_git** identifier :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json object author: information about the author of the release :>json string author_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the author of the release :>json string date: ISO representation of the release date (in UTC) :>json string id: the release unique identifier :>json string message: the message associated to the release :>json string name: the name of the release :>json string target: the target identifier of the release :>json string target_type: the type of the target, can be either **release**, **revision**, **content**, **directory** :>json string target_url: a link to the adequate api url based on the target type **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **sha1_git** value has been provided :statuscode 404: requested release can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`release/208f61cc7a5dbc9879ae6e5c2f95891e270f09ef/` """ # noqa error_msg = 'Release with sha1_git %s not found.' 
% sha1_git return api_lookup( service.lookup_release, sha1_git, notfound_msg=error_msg, enrich_fn=utils.enrich_release) diff --git a/swh/web/api/views/revision.py b/swh/web/api/views/revision.py index 10b3ec86..3bce6dbc 100644 --- a/swh/web/api/views/revision.py +++ b/swh/web/api/views/revision.py @@ -1,489 +1,489 @@ # Copyright (C) 2015-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.http import HttpResponse from swh.web.common import service from swh.web.common.utils import reverse from swh.web.common.utils import parse_timestamp from swh.web.api import utils from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup def _revision_directory_by(revision, path, request_path, limit=100, with_data=False): """ Compute the revision matching criterion's directory or content data. Args: revision: dictionary of criterions representing a revision to lookup path: directory's path to lookup request_path: request path which holds the original context to limit: optional query parameter to limit the revisions log (default to 100). For now, note that this limit could impede the transitivity conclusion about sha1_git not being an ancestor of with_data: indicate to retrieve the content's raw data if path resolves to a content. 
""" def enrich_directory_local(dir, context_url=request_path): return utils.enrich_directory(dir, context_url) rev_id, result = service.lookup_directory_through_revision( revision, path, limit=limit, with_data=with_data) content = result['content'] if result['type'] == 'dir': # dir_entries result['content'] = list(map(enrich_directory_local, content)) elif result['type'] == 'file': # content result['content'] = utils.enrich_content(content) elif result['type'] == 'rev': # revision result['content'] = utils.enrich_revision(content) return result @api_route(r'/revision/origin/(?P[0-9]+)' r'/branch/(?P.+)/log/', - 'api-revision-origin-log') + 'api-1-revision-origin-log') @api_route(r'/revision/origin/(?P[0-9]+)/log/', - 'api-revision-origin-log') + 'api-1-revision-origin-log') @api_route(r'/revision/origin/(?P[0-9]+)' r'/ts/(?P.+)/log/', - 'api-revision-origin-log') + 'api-1-revision-origin-log') @api_route(r'/revision/origin/(?P[0-9]+)' r'/branch/(?P.+)' r'/ts/(?P.+)/log/', - 'api-revision-origin-log') + 'api-1-revision-origin-log') @api_doc('/revision/origin/log/') def api_revision_log_by(request, origin_id, branch_name='HEAD', ts=None): """ .. http:get:: /api/1/revision/origin/(origin_id)[/branch/(branch_name)][/ts/(timestamp)]/log Show the commit log for a revision, searching for it based on software origin, branch name, and/or visit timestamp. This endpoint behaves like :http:get:`/api/1/revision/(sha1_git)[/prev/(prev_sha1s)]/log/`, but operates on the revision that has been found at a given software origin, close to a given point in time, pointed by a given branch. :param int origin_id: a software origin identifier :param string branch_name: optional parameter specifying a fully-qualified branch name associated to the software origin, e.g., "refs/heads/master". Defaults to the HEAD branch. :param string timestamp: optional parameter specifying a timestamp close to which the revision pointed by the given branch should be looked up. 
The timestamp can be expressed either as an ISO date or as a Unix one (in UTC). Defaults to now. :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>jsonarr object author: information about the author of the revision :>jsonarr string author_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the author of the revision :>jsonarr object committer: information about the committer of the revision :>jsonarr string committer_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the committer of the revision :>jsonarr string committer_date: ISO representation of the commit date (in UTC) :>jsonarr string date: ISO representation of the revision date (in UTC) :>jsonarr string directory: the unique identifier that revision points to :>jsonarr string directory_url: link to :http:get:`/api/1/directory/(sha1_git)/[(path)/]` to get information about the directory associated to the revision :>jsonarr string id: the revision unique identifier :>jsonarr boolean merge: whether or not the revision corresponds to a merge commit :>jsonarr string message: the message associated to the revision :>jsonarr array parents: the parents of the revision, i.e. the previous revisions that head directly to it, each entry of that array contains an unique parent revision identifier but also a link to :http:get:`/api/1/revision/(sha1_git)/` to get more information about it :>jsonarr string type: the type of the revision **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 404: no revision matching the given criteria could be found in the archive **Example:** .. 
parsed-literal:: :swh_web_api:`revision/origin/723566/ts/2016-01-17T00:00:00+00:00/log/` """ # noqa result = {} per_page = int(request.query_params.get('per_page', '10')) def lookup_revision_log_by_with_limit(o_id, br, ts, limit=per_page+1): return service.lookup_revision_log_by(o_id, br, ts, limit) error_msg = 'No revision matching origin %s ' % origin_id error_msg += ', branch name %s' % branch_name error_msg += (' and time stamp %s.' % ts) if ts else '.' rev_get = api_lookup( lookup_revision_log_by_with_limit, int(origin_id), branch_name, ts, notfound_msg=error_msg, enrich_fn=utils.enrich_revision) nb_rev = len(rev_get) if nb_rev == per_page+1: revisions = rev_get[:-1] last_sha1_git = rev_get[-1]['id'] params = {k: v for k, v in {'origin_id': origin_id, 'branch_name': branch_name, 'ts': ts, }.items() if v is not None} query_params = {} query_params['sha1_git'] = last_sha1_git if request.query_params.get('per_page'): query_params['per_page'] = per_page result['headers'] = { - 'link-next': reverse('api-revision-origin-log', url_args=params, + 'link-next': reverse('api-1-revision-origin-log', url_args=params, query_params=query_params) } else: revisions = rev_get result.update({'results': revisions}) return result @api_route(r'/revision/origin/(?P[0-9]+)/directory/', - 'api-revision-origin-directory') + 'api-1-revision-origin-directory') @api_route(r'/revision/origin/(?P[0-9]+)/directory/(?P.+)/', - 'api-revision-origin-directory') + 'api-1-revision-origin-directory') @api_route(r'/revision/origin/(?P[0-9]+)' r'/branch/(?P.+)/directory/', - 'api-revision-origin-directory') + 'api-1-revision-origin-directory') @api_route(r'/revision/origin/(?P[0-9]+)' r'/branch/(?P.+)/ts/(?P.+)/directory/', - 'api-revision-origin-directory') + 'api-1-revision-origin-directory') @api_route(r'/revision/origin/(?P[0-9]+)' r'/branch/(?P.+)/directory/(?P.+)/', - 'api-revision-origin-directory') + 'api-1-revision-origin-directory') @api_route(r'/revision/origin/(?P[0-9]+)' 
r'/branch/(?P.+)/ts/(?P.+)' r'/directory/(?P.+)/', - 'api-revision-origin-directory') + 'api-1-revision-origin-directory') @api_doc('/revision/origin/directory/', tags=['hidden']) def api_directory_through_revision_origin(request, origin_id, branch_name='HEAD', ts=None, path=None, with_data=False): """ Display directory or content information through a revision identified by origin/branch/timestamp. """ if ts: ts = parse_timestamp(ts) return _revision_directory_by({'origin_id': int(origin_id), 'branch_name': branch_name, 'ts': ts }, path, request.path, with_data=with_data) @api_route(r'/revision/origin/(?P[0-9]+)/', - 'api-revision-origin') + 'api-1-revision-origin') @api_route(r'/revision/origin/(?P[0-9]+)' r'/branch/(?P.+)/', - 'api-revision-origin') + 'api-1-revision-origin') @api_route(r'/revision/origin/(?P[0-9]+)' r'/branch/(?P.+)/ts/(?P.+)/', - 'api-revision-origin') + 'api-1-revision-origin') @api_route(r'/revision/origin/(?P[0-9]+)/ts/(?P.+)/', - 'api-revision-origin') + 'api-1-revision-origin') @api_doc('/revision/origin/') def api_revision_with_origin(request, origin_id, branch_name='HEAD', ts=None): """ .. http:get:: /api/1/revision/origin/(origin_id)/[branch/(branch_name)/][ts/(timestamp)/] Get information about a revision, searching for it based on software origin, branch name, and/or visit timestamp. This endpoint behaves like :http:get:`/api/1/revision/(sha1_git)/`, but operates on the revision that has been found at a given software origin, close to a given point in time, pointed by a given branch. :param int origin_id: a software origin identifier :param string branch_name: optional parameter specifying a fully-qualified branch name associated to the software origin, e.g., "refs/heads/master". Defaults to the HEAD branch. :param string timestamp: optional parameter specifying a timestamp close to which the revision pointed by the given branch should be looked up. The timestamp can be expressed either as an ISO date or as a Unix one (in UTC). 
Defaults to now. :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json object author: information about the author of the revision :>json string author_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the author of the revision :>json object committer: information about the committer of the revision :>json string committer_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the committer of the revision :>json string committer_date: ISO representation of the commit date (in UTC) :>json string date: ISO representation of the revision date (in UTC) :>json string directory: the unique identifier that revision points to :>json string directory_url: link to :http:get:`/api/1/directory/(sha1_git)/[(path)/]` to get information about the directory associated to the revision :>json string id: the revision unique identifier :>json boolean merge: whether or not the revision corresponds to a merge commit :>json string message: the message associated to the revision :>json array parents: the parents of the revision, i.e. the previous revisions that head directly to it, each entry of that array contains an unique parent revision identifier but also a link to :http:get:`/api/1/revision/(sha1_git)/` to get more information about it :>json string type: the type of the revision **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 404: no revision matching the given criteria could be found in the archive **Example:** .. 
parsed-literal:: :swh_web_api:`revision/origin/13706355/branch/refs/heads/2.7/` """ # noqa return api_lookup( service.lookup_revision_by, int(origin_id), branch_name, ts, notfound_msg=('Revision with (origin_id: {}, branch_name: {}' ', ts: {}) not found.'.format(origin_id, branch_name, ts)), enrich_fn=utils.enrich_revision) -@api_route(r'/revision/(?P[0-9a-f]+)/', 'api-revision', +@api_route(r'/revision/(?P[0-9a-f]+)/', 'api-1-revision', checksum_args=['sha1_git']) @api_doc('/revision/') def api_revision(request, sha1_git): """ .. http:get:: /api/1/revision/(sha1_git)/ Get information about a revision in the archive. Revisions are identified by **sha1** checksums, compatible with Git commit identifiers. See :func:`swh.model.identifiers.revision_identifier` in our data model module for details about how they are computed. :param string sha1_git: hexadecimal representation of the revision **sha1_git** identifier :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json object author: information about the author of the revision :>json string author_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the author of the revision :>json object committer: information about the committer of the revision :>json string committer_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the committer of the revision :>json string committer_date: ISO representation of the commit date (in UTC) :>json string date: ISO representation of the revision date (in UTC) :>json string directory: the unique identifier that revision points to :>json string directory_url: link to :http:get:`/api/1/directory/(sha1_git)/[(path)/]` to get information about the directory associated to the revision :>json string id: the revision unique identifier :>json boolean merge: whether or not the revision corresponds 
to a merge commit :>json string message: the message associated to the revision :>json array parents: the parents of the revision, i.e. the previous revisions that head directly to it, each entry of that array contains an unique parent revision identifier but also a link to :http:get:`/api/1/revision/(sha1_git)/` to get more information about it :>json string type: the type of the revision **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **sha1_git** value has been provided :statuscode 404: requested revision can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/` """ # noqa return api_lookup( service.lookup_revision, sha1_git, notfound_msg='Revision with sha1_git {} not found.'.format(sha1_git), enrich_fn=utils.enrich_revision) @api_route(r'/revision/(?P[0-9a-f]+)/raw/', - 'api-revision-raw-message', checksum_args=['sha1_git']) + 'api-1-revision-raw-message', checksum_args=['sha1_git']) @api_doc('/revision/raw/', tags=['hidden'], handle_response=True) def api_revision_raw_message(request, sha1_git): """Return the raw data of the message of revision identified by sha1_git """ raw = service.lookup_revision_message(sha1_git) response = HttpResponse(raw['message'], content_type='application/octet-stream') response['Content-disposition'] = \ 'attachment;filename=rev_%s_raw' % sha1_git return response @api_route(r'/revision/(?P[0-9a-f]+)/directory/', - 'api-revision-directory', checksum_args=['sha1_git']) + 'api-1-revision-directory', checksum_args=['sha1_git']) @api_route(r'/revision/(?P[0-9a-f]+)/directory/(?P.+)/', - 'api-revision-directory', checksum_args=['sha1_git']) + 'api-1-revision-directory', checksum_args=['sha1_git']) @api_doc('/revision/directory/') def api_revision_directory(request, sha1_git, dir_path=None, with_data=False): """ .. 
http:get:: /api/1/revision/(sha1_git)/directory/[(path)/] Get information about directory (entry) objects associated to revisions. Each revision is associated to a single "root" directory. This endpoint behaves like :http:get:`/api/1/directory/(sha1_git)/[(path)/]`, but operates on the root directory associated to a given revision. :param string sha1_git: hexadecimal representation of the revision **sha1_git** identifier :param string path: optional parameter to get information about the directory entry pointed by that relative path :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json array content: directory entries as returned by :http:get:`/api/1/directory/(sha1_git)/[(path)/]` :>json string path: path of directory from the revision root one :>json string revision: the unique revision identifier :>json string type: the type of the directory **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **sha1_git** value has been provided :statuscode 404: requested revision can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`revision/f1b94134a4b879bc55c3dacdb496690c8ebdc03f/directory/` """ # noqa return _revision_directory_by({'sha1_git': sha1_git}, dir_path, request.path, with_data=with_data) -@api_route(r'/revision/(?P[0-9a-f]+)/log/', 'api-revision-log', +@api_route(r'/revision/(?P[0-9a-f]+)/log/', 'api-1-revision-log', checksum_args=['sha1_git']) @api_route(r'/revision/(?P[0-9a-f]+)' r'/prev/(?P[0-9a-f]*/*)/log/', - 'api-revision-log', checksum_args=['sha1_git', 'prev_sha1s']) + 'api-1-revision-log', checksum_args=['sha1_git', 'prev_sha1s']) @api_doc('/revision/log/') def api_revision_log(request, sha1_git, prev_sha1s=None): """ .. 
http:get:: /api/1/revision/(sha1_git)[/prev/(prev_sha1s)]/log/ Get a list of all revisions heading to a given one, in other words show the commit log. :param string sha1_git: hexadecimal representation of the revision **sha1_git** identifier :param string prev_sha1s: optional parameter representing the navigation breadcrumbs (descendant revisions previously visited). If multiple values, use / as delimiter. If provided, revisions information will be added at the beginning of the returned list. :query int per_page: number of elements in the returned list, for pagination purpose :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :resheader Link: indicates that a subsequent result page is available and contains the url pointing to it :>jsonarr object author: information about the author of the revision :>jsonarr string author_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the author of the revision :>jsonarr object committer: information about the committer of the revision :>jsonarr string committer_url: link to :http:get:`/api/1/person/(person_id)/` to get information about the committer of the revision :>jsonarr string committer_date: ISO representation of the commit date (in UTC) :>jsonarr string date: ISO representation of the revision date (in UTC) :>jsonarr string directory: the unique identifier that revision points to :>jsonarr string directory_url: link to :http:get:`/api/1/directory/(sha1_git)/[(path)/]` to get information about the directory associated to the revision :>jsonarr string id: the revision unique identifier :>jsonarr boolean merge: whether or not the revision corresponds to a merge commit :>jsonarr string message: the message associated to the revision :>jsonarr array parents: the parents of the revision, i.e. 
the previous revisions that head directly to it, each entry of that array contains an unique parent revision identifier but also a link to :http:get:`/api/1/revision/(sha1_git)/` to get more information about it :>jsonarr string type: the type of the revision **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid **sha1_git** value has been provided :statuscode 404: requested revision can not be found in the archive **Example:** .. parsed-literal:: :swh_web_api:`revision/e1a315fa3fa734e2a6154ed7b5b9ae0eb8987aad/log/` """ # noqa result = {} per_page = int(request.query_params.get('per_page', '10')) def lookup_revision_log_with_limit(s, limit=per_page+1): return service.lookup_revision_log(s, limit) error_msg = 'Revision with sha1_git %s not found.' % sha1_git rev_get = api_lookup(lookup_revision_log_with_limit, sha1_git, notfound_msg=error_msg, enrich_fn=utils.enrich_revision) nb_rev = len(rev_get) if nb_rev == per_page+1: rev_backward = rev_get[:-1] new_last_sha1 = rev_get[-1]['id'] query_params = {} if request.query_params.get('per_page'): query_params['per_page'] = per_page result['headers'] = { - 'link-next': reverse('api-revision-log', + 'link-next': reverse('api-1-revision-log', url_args={'sha1_git': new_last_sha1}, query_params=query_params) } else: rev_backward = rev_get if not prev_sha1s: # no nav breadcrumbs, so we're done revisions = rev_backward else: rev_forward_ids = prev_sha1s.split('/') rev_forward = api_lookup( service.lookup_revision_multiple, rev_forward_ids, notfound_msg=error_msg, enrich_fn=utils.enrich_revision) revisions = rev_forward + rev_backward result.update({ 'results': revisions }) return result diff --git a/swh/web/api/views/snapshot.py b/swh/web/api/views/snapshot.py index 5b2e0035..2979da4a 100644 --- a/swh/web/api/views/snapshot.py +++ b/swh/web/api/views/snapshot.py @@ -1,118 +1,118 @@ # Copyright (C) 2018 The Software Heritage developers # See 
the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.web.common import service from swh.web.common.utils import reverse from swh.web.config import get_config from swh.web.api.apidoc import api_doc from swh.web.api import utils from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -@api_route(r'/snapshot/(?P[0-9a-f]+)/', 'api-snapshot', +@api_route(r'/snapshot/(?P[0-9a-f]+)/', 'api-1-snapshot', checksum_args=['snapshot_id']) @api_doc('/snapshot/') def api_snapshot(request, snapshot_id): """ .. http:get:: /api/1/snapshot/(snapshot_id)/ Get information about a snapshot in the archive. A snapshot is a set of named branches, which are pointers to objects at any level of the Software Heritage DAG. It represents a full picture of an origin at a given time. As well as pointing to other objects in the Software Heritage DAG, branches can also be aliases, in which case their target is the name of another branch in the same snapshot, or dangling, in which case the target is unknown. A snapshot identifier is a salted sha1. See :func:`swh.model.identifiers.snapshot_identifier` in our data model module for details about how they are computed. 
:param sha1 snapshot_id: a snapshot identifier :query str branches_from: optional parameter used to skip branches whose name is lesser than it before returning them :query int branches_count: optional parameter used to restrain the amount of returned branches (default to 1000) :query str target_types: optional comma separated list parameter used to filter the target types of branch to return (possible values that can be contained in that list are ``content``, ``directory``, ``revision``, ``release``, ``snapshot`` or ``alias``) :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :resheader Link: indicates that a subsequent result page is available and contains the url pointing to it :>json object branches: object containing all branches associated to the snapshot, for each of them the associated target type and id are given but also a link to get information about that target :>json string id: the unique identifier of the snapshot **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid snapshot identifier has been provided :statuscode 404: requested snapshot can not be found in the archive **Example:** .. 
parsed-literal:: :swh_web_api:`snapshot/6a3a2cf0b2b90ce7ae1cf0a221ed68035b686f5a/` """ # noqa def _enrich_snapshot(snapshot): s = snapshot.copy() if 'branches' in s: s['branches'] = { k: utils.enrich_object(v) if v else None for k, v in s['branches'].items() } for k, v in s['branches'].items(): if v and v['target_type'] == 'alias': if v['target'] in s['branches']: branch_alias = s['branches'][v['target']] if branch_alias: v['target_url'] = branch_alias['target_url'] else: snp = \ service.lookup_snapshot(s['id'], branches_from=v['target'], branches_count=1) if snp and v['target'] in snp['branches']: branch = snp['branches'][v['target']] branch = utils.enrich_object(branch) v['target_url'] = branch['target_url'] return s snapshot_content_max_size = get_config()['snapshot_content_max_size'] branches_from = request.GET.get('branches_from', '') branches_count = int(request.GET.get('branches_count', snapshot_content_max_size)) target_types = request.GET.get('target_types', None) target_types = target_types.split(',') if target_types else None results = api_lookup( service.lookup_snapshot, snapshot_id, branches_from, branches_count, target_types, notfound_msg='Snapshot with id {} not found.'.format(snapshot_id), enrich_fn=_enrich_snapshot) response = {'results': results, 'headers': {}} if results['next_branch'] is not None: response['headers']['link-next'] = \ - reverse('api-snapshot', + reverse('api-1-snapshot', url_args={'snapshot_id': snapshot_id}, query_params={'branches_from': results['next_branch'], 'branches_count': branches_count, 'target_types': target_types}) return response diff --git a/swh/web/api/views/stat.py b/swh/web/api/views/stat.py index 4b31a2d9..5ff85de2 100644 --- a/swh/web/api/views/stat.py +++ b/swh/web/api/views/stat.py @@ -1,47 +1,47 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See 
top-level LICENSE file for more information from swh.web.common import service from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route -@api_route(r'/stat/counters/', 'api-stat-counters') +@api_route(r'/stat/counters/', 'api-1-stat-counters') @api_doc('/stat/counters/', noargs=True) def api_stats(request): """ .. http:get:: /api/1/stat/counters/ Get statistics about the content of the archive. :>json number content: current number of content objects (aka files) in the archive :>json number directory: current number of directory objects in the archive :>json number origin: current number of software origins (an origin is a "place" where code source can be found, e.g. a git repository, a tarball, ...) in the archive :>json number origin_visit: current number of visits on software origins to fill the archive :>json number person: current number of persons (code source authors or committers) in the archive :>json number release: current number of releases objects in the archive :>json number revision: current number of revision objects (aka commits) in the archive :>json number skipped_content: current number of content objects (aka files) which where not inserted in the archive :>json number snapshot: current number of snapshot objects (aka set of named branches) in the archive :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error **Example:** .. 
parsed-literal:: :swh_web_api:`stat/counters/` """ # noqa return service.stat_counters() diff --git a/swh/web/api/views/utils.py b/swh/web/api/views/utils.py index 583cf3f7..00645b67 100644 --- a/swh/web/api/views/utils.py +++ b/swh/web/api/views/utils.py @@ -1,73 +1,73 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from rest_framework.response import Response from rest_framework.decorators import api_view from types import GeneratorType from swh.web.common.exc import NotFoundExc from swh.web.api.apiurls import APIUrls, api_route def api_lookup(lookup_fn, *args, notfound_msg='Object not found', enrich_fn=None): """ Capture a redundant behavior of: - looking up the backend with a criteria (be it an identifier or checksum) passed to the function lookup_fn - if nothing is found, raise an NotFoundExc exception with error message notfound_msg. - Otherwise if something is returned: - either as list, map or generator, map the enrich_fn function to it and return the resulting data structure as list. - either as dict and pass to enrich_fn and return the dict enriched. Args: - lookup_fn: function expects one criteria and optional supplementary \*args. - notfound_msg: if nothing matching the criteria is found, raise NotFoundExc with this error message. - enrich_fn: Function to use to enrich the result returned by lookup_fn. Default to the identity function if not provided. - \*args: supplementary arguments to pass to lookup_fn. Raises: NotFoundExp or whatever `lookup_fn` raises. 
""" # noqa if enrich_fn is None: enrich_fn = (lambda x: x) res = lookup_fn(*args) if res is None: raise NotFoundExc(notfound_msg) if isinstance(res, (map, list, GeneratorType)): return [enrich_fn(x) for x in res] return enrich_fn(res) @api_view(['GET', 'HEAD']) def api_home(request): return Response({}, template_name='api/api.html') -APIUrls.add_url_pattern(r'^$', api_home, view_name='api-homepage') +APIUrls.add_url_pattern(r'^$', api_home, view_name='api-1-homepage') -@api_route(r'/', 'api-endpoints') +@api_route(r'/', 'api-1-endpoints') def api_endpoints(request): """Display the list of opened api endpoints. """ routes = APIUrls.get_app_endpoints().copy() for route, doc in routes.items(): doc['doc_intro'] = doc['docstring'].split('\n\n')[0] # Return a list of routes with consistent ordering env = { 'doc_routes': sorted(routes.items()) } return Response(env, template_name="api/endpoints.html") diff --git a/swh/web/api/views/vault.py b/swh/web/api/views/vault.py index 22c529e6..80f7d565 100644 --- a/swh/web/api/views/vault.py +++ b/swh/web/api/views/vault.py @@ -1,214 +1,214 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from django.http import HttpResponse from django.views.decorators.cache import never_cache from swh.model import hashutil from swh.web.common import service, query from swh.web.common.utils import reverse from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup # XXX: a bit spaghetti. Would be better with class-based views. 
def _dispatch_cook_progress(request, obj_type, obj_id): hex_id = hashutil.hash_to_hex(obj_id) object_name = obj_type.split('_')[0].title() if request.method == 'GET': return api_lookup( service.vault_progress, obj_type, obj_id, notfound_msg=("{} '{}' was never requested." .format(object_name, hex_id))) elif request.method == 'POST': email = request.POST.get('email', request.GET.get('email', None)) return api_lookup( service.vault_cook, obj_type, obj_id, email, notfound_msg=("{} '{}' not found." .format(object_name, hex_id))) @api_route(r'/vault/directory/(?P[0-9a-f]+)/', - 'api-vault-cook-directory', methods=['GET', 'POST'], + 'api-1-vault-cook-directory', methods=['GET', 'POST'], checksum_args=['dir_id'], throttle_scope='swh_vault_cooking') @never_cache @api_doc('/vault/directory/') def api_vault_cook_directory(request, dir_id): """ .. http:get:: /api/1/vault/directory/(dir_id)/ .. http:post:: /api/1/vault/directory/(dir_id)/ Request the cooking of an archive for a directory or check its cooking status. That endpoint enables to create a vault cooking task for a directory through a POST request or check the status of a previously created one through a GET request. Once the cooking task has been executed, the resulting archive can be downloaded using the dedicated endpoint :http:get:`/api/1/vault/directory/(dir_id)/raw/`. 
Then to extract the cooked directory in the current one, use:: $ tar xvf path/to/directory.tar.gz :param string dir_id: the directory's sha1 identifier :query string email: e-mail to notify when the archive is ready :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json string fetch_url: the url from which to download the archive once it has been cooked (see :http:get:`/api/1/vault/directory/(dir_id)/raw/`) :>json string obj_type: the type of object to cook (directory or revision) :>json string progress_message: message describing the cooking task progress :>json number id: the cooking task id :>json string status: the cooking task status (either **new**, **pending**, **done** or **failed**) :>json string obj_id: the identifier of the object to cook **Allowed HTTP Methods:** :http:method:`get`, :http:method:`post`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid directory identifier has been provided :statuscode 404: requested directory can not be found in the archive """ # noqa _, obj_id = query.parse_hash_with_algorithms_or_throws( dir_id, ['sha1'], 'Only sha1_git is supported.') res = _dispatch_cook_progress(request, 'directory', obj_id) - res['fetch_url'] = reverse('api-vault-fetch-directory', + res['fetch_url'] = reverse('api-1-vault-fetch-directory', url_args={'dir_id': dir_id}) return res @api_route(r'/vault/directory/(?P[0-9a-f]+)/raw/', - 'api-vault-fetch-directory', + 'api-1-vault-fetch-directory', checksum_args=['dir_id']) @api_doc('/vault/directory/raw/', handle_response=True) def api_vault_fetch_directory(request, dir_id): """ .. http:get:: /api/1/vault/directory/(dir_id)/raw/ Fetch the cooked archive for a directory. See :http:get:`/api/1/vault/directory/(dir_id)/` to get more details on directory cooking. 
:param string dir_id: the directory's sha1 identifier :resheader Content-Type: application/octet-stream **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid directory identifier has been provided :statuscode 404: requested directory can not be found in the archive """ # noqa _, obj_id = query.parse_hash_with_algorithms_or_throws( dir_id, ['sha1'], 'Only sha1_git is supported.') res = api_lookup( service.vault_fetch, 'directory', obj_id, notfound_msg="Directory with ID '{}' not found.".format(dir_id)) fname = '{}.tar.gz'.format(dir_id) response = HttpResponse(res, content_type='application/gzip') response['Content-disposition'] = 'attachment; filename={}'.format(fname) return response @api_route(r'/vault/revision/(?P[0-9a-f]+)/gitfast/', - 'api-vault-cook-revision_gitfast', methods=['GET', 'POST'], + 'api-1-vault-cook-revision_gitfast', methods=['GET', 'POST'], checksum_args=['rev_id'], throttle_scope='swh_vault_cooking') @never_cache @api_doc('/vault/revision/gitfast/') def api_vault_cook_revision_gitfast(request, rev_id): """ .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/ .. http:post:: /api/1/vault/revision/(rev_id)/gitfast/ Request the cooking of a gitfast archive for a revision or check its cooking status. That endpoint enables to create a vault cooking task for a revision through a POST request or check the status of a previously created one through a GET request. Once the cooking task has been executed, the resulting gitfast archive can be downloaded using the dedicated endpoint :http:get:`/api/1/vault/revision/(rev_id)/gitfast/raw/`. 
Then to import the revision in the current directory, use:: $ git init $ zcat path/to/revision.gitfast.gz | git fast-import $ git checkout HEAD :param string rev_id: the revision's sha1 identifier :query string email: e-mail to notify when the gitfast archive is ready :reqheader Accept: the requested response content type, either ``application/json`` (default) or ``application/yaml`` :resheader Content-Type: this depends on :http:header:`Accept` header of request :>json string fetch_url: the url from which to download the archive once it has been cooked (see :http:get:`/api/1/vault/revision/(rev_id)/gitfast/raw/`) :>json string obj_type: the type of object to cook (directory or revision) :>json string progress_message: message describing the cooking task progress :>json number id: the cooking task id :>json string status: the cooking task status (new/pending/done/failed) :>json string obj_id: the identifier of the object to cook **Allowed HTTP Methods:** :http:method:`get`, :http:method:`post`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid revision identifier has been provided :statuscode 404: requested revision can not be found in the archive """ # noqa _, obj_id = query.parse_hash_with_algorithms_or_throws( rev_id, ['sha1'], 'Only sha1_git is supported.') res = _dispatch_cook_progress(request, 'revision_gitfast', obj_id) - res['fetch_url'] = reverse('api-vault-fetch-revision_gitfast', + res['fetch_url'] = reverse('api-1-vault-fetch-revision_gitfast', url_args={'rev_id': rev_id}) return res @api_route(r'/vault/revision/(?P[0-9a-f]+)/gitfast/raw/', - 'api-vault-fetch-revision_gitfast', + 'api-1-vault-fetch-revision_gitfast', checksum_args=['rev_id']) @api_doc('/vault/revision/gitfast/raw/', handle_response=True) def api_vault_fetch_revision_gitfast(request, rev_id): """ .. http:get:: /api/1/vault/revision/(rev_id)/gitfast/raw/ Fetch the cooked gitfast archive for a revision. 
See :http:get:`/api/1/vault/revision/(rev_id)/gitfast/` to get more details on directory cooking. :param string rev_id: the revision's sha1 identifier :resheader Content-Type: application/octet-stream **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options` :statuscode 200: no error :statuscode 400: an invalid revision identifier has been provided :statuscode 404: requested revision can not be found in the archive """ # noqa _, obj_id = query.parse_hash_with_algorithms_or_throws( rev_id, ['sha1'], 'Only sha1_git is supported.') res = api_lookup( service.vault_fetch, 'revision_gitfast', obj_id, notfound_msg="Revision with ID '{}' not found.".format(rev_id)) fname = '{}.gitfast.gz'.format(rev_id) response = HttpResponse(res, content_type='application/gzip') response['Content-disposition'] = 'attachment; filename={}'.format(fname) return response diff --git a/swh/web/assets/src/bundles/browse/origin-search.js b/swh/web/assets/src/bundles/browse/origin-search.js index 83e36358..8f40ee96 100644 --- a/swh/web/assets/src/bundles/browse/origin-search.js +++ b/swh/web/assets/src/bundles/browse/origin-search.js @@ -1,238 +1,238 @@ /** * Copyright (C) 2018 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {heapsPermute} from 'utils/heaps-permute'; import {handleFetchError} from 'utils/functions'; let originPatterns; let perPage = 100; let limit = perPage * 2; let offset = 0; let currentData = null; let inSearch = false; function fixTableRowsStyle() { setTimeout(() => { $('#origin-search-results tbody tr').removeAttr('style'); }); } function clearOriginSearchResultsTable() { $('#origin-search-results tbody tr').remove(); } function populateOriginSearchResultsTable(data, offset) { let localOffset = offset % limit; if (data.length > 0) { 
$('#swh-origin-search-results').show(); $('#swh-no-result').hide(); clearOriginSearchResultsTable(); let table = $('#origin-search-results tbody'); for (let i = localOffset; i < localOffset + perPage && i < data.length; ++i) { let elem = data[i]; let browseUrl = Urls.browse_origin(elem.url); let tableRow = ``; tableRow += `${elem.type}`; tableRow += `${encodeURI(elem.url)}`; tableRow += ``; tableRow += ''; table.append(tableRow); // get async latest visit snapshot and update visit status icon let latestSnapshotUrl = Urls.browse_origin_latest_snapshot(elem.id); fetch(latestSnapshotUrl) .then(response => response.json()) .then(data => { let originId = elem.id; $(`#visit-status-origin-${originId}`).children().remove(); if (data) { $(`#visit-status-origin-${originId}`).append(''); } else { $(`#visit-status-origin-${originId}`).append(''); if ($('#swh-filter-empty-visits').prop('checked')) { $(`#origin-${originId}`).remove(); } } }); } fixTableRowsStyle(); } else { $('#swh-origin-search-results').hide(); $('#swh-no-result').text('No origins matching the search criteria were found.'); $('#swh-no-result').show(); } if (data.length - localOffset < perPage || (data.length < limit && (localOffset + perPage) === data.length)) { $('#origins-next-results-button').addClass('disabled'); } else { $('#origins-next-results-button').removeClass('disabled'); } if (offset > 0) { $('#origins-prev-results-button').removeClass('disabled'); } else { $('#origins-prev-results-button').addClass('disabled'); } inSearch = false; setTimeout(() => { window.scrollTo(0, 0); }); } function escapeStringRegexp(str) { let matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g; return str.replace(matchOperatorsRe, '\\\\\\$&'); } function searchOrigins(patterns, limit, searchOffset, offset) { let baseSearchUrl; let searchMetadata = $('#swh-search-origin-metadata').prop('checked'); if (searchMetadata) { - baseSearchUrl = Urls.api_origin_metadata_search() + `?fulltext=${patterns}`; + baseSearchUrl = 
Urls.api_1_origin_metadata_search() + `?fulltext=${patterns}`; } else { originPatterns = patterns; let patternsArray = patterns.trim().replace(/\s+/g, ' ').split(' '); for (let i = 0; i < patternsArray.length; ++i) { patternsArray[i] = escapeStringRegexp(patternsArray[i]); } // url length must be less than 4096 for modern browsers // assuming average word length, 6 is max patternArray.length if (patternsArray.length < 7) { let patternsPermut = []; heapsPermute(patternsArray, p => patternsPermut.push(p.join('.*'))); let regex = patternsPermut.join('|'); baseSearchUrl = Urls.browse_origin_search(regex) + `?regexp=true`; } else { baseSearchUrl = Urls.browse_origin_search(patternsArray.join('.*')) + `?regexp=true`; } } let withVisit = $('#swh-search-origins-with-visit').prop('checked'); let searchUrl = baseSearchUrl + `&limit=${limit}&offset=${searchOffset}&with_visit=${withVisit}`; clearOriginSearchResultsTable(); $('.swh-loading').addClass('show'); fetch(searchUrl) .then(handleFetchError) .then(response => response.json()) .then(data => { currentData = data; $('.swh-loading').removeClass('show'); populateOriginSearchResultsTable(data, offset); }) .catch(response => { $('.swh-loading').removeClass('show'); inSearch = false; $('#swh-origin-search-results').hide(); $('#swh-no-result').text(`Error ${response.status}: ${response.statusText}`); $('#swh-no-result').show(); }); } function doSearch() { $('#swh-no-result').hide(); let patterns = $('#origins-url-patterns').val(); offset = 0; inSearch = true; // first try to resolve a swh persistent identifier - let resolvePidUrl = Urls.api_resolve_swh_pid(patterns); + let resolvePidUrl = Urls.api_1_resolve_swh_pid(patterns); fetch(resolvePidUrl) .then(handleFetchError) .then(response => response.json()) .then(data => { // pid has been successfully resolved, // so redirect to browse page window.location = data.browse_url; }) .catch(response => { // pid resolving failed if (patterns.startsWith('swh:')) { // display a useful error 
message if the input // looks like a swh pid response.json().then(data => { $('#swh-origin-search-results').hide(); $('.swh-search-pagination').hide(); $('#swh-no-result').text(data.reason); $('#swh-no-result').show(); }); } else { // otherwise, proceed with origins search $('#swh-origin-search-results').show(); $('.swh-search-pagination').show(); searchOrigins(patterns, limit, offset, offset); } }); } export function initOriginSearch() { $(document).ready(() => { $('#swh-search-origins').submit(event => { event.preventDefault(); let patterns = $('#origins-url-patterns').val().trim(); let withVisit = $('#swh-search-origins-with-visit').prop('checked'); let withContent = $('#swh-filter-empty-visits').prop('checked'); let searchMetadata = $('#swh-search-origin-metadata').prop('checked'); let queryParameters = '?q=' + encodeURIComponent(patterns); if (withVisit) { queryParameters += '&with_visit'; } if (withContent) { queryParameters += '&with_content'; } if (searchMetadata) { queryParameters += '&search_metadata'; } // Update the url, triggering page reload and effective search window.location.search = queryParameters; }); $('#origins-next-results-button').click(event => { if ($('#origins-next-results-button').hasClass('disabled') || inSearch) { return; } inSearch = true; offset += perPage; if (!currentData || (offset >= limit && offset % limit === 0)) { searchOrigins(originPatterns, limit, offset, offset); } else { populateOriginSearchResultsTable(currentData, offset); } event.preventDefault(); }); $('#origins-prev-results-button').click(event => { if ($('#origins-prev-results-button').hasClass('disabled') || inSearch) { return; } inSearch = true; offset -= perPage; if (!currentData || (offset > 0 && (offset + perPage) % limit === 0)) { searchOrigins(originPatterns, limit, (offset + perPage) - limit, offset); } else { populateOriginSearchResultsTable(currentData, offset); } event.preventDefault(); }); $(document).on('shown.bs.tab', 'a[data-toggle="tab"]', e => { if 
(e.currentTarget.text.trim() === 'Search') { fixTableRowsStyle(); } }); let urlParams = new URLSearchParams(window.location.search); let query = urlParams.get('q'); let withVisit = urlParams.has('with_visit'); let withContent = urlParams.has('with_content'); let searchMetadata = urlParams.has('search_metadata'); if (query) { $('#origins-url-patterns').val(query); $('#swh-search-origins-with-visit').prop('checked', withVisit); $('#swh-filter-empty-visits').prop('checked', withContent); $('#swh-search-origin-metadata').prop('checked', searchMetadata); doSearch(); } }); } diff --git a/swh/web/assets/src/bundles/vault/vault-create-tasks.js b/swh/web/assets/src/bundles/vault/vault-create-tasks.js index c3f673b9..7ba188e0 100644 --- a/swh/web/assets/src/bundles/vault/vault-create-tasks.js +++ b/swh/web/assets/src/bundles/vault/vault-create-tasks.js @@ -1,80 +1,80 @@ /** * Copyright (C) 2018-2019 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError, csrfPost} from 'utils/functions'; function addVaultCookingTask(cookingTask) { let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); if (!vaultCookingTasks) { vaultCookingTasks = []; } if (vaultCookingTasks.find(val => { return val.object_type === cookingTask.object_type && val.object_id === cookingTask.object_id; }) === undefined) { let cookingUrl; if (cookingTask.object_type === 'directory') { - cookingUrl = Urls.api_vault_cook_directory(cookingTask.object_id); + cookingUrl = Urls.api_1_vault_cook_directory(cookingTask.object_id); } else { - cookingUrl = Urls.api_vault_cook_revision_gitfast(cookingTask.object_id); + cookingUrl = Urls.api_1_vault_cook_revision_gitfast(cookingTask.object_id); } if (cookingTask.email) { cookingUrl += '?email=' + cookingTask.email; } csrfPost(cookingUrl) 
.then(handleFetchError) .then(() => { vaultCookingTasks.push(cookingTask); localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); $('#vault-cook-directory-modal').modal('hide'); $('#vault-cook-revision-modal').modal('hide'); window.location = Urls.browse_vault(); }) .catch(() => { $('#vault-cook-directory-modal').modal('hide'); $('#vault-cook-revision-modal').modal('hide'); }); } else { window.location = Urls.browse_vault(); } } function validateEmail(email) { let re = /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; return re.test(String(email).toLowerCase()); } export function cookDirectoryArchive(directoryId) { let email = $('#swh-vault-directory-email').val().trim(); if (!email || validateEmail(email)) { let cookingTask = { 'object_type': 'directory', 'object_id': directoryId, 'email': email, 'status': 'new' }; addVaultCookingTask(cookingTask); } else { $('#invalid-email-modal').modal('show'); } } export function cookRevisionArchive(revisionId) { let email = $('#swh-vault-revision-email').val().trim(); if (!email || validateEmail(email)) { let cookingTask = { 'object_type': 'revision', 'object_id': revisionId, 'email': email, 'status': 'new' }; addVaultCookingTask(cookingTask); } else { $('#invalid-email-modal').modal('show'); } } diff --git a/swh/web/assets/src/bundles/vault/vault-ui.js b/swh/web/assets/src/bundles/vault/vault-ui.js index 5d451c5f..5c1bfc25 100644 --- a/swh/web/assets/src/bundles/vault/vault-ui.js +++ b/swh/web/assets/src/bundles/vault/vault-ui.js @@ -1,252 +1,252 @@ /** * Copyright (C) 2018 The Software Heritage developers * See the AUTHORS file at the top-level directory of this distribution * License: GNU Affero General Public License version 3, or any later version * See top-level LICENSE file for more information */ import {handleFetchError, handleFetchErrors, csrfPost} from 'utils/functions'; let progress = 
`
;`; let pollingInterval = 5000; let checkVaultId; function updateProgressBar(progressBar, cookingTask) { if (cookingTask.status === 'new') { progressBar.css('background-color', 'rgba(128, 128, 128, 0.5)'); } else if (cookingTask.status === 'pending') { progressBar.css('background-color', 'rgba(0, 0, 255, 0.5)'); } else if (cookingTask.status === 'done') { progressBar.css('background-color', '#5cb85c'); } else if (cookingTask.status === 'failed') { progressBar.css('background-color', 'rgba(255, 0, 0, 0.5)'); progressBar.css('background-image', 'none'); } progressBar.text(cookingTask.progress_message || cookingTask.status); if (cookingTask.status === 'new' || cookingTask.status === 'pending') { progressBar.addClass('progress-bar-animated'); } else { progressBar.removeClass('progress-bar-striped'); } } let recookTask; // called when the user wants to download a cooked archive export function fetchCookedObject(fetchUrl) { recookTask = null; // first, check if the link is still available from the vault fetch(fetchUrl) .then(response => { // link is still alive, proceed to download if (response.ok) { $('#vault-fetch-iframe').attr('src', fetchUrl); // link is dead } else { // get the associated cooking task let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); for (let i = 0; i < vaultCookingTasks.length; ++i) { if (vaultCookingTasks[i].fetch_url === fetchUrl) { recookTask = vaultCookingTasks[i]; break; } } // display a modal asking the user if he wants to recook the archive $('#vault-recook-object-modal').modal('show'); } }); } // called when the user wants to recook an archive // for which the download link is not available anymore export function recookObject() { if (recookTask) { // stop cooking tasks status polling clearTimeout(checkVaultId); // build cook request url let cookingUrl; if (recookTask.object_type === 'directory') { - cookingUrl = Urls.api_vault_cook_directory(recookTask.object_id); + cookingUrl = 
Urls.api_1_vault_cook_directory(recookTask.object_id); } else { - cookingUrl = Urls.api_vault_cook_revision_gitfast(recookTask.object_id); + cookingUrl = Urls.api_1_vault_cook_revision_gitfast(recookTask.object_id); } if (recookTask.email) { cookingUrl += '?email=' + recookTask.email; } // request archive cooking csrfPost(cookingUrl) .then(handleFetchError) .then(() => { // update task status recookTask.status = 'new'; let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); for (let i = 0; i < vaultCookingTasks.length; ++i) { if (vaultCookingTasks[i].object_id === recookTask.object_id) { vaultCookingTasks[i] = recookTask; break; } } // save updated tasks to local storage localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); // restart cooking tasks status polling checkVaultCookingTasks(); // hide recook archive modal $('#vault-recook-object-modal').modal('hide'); }) // something went wrong .catch(() => { checkVaultCookingTasks(); $('#vault-recook-object-modal').modal('hide'); }); } } function checkVaultCookingTasks() { let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); if (!vaultCookingTasks || vaultCookingTasks.length === 0) { $('.swh-vault-table tbody tr').remove(); checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval); return; } let cookingTaskRequests = []; let tasks = {}; let currentObjectIds = []; for (let i = 0; i < vaultCookingTasks.length; ++i) { let cookingTask = vaultCookingTasks[i]; currentObjectIds.push(cookingTask.object_id); tasks[cookingTask.object_id] = cookingTask; let cookingUrl; if (cookingTask.object_type === 'directory') { - cookingUrl = Urls.api_vault_cook_directory(cookingTask.object_id); + cookingUrl = Urls.api_1_vault_cook_directory(cookingTask.object_id); } else { - cookingUrl = Urls.api_vault_cook_revision_gitfast(cookingTask.object_id); + cookingUrl = Urls.api_1_vault_cook_revision_gitfast(cookingTask.object_id); } if 
(cookingTask.status !== 'done' && cookingTask.status !== 'failed') { cookingTaskRequests.push(fetch(cookingUrl)); } } $('.swh-vault-table tbody tr').each((i, row) => { let objectId = $(row).find('.vault-object-id').data('object-id'); if ($.inArray(objectId, currentObjectIds) === -1) { $(row).remove(); } }); Promise.all(cookingTaskRequests) .then(handleFetchErrors) .then(responses => Promise.all(responses.map(r => r.json()))) .then(cookingTasks => { let table = $('#vault-cooking-tasks tbody'); for (let i = 0; i < cookingTasks.length; ++i) { let cookingTask = tasks[cookingTasks[i].obj_id]; cookingTask.status = cookingTasks[i].status; cookingTask.fetch_url = cookingTasks[i].fetch_url; cookingTask.progress_message = cookingTasks[i].progress_message; } for (let i = 0; i < vaultCookingTasks.length; ++i) { let cookingTask = vaultCookingTasks[i]; let rowTask = $('#vault-task-' + cookingTask.object_id); let downloadLinkWait = 'Waiting for download link to be available'; if (!rowTask.length) { let browseUrl; if (cookingTask.object_type === 'directory') { browseUrl = Urls.browse_directory(cookingTask.object_id); } else { browseUrl = Urls.browse_revision(cookingTask.object_id); } let progressBar = $.parseHTML(progress)[0]; let progressBarContent = $(progressBar).find('.progress-bar'); updateProgressBar(progressBarContent, cookingTask); let tableRow; if (cookingTask.object_type === 'directory') { tableRow = ``; } else { tableRow = ``; } tableRow += ''; tableRow += `${cookingTask.object_type}`; tableRow += `${cookingTask.object_id}`; tableRow += `${progressBar.outerHTML}`; let downloadLink = downloadLinkWait; if (cookingTask.status === 'done') { downloadLink = `'; } else if (cookingTask.status === 'failed') { downloadLink = ''; } tableRow += `${downloadLink}`; tableRow += ''; table.prepend(tableRow); } else { let progressBar = rowTask.find('.progress-bar'); updateProgressBar(progressBar, cookingTask); let downloadLink = rowTask.find('.vault-dl-link'); if (cookingTask.status === 
'done') { downloadLink[0].innerHTML = `'; } else if (cookingTask.status === 'failed') { downloadLink[0].innerHTML = ''; } else if (cookingTask.status === 'new') { downloadLink[0].innerHTML = downloadLinkWait; } } } localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval); }) .catch(() => {}); } export function initUi() { $('#vault-tasks-toggle-selection').change(event => { $('.vault-task-toggle-selection').prop('checked', event.currentTarget.checked); }); $('#vault-remove-tasks').click(() => { clearTimeout(checkVaultId); let tasksToRemove = []; $('.swh-vault-table tbody tr').each((i, row) => { let taskSelected = $(row).find('.vault-task-toggle-selection').prop('checked'); if (taskSelected) { let objectId = $(row).find('.vault-object-id').data('object-id'); tasksToRemove.push(objectId); $(row).remove(); } }); let vaultCookingTasks = JSON.parse(localStorage.getItem('swh-vault-cooking-tasks')); vaultCookingTasks = $.grep(vaultCookingTasks, task => { return $.inArray(task.object_id, tasksToRemove) === -1; }); localStorage.setItem('swh-vault-cooking-tasks', JSON.stringify(vaultCookingTasks)); $('#vault-tasks-toggle-selection').prop('checked', false); checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval); }); checkVaultId = setTimeout(checkVaultCookingTasks, pollingInterval); $(document).on('shown.bs.tab', 'a[data-toggle="tab"]', e => { if (e.currentTarget.text.trim() === 'Vault') { clearTimeout(checkVaultId); checkVaultCookingTasks(); } }); window.onfocus = () => { clearTimeout(checkVaultId); checkVaultCookingTasks(); }; } diff --git a/swh/web/templates/api/api.html b/swh/web/templates/api/api.html index 8503bf5f..3ceed7ce 100644 --- a/swh/web/templates/api/api.html +++ b/swh/web/templates/api/api.html @@ -1,23 +1,23 @@ {% extends "layout.html" %} {% comment %} Copyright (C) 2015-2018 The Software Heritage developers See the AUTHORS file at the top-level directory 
of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %} {% block title %} Overview – Software Heritage API {% endblock %} {% block navbar-content %}

Web API

{% endblock %} {% block content %} -
+
{% include 'includes/apidoc-header.html' %}
{% endblock %} diff --git a/swh/web/templates/api/endpoints.html b/swh/web/templates/api/endpoints.html index a46ef842..0938a222 100644 --- a/swh/web/templates/api/endpoints.html +++ b/swh/web/templates/api/endpoints.html @@ -1,82 +1,82 @@ {% extends "layout.html" %} {% comment %} Copyright (C) 2015-2018 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %} {% load swh_templatetags %} {% block title %} Endpoints – Software Heritage API {% endblock %} {% block navbar-content %} {% endblock %} {% block content %} -
+

Below you can find a list of the available endpoints for version 1 of the Software Heritage API. For a more general introduction please refer to the API overview.

Endpoints marked "available" are considered stable for the current version of the API; endpoints marked "upcoming" are work in progress that will be stabilized in the near future.

-
+
{% for route, doc in doc_routes %} - + {% if doc.tags|length > 0 %} {% else %} {% endif %} {% endfor %}
Endpoint Description
{% url doc.route_view_name %} {% url doc.route_view_name %} {{ doc.doc_intro | safe_docstring_display | safe }}
{% endblock %} diff --git a/swh/web/templates/homepage.html b/swh/web/templates/homepage.html index a47aa870..2a533495 100644 --- a/swh/web/templates/homepage.html +++ b/swh/web/templates/homepage.html @@ -1,121 +1,121 @@ {% extends "layout.html" %} {% comment %} Copyright (C) 2017-2018 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %} {% load static %} {% block title %}Welcome to the Software Heritage archive{% endblock %} {% block navbar-content %}

Welcome to the Software Heritage archive

{% endblock %} {% block content %}

Overview

The long term goal of the Software Heritage initiative is to collect all publicly available software in source code form together with its development history, replicate it massively to ensure its preservation, and share it with everyone who needs it. The Software Heritage archive is growing over time as we crawl new source code from software projects and development forges. We will incrementally release archive search and browse functionalities — as of now you can check whether source code you care about is already present in the archive or not.

Content

A significant amount of source code has already been ingested in the Software Heritage archive. It currently includes:

Size

As of today the archive already contains and keeps safe for you the following amount of objects:

Source files
Directories
Commits
Authors
Projects
Releases

Access

{% endblock %} diff --git a/swh/web/templates/layout.html b/swh/web/templates/layout.html index 0820896d..ee567ef2 100644 --- a/swh/web/templates/layout.html +++ b/swh/web/templates/layout.html @@ -1,213 +1,213 @@ {% comment %} Copyright (C) 2015-2019 The Software Heritage developers See the AUTHORS file at the top-level directory of this distribution License: GNU Affero General Public License version 3, or any later version See top-level LICENSE file for more information {% endcomment %} {% load js_reverse %} {% load static %} {% load render_bundle from webpack_loader %} {% load swh_templatetags %} {% block title %}{% endblock %} {% render_bundle 'vendors' %} {% render_bundle 'webapp' %} {% block header %}{% endblock %}
{% block content %}{% endblock %}
{% include "includes/global-modals.html" %}
back to top
diff --git a/swh/web/tests/admin/test_origin_save.py b/swh/web/tests/admin/test_origin_save.py index b0e20d04..7af27d91 100644 --- a/swh/web/tests/admin/test_origin_save.py +++ b/swh/web/tests/admin/test_origin_save.py @@ -1,232 +1,232 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from urllib.parse import unquote from django.contrib.auth import get_user_model from unittest.mock import patch from swh.web.common.models import ( SaveAuthorizedOrigin, SaveUnauthorizedOrigin, SaveOriginRequest ) from swh.web.common.origin_save import can_save_origin from swh.web.common.models import ( SAVE_REQUEST_PENDING, SAVE_REQUEST_ACCEPTED, SAVE_REQUEST_REJECTED, SAVE_TASK_NOT_YET_SCHEDULED ) from swh.web.common.utils import reverse from swh.web.tests.testcase import WebTestCase _user_name = 'swh-web-admin' _user_mail = 'admin@swh-web.org' _user_password = '..34~pounds~BEAUTY~march~63..' 
_authorized_origin_url = 'https://scm.ourproject.org/anonscm/' _unauthorized_origin_url = 'https://www.softwareheritage.org/' class OriginSaveAdminTestCase(WebTestCase): @classmethod def setUpTestData(cls): # noqa: N802 User = get_user_model() # noqa: N806 user = User.objects.create_user(_user_name, _user_mail, _user_password) user.is_staff = True user.save() SaveAuthorizedOrigin.objects.create(url=_authorized_origin_url) SaveUnauthorizedOrigin.objects.create(url=_unauthorized_origin_url) def check_not_login(self, url): login_url = reverse('login', query_params={'next': url}) response = self.client.post(url) self.assertEqual(response.status_code, 302) self.assertEqual(unquote(response.url), login_url) def test_add_authorized_origin_url(self): authorized_url = 'https://scm.adullact.net/anonscm/' self.assertEqual(can_save_origin(authorized_url), SAVE_REQUEST_PENDING) url = reverse('admin-origin-save-add-authorized-url', url_args={'origin_url': authorized_url}) self.check_not_login(url) self.assertEqual(can_save_origin(authorized_url), SAVE_REQUEST_PENDING) self.client.login(username=_user_name, password=_user_password) response = self.client.post(url) self.assertEqual(response.status_code, 200) self.assertEqual(can_save_origin(authorized_url), SAVE_REQUEST_ACCEPTED) def test_remove_authorized_origin_url(self): self.assertEqual(can_save_origin(_authorized_origin_url), SAVE_REQUEST_ACCEPTED) url = reverse('admin-origin-save-remove-authorized-url', url_args={'origin_url': _authorized_origin_url}) self.check_not_login(url) self.assertEqual(can_save_origin(_authorized_origin_url), SAVE_REQUEST_ACCEPTED) self.client.login(username=_user_name, password=_user_password) response = self.client.post(url) self.assertEqual(response.status_code, 200) self.assertEqual(can_save_origin(_authorized_origin_url), SAVE_REQUEST_PENDING) def test_add_unauthorized_origin_url(self): unauthorized_url = 'https://www.yahoo./' self.assertEqual(can_save_origin(unauthorized_url), 
SAVE_REQUEST_PENDING) url = reverse('admin-origin-save-add-unauthorized-url', url_args={'origin_url': unauthorized_url}) self.check_not_login(url) self.assertEqual(can_save_origin(unauthorized_url), SAVE_REQUEST_PENDING) self.client.login(username=_user_name, password=_user_password) response = self.client.post(url) self.assertEqual(response.status_code, 200) self.assertEqual(can_save_origin(unauthorized_url), SAVE_REQUEST_REJECTED) def test_remove_unauthorized_origin_url(self): self.assertEqual(can_save_origin(_unauthorized_origin_url), SAVE_REQUEST_REJECTED) url = reverse('admin-origin-save-remove-unauthorized-url', url_args={'origin_url': _unauthorized_origin_url}) self.check_not_login(url) self.assertEqual(can_save_origin(_unauthorized_origin_url), SAVE_REQUEST_REJECTED) self.client.login(username=_user_name, password=_user_password) response = self.client.post(url) self.assertEqual(response.status_code, 200) self.assertEqual(can_save_origin(_unauthorized_origin_url), SAVE_REQUEST_PENDING) @patch('swh.web.common.origin_save.scheduler') def test_accept_pending_save_request(self, mock_scheduler): origin_type = 'git' origin_url = 'https://v2.pikacode.com/bthate/botlib.git' - save_request_url = reverse('api-save-origin', + save_request_url = reverse('api-1-save-origin', url_args={'origin_type': origin_type, 'origin_url': origin_url}) response = self.client.post(save_request_url, data={}, content_type='application/x-www-form-urlencoded') # noqa self.assertEqual(response.status_code, 200) self.assertEqual(response.data['save_request_status'], SAVE_REQUEST_PENDING) accept_request_url = reverse('admin-origin-save-request-accept', url_args={'origin_type': origin_type, 'origin_url': origin_url}) self.check_not_login(accept_request_url) tasks_data = [ { 'priority': 'high', 'policy': 'oneshot', 'type': 'load-git', 'arguments': { 'kwargs': { 'repo_url': origin_url }, 'args': [] }, 'status': 'next_run_not_scheduled', 'id': 1, } ] mock_scheduler.create_tasks.return_value = 
tasks_data mock_scheduler.get_tasks.return_value = tasks_data self.client.login(username=_user_name, password=_user_password) response = self.client.post(accept_request_url) self.assertEqual(response.status_code, 200) response = self.client.get(save_request_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.data[0]['save_request_status'], SAVE_REQUEST_ACCEPTED) self.assertEqual(response.data[0]['save_task_status'], SAVE_TASK_NOT_YET_SCHEDULED) @patch('swh.web.common.origin_save.scheduler') def test_reject_pending_save_request(self, mock_scheduler): origin_type = 'git' origin_url = 'https://wikipedia.com' - save_request_url = reverse('api-save-origin', + save_request_url = reverse('api-1-save-origin', url_args={'origin_type': origin_type, 'origin_url': origin_url}) response = self.client.post(save_request_url, data={}, content_type='application/x-www-form-urlencoded') # noqa self.assertEqual(response.status_code, 200) self.assertEqual(response.data['save_request_status'], SAVE_REQUEST_PENDING) reject_request_url = reverse('admin-origin-save-request-reject', url_args={'origin_type': origin_type, 'origin_url': origin_url}) self.check_not_login(reject_request_url) self.client.login(username=_user_name, password=_user_password) response = self.client.post(reject_request_url) self.assertEqual(response.status_code, 200) tasks_data = [ { 'priority': 'high', 'policy': 'oneshot', 'type': 'load-git', 'arguments': { 'kwargs': { 'repo_url': origin_url }, 'args': [] }, 'status': 'next_run_not_scheduled', 'id': 1, } ] mock_scheduler.create_tasks.return_value = tasks_data mock_scheduler.get_tasks.return_value = tasks_data response = self.client.get(save_request_url) self.assertEqual(response.status_code, 200) self.assertEqual(response.data[0]['save_request_status'], SAVE_REQUEST_REJECTED) def test_remove_save_request(self): sor = SaveOriginRequest.objects.create(origin_type='git', origin_url='https://wikipedia.com', # noqa status=SAVE_REQUEST_PENDING) 
self.assertEqual(SaveOriginRequest.objects.count(), 1) remove_request_url = reverse('admin-origin-save-request-remove', url_args={'sor_id': sor.id}) self.check_not_login(remove_request_url) self.client.login(username=_user_name, password=_user_password) response = self.client.post(remove_request_url) self.assertEqual(response.status_code, 200) self.assertEqual(SaveOriginRequest.objects.count(), 0) diff --git a/swh/web/tests/api/test_utils.py b/swh/web/tests/api/test_utils.py index de0855e8..58dd679e 100644 --- a/swh/web/tests/api/test_utils.py +++ b/swh/web/tests/api/test_utils.py @@ -1,599 +1,599 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from unittest.mock import patch, call from swh.web.api import utils from swh.web.tests.testcase import WebTestCase class UtilsTestCase(WebTestCase): def setUp(self): self.maxDiff = None self.url_map = [dict(rule='/other/', methods=set(['GET', 'POST', 'HEAD']), endpoint='foo'), dict(rule='/some/old/url/', methods=set(['GET', 'POST']), endpoint='blablafn'), dict(rule='/other/old/url/', methods=set(['GET', 'HEAD']), endpoint='bar'), dict(rule='/other', methods=set([]), endpoint=None), dict(rule='/other2', methods=set([]), endpoint=None)] self.sample_content_hashes = { 'blake2s256': ('791e07fcea240ade6dccd0a9309141673' 'c31242cae9c237cf3855e151abc78e9'), 'sha1': 'dc2830a9e72f23c1dfebef4413003221baa5fb62', 'sha1_git': 'fe95a46679d128ff167b7c55df5d02356c5a1ae1', 'sha256': ('b5c7fe0536f44ef60c8780b6065d30bca74a5cd06' 'd78a4a71ba1ad064770f0c9') } def test_filter_field_keys_dict_unknown_keys(self): # when actual_res = utils.filter_field_keys( {'directory': 1, 'file': 2, 'link': 3}, {'directory1', 'file2'}) # then self.assertEqual(actual_res, {}) def test_filter_field_keys_dict(self): # when actual_res = 
utils.filter_field_keys( {'directory': 1, 'file': 2, 'link': 3}, {'directory', 'link'}) # then self.assertEqual(actual_res, {'directory': 1, 'link': 3}) def test_filter_field_keys_list_unknown_keys(self): # when actual_res = utils.filter_field_keys( [{'directory': 1, 'file': 2, 'link': 3}, {'1': 1, '2': 2, 'link': 3}], {'d'}) # then self.assertEqual(actual_res, [{}, {}]) def test_filter_field_keys_map(self): # when actual_res = utils.filter_field_keys( map(lambda x: {'i': x['i']+1, 'j': x['j']}, [{'i': 1, 'j': None}, {'i': 2, 'j': None}, {'i': 3, 'j': None}]), {'i'}) # then self.assertEqual(list(actual_res), [{'i': 2}, {'i': 3}, {'i': 4}]) def test_filter_field_keys_list(self): # when actual_res = utils.filter_field_keys( [{'directory': 1, 'file': 2, 'link': 3}, {'dir': 1, 'fil': 2, 'lin': 3}], {'directory', 'dir'}) # then self.assertEqual(actual_res, [{'directory': 1}, {'dir': 1}]) def test_filter_field_keys_other(self): # given input_set = {1, 2} # when actual_res = utils.filter_field_keys(input_set, {'a', '1'}) # then self.assertEqual(actual_res, input_set) def test_person_to_string(self): self.assertEqual(utils.person_to_string(dict(name='raboof', email='foo@bar')), 'raboof ') def test_enrich_release_0(self): # when actual_release = utils.enrich_release({}) # then self.assertEqual(actual_release, {}) @patch('swh.web.api.utils.reverse') def test_enrich_release_1(self, mock_django_reverse): # given def reverse_test_context(view_name, url_args): - if view_name == 'api-content': + if view_name == 'api-1-content': id = url_args['q'] return '/api/1/content/%s/' % id - elif view_name == 'api-person': + elif view_name == 'api-1-person': id = url_args['person_id'] return '/api/1/person/%s/' % id else: raise ValueError( 'This should not happened so fail if it does.') mock_django_reverse.side_effect = reverse_test_context # when actual_release = utils.enrich_release({ 'target': '123', 'target_type': 'content', 'author': { 'id': 100, 'name': 'author release name', 'email': 
'author@email', }, }) # then self.assertEqual(actual_release, { 'target': '123', 'target_type': 'content', 'target_url': '/api/1/content/sha1_git:123/', 'author_url': '/api/1/person/100/', 'author': { 'id': 100, 'name': 'author release name', 'email': 'author@email', }, }) mock_django_reverse.assert_has_calls([ - call('api-content', url_args={'q': 'sha1_git:123'}), - call('api-person', url_args={'person_id': 100}) + call('api-1-content', url_args={'q': 'sha1_git:123'}), + call('api-1-person', url_args={'person_id': 100}) ]) @patch('swh.web.api.utils.reverse') def test_enrich_release_2(self, mock_django_reverse): # given mock_django_reverse.return_value = '/api/1/dir/23/' # when actual_release = utils.enrich_release({'target': '23', 'target_type': 'directory'}) # then self.assertEqual(actual_release, { 'target': '23', 'target_type': 'directory', 'target_url': '/api/1/dir/23/' }) - mock_django_reverse.assert_called_once_with('api-directory', + mock_django_reverse.assert_called_once_with('api-1-directory', url_args={'sha1_git': '23'}) # noqa @patch('swh.web.api.utils.reverse') def test_enrich_release_3(self, mock_django_reverse): # given mock_django_reverse.return_value = '/api/1/rev/3/' # when actual_release = utils.enrich_release({'target': '3', 'target_type': 'revision'}) # then self.assertEqual(actual_release, { 'target': '3', 'target_type': 'revision', 'target_url': '/api/1/rev/3/' }) - mock_django_reverse.assert_called_once_with('api-revision', + mock_django_reverse.assert_called_once_with('api-1-revision', url_args={'sha1_git': '3'}) @patch('swh.web.api.utils.reverse') def test_enrich_release_4(self, mock_django_reverse): # given mock_django_reverse.return_value = '/api/1/rev/4/' # when actual_release = utils.enrich_release({'target': '4', 'target_type': 'release'}) # then self.assertEqual(actual_release, { 'target': '4', 'target_type': 'release', 'target_url': '/api/1/rev/4/' }) - mock_django_reverse.assert_called_once_with('api-release', + 
mock_django_reverse.assert_called_once_with('api-1-release', url_args={'sha1_git': '4'}) @patch('swh.web.api.utils.reverse') def test_enrich_directory_no_type(self, mock_django_reverse): # when/then self.assertEqual(utils.enrich_directory({'id': 'dir-id'}), {'id': 'dir-id'}) # given mock_django_reverse.return_value = '/api/content/sha1_git:123/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'file', 'target': '123', }) # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'file', 'target': '123', 'target_url': '/api/content/sha1_git:123/', }) mock_django_reverse.assert_called_once_with( - 'api-content', url_args={'q': 'sha1_git:123'}) + 'api-1-content', url_args={'q': 'sha1_git:123'}) @patch('swh.web.api.utils.reverse') def test_enrich_directory_with_context_and_type_file( self, mock_django_reverse, ): # given mock_django_reverse.return_value = '/api/content/sha1_git:123/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'file', 'name': 'hy', 'target': '789', }, context_url='/api/revision/revsha1/directory/prefix/path/') # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'file', 'name': 'hy', 'target': '789', 'target_url': '/api/content/sha1_git:123/', 'file_url': '/api/revision/revsha1/directory' '/prefix/path/hy/' }) mock_django_reverse.assert_called_once_with( - 'api-content', url_args={'q': 'sha1_git:789'}) + 'api-1-content', url_args={'q': 'sha1_git:789'}) @patch('swh.web.api.utils.reverse') def test_enrich_directory_with_context_and_type_dir( self, mock_django_reverse, ): # given mock_django_reverse.return_value = '/api/directory/456/' # when actual_directory = utils.enrich_directory({ 'id': 'dir-id', 'type': 'dir', 'name': 'emacs-42', 'target_type': 'file', 'target': '456', }, context_url='/api/revision/origin/2/directory/some/prefix/path/') # then self.assertEqual(actual_directory, { 'id': 'dir-id', 'type': 'dir', 'target_type': 'file', 'name': 'emacs-42', 'target': '456', 
'target_url': '/api/directory/456/', 'dir_url': '/api/revision/origin/2/directory' '/some/prefix/path/emacs-42/' }) - mock_django_reverse.assert_called_once_with('api-directory', + mock_django_reverse.assert_called_once_with('api-1-directory', url_args={'sha1_git': '456'}) # noqa def test_enrich_content_without_hashes(self): # when/then self.assertEqual(utils.enrich_content({'id': '123'}), {'id': '123'}) @patch('swh.web.api.utils.reverse') def test_enrich_content_with_hashes(self, mock_django_reverse): for algo, hash in self.sample_content_hashes.items(): query_string = '%s:%s' % (algo, hash) # given mock_django_reverse.side_effect = [ '/api/content/%s/raw/' % query_string, '/api/filetype/%s/' % query_string, '/api/language/%s/' % query_string, '/api/license/%s/' % query_string ] # when enriched_content = utils.enrich_content( { algo: hash, }, query_string=query_string ) # then self.assertEqual( enriched_content, { algo: hash, 'data_url': '/api/content/%s/raw/' % query_string, 'filetype_url': '/api/filetype/%s/' % query_string, 'language_url': '/api/language/%s/' % query_string, 'license_url': '/api/license/%s/' % query_string, } ) mock_django_reverse.assert_has_calls([ - call('api-content-raw', url_args={'q': query_string}), - call('api-content-filetype', url_args={'q': query_string}), - call('api-content-language', url_args={'q': query_string}), - call('api-content-license', url_args={'q': query_string}), + call('api-1-content-raw', url_args={'q': query_string}), + call('api-1-content-filetype', url_args={'q': query_string}), + call('api-1-content-language', url_args={'q': query_string}), + call('api-1-content-license', url_args={'q': query_string}), ]) mock_django_reverse.reset() @patch('swh.web.api.utils.reverse') def test_enrich_content_with_hashes_and_top_level_url(self, mock_django_reverse): for algo, hash in self.sample_content_hashes.items(): query_string = '%s:%s' % (algo, hash) # given mock_django_reverse.side_effect = [ '/api/content/%s/' % 
query_string, '/api/content/%s/raw/' % query_string, '/api/filetype/%s/' % query_string, '/api/language/%s/' % query_string, '/api/license/%s/' % query_string, ] # when enriched_content = utils.enrich_content( { algo: hash }, top_url=True, query_string=query_string ) # then self.assertEqual( enriched_content, { algo: hash, 'content_url': '/api/content/%s/' % query_string, 'data_url': '/api/content/%s/raw/' % query_string, 'filetype_url': '/api/filetype/%s/' % query_string, 'language_url': '/api/language/%s/' % query_string, 'license_url': '/api/license/%s/' % query_string, } ) mock_django_reverse.assert_has_calls([ - call('api-content', url_args={'q': query_string}), - call('api-content-raw', url_args={'q': query_string}), - call('api-content-filetype', url_args={'q': query_string}), - call('api-content-language', url_args={'q': query_string}), - call('api-content-license', url_args={'q': query_string}), + call('api-1-content', url_args={'q': query_string}), + call('api-1-content-raw', url_args={'q': query_string}), + call('api-1-content-filetype', url_args={'q': query_string}), + call('api-1-content-language', url_args={'q': query_string}), + call('api-1-content-license', url_args={'q': query_string}), ]) mock_django_reverse.reset() def _reverse_context_test(self, view_name, url_args): - if view_name == 'api-revision': + if view_name == 'api-1-revision': return '/api/revision/%s/' % url_args['sha1_git'] - elif view_name == 'api-revision-context': + elif view_name == 'api-1-revision-context': return '/api/revision/%s/prev/%s/' % (url_args['sha1_git'], url_args['context']) # noqa - elif view_name == 'api-revision-log': + elif view_name == 'api-1-revision-log': if 'prev_sha1s' in url_args: return '/api/revision/%s/prev/%s/log/' % (url_args['sha1_git'], url_args['prev_sha1s']) # noqa else: return '/api/revision/%s/log/' % url_args['sha1_git'] @patch('swh.web.api.utils.reverse') def test_enrich_revision_without_children_or_parent( self, mock_django_reverse, ): # given 
def reverse_test(view_name, url_args): - if view_name == 'api-revision': + if view_name == 'api-1-revision': return '/api/revision/' + url_args['sha1_git'] + '/' - elif view_name == 'api-revision-log': + elif view_name == 'api-1-revision-log': return '/api/revision/' + url_args['sha1_git'] + '/log/' - elif view_name == 'api-directory': + elif view_name == 'api-1-directory': return '/api/directory/' + url_args['sha1_git'] + '/' - elif view_name == 'api-person': + elif view_name == 'api-1-person': return '/api/person/' + url_args['person_id'] + '/' mock_django_reverse.side_effect = reverse_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'directory': '123', 'author': {'id': '1'}, 'committer': {'id': '2'}, }) expected_revision = { 'id': 'rev-id', 'directory': '123', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'directory_url': '/api/directory/123/', 'author': {'id': '1'}, 'author_url': '/api/person/1/', 'committer': {'id': '2'}, 'committer_url': '/api/person/2/' } # then self.assertEqual(actual_revision, expected_revision) mock_django_reverse.assert_has_calls( - [call('api-revision', url_args={'sha1_git': 'rev-id'}), - call('api-revision-log', url_args={'sha1_git': 'rev-id'}), - call('api-person', url_args={'person_id': '1'}), - call('api-person', url_args={'person_id': '2'}), - call('api-directory', url_args={'sha1_git': '123'})]) + [call('api-1-revision', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}), + call('api-1-person', url_args={'person_id': '1'}), + call('api-1-person', url_args={'person_id': '2'}), + call('api-1-directory', url_args={'sha1_git': '123'})]) @patch('swh.web.api.utils.reverse') def test_enrich_revision_with_children_and_parent_no_dir( self, mock_django_reverse, ): # given mock_django_reverse.side_effect = self._reverse_context_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'parents': ['123'], 'children': ['456'], }) 
expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], 'children_urls': ['/api/revision/456/'], } # then self.assertEqual(actual_revision, expected_revision) mock_django_reverse.assert_has_calls( - [call('api-revision', url_args={'sha1_git': 'rev-id'}), - call('api-revision-log', url_args={'sha1_git': 'rev-id'}), - call('api-revision', url_args={'sha1_git': '123'}), - call('api-revision', url_args={'sha1_git': '456'})]) + [call('api-1-revision', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision', url_args={'sha1_git': '123'}), + call('api-1-revision', url_args={'sha1_git': '456'})]) @patch('swh.web.api.utils.reverse') def test_enrich_revision_no_context(self, mock_django_reverse): # given mock_django_reverse.side_effect = self._reverse_context_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'parents': ['123'], 'children': ['456'], }) expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], 'children_urls': ['/api/revision/456/'] } # then self.assertEqual(actual_revision, expected_revision) mock_django_reverse.assert_has_calls( - [call('api-revision', url_args={'sha1_git': 'rev-id'}), - call('api-revision-log', url_args={'sha1_git': 'rev-id'}), - call('api-revision', url_args={'sha1_git': '123'}), - call('api-revision', url_args={'sha1_git': '456'})]) + [call('api-1-revision', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision', url_args={'sha1_git': '123'}), + call('api-1-revision', url_args={'sha1_git': '456'})]) def _reverse_rev_message_test(self, view_name, url_args): - if view_name == 'api-revision': + if view_name == 'api-1-revision': 
return '/api/revision/%s/' % url_args['sha1_git'] - elif view_name == 'api-revision-log': + elif view_name == 'api-1-revision-log': if 'prev_sha1s' in url_args and url_args['prev_sha1s'] is not None: return '/api/revision/%s/prev/%s/log/' % (url_args['sha1_git'], url_args['prev_sha1s']) # noqa else: return '/api/revision/%s/log/' % url_args['sha1_git'] - elif view_name == 'api-revision-raw-message': + elif view_name == 'api-1-revision-raw-message': return '/api/revision/' + url_args['sha1_git'] + '/raw/' else: return '/api/revision/%s/prev/%s/' % (url_args['sha1_git'], url_args['context']) # noqa @patch('swh.web.api.utils.reverse') def test_enrich_revision_with_no_message(self, mock_django_reverse): # given mock_django_reverse.side_effect = self._reverse_rev_message_test # when expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'message': None, 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], 'children_urls': ['/api/revision/456/'], } actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'message': None, 'parents': ['123'], 'children': ['456'], }) # then self.assertEqual(actual_revision, expected_revision) mock_django_reverse.assert_has_calls( - [call('api-revision', url_args={'sha1_git': 'rev-id'}), - call('api-revision-log', url_args={'sha1_git': 'rev-id'}), - call('api-revision', url_args={'sha1_git': '123'}), - call('api-revision', url_args={'sha1_git': '456'})] + [call('api-1-revision', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision', url_args={'sha1_git': '123'}), + call('api-1-revision', url_args={'sha1_git': '456'})] ) @patch('swh.web.api.utils.reverse') def test_enrich_revision_with_invalid_message(self, mock_django_reverse): # given mock_django_reverse.side_effect = self._reverse_rev_message_test # when actual_revision = utils.enrich_revision({ 'id': 'rev-id', 'message': None, 
'message_decoding_failed': True, 'parents': ['123'], 'children': ['456'], }) expected_revision = { 'id': 'rev-id', 'url': '/api/revision/rev-id/', 'history_url': '/api/revision/rev-id/log/', 'message': None, 'message_decoding_failed': True, 'message_url': '/api/revision/rev-id/raw/', 'parents': [{'id': '123', 'url': '/api/revision/123/'}], 'children': ['456'], 'children_urls': ['/api/revision/456/'], } # then self.assertEqual(actual_revision, expected_revision) mock_django_reverse.assert_has_calls( - [call('api-revision', url_args={'sha1_git': 'rev-id'}), - call('api-revision-log', url_args={'sha1_git': 'rev-id'}), - call('api-revision', url_args={'sha1_git': '123'}), - call('api-revision', url_args={'sha1_git': '456'}), - call('api-revision-raw-message', url_args={'sha1_git': 'rev-id'})]) # noqa + [call('api-1-revision', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}), + call('api-1-revision', url_args={'sha1_git': '123'}), + call('api-1-revision', url_args={'sha1_git': '456'}), + call('api-1-revision-raw-message', url_args={'sha1_git': 'rev-id'})]) # noqa diff --git a/swh/web/tests/api/views/test_content.py b/swh/web/tests/api/views/test_content.py index 7ac1051f..940cb903 100644 --- a/swh/web/tests/api/views/test_content.py +++ b/swh/web/tests/api/views/test_content.py @@ -1,391 +1,391 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest from hypothesis import given from rest_framework.test import APITestCase from swh.web.common.utils import reverse from swh.web.tests.data import random_content from swh.web.tests.strategies import ( content, contents_with_ctags ) from swh.web.tests.testcase import ( WebTestCase, ctags_json_missing, fossology_missing ) class ContentApiTestCase(WebTestCase, 
APITestCase): @given(content()) def test_api_content_filetype(self, content): self.content_add_mimetype(content['sha1']) - url = reverse('api-content-filetype', + url = reverse('api-1-content-filetype', url_args={'q': 'sha1_git:%s' % content['sha1_git']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') - content_url = reverse('api-content', + content_url = reverse('api-1-content', url_args={'q': 'sha1:%s' % content['sha1']}) expected_data = self.content_get_mimetype(content['sha1']) expected_data['content_url'] = content_url self.assertEqual(rv.data, expected_data) def test_api_content_filetype_sha_not_found(self): unknown_content_ = random_content() - url = reverse('api-content-filetype', + url = reverse('api-1-content-filetype', url_args={'q': 'sha1:%s' % unknown_content_['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No filetype information found for content ' 'sha1:%s.' 
% unknown_content_['sha1'] }) @pytest.mark.xfail # Language indexer is disabled @given(content()) def test_api_content_language(self, content): self.content_add_language(content['sha1']) - url = reverse('api-content-language', + url = reverse('api-1-content-language', url_args={'q': 'sha1_git:%s' % content['sha1_git']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') - content_url = reverse('api-content', + content_url = reverse('api-1-content', url_args={'q': 'sha1:%s' % content['sha1']}) expected_data = self.content_get_language(content['sha1']) expected_data['content_url'] = content_url self.assertEqual(rv.data, expected_data) def test_api_content_language_sha_not_found(self): unknown_content_ = random_content() - url = reverse('api-content-language', + url = reverse('api-1-content-language', url_args={'q': 'sha1:%s' % unknown_content_['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No language information found for content ' 'sha1:%s.' 
% unknown_content_['sha1'] }) @pytest.mark.xfail # Language indexer is disabled @pytest.mark.skipif(ctags_json_missing, reason="requires ctags with json output support") @given(contents_with_ctags()) def test_api_content_symbol(self, contents_with_ctags): expected_data = {} for content_sha1 in contents_with_ctags['sha1s']: self.content_add_ctags(content_sha1) for ctag in self.content_get_ctags(content_sha1): if ctag['name'] == contents_with_ctags['symbol_name']: expected_data[content_sha1] = ctag break - url = reverse('api-content-symbol', + url = reverse('api-1-content-symbol', url_args={'q': contents_with_ctags['symbol_name']}, query_params={'per_page': 100}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') for entry in rv.data: content_sha1 = entry['sha1'] expected_entry = expected_data[content_sha1] - for key, view_name in (('content_url', 'api-content'), - ('data_url', 'api-content-raw'), - ('license_url', 'api-content-license'), - ('language_url', 'api-content-language'), - ('filetype_url', 'api-content-filetype')): + for key, view_name in (('content_url', 'api-1-content'), + ('data_url', 'api-1-content-raw'), + ('license_url', 'api-1-content-license'), + ('language_url', 'api-1-content-language'), + ('filetype_url', 'api-1-content-filetype')): expected_entry[key] = reverse(view_name, url_args={'q': 'sha1:%s' % content_sha1}) expected_entry['sha1'] = content_sha1 del expected_entry['id'] self.assertEqual(entry, expected_entry) self.assertFalse('Link' in rv) - url = reverse('api-content-symbol', + url = reverse('api-1-content-symbol', url_args={'q': contents_with_ctags['symbol_name']}, query_params={'per_page': 2}) rv = self.client.get(url) - next_url = reverse('api-content-symbol', + next_url = reverse('api-1-content-symbol', url_args={'q': contents_with_ctags['symbol_name']}, query_params={'last_sha1': rv.data[1]['sha1'], 'per_page': 2}) self.assertEqual(rv['Link'], '<%s>; rel="next"' % 
next_url) def test_api_content_symbol_not_found(self): - url = reverse('api-content-symbol', url_args={'q': 'bar'}) + url = reverse('api-1-content-symbol', url_args={'q': 'bar'}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No indexed raw content match expression \'bar\'.' }) self.assertFalse('Link' in rv) @pytest.mark.skipif(ctags_json_missing, reason="requires ctags with json output support") @given(content()) def test_api_content_ctags(self, content): self.content_add_ctags(content['sha1']) - url = reverse('api-content-ctags', + url = reverse('api-1-content-ctags', url_args={'q': 'sha1_git:%s' % content['sha1_git']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') - content_url = reverse('api-content', + content_url = reverse('api-1-content', url_args={'q': 'sha1:%s' % content['sha1']}) expected_data = list(self.content_get_ctags(content['sha1'])) for e in expected_data: e['content_url'] = content_url self.assertEqual(rv.data, expected_data) @pytest.mark.skipif(fossology_missing, reason="requires fossology-nomossa installed") @given(content()) def test_api_content_license(self, content): self.content_add_license(content['sha1']) - url = reverse('api-content-license', + url = reverse('api-1-content-license', url_args={'q': 'sha1_git:%s' % content['sha1_git']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') - content_url = reverse('api-content', + content_url = reverse('api-1-content', url_args={'q': 'sha1:%s' % content['sha1']}) expected_data = self.content_get_license(content['sha1']) expected_data['content_url'] = content_url self.assertEqual(rv.data, expected_data) def test_api_content_license_sha_not_found(self): unknown_content_ = random_content() - url = 
reverse('api-content-license', + url = reverse('api-1-content-license', url_args={'q': 'sha1:%s' % unknown_content_['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No license information found for content ' 'sha1:%s.' % unknown_content_['sha1'] }) @given(content()) def test_api_content_metadata(self, content): - url = reverse('api-content', {'q': 'sha1:%s' % content['sha1']}) + url = reverse('api-1-content', {'q': 'sha1:%s' % content['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') expected_data = self.content_get_metadata(content['sha1']) - for key, view_name in (('data_url', 'api-content-raw'), - ('license_url', 'api-content-license'), - ('language_url', 'api-content-language'), - ('filetype_url', 'api-content-filetype')): + for key, view_name in (('data_url', 'api-1-content-raw'), + ('license_url', 'api-1-content-license'), + ('language_url', 'api-1-content-language'), + ('filetype_url', 'api-1-content-filetype')): expected_data[key] = reverse(view_name, url_args={'q': 'sha1:%s' % content['sha1']}) self.assertEqual(rv.data, expected_data) def test_api_content_not_found_as_json(self): unknown_content_ = random_content() - url = reverse('api-content', + url = reverse('api-1-content', url_args={'q': 'sha1:%s' % unknown_content_['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content with sha1 checksum equals to %s not found!' 
% unknown_content_['sha1'] }) def test_api_content_not_found_as_yaml(self): unknown_content_ = random_content() - url = reverse('api-content', + url = reverse('api-1-content', url_args={'q': 'sha256:%s' % unknown_content_['sha256']}) rv = self.client.get(url, HTTP_ACCEPT='application/yaml') self.assertEqual(rv.status_code, 404) self.assertTrue('application/yaml' in rv['Content-Type']) self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content with sha256 checksum equals to %s not found!' % unknown_content_['sha256'] }) def test_api_content_raw_ko_not_found(self): unknown_content_ = random_content() - url = reverse('api-content-raw', + url = reverse('api-1-content-raw', url_args={'q': 'sha1:%s' % unknown_content_['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Content with sha1 checksum equals to %s not found!' % unknown_content_['sha1'] }) @given(content()) def test_api_content_raw_text(self, content): - url = reverse('api-content-raw', + url = reverse('api-1-content-raw', url_args={'q': 'sha1:%s' % content['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/octet-stream') self.assertEqual( rv['Content-disposition'], 'attachment; filename=content_sha1_%s_raw' % content['sha1']) self.assertEqual( rv['Content-Type'], 'application/octet-stream') expected_data = self.content_get(content['sha1']) self.assertEqual(rv.content, expected_data['data']) @given(content()) def test_api_content_raw_text_with_filename(self, content): - url = reverse('api-content-raw', + url = reverse('api-1-content-raw', url_args={'q': 'sha1:%s' % content['sha1']}, query_params={'filename': 'filename.txt'}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/octet-stream') self.assertEqual( 
rv['Content-disposition'], 'attachment; filename=filename.txt') self.assertEqual( rv['Content-Type'], 'application/octet-stream') expected_data = self.content_get(content['sha1']) self.assertEqual(rv.content, expected_data['data']) @given(content()) def test_api_check_content_known(self, content): - url = reverse('api-content-known', + url = reverse('api-1-content-known', url_args={'q': content['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'search_res': [ { 'found': True, 'sha1': content['sha1'] } ], 'search_stats': {'nbfiles': 1, 'pct': 100.0} }) @given(content()) def test_api_check_content_known_as_yaml(self, content): - url = reverse('api-content-known', + url = reverse('api-1-content-known', url_args={'q': content['sha1']}) rv = self.client.get(url, HTTP_ACCEPT='application/yaml') self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/yaml') self.assertEqual(rv.data, { 'search_res': [ { 'found': True, 'sha1': content['sha1'] } ], 'search_stats': {'nbfiles': 1, 'pct': 100.0} }) @given(content()) def test_api_check_content_known_post_as_yaml(self, content): - url = reverse('api-content-known') + url = reverse('api-1-content-known') rv = self.client.post( url, data={ 'q': content['sha1'] }, HTTP_ACCEPT='application/yaml' ) self.assertEqual(rv.status_code, 200) self.assertTrue('application/yaml' in rv['Content-Type']) self.assertEqual(rv.data, { 'search_res': [ { 'found': True, 'sha1': content['sha1'] } ], 'search_stats': {'nbfiles': 1, 'pct': 100.0} }) def test_api_check_content_known_not_found(self): unknown_content_ = random_content() - url = reverse('api-content-known', + url = reverse('api-1-content-known', url_args={'q': unknown_content_['sha1']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'search_res': [ { 
'found': False, 'sha1': unknown_content_['sha1'] } ], 'search_stats': {'nbfiles': 1, 'pct': 0.0} }) @given(content()) def test_api_content_uppercase(self, content): - url = reverse('api-content-uppercase-checksum', + url = reverse('api-1-content-uppercase-checksum', url_args={'q': content['sha1'].upper()}) resp = self.client.get(url) self.assertEqual(resp.status_code, 302) - redirect_url = reverse('api-content', + redirect_url = reverse('api-1-content', url_args={'q': content['sha1']}) self.assertEqual(resp['location'], redirect_url) diff --git a/swh/web/tests/api/views/test_directory.py b/swh/web/tests/api/views/test_directory.py index 0d6af0f5..3114e172 100644 --- a/swh/web/tests/api/views/test_directory.py +++ b/swh/web/tests/api/views/test_directory.py @@ -1,106 +1,106 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import random from hypothesis import given from rest_framework.test import APITestCase from swh.web.common.utils import reverse from swh.web.tests.data import random_sha1 from swh.web.tests.strategies import directory from swh.web.tests.testcase import WebTestCase class DirectoryApiTestCase(WebTestCase, APITestCase): @given(directory()) def test_api_directory(self, directory): - url = reverse('api-directory', url_args={'sha1_git': directory}) + url = reverse('api-1-directory', url_args={'sha1_git': directory}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') expected_data = list(map(self._enrich_dir_data, self.directory_ls(directory))) self.assertEqual(rv.data, expected_data) def test_api_directory_not_found(self): unknown_directory_ = random_sha1() - url = reverse('api-directory', + url = reverse('api-1-directory', url_args={'sha1_git': unknown_directory_}) rv = 
self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Directory with sha1_git %s not found' % unknown_directory_}) @given(directory()) def test_api_directory_with_path_found(self, directory): directory_content = self.directory_ls(directory) path = random.choice(directory_content) - url = reverse('api-directory', + url = reverse('api-1-directory', url_args={'sha1_git': directory, 'path': path['name']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, self._enrich_dir_data(path)) @given(directory()) def test_api_directory_with_path_not_found(self, directory): path = 'some/path/to/nonexistent/dir/' - url = reverse('api-directory', + url = reverse('api-1-directory', url_args={'sha1_git': directory, 'path': path}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': ('Directory entry with path %s from %s not found' % (path, directory))}) @given(directory()) def test_api_directory_uppercase(self, directory): - url = reverse('api-directory-uppercase-checksum', + url = reverse('api-1-directory-uppercase-checksum', url_args={'sha1_git': directory.upper()}) resp = self.client.get(url) self.assertEqual(resp.status_code, 302) - redirect_url = reverse('api-directory', + redirect_url = reverse('api-1-directory', url_args={'sha1_git': directory}) self.assertEqual(resp['location'], redirect_url) @classmethod def _enrich_dir_data(cls, dir_data): if dir_data['type'] == 'file': dir_data['target_url'] = \ - reverse('api-content', + reverse('api-1-content', url_args={'q': 'sha1_git:%s' % dir_data['target']}) elif dir_data['type'] == 'dir': dir_data['target_url'] = \ - reverse('api-directory', + reverse('api-1-directory', 
url_args={'sha1_git': dir_data['target']}) elif dir_data['type'] == 'rev': dir_data['target_url'] = \ - reverse('api-revision', + reverse('api-1-revision', url_args={'sha1_git': dir_data['target']}) return dir_data diff --git a/swh/web/tests/api/views/test_identifiers.py b/swh/web/tests/api/views/test_identifiers.py index 05c55e46..21cba145 100644 --- a/swh/web/tests/api/views/test_identifiers.py +++ b/swh/web/tests/api/views/test_identifiers.py @@ -1,91 +1,91 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from hypothesis import given from rest_framework.test import APITestCase from swh.model.identifiers import ( CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT ) from swh.web.common.utils import reverse from swh.web.tests.strategies import ( content, directory, origin, release, revision, snapshot, unknown_content, unknown_directory, unknown_release, unknown_revision, unknown_snapshot ) from swh.web.tests.testcase import WebTestCase class SwhIdsApiTestCase(WebTestCase, APITestCase): @given(origin(), content(), directory(), release(), revision(), snapshot()) def test_swh_id_resolve_success(self, origin, content, directory, release, revision, snapshot): for obj_type_short, obj_type, obj_id in ( ('cnt', CONTENT, content['sha1_git']), ('dir', DIRECTORY, directory), ('rel', RELEASE, release), ('rev', REVISION, revision), ('snp', SNAPSHOT, snapshot)): swh_id = 'swh:1:%s:%s;origin=%s' % (obj_type_short, obj_id, origin['url']) - url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id}) + url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id}) resp = self.client.get(url) if obj_type == CONTENT: url_args = {'query_string': 'sha1_git:%s' % obj_id} elif obj_type == SNAPSHOT: url_args = {'snapshot_id': obj_id} else: url_args = {'sha1_git': obj_id} 
browse_rev_url = reverse('browse-%s' % obj_type, url_args=url_args, query_params={'origin': origin['url']}) expected_result = { 'browse_url': browse_rev_url, 'metadata': {'origin': origin['url']}, 'namespace': 'swh', 'object_id': obj_id, 'object_type': obj_type, 'scheme_version': 1 } self.assertEqual(resp.status_code, 200) self.assertEqual(resp.data, expected_result) def test_swh_id_resolve_invalid(self): rev_id_invalid = '96db9023b8_foo_50d6c108e9a3' swh_id = 'swh:1:rev:%s' % rev_id_invalid - url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id}) + url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id}) resp = self.client.get(url) self.assertEqual(resp.status_code, 400) @given(unknown_content(), unknown_directory(), unknown_release(), unknown_revision(), unknown_snapshot()) def test_swh_id_resolve_not_found(self, unknown_content, unknown_directory, unknown_release, unknown_revision, unknown_snapshot): for obj_type_short, obj_id in (('cnt', unknown_content['sha1_git']), ('dir', unknown_directory), ('rel', unknown_release), ('rev', unknown_revision), ('snp', unknown_snapshot)): swh_id = 'swh:1:%s:%s' % (obj_type_short, obj_id) - url = reverse('api-resolve-swh-pid', url_args={'swh_id': swh_id}) + url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id}) resp = self.client.get(url) self.assertEqual(resp.status_code, 404) diff --git a/swh/web/tests/api/views/test_origin.py b/swh/web/tests/api/views/test_origin.py index 14307e3c..995fed77 100644 --- a/swh/web/tests/api/views/test_origin.py +++ b/swh/web/tests/api/views/test_origin.py @@ -1,375 +1,375 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import random from hypothesis import given from rest_framework.test import APITestCase from unittest.mock import patch from 
swh.storage.exc import StorageDBError, StorageAPIError from swh.web.common.utils import reverse from swh.web.common.origin_visits import get_origin_visits from swh.web.tests.strategies import ( origin, new_origin, new_origins, visit_dates, new_snapshots ) from swh.web.tests.testcase import WebTestCase class OriginApiTestCase(WebTestCase, APITestCase): @patch('swh.web.api.views.origin.get_origin_visits') def test_api_lookup_origin_visits_raise_error( self, mock_get_origin_visits, ): err_msg = 'voluntary error to check the bad request middleware.' mock_get_origin_visits.side_effect = ValueError(err_msg) - url = reverse('api-origin-visits', url_args={'origin_id': 2}) + url = reverse('api-1-origin-visits', url_args={'origin_id': 2}) rv = self.client.get(url) self.assertEqual(rv.status_code, 400) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'ValueError', 'reason': err_msg}) @patch('swh.web.api.views.origin.get_origin_visits') def test_api_lookup_origin_visits_raise_swh_storage_error_db( self, mock_get_origin_visits): err_msg = 'Storage exploded! Will be back online shortly!' mock_get_origin_visits.side_effect = StorageDBError(err_msg) - url = reverse('api-origin-visits', url_args={'origin_id': 2}) + url = reverse('api-1-origin-visits', url_args={'origin_id': 2}) rv = self.client.get(url) self.assertEqual(rv.status_code, 503) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'StorageDBError', 'reason': 'An unexpected error occurred in the backend: %s' % err_msg}) @patch('swh.web.api.views.origin.get_origin_visits') def test_api_lookup_origin_visits_raise_swh_storage_error_api( self, mock_get_origin_visits): err_msg = 'Storage API dropped dead! Will resurrect asap!' 
mock_get_origin_visits.side_effect = StorageAPIError(err_msg) - url = reverse('api-origin-visits', url_args={'origin_id': 2}) + url = reverse('api-1-origin-visits', url_args={'origin_id': 2}) rv = self.client.get(url) self.assertEqual(rv.status_code, 503) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'StorageAPIError', 'reason': 'An unexpected error occurred in the api backend: %s' % err_msg }) @given(new_origin(), visit_dates(3), new_snapshots(3)) def test_api_lookup_origin_visits(self, new_origin, visit_dates, new_snapshots): origin_id = self.storage.origin_add_one(new_origin) new_origin['id'] = origin_id for i, visit_date in enumerate(visit_dates): origin_visit = self.storage.origin_visit_add(origin_id, visit_date) self.storage.snapshot_add(origin_id, origin_visit['visit'], new_snapshots[i]) all_visits = list(reversed(get_origin_visits(new_origin))) for last_visit, expected_visits in ( (None, all_visits[:2]), (all_visits[1]['visit'], all_visits[2:4])): - url = reverse('api-origin-visits', + url = reverse('api-1-origin-visits', url_args={'origin_id': origin_id}, query_params={'per_page': 2, 'last_visit': last_visit}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') for expected_visit in expected_visits: origin_visit_url = reverse( - 'api-origin-visit', + 'api-1-origin-visit', url_args={'origin_id': origin_id, 'visit_id': expected_visit['visit']}) snapshot_url = reverse( - 'api-snapshot', + 'api-1-snapshot', url_args={'snapshot_id': expected_visit['snapshot']}) expected_visit['origin_visit_url'] = origin_visit_url expected_visit['snapshot_url'] = snapshot_url self.assertEqual(rv.data, expected_visits) @given(new_origin(), visit_dates(3), new_snapshots(3)) def test_api_lookup_origin_visit(self, new_origin, visit_dates, new_snapshots): origin_id = self.storage.origin_add_one(new_origin) new_origin['id'] = origin_id for i, visit_date in 
enumerate(visit_dates): origin_visit = self.storage.origin_visit_add(origin_id, visit_date) visit_id = origin_visit['visit'] self.storage.snapshot_add(origin_id, origin_visit['visit'], new_snapshots[i]) - url = reverse('api-origin-visit', + url = reverse('api-1-origin-visit', url_args={'origin_id': origin_id, 'visit_id': visit_id}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') expected_visit = self.origin_visit_get_by(origin_id, visit_id) - origin_url = reverse('api-origin', + origin_url = reverse('api-1-origin', url_args={'origin_id': origin_id}) snapshot_url = reverse( - 'api-snapshot', + 'api-1-snapshot', url_args={'snapshot_id': expected_visit['snapshot']}) expected_visit['origin_url'] = origin_url expected_visit['snapshot_url'] = snapshot_url self.assertEqual(rv.data, expected_visit) @given(origin()) def test_api_lookup_origin_visit_not_found(self, origin): all_visits = list(reversed(get_origin_visits(origin))) max_visit_id = max([v['visit'] for v in all_visits]) - url = reverse('api-origin-visit', + url = reverse('api-1-origin-visit', url_args={'origin_id': origin['id'], 'visit_id': max_visit_id + 1}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Origin with id %s or its visit with id %s not found!' 
% (origin['id'], max_visit_id+1) }) @given(origin()) def test_api_origin_by_id(self, origin): - url = reverse('api-origin', url_args={'origin_id': origin['id']}) + url = reverse('api-1-origin', url_args={'origin_id': origin['id']}) rv = self.client.get(url) expected_origin = self.origin_get(origin) - origin_visits_url = reverse('api-origin-visits', + origin_visits_url = reverse('api-1-origin-visits', url_args={'origin_id': origin['id']}) expected_origin['origin_visits_url'] = origin_visits_url self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_origin) @given(origin()) def test_api_origin_by_type_url(self, origin): - url = reverse('api-origin', + url = reverse('api-1-origin', url_args={'origin_type': origin['type'], 'origin_url': origin['url']}) rv = self.client.get(url) expected_origin = self.origin_get(origin) - origin_visits_url = reverse('api-origin-visits', + origin_visits_url = reverse('api-1-origin-visits', url_args={'origin_id': origin['id']}) expected_origin['origin_visits_url'] = origin_visits_url self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_origin) @given(new_origin()) def test_api_origin_not_found(self, new_origin): - url = reverse('api-origin', + url = reverse('api-1-origin', url_args={'origin_type': new_origin['type'], 'origin_url': new_origin['url']}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Origin with type %s and url %s not found!' 
% (new_origin['type'], new_origin['url']) }) @given(origin()) def test_api_origin_metadata_search(self, origin): with patch('swh.web.common.service.idx_storage') as mock_idx_storage: mock_idx_storage.origin_intrinsic_metadata_search_fulltext \ .side_effect = lambda conjunction, limit: [{ 'from_revision': ( b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed ' b'\xf2U\xfa\x05B8'), 'metadata': {'author': 'Jane Doe'}, 'id': origin['id'], 'tool': { 'configuration': { 'context': ['NpmMapping', 'CodemetaMapping'], 'type': 'local' }, 'id': 3, 'name': 'swh-metadata-detector', 'version': '0.0.1' } }] - url = reverse('api-origin-metadata-search', + url = reverse('api-1-origin-metadata-search', query_params={'fulltext': 'Jane Doe'}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200, rv.content) self.assertEqual(rv['Content-Type'], 'application/json') expected_data = [{ 'id': origin['id'], 'type': origin['type'], 'url': origin['url'], 'metadata': { 'metadata': {'author': 'Jane Doe'}, 'from_revision': ( '7026b7c1a2af56521e951c01ed20f255fa054238'), 'tool': { 'configuration': { 'context': ['NpmMapping', 'CodemetaMapping'], 'type': 'local' }, 'id': 3, 'name': 'swh-metadata-detector', 'version': '0.0.1', } } }] self.assertEqual(rv.data, expected_data) mock_idx_storage.origin_intrinsic_metadata_search_fulltext \ .assert_called_with(conjunction=['Jane Doe'], limit=70) @given(origin()) def test_api_origin_metadata_search_limit(self, origin): with patch('swh.web.common.service.idx_storage') as mock_idx_storage: mock_idx_storage.origin_intrinsic_metadata_search_fulltext \ .side_effect = lambda conjunction, limit: [{ 'from_revision': ( b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed ' b'\xf2U\xfa\x05B8'), 'metadata': {'author': 'Jane Doe'}, 'id': origin['id'], 'tool': { 'configuration': { 'context': ['NpmMapping', 'CodemetaMapping'], 'type': 'local' }, 'id': 3, 'name': 'swh-metadata-detector', 'version': '0.0.1' } }] - url = reverse('api-origin-metadata-search', + url = 
reverse('api-1-origin-metadata-search', query_params={'fulltext': 'Jane Doe'}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200, rv.content) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(len(rv.data), 1) mock_idx_storage.origin_intrinsic_metadata_search_fulltext \ .assert_called_with(conjunction=['Jane Doe'], limit=70) - url = reverse('api-origin-metadata-search', + url = reverse('api-1-origin-metadata-search', query_params={'fulltext': 'Jane Doe', 'limit': 10}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200, rv.content) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(len(rv.data), 1) mock_idx_storage.origin_intrinsic_metadata_search_fulltext \ .assert_called_with(conjunction=['Jane Doe'], limit=10) - url = reverse('api-origin-metadata-search', + url = reverse('api-1-origin-metadata-search', query_params={'fulltext': 'Jane Doe', 'limit': 987}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200, rv.content) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(len(rv.data), 1) mock_idx_storage.origin_intrinsic_metadata_search_fulltext \ .assert_called_with(conjunction=['Jane Doe'], limit=100) @patch('swh.web.common.service.idx_storage') def test_api_origin_metadata_search_invalid(self, mock_idx_storage): - url = reverse('api-origin-metadata-search') + url = reverse('api-1-origin-metadata-search') rv = self.client.get(url) self.assertEqual(rv.status_code, 400, rv.content) mock_idx_storage.assert_not_called() @given(new_origins(10)) def test_api_lookup_origins(self, new_origins): nb_origins = len(new_origins) expected_origins = self.storage.origin_add(new_origins) origin_from_idx = random.randint(1, nb_origins-1) - 1 origin_from = expected_origins[origin_from_idx]['id'] max_origin_id = expected_origins[-1]['id'] origin_count = random.randint(1, max_origin_id - origin_from) - url = reverse('api-origins', + url = reverse('api-1-origins', 
query_params={'origin_from': origin_from, 'origin_count': origin_count}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) start = origin_from_idx end = origin_from_idx + origin_count expected_origins = expected_origins[start:end] for expected_origin in expected_origins: expected_origin['origin_visits_url'] = reverse( - 'api-origin-visits', + 'api-1-origin-visits', url_args={'origin_id': expected_origin['id']}) self.assertEqual(rv.data, expected_origins) next_origin_id = expected_origins[-1]['id']+1 if self.storage.origin_get({'id': next_origin_id}): self.assertIn('Link', rv) - next_url = reverse('api-origins', + next_url = reverse('api-1-origins', query_params={'origin_from': next_origin_id, 'origin_count': origin_count}) self.assertIn(next_url, rv['Link']) diff --git a/swh/web/tests/api/views/test_origin_save.py b/swh/web/tests/api/views/test_origin_save.py index c210dbf5..3bd7673c 100644 --- a/swh/web/tests/api/views/test_origin_save.py +++ b/swh/web/tests/api/views/test_origin_save.py @@ -1,261 +1,261 @@ # Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime, timedelta from django.utils import timezone from rest_framework.test import APITestCase from unittest.mock import patch from swh.web.common.utils import reverse from swh.web.common.models import ( SaveUnauthorizedOrigin, SaveOriginRequest, SAVE_REQUEST_ACCEPTED, SAVE_REQUEST_REJECTED, SAVE_REQUEST_PENDING ) from swh.web.common.models import ( SAVE_TASK_NOT_CREATED, SAVE_TASK_NOT_YET_SCHEDULED, SAVE_TASK_SCHEDULED, SAVE_TASK_FAILED, SAVE_TASK_SUCCEED ) from swh.web.tests.testcase import WebTestCase class SaveApiTestCase(WebTestCase, APITestCase): @classmethod def setUpTestData(cls): # noqa: N802 SaveUnauthorizedOrigin.objects.create( 
url='https://github.com/user/illegal_repo') SaveUnauthorizedOrigin.objects.create( url='https://gitlab.com/user_to_exclude') def test_invalid_origin_type(self): - url = reverse('api-save-origin', + url = reverse('api-1-save-origin', url_args={'origin_type': 'foo', 'origin_url': 'https://github.com/torvalds/linux'}) # noqa response = self.client.post(url) self.assertEqual(response.status_code, 400) def test_invalid_origin_url(self): - url = reverse('api-save-origin', + url = reverse('api-1-save-origin', url_args={'origin_type': 'git', 'origin_url': 'bar'}) response = self.client.post(url) self.assertEqual(response.status_code, 400) def check_created_save_request_status(self, mock_scheduler, origin_url, scheduler_task_status, expected_request_status, expected_task_status=None, visit_date=None): if not scheduler_task_status: mock_scheduler.get_tasks.return_value = [] else: mock_scheduler.get_tasks.return_value = \ [{ 'priority': 'high', 'policy': 'oneshot', 'type': 'load-git', 'arguments': { 'kwargs': { 'repo_url': origin_url }, 'args': [] }, 'status': scheduler_task_status, 'id': 1, }] mock_scheduler.create_tasks.return_value = \ [{ 'priority': 'high', 'policy': 'oneshot', 'type': 'load-git', 'arguments': { 'kwargs': { 'repo_url': origin_url }, 'args': [] }, 'status': 'next_run_not_scheduled', 'id': 1, }] - url = reverse('api-save-origin', + url = reverse('api-1-save-origin', url_args={'origin_type': 'git', 'origin_url': origin_url}) with patch('swh.web.common.origin_save._get_visit_info_for_save_request') as mock_visit_date: # noqa mock_visit_date.return_value = (visit_date, None) response = self.client.post(url) if expected_request_status != SAVE_REQUEST_REJECTED: self.assertEqual(response.status_code, 200) self.assertEqual(response.data['save_request_status'], expected_request_status) self.assertEqual(response.data['save_task_status'], expected_task_status) else: self.assertEqual(response.status_code, 403) def check_save_request_status(self, mock_scheduler, 
origin_url, expected_request_status, expected_task_status, scheduler_task_status='next_run_not_scheduled', # noqa visit_date=None): mock_scheduler.get_tasks.return_value = \ [{ 'priority': 'high', 'policy': 'oneshot', 'type': 'load-git', 'arguments': { 'kwargs': { 'repo_url': origin_url }, 'args': [] }, 'status': scheduler_task_status, 'id': 1, }] - url = reverse('api-save-origin', + url = reverse('api-1-save-origin', url_args={'origin_type': 'git', 'origin_url': origin_url}) with patch('swh.web.common.origin_save._get_visit_info_for_save_request') as mock_visit_date: # noqa mock_visit_date.return_value = (visit_date, None) response = self.client.get(url) self.assertEqual(response.status_code, 200) save_request_data = response.data[0] self.assertEqual(save_request_data['save_request_status'], expected_request_status) self.assertEqual(save_request_data['save_task_status'], expected_task_status) # Check that save task status is still available when # the scheduler task has been archived mock_scheduler.get_tasks.return_value = [] response = self.client.get(url) self.assertEqual(response.status_code, 200) save_request_data = response.data[0] self.assertEqual(save_request_data['save_task_status'], expected_task_status) @patch('swh.web.common.origin_save.scheduler') def test_save_request_rejected(self, mock_scheduler): origin_url = 'https://github.com/user/illegal_repo' self.check_created_save_request_status(mock_scheduler, origin_url, None, SAVE_REQUEST_REJECTED) self.check_save_request_status(mock_scheduler, origin_url, SAVE_REQUEST_REJECTED, SAVE_TASK_NOT_CREATED) @patch('swh.web.common.origin_save.scheduler') def test_save_request_pending(self, mock_scheduler): origin_url = 'https://unkwownforge.com/user/repo' self.check_created_save_request_status(mock_scheduler, origin_url, None, SAVE_REQUEST_PENDING, SAVE_TASK_NOT_CREATED) self.check_save_request_status(mock_scheduler, origin_url, SAVE_REQUEST_PENDING, SAVE_TASK_NOT_CREATED) 
@patch('swh.web.common.origin_save.scheduler') def test_save_request_succeed(self, mock_scheduler): origin_url = 'https://github.com/Kitware/CMake' self.check_created_save_request_status(mock_scheduler, origin_url, None, SAVE_REQUEST_ACCEPTED, SAVE_TASK_NOT_YET_SCHEDULED) self.check_save_request_status(mock_scheduler, origin_url, SAVE_REQUEST_ACCEPTED, SAVE_TASK_SCHEDULED, scheduler_task_status='next_run_scheduled') # noqa self.check_save_request_status(mock_scheduler, origin_url, SAVE_REQUEST_ACCEPTED, SAVE_TASK_SCHEDULED, scheduler_task_status='completed', visit_date=None) # noqa visit_date = datetime.now(tz=timezone.utc) + timedelta(hours=1) self.check_save_request_status(mock_scheduler, origin_url, SAVE_REQUEST_ACCEPTED, SAVE_TASK_SUCCEED, scheduler_task_status='completed', visit_date=visit_date) # noqa @patch('swh.web.common.origin_save.scheduler') def test_save_request_failed(self, mock_scheduler): origin_url = 'https://gitlab.com/inkscape/inkscape' self.check_created_save_request_status(mock_scheduler, origin_url, None, SAVE_REQUEST_ACCEPTED, SAVE_TASK_NOT_YET_SCHEDULED) self.check_save_request_status(mock_scheduler, origin_url, SAVE_REQUEST_ACCEPTED, SAVE_TASK_SCHEDULED, scheduler_task_status='next_run_scheduled') # noqa self.check_save_request_status(mock_scheduler, origin_url, SAVE_REQUEST_ACCEPTED, SAVE_TASK_FAILED, scheduler_task_status='disabled') # noqa @patch('swh.web.common.origin_save.scheduler') def test_create_save_request_only_when_needed(self, mock_scheduler): origin_url = 'https://github.com/webpack/webpack' SaveOriginRequest.objects.create(origin_type='git', origin_url=origin_url, status=SAVE_REQUEST_ACCEPTED, # noqa loading_task_id=56) self.check_created_save_request_status(mock_scheduler, origin_url, 'next_run_not_scheduled', SAVE_REQUEST_ACCEPTED, SAVE_TASK_NOT_YET_SCHEDULED) sors = list(SaveOriginRequest.objects.filter(origin_type='git', origin_url=origin_url)) self.assertEqual(len(sors), 1) 
self.check_created_save_request_status(mock_scheduler, origin_url, 'next_run_scheduled', SAVE_REQUEST_ACCEPTED, SAVE_TASK_SCHEDULED) sors = list(SaveOriginRequest.objects.filter(origin_type='git', origin_url=origin_url)) self.assertEqual(len(sors), 1) visit_date = datetime.now(tz=timezone.utc) + timedelta(hours=1) self.check_created_save_request_status(mock_scheduler, origin_url, 'completed', SAVE_REQUEST_ACCEPTED, SAVE_TASK_NOT_YET_SCHEDULED, visit_date=visit_date) sors = list(SaveOriginRequest.objects.filter(origin_type='git', origin_url=origin_url)) self.assertEqual(len(sors), 2) self.check_created_save_request_status(mock_scheduler, origin_url, 'disabled', SAVE_REQUEST_ACCEPTED, SAVE_TASK_NOT_YET_SCHEDULED) sors = list(SaveOriginRequest.objects.filter(origin_type='git', origin_url=origin_url)) self.assertEqual(len(sors), 3) def test_get_save_requests_unknown_origin(self): unknown_origin_url = 'https://gitlab.com/foo/bar' - url = reverse('api-save-origin', + url = reverse('api-1-save-origin', url_args={'origin_type': 'git', 'origin_url': unknown_origin_url}) response = self.client.get(url) self.assertEqual(response.status_code, 404) self.assertEqual(response.data, { 'exception': 'NotFoundExc', 'reason': ('No save requests found for origin with type ' 'git and url %s.') % unknown_origin_url }) diff --git a/swh/web/tests/api/views/test_person.py b/swh/web/tests/api/views/test_person.py index aeb2d103..f6cab9a8 100644 --- a/swh/web/tests/api/views/test_person.py +++ b/swh/web/tests/api/views/test_person.py @@ -1,42 +1,42 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import random from hypothesis import given from rest_framework.test import APITestCase from swh.web.common.utils import reverse from swh.web.tests.strategies import person from 
swh.web.tests.testcase import WebTestCase class PersonApiTestCase(WebTestCase, APITestCase): @given(person()) def test_api_person(self, person): - url = reverse('api-person', url_args={'person_id': person}) + url = reverse('api-1-person', url_args={'person_id': person}) rv = self.client.get(url) expected_person = self.person_get(person) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_person) def test_api_person_not_found(self): unknown_person_ = random.randint(1000, 10000000) - url = reverse('api-person', url_args={'person_id': unknown_person_}) + url = reverse('api-1-person', url_args={'person_id': unknown_person_}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Person with id %s not found' % unknown_person_}) diff --git a/swh/web/tests/api/views/test_release.py b/swh/web/tests/api/views/test_release.py index eff189b4..f4b216f4 100644 --- a/swh/web/tests/api/views/test_release.py +++ b/swh/web/tests/api/views/test_release.py @@ -1,125 +1,125 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime from hypothesis import given from rest_framework.test import APITestCase from swh.model.hashutil import hash_to_bytes from swh.web.common.utils import reverse from swh.web.tests.data import random_sha1 from swh.web.tests.strategies import ( release, sha1, content, directory ) from swh.web.tests.testcase import WebTestCase class ReleaseApiTestCase(WebTestCase, APITestCase): @given(release()) def test_api_release(self, release): - url = reverse('api-release', url_args={'sha1_git': release}) + url = reverse('api-1-release', 
url_args={'sha1_git': release}) rv = self.client.get(url) expected_release = self.release_get(release) author_id = expected_release['author']['id'] target_revision = expected_release['target'] - author_url = reverse('api-person', + author_url = reverse('api-1-person', url_args={'person_id': author_id}) - target_url = reverse('api-revision', + target_url = reverse('api-1-revision', url_args={'sha1_git': target_revision}) expected_release['author_url'] = author_url expected_release['target_url'] = target_url self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_release) @given(sha1(), sha1(), sha1(), content(), directory(), release()) def test_api_release_target_type_not_a_revision(self, new_rel1, new_rel2, new_rel3, content, directory, release): for new_rel_id, target_type, target in ( (new_rel1, 'content', content), (new_rel2, 'directory', directory), (new_rel3, 'release', release)): if target_type == 'content': target = target['sha1_git'] sample_release = { 'author': { 'email': b'author@company.org', 'fullname': b'author ', 'name': b'author' }, 'date': { 'timestamp': int(datetime.now().timestamp()), 'offset': 0, 'negative_utc': False, }, 'id': hash_to_bytes(new_rel_id), 'message': b'sample release message', 'name': b'sample release', 'synthetic': False, 'target': hash_to_bytes(target), 'target_type': target_type } self.storage.release_add([sample_release]) - url = reverse('api-release', url_args={'sha1_git': new_rel_id}) + url = reverse('api-1-release', url_args={'sha1_git': new_rel_id}) rv = self.client.get(url) expected_release = self.release_get(new_rel_id) author_id = expected_release['author']['id'] - author_url = reverse('api-person', + author_url = reverse('api-1-person', url_args={'person_id': author_id}) if target_type == 'content': url_args = {'q': 'sha1_git:%s' % target} else: url_args = {'sha1_git': target} - target_url = reverse('api-%s' % target_type, + target_url = 
reverse('api-1-%s' % target_type, url_args=url_args) expected_release['author_url'] = author_url expected_release['target_url'] = target_url self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_release) def test_api_release_not_found(self): unknown_release_ = random_sha1() - url = reverse('api-release', url_args={'sha1_git': unknown_release_}) + url = reverse('api-1-release', url_args={'sha1_git': unknown_release_}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Release with sha1_git %s not found.' % unknown_release_ }) @given(release()) def test_api_release_uppercase(self, release): - url = reverse('api-release-uppercase-checksum', + url = reverse('api-1-release-uppercase-checksum', url_args={'sha1_git': release.upper()}) resp = self.client.get(url) self.assertEqual(resp.status_code, 302) - redirect_url = reverse('api-release-uppercase-checksum', + redirect_url = reverse('api-1-release-uppercase-checksum', url_args={'sha1_git': release}) self.assertEqual(resp['location'], redirect_url) diff --git a/swh/web/tests/api/views/test_revision.py b/swh/web/tests/api/views/test_revision.py index 306b7991..457d61be 100644 --- a/swh/web/tests/api/views/test_revision.py +++ b/swh/web/tests/api/views/test_revision.py @@ -1,539 +1,539 @@ # Copyright (C) 2015-2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import random from hypothesis import given from rest_framework.test import APITestCase from unittest.mock import patch from swh.model.hashutil import hash_to_hex from swh.web.common.exc import NotFoundExc from swh.web.common.utils import reverse, parse_timestamp from 
swh.web.tests.data import random_sha1 from swh.web.tests.strategies import ( revision, new_revision, origin, origin_with_multiple_visits ) from swh.web.tests.testcase import WebTestCase class RevisionApiTestCase(WebTestCase, APITestCase): @given(revision()) def test_api_revision(self, revision): - url = reverse('api-revision', url_args={'sha1_git': revision}) + url = reverse('api-1-revision', url_args={'sha1_git': revision}) rv = self.client.get(url) expected_revision = self.revision_get(revision) self._enrich_revision(expected_revision) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_revision) def test_api_revision_not_found(self): unknown_revision_ = random_sha1() - url = reverse('api-revision', + url = reverse('api-1-revision', url_args={'sha1_git': unknown_revision_}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Revision with sha1_git %s not found.' 
% unknown_revision_}) @given(revision()) def test_api_revision_raw_ok(self, revision): - url = reverse('api-revision-raw-message', + url = reverse('api-1-revision-raw-message', url_args={'sha1_git': revision}) rv = self.client.get(url) expected_message = self.revision_get(revision)['message'] self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/octet-stream') self.assertEqual(rv.content, expected_message.encode()) @given(new_revision()) def test_api_revision_raw_ok_no_msg(self, new_revision): del new_revision['message'] self.storage.revision_add([new_revision]) new_revision_id = hash_to_hex(new_revision['id']) - url = reverse('api-revision-raw-message', + url = reverse('api-1-revision-raw-message', url_args={'sha1_git': new_revision_id}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'No message for revision with sha1_git %s.' % new_revision_id}) def test_api_revision_raw_ko_no_rev(self): unknown_revision_ = random_sha1() - url = reverse('api-revision-raw-message', + url = reverse('api-1-revision-raw-message', url_args={'sha1_git': unknown_revision_}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Revision with sha1_git %s not found.' % unknown_revision_}) def test_api_revision_with_origin_not_found(self): unknown_origin_id_ = random.randint(1000, 1000000) - url = reverse('api-revision-origin', + url = reverse('api-1-revision-origin', url_args={'origin_id': unknown_origin_id_}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Origin with id %s not found!' 
% unknown_origin_id_}) @given(origin()) def test_api_revision_with_origin(self, origin): - url = reverse('api-revision-origin', + url = reverse('api-1-revision-origin', url_args={'origin_id': origin['id']}) rv = self.client.get(url) snapshot = self.snapshot_get_latest(origin['id']) expected_revision = self.revision_get( snapshot['branches']['HEAD']['target']) self._enrich_revision(expected_revision) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_revision) @given(origin()) def test_api_revision_with_origin_and_branch_name(self, origin): snapshot = self.snapshot_get_latest(origin['id']) branch_name = random.choice( list(b for b in snapshot['branches'].keys() if snapshot['branches'][b]['target_type'] == 'revision')) - url = reverse('api-revision-origin', + url = reverse('api-1-revision-origin', url_args={'origin_id': origin['id'], 'branch_name': branch_name}) rv = self.client.get(url) expected_revision = self.revision_get( snapshot['branches'][branch_name]['target']) self._enrich_revision(expected_revision) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_revision) @given(origin_with_multiple_visits()) def test_api_revision_with_origin_and_branch_name_and_ts(self, origin): visit = random.choice(self.origin_visit_get(origin['id'])) snapshot = self.snapshot_get(visit['snapshot']) branch_name = random.choice( list(b for b in snapshot['branches'].keys() if snapshot['branches'][b]['target_type'] == 'revision')) - url = reverse('api-revision-origin', + url = reverse('api-1-revision-origin', url_args={'origin_id': origin['id'], 'branch_name': branch_name, 'ts': visit['date']}) rv = self.client.get(url) expected_revision = self.revision_get( snapshot['branches'][branch_name]['target']) self._enrich_revision(expected_revision) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 
'application/json') self.assertEqual(rv.data, expected_revision) @given(origin_with_multiple_visits()) def test_api_revision_with_origin_and_branch_name_and_ts_escapes(self, origin): visit = random.choice(self.origin_visit_get(origin['id'])) snapshot = self.snapshot_get(visit['snapshot']) branch_name = random.choice( list(b for b in snapshot['branches'].keys() if snapshot['branches'][b]['target_type'] == 'revision')) date = parse_timestamp(visit['date']) formatted_date = date.strftime('Today is %B %d, %Y at %X') - url = reverse('api-revision-origin', + url = reverse('api-1-revision-origin', url_args={'origin_id': origin['id'], 'branch_name': branch_name, 'ts': formatted_date}) rv = self.client.get(url) expected_revision = self.revision_get( snapshot['branches'][branch_name]['target']) self._enrich_revision(expected_revision) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_revision) def test_api_directory_through_revision_origin_ko(self): unknown_origin_id_ = random.randint(1000, 1000000) - url = reverse('api-revision-origin-directory', + url = reverse('api-1-revision-origin-directory', url_args={'origin_id': unknown_origin_id_}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Origin with id %s not found!' 
% unknown_origin_id_ }) @given(origin()) def test_api_directory_through_revision_origin(self, origin): - url = reverse('api-revision-origin-directory', + url = reverse('api-1-revision-origin-directory', url_args={'origin_id': origin['id']}) rv = self.client.get(url) snapshot = self.snapshot_get_latest(origin['id']) revision_id = snapshot['branches']['HEAD']['target'] revision = self.revision_get(revision_id) directory = self.directory_ls(revision['directory']) for entry in directory: if entry['type'] == 'dir': entry['target_url'] = reverse( - 'api-directory', + 'api-1-directory', url_args={'sha1_git': entry['target']} ) entry['dir_url'] = reverse( - 'api-revision-origin-directory', + 'api-1-revision-origin-directory', url_args={'origin_id': origin['id'], 'path': entry['name']}) elif entry['type'] == 'file': entry['target_url'] = reverse( - 'api-content', + 'api-1-content', url_args={'q': 'sha1_git:%s' % entry['target']} ) entry['file_url'] = reverse( - 'api-revision-origin-directory', + 'api-1-revision-origin-directory', url_args={'origin_id': origin['id'], 'path': entry['name']}) elif entry['type'] == 'rev': entry['target_url'] = reverse( - 'api-revision', + 'api-1-revision', url_args={'sha1_git': entry['target']} ) entry['rev_url'] = reverse( - 'api-revision-origin-directory', + 'api-1-revision-origin-directory', url_args={'origin_id': origin['id'], 'path': entry['name']}) expected_result = { 'content': directory, 'path': '.', 'revision': revision_id, 'type': 'dir' } self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_result) @given(revision()) def test_api_revision_log(self, revision): per_page = 10 - url = reverse('api-revision-log', url_args={'sha1_git': revision}, + url = reverse('api-1-revision-log', url_args={'sha1_git': revision}, query_params={'per_page': per_page}) rv = self.client.get(url) expected_log = self.revision_log(revision, limit=per_page+1) expected_log = 
list(map(self._enrich_revision, expected_log)) has_next = len(expected_log) > per_page self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_log[:-1] if has_next else expected_log) if has_next: self.assertIn('Link', rv) next_log_url = reverse( - 'api-revision-log', + 'api-1-revision-log', url_args={'sha1_git': expected_log[-1]['id']}, query_params={'per_page': per_page}) self.assertIn(next_log_url, rv['Link']) def test_api_revision_log_not_found(self): unknown_revision_ = random_sha1() - url = reverse('api-revision-log', + url = reverse('api-1-revision-log', url_args={'sha1_git': unknown_revision_}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Revision with sha1_git %s not found.' % unknown_revision_}) self.assertFalse(rv.has_header('Link')) @given(revision()) def test_api_revision_log_context(self, revision): revisions = self.revision_log(revision, limit=4) prev_rev = revisions[0]['id'] rev = revisions[-1]['id'] per_page = 10 - url = reverse('api-revision-log', + url = reverse('api-1-revision-log', url_args={'sha1_git': rev, 'prev_sha1s': prev_rev}, query_params={'per_page': per_page}) rv = self.client.get(url) expected_log = self.revision_log(rev, limit=per_page) prev_revision = self.revision_get(prev_rev) expected_log.insert(0, prev_revision) expected_log = list(map(self._enrich_revision, expected_log)) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_log) @given(origin()) def test_api_revision_log_by(self, origin): per_page = 10 - url = reverse('api-revision-origin-log', + url = reverse('api-1-revision-origin-log', url_args={'origin_id': origin['id']}, query_params={'per_page': per_page}) rv = self.client.get(url) snapshot = self.snapshot_get_latest(origin['id']) 
expected_log = self.revision_log( snapshot['branches']['HEAD']['target'], limit=per_page+1) expected_log = list(map(self._enrich_revision, expected_log)) has_next = len(expected_log) > per_page self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_log[:-1] if has_next else expected_log) if has_next: self.assertIn('Link', rv) next_log_url = reverse( - 'api-revision-origin-log', + 'api-1-revision-origin-log', url_args={'origin_id': origin['id'], 'branch_name': 'HEAD'}, query_params={'per_page': per_page, 'sha1_git': expected_log[-1]['id']}) self.assertIn(next_log_url, rv['Link']) @given(origin()) def test_api_revision_log_by_ko(self, origin): invalid_branch_name = 'foobar' - url = reverse('api-revision-origin-log', + url = reverse('api-1-revision-origin-log', url_args={'origin_id': origin['id'], 'branch_name': invalid_branch_name}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertFalse(rv.has_header('Link')) self.assertEqual( rv.data, {'exception': 'NotFoundExc', 'reason': 'Revision for origin %s and branch %s not found.' 
% (origin['id'], invalid_branch_name)}) @patch('swh.web.api.views.revision._revision_directory_by') def test_api_revision_directory_ko_not_found(self, mock_rev_dir): # given mock_rev_dir.side_effect = NotFoundExc('Not found') # then rv = self.client.get('/api/1/revision/999/directory/some/path/to/dir/') self.assertEqual(rv.status_code, 404) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Not found'}) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path/to/dir', '/api/1/revision/999/directory/some/path/to/dir/', with_data=False) @patch('swh.web.api.views.revision._revision_directory_by') def test_api_revision_directory_ok_returns_dir_entries(self, mock_rev_dir): stub_dir = { 'type': 'dir', 'revision': '999', 'content': [ { 'sha1_git': '789', 'type': 'file', 'target': '101', 'target_url': '/api/1/content/sha1_git:101/', 'name': 'somefile', 'file_url': '/api/1/revision/999/directory/some/path/' 'somefile/' }, { 'sha1_git': '123', 'type': 'dir', 'target': '456', 'target_url': '/api/1/directory/456/', 'name': 'to-subdir', 'dir_url': '/api/1/revision/999/directory/some/path/' 'to-subdir/', }] } # given mock_rev_dir.return_value = stub_dir # then rv = self.client.get('/api/1/revision/999/directory/some/path/') self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, stub_dir) mock_rev_dir.assert_called_once_with( {'sha1_git': '999'}, 'some/path', '/api/1/revision/999/directory/some/path/', with_data=False) @patch('swh.web.api.views.revision._revision_directory_by') def test_api_revision_directory_ok_returns_content(self, mock_rev_dir): stub_content = { 'type': 'file', 'revision': '999', 'content': { 'sha1_git': '789', 'sha1': '101', 'data_url': '/api/1/content/101/raw/', } } # given mock_rev_dir.return_value = stub_content # then url = '/api/1/revision/666/directory/some/other/path/' rv = self.client.get(url) 
self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, stub_content) mock_rev_dir.assert_called_once_with( {'sha1_git': '666'}, 'some/other/path', url, with_data=False) def _enrich_revision(self, revision): author_url = reverse( - 'api-person', + 'api-1-person', url_args={'person_id': revision['author']['id']}) committer_url = reverse( - 'api-person', + 'api-1-person', url_args={'person_id': revision['committer']['id']}) directory_url = reverse( - 'api-directory', + 'api-1-directory', url_args={'sha1_git': revision['directory']}) - history_url = reverse('api-revision-log', + history_url = reverse('api-1-revision-log', url_args={'sha1_git': revision['id']}) parents_id_url = [] for p in revision['parents']: parents_id_url.append({ 'id': p, - 'url': reverse('api-revision', url_args={'sha1_git': p}) + 'url': reverse('api-1-revision', url_args={'sha1_git': p}) }) - revision_url = reverse('api-revision', + revision_url = reverse('api-1-revision', url_args={'sha1_git': revision['id']}) revision['author_url'] = author_url revision['committer_url'] = committer_url revision['directory_url'] = directory_url revision['history_url'] = history_url revision['url'] = revision_url revision['parents'] = parents_id_url return revision @given(revision()) def test_api_revision_uppercase(self, revision): - url = reverse('api-revision-uppercase-checksum', + url = reverse('api-1-revision-uppercase-checksum', url_args={'sha1_git': revision.upper()}) resp = self.client.get(url) self.assertEqual(resp.status_code, 302) - redirect_url = reverse('api-revision', + redirect_url = reverse('api-1-revision', url_args={'sha1_git': revision}) self.assertEqual(resp['location'], redirect_url) diff --git a/swh/web/tests/api/views/test_snapshot.py b/swh/web/tests/api/views/test_snapshot.py index d8eb5348..454a4aca 100644 --- a/swh/web/tests/api/views/test_snapshot.py +++ b/swh/web/tests/api/views/test_snapshot.py @@ -1,190 +1,190 @@ # 
Copyright (C) 2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information import random from hypothesis import given from rest_framework.test import APITestCase from swh.model.hashutil import hash_to_hex from swh.web.common.utils import reverse from swh.web.tests.data import random_sha1 from swh.web.tests.strategies import ( snapshot, new_snapshot ) from swh.web.tests.testcase import WebTestCase class SnapshotApiTestCase(WebTestCase, APITestCase): @given(snapshot()) def test_api_snapshot(self, snapshot): - url = reverse('api-snapshot', + url = reverse('api-1-snapshot', url_args={'snapshot_id': snapshot}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') expected_data = self.snapshot_get(snapshot) expected_data = self._enrich_snapshot(expected_data) self.assertEqual(rv.data, expected_data) @given(snapshot()) def test_api_snapshot_paginated(self, snapshot): branches_offset = 0 branches_count = 2 snapshot_branches = [] for k, v in sorted(self.snapshot_get(snapshot)['branches'].items()): snapshot_branches.append({ 'name': k, 'target_type': v['target_type'], 'target': v['target'] }) whole_snapshot = {'id': snapshot, 'branches': {}, 'next_branch': None} while branches_offset < len(snapshot_branches): branches_from = snapshot_branches[branches_offset]['name'] - url = reverse('api-snapshot', + url = reverse('api-1-snapshot', url_args={'snapshot_id': snapshot}, query_params={'branches_from': branches_from, 'branches_count': branches_count}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') expected_data = self.snapshot_get_branches(snapshot, branches_from, branches_count) expected_data = self._enrich_snapshot(expected_data) branches_offset += branches_count if 
branches_offset < len(snapshot_branches): next_branch = snapshot_branches[branches_offset]['name'] expected_data['next_branch'] = next_branch else: expected_data['next_branch'] = None self.assertEqual(rv.data, expected_data) whole_snapshot['branches'].update(expected_data['branches']) if branches_offset < len(snapshot_branches): next_url = reverse( - 'api-snapshot', + 'api-1-snapshot', url_args={'snapshot_id': snapshot}, query_params={'branches_from': next_branch, 'branches_count': branches_count}) self.assertEqual(rv['Link'], '<%s>; rel="next"' % next_url) else: self.assertFalse(rv.has_header('Link')) - url = reverse('api-snapshot', + url = reverse('api-1-snapshot', url_args={'snapshot_id': snapshot}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, whole_snapshot) @given(snapshot()) def test_api_snapshot_filtered(self, snapshot): snapshot_branches = [] for k, v in sorted(self.snapshot_get(snapshot)['branches'].items()): snapshot_branches.append({ 'name': k, 'target_type': v['target_type'], 'target': v['target'] }) target_type = random.choice(snapshot_branches)['target_type'] - url = reverse('api-snapshot', + url = reverse('api-1-snapshot', url_args={'snapshot_id': snapshot}, query_params={'target_types': target_type}) rv = self.client.get(url) expected_data = self.snapshot_get_branches( snapshot, target_types=target_type) expected_data = self._enrich_snapshot(expected_data) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_data) def test_api_snapshot_errors(self): unknown_snapshot_ = random_sha1() - url = reverse('api-snapshot', + url = reverse('api-1-snapshot', url_args={'snapshot_id': '63ce369'}) rv = self.client.get(url) self.assertEqual(rv.status_code, 400) - url = reverse('api-snapshot', + url = reverse('api-1-snapshot', url_args={'snapshot_id': unknown_snapshot_}) rv = 
self.client.get(url) self.assertEqual(rv.status_code, 404) def _enrich_snapshot(self, snapshot): def _get_branch_url(target_type, target): url = None if target_type == 'revision': - url = reverse('api-revision', url_args={'sha1_git': target}) + url = reverse('api-1-revision', url_args={'sha1_git': target}) if target_type == 'release': - url = reverse('api-release', url_args={'sha1_git': target}) + url = reverse('api-1-release', url_args={'sha1_git': target}) return url for branch in snapshot['branches'].keys(): target = snapshot['branches'][branch]['target'] target_type = snapshot['branches'][branch]['target_type'] snapshot['branches'][branch]['target_url'] = \ _get_branch_url(target_type, target) for branch in snapshot['branches'].keys(): target = snapshot['branches'][branch]['target'] target_type = snapshot['branches'][branch]['target_type'] if target_type == 'alias': if target in snapshot['branches']: snapshot['branches'][branch]['target_url'] = \ snapshot['branches'][target]['target_url'] else: snp = self.snapshot_get_branches(snapshot['id'], branches_from=target, branches_count=1) alias_target = snp['branches'][target]['target'] alias_target_type = snp['branches'][target]['target_type'] snapshot['branches'][branch]['target_url'] = \ _get_branch_url(alias_target_type, alias_target) return snapshot @given(snapshot()) def test_api_snapshot_uppercase(self, snapshot): - url = reverse('api-snapshot-uppercase-checksum', + url = reverse('api-1-snapshot-uppercase-checksum', url_args={'snapshot_id': snapshot.upper()}) resp = self.client.get(url) self.assertEqual(resp.status_code, 302) - redirect_url = reverse('api-snapshot-uppercase-checksum', + redirect_url = reverse('api-1-snapshot-uppercase-checksum', url_args={'snapshot_id': snapshot}) self.assertEqual(resp['location'], redirect_url) @given(new_snapshot(min_size=4)) def test_api_snapshot_null_branch(self, new_snapshot): snp_dict = new_snapshot.to_dict() snp_id = hash_to_hex(snp_dict['id']) for branch in 
snp_dict['branches'].keys(): snp_dict['branches'][branch] = None break self.storage.snapshot_add([snp_dict]) - url = reverse('api-snapshot', + url = reverse('api-1-snapshot', url_args={'snapshot_id': snp_id}) rv = self.client.get(url) self.assertEqual(rv.status_code, 200, rv.data) diff --git a/swh/web/tests/api/views/test_stat.py b/swh/web/tests/api/views/test_stat.py index c4df0431..85054f6e 100644 --- a/swh/web/tests/api/views/test_stat.py +++ b/swh/web/tests/api/views/test_stat.py @@ -1,75 +1,75 @@ # Copyright (C) 2015-2018 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU Affero General Public License version 3, or any later version # See top-level LICENSE file for more information from rest_framework.test import APITestCase from unittest.mock import patch from swh.storage.exc import StorageDBError, StorageAPIError from swh.web.common.utils import reverse from swh.web.tests.testcase import WebTestCase class StatApiTestCase(WebTestCase, APITestCase): @patch('swh.web.api.views.stat.service') def test_api_1_stat_counters_raise_error(self, mock_service): mock_service.stat_counters.side_effect = ValueError( 'voluntary error to check the bad request middleware.') - url = reverse('api-stat-counters') + url = reverse('api-1-stat-counters') rv = self.client.get(url) self.assertEqual(rv.status_code, 400) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'ValueError', 'reason': 'voluntary error to check the bad request middleware.'}) @patch('swh.web.api.views.stat.service') def test_api_1_stat_counters_raise_from_db(self, mock_service): mock_service.stat_counters.side_effect = StorageDBError( 'Storage exploded! 
Will be back online shortly!') - url = reverse('api-stat-counters') + url = reverse('api-1-stat-counters') rv = self.client.get(url) self.assertEqual(rv.status_code, 503) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'StorageDBError', 'reason': 'An unexpected error occurred in the backend: ' 'Storage exploded! Will be back online shortly!'}) @patch('swh.web.api.views.stat.service') def test_api_1_stat_counters_raise_from_api(self, mock_service): mock_service.stat_counters.side_effect = StorageAPIError( 'Storage API dropped dead! Will resurrect from its ashes asap!' ) - url = reverse('api-stat-counters') + url = reverse('api-1-stat-counters') rv = self.client.get(url) self.assertEqual(rv.status_code, 503) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'StorageAPIError', 'reason': 'An unexpected error occurred in the api backend: ' 'Storage API dropped dead! Will resurrect from its ashes asap!' }) def test_api_1_stat_counters(self): - url = reverse('api-stat-counters') + url = reverse('api-1-stat-counters') rv = self.client.get(url) self.assertEqual(rv.status_code, 200) self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, self.storage.stat_counters())