diff --git a/swh/web/api/urls.py b/swh/web/api/urls.py
index afbcaf44..3c5ccf02 100644
--- a/swh/web/api/urls.py
+++ b/swh/web/api/urls.py
@@ -1,20 +1,19 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import swh.web.api.views.content # noqa
import swh.web.api.views.directory # noqa
import swh.web.api.views.identifiers # noqa
import swh.web.api.views.origin # noqa
import swh.web.api.views.origin_save # noqa
-import swh.web.api.views.person # noqa
import swh.web.api.views.release # noqa
import swh.web.api.views.revision # noqa
import swh.web.api.views.snapshot # noqa
import swh.web.api.views.stat # noqa
import swh.web.api.views.vault # noqa
from swh.web.api.apiurls import APIUrls
urlpatterns = APIUrls.get_url_patterns()
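Note on the urls.py change above: the /person/ routes disappear simply because the views module is no longer imported; the @api_route decorators register each view at import time and APIUrls then collects the patterns. A minimal sketch of that registration pattern, assuming (not showing) the real APIUrls internals:

_registry = []  # stand-in for the url patterns collected by APIUrls

def api_route(url_pattern, view_name):
    """Toy decorator mimicking the import-time registration assumed above."""
    def decorator(view):
        _registry.append((url_pattern, view_name, view))
        return view
    return decorator

@api_route(r'/release/(?P<sha1_git>[0-9a-f]+)/', 'api-1-release')
def api_release(request, sha1_git):
    return {'id': sha1_git}

# If the module defining a view is never imported, its decorator never runs,
# so its route never lands in the registry -- the same effect removing
# "import swh.web.api.views.person" has on the /person/ endpoints.
print(_registry)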
diff --git a/swh/web/api/utils.py b/swh/web/api/utils.py
index d4f2d49b..011e3ac0 100644
--- a/swh/web/api/utils.py
+++ b/swh/web/api/utils.py
@@ -1,211 +1,196 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.common.utils import reverse
from swh.web.common.query import parse_hash
def filter_field_keys(data, field_keys):
"""Given an object instance (directory or list), and a csv field keys
to filter on.
Return the object instance with filtered keys.
Note: Returns obj as is if it's an instance of types not in (dictionary,
list)
Args:
- data: one object (dictionary, list...) to filter.
- field_keys: csv or set of keys to filter the object on
Returns:
obj filtered on field_keys
"""
if isinstance(data, map):
return map(lambda x: filter_field_keys(x, field_keys), data)
if isinstance(data, list):
return [filter_field_keys(x, field_keys) for x in data]
if isinstance(data, dict):
return {k: v for (k, v) in data.items() if k in field_keys}
return data
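A quick illustration of filter_field_keys as defined above (the import path is the module this diff touches):

from swh.web.api.utils import filter_field_keys

entries = [{'name': 'file.c', 'type': 'file', 'perms': 33188},
           {'name': 'src', 'type': 'dir', 'perms': 16384}]
print(filter_field_keys(entries, {'name', 'type'}))
# -> [{'name': 'file.c', 'type': 'file'}, {'name': 'src', 'type': 'dir'}]
print(filter_field_keys('not a dict or list', {'name'}))
# -> 'not a dict or list' (returned as is)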
def person_to_string(person):
"""Map a person (person, committer, tagger, etc...) to a string.
"""
return ''.join([person['name'], ' <', person['email'], '>'])
def enrich_object(object):
"""Enrich an object (revision, release) with link to the 'target' of
type 'target_type'.
Args:
object: An object with target and target_type keys
(e.g. release, revision)
Returns:
The object enriched with a target_url pointing to the relevant
swh.web.api URL for the target object (revision, release,
content, directory or snapshot)
"""
obj = object.copy()
if 'target' in obj and 'target_type' in obj:
if obj['target_type'] in ('revision', 'release', 'directory'):
obj['target_url'] = \
reverse('api-1-%s' % obj['target_type'],
url_args={'sha1_git': obj['target']})
elif obj['target_type'] == 'content':
obj['target_url'] = \
reverse('api-1-content',
url_args={'q': 'sha1_git:' + obj['target']})
elif obj['target_type'] == 'snapshot':
obj['target_url'] = \
reverse('api-1-snapshot',
url_args={'snapshot_id': obj['target']})
- if 'author' in obj:
- author = obj['author']
- obj['author_url'] = reverse('api-1-person',
- url_args={'person_id': author['id']})
-
return obj
enrich_release = enrich_object
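After this change enrich_object / enrich_release only add target_url; a rough shape sketch (identifiers and URLs below are illustrative, and reverse() needs a configured Django URL setup):

release = {
    'id': '208f61cc7a5dbc9879ae6e5c2f95891e270f09ef',
    'target': 'aafb16d69fd30ff58afdd69036a26047f3aebdc6',
    'target_type': 'revision',
    'author': {'name': 'Jane Doe', 'email': 'jane@example.org'},
}
# enrich_release(release) now roughly yields the same dict plus:
#   release['target_url'] == '/api/1/revision/aafb16d6.../'
# and no author_url key anymore: the author stays as an embedded dict.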
def enrich_directory(directory, context_url=None):
"""Enrich directory with url to content or directory.
"""
if 'type' in directory:
target_type = directory['type']
target = directory['target']
if target_type == 'file':
directory['target_url'] = reverse(
'api-1-content', url_args={'q': 'sha1_git:%s' % target})
if context_url:
directory['file_url'] = context_url + directory['name'] + '/'
elif target_type == 'dir':
directory['target_url'] = reverse(
'api-1-directory', url_args={'sha1_git': target})
if context_url:
directory['dir_url'] = context_url + directory['name'] + '/'
else:
directory['target_url'] = reverse(
'api-1-revision', url_args={'sha1_git': target})
if context_url:
directory['rev_url'] = context_url + directory['name'] + '/'
return directory
def enrich_metadata_endpoint(content):
"""Enrich metadata endpoint with link to the upper metadata endpoint.
"""
c = content.copy()
c['content_url'] = reverse('api-1-content',
url_args={'q': 'sha1:%s' % c['id']})
return c
def enrich_content(content, top_url=False, query_string=None):
"""Enrich content with links to:
- data_url: its raw data
- filetype_url: its filetype information
- language_url: its programming language information
- license_url: its licensing information
Args:
content: dict of data associated to a swh content object
top_url: whether or not to include the content url in
the enriched data
query_string: optional query string of type '<algo>:<hash>'
used when requesting the content, it acts as a hint
for picking the same hash method when computing
the urls listed above
Returns:
An enriched content dict filled with additional urls
"""
checksums = content
if 'checksums' in content:
checksums = content['checksums']
hash_algo = 'sha1'
if query_string:
hash_algo = parse_hash(query_string)[0]
if hash_algo in checksums:
q = '%s:%s' % (hash_algo, checksums[hash_algo])
if top_url:
content['content_url'] = reverse(
'api-1-content', url_args={'q': q})
content['data_url'] = reverse('api-1-content-raw', url_args={'q': q})
content['filetype_url'] = reverse(
'api-1-content-filetype', url_args={'q': q})
content['language_url'] = reverse(
'api-1-content-language', url_args={'q': q})
content['license_url'] = reverse(
'api-1-content-license', url_args={'q': q})
return content
def enrich_revision(revision):
"""Enrich revision with links where it makes sense (directory, parents).
Keep track of the navigation breadcrumbs if they are specified.
Args:
revision: the revision as a dict
"""
revision['url'] = reverse('api-1-revision',
url_args={'sha1_git': revision['id']})
revision['history_url'] = reverse('api-1-revision-log',
url_args={'sha1_git': revision['id']})
- if 'author' in revision:
- author = revision['author']
- revision['author_url'] = reverse('api-1-person',
- url_args={'person_id': author['id']})
-
- if 'committer' in revision:
- committer = revision['committer']
- revision['committer_url'] = reverse(
- 'api-1-person', url_args={'person_id': committer['id']})
-
if 'directory' in revision:
revision['directory_url'] = reverse(
'api-1-directory', url_args={'sha1_git': revision['directory']})
if 'parents' in revision:
parents = []
for parent in revision['parents']:
parents.append({
'id': parent,
'url': reverse('api-1-revision', url_args={'sha1_git': parent})
})
revision['parents'] = parents
if 'children' in revision:
children = []
for child in revision['children']:
children.append(reverse(
'api-1-revision', url_args={'sha1_git': child}))
revision['children_urls'] = children
if 'message_decoding_failed' in revision:
revision['message_url'] = \
reverse('api-1-revision-raw-message',
url_args={'sha1_git': revision['id']})
return revision
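For reference, the rough shape of an enriched revision after this change (field values are illustrative; author and committer remain embedded dicts without companion *_url fields):

enriched_revision = {
    'id': 'aafb16d69fd30ff58afdd69036a26047f3aebdc6',
    'url': '/api/1/revision/aafb16d6.../',
    'history_url': '/api/1/revision/aafb16d6.../log/',
    'directory': 'f1b94134a4b879bc55c3dacdb496690c8ebdc03f',
    'directory_url': '/api/1/directory/f1b94134.../',
    'parents': [{'id': 'e1a315fa...',
                 'url': '/api/1/revision/e1a315fa.../'}],
    'author': {'name': 'Jane Doe', 'email': 'jane@example.org'},
    'committer': {'name': 'Jane Doe', 'email': 'jane@example.org'},
}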
diff --git a/swh/web/api/views/person.py b/swh/web/api/views/person.py
deleted file mode 100644
index 38b2dca2..00000000
--- a/swh/web/api/views/person.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright (C) 2015-2019 The Software Heritage developers
-# See the AUTHORS file at the top-level directory of this distribution
-# License: GNU Affero General Public License version 3, or any later version
-# See top-level LICENSE file for more information
-
-from swh.web.common import service
-from swh.web.api.apidoc import api_doc, format_docstring
-from swh.web.api.apiurls import api_route
-from swh.web.api.views.utils import api_lookup
-
-
-@api_route(r'/person/(?P<person_id>[0-9]+)/', 'api-1-person')
-@api_doc('/person/')
-@format_docstring()
-def api_person(request, person_id):
- """
- .. http:get:: /api/1/person/(person_id)/
-
- Get information about a person in the archive.
-
- :param int person_id: a person identifier
-
- {common_headers}
-
- :>json string email: the email of the person
- :>json string fullname: the full name of the person: combination of its
- name and email
- :>json number id: the unique identifier of the person
- :>json string name: the name of the person
-
- **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`,
- :http:method:`options`
-
- :statuscode 200: no error
- :statuscode 404: requested person can not be found in the archive
-
- **Example:**
-
- .. parsed-literal::
-
- :swh_web_api:`person/8275/`
- """
- return api_lookup(
- service.lookup_person, person_id,
- notfound_msg='Person with id {} not found.'.format(person_id))
diff --git a/swh/web/api/views/release.py b/swh/web/api/views/release.py
index d9238c50..b43ecdc3 100644
--- a/swh/web/api/views/release.py
+++ b/swh/web/api/views/release.py
@@ -1,62 +1,59 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.common import service
from swh.web.api import utils
from swh.web.api.apidoc import api_doc, format_docstring
from swh.web.api.apiurls import api_route
from swh.web.api.views.utils import api_lookup
@api_route(r'/release/(?P<sha1_git>[0-9a-f]+)/', 'api-1-release',
checksum_args=['sha1_git'])
@api_doc('/release/')
@format_docstring()
def api_release(request, sha1_git):
"""
.. http:get:: /api/1/release/(sha1_git)/
Get information about a release in the archive. Releases are identified
by **sha1** checksums, compatible with Git tag identifiers. See
:func:`swh.model.identifiers.release_identifier` in our data model
module for details about how they are computed.
:param string sha1_git: hexadecimal representation of the release
**sha1_git** identifier
{common_headers}
:>json object author: information about the author of the release
- :>json string author_url: link to
- :http:get:`/api/1/person/(person_id)/` to get information about the
- author of the release
:>json string date: ISO representation of the release date (in UTC)
:>json string id: the release unique identifier
:>json string message: the message associated to the release
:>json string name: the name of the release
:>json string target: the target identifier of the release
:>json string target_type: the type of the target, can be either
**release**, **revision**, **content**, **directory**
:>json string target_url: a link to the adequate api url based on the
target type
**Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`,
:http:method:`options`
:statuscode 200: no error
:statuscode 400: an invalid **sha1_git** value has been provided
:statuscode 404: requested release can not be found in the archive
**Example:**
.. parsed-literal::
:swh_web_api:`release/208f61cc7a5dbc9879ae6e5c2f95891e270f09ef/`
"""
error_msg = 'Release with sha1_git %s not found.' % sha1_git
return api_lookup(
service.lookup_release, sha1_git,
notfound_msg=error_msg,
enrich_fn=utils.enrich_release)
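A hedged client-side example of the release endpoint documented above (the host is assumed to be the main archive instance; the sha1_git is the one from the docstring example):

import requests

url = ('https://archive.softwareheritage.org/api/1/release/'
       '208f61cc7a5dbc9879ae6e5c2f95891e270f09ef/')
release = requests.get(url).json()
print(release['name'], release['target_type'], release['target_url'])
# 'author' is still embedded in the response, but the 'author_url' field
# documented before this change is gone.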
diff --git a/swh/web/api/views/revision.py b/swh/web/api/views/revision.py
index 0e5e27d3..e7f676b9 100644
--- a/swh/web/api/views/revision.py
+++ b/swh/web/api/views/revision.py
@@ -1,479 +1,473 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from django.http import HttpResponse
from swh.web.common import service
from swh.web.common.utils import reverse
from swh.web.common.utils import parse_timestamp
from swh.web.api import utils
from swh.web.api.apidoc import api_doc, format_docstring
from swh.web.api.apiurls import api_route
from swh.web.api.views.utils import api_lookup
DOC_RETURN_REVISION = '''
:>json object author: information about the author of the revision
- :>json string author_url: link to
- :http:get:`/api/1/person/(person_id)/` to get information about the
- author of the revision
:>json object committer: information about the committer of the
revision
- :>json string committer_url: link to
- :http:get:`/api/1/person/(person_id)/` to get information about the
- committer of the revision
:>json string committer_date: ISO representation of the commit date
(in UTC)
:>json string date: ISO representation of the revision date (in UTC)
:>json string directory: the unique identifier that revision points to
:>json string directory_url: link to
:http:get:`/api/1/directory/(sha1_git)/[(path)/]` to get
information about the directory associated to the revision
:>json string id: the revision unique identifier
:>json boolean merge: whether or not the revision corresponds to a
merge commit
:>json string message: the message associated to the revision
:>json array parents: the parents of the revision, i.e. the previous
revisions that head directly to it, each entry of that array
contains an unique parent revision identifier but also a link to
:http:get:`/api/1/revision/(sha1_git)/` to get more information
about it
:>json string type: the type of the revision
''' # noqa
DOC_RETURN_REVISION_ARRAY = \
DOC_RETURN_REVISION.replace(':>json', ':>jsonarr')
def _revision_directory_by(revision, path, request_path,
limit=100, with_data=False):
"""
Compute the revision matching criterion's directory or content data.
Args:
revision: dictionary of criterions representing a revision to lookup
path: directory's path to lookup
request_path: request path which holds the original context to
limit: optional query parameter to limit the revisions log
(default to 100). For now, note that this limit could impede the
transitivity conclusion about sha1_git not being an ancestor of
with_data: indicate to retrieve the content's raw data if path resolves
to a content.
"""
def enrich_directory_local(dir, context_url=request_path):
return utils.enrich_directory(dir, context_url)
rev_id, result = service.lookup_directory_through_revision(
revision, path, limit=limit, with_data=with_data)
content = result['content']
if result['type'] == 'dir': # dir_entries
result['content'] = list(map(enrich_directory_local, content))
elif result['type'] == 'file': # content
result['content'] = utils.enrich_content(content)
elif result['type'] == 'rev': # revision
result['content'] = utils.enrich_revision(content)
return result
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)/log/',
'api-1-revision-origin-log')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/log/',
'api-1-revision-origin-log')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/ts/(?P<ts>.+)/log/',
'api-1-revision-origin-log')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)'
r'/ts/(?P<ts>.+)/log/',
'api-1-revision-origin-log')
@api_doc('/revision/origin/log/')
@format_docstring(return_revision_array=DOC_RETURN_REVISION_ARRAY)
def api_revision_log_by(request, origin_id,
branch_name='HEAD',
ts=None):
"""
.. http:get:: /api/1/revision/origin/(origin_id)[/branch/(branch_name)][/ts/(timestamp)]/log
Show the commit log for a revision, searching for it based on software origin,
branch name, and/or visit timestamp.
This endpoint behaves like :http:get:`/api/1/revision/(sha1_git)[/prev/(prev_sha1s)]/log/`,
but operates on the revision that has been found at a given software origin,
close to a given point in time, pointed by a given branch.
.. warning::
All endpoints using an ``origin_id`` are deprecated and will be
removed in the near future. Only those using an ``origin_url``
will remain available.
You should instead use successively
:http:get:`/api/1/origin/(origin_url)/visits/`,
:http:get:`/api/1/snapshot/(snapshot_id)/`, and
:http:get:`/api/1/revision/(sha1_git)[/prev/(prev_sha1s)]/log/`.
:param int origin_id: a software origin identifier
:param string branch_name: optional parameter specifying a fully-qualified branch name
associated to the software origin, e.g., "refs/heads/master". Defaults to the HEAD branch.
:param string timestamp: optional parameter specifying a timestamp close to which the revision
pointed by the given branch should be looked up. The timestamp can be expressed either
as an ISO date or as a Unix one (in UTC). Defaults to now.
{common_headers}
{return_revision_array}
**Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options`
:statuscode 200: no error
:statuscode 404: no revision matching the given criteria could be found in the archive
**Example:**
.. parsed-literal::
:swh_web_api:`revision/origin/723566/ts/2016-01-17T00:00:00+00:00/log/`
""" # noqa
result = {}
per_page = int(request.query_params.get('per_page', '10'))
def lookup_revision_log_by_with_limit(o_id, br, ts, limit=per_page+1):
return service.lookup_revision_log_by(o_id, br, ts, limit)
error_msg = 'No revision matching origin %s ' % origin_id
error_msg += ', branch name %s' % branch_name
error_msg += (' and time stamp %s.' % ts) if ts else '.'
rev_get = api_lookup(
lookup_revision_log_by_with_limit, int(origin_id), branch_name, ts,
notfound_msg=error_msg,
enrich_fn=utils.enrich_revision)
nb_rev = len(rev_get)
if nb_rev == per_page+1:
revisions = rev_get[:-1]
last_sha1_git = rev_get[-1]['id']
params = {k: v for k, v in {'origin_id': origin_id,
'branch_name': branch_name,
'ts': ts,
}.items() if v is not None}
query_params = {}
query_params['sha1_git'] = last_sha1_git
if request.query_params.get('per_page'):
query_params['per_page'] = per_page
result['headers'] = {
'link-next': reverse('api-1-revision-origin-log', url_args=params,
query_params=query_params)
}
else:
revisions = rev_get
result.update({'results': revisions})
return result
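The view above detects a next page by asking the backend for per_page + 1 revisions; a standalone sketch of that trick (names are illustrative, not the swh-web API):

def paginate(fetch, per_page):
    """Fetch one extra row; if it is there, use its id as the next cursor."""
    rows = fetch(limit=per_page + 1)
    if len(rows) == per_page + 1:
        return rows[:-1], rows[-1]['id']   # current page, next-page cursor
    return rows, None                      # last page

rows, next_cursor = paginate(
    lambda limit: [{'id': 'rev%d' % i} for i in range(limit)], per_page=10)
print(len(rows), next_cursor)  # 10 rev10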
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/directory/',
'api-1-revision-origin-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/directory/(?P<path>.+)/',
'api-1-revision-origin-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)/directory/',
'api-1-revision-origin-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)/ts/(?P<ts>.+)/directory/',
'api-1-revision-origin-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)/directory/(?P<path>.+)/',
'api-1-revision-origin-directory')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)/ts/(?P<ts>.+)'
r'/directory/(?P<path>.+)/',
'api-1-revision-origin-directory')
@api_doc('/revision/origin/directory/', tags=['hidden'])
def api_directory_through_revision_origin(request, origin_id,
branch_name='HEAD',
ts=None,
path=None,
with_data=False):
"""
Display directory or content information through a revision identified
by origin/branch/timestamp.
.. warning::
All endpoints using an ``origin_id`` are deprecated and will be
removed in the near future. Only those using an ``origin_url``
will remain available.
You should instead use successively
:http:get:`/api/1/origin/(origin_url)/visits/`,
:http:get:`/api/1/snapshot/(snapshot_id)/`,
:http:get:`/api/1/revision/(sha1_git)/`,
:http:get:`/api/1/directory/(sha1_git)/[(path)/]`
"""
if ts:
ts = parse_timestamp(ts)
return _revision_directory_by({'origin_id': int(origin_id),
'branch_name': branch_name,
'ts': ts
},
path, request.path,
with_data=with_data)
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/',
'api-1-revision-origin')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)/',
'api-1-revision-origin')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)'
r'/branch/(?P<branch_name>.+)/ts/(?P<ts>.+)/',
'api-1-revision-origin')
@api_route(r'/revision/origin/(?P<origin_id>[0-9]+)/ts/(?P<ts>.+)/',
'api-1-revision-origin')
@api_doc('/revision/origin/')
@format_docstring(return_revision=DOC_RETURN_REVISION)
def api_revision_with_origin(request, origin_id,
branch_name='HEAD',
ts=None):
"""
.. http:get:: /api/1/revision/origin/(origin_id)/[branch/(branch_name)/][ts/(timestamp)/]
Get information about a revision, searching for it based on software origin,
branch name, and/or visit timestamp.
This endpoint behaves like :http:get:`/api/1/revision/(sha1_git)/`,
but operates on the revision that has been found at a given software origin,
close to a given point in time, pointed by a given branch.
.. warning::
All endpoints using an ``origin_id`` are deprecated and will be
removed in the near future. Only those using an ``origin_url``
will remain available.
You should instead use successively
:http:get:`/api/1/origin/(origin_url)/visits/`,
:http:get:`/api/1/snapshot/(snapshot_id)/`, and
:http:get:`/api/1/revision/(sha1_git)/`.
:param int origin_id: a software origin identifier
:param string branch_name: optional parameter specifying a fully-qualified branch name
associated to the software origin, e.g., "refs/heads/master". Defaults to the HEAD branch.
:param string timestamp: optional parameter specifying a timestamp close to which the revision
pointed by the given branch should be looked up. The timestamp can be expressed either
as an ISO date or as a Unix one (in UTC). Defaults to now.
{common_headers}
{return_revision}
**Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options`
:statuscode 200: no error
:statuscode 404: no revision matching the given criteria could be found in the archive
**Example:**
.. parsed-literal::
:swh_web_api:`revision/origin/13706355/branch/refs/heads/2.7/`
""" # noqa
return api_lookup(
service.lookup_revision_by, int(origin_id), branch_name, ts,
notfound_msg=('Revision with (origin_id: {}, branch_name: {}'
', ts: {}) not found.'.format(origin_id,
branch_name, ts)),
enrich_fn=utils.enrich_revision)
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/', 'api-1-revision',
checksum_args=['sha1_git'])
@api_doc('/revision/')
@format_docstring(return_revision=DOC_RETURN_REVISION)
def api_revision(request, sha1_git):
"""
.. http:get:: /api/1/revision/(sha1_git)/
Get information about a revision in the archive. Revisions are
identified by **sha1** checksums, compatible with Git commit
identifiers.
See :func:`swh.model.identifiers.revision_identifier` in our data model
module for details about how they are computed.
:param string sha1_git: hexadecimal representation of the revision
**sha1_git** identifier
{common_headers}
{return_revision}
**Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`,
:http:method:`options`
:statuscode 200: no error
:statuscode 400: an invalid **sha1_git** value has been provided
:statuscode 404: requested revision can not be found in the archive
**Example:**
.. parsed-literal::
:swh_web_api:`revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/`
""" # noqa
return api_lookup(
service.lookup_revision, sha1_git,
notfound_msg='Revision with sha1_git {} not found.'.format(sha1_git),
enrich_fn=utils.enrich_revision)
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/raw/',
'api-1-revision-raw-message', checksum_args=['sha1_git'])
@api_doc('/revision/raw/', tags=['hidden'], handle_response=True)
def api_revision_raw_message(request, sha1_git):
"""Return the raw data of the message of revision identified by sha1_git
"""
raw = service.lookup_revision_message(sha1_git)
response = HttpResponse(raw['message'],
content_type='application/octet-stream')
response['Content-disposition'] = \
'attachment;filename=rev_%s_raw' % sha1_git
return response
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/directory/',
'api-1-revision-directory', checksum_args=['sha1_git'])
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/directory/(?P<dir_path>.+)/',
'api-1-revision-directory', checksum_args=['sha1_git'])
@api_doc('/revision/directory/')
@format_docstring()
def api_revision_directory(request, sha1_git,
dir_path=None,
with_data=False):
"""
.. http:get:: /api/1/revision/(sha1_git)/directory/[(path)/]
Get information about directory (entry) objects associated to revisions.
Each revision is associated to a single "root" directory.
This endpoint behaves like :http:get:`/api/1/directory/(sha1_git)/[(path)/]`,
but operates on the root directory associated to a given revision.
:param string sha1_git: hexadecimal representation of the revision **sha1_git** identifier
:param string path: optional parameter to get information about the directory entry
pointed by that relative path
{common_headers}
:>json array content: directory entries as returned by :http:get:`/api/1/directory/(sha1_git)/[(path)/]`
:>json string path: path of directory from the revision root one
:>json string revision: the unique revision identifier
:>json string type: the type of the directory
**Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options`
:statuscode 200: no error
:statuscode 400: an invalid **sha1_git** value has been provided
:statuscode 404: requested revision can not be found in the archive
**Example:**
.. parsed-literal::
:swh_web_api:`revision/f1b94134a4b879bc55c3dacdb496690c8ebdc03f/directory/`
""" # noqa
return _revision_directory_by({'sha1_git': sha1_git},
dir_path, request.path,
with_data=with_data)
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)/log/', 'api-1-revision-log',
checksum_args=['sha1_git'])
@api_route(r'/revision/(?P<sha1_git>[0-9a-f]+)'
r'/prev/(?P<prev_sha1s>[0-9a-f]*/*)/log/',
'api-1-revision-log', checksum_args=['sha1_git', 'prev_sha1s'])
@api_doc('/revision/log/')
@format_docstring(return_revision_array=DOC_RETURN_REVISION_ARRAY)
def api_revision_log(request, sha1_git, prev_sha1s=None):
"""
.. http:get:: /api/1/revision/(sha1_git)[/prev/(prev_sha1s)]/log/
Get a list of all revisions heading to a given one, in other words show the commit log.
:param string sha1_git: hexadecimal representation of the revision **sha1_git** identifier
:param string prev_sha1s: optional parameter representing the navigation breadcrumbs
(descendant revisions previously visited). If multiple values, use / as delimiter.
If provided, revisions information will be added at the beginning of the returned list.
:query int per_page: number of elements in the returned list, for pagination purpose
{common_headers}
{resheader_link}
{return_revision_array}
**Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options`
:statuscode 200: no error
:statuscode 400: an invalid **sha1_git** value has been provided
:statuscode 404: requested revision can not be found in the archive
**Example:**
.. parsed-literal::
:swh_web_api:`revision/e1a315fa3fa734e2a6154ed7b5b9ae0eb8987aad/log/`
""" # noqa
result = {}
per_page = int(request.query_params.get('per_page', '10'))
def lookup_revision_log_with_limit(s, limit=per_page+1):
return service.lookup_revision_log(s, limit)
error_msg = 'Revision with sha1_git %s not found.' % sha1_git
rev_get = api_lookup(lookup_revision_log_with_limit, sha1_git,
notfound_msg=error_msg,
enrich_fn=utils.enrich_revision)
nb_rev = len(rev_get)
if nb_rev == per_page+1:
rev_backward = rev_get[:-1]
new_last_sha1 = rev_get[-1]['id']
query_params = {}
if request.query_params.get('per_page'):
query_params['per_page'] = per_page
result['headers'] = {
'link-next': reverse('api-1-revision-log',
url_args={'sha1_git': new_last_sha1},
query_params=query_params)
}
else:
rev_backward = rev_get
if not prev_sha1s: # no nav breadcrumbs, so we're done
revisions = rev_backward
else:
rev_forward_ids = prev_sha1s.split('/')
rev_forward = api_lookup(
service.lookup_revision_multiple, rev_forward_ids,
notfound_msg=error_msg,
enrich_fn=utils.enrich_revision)
revisions = rev_forward + rev_backward
result.update({
'results': revisions
})
return result
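A hedged client sketch walking the revision log endpoint above page by page, assuming the 'link-next' value computed here is exposed as a standard Link: rel="next" HTTP header (host and starting sha1_git are illustrative):

import requests

url = ('https://archive.softwareheritage.org/api/1/revision/'
       'e1a315fa3fa734e2a6154ed7b5b9ae0eb8987aad/log/?per_page=10')
while url:
    resp = requests.get(url)
    for rev in resp.json():
        print(rev['id'])
    url = resp.links.get('next', {}).get('url')  # None once there is no next page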
diff --git a/swh/web/browse/urls.py b/swh/web/browse/urls.py
index ea06eb42..2fe4a0c6 100644
--- a/swh/web/browse/urls.py
+++ b/swh/web/browse/urls.py
@@ -1,52 +1,51 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from django.conf.urls import url
from django.shortcuts import render, redirect
import swh.web.browse.views.directory # noqa
import swh.web.browse.views.content # noqa
import swh.web.browse.views.origin # noqa
-import swh.web.browse.views.person # noqa
import swh.web.browse.views.release # noqa
import swh.web.browse.views.revision # noqa
import swh.web.browse.views.snapshot # noqa
from swh.web.browse.browseurls import BrowseUrls
from swh.web.browse.identifiers import swh_id_browse
from swh.web.common.utils import reverse
def _browse_help_view(request):
return render(request, 'browse/help.html',
{'heading': 'How to browse the archive ?'})
def _browse_search_view(request):
return render(request, 'browse/search.html',
{'heading': 'Search software origins to browse'})
def _browse_vault_view(request):
return render(request, 'browse/vault-ui.html',
{'heading': 'Download archive content from the Vault'})
def _browse_origin_save_view(request):
return redirect(reverse('origin-save'))
urlpatterns = [
url(r'^$', _browse_search_view),
url(r'^help/$', _browse_help_view, name='browse-help'),
url(r'^search/$', _browse_search_view, name='browse-search'),
url(r'^vault/$', _browse_vault_view, name='browse-vault'),
# for backward compatibility
url(r'^origin/save/$', _browse_origin_save_view,
name='browse-origin-save'),
url(r'^(?P<swh_id>swh:[0-9]+:[a-z]+:[0-9a-f]+.*)/$', swh_id_browse),
]
urlpatterns += BrowseUrls.get_url_patterns()
diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py
index 0cbbcd40..45e5369c 100644
--- a/swh/web/browse/utils.py
+++ b/swh/web/browse/utils.py
@@ -1,1133 +1,1111 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import base64
import magic
import pypandoc
import stat
import textwrap
from collections import defaultdict
from threading import Lock
from django.core.cache import cache
from django.utils.safestring import mark_safe
from django.utils.html import escape
from swh.model.identifiers import persistent_identifier
from swh.web.common import highlightjs, service
from swh.web.common.exc import NotFoundExc, http_status_code_message
from swh.web.common.origin_visits import get_origin_visit
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id,
swh_object_icons
)
from swh.web.config import get_config
def get_directory_entries(sha1_git):
"""Function that retrieves the content of a directory
from the archive.
The directory entries are first sorted in lexicographical order.
Sub-directories and regular files are then extracted.
Args:
sha1_git: sha1_git identifier of the directory
Returns:
A tuple whose first member corresponds to the sub-directories list
and second member the regular files list
Raises:
NotFoundExc if the directory is not found
"""
cache_entry_id = 'directory_entries_%s' % sha1_git
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry
entries = list(service.lookup_directory(sha1_git))
for e in entries:
e['perms'] = stat.filemode(e['perms'])
if e['type'] == 'rev':
# modify dir entry name to explicitly show it points
# to a revision
e['name'] = '%s @ %s' % (e['name'], e['target'][:7])
dirs = [e for e in entries if e['type'] in ('dir', 'rev')]
files = [e for e in entries if e['type'] == 'file']
dirs = sorted(dirs, key=lambda d: d['name'])
files = sorted(files, key=lambda f: f['name'])
cache.set(cache_entry_id, (dirs, files))
return dirs, files
_lock = Lock()
def get_mimetype_and_encoding_for_content(content):
"""Function that returns the mime type and the encoding associated to
a content buffer using the magic module under the hood.
Args:
content (bytes): a content buffer
Returns:
A tuple (mimetype, encoding), for instance ('text/plain', 'us-ascii'),
associated to the provided content.
"""
# https://pypi.org/project/python-magic/
# packaged as python3-magic in debian buster
if hasattr(magic, 'from_buffer'):
m = magic.Magic(mime=True, mime_encoding=True)
mime_encoding = m.from_buffer(content)
mime_type, encoding = mime_encoding.split(';')
encoding = encoding.replace(' charset=', '')
# https://pypi.org/project/file-magic/
# packaged as python3-magic in debian stretch
else:
# TODO: Remove that code when production environment is upgraded
# to debian buster
# calls to the file-magic API are not thread-safe so they must
# be protected with a Lock to guarantee they will succeed
_lock.acquire()
magic_result = magic.detect_from_content(content)
_lock.release()
mime_type = magic_result.mime_type
encoding = magic_result.encoding
return mime_type, encoding
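A standalone illustration of the python-magic code path above (assumes the https://pypi.org/project/python-magic/ binding; exact output depends on the libmagic version):

import magic

m = magic.Magic(mime=True, mime_encoding=True)
print(m.from_buffer(b'hello world\n'))
# e.g. 'text/plain; charset=us-ascii', which the function above splits into
# ('text/plain', 'us-ascii')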
# maximum authorized content size in bytes for HTML display
# with code highlighting
content_display_max_size = get_config()['content_display_max_size']
snapshot_content_max_size = get_config()['snapshot_content_max_size']
def _re_encode_content(mimetype, encoding, content_data):
# encode textual content to utf-8 if needed
if mimetype.startswith('text/'):
# probably a malformed UTF-8 content, re-encode it
# by replacing invalid chars with a substitution one
if encoding == 'unknown-8bit':
content_data = content_data.decode('utf-8', 'replace')\
.encode('utf-8')
elif encoding not in ['utf-8', 'binary']:
content_data = content_data.decode(encoding, 'replace')\
.encode('utf-8')
elif mimetype.startswith('application/octet-stream'):
# file may detect a text content as binary
# so try to decode it for display
encodings = ['us-ascii']
encodings += ['iso-8859-%s' % i for i in range(1, 17)]
for encoding in encodings:
try:
content_data = content_data.decode(encoding)\
.encode('utf-8')
except Exception:
pass
else:
# ensure display in content view
mimetype = 'text/plain'
break
return mimetype, content_data
def request_content(query_string, max_size=content_display_max_size,
raise_if_unavailable=True, re_encode=True):
"""Function that retrieves a content from the archive.
Raw bytes content is first retrieved, then the content mime type.
If the mime type is not stored in the archive, it will be computed
using Python magic module.
Args:
query_string: a string of the form "[ALGO_HASH:]HASH" where
optional ALGO_HASH can be either ``sha1``, ``sha1_git``,
``sha256``, or ``blake2s256`` (default to ``sha1``) and HASH
the hexadecimal representation of the hash value
max_size: the maximum size for a content to retrieve (default to 1MB,
no size limit if None)
Returns:
A tuple whose first member corresponds to the content raw bytes
and second member the content mime type
Raises:
NotFoundExc if the content is not found
"""
content_data = service.lookup_content(query_string)
filetype = None
language = None
license = None
# requests to the indexer db may fail so properly handle
# those cases in order to avoid content display errors
try:
filetype = service.lookup_content_filetype(query_string)
language = service.lookup_content_language(query_string)
license = service.lookup_content_license(query_string)
except Exception:
pass
mimetype = 'unknown'
encoding = 'unknown'
if filetype:
mimetype = filetype['mimetype']
encoding = filetype['encoding']
# workaround when encountering corrupted data due to implicit
# conversion from bytea to text in the indexer db (see T818)
# TODO: Remove that code when all data have been correctly converted
if mimetype.startswith('\\'):
filetype = None
content_data['error_code'] = 200
content_data['error_message'] = ''
content_data['error_description'] = ''
if not max_size or content_data['length'] < max_size:
try:
content_raw = service.lookup_content_raw(query_string)
except Exception as e:
if raise_if_unavailable:
raise e
else:
content_data['raw_data'] = None
content_data['error_code'] = 404
content_data['error_description'] = \
'The bytes of the content are currently not available in the archive.' # noqa
content_data['error_message'] = \
http_status_code_message[content_data['error_code']]
else:
content_data['raw_data'] = content_raw['data']
if not filetype:
mimetype, encoding = \
get_mimetype_and_encoding_for_content(content_data['raw_data']) # noqa
if re_encode:
mimetype, raw_data = _re_encode_content(
mimetype, encoding, content_data['raw_data'])
content_data['raw_data'] = raw_data
else:
content_data['raw_data'] = None
content_data['mimetype'] = mimetype
content_data['encoding'] = encoding
if language:
content_data['language'] = language['lang']
else:
content_data['language'] = 'not detected'
if license:
content_data['licenses'] = ', '.join(license['facts'][0]['licenses'])
else:
content_data['licenses'] = 'not detected'
return content_data
_browsers_supported_image_mimes = set(['image/gif', 'image/png',
'image/jpeg', 'image/bmp',
'image/webp', 'image/svg',
'image/svg+xml'])
def prepare_content_for_display(content_data, mime_type, path):
"""Function that prepares a content for HTML display.
The function tries to associate a programming language to a
content in order to perform syntax highlighting client-side
using highlightjs. The language is determined using either
the content filename or its mime type.
If the mime type corresponds to an image format supported
by web browsers, the content will be encoded in base64
for displaying the image.
Args:
content_data (bytes): raw bytes of the content
mime_type (string): mime type of the content
path (string): path of the content including filename
Returns:
A dict containing the content bytes (possibly different from the one
provided as parameter if it is an image) under the key 'content_data'
and the corresponding highlightjs language class under the
key 'language'.
"""
language = highlightjs.get_hljs_language_from_filename(path)
if not language:
language = highlightjs.get_hljs_language_from_mime_type(mime_type)
if not language:
language = 'nohighlight'
elif mime_type.startswith('application/'):
mime_type = mime_type.replace('application/', 'text/')
if mime_type.startswith('image/'):
if mime_type in _browsers_supported_image_mimes:
content_data = base64.b64encode(content_data)
content_data = content_data.decode('utf-8')
else:
content_data = None
if mime_type.startswith('image/svg'):
mime_type = 'image/svg+xml'
return {'content_data': content_data,
'language': language,
'mimetype': mime_type}
def process_snapshot_branches(snapshot):
"""
Process a dictionary describing snapshot branches: extract those
targeting revisions and releases, put them in two different lists,
then sort those lists in lexicographical order of the branches' names.
Args:
snapshot (dict): A dict describing a snapshot and its branches,
as returned for instance by
:func:`swh.web.common.service.lookup_snapshot`
Returns:
tuple: A tuple whose first member is the sorted list of branches
targeting revisions and second member the sorted list of branches
targeting releases
"""
snapshot_branches = snapshot['branches']
branches = {}
branch_aliases = {}
releases = {}
revision_to_branch = defaultdict(set)
revision_to_release = defaultdict(set)
release_to_branch = defaultdict(set)
for branch_name, target in snapshot_branches.items():
if not target:
# FIXME: display branches with an unknown target anyway
continue
target_id = target['target']
target_type = target['target_type']
if target_type == 'revision':
branches[branch_name] = {
'name': branch_name,
'revision': target_id,
}
revision_to_branch[target_id].add(branch_name)
elif target_type == 'release':
release_to_branch[target_id].add(branch_name)
elif target_type == 'alias':
branch_aliases[branch_name] = target_id
# FIXME: handle pointers to other object types
def _enrich_release_branch(branch, release):
releases[branch] = {
'name': release['name'],
'branch_name': branch,
'date': format_utc_iso_date(release['date']),
'id': release['id'],
'message': release['message'],
'target_type': release['target_type'],
'target': release['target'],
}
def _enrich_revision_branch(branch, revision):
branches[branch].update({
'revision': revision['id'],
'directory': revision['directory'],
'date': format_utc_iso_date(revision['date']),
'message': revision['message']
})
releases_info = service.lookup_release_multiple(
release_to_branch.keys()
)
for release in releases_info:
branches_to_update = release_to_branch[release['id']]
for branch in branches_to_update:
_enrich_release_branch(branch, release)
if release['target_type'] == 'revision':
revision_to_release[release['target']].update(
branches_to_update
)
revisions = service.lookup_revision_multiple(
set(revision_to_branch.keys()) | set(revision_to_release.keys())
)
for revision in revisions:
if not revision:
continue
for branch in revision_to_branch[revision['id']]:
_enrich_revision_branch(branch, revision)
for release in revision_to_release[revision['id']]:
releases[release]['directory'] = revision['directory']
for branch_alias, branch_target in branch_aliases.items():
if branch_target in branches:
branches[branch_alias] = dict(branches[branch_target])
else:
snp = service.lookup_snapshot(snapshot['id'],
branches_from=branch_target,
branches_count=1)
if snp and branch_target in snp['branches']:
if snp['branches'][branch_target] is None:
continue
target_type = snp['branches'][branch_target]['target_type']
target = snp['branches'][branch_target]['target']
if target_type == 'revision':
branches[branch_alias] = snp['branches'][branch_target]
revision = service.lookup_revision(target)
_enrich_revision_branch(branch_alias, revision)
elif target_type == 'release':
release = service.lookup_release(target)
_enrich_release_branch(branch_alias, release)
if branch_alias in branches:
branches[branch_alias]['name'] = branch_alias
ret_branches = list(sorted(branches.values(), key=lambda b: b['name']))
ret_releases = list(sorted(releases.values(), key=lambda b: b['name']))
return ret_branches, ret_releases
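An input/output shape sketch for process_snapshot_branches (running it for real needs the storage backend behind service.*; identifiers below are illustrative):

snapshot = {
    'id': 'c5bd9...',
    'branches': {
        'refs/heads/master': {'target': 'aafb16d6...', 'target_type': 'revision'},
        'refs/tags/v1.0':    {'target': '208f61cc...', 'target_type': 'release'},
        'HEAD':              {'target': 'refs/heads/master', 'target_type': 'alias'},
    },
}
# process_snapshot_branches(snapshot) returns (branches, releases), roughly:
#   branches -> sorted list of dicts with name, revision, directory, date, message
#   releases -> sorted list of dicts with name, branch_name, target, target_type,
#               date, message, id (plus directory when the release targets a revision)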
def get_snapshot_content(snapshot_id):
"""Returns the lists of branches and releases
associated to a swh snapshot.
That list is put in cache in order to speed up the navigation
in the swh-web/browse ui.
.. warning:: At most 1000 branches contained in the snapshot
will be returned for performance reasons.
Args:
snapshot_id (str): hexadecimal representation of the snapshot
identifier
Returns:
A tuple with two members. The first one is a list of dict describing
the snapshot branches. The second one is a list of dict describing the
snapshot releases.
Raises:
NotFoundExc if the snapshot does not exist
"""
cache_entry_id = 'swh_snapshot_%s' % snapshot_id
cache_entry = cache.get(cache_entry_id)
if cache_entry:
return cache_entry['branches'], cache_entry['releases']
branches = []
releases = []
if snapshot_id:
snapshot = service.lookup_snapshot(
snapshot_id, branches_count=snapshot_content_max_size)
branches, releases = process_snapshot_branches(snapshot)
cache.set(cache_entry_id, {
'branches': branches,
'releases': releases,
})
return branches, releases
def get_origin_visit_snapshot(origin_info, visit_ts=None, visit_id=None,
snapshot_id=None):
"""Returns the lists of branches and releases
associated to a swh origin for a given visit.
The visit is expressed by a timestamp; the closest visit to
the provided timestamp will be used.
If no visit parameter is provided, it returns the list of branches
found for the latest visit.
That list is put in cache in order to speed up the navigation
in the swh-web/browse ui.
.. warning:: At most 1000 branches contained in the snapshot
will be returned for performance reasons.
Args:
origin_info (dict): a dict filled with origin information
(id, url, type)
visit_ts (int or str): an ISO date string or Unix timestamp to parse
visit_id (int): optional visit id for disambiguation in case
several visits have the same timestamp
Returns:
A tuple with two members. The first one is a list of dict describing
the origin branches for the given visit.
The second one is a list of dict describing the origin releases
for the given visit.
Raises:
NotFoundExc if the origin or its visit are not found
"""
visit_info = get_origin_visit(origin_info, visit_ts, visit_id, snapshot_id)
return get_snapshot_content(visit_info['snapshot'])
def gen_link(url, link_text=None, link_attrs=None):
"""
Utility function for generating an HTML link to insert
in Django templates.
Args:
url (str): an url
link_text (str): optional text for the produced link,
if not provided the url will be used
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="url">link_text</a>'
"""
attrs = ' '
if link_attrs:
for k, v in link_attrs.items():
attrs += '%s="%s" ' % (k, v)
if not link_text:
link_text = url
link = '<a%shref="%s">%s</a>' \
% (attrs, escape(url), escape(link_text))
return mark_safe(link)
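For reference, the markup gen_link produces (escape()/mark_safe() need Django; the values are illustrative):

# gen_link('https://example.org', 'Example', {'class': 'btn'})
# -> '<a class="btn" href="https://example.org">Example</a>'
# gen_link('https://example.org')
# -> '<a href="https://example.org">https://example.org</a>'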
def _snapshot_context_query_params(snapshot_context):
query_params = None
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
query_params = {'origin': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
query_params['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
elif snapshot_context:
query_params = {'snapshot_id': snapshot_context['snapshot_id']}
return query_params
-def gen_person_link(person_id, person_name, snapshot_context=None,
- link_attrs=None):
- """
- Utility function for generating a link to a person HTML view
- to insert in Django templates.
-
- Args:
- person_id (int): a person id
- person_name (str): the associated person name
- link_attrs (dict): optional attributes (e.g. class)
- to add to the link
-
- Returns:
- An HTML link in the form '<a href="person_view_url">person_name</a>'
-
- """
- query_params = _snapshot_context_query_params(snapshot_context)
- person_url = reverse('browse-person', url_args={'person_id': person_id},
- query_params=query_params)
- return gen_link(person_url, person_name or 'None', link_attrs)
-
-
def gen_revision_url(revision_id, snapshot_context=None):
"""
Utility function for generating an url to a revision.
Args:
revision_id (str): a revision id
snapshot_context (dict): if provided, generate snapshot-dependent
browsing url
Returns:
str: The url to browse the revision
"""
query_params = _snapshot_context_query_params(snapshot_context)
return reverse('browse-revision',
url_args={'sha1_git': revision_id},
query_params=query_params)
def gen_revision_link(revision_id, shorten_id=False, snapshot_context=None,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'}):
"""
Utility function for generating a link to a revision HTML view
to insert in Django templates.
Args:
revision_id (str): a revision id
shorten_id (boolean): whether to shorten the revision id to 7
characters for the link text
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
link_text (str): optional text for the generated link
(the revision id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
str: An HTML link in the form '<a href="revision_url">revision_id</a>'
"""
if not revision_id:
return None
revision_url = gen_revision_url(revision_id, snapshot_context)
if shorten_id:
return gen_link(revision_url, revision_id[:7], link_attrs)
else:
if not link_text:
link_text = revision_id
return gen_link(revision_url, link_text, link_attrs)
def gen_directory_link(sha1_git, snapshot_context=None, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'}):
"""
Utility function for generating a link to a directory HTML view
to insert in Django templates.
Args:
sha1_git (str): directory identifier
link_text (str): optional text for the generated link
(the directory id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="directory_view_url">link_text</a>'
"""
if not sha1_git:
return None
query_params = _snapshot_context_query_params(snapshot_context)
directory_url = reverse('browse-directory',
url_args={'sha1_git': sha1_git},
query_params=query_params)
if not link_text:
link_text = sha1_git
return gen_link(directory_url, link_text, link_attrs)
def gen_snapshot_link(snapshot_id, snapshot_context=None, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'}):
"""
Utility function for generating a link to a snapshot HTML view
to insert in Django templates.
Args:
snapshot_id (str): snapshot identifier
link_text (str): optional text for the generated link
(the snapshot id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="snapshot_view_url">link_text</a>'
"""
query_params = _snapshot_context_query_params(snapshot_context)
snapshot_url = reverse('browse-snapshot',
url_args={'snapshot_id': snapshot_id},
query_params=query_params)
if not link_text:
link_text = snapshot_id
return gen_link(snapshot_url, link_text, link_attrs)
def gen_content_link(sha1_git, snapshot_context=None, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'}):
"""
Utility function for generating a link to a content HTML view
to insert in Django templates.
Args:
sha1_git (str): content identifier
link_text (str): optional text for the generated link
(the content sha1_git will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="content_view_url">link_text</a>'
"""
if not sha1_git:
return None
query_params = _snapshot_context_query_params(snapshot_context)
content_url = reverse('browse-content',
url_args={'query_string': 'sha1_git:' + sha1_git},
query_params=query_params)
if not link_text:
link_text = sha1_git
return gen_link(content_url, link_text, link_attrs)
def get_revision_log_url(revision_id, snapshot_context=None):
"""
Utility function for getting the URL for a revision log HTML view
(possibly in the context of an origin).
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
Returns:
The revision log view URL
"""
query_params = {'revision': revision_id}
if snapshot_context and snapshot_context['origin_info']:
origin_info = snapshot_context['origin_info']
url_args = {'origin_url': origin_info['url']}
if 'timestamp' in snapshot_context['url_args']:
url_args['timestamp'] = \
snapshot_context['url_args']['timestamp']
if 'visit_id' in snapshot_context['query_params']:
query_params['visit_id'] = \
snapshot_context['query_params']['visit_id']
revision_log_url = reverse('browse-origin-log',
url_args=url_args,
query_params=query_params)
elif snapshot_context:
url_args = {'snapshot_id': snapshot_context['snapshot_id']}
revision_log_url = reverse('browse-snapshot-log',
url_args=url_args,
query_params=query_params)
else:
revision_log_url = reverse('browse-revision-log',
url_args={'sha1_git': revision_id})
return revision_log_url
def gen_revision_log_link(revision_id, snapshot_context=None,
link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'}):
"""
Utility function for generating a link to a revision log HTML view
(possibly in the context of an origin) to insert in Django templates.
Args:
revision_id (str): revision identifier the history heads to
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
link_text (str): optional text to use for the generated link
(the revision id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form
'<a href="revision_log_view_url">link_text</a>'
"""
if not revision_id:
return None
revision_log_url = get_revision_log_url(revision_id, snapshot_context)
if not link_text:
link_text = revision_id
return gen_link(revision_log_url, link_text, link_attrs)
def gen_release_link(sha1_git, snapshot_context=None, link_text='Browse',
link_attrs={'class': 'btn btn-default btn-sm',
'role': 'button'}):
"""
Utility function for generating a link to a release HTML view
to insert in Django templates.
Args:
sha1_git (str): release identifier
link_text (str): optional text for the generated link
(the release id will be used by default)
link_attrs (dict): optional attributes (e.g. class)
to add to the link
Returns:
An HTML link in the form '<a href="release_view_url">link_text</a>'
"""
query_params = _snapshot_context_query_params(snapshot_context)
release_url = reverse('browse-release',
url_args={'sha1_git': sha1_git},
query_params=query_params)
if not link_text:
link_text = sha1_git
return gen_link(release_url, link_text, link_attrs)
def format_log_entries(revision_log, per_page, snapshot_context=None):
"""
Utility functions that process raw revision log data for HTML display.
Its purpose is to:
* add links to relevant browse views
* format date in human readable format
* truncate the message log
Args:
revision_log (list): raw revision log as returned by the swh-web api
per_page (int): number of log entries per page
snapshot_context (dict): if provided, generate snapshot-dependent
browsing link
"""
revision_log_data = []
for i, rev in enumerate(revision_log):
if i == per_page:
break
author_name = 'None'
author_fullname = 'None'
committer_fullname = 'None'
if rev['author']:
author_name = rev['author']['name'] or rev['author']['fullname']
author_fullname = rev['author']['fullname']
if rev['committer']:
committer_fullname = rev['committer']['fullname']
author_date = format_utc_iso_date(rev['date'])
committer_date = format_utc_iso_date(rev['committer_date'])
tooltip = 'revision %s\n' % rev['id']
tooltip += 'author: %s\n' % author_fullname
tooltip += 'author date: %s\n' % author_date
tooltip += 'committer: %s\n' % committer_fullname
tooltip += 'committer date: %s\n\n' % committer_date
if rev['message']:
tooltip += textwrap.indent(rev['message'], ' '*4)
revision_log_data.append({
'author': author_name,
'id': rev['id'][:7],
'message': rev['message'],
'date': author_date,
'commit_date': committer_date,
'url': gen_revision_url(rev['id'], snapshot_context),
'tooltip': tooltip
})
return revision_log_data
# list of origin types that can be found in the swh archive
# TODO: retrieve it dynamically in an efficient way instead
# of hardcoding it
_swh_origin_types = ['git', 'svn', 'deb', 'hg', 'ftp', 'deposit',
'pypi', 'npm']
def get_origin_info(origin_url, origin_type=None):
"""
Get info about a software origin.
Its main purpose is to automatically find an origin type
when it is not provided as parameter.
Args:
origin_url (str): complete url of a software origin
origin_type (str): optional origin type
Returns:
A dict with the following entries:
* type: the origin type
* url: the origin url
* id: the internal id of the origin
"""
if origin_type:
return service.lookup_origin({'type': origin_type,
'url': origin_url})
else:
for origin_type in _swh_origin_types:
try:
origin_info = service.lookup_origin({'type': origin_type,
'url': origin_url})
return origin_info
except Exception:
pass
raise NotFoundExc('Origin with url %s not found!' % escape(origin_url))
def get_snapshot_context(snapshot_id=None, origin_type=None, origin_url=None,
timestamp=None, visit_id=None):
"""
Utility function to compute relevant information when navigating
the archive in a snapshot context. The snapshot is either
referenced by its id or it will be retrieved from an origin visit.
Args:
snapshot_id (str): hexadecimal representation of a snapshot identifier,
all other parameters will be ignored if it is provided
origin_type (str): the origin type (git, svn, deposit, ...)
origin_url (str): the origin_url
(e.g. https://github.com/(user)/(repo)/)
timestamp (str): a datetime string for retrieving the closest
visit of the origin
visit_id (int): optional visit id for disambiguation in case
of several visits with the same timestamp
Returns:
A dict with the following entries:
* origin_info: dict containing origin information
* visit_info: dict containing visit information
* branches: the list of branches for the origin found
during the visit
* releases: the list of releases for the origin found
during the visit
* origin_browse_url: the url to browse the origin
* origin_branches_url: the url to browse the origin branches
* origin_releases_url: the url to browse the origin releases
* origin_visit_url: the url to browse the snapshot of the origin
found during the visit
* url_args: dict containing url arguments to use when browsing in
the context of the origin and its visit
Raises:
NotFoundExc: if no snapshot is found for the visit of an origin.
"""
origin_info = None
visit_info = None
url_args = None
query_params = {}
branches = []
releases = []
browse_url = None
visit_url = None
branches_url = None
releases_url = None
swh_type = 'snapshot'
if origin_url:
swh_type = 'origin'
origin_info = get_origin_info(origin_url, origin_type)
visit_info = get_origin_visit(origin_info, timestamp, visit_id,
snapshot_id)
fmt_date = format_utc_iso_date(visit_info['date'])
visit_info['fmt_date'] = fmt_date
snapshot_id = visit_info['snapshot']
if not snapshot_id:
raise NotFoundExc('No snapshot associated to the visit of origin '
'%s on %s' % (escape(origin_url), fmt_date))
# the provided timestamp is not necessarily equal to the one
# of the retrieved visit, so get the exact one in order
# to use it in the urls generated below
if timestamp:
timestamp = visit_info['date']
branches, releases = \
get_origin_visit_snapshot(origin_info, timestamp, visit_id,
snapshot_id)
url_args = {'origin_type': origin_type,
'origin_url': origin_info['url']}
query_params = {'visit_id': visit_id}
browse_url = reverse('browse-origin-visits',
url_args=url_args)
if timestamp:
url_args['timestamp'] = format_utc_iso_date(timestamp,
'%Y-%m-%dT%H:%M:%S')
visit_url = reverse('browse-origin-directory',
url_args=url_args,
query_params=query_params)
visit_info['url'] = visit_url
branches_url = reverse('browse-origin-branches',
url_args=url_args,
query_params=query_params)
releases_url = reverse('browse-origin-releases',
url_args=url_args,
query_params=query_params)
elif snapshot_id:
branches, releases = get_snapshot_content(snapshot_id)
url_args = {'snapshot_id': snapshot_id}
browse_url = reverse('browse-snapshot',
url_args=url_args)
branches_url = reverse('browse-snapshot-branches',
url_args=url_args)
releases_url = reverse('browse-snapshot-releases',
url_args=url_args)
releases = list(reversed(releases))
snapshot_size = service.lookup_snapshot_size(snapshot_id)
is_empty = sum(snapshot_size.values()) == 0
swh_snp_id = persistent_identifier('snapshot', snapshot_id)
return {
'swh_type': swh_type,
'swh_object_id': swh_snp_id,
'snapshot_id': snapshot_id,
'snapshot_size': snapshot_size,
'is_empty': is_empty,
'origin_info': origin_info,
# keep track if the origin type was provided as url argument
'origin_type': origin_type,
'visit_info': visit_info,
'branches': branches,
'releases': releases,
'branch': None,
'release': None,
'browse_url': browse_url,
'branches_url': branches_url,
'releases_url': releases_url,
'url_args': url_args,
'query_params': query_params
}
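# Illustrative usage sketch for get_snapshot_context; argument values are
# assumed examples.
def _example_get_snapshot_context():
    # browsing in the context of an origin: branches, releases and the
    # urls to browse them are computed from the most recent visit snapshot
    origin_ctx = get_snapshot_context(
        origin_type='git', origin_url='https://github.com/python/cpython')
    # browsing a bare snapshot by identifier: origin_info remains None
    snapshot_ctx = get_snapshot_context(
        snapshot_id='1a8893e6a86f444e8be8e7bda6cb34fb1735a00e')
    return origin_ctx['branches'], snapshot_ctx['releases']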
# list of common readme names ordered by preference
# (lower indices have higher priority)
_common_readme_names = [
"readme.markdown",
"readme.md",
"readme.rst",
"readme.txt",
"readme"
]
def get_readme_to_display(readmes):
"""
Process a list of readme files found in a directory
in order to find the adequate one to display.
Args:
readmes: a dict whose keys are readme file names and values
are readme sha1 checksums
Returns:
A tuple (readme_name, readme_url, readme_html)
"""
readme_name = None
readme_url = None
readme_sha1 = None
readme_html = None
lc_readmes = {k.lower(): {'orig_name': k, 'sha1': v}
for k, v in readmes.items()}
# look for readme names according to the preference order
# defined by the _common_readme_names list
for common_readme_name in _common_readme_names:
if common_readme_name in lc_readmes:
readme_name = lc_readmes[common_readme_name]['orig_name']
readme_sha1 = lc_readmes[common_readme_name]['sha1']
readme_url = reverse('browse-content-raw',
url_args={'query_string': readme_sha1},
query_params={'re_encode': 'true'})
break
# otherwise pick the first readme like file if any
if not readme_name and len(readmes.items()) > 0:
readme_name = next(iter(readmes))
readme_sha1 = readmes[readme_name]
readme_url = reverse('browse-content-raw',
url_args={'query_string': readme_sha1},
query_params={'re_encode': 'true'})
# convert rst README to html server side as there is
# no viable solution to perform that task client side
if readme_name and readme_name.endswith('.rst'):
cache_entry_id = 'readme_%s' % readme_sha1
cache_entry = cache.get(cache_entry_id)
if cache_entry:
readme_html = cache_entry
else:
try:
rst_doc = request_content(readme_sha1)
readme_html = pypandoc.convert_text(rst_doc['raw_data'],
'html', format='rst')
cache.set(cache_entry_id, readme_html)
except Exception:
readme_html = 'Readme bytes are not available'
return readme_name, readme_url, readme_html
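# Illustrative usage sketch for get_readme_to_display; file names and sha1
# values are assumed examples.
def _example_get_readme_to_display():
    readmes = {
        'README.rst': '8624bcdae55baeef00cd11d5dfcfa60f68710a02',
        'readme.txt': 'd1e01cf1c3d1c0bfc64fdedc5e688478a3e07d0a',
    }
    # 'README.rst' is selected because 'readme.rst' precedes 'readme.txt'
    # in _common_readme_names; the original casing is kept
    readme_name, readme_url, readme_html = get_readme_to_display(readmes)
    return readme_name, readme_url, readme_html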
def get_swh_persistent_ids(swh_objects, snapshot_context=None):
"""
Returns a list of dict containing info related to persistent
identifiers of swh objects.
Args:
swh_objects (list): a list of dict with the following keys:
* type: swh object type
(content/directory/release/revision/snapshot)
* id: swh object id
snapshot_context (dict): optional parameter describing the snapshot in
which the object has been found
Returns:
list: a list of dict with the following keys:
* object_type: the swh object type
(content/directory/release/revision/snapshot)
* object_icon: the swh object icon to use in HTML views
* swh_id: the computed swh object persistent identifier
* swh_id_url: the url resolving the persistent identifier
* show_options: boolean indicating if the persistent id options
must be displayed in persistent ids HTML view
"""
swh_ids = []
for swh_object in swh_objects:
if not swh_object['id']:
continue
swh_id = get_swh_persistent_id(swh_object['type'], swh_object['id'])
show_options = swh_object['type'] == 'content' or \
(snapshot_context and snapshot_context['origin_info'] is not None)
object_icon = swh_object_icons[swh_object['type']]
swh_ids.append({
'object_type': swh_object['type'],
'object_icon': object_icon,
'swh_id': swh_id,
'swh_id_url': reverse('browse-swh-id',
url_args={'swh_id': swh_id}),
'show_options': show_options
})
return swh_ids
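# Illustrative usage sketch for get_swh_persistent_ids; the revision id is
# an assumed example value.
def _example_get_swh_persistent_ids():
    swh_objects = [{'type': 'revision',
                    'id': 'aafb16d69fd30ff58afdd69036a26047f3aebdc6'}]
    swh_ids = get_swh_persistent_ids(swh_objects)
    # each entry carries the computed identifier (e.g. 'swh:1:rev:aafb16d6...')
    # and the url of the view resolving it
    return swh_ids[0]['swh_id'], swh_ids[0]['swh_id_url']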
diff --git a/swh/web/browse/views/person.py b/swh/web/browse/views/person.py
deleted file mode 100644
index 1b8a7da9..00000000
--- a/swh/web/browse/views/person.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright (C) 2017-2018 The Software Heritage developers
-# See the AUTHORS file at the top-level directory of this distribution
-# License: GNU Affero General Public License version 3, or any later version
-# See top-level LICENSE file for more information
-
-
-from django.shortcuts import render
-from swh.web.common import service
-from swh.web.common.exc import handle_view_exception
-from swh.web.browse.browseurls import browse_route
-from swh.web.browse.utils import get_snapshot_context
-
-
-@browse_route(r'person/(?P<person_id>[0-9]+)/',
- view_name='browse-person')
-def person_browse(request, person_id):
- """
- Django view that produces an HTML display of a swh person
- identified by its id.
-
- The url that points to it is :http:get:`/browse/person/(person_id)/`.
- """
- try:
- snapshot_context = None
- origin_type = request.GET.get('origin_type', None)
- origin_url = request.GET.get('origin_url', None)
- if not origin_url:
- origin_url = request.GET.get('origin', None)
- snapshot_id = request.GET.get('snapshot_id', None)
- if origin_url:
- snapshot_context = get_snapshot_context(None, origin_type,
- origin_url)
- elif snapshot_id:
- snapshot_context = get_snapshot_context(snapshot_id)
- person = service.lookup_person(person_id)
- except Exception as exc:
- return handle_view_exception(request, exc)
-
- heading = 'Person - %s' % person['fullname']
- if snapshot_context:
- context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
- if origin_url:
- context_found = 'origin: %s' % origin_url
- heading += ' - %s' % context_found
-
- return render(request, 'browse/person.html',
- {'heading': heading,
- 'swh_object_name': 'Person',
- 'swh_object_metadata': person,
- 'snapshot_context': snapshot_context,
- 'vault_cooking': None,
- 'show_actions_menu': False})
diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py
index 7e8295f8..9dba53e1 100644
--- a/swh/web/browse/views/release.py
+++ b/swh/web/browse/views/release.py
@@ -1,194 +1,192 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from django.shortcuts import render
from swh.web.common import service
from swh.web.common.utils import (
reverse, format_utc_iso_date
)
from swh.web.common.exc import NotFoundExc, handle_view_exception
from swh.web.browse.browseurls import browse_route
from swh.web.browse.utils import (
- gen_person_link, gen_revision_link, get_snapshot_context, gen_link,
+ gen_revision_link, get_snapshot_context, gen_link,
gen_snapshot_link, get_swh_persistent_ids, gen_directory_link,
gen_content_link, gen_release_link
)
@browse_route(r'release/(?P<sha1_git>[0-9a-f]+)/',
view_name='browse-release',
checksum_args=['sha1_git'])
def release_browse(request, sha1_git):
"""
Django view that produces an HTML display of a release
identified by its id.
The url that points to it is :http:get:`/browse/release/(sha1_git)/`.
"""
try:
release = service.lookup_release(sha1_git)
snapshot_context = None
origin_info = None
snapshot_id = request.GET.get('snapshot_id', None)
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
if origin_url:
try:
snapshot_context = \
get_snapshot_context(snapshot_id, origin_type,
origin_url, timestamp,
visit_id)
except Exception:
raw_rel_url = reverse('browse-release',
url_args={'sha1_git': sha1_git})
error_message = \
('The Software Heritage archive has a release '
'with the hash you provided but the origin '
'mentioned in your request appears broken: %s. '
'Please check the URL and try again.\n\n'
'Nevertheless, you can still browse the release '
'without origin information: %s'
% (gen_link(origin_url), gen_link(raw_rel_url)))
raise NotFoundExc(error_message)
origin_info = snapshot_context['origin_info']
elif snapshot_id:
snapshot_context = get_snapshot_context(snapshot_id)
except Exception as exc:
return handle_view_exception(request, exc)
release_data = {}
author_name = 'None'
release_data['author'] = 'None'
if release['author']:
author_name = release['author']['name'] or \
release['author']['fullname']
- release_data['author'] = \
- gen_person_link(release['author']['id'], author_name,
- snapshot_context)
+ release_data['author'] = author_name
release_data['date'] = format_utc_iso_date(release['date'])
release_data['release'] = sha1_git
release_data['name'] = release['name']
release_data['synthetic'] = release['synthetic']
release_data['target'] = release['target']
release_data['target type'] = release['target_type']
if snapshot_context:
if release['target_type'] == 'revision':
release_data['context-independent target'] = \
gen_revision_link(release['target'])
elif release['target_type'] == 'content':
release_data['context-independent target'] = \
gen_content_link(release['target'])
elif release['target_type'] == 'directory':
release_data['context-independent target'] = \
gen_directory_link(release['target'])
elif release['target_type'] == 'release':
release_data['context-independent target'] = \
gen_release_link(release['target'])
release_note_lines = []
if release['message']:
release_note_lines = release['message'].split('\n')
vault_cooking = None
target_link = None
if release['target_type'] == 'revision':
target_link = gen_revision_link(release['target'],
snapshot_context=snapshot_context,
link_text=None, link_attrs=None)
try:
revision = service.lookup_revision(release['target'])
vault_cooking = {
'directory_context': True,
'directory_id': revision['directory'],
'revision_context': True,
'revision_id': release['target']
}
except Exception:
pass
elif release['target_type'] == 'directory':
target_link = gen_directory_link(release['target'],
snapshot_context=snapshot_context,
link_text=None, link_attrs=None)
try:
# check that the directory pointed to by the release exists
service.lookup_directory(release['target'])
vault_cooking = {
'directory_context': True,
'directory_id': release['target'],
'revision_context': False,
'revision_id': None
}
except Exception:
pass
elif release['target_type'] == 'content':
target_link = gen_content_link(release['target'],
snapshot_context=snapshot_context,
link_text=None, link_attrs=None)
elif release['target_type'] == 'release':
target_link = gen_release_link(release['target'],
snapshot_context=snapshot_context,
link_text=None, link_attrs=None)
release['target_link'] = target_link
if snapshot_context:
release_data['snapshot'] = snapshot_context['snapshot_id']
if origin_info:
release_data['context-independent release'] = \
gen_release_link(release['id'])
release_data['origin type'] = origin_info['type']
release_data['origin url'] = gen_link(origin_info['url'],
origin_info['url'])
browse_snapshot_link = \
gen_snapshot_link(snapshot_context['snapshot_id'])
release_data['context-independent snapshot'] = browse_snapshot_link
swh_objects = [{'type': 'release',
'id': sha1_git}]
if snapshot_context:
snapshot_id = snapshot_context['snapshot_id']
if snapshot_id:
swh_objects.append({'type': 'snapshot',
'id': snapshot_id})
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
note_header = 'None'
if len(release_note_lines) > 0:
note_header = release_note_lines[0]
release['note_header'] = note_header
release['note_body'] = '\n'.join(release_note_lines[1:])
heading = 'Release - %s' % release['name']
if snapshot_context:
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_info:
context_found = 'origin: %s' % origin_info['url']
heading += ' - %s' % context_found
return render(request, 'browse/release.html',
{'heading': heading,
'swh_object_id': swh_ids[0]['swh_id'],
'swh_object_name': 'Release',
'swh_object_metadata': release_data,
'release': release,
'snapshot_context': snapshot_context,
'show_actions_menu': True,
'breadcrumbs': None,
'vault_cooking': vault_cooking,
'top_right_link': None,
'swh_ids': swh_ids})
diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py
index d9c275f5..a6125b81 100644
--- a/swh/web/browse/views/revision.py
+++ b/swh/web/browse/views/revision.py
@@ -1,536 +1,534 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import hashlib
import json
import textwrap
from django.http import HttpResponse
from django.shortcuts import render
from django.template.defaultfilters import filesizeformat
from django.utils.html import escape
from django.utils.safestring import mark_safe
from swh.model.identifiers import persistent_identifier
from swh.web.common import service
from swh.web.common.utils import (
reverse, format_utc_iso_date, gen_path_info, swh_object_icons
)
from swh.web.common.exc import NotFoundExc, handle_view_exception
from swh.web.browse.browseurls import browse_route
from swh.web.browse.utils import (
- gen_link, gen_person_link, gen_revision_link, gen_revision_url,
+ gen_link, gen_revision_link, gen_revision_url,
get_snapshot_context, get_revision_log_url, get_directory_entries,
gen_directory_link, request_content, prepare_content_for_display,
content_display_max_size, gen_snapshot_link, get_readme_to_display,
get_swh_persistent_ids, format_log_entries
)
def _gen_content_url(revision, query_string, path, snapshot_context):
if snapshot_context:
url_args = snapshot_context['url_args']
url_args['path'] = path
query_params = snapshot_context['query_params']
query_params['revision'] = revision['id']
content_url = reverse('browse-origin-content',
url_args=url_args,
query_params=query_params)
else:
content_path = '%s/%s' % (revision['directory'], path)
content_url = reverse('browse-content',
url_args={'query_string': query_string},
query_params={'path': content_path})
return content_url
def _gen_diff_link(idx, diff_anchor, link_text):
if idx < _max_displayed_file_diffs:
return gen_link(diff_anchor, link_text)
else:
return link_text
# TODO: put in conf
_max_displayed_file_diffs = 1000
def _gen_revision_changes_list(revision, changes, snapshot_context):
"""
Returns a HTML string describing the file changes
introduced in a revision.
As this string will be displayed in the browse revision view,
links to adequate file diffs are also generated.
Args:
revision (str): hexadecimal representation of a revision identifier
changes (list): list of file changes in the revision
snapshot_context (dict): optional origin context used to reverse
the content urls
Returns:
A string to insert in a revision HTML view.
"""
changes_msg = []
for i, change in enumerate(changes):
hasher = hashlib.sha1()
from_query_string = ''
to_query_string = ''
diff_id = 'diff-'
if change['from']:
from_query_string = 'sha1_git:' + change['from']['target']
diff_id += change['from']['target'] + '-' + change['from_path']
diff_id += '-'
if change['to']:
to_query_string = 'sha1_git:' + change['to']['target']
diff_id += change['to']['target'] + change['to_path']
change['path'] = change['to_path'] or change['from_path']
url_args = {'from_query_string': from_query_string,
'to_query_string': to_query_string}
query_params = {'path': change['path']}
change['diff_url'] = reverse('diff-contents',
url_args=url_args,
query_params=query_params)
hasher.update(diff_id.encode('utf-8'))
diff_id = hasher.hexdigest()
change['id'] = diff_id
panel_diff_link = '#panel_' + diff_id
if change['type'] == 'modify':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
changes_msg.append('modified: %s' %
_gen_diff_link(i, panel_diff_link,
change['to_path']))
elif change['type'] == 'insert':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
changes_msg.append('new file: %s' %
_gen_diff_link(i, panel_diff_link,
change['to_path']))
elif change['type'] == 'delete':
parent = service.lookup_revision(revision['parents'][0])
change['content_url'] = \
_gen_content_url(parent,
from_query_string,
change['from_path'], snapshot_context)
changes_msg.append('deleted: %s' %
_gen_diff_link(i, panel_diff_link,
change['from_path']))
elif change['type'] == 'rename':
change['content_url'] = \
_gen_content_url(revision, to_query_string,
change['to_path'], snapshot_context)
link_text = change['from_path'] + ' &rarr; ' + change['to_path']
changes_msg.append('renamed: %s' %
_gen_diff_link(i, panel_diff_link, link_text))
if not changes:
changes_msg.append('No changes')
return mark_safe('\n'.join(changes_msg))
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/diff/',
view_name='diff-revision',
checksum_args=['sha1_git'])
def _revision_diff(request, sha1_git):
"""
Browse internal endpoint to compute revision diff
"""
try:
revision = service.lookup_revision(sha1_git)
snapshot_context = None
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
if origin_url:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url,
timestamp, visit_id)
except Exception as exc:
return handle_view_exception(request, exc)
changes = service.diff_revision(sha1_git)
changes_msg = _gen_revision_changes_list(revision, changes,
snapshot_context)
diff_data = {
'total_nb_changes': len(changes),
'changes': changes[:_max_displayed_file_diffs],
'changes_msg': changes_msg
}
diff_data_json = json.dumps(diff_data, separators=(',', ': '))
return HttpResponse(diff_data_json, content_type='application/json')
NB_LOG_ENTRIES = 100
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/log/',
view_name='browse-revision-log',
checksum_args=['sha1_git'])
def revision_log_browse(request, sha1_git):
"""
Django view that produces an HTML display of the history
log for a revision identified by its id.
The url that points to it is :http:get:`/browse/revision/(sha1_git)/log/`
"""
try:
per_page = int(request.GET.get('per_page', NB_LOG_ENTRIES))
offset = int(request.GET.get('offset', 0))
revs_ordering = request.GET.get('revs_ordering', 'committer_date')
session_key = 'rev_%s_log_ordering_%s' % (sha1_git, revs_ordering)
rev_log_session = request.session.get(session_key, None)
rev_log = []
revs_walker_state = None
if rev_log_session:
rev_log = rev_log_session['rev_log']
revs_walker_state = rev_log_session['revs_walker_state']
if len(rev_log) < offset+per_page:
revs_walker = \
service.get_revisions_walker(revs_ordering, sha1_git,
max_revs=offset+per_page+1,
state=revs_walker_state)
rev_log += [rev['id'] for rev in revs_walker]
revs_walker_state = revs_walker.export_state()
revs = rev_log[offset:offset+per_page]
revision_log = service.lookup_revision_multiple(revs)
request.session[session_key] = {
'rev_log': rev_log,
'revs_walker_state': revs_walker_state
}
except Exception as exc:
return handle_view_exception(request, exc)
revs_ordering = request.GET.get('revs_ordering', '')
prev_log_url = None
if len(rev_log) > offset + per_page:
prev_log_url = reverse('browse-revision-log',
url_args={'sha1_git': sha1_git},
query_params={'per_page': per_page,
'offset': offset + per_page,
'revs_ordering': revs_ordering})
next_log_url = None
if offset != 0:
next_log_url = reverse('browse-revision-log',
url_args={'sha1_git': sha1_git},
query_params={'per_page': per_page,
'offset': offset - per_page,
'revs_ordering': revs_ordering})
revision_log_data = format_log_entries(revision_log, per_page)
swh_rev_id = persistent_identifier('revision', sha1_git)
return render(request, 'browse/revision-log.html',
{'heading': 'Revision history',
'swh_object_id': swh_rev_id,
'swh_object_name': 'Revisions history',
'swh_object_metadata': None,
'revision_log': revision_log_data,
'revs_ordering': revs_ordering,
'next_log_url': next_log_url,
'prev_log_url': prev_log_url,
'breadcrumbs': None,
'top_right_link': None,
'snapshot_context': None,
'vault_cooking': None,
'show_actions_menu': True,
'swh_ids': None})
@browse_route(r'revision/(?P<sha1_git>[0-9a-f]+)/',
r'revision/(?P<sha1_git>[0-9a-f]+)/(?P<extra_path>.+)/',
view_name='browse-revision',
checksum_args=['sha1_git'])
def revision_browse(request, sha1_git, extra_path=None):
"""
Django view that produces an HTML display of a revision
identified by its id.
The url that points to it is :http:get:`/browse/revision/(sha1_git)/`.
"""
try:
revision = service.lookup_revision(sha1_git)
origin_info = None
snapshot_context = None
origin_type = request.GET.get('origin_type', None)
origin_url = request.GET.get('origin_url', None)
if not origin_url:
origin_url = request.GET.get('origin', None)
timestamp = request.GET.get('timestamp', None)
visit_id = request.GET.get('visit_id', None)
snapshot_id = request.GET.get('snapshot_id', None)
path = request.GET.get('path', None)
dir_id = None
dirs, files = None, None
content_data = None
if origin_url:
try:
snapshot_context = get_snapshot_context(None, origin_type,
origin_url,
timestamp, visit_id)
except Exception:
raw_rev_url = reverse('browse-revision',
url_args={'sha1_git': sha1_git})
error_message = \
('The Software Heritage archive has a revision '
'with the hash you provided but the origin '
'mentioned in your request appears broken: %s. '
'Please check the URL and try again.\n\n'
'Nevertheless, you can still browse the revision '
'without origin information: %s'
% (gen_link(origin_url), gen_link(raw_rev_url)))
raise NotFoundExc(error_message)
origin_info = snapshot_context['origin_info']
snapshot_id = snapshot_context['snapshot_id']
elif snapshot_id:
snapshot_context = get_snapshot_context(snapshot_id)
if path:
file_info = \
service.lookup_directory_with_path(revision['directory'], path)
if file_info['type'] == 'dir':
dir_id = file_info['target']
else:
query_string = 'sha1_git:' + file_info['target']
content_data = request_content(query_string,
raise_if_unavailable=False)
else:
dir_id = revision['directory']
if dir_id:
path = '' if path is None else (path + '/')
dirs, files = get_directory_entries(dir_id)
except Exception as exc:
return handle_view_exception(request, exc)
revision_data = {}
author_name = 'None'
revision_data['author'] = 'None'
if revision['author']:
author_name = revision['author']['name'] or \
revision['author']['fullname']
- revision_data['author'] = \
- gen_person_link(revision['author']['id'], author_name,
- snapshot_context)
+ revision_data['author'] = author_name
revision_data['committer'] = 'None'
if revision['committer']:
- revision_data['committer'] = \
- gen_person_link(revision['committer']['id'],
- revision['committer']['name'], snapshot_context)
+ committer_name = revision['committer']['name'] or \
+ revision['committer']['fullname']
+ revision_data['committer'] = committer_name
revision_data['committer date'] = \
format_utc_iso_date(revision['committer_date'])
revision_data['date'] = format_utc_iso_date(revision['date'])
revision_data['directory'] = revision['directory']
if snapshot_context:
revision_data['snapshot'] = snapshot_id
browse_snapshot_link = \
gen_snapshot_link(snapshot_id)
revision_data['context-independent snapshot'] = browse_snapshot_link
revision_data['context-independent directory'] = \
gen_directory_link(revision['directory'])
revision_data['revision'] = sha1_git
revision_data['merge'] = revision['merge']
revision_data['metadata'] = escape(json.dumps(revision['metadata'],
sort_keys=True,
indent=4, separators=(',', ': ')))
if origin_info:
revision_data['origin type'] = origin_info['type']
revision_data['origin url'] = gen_link(origin_info['url'],
origin_info['url'])
revision_data['context-independent revision'] = \
gen_revision_link(sha1_git)
parents = ''
for p in revision['parents']:
parent_link = gen_revision_link(p, link_text=None, link_attrs=None,
snapshot_context=snapshot_context)
parents += parent_link + '<br/>'
revision_data['parents'] = mark_safe(parents)
revision_data['synthetic'] = revision['synthetic']
revision_data['type'] = revision['type']
message_lines = ['None']
if revision['message']:
message_lines = revision['message'].split('\n')
parents = []
for p in revision['parents']:
parent_url = gen_revision_url(p, snapshot_context)
parents.append({'id': p, 'url': parent_url})
path_info = gen_path_info(path)
query_params = {'snapshot_id': snapshot_id,
'origin_type': origin_type,
'origin': origin_url,
'timestamp': timestamp,
'visit_id': visit_id}
breadcrumbs = []
breadcrumbs.append({'name': revision['directory'][:7],
'url': reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)})
for pi in path_info:
query_params['path'] = pi['path']
breadcrumbs.append({'name': pi['name'],
'url': reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)})
vault_cooking = {
'directory_context': False,
'directory_id': None,
'revision_context': True,
'revision_id': sha1_git
}
swh_objects = [{'type': 'revision',
'id': sha1_git}]
content = None
content_size = None
mimetype = None
language = None
readme_name = None
readme_url = None
readme_html = None
readmes = {}
error_code = 200
error_message = ''
error_description = ''
if content_data:
breadcrumbs[-1]['url'] = None
content_size = content_data['length']
mimetype = content_data['mimetype']
if content_data['raw_data']:
content_display_data = prepare_content_for_display(
content_data['raw_data'], content_data['mimetype'], path)
content = content_display_data['content_data']
language = content_display_data['language']
mimetype = content_display_data['mimetype']
query_params = {}
if path:
filename = path_info[-1]['name']
query_params['filename'] = path_info[-1]['name']
revision_data['filename'] = filename
top_right_link = {
'url': reverse('browse-content-raw',
url_args={'query_string': query_string},
query_params=query_params),
'icon': swh_object_icons['content'],
'text': 'Raw File'
}
swh_objects.append({'type': 'content',
'id': file_info['target']})
error_code = content_data['error_code']
error_message = content_data['error_message']
error_description = content_data['error_description']
else:
for d in dirs:
if d['type'] == 'rev':
d['url'] = reverse('browse-revision',
url_args={'sha1_git': d['target']})
else:
query_params['path'] = path + d['name']
d['url'] = reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)
for f in files:
query_params['path'] = path + f['name']
f['url'] = reverse('browse-revision',
url_args={'sha1_git': sha1_git},
query_params=query_params)
if f['length'] is not None:
f['length'] = filesizeformat(f['length'])
if f['name'].lower().startswith('readme'):
readmes[f['name']] = f['checksums']['sha1']
readme_name, readme_url, readme_html = get_readme_to_display(readmes)
top_right_link = {
'url': get_revision_log_url(sha1_git, snapshot_context),
'icon': swh_object_icons['revisions history'],
'text': 'History'
}
vault_cooking['directory_context'] = True
vault_cooking['directory_id'] = dir_id
swh_objects.append({'type': 'directory',
'id': dir_id})
diff_revision_url = reverse('diff-revision',
url_args={'sha1_git': sha1_git},
query_params={'origin_type': origin_type,
'origin': origin_url,
'timestamp': timestamp,
'visit_id': visit_id})
if snapshot_id:
swh_objects.append({'type': 'snapshot',
'id': snapshot_id})
swh_ids = get_swh_persistent_ids(swh_objects, snapshot_context)
heading = 'Revision - %s - %s' %\
(sha1_git[:7], textwrap.shorten(message_lines[0], width=70))
if snapshot_context:
context_found = 'snapshot: %s' % snapshot_context['snapshot_id']
if origin_info:
context_found = 'origin: %s' % origin_info['url']
heading += ' - %s' % context_found
return render(request, 'browse/revision.html',
{'heading': heading,
'swh_object_id': swh_ids[0]['swh_id'],
'swh_object_name': 'Revision',
'swh_object_metadata': revision_data,
'message_header': message_lines[0],
'message_body': '\n'.join(message_lines[1:]),
'parents': parents,
'snapshot_context': snapshot_context,
'dirs': dirs,
'files': files,
'content': content,
'content_size': content_size,
'max_content_size': content_display_max_size,
'mimetype': mimetype,
'language': language,
'readme_name': readme_name,
'readme_url': readme_url,
'readme_html': readme_html,
'breadcrumbs': breadcrumbs,
'top_right_link': top_right_link,
'vault_cooking': vault_cooking,
'diff_revision_url': diff_revision_url,
'show_actions_menu': True,
'swh_ids': swh_ids,
'error_code': error_code,
'error_message': error_message,
'error_description': error_description},
status=error_code)
diff --git a/swh/web/common/converters.py b/swh/web/common/converters.py
index c09663ed..71072176 100644
--- a/swh/web/common/converters.py
+++ b/swh/web/common/converters.py
@@ -1,374 +1,375 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import json
from swh.model import hashutil
from swh.core.utils import decode_with_escape
def _group_checksums(data):
"""Groups checksums values computed from hash functions used in swh
and stored in data dict under a single entry 'checksums'
"""
if data:
checksums = {}
for hash in hashutil.ALGORITHMS:
if hash in data and data[hash]:
checksums[hash] = data[hash]
del data[hash]
if len(checksums) > 0:
data['checksums'] = checksums
def fmap(f, data):
"""Map f to data at each level.
This must keep the original data structure type:
- map -> map
- dict -> dict
- list -> list
- None -> None
Args:
f: function that expects one argument.
data: data to traverse to apply the f function.
list, map, dict or bare value.
Returns:
The same data-structure with modified values by the f function.
"""
if data is None:
return data
if isinstance(data, map):
return map(lambda y: fmap(f, y), (x for x in data))
if isinstance(data, list):
return [fmap(f, x) for x in data]
if isinstance(data, dict):
return {k: fmap(f, v) for (k, v) in data.items()}
return f(data)
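# Illustrative usage sketch for fmap.
def _example_fmap():
    data = {'lengths': [1, 2, 3], 'nested': {'total': 6}}
    # the structure is preserved, only the leaf values are mapped
    doubled = fmap(lambda v: v * 2, data)
    assert doubled == {'lengths': [2, 4, 6], 'nested': {'total': 12}}
    return doubled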
def from_swh(dict_swh, hashess={}, bytess={}, dates={}, blacklist={},
removables_if_empty={}, empty_dict={}, empty_list={},
convert={}, convert_fn=lambda x: x):
"""Convert from a swh dictionary to something reasonably json
serializable.
Args:
dict_swh: the original dictionary to be transformed
hashess: list/set of keys representing hashes values (sha1, sha256,
sha1_git, etc...) as bytes. Those need to be transformed in
hexadecimal string
bytess: list/set of keys representing bytes values which needs to be
decoded
blacklist: set of keys to filter out from the conversion
convert: set of keys whose associated values need to be converted using
convert_fn
convert_fn: the conversion function to apply on the value of key in
'convert'
The remaining keys are copied as is in the output.
Returns:
dictionary equivalent to dict_swh, with the values of the selected keys converted.
"""
def convert_hashes_bytes(v):
"""v is supposedly a hash as bytes, returns it converted in hex.
"""
if isinstance(v, bytes):
return hashutil.hash_to_hex(v)
return v
def convert_bytes(v):
"""v is supposedly a bytes string, decode as utf-8.
FIXME: Improve decoding policy.
If not utf-8, break!
"""
if isinstance(v, bytes):
return v.decode('utf-8')
return v
def convert_date(v):
"""
Args:
v (dict or datetime): either:
- a dict with three keys:
- timestamp (dict or integer timestamp)
- offset
- negative_utc
- or, a datetime
We convert it to a human-readable string
"""
if not v:
return v
if isinstance(v, datetime.datetime):
return v.isoformat()
tz = datetime.timezone(datetime.timedelta(minutes=v['offset']))
swh_timestamp = v['timestamp']
if isinstance(swh_timestamp, dict):
date = datetime.datetime.fromtimestamp(
swh_timestamp['seconds'], tz=tz)
else:
date = datetime.datetime.fromtimestamp(
swh_timestamp, tz=tz)
datestr = date.isoformat()
if v['offset'] == 0 and v['negative_utc']:
# remove the rightmost + and replace it with a -
return '-'.join(datestr.rsplit('+', 1))
return datestr
if not dict_swh:
return dict_swh
new_dict = {}
for key, value in dict_swh.items():
if key in blacklist or (key in removables_if_empty and not value):
continue
if key in dates:
new_dict[key] = convert_date(value)
elif key in convert:
new_dict[key] = convert_fn(value)
elif isinstance(value, dict):
new_dict[key] = from_swh(value,
hashess=hashess, bytess=bytess,
dates=dates, blacklist=blacklist,
removables_if_empty=removables_if_empty,
empty_dict=empty_dict,
empty_list=empty_list,
convert=convert,
convert_fn=convert_fn)
elif key in hashess:
new_dict[key] = fmap(convert_hashes_bytes, value)
elif key in bytess:
try:
new_dict[key] = fmap(convert_bytes, value)
except UnicodeDecodeError:
if 'decoding_failures' not in new_dict:
new_dict['decoding_failures'] = [key]
else:
new_dict['decoding_failures'].append(key)
new_dict[key] = fmap(decode_with_escape, value)
elif key in empty_dict and not value:
new_dict[key] = {}
elif key in empty_list and not value:
new_dict[key] = []
else:
new_dict[key] = value
_group_checksums(new_dict)
return new_dict
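# Illustrative usage sketch for from_swh; the values below are assumed
# examples.
def _example_from_swh():
    author = {'name': b'Jane Doe',
              'email': b'jane@example.com',
              'fullname': b'Jane Doe <jane@example.com>'}
    # keys listed in bytess have their bytes values decoded to str,
    # the remaining keys are copied as is
    serializable = from_swh(author, bytess={'name', 'email', 'fullname'})
    assert serializable['name'] == 'Jane Doe'
    return serializable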
def from_origin(origin):
"""Convert from a swh origin to an origin dictionary.
"""
return from_swh(origin)
def from_release(release):
"""Convert from a swh release to a json serializable release dictionary.
Args:
release (dict): dictionary with keys:
- id: identifier of the release (sha1 in bytes)
- revision: identifier of the revision the release points to (sha1
in bytes)
- comment: release's comment message (bytes)
- name: release's name (string)
- author: release's author identifier (swh's id)
- synthetic: the synthetic property (boolean)
Returns:
dict: Release dictionary with the following keys:
- id: hexadecimal sha1 (string)
- revision: hexadecimal sha1 (string)
- comment: release's comment message (string)
- name: release's name (string)
- author: release's author identifier (swh's id)
- synthetic: the synthetic property (boolean)
"""
return from_swh(
release,
hashess={'id', 'target'},
bytess={'message', 'name', 'fullname', 'email'},
dates={'date'},
)
class SWHMetadataEncoder(json.JSONEncoder):
"""Special json encoder for metadata field which can contain bytes
encoded value.
"""
def default(self, obj):
if isinstance(obj, bytes):
try:
return obj.decode('utf-8')
except UnicodeDecodeError:
# fallback to binary representation to avoid display errors
return repr(obj)
# Let the base class default method raise the TypeError
return json.JSONEncoder.default(self, obj)
def convert_revision_metadata(metadata):
"""Convert json specific dict to a json serializable one.
"""
if not metadata:
return {}
return json.loads(json.dumps(metadata, cls=SWHMetadataEncoder))
def from_revision(revision):
"""Convert from a swh revision to a json serializable revision dictionary.
Args:
revision (dict): dict with keys:
- id: identifier of the revision (sha1 in bytes)
- directory: identifier of the directory the revision points to
(sha1 in bytes)
- author_name, author_email: author's revision name and email
- committer_name, committer_email: committer's revision name and
email
- message: revision's message
- date, date_offset: revision's author date
- committer_date, committer_date_offset: revision's commit date
- parents: list of parents for such revision
- synthetic: revision's property nature
- type: revision's type (git, tar or dsc at the moment)
- metadata: if the revision is synthetic, this can reference
dynamic properties.
Returns:
dict: Revision dictionary with the same keys as inputs, except:
- sha1s are in hexadecimal strings (id, directory)
- bytes are decoded in string (author_name, committer_name,
author_email, committer_email)
Remaining keys are left as is
"""
revision = from_swh(revision,
hashess={'id', 'directory', 'parents', 'children'},
bytess={'name', 'fullname', 'email'},
convert={'metadata'},
convert_fn=convert_revision_metadata,
dates={'date', 'committer_date'})
if revision:
if 'parents' in revision:
revision['merge'] = len(revision['parents']) > 1
if 'message' in revision:
try:
revision['message'] = revision['message'].decode('utf-8')
except UnicodeDecodeError:
revision['message_decoding_failed'] = True
revision['message'] = None
return revision
def from_content(content):
"""Convert swh content to serializable content dictionary.
"""
return from_swh(content,
hashess={'sha1', 'sha1_git', 'sha256', 'blake2s256'},
blacklist={'ctime'},
convert={'status'},
convert_fn=lambda v: 'absent' if v == 'hidden' else v)
def from_person(person):
"""Convert swh person to serializable person dictionary.
"""
return from_swh(person,
bytess={'name', 'fullname', 'email'})
def from_origin_visit(visit):
"""Convert swh origin_visit to serializable origin_visit dictionary.
"""
ov = from_swh(visit,
hashess={'target', 'snapshot'},
bytess={'branch'},
dates={'date'},
empty_dict={'metadata'})
return ov
def from_snapshot(snapshot):
"""Convert swh snapshot to serializable snapshot dictionary.
"""
sv = from_swh(snapshot,
hashess={'id', 'target'},
bytess={'next_branch'})
if sv and 'branches' in sv:
sv['branches'] = {
decode_with_escape(k): v
for k, v in sv['branches'].items()
}
for k, v in snapshot['branches'].items():
# alias target existing branch names, not a sha1
if v and v['target_type'] == 'alias':
branch = decode_with_escape(k)
target = decode_with_escape(v['target'])
sv['branches'][branch]['target'] = target
return sv
def from_directory_entry(dir_entry):
- """Convert swh person to serializable person dictionary.
+ """Convert swh directory to serializable directory dictionary.
"""
return from_swh(dir_entry,
hashess={'dir_id', 'sha1_git', 'sha1', 'sha256',
'blake2s256', 'target'},
bytess={'name'},
removables_if_empty={
'sha1', 'sha1_git', 'sha256', 'blake2s256', 'status'},
convert={'status'},
convert_fn=lambda v: 'absent' if v == 'hidden' else v)
def from_filetype(content_entry):
- """Convert swh person to serializable person dictionary.
+ """Convert swh content to serializable dictionary containing keys
+ 'id', 'encoding', and 'mimetype'.
"""
return from_swh(content_entry,
hashess={'id'})
diff --git a/swh/web/common/service.py b/swh/web/common/service.py
index 00f7a79d..08f8369c 100644
--- a/swh/web/common/service.py
+++ b/swh/web/common/service.py
@@ -1,1128 +1,1109 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import os
from collections import defaultdict
from swh.model import hashutil
from swh.storage.algos import diff, revisions_walker
from swh.web.common import converters
from swh.web.common import query
from swh.web.common.exc import NotFoundExc
from swh.web.common.origin_visits import get_origin_visit
from swh.web import config
storage = config.storage()
vault = config.vault()
idx_storage = config.indexer_storage()
MAX_LIMIT = 50 # Top limit the users can ask for
def _first_element(l):
"""Returns the first element in the provided list or None
if it is empty or None"""
return next(iter(l or []), None)
def lookup_multiple_hashes(hashes):
"""Lookup the passed hashes in a single DB connection, using batch
processing.
Args:
An array of {filename: X, sha1: Y}, string X, hex sha1 string Y.
Returns:
The same array with elements updated with elem['found'] = true if
the hash is present in storage, elem['found'] = false if not.
"""
hashlist = [hashutil.hash_to_bytes(elem['sha1']) for elem in hashes]
content_missing = storage.content_missing_per_sha1(hashlist)
missing = [hashutil.hash_to_hex(x) for x in content_missing]
for x in hashes:
x.update({'found': True})
for h in hashes:
if h['sha1'] in missing:
h['found'] = False
return hashes
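# Illustrative usage sketch for lookup_multiple_hashes; file names and sha1
# values are assumed examples.
def _example_lookup_multiple_hashes():
    hashes = [{'filename': 'LICENSE',
               'sha1': '9d1cd3a7b4e1db83b7786b9c74e36b79e53b6e8d'},
              {'filename': 'setup.py',
               'sha1': '2b41c1cd4d0e8a0a2a1e0fd2f1e4c8d4b0b41c1c'}]
    # each element gains a 'found' boolean, computed with a single
    # batched storage query
    return lookup_multiple_hashes(hashes)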
def lookup_expression(expression, last_sha1, per_page):
"""Lookup expression in raw content.
Args:
expression (str): An expression to lookup through raw indexed
content
last_sha1 (str): Last sha1 seen
per_page (int): Number of results per page
Yields:
ctags whose content match the expression
"""
limit = min(per_page, MAX_LIMIT)
ctags = idx_storage.content_ctags_search(expression,
last_sha1=last_sha1,
limit=limit)
for ctag in ctags:
ctag = converters.from_swh(ctag, hashess={'id'})
ctag['sha1'] = ctag['id']
ctag.pop('id')
yield ctag
def lookup_hash(q):
"""Checks if the storage contains a given content checksum
Args: query string of the form <hash_algo:hash>
Returns: Dict with key found containing the hash info if the
hash is present, None if not.
"""
algo, hash = query.parse_hash(q)
found = _first_element(storage.content_find({algo: hash}))
return {'found': converters.from_content(found),
'algo': algo}
def search_hash(q):
"""Checks if the storage contains a given content checksum
Args: query string of the form <hash_algo:hash>
Returns: Dict with key found to True or False, according to
whether the checksum is present or not
"""
algo, hash = query.parse_hash(q)
found = _first_element(storage.content_find({algo: hash}))
return {'found': found is not None}
def _lookup_content_sha1(q):
"""Given a possible input, query for the content's sha1.
Args:
q: query string of the form <hash_algo:hash>
Returns:
binary sha1 if found or None
"""
algo, hash = query.parse_hash(q)
if algo != 'sha1':
hashes = _first_element(storage.content_find({algo: hash}))
if not hashes:
return None
return hashes['sha1']
return hash
def lookup_content_ctags(q):
"""Return ctags information from a specified content.
Args:
q: query string of the form <hash_algo:hash>
Yields:
ctags information (dict) list if the content is found.
"""
sha1 = _lookup_content_sha1(q)
if not sha1:
return None
ctags = list(idx_storage.content_ctags_get([sha1]))
if not ctags:
return None
for ctag in ctags:
yield converters.from_swh(ctag, hashess={'id'})
def lookup_content_filetype(q):
"""Return filetype information from a specified content.
Args:
q: query string of the form <hash_algo:hash>
Returns:
filetype information (dict) if the content is found, None otherwise.
"""
sha1 = _lookup_content_sha1(q)
if not sha1:
return None
filetype = _first_element(list(idx_storage.content_mimetype_get([sha1])))
if not filetype:
return None
return converters.from_filetype(filetype)
def lookup_content_language(q):
"""Return language information from a specified content.
Args:
q: query string of the form <hash_algo:hash>
Returns:
language information (dict) if the content is found, None otherwise.
"""
sha1 = _lookup_content_sha1(q)
if not sha1:
return None
lang = _first_element(list(idx_storage.content_language_get([sha1])))
if not lang:
return None
return converters.from_swh(lang, hashess={'id'})
def lookup_content_license(q):
"""Return license information from a specified content.
Args:
q: query string of the form <hash_algo:hash>
Returns:
license information (dict) if the content is found, None otherwise.
"""
sha1 = _lookup_content_sha1(q)
if not sha1:
return None
lic = _first_element(idx_storage.content_fossology_license_get([sha1]))
if not lic:
return None
return converters.from_swh({'id': sha1, 'facts': lic[sha1]},
hashess={'id'})
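# Illustrative usage sketch for the content lookup helpers; the query string
# below uses an assumed example sha1.
def _example_lookup_content_license():
    # query strings take the form '<hash_algo>:<hash>'; non-sha1 algorithms
    # are first resolved to a sha1 through storage.content_find
    return lookup_content_license(
        'sha1:9d1cd3a7b4e1db83b7786b9c74e36b79e53b6e8d')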
def lookup_origin(origin):
"""Return information about the origin matching dict origin.
Args:
origin: origin's dict with keys either 'id' or 'url'
Returns:
origin information as dict.
"""
origin_info = storage.origin_get(origin)
if not origin_info:
msg = 'Origin %s not found!' % \
(origin.get('id') or origin['url'])
raise NotFoundExc(msg)
return converters.from_origin(origin_info)
def lookup_origins(origin_from=1, origin_count=100):
"""Get list of archived software origins in a paginated way.
Origins are sorted by id before returning them
Args:
origin_from (int): The minimum id of the origins to return
origin_count (int): The maximum number of origins to return
Yields:
origins information as dicts
"""
origins = storage.origin_get_range(origin_from, origin_count)
return map(converters.from_origin, origins)
def search_origin(url_pattern, offset=0, limit=50, regexp=False,
with_visit=False):
"""Search for origins whose urls contain a provided string pattern
or match a provided regular expression.
Args:
url_pattern: the string pattern to search for in origin urls
offset: number of found origins to skip before returning results
limit: the maximum number of found origins to return
Returns:
list of origin information as dict.
"""
origins = storage.origin_search(url_pattern, offset, limit, regexp,
with_visit)
return map(converters.from_origin, origins)
def search_origin_metadata(fulltext, limit=50):
"""Search for origins whose metadata match a provided string pattern.
Args:
fulltext: the string pattern to search for in origin metadata
limit: the maximum number of found origins to return
Returns:
list of origin metadata as dict.
"""
matches = idx_storage.origin_intrinsic_metadata_search_fulltext(
conjunction=[fulltext], limit=limit)
results = []
for match in matches:
match['from_revision'] = hashutil.hash_to_hex(match['from_revision'])
result = converters.from_origin(
storage.origin_get({'url': match.pop('origin_url')}))
result['metadata'] = match
results.append(result)
return results
def lookup_origin_intrinsic_metadata(origin_dict):
"""Return intrinsic metadata for origin whose origin matches given
origin.
Args:
origin_dict: origin's dict with keys ('type' AND 'url')
Returns:
origin metadata.
"""
origin_info = storage.origin_get(origin_dict)
if not origin_info:
msg = 'Origin with type %s and url %s not found!' % \
(origin_dict['type'], origin_dict['url'])
raise NotFoundExc(msg)
origins = [origin_info['url']]
match = _first_element(
idx_storage.origin_intrinsic_metadata_get(origins))
result = {}
if match:
result = match['metadata']
return result
-def lookup_person(person_id):
- """Return information about the person with id person_id.
-
- Args:
- person_id as string
-
- Returns:
- person information as dict.
-
- Raises:
- NotFoundExc if there is no person with the provided id.
-
- """
- person = _first_element(storage.person_get([int(person_id)]))
- if not person:
- raise NotFoundExc('Person with id %s not found' % person_id)
- return converters.from_person(person)
-
-
def _to_sha1_bin(sha1_hex):
_, sha1_git_bin = query.parse_hash_with_algorithms_or_throws(
sha1_hex,
['sha1'], # HACK: sha1_git really
'Only sha1_git is supported.')
return sha1_git_bin
def _check_directory_exists(sha1_git, sha1_git_bin):
if len(list(storage.directory_missing([sha1_git_bin]))):
raise NotFoundExc('Directory with sha1_git %s not found' % sha1_git)
def lookup_directory(sha1_git):
"""Return information about the directory with id sha1_git.
Args:
sha1_git as string
Returns:
directory information as dict.
"""
empty_dir_sha1 = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
if sha1_git == empty_dir_sha1:
return []
sha1_git_bin = _to_sha1_bin(sha1_git)
_check_directory_exists(sha1_git, sha1_git_bin)
directory_entries = storage.directory_ls(sha1_git_bin)
return map(converters.from_directory_entry, directory_entries)
def lookup_directory_with_path(sha1_git, path_string):
"""Return directory information for entry with path path_string w.r.t.
root directory pointed by directory_sha1_git
Args:
- directory_sha1_git: sha1_git corresponding to the directory
to which we append paths to (hopefully) find the entry
- the relative path to the entry starting from the directory pointed by
directory_sha1_git
Raises:
NotFoundExc if the directory entry is not found
"""
sha1_git_bin = _to_sha1_bin(sha1_git)
_check_directory_exists(sha1_git, sha1_git_bin)
paths = path_string.strip(os.path.sep).split(os.path.sep)
queried_dir = storage.directory_entry_get_by_path(
sha1_git_bin, list(map(lambda p: p.encode('utf-8'), paths)))
if not queried_dir:
raise NotFoundExc(('Directory entry with path %s from %s not found') %
(path_string, sha1_git))
return converters.from_directory_entry(queried_dir)
def lookup_release(release_sha1_git):
"""Return information about the release with sha1 release_sha1_git.
Args:
release_sha1_git: The release's sha1 as hexadecimal
Returns:
Release information as dict.
Raises:
ValueError if the identifier provided is not of sha1 nature.
"""
sha1_git_bin = _to_sha1_bin(release_sha1_git)
release = _first_element(storage.release_get([sha1_git_bin]))
if not release:
raise NotFoundExc('Release with sha1_git %s not found.'
% release_sha1_git)
return converters.from_release(release)
def lookup_release_multiple(sha1_git_list):
"""Return information about the revisions identified with
their sha1_git identifiers.
Args:
sha1_git_list: A list of revision sha1_git identifiers
Returns:
Release information as dict.
Raises:
ValueError if the identifier provided is not of sha1 nature.
"""
sha1_bin_list = (_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list)
releases = storage.release_get(sha1_bin_list) or []
return (converters.from_release(r) for r in releases)
def lookup_revision(rev_sha1_git):
"""Return information about the revision with sha1 revision_sha1_git.
Args:
revision_sha1_git: The revision's sha1 as hexadecimal
Returns:
Revision information as dict.
Raises:
ValueError if the identifier provided is not of sha1 nature.
NotFoundExc if there is no revision with the provided sha1_git.
"""
sha1_git_bin = _to_sha1_bin(rev_sha1_git)
revision = _first_element(storage.revision_get([sha1_git_bin]))
if not revision:
raise NotFoundExc('Revision with sha1_git %s not found.'
% rev_sha1_git)
return converters.from_revision(revision)
def lookup_revision_multiple(sha1_git_list):
"""Return information about the revisions identified with
their sha1_git identifiers.
Args:
sha1_git_list: A list of revision sha1_git identifiers
Returns:
Generator of revisions information as dict.
Raises:
ValueError if the identifier provided is not of sha1 nature.
"""
sha1_bin_list = (_to_sha1_bin(sha1_git) for sha1_git in sha1_git_list)
revisions = storage.revision_get(sha1_bin_list) or []
return (converters.from_revision(r) for r in revisions)
def lookup_revision_message(rev_sha1_git):
"""Return the raw message of the revision with sha1 revision_sha1_git.
Args:
revision_sha1_git: The revision's sha1 as hexadecimal
Returns:
Decoded revision message as dict {'message': <the_message>}
Raises:
ValueError if the identifier provided is not of sha1 nature.
NotFoundExc if the revision is not found, or if it has no message
"""
sha1_git_bin = _to_sha1_bin(rev_sha1_git)
revision = _first_element(storage.revision_get([sha1_git_bin]))
if not revision:
raise NotFoundExc('Revision with sha1_git %s not found.'
% rev_sha1_git)
if 'message' not in revision:
raise NotFoundExc('No message for revision with sha1_git %s.'
% rev_sha1_git)
res = {'message': revision['message']}
return res
def _lookup_revision_id_by(origin, branch_name, timestamp):
def _get_snapshot_branch(snapshot, branch_name):
snapshot = lookup_snapshot(snapshot,
branches_from=branch_name,
branches_count=10)
branch = None
if branch_name in snapshot['branches']:
branch = snapshot['branches'][branch_name]
return branch
if isinstance(origin, int):
origin = {'id': origin}
elif isinstance(origin, str):
origin = {'url': origin}
else:
raise TypeError('"origin" must be an int or a string.')
visit = get_origin_visit(origin, visit_ts=timestamp)
branch = _get_snapshot_branch(visit['snapshot'], branch_name)
rev_id = None
if branch and branch['target_type'] == 'revision':
rev_id = branch['target']
elif branch and branch['target_type'] == 'alias':
branch = _get_snapshot_branch(visit['snapshot'], branch['target'])
if branch and branch['target_type'] == 'revision':
rev_id = branch['target']
if not rev_id:
raise NotFoundExc('Revision for origin %s and branch %s not found.'
% (origin.get('url') or origin['id'], branch_name))
return rev_id
def lookup_revision_by(origin,
branch_name='HEAD',
timestamp=None):
"""Lookup revision by origin, snapshot branch name and visit timestamp.
If branch_name is not provided, lookup using 'HEAD' as default.
If timestamp is not provided, use the most recent.
Args:
origin (Union[int,str]): origin of the revision
branch_name (str): snapshot branch name
timestamp (str/int): origin visit time frame
Returns:
dict: The revision matching the criterions
Raises:
NotFoundExc if no revision corresponds to the criterion
"""
rev_id = _lookup_revision_id_by(origin, branch_name, timestamp)
return lookup_revision(rev_id)
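# Illustrative usage sketch for lookup_revision_by; the origin url is an
# assumed example value.
def _example_lookup_revision_by():
    # resolve the revision targeted by the 'HEAD' branch in the most
    # recent visit of the origin
    return lookup_revision_by('https://github.com/python/cpython')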
def lookup_revision_log(rev_sha1_git, limit):
"""Lookup revision log by revision id.
Args:
rev_sha1_git (str): The revision's sha1 as hexadecimal
limit (int): the maximum number of revisions returned
Returns:
list: Revision log as list of revision dicts
Raises:
ValueError: if the identifier provided is not of sha1 nature.
NotFoundExc: if there is no revision with the provided sha1_git.
"""
lookup_revision(rev_sha1_git)
sha1_git_bin = _to_sha1_bin(rev_sha1_git)
revision_entries = storage.revision_log([sha1_git_bin], limit)
return map(converters.from_revision, revision_entries)
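# Illustrative usage sketch for lookup_revision_log; the revision id is an
# assumed example value.
def _example_lookup_revision_log():
    # walk back at most 25 revisions starting from the given revision
    log = lookup_revision_log('aafb16d69fd30ff58afdd69036a26047f3aebdc6', 25)
    return list(log)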
def lookup_revision_log_by(origin, branch_name, timestamp, limit):
"""Lookup revision by origin, snapshot branch name and visit timestamp.
Args:
origin (Union[int,str]): origin of the revision
branch_name (str): snapshot branch
timestamp (str/int): origin visit time frame
limit (int): the maximum number of revisions returned
Returns:
list: Revision log as list of revision dicts
Raises:
NotFoundExc: if no revision corresponds to the criterion
"""
rev_id = _lookup_revision_id_by(origin, branch_name, timestamp)
return lookup_revision_log(rev_id, limit)
def lookup_revision_with_context_by(origin, branch_name, timestamp,
sha1_git, limit=100):
"""Return information about revision sha1_git, limited to the
sub-graph of all transitive parents of sha1_git_root.
sha1_git_root being resolved through the lookup of a revision by origin,
branch_name and ts.
In other words, sha1_git is an ancestor of sha1_git_root.
Args:
- origin: origin of the revision.
- branch_name: revision's branch.
- timestamp: revision's time frame.
- sha1_git: one of sha1_git_root's ancestors.
- limit: limit the lookup to 100 revisions back.
Returns:
Pair of (root_revision, revision).
Information on sha1_git if it is an ancestor of sha1_git_root
including children leading to sha1_git_root
Raises:
- BadInputExc in case of unknown algo_hash or bad hash.
- NotFoundExc if either revision is not found or if sha1_git is not an
ancestor of sha1_git_root.
"""
rev_root_id = _lookup_revision_id_by(origin, branch_name, timestamp)
rev_root_id_bin = hashutil.hash_to_bytes(rev_root_id)
rev_root = _first_element(storage.revision_get([rev_root_id_bin]))
return (converters.from_revision(rev_root),
lookup_revision_with_context(rev_root, sha1_git, limit))
def lookup_revision_with_context(sha1_git_root, sha1_git, limit=100):
"""Return information about revision sha1_git, limited to the
sub-graph of all transitive parents of sha1_git_root.
In other words, sha1_git is an ancestor of sha1_git_root.
Args:
sha1_git_root: latest revision. The type is either a sha1 (as a hex
string) or a non-converted dict.
sha1_git: one of sha1_git_root's ancestors
limit: limit the lookup to 100 revisions back
Returns:
Information on sha1_git if it is an ancestor of sha1_git_root
including children leading to sha1_git_root
Raises:
BadInputExc in case of unknown algo_hash or bad hash
NotFoundExc if either revision is not found or if sha1_git is not an
ancestor of sha1_git_root
"""
sha1_git_bin = _to_sha1_bin(sha1_git)
revision = _first_element(storage.revision_get([sha1_git_bin]))
if not revision:
raise NotFoundExc('Revision %s not found' % sha1_git)
if isinstance(sha1_git_root, str):
sha1_git_root_bin = _to_sha1_bin(sha1_git_root)
revision_root = _first_element(storage.revision_get([sha1_git_root_bin])) # noqa
if not revision_root:
raise NotFoundExc('Revision root %s not found' % sha1_git_root)
else:
sha1_git_root_bin = sha1_git_root['id']
revision_log = storage.revision_log([sha1_git_root_bin], limit)
parents = {}
children = defaultdict(list)
for rev in revision_log:
rev_id = rev['id']
parents[rev_id] = []
for parent_id in rev['parents']:
parents[rev_id].append(parent_id)
children[parent_id].append(rev_id)
if revision['id'] not in parents:
raise NotFoundExc('Revision %s is not an ancestor of %s' %
(sha1_git, sha1_git_root))
revision['children'] = children[revision['id']]
return converters.from_revision(revision)
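# Illustrative usage sketch (not part of the original module); both identifiers
# below are hypothetical hex sha1_git placeholders:
#
#   info = lookup_revision_with_context('<sha1_git_root>', '<sha1_git>')
#   info['children']  # revisions of the sub-graph leading back to the root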
def lookup_directory_with_revision(sha1_git, dir_path=None, with_data=False):
"""Return information on directory pointed by revision with sha1_git.
If dir_path is not provided, display top level directory.
Otherwise, display the directory pointed by dir_path (if it exists).
Args:
sha1_git: revision's hash.
dir_path: optional directory pointed to by that revision.
with_data: boolean indicating whether to retrieve the raw data if the path
resolves to a content. Defaults to False (for the API)
Returns:
Information on the directory pointed to by that revision.
Raises:
BadInputExc in case of unknown algo_hash or bad hash.
NotFoundExc either if the revision is not found or the path referenced
does not exist.
NotImplementedError in case dir_path exists but does not reference a
'dir' or 'file' type.
"""
sha1_git_bin = _to_sha1_bin(sha1_git)
revision = _first_element(storage.revision_get([sha1_git_bin]))
if not revision:
raise NotFoundExc('Revision %s not found' % sha1_git)
dir_sha1_git_bin = revision['directory']
if dir_path:
paths = dir_path.strip(os.path.sep).split(os.path.sep)
entity = storage.directory_entry_get_by_path(
dir_sha1_git_bin, list(map(lambda p: p.encode('utf-8'), paths)))
if not entity:
raise NotFoundExc(
"Directory or File '%s' pointed to by revision %s not found"
% (dir_path, sha1_git))
else:
entity = {'type': 'dir', 'target': dir_sha1_git_bin}
if entity['type'] == 'dir':
directory_entries = storage.directory_ls(entity['target']) or []
return {'type': 'dir',
'path': '.' if not dir_path else dir_path,
'revision': sha1_git,
'content': list(map(converters.from_directory_entry,
directory_entries))}
elif entity['type'] == 'file': # content
content = _first_element(
storage.content_find({'sha1_git': entity['target']}))
if not content:
raise NotFoundExc('Content not found for revision %s'
% sha1_git)
if with_data:
c = _first_element(storage.content_get([content['sha1']]))
content['data'] = c['data']
return {'type': 'file',
'path': '.' if not dir_path else dir_path,
'revision': sha1_git,
'content': converters.from_content(content)}
elif entity['type'] == 'rev': # revision
revision = next(storage.revision_get([entity['target']]))
return {'type': 'rev',
'path': '.' if not dir_path else dir_path,
'revision': sha1_git,
'content': converters.from_revision(revision)}
else:
raise NotImplementedError('Entity of type %s not implemented.'
% entity['type'])
def lookup_content(q):
"""Lookup the content designed by q.
Args:
q: The release's sha1 as hexadecimal
Raises:
NotFoundExc if the requested content is not found
"""
algo, hash = query.parse_hash(q)
c = _first_element(storage.content_find({algo: hash}))
if not c:
raise NotFoundExc('Content with %s checksum equals to %s not found!' %
(algo, hashutil.hash_to_hex(hash)))
return converters.from_content(c)
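# Illustrative usage sketch (not part of the original module); the checksum is
# a sample value:
#
#   c = lookup_content('sha1_git:fe95a46679d128ff167b7c55df5d02356c5a1ae1')
#   c['checksums']['sha1']  # other checksums are exposed the same way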
def lookup_content_raw(q):
"""Lookup the content defined by q.
Args:
q: query string of the form <hash_algo:hash>
Returns:
dict with 'sha1' and 'data' keys.
data representing its raw data decoded.
Raises:
NotFoundExc if the requested content is not found or
if the content bytes are not available in the storage
"""
c = lookup_content(q)
content_sha1_bytes = hashutil.hash_to_bytes(c['checksums']['sha1'])
content = _first_element(storage.content_get([content_sha1_bytes]))
if not content:
algo, hash = query.parse_hash(q)
raise NotFoundExc('Bytes of content with %s checksum equals to %s '
'are not available!' %
(algo, hashutil.hash_to_hex(hash)))
return converters.from_content(content)
def stat_counters():
"""Return the stat counters for Software Heritage
Returns:
A dict mapping textual labels to integer values.
"""
return storage.stat_counters()
def _lookup_origin_visits(origin_url, last_visit=None, limit=10):
"""Yields the origin origins' visits.
Args:
origin_url (str): origin to list visits for
last_visit (int): last visit to lookup from
limit (int): maximum number of visits to return
Yields:
Dictionaries of origin_visit for that origin
"""
limit = min(limit, MAX_LIMIT)
for visit in storage.origin_visit_get(
origin_url, last_visit=last_visit, limit=limit):
visit['origin'] = origin_url
yield visit
def lookup_origin_visits(origin, last_visit=None, per_page=10):
"""Yields the origin origins' visits.
Args:
origin: origin to list visits for
Yields:
Dictionaries of origin_visit for that origin
"""
visits = _lookup_origin_visits(origin, last_visit=last_visit,
limit=per_page)
for visit in visits:
yield converters.from_origin_visit(visit)
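# Illustrative usage sketch (not part of the original module); the origin URL
# is a hypothetical example:
#
#   for visit in lookup_origin_visits('https://example.org/repo.git',
#                                     per_page=10):
#       visit['origin'], visit['date']  # one converted origin_visit per loop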
def lookup_origin_visit_latest(origin_url, require_snapshot):
"""Return the origin's latest visit
Args:
origin_url (str): origin to list visits for
require_snapshot (bool): filter out visits without a snapshot
Returns:
dict: The origin_visit concerned
"""
visit = storage.origin_visit_get_latest(
origin_url, require_snapshot=require_snapshot)
if isinstance(visit['origin'], int):
# soon-to-be-legacy origin ids
visit['origin'] = storage.origin_get({'id': visit['origin']})['url']
return converters.from_origin_visit(visit)
def lookup_origin_visit(origin_url, visit_id):
"""Return information about visit visit_id with origin origin.
Args:
origin (str): origin concerned by the visit
visit_id: the visit identifier to lookup
Yields:
The dict origin_visit concerned
"""
visit = storage.origin_visit_get_by(origin_url, visit_id)
if not visit:
raise NotFoundExc('Origin %s or its visit '
'with id %s not found!' % (origin_url, visit_id))
visit['origin'] = origin_url
return converters.from_origin_visit(visit)
def lookup_snapshot_size(snapshot_id):
"""Count the number of branches in the snapshot with the given id
Args:
snapshot_id (str): sha1 identifier of the snapshot
Returns:
dict: A dict whose keys are the target types of the branches and
whose values are their corresponding counts
"""
snapshot_id_bin = _to_sha1_bin(snapshot_id)
snapshot_size = storage.snapshot_count_branches(snapshot_id_bin)
if 'revision' not in snapshot_size:
snapshot_size['revision'] = 0
if 'release' not in snapshot_size:
snapshot_size['release'] = 0
return snapshot_size
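# Illustrative usage sketch (not part of the original module); the snapshot id
# is a hypothetical hex value:
#
#   sizes = lookup_snapshot_size('<snapshot_sha1>')
#   sizes['revision'], sizes['release']  # both keys are always present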
def lookup_snapshot(snapshot_id, branches_from='', branches_count=1000,
target_types=None):
"""Return information about a snapshot, aka the list of named
branches found during a specific visit of an origin.
Args:
snapshot_id (str): sha1 identifier of the snapshot
branches_from (str): optional parameter used to skip branches
whose name is lexicographically lower than it before returning them
branches_count (int): optional parameter used to restrict
the number of returned branches
target_types (list): optional parameter used to filter the
target types of branch to return (possible values that can be
contained in that list are `'content', 'directory',
'revision', 'release', 'snapshot', 'alias'`)
Returns:
A dict filled with the snapshot content.
"""
snapshot_id_bin = _to_sha1_bin(snapshot_id)
snapshot = storage.snapshot_get_branches(snapshot_id_bin,
branches_from.encode(),
branches_count, target_types)
if not snapshot:
raise NotFoundExc('Snapshot with id %s not found!' % snapshot_id)
return converters.from_snapshot(snapshot)
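# Illustrative pagination sketch (not part of the original module); the snapshot
# id is a hypothetical hex value:
#
#   page = lookup_snapshot('<snapshot_sha1>', branches_count=100)
#   # request the next page by passing the name of the first branch that was
#   # not returned as branches_from in a follow-up call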
def lookup_latest_origin_snapshot(origin, allowed_statuses=None):
"""Return information about the latest snapshot of an origin.
.. warning:: At most 1000 branches contained in the snapshot
will be returned for performance reasons.
Args:
origin: URL or integer identifier of the origin
allowed_statuses: list of visit statuses considered
to find the latest snapshot for the visit. For instance,
``allowed_statuses=['full']`` will only consider visits that
have successfully run to completion.
Returns:
A dict filled with the snapshot content.
"""
snapshot = storage.snapshot_get_latest(origin, allowed_statuses)
return converters.from_snapshot(snapshot)
def lookup_revision_through(revision, limit=100):
"""Retrieve a revision from the criterion stored in revision dictionary.
Args:
revision: Dictionary of criterion to lookup the revision with.
Here are the supported combination of possible values:
- origin_id, branch_name, ts, sha1_git
- origin_id, branch_name, ts
- origin_url, branch_name, ts, sha1_git
- origin_url, branch_name, ts
- sha1_git_root, sha1_git
- sha1_git
Returns:
The revision if it is found, None otherwise.
"""
if (
'origin_url' in revision and
'branch_name' in revision and
'ts' in revision and
'sha1_git' in revision):
return lookup_revision_with_context_by(revision['origin_url'],
revision['branch_name'],
revision['ts'],
revision['sha1_git'],
limit)
if (
'origin_id' in revision and
'branch_name' in revision and
'ts' in revision and
'sha1_git' in revision):
return lookup_revision_with_context_by(revision['origin_id'],
revision['branch_name'],
revision['ts'],
revision['sha1_git'],
limit)
if (
'origin_url' in revision and
'branch_name' in revision and
'ts' in revision):
return lookup_revision_by(revision['origin_url'],
revision['branch_name'],
revision['ts'])
if (
'origin_id' in revision and
'branch_name' in revision and
'ts' in revision):
return lookup_revision_by(revision['origin_id'],
revision['branch_name'],
revision['ts'])
if (
'sha1_git_root' in revision and
'sha1_git' in revision):
return lookup_revision_with_context(revision['sha1_git_root'],
revision['sha1_git'],
limit)
if 'sha1_git' in revision:
return lookup_revision(revision['sha1_git'])
# this should not happen
raise NotImplementedError('Should not happen!')
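# Illustrative criteria dictionaries (not part of the original module); values
# are hypothetical:
#
#   lookup_revision_through({'sha1_git': '<revision_sha1>'})
#   lookup_revision_through({'origin_url': 'https://example.org/repo.git',
#                            'branch_name': 'HEAD', 'ts': None})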
def lookup_directory_through_revision(revision, path=None,
limit=100, with_data=False):
"""Retrieve the directory information from the revision.
Args:
revision: dictionary of criteria representing a revision to lookup
path: directory's path to lookup.
limit: optional query parameter to limit the revisions log (defaults to
100). For now, note that this limit could impede the transitivity
conclusion about sha1_git not being an ancestor of the root revision.
with_data: whether to retrieve the content's raw data if the path
resolves to a content.
Returns:
The directory pointed to by the revision criteria at path.
"""
rev = lookup_revision_through(revision, limit)
if not rev:
raise NotFoundExc('Revision with criterion %s not found!' % revision)
return (rev['id'],
lookup_directory_with_revision(rev['id'], path, with_data))
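# Illustrative usage sketch (not part of the original module); values are
# hypothetical:
#
#   rev_id, dir_info = lookup_directory_through_revision(
#       {'sha1_git': '<revision_sha1>'}, path='docs')
#   dir_info['type'], dir_info['content']  # directory listing or file content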
def vault_cook(obj_type, obj_id, email=None):
"""Cook a vault bundle.
"""
return vault.cook(obj_type, obj_id, email=email)
def vault_fetch(obj_type, obj_id):
"""Fetch a vault bundle.
"""
return vault.fetch(obj_type, obj_id)
def vault_progress(obj_type, obj_id):
"""Get the current progress of a vault bundle.
"""
return vault.progress(obj_type, obj_id)
def diff_revision(rev_id):
"""Get the list of file changes (insertion / deletion / modification /
renaming) for a particular revision.
"""
rev_sha1_git_bin = _to_sha1_bin(rev_id)
changes = diff.diff_revision(storage, rev_sha1_git_bin,
track_renaming=True)
for change in changes:
change['from'] = converters.from_directory_entry(change['from'])
change['to'] = converters.from_directory_entry(change['to'])
if change['from_path']:
change['from_path'] = change['from_path'].decode('utf-8')
if change['to_path']:
change['to_path'] = change['to_path'].decode('utf-8')
return changes
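# Illustrative usage sketch (not part of the original module); the revision id
# is a hypothetical hex value:
#
#   for change in diff_revision('<revision_sha1>'):
#       change['from_path'], change['to_path']  # decoded paths, may be None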
class _RevisionsWalkerProxy(object):
"""
Proxy class wrapping a revisions walker iterator from
swh-storage and performing needed conversions.
"""
def __init__(self, rev_walker_type, rev_start, *args, **kwargs):
rev_start_bin = hashutil.hash_to_bytes(rev_start)
self.revisions_walker = \
revisions_walker.get_revisions_walker(rev_walker_type,
storage,
rev_start_bin,
*args, **kwargs)
def export_state(self):
return self.revisions_walker.export_state()
def __next__(self):
return converters.from_revision(next(self.revisions_walker))
def __iter__(self):
return self
def get_revisions_walker(rev_walker_type, rev_start, *args, **kwargs):
"""
Utility function to instantiate a revisions walker of a given type,
see :mod:`swh.storage.algos.revisions_walker`.
Args:
rev_walker_type (str): the type of revisions walker to return,
possible values are: ``committer_date``, ``dfs``, ``dfs_post``,
``bfs`` and ``path``
rev_start (str): hexadecimal representation of a revision identifier
args (list): positional arguments to pass to the revisions walker
constructor
kwargs (dict): keyword arguments to pass to the revisions walker
constructor
"""
# first check if the provided revision is valid
lookup_revision(rev_start)
return _RevisionsWalkerProxy(rev_walker_type, rev_start, *args, **kwargs)
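# Illustrative usage sketch (not part of the original module); the revision id
# is a hypothetical hex value:
#
#   walker = get_revisions_walker('bfs', '<revision_sha1>')
#   next(walker)               # a revision dict, already converted for the API
#   state = walker.export_state()  # opaque state that can be used to resume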
diff --git a/swh/web/tests/api/test_apidoc.py b/swh/web/tests/api/test_apidoc.py
index c9080036..9d1fcc52 100644
--- a/swh/web/tests/api/test_apidoc.py
+++ b/swh/web/tests/api/test_apidoc.py
@@ -1,279 +1,265 @@
# Copyright (C) 2015-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from rest_framework.test import APITestCase
from rest_framework.response import Response
from swh.web.api.apidoc import api_doc, _parse_httpdomain_doc
from swh.web.api.apiurls import api_route
from swh.web.tests.testcase import WebTestCase
# flake8: noqa
httpdomain_doc = """
.. http:get:: /api/1/revision/(sha1_git)/
Get information about a revision in the archive.
Revisions are identified by **sha1** checksums, compatible with Git commit identifiers.
See :func:`swh.model.identifiers.revision_identifier` in our data model module for details
about how they are computed.
:param string sha1_git: hexadecimal representation of the revision **sha1_git** identifier
:reqheader Accept: the requested response content type,
either ``application/json`` (default) or ``application/yaml``
:resheader Content-Type: this depends on :http:header:`Accept` header of request
:>json object author: information about the author of the revision
- :>json string author_url: link to :http:get:`/api/1/person/(person_id)/` to get
- information about the author of the revision
:>json object committer: information about the committer of the revision
- :>json string committer_url: link to :http:get:`/api/1/person/(person_id)/` to get
- information about the committer of the revision
:>json string committer_date: ISO representation of the commit date (in UTC)
:>json string date: ISO representation of the revision date (in UTC)
:>json string directory: the unique identifier that revision points to
:>json string directory_url: link to :http:get:`/api/1/directory/(sha1_git)/[(path)/]`
to get information about the directory associated to the revision
:>json string id: the revision unique identifier
:>json boolean merge: whether or not the revision corresponds to a merge commit
:>json string message: the message associated to the revision
:>json array parents: the parents of the revision, i.e. the previous revisions
that head directly to it, each entry of that array contains an unique parent
revision identifier but also a link to :http:get:`/api/1/revision/(sha1_git)/`
to get more information about it
:>json string type: the type of the revision
**Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options`
:statuscode 200: no error
:statuscode 400: an invalid **sha1_git** value has been provided
:statuscode 404: requested revision can not be found in the archive
**Request:**
.. parsed-literal::
$ curl -i :swh_web_api:`revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/`
"""
class APIDocTestCase(WebTestCase, APITestCase):
def test_apidoc_nodoc_failure(self):
with self.assertRaises(Exception):
@api_doc('/my/nodoc/url/')
def apidoc_nodoc_tester(request, arga=0, argb=0):
return Response(arga + argb)
@staticmethod
@api_route(r'/some/(?P<myarg>[0-9]+)/(?P<myotherarg>[0-9]+)/',
'some-doc-route')
@api_doc('/some/doc/route/')
def apidoc_route(request, myarg, myotherarg, akw=0):
"""
Sample doc
"""
return {'result': int(myarg) + int(myotherarg) + akw}
def test_apidoc_route_doc(self):
# when
rv = self.client.get('/api/1/some/doc/route/')
# then
self.assertEqual(rv.status_code, 200, rv.data)
self.assertTemplateUsed('api/apidoc.html')
def test_apidoc_route_fn(self):
# when
rv = self.client.get('/api/1/some/1/1/')
# then
self.assertEqual(rv.status_code, 200, rv.data)
@staticmethod
@api_route(r'/some/full/(?P<myarg>[0-9]+)/(?P<myotherarg>[0-9]+)/',
'some-complete-doc-route')
@api_doc('/some/complete/doc/route/')
def apidoc_full_stack(request, myarg, myotherarg, akw=0):
"""
Sample doc
"""
return {'result': int(myarg) + int(myotherarg) + akw}
def test_apidoc_full_stack_doc(self):
# when
rv = self.client.get('/api/1/some/complete/doc/route/')
# then
self.assertEqual(rv.status_code, 200, rv.data)
self.assertTemplateUsed('api/apidoc.html')
def test_apidoc_full_stack_fn(self):
# when
rv = self.client.get('/api/1/some/full/1/1/')
# then
self.assertEqual(rv.status_code, 200, rv.data)
def test_api_doc_parse_httpdomain(self):
doc_data = {
'description': '',
'urls': [],
'args': [],
'params': [],
'resheaders': [],
'reqheaders': [],
'return_type': '',
'returns': [],
'status_codes': [],
'examples': []
}
_parse_httpdomain_doc(httpdomain_doc, doc_data)
expected_urls = [{
'rule': '/api/1/revision/ **\\(sha1_git\\)** /',
'methods': ['GET', 'HEAD', 'OPTIONS']
}]
self.assertIn('urls', doc_data)
self.assertEqual(doc_data['urls'], expected_urls)
expected_description = 'Get information about a revision in the archive. \
Revisions are identified by **sha1** checksums, compatible with Git commit \
identifiers. See **swh.model.identifiers.revision_identifier** in our data \
model module for details about how they are computed.'
self.assertIn('description', doc_data)
self.assertEqual(doc_data['description'], expected_description)
expected_args = [{
'name': 'sha1_git',
'type': 'string',
'doc': 'hexadecimal representation of the revision **sha1_git** identifier'
}]
self.assertIn('args', doc_data)
self.assertEqual(doc_data['args'], expected_args)
expected_params = []
self.assertIn('params', doc_data)
self.assertEqual(doc_data['params'], expected_params)
expected_reqheaders = [{
'doc': 'the requested response content type, either ``application/json`` or ``application/yaml``',
'name': 'Accept'
}]
self.assertIn('reqheaders', doc_data)
self.assertEqual(doc_data['reqheaders'], expected_reqheaders)
expected_resheaders = [{
'doc': 'this depends on **Accept** header of request',
'name': 'Content-Type'
}]
self.assertIn('resheaders', doc_data)
self.assertEqual(doc_data['resheaders'], expected_resheaders)
expected_statuscodes = [
{
'code': '200',
'doc': 'no error'
},
{
'code': '400',
'doc': 'an invalid **sha1_git** value has been provided'
},
{
'code': '404',
'doc': 'requested revision can not be found in the archive'
}
]
self.assertIn('status_codes', doc_data)
self.assertEqual(doc_data['status_codes'], expected_statuscodes)
expected_return_type = 'object'
self.assertIn('return_type', doc_data)
self.assertEqual(doc_data['return_type'], expected_return_type)
expected_returns = [
{
'name': 'author',
'type': 'object',
'doc': 'information about the author of the revision'
},
- {
- 'name': 'author_url',
- 'type': 'string',
- 'doc': 'link to `</api/1/person/>`_ to get information about the author of the revision'
- },
{
'name': 'committer',
'type': 'object',
'doc': 'information about the committer of the revision'
},
- {
- 'name': 'committer_url',
- 'type': 'string',
- 'doc': 'link to `</api/1/person/>`_ to get information about the committer of the revision'
- },
{
'name': 'committer_date',
'type': 'string',
'doc': 'ISO representation of the commit date (in UTC)'
},
{
'name': 'date',
'type': 'string',
'doc': 'ISO representation of the revision date (in UTC)'
},
{
'name': 'directory',
'type': 'string',
'doc': 'the unique identifier that revision points to'
},
{
'name': 'directory_url',
'type': 'string',
'doc': 'link to `</api/1/directory/>`_ to get information about the directory associated to the revision'
},
{
'name': 'id',
'type': 'string',
'doc': 'the revision unique identifier'
},
{
'name': 'merge',
'type': 'boolean',
'doc': 'whether or not the revision corresponds to a merge commit'
},
{
'name': 'message',
'type': 'string',
'doc': 'the message associated to the revision'
},
{
'name': 'parents',
'type': 'array',
'doc': 'the parents of the revision, i.e. the previous revisions that head directly to it, each entry of that array contains an unique parent revision identifier but also a link to `</api/1/revision/>`_ to get more information about it'
},
{
'name': 'type',
'type': 'string',
'doc': 'the type of the revision'
}
]
self.assertIn('returns', doc_data)
self.assertEqual(doc_data['returns'], expected_returns)
expected_examples = ['/api/1/revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/']
self.assertIn('examples', doc_data)
self.assertEqual(doc_data['examples'], expected_examples)
diff --git a/swh/web/tests/api/test_utils.py b/swh/web/tests/api/test_utils.py
index 65bf06fd..e9db2c85 100644
--- a/swh/web/tests/api/test_utils.py
+++ b/swh/web/tests/api/test_utils.py
@@ -1,599 +1,588 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from unittest.mock import patch, call
from swh.web.api import utils
from swh.web.tests.testcase import WebTestCase
class UtilsTestCase(WebTestCase):
def setUp(self):
self.maxDiff = None
self.url_map = [dict(rule='/other/<slug>',
methods=set(['GET', 'POST', 'HEAD']),
endpoint='foo'),
dict(rule='/some/old/url/<slug>',
methods=set(['GET', 'POST']),
endpoint='blablafn'),
dict(rule='/other/old/url/<int:id>',
methods=set(['GET', 'HEAD']),
endpoint='bar'),
dict(rule='/other',
methods=set([]),
endpoint=None),
dict(rule='/other2',
methods=set([]),
endpoint=None)]
self.sample_content_hashes = {
'blake2s256': ('791e07fcea240ade6dccd0a9309141673'
'c31242cae9c237cf3855e151abc78e9'),
'sha1': 'dc2830a9e72f23c1dfebef4413003221baa5fb62',
'sha1_git': 'fe95a46679d128ff167b7c55df5d02356c5a1ae1',
'sha256': ('b5c7fe0536f44ef60c8780b6065d30bca74a5cd06'
'd78a4a71ba1ad064770f0c9')
}
def test_filter_field_keys_dict_unknown_keys(self):
# when
actual_res = utils.filter_field_keys(
{'directory': 1, 'file': 2, 'link': 3},
{'directory1', 'file2'})
# then
self.assertEqual(actual_res, {})
def test_filter_field_keys_dict(self):
# when
actual_res = utils.filter_field_keys(
{'directory': 1, 'file': 2, 'link': 3},
{'directory', 'link'})
# then
self.assertEqual(actual_res, {'directory': 1, 'link': 3})
def test_filter_field_keys_list_unknown_keys(self):
# when
actual_res = utils.filter_field_keys(
[{'directory': 1, 'file': 2, 'link': 3},
{'1': 1, '2': 2, 'link': 3}],
{'d'})
# then
self.assertEqual(actual_res, [{}, {}])
def test_filter_field_keys_map(self):
# when
actual_res = utils.filter_field_keys(
map(lambda x: {'i': x['i']+1, 'j': x['j']},
[{'i': 1, 'j': None},
{'i': 2, 'j': None},
{'i': 3, 'j': None}]),
{'i'})
# then
self.assertEqual(list(actual_res), [{'i': 2}, {'i': 3}, {'i': 4}])
def test_filter_field_keys_list(self):
# when
actual_res = utils.filter_field_keys(
[{'directory': 1, 'file': 2, 'link': 3},
{'dir': 1, 'fil': 2, 'lin': 3}],
{'directory', 'dir'})
# then
self.assertEqual(actual_res, [{'directory': 1}, {'dir': 1}])
def test_filter_field_keys_other(self):
# given
input_set = {1, 2}
# when
actual_res = utils.filter_field_keys(input_set, {'a', '1'})
# then
self.assertEqual(actual_res, input_set)
def test_person_to_string(self):
self.assertEqual(utils.person_to_string(dict(name='raboof',
email='foo@bar')),
'raboof <foo@bar>')
def test_enrich_release_0(self):
# when
actual_release = utils.enrich_release({})
# then
self.assertEqual(actual_release, {})
@patch('swh.web.api.utils.reverse')
def test_enrich_release_1(self, mock_django_reverse):
# given
def reverse_test_context(view_name, url_args):
if view_name == 'api-1-content':
id = url_args['q']
return '/api/1/content/%s/' % id
- elif view_name == 'api-1-person':
- id = url_args['person_id']
- return '/api/1/person/%s/' % id
else:
raise ValueError(
'This should not happen so fail if it does.')
mock_django_reverse.side_effect = reverse_test_context
# when
actual_release = utils.enrich_release({
'target': '123',
'target_type': 'content',
'author': {
'id': 100,
'name': 'author release name',
'email': 'author@email',
},
})
# then
self.assertEqual(actual_release, {
'target': '123',
'target_type': 'content',
'target_url': '/api/1/content/sha1_git:123/',
- 'author_url': '/api/1/person/100/',
'author': {
'id': 100,
'name': 'author release name',
'email': 'author@email',
},
})
mock_django_reverse.assert_has_calls([
call('api-1-content', url_args={'q': 'sha1_git:123'}),
- call('api-1-person', url_args={'person_id': 100})
])
@patch('swh.web.api.utils.reverse')
def test_enrich_release_2(self, mock_django_reverse):
# given
mock_django_reverse.return_value = '/api/1/dir/23/'
# when
actual_release = utils.enrich_release({'target': '23',
'target_type': 'directory'})
# then
self.assertEqual(actual_release, {
'target': '23',
'target_type': 'directory',
'target_url': '/api/1/dir/23/'
})
mock_django_reverse.assert_called_once_with('api-1-directory',
url_args={'sha1_git': '23'}) # noqa
@patch('swh.web.api.utils.reverse')
def test_enrich_release_3(self, mock_django_reverse):
# given
mock_django_reverse.return_value = '/api/1/rev/3/'
# when
actual_release = utils.enrich_release({'target': '3',
'target_type': 'revision'})
# then
self.assertEqual(actual_release, {
'target': '3',
'target_type': 'revision',
'target_url': '/api/1/rev/3/'
})
mock_django_reverse.assert_called_once_with('api-1-revision',
url_args={'sha1_git': '3'})
@patch('swh.web.api.utils.reverse')
def test_enrich_release_4(self, mock_django_reverse):
# given
mock_django_reverse.return_value = '/api/1/rev/4/'
# when
actual_release = utils.enrich_release({'target': '4',
'target_type': 'release'})
# then
self.assertEqual(actual_release, {
'target': '4',
'target_type': 'release',
'target_url': '/api/1/rev/4/'
})
mock_django_reverse.assert_called_once_with('api-1-release',
url_args={'sha1_git': '4'})
@patch('swh.web.api.utils.reverse')
def test_enrich_directory_no_type(self, mock_django_reverse):
# when/then
self.assertEqual(utils.enrich_directory({'id': 'dir-id'}),
{'id': 'dir-id'})
# given
mock_django_reverse.return_value = '/api/content/sha1_git:123/'
# when
actual_directory = utils.enrich_directory({
'id': 'dir-id',
'type': 'file',
'target': '123',
})
# then
self.assertEqual(actual_directory, {
'id': 'dir-id',
'type': 'file',
'target': '123',
'target_url': '/api/content/sha1_git:123/',
})
mock_django_reverse.assert_called_once_with(
'api-1-content', url_args={'q': 'sha1_git:123'})
@patch('swh.web.api.utils.reverse')
def test_enrich_directory_with_context_and_type_file(
self, mock_django_reverse,
):
# given
mock_django_reverse.return_value = '/api/content/sha1_git:123/'
# when
actual_directory = utils.enrich_directory({
'id': 'dir-id',
'type': 'file',
'name': 'hy',
'target': '789',
}, context_url='/api/revision/revsha1/directory/prefix/path/')
# then
self.assertEqual(actual_directory, {
'id': 'dir-id',
'type': 'file',
'name': 'hy',
'target': '789',
'target_url': '/api/content/sha1_git:123/',
'file_url': '/api/revision/revsha1/directory'
'/prefix/path/hy/'
})
mock_django_reverse.assert_called_once_with(
'api-1-content', url_args={'q': 'sha1_git:789'})
@patch('swh.web.api.utils.reverse')
def test_enrich_directory_with_context_and_type_dir(
self, mock_django_reverse,
):
# given
mock_django_reverse.return_value = '/api/directory/456/'
# when
actual_directory = utils.enrich_directory({
'id': 'dir-id',
'type': 'dir',
'name': 'emacs-42',
'target_type': 'file',
'target': '456',
}, context_url='/api/revision/origin/2/directory/some/prefix/path/')
# then
self.assertEqual(actual_directory, {
'id': 'dir-id',
'type': 'dir',
'target_type': 'file',
'name': 'emacs-42',
'target': '456',
'target_url': '/api/directory/456/',
'dir_url': '/api/revision/origin/2/directory'
'/some/prefix/path/emacs-42/'
})
mock_django_reverse.assert_called_once_with('api-1-directory',
url_args={'sha1_git': '456'}) # noqa
def test_enrich_content_without_hashes(self):
# when/then
self.assertEqual(utils.enrich_content({'id': '123'}),
{'id': '123'})
@patch('swh.web.api.utils.reverse')
def test_enrich_content_with_hashes(self, mock_django_reverse):
for algo, hash in self.sample_content_hashes.items():
query_string = '%s:%s' % (algo, hash)
# given
mock_django_reverse.side_effect = [
'/api/content/%s/raw/' % query_string,
'/api/filetype/%s/' % query_string,
'/api/language/%s/' % query_string,
'/api/license/%s/' % query_string
]
# when
enriched_content = utils.enrich_content(
{
algo: hash,
},
query_string=query_string
)
# then
self.assertEqual(
enriched_content,
{
algo: hash,
'data_url': '/api/content/%s/raw/' % query_string,
'filetype_url': '/api/filetype/%s/' % query_string,
'language_url': '/api/language/%s/' % query_string,
'license_url': '/api/license/%s/' % query_string,
}
)
mock_django_reverse.assert_has_calls([
call('api-1-content-raw', url_args={'q': query_string}),
call('api-1-content-filetype', url_args={'q': query_string}),
call('api-1-content-language', url_args={'q': query_string}),
call('api-1-content-license', url_args={'q': query_string}),
])
mock_django_reverse.reset_mock()
@patch('swh.web.api.utils.reverse')
def test_enrich_content_with_hashes_and_top_level_url(self,
mock_django_reverse):
for algo, hash in self.sample_content_hashes.items():
query_string = '%s:%s' % (algo, hash)
# given
mock_django_reverse.side_effect = [
'/api/content/%s/' % query_string,
'/api/content/%s/raw/' % query_string,
'/api/filetype/%s/' % query_string,
'/api/language/%s/' % query_string,
'/api/license/%s/' % query_string,
]
# when
enriched_content = utils.enrich_content(
{
algo: hash
},
top_url=True,
query_string=query_string
)
# then
self.assertEqual(
enriched_content,
{
algo: hash,
'content_url': '/api/content/%s/' % query_string,
'data_url': '/api/content/%s/raw/' % query_string,
'filetype_url': '/api/filetype/%s/' % query_string,
'language_url': '/api/language/%s/' % query_string,
'license_url': '/api/license/%s/' % query_string,
}
)
mock_django_reverse.assert_has_calls([
call('api-1-content', url_args={'q': query_string}),
call('api-1-content-raw', url_args={'q': query_string}),
call('api-1-content-filetype', url_args={'q': query_string}),
call('api-1-content-language', url_args={'q': query_string}),
call('api-1-content-license', url_args={'q': query_string}),
])
mock_django_reverse.reset_mock()
def _reverse_context_test(self, view_name, url_args):
if view_name == 'api-1-revision':
return '/api/revision/%s/' % url_args['sha1_git']
elif view_name == 'api-1-revision-context':
return '/api/revision/%s/prev/%s/' % (url_args['sha1_git'], url_args['context']) # noqa
elif view_name == 'api-1-revision-log':
if 'prev_sha1s' in url_args:
return '/api/revision/%s/prev/%s/log/' % (url_args['sha1_git'], url_args['prev_sha1s']) # noqa
else:
return '/api/revision/%s/log/' % url_args['sha1_git']
@patch('swh.web.api.utils.reverse')
def test_enrich_revision_without_children_or_parent(
self, mock_django_reverse,
):
# given
def reverse_test(view_name, url_args):
if view_name == 'api-1-revision':
return '/api/revision/' + url_args['sha1_git'] + '/'
elif view_name == 'api-1-revision-log':
return '/api/revision/' + url_args['sha1_git'] + '/log/'
elif view_name == 'api-1-directory':
return '/api/directory/' + url_args['sha1_git'] + '/'
- elif view_name == 'api-1-person':
- return '/api/person/' + url_args['person_id'] + '/'
mock_django_reverse.side_effect = reverse_test
# when
actual_revision = utils.enrich_revision({
'id': 'rev-id',
'directory': '123',
'author': {'id': '1'},
'committer': {'id': '2'},
})
expected_revision = {
'id': 'rev-id',
'directory': '123',
'url': '/api/revision/rev-id/',
'history_url': '/api/revision/rev-id/log/',
'directory_url': '/api/directory/123/',
'author': {'id': '1'},
- 'author_url': '/api/person/1/',
'committer': {'id': '2'},
- 'committer_url': '/api/person/2/'
}
# then
self.assertEqual(actual_revision, expected_revision)
mock_django_reverse.assert_has_calls(
[call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
- call('api-1-person', url_args={'person_id': '1'}),
- call('api-1-person', url_args={'person_id': '2'}),
call('api-1-directory', url_args={'sha1_git': '123'})])
@patch('swh.web.api.utils.reverse')
def test_enrich_revision_with_children_and_parent_no_dir(
self, mock_django_reverse,
):
# given
mock_django_reverse.side_effect = self._reverse_context_test
# when
actual_revision = utils.enrich_revision({
'id': 'rev-id',
'parents': ['123'],
'children': ['456'],
})
expected_revision = {
'id': 'rev-id',
'url': '/api/revision/rev-id/',
'history_url': '/api/revision/rev-id/log/',
'parents': [{'id': '123', 'url': '/api/revision/123/'}],
'children': ['456'],
'children_urls': ['/api/revision/456/'],
}
# then
self.assertEqual(actual_revision, expected_revision)
mock_django_reverse.assert_has_calls(
[call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision', url_args={'sha1_git': '123'}),
call('api-1-revision', url_args={'sha1_git': '456'})])
@patch('swh.web.api.utils.reverse')
def test_enrich_revision_no_context(self, mock_django_reverse):
# given
mock_django_reverse.side_effect = self._reverse_context_test
# when
actual_revision = utils.enrich_revision({
'id': 'rev-id',
'parents': ['123'],
'children': ['456'],
})
expected_revision = {
'id': 'rev-id',
'url': '/api/revision/rev-id/',
'history_url': '/api/revision/rev-id/log/',
'parents': [{'id': '123', 'url': '/api/revision/123/'}],
'children': ['456'],
'children_urls': ['/api/revision/456/']
}
# then
self.assertEqual(actual_revision, expected_revision)
mock_django_reverse.assert_has_calls(
[call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision', url_args={'sha1_git': '123'}),
call('api-1-revision', url_args={'sha1_git': '456'})])
def _reverse_rev_message_test(self, view_name, url_args):
if view_name == 'api-1-revision':
return '/api/revision/%s/' % url_args['sha1_git']
elif view_name == 'api-1-revision-log':
if 'prev_sha1s' in url_args and url_args['prev_sha1s'] is not None:
return '/api/revision/%s/prev/%s/log/' % (url_args['sha1_git'], url_args['prev_sha1s']) # noqa
else:
return '/api/revision/%s/log/' % url_args['sha1_git']
elif view_name == 'api-1-revision-raw-message':
return '/api/revision/' + url_args['sha1_git'] + '/raw/'
else:
return '/api/revision/%s/prev/%s/' % (url_args['sha1_git'], url_args['context']) # noqa
@patch('swh.web.api.utils.reverse')
def test_enrich_revision_with_no_message(self, mock_django_reverse):
# given
mock_django_reverse.side_effect = self._reverse_rev_message_test
# when
expected_revision = {
'id': 'rev-id',
'url': '/api/revision/rev-id/',
'history_url': '/api/revision/rev-id/log/',
'message': None,
'parents': [{'id': '123', 'url': '/api/revision/123/'}],
'children': ['456'],
'children_urls': ['/api/revision/456/'],
}
actual_revision = utils.enrich_revision({
'id': 'rev-id',
'message': None,
'parents': ['123'],
'children': ['456'],
})
# then
self.assertEqual(actual_revision, expected_revision)
mock_django_reverse.assert_has_calls(
[call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision', url_args={'sha1_git': '123'}),
call('api-1-revision', url_args={'sha1_git': '456'})]
)
@patch('swh.web.api.utils.reverse')
def test_enrich_revision_with_invalid_message(self, mock_django_reverse):
# given
mock_django_reverse.side_effect = self._reverse_rev_message_test
# when
actual_revision = utils.enrich_revision({
'id': 'rev-id',
'message': None,
'message_decoding_failed': True,
'parents': ['123'],
'children': ['456'],
})
expected_revision = {
'id': 'rev-id',
'url': '/api/revision/rev-id/',
'history_url': '/api/revision/rev-id/log/',
'message': None,
'message_decoding_failed': True,
'message_url': '/api/revision/rev-id/raw/',
'parents': [{'id': '123', 'url': '/api/revision/123/'}],
'children': ['456'],
'children_urls': ['/api/revision/456/'],
}
# then
self.assertEqual(actual_revision, expected_revision)
mock_django_reverse.assert_has_calls(
[call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
call('api-1-revision', url_args={'sha1_git': '123'}),
call('api-1-revision', url_args={'sha1_git': '456'}),
call('api-1-revision-raw-message', url_args={'sha1_git': 'rev-id'})]) # noqa
diff --git a/swh/web/tests/api/views/test_person.py b/swh/web/tests/api/views/test_person.py
deleted file mode 100644
index 6b98f066..00000000
--- a/swh/web/tests/api/views/test_person.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (C) 2015-2019 The Software Heritage developers
-# See the AUTHORS file at the top-level directory of this distribution
-# License: GNU Affero General Public License version 3, or any later version
-# See top-level LICENSE file for more information
-
-import random
-
-from hypothesis import given
-from rest_framework.test import APITestCase
-
-from swh.web.common.utils import reverse
-from swh.web.tests.strategies import person
-from swh.web.tests.testcase import WebTestCase
-
-
-class PersonApiTestCase(WebTestCase, APITestCase):
-
- @given(person())
- def test_api_person(self, person):
-
- url = reverse('api-1-person', url_args={'person_id': person})
-
- rv = self.client.get(url)
-
- expected_person = self.person_get(person)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, expected_person)
-
- def test_api_person_not_found(self):
- unknown_person_ = random.randint(1000, 10000000)
-
- url = reverse('api-1-person', url_args={'person_id': unknown_person_})
-
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Person with id %s not found' % unknown_person_})
diff --git a/swh/web/tests/api/views/test_release.py b/swh/web/tests/api/views/test_release.py
index 3233e849..f680a86e 100644
--- a/swh/web/tests/api/views/test_release.py
+++ b/swh/web/tests/api/views/test_release.py
@@ -1,125 +1,116 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from datetime import datetime
from hypothesis import given
from rest_framework.test import APITestCase
from swh.model.hashutil import hash_to_bytes
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
from swh.web.tests.strategies import (
release, sha1, content, directory
)
from swh.web.tests.testcase import WebTestCase
class ReleaseApiTestCase(WebTestCase, APITestCase):
@given(release())
def test_api_release(self, release):
url = reverse('api-1-release', url_args={'sha1_git': release})
rv = self.client.get(url)
expected_release = self.release_get(release)
- author_id = expected_release['author']['id']
target_revision = expected_release['target']
- author_url = reverse('api-1-person',
- url_args={'person_id': author_id})
target_url = reverse('api-1-revision',
url_args={'sha1_git': target_revision})
- expected_release['author_url'] = author_url
expected_release['target_url'] = target_url
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_release)
@given(sha1(), sha1(), sha1(), content(), directory(), release())
def test_api_release_target_type_not_a_revision(self, new_rel1, new_rel2,
new_rel3, content,
directory, release):
for new_rel_id, target_type, target in (
(new_rel1, 'content', content),
(new_rel2, 'directory', directory),
(new_rel3, 'release', release)):
if target_type == 'content':
target = target['sha1_git']
sample_release = {
'author': {
'email': b'author@company.org',
'fullname': b'author <author@company.org>',
'name': b'author'
},
'date': {
'timestamp': int(datetime.now().timestamp()),
'offset': 0,
'negative_utc': False,
},
'id': hash_to_bytes(new_rel_id),
'message': b'sample release message',
'name': b'sample release',
'synthetic': False,
'target': hash_to_bytes(target),
'target_type': target_type
}
self.storage.release_add([sample_release])
url = reverse('api-1-release', url_args={'sha1_git': new_rel_id})
rv = self.client.get(url)
expected_release = self.release_get(new_rel_id)
- author_id = expected_release['author']['id']
- author_url = reverse('api-1-person',
- url_args={'person_id': author_id})
-
if target_type == 'content':
url_args = {'q': 'sha1_git:%s' % target}
else:
url_args = {'sha1_git': target}
target_url = reverse('api-1-%s' % target_type,
url_args=url_args)
- expected_release['author_url'] = author_url
expected_release['target_url'] = target_url
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_release)
def test_api_release_not_found(self):
unknown_release_ = random_sha1()
url = reverse('api-1-release', url_args={'sha1_git': unknown_release_})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'Release with sha1_git %s not found.' % unknown_release_
})
@given(release())
def test_api_release_uppercase(self, release):
url = reverse('api-1-release-uppercase-checksum',
url_args={'sha1_git': release.upper()})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 302)
redirect_url = reverse('api-1-release-uppercase-checksum',
url_args={'sha1_git': release})
self.assertEqual(resp['location'], redirect_url)
diff --git a/swh/web/tests/api/views/test_revision.py b/swh/web/tests/api/views/test_revision.py
index 77f40203..9280566c 100644
--- a/swh/web/tests/api/views/test_revision.py
+++ b/swh/web/tests/api/views/test_revision.py
@@ -1,570 +1,560 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import random
from hypothesis import given
import pytest
from rest_framework.test import APITestCase
from unittest.mock import patch
from swh.model.hashutil import hash_to_hex
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import reverse, parse_timestamp
from swh.web.tests.data import random_sha1
from swh.web.tests.strategies import (
revision, new_revision, origin, origin_with_multiple_visits
)
from swh.web.tests.testcase import WebTestCase
class RevisionApiTestCase(WebTestCase, APITestCase):
@given(revision())
def test_api_revision(self, revision):
url = reverse('api-1-revision', url_args={'sha1_git': revision})
rv = self.client.get(url)
expected_revision = self.revision_get(revision)
self._enrich_revision(expected_revision)
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_revision)
def test_api_revision_not_found(self):
unknown_revision_ = random_sha1()
url = reverse('api-1-revision',
url_args={'sha1_git': unknown_revision_})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'Revision with sha1_git %s not found.' %
unknown_revision_})
@given(revision())
def test_api_revision_raw_ok(self, revision):
url = reverse('api-1-revision-raw-message',
url_args={'sha1_git': revision})
rv = self.client.get(url)
expected_message = self.revision_get(revision)['message']
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv['Content-Type'], 'application/octet-stream')
self.assertEqual(rv.content, expected_message.encode())
@given(new_revision())
def test_api_revision_raw_ok_no_msg(self, new_revision):
del new_revision['message']
self.storage.revision_add([new_revision])
new_revision_id = hash_to_hex(new_revision['id'])
url = reverse('api-1-revision-raw-message',
url_args={'sha1_git': new_revision_id})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'No message for revision with sha1_git %s.' %
new_revision_id})
def test_api_revision_raw_ko_no_rev(self):
unknown_revision_ = random_sha1()
url = reverse('api-1-revision-raw-message',
url_args={'sha1_git': unknown_revision_})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'Revision with sha1_git %s not found.' %
unknown_revision_})
@pytest.mark.origin_id
def test_api_revision_with_origin_id_not_found(self):
unknown_origin_id = random.randint(1000, 1000000)
url = reverse('api-1-revision-origin',
url_args={'origin_id': unknown_origin_id})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['content-type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'Origin %s not found!' %
unknown_origin_id})
@pytest.mark.origin_id
@given(origin())
def test_api_revision_with_origin_id(self, origin):
url = reverse('api-1-revision-origin',
url_args={'origin_id': origin['id']})
rv = self.client.get(url)
snapshot = self.snapshot_get_latest(origin['url'])
expected_revision = self.revision_get(
snapshot['branches']['HEAD']['target'])
self._enrich_revision(expected_revision)
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_revision)
@pytest.mark.origin_id
@given(origin())
def test_api_revision_with_origin_id_and_branch_name(self, origin):
snapshot = self.snapshot_get_latest(origin['url'])
branch_name = random.choice(
list(b for b in snapshot['branches'].keys()
if snapshot['branches'][b]['target_type'] == 'revision'))
url = reverse('api-1-revision-origin',
url_args={'origin_id': origin['id'],
'branch_name': branch_name})
rv = self.client.get(url)
expected_revision = self.revision_get(
snapshot['branches'][branch_name]['target'])
self._enrich_revision(expected_revision)
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['content-type'], 'application/json')
self.assertEqual(rv.data, expected_revision)
@pytest.mark.origin_id
@given(origin_with_multiple_visits())
def test_api_revision_with_origin_id_and_branch_name_and_ts(self, origin):
visit = random.choice(self.origin_visit_get(origin['url']))
snapshot = self.snapshot_get(visit['snapshot'])
branch_name = random.choice(
list(b for b in snapshot['branches'].keys()
if snapshot['branches'][b]['target_type'] == 'revision'))
url = reverse('api-1-revision-origin',
url_args={'origin_id': origin['id'],
'branch_name': branch_name,
'ts': visit['date']})
rv = self.client.get(url)
expected_revision = self.revision_get(
snapshot['branches'][branch_name]['target'])
self._enrich_revision(expected_revision)
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_revision)
@pytest.mark.origin_id
@given(origin_with_multiple_visits())
def test_api_revision_with_origin_id_and_branch_name_and_ts_escapes(
self, origin):
visit = random.choice(self.origin_visit_get(origin['url']))
snapshot = self.snapshot_get(visit['snapshot'])
branch_name = random.choice(
list(b for b in snapshot['branches'].keys()
if snapshot['branches'][b]['target_type'] == 'revision'))
date = parse_timestamp(visit['date'])
formatted_date = date.strftime('Today is %B %d, %Y at %X')
url = reverse('api-1-revision-origin',
url_args={'origin_id': origin['id'],
'branch_name': branch_name,
'ts': formatted_date})
rv = self.client.get(url)
expected_revision = self.revision_get(
snapshot['branches'][branch_name]['target'])
self._enrich_revision(expected_revision)
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_revision)
@pytest.mark.origin_id
def test_api_directory_through_revision_origin_id_ko(self):
unknown_origin_id_ = random.randint(1000, 1000000)
url = reverse('api-1-revision-origin-directory',
url_args={'origin_id': unknown_origin_id_})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'Origin %s not found!' %
unknown_origin_id_
})
@pytest.mark.origin_id
@given(origin())
def test_api_directory_through_revision_origin_id(self, origin):
url = reverse('api-1-revision-origin-directory',
url_args={'origin_id': origin['id']})
rv = self.client.get(url)
snapshot = self.snapshot_get_latest(origin['id'])
revision_id = snapshot['branches']['HEAD']['target']
revision = self.revision_get(revision_id)
directory = self.directory_ls(revision['directory'])
for entry in directory:
if entry['type'] == 'dir':
entry['target_url'] = reverse(
'api-1-directory',
url_args={'sha1_git': entry['target']}
)
entry['dir_url'] = reverse(
'api-1-revision-origin-directory',
url_args={'origin_id': origin['id'],
'path': entry['name']})
elif entry['type'] == 'file':
entry['target_url'] = reverse(
'api-1-content',
url_args={'q': 'sha1_git:%s' % entry['target']}
)
entry['file_url'] = reverse(
'api-1-revision-origin-directory',
url_args={'origin_id': origin['id'],
'path': entry['name']})
elif entry['type'] == 'rev':
entry['target_url'] = reverse(
'api-1-revision',
url_args={'sha1_git': entry['target']}
)
entry['rev_url'] = reverse(
'api-1-revision-origin-directory',
url_args={'origin_id': origin['id'],
'path': entry['name']})
expected_result = {
'content': directory,
'path': '.',
'revision': revision_id,
'type': 'dir'
}
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_result)
@given(revision())
def test_api_revision_log(self, revision):
per_page = 10
url = reverse('api-1-revision-log', url_args={'sha1_git': revision},
query_params={'per_page': per_page})
rv = self.client.get(url)
expected_log = self.revision_log(revision, limit=per_page+1)
expected_log = list(map(self._enrich_revision, expected_log))
has_next = len(expected_log) > per_page
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data,
expected_log[:-1] if has_next else expected_log)
if has_next:
self.assertIn('Link', rv)
next_log_url = reverse(
'api-1-revision-log',
url_args={'sha1_git': expected_log[-1]['id']},
query_params={'per_page': per_page})
self.assertIn(next_log_url, rv['Link'])
def test_api_revision_log_not_found(self):
unknown_revision_ = random_sha1()
url = reverse('api-1-revision-log',
url_args={'sha1_git': unknown_revision_})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'Revision with sha1_git %s not found.' %
unknown_revision_})
self.assertFalse(rv.has_header('Link'))
@given(revision())
def test_api_revision_log_context(self, revision):
revisions = self.revision_log(revision, limit=4)
prev_rev = revisions[0]['id']
rev = revisions[-1]['id']
per_page = 10
url = reverse('api-1-revision-log',
url_args={'sha1_git': rev,
'prev_sha1s': prev_rev},
query_params={'per_page': per_page})
rv = self.client.get(url)
expected_log = self.revision_log(rev, limit=per_page)
prev_revision = self.revision_get(prev_rev)
expected_log.insert(0, prev_revision)
expected_log = list(map(self._enrich_revision, expected_log))
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, expected_log)
@pytest.mark.origin_id
@given(origin())
def test_api_revision_log_by_origin_id(self, origin):
per_page = 10
url = reverse('api-1-revision-origin-log',
url_args={'origin_id': origin['id']},
query_params={'per_page': per_page})
rv = self.client.get(url)
snapshot = self.snapshot_get_latest(origin['url'])
expected_log = self.revision_log(
snapshot['branches']['HEAD']['target'], limit=per_page+1)
expected_log = list(map(self._enrich_revision, expected_log))
has_next = len(expected_log) > per_page
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data,
expected_log[:-1] if has_next else expected_log)
if has_next:
self.assertIn('Link', rv)
next_log_url = reverse(
'api-1-revision-origin-log',
url_args={'origin_id': origin['id'],
'branch_name': 'HEAD'},
query_params={'per_page': per_page,
'sha1_git': expected_log[-1]['id']})
self.assertIn(next_log_url, rv['Link'])
@pytest.mark.origin_id
@given(origin())
def test_api_revision_log_by_ko(self, origin):
invalid_branch_name = 'foobar'
url = reverse('api-1-revision-origin-log',
url_args={'origin_id': origin['id'],
'branch_name': invalid_branch_name})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertFalse(rv.has_header('Link'))
self.assertEqual(
rv.data,
{'exception': 'NotFoundExc',
'reason': 'Revision for origin %s and branch %s not found.' %
(origin['id'], invalid_branch_name)})
@pytest.mark.origin_id
@given(origin())
def test_api_revision_log_by_origin_id_ko(self, origin):
invalid_branch_name = 'foobar'
url = reverse('api-1-revision-origin-log',
url_args={'origin_id': origin['id'],
'branch_name': invalid_branch_name})
rv = self.client.get(url)
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertFalse(rv.has_header('Link'))
self.assertEqual(
rv.data,
{'exception': 'NotFoundExc',
'reason': 'Revision for origin %s and branch %s not found.' %
(origin['id'], invalid_branch_name)})
@patch('swh.web.api.views.revision._revision_directory_by')
def test_api_revision_directory_ko_not_found(self, mock_rev_dir):
# given
mock_rev_dir.side_effect = NotFoundExc('Not found')
# then
rv = self.client.get('/api/1/revision/999/directory/some/path/to/dir/')
self.assertEqual(rv.status_code, 404, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, {
'exception': 'NotFoundExc',
'reason': 'Not found'})
mock_rev_dir.assert_called_once_with(
{'sha1_git': '999'},
'some/path/to/dir',
'/api/1/revision/999/directory/some/path/to/dir/',
with_data=False)
@patch('swh.web.api.views.revision._revision_directory_by')
def test_api_revision_directory_ok_returns_dir_entries(self, mock_rev_dir):
stub_dir = {
'type': 'dir',
'revision': '999',
'content': [
{
'sha1_git': '789',
'type': 'file',
'target': '101',
'target_url': '/api/1/content/sha1_git:101/',
'name': 'somefile',
'file_url': '/api/1/revision/999/directory/some/path/'
'somefile/'
},
{
'sha1_git': '123',
'type': 'dir',
'target': '456',
'target_url': '/api/1/directory/456/',
'name': 'to-subdir',
'dir_url': '/api/1/revision/999/directory/some/path/'
'to-subdir/',
}]
}
# given
mock_rev_dir.return_value = stub_dir
# then
rv = self.client.get('/api/1/revision/999/directory/some/path/')
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, stub_dir)
mock_rev_dir.assert_called_once_with(
{'sha1_git': '999'},
'some/path',
'/api/1/revision/999/directory/some/path/',
with_data=False)
@patch('swh.web.api.views.revision._revision_directory_by')
def test_api_revision_directory_ok_returns_content(self, mock_rev_dir):
stub_content = {
'type': 'file',
'revision': '999',
'content': {
'sha1_git': '789',
'sha1': '101',
'data_url': '/api/1/content/101/raw/',
}
}
# given
mock_rev_dir.return_value = stub_content
# then
url = '/api/1/revision/666/directory/some/other/path/'
rv = self.client.get(url)
self.assertEqual(rv.status_code, 200, rv.data)
self.assertEqual(rv['Content-Type'], 'application/json')
self.assertEqual(rv.data, stub_content)
mock_rev_dir.assert_called_once_with(
{'sha1_git': '666'}, 'some/other/path', url, with_data=False)
def _enrich_revision(self, revision):
- author_url = reverse(
- 'api-1-person',
- url_args={'person_id': revision['author']['id']})
-
- committer_url = reverse(
- 'api-1-person',
- url_args={'person_id': revision['committer']['id']})
-
directory_url = reverse(
'api-1-directory',
url_args={'sha1_git': revision['directory']})
history_url = reverse('api-1-revision-log',
url_args={'sha1_git': revision['id']})
parents_id_url = []
for p in revision['parents']:
parents_id_url.append({
'id': p,
'url': reverse('api-1-revision', url_args={'sha1_git': p})
})
revision_url = reverse('api-1-revision',
url_args={'sha1_git': revision['id']})
- revision['author_url'] = author_url
- revision['committer_url'] = committer_url
revision['directory_url'] = directory_url
revision['history_url'] = history_url
revision['url'] = revision_url
revision['parents'] = parents_id_url
return revision
@given(revision())
def test_api_revision_uppercase(self, revision):
url = reverse('api-1-revision-uppercase-checksum',
url_args={'sha1_git': revision.upper()})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 302)
redirect_url = reverse('api-1-revision',
url_args={'sha1_git': revision})
self.assertEqual(resp['location'], redirect_url)
diff --git a/swh/web/tests/browse/test_utils.py b/swh/web/tests/browse/test_utils.py
index 52e58a53..fa626a2a 100644
--- a/swh/web/tests/browse/test_utils.py
+++ b/swh/web/tests/browse/test_utils.py
@@ -1,90 +1,80 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from hypothesis import given
from swh.web.browse import utils
from swh.web.common.utils import reverse, format_utc_iso_date
from swh.web.tests.strategies import origin_with_multiple_visits
from swh.web.tests.testcase import WebTestCase
class SwhBrowseUtilsTestCase(WebTestCase):
def test_get_mimetype_and_encoding_for_content(self):
text = b'Hello world!'
self.assertEqual(utils.get_mimetype_and_encoding_for_content(text),
('text/plain', 'us-ascii'))
@given(origin_with_multiple_visits())
def test_get_origin_visit_snapshot_simple(self, origin):
visits = self.origin_visit_get(origin['url'])
for visit in visits:
snapshot = self.snapshot_get(visit['snapshot'])
branches = []
releases = []
for branch in sorted(snapshot['branches'].keys()):
branch_data = snapshot['branches'][branch]
if branch_data['target_type'] == 'revision':
rev_data = self.revision_get(branch_data['target'])
branches.append({
'name': branch,
'revision': branch_data['target'],
'directory': rev_data['directory'],
'date': format_utc_iso_date(rev_data['date']),
'message': rev_data['message']
})
elif branch_data['target_type'] == 'release':
rel_data = self.release_get(branch_data['target'])
rev_data = self.revision_get(rel_data['target'])
releases.append({
'name': rel_data['name'],
'branch_name': branch,
'date': format_utc_iso_date(rel_data['date']),
'id': rel_data['id'],
'message': rel_data['message'],
'target_type': rel_data['target_type'],
'target': rel_data['target'],
'directory': rev_data['directory']
})
assert branches and releases, 'Incomplete test data.'
origin_visit_branches = utils.get_origin_visit_snapshot(
origin, visit_id=visit['visit'])
self.assertEqual(origin_visit_branches, (branches, releases))
def test_gen_link(self):
self.assertEqual(
utils.gen_link('https://www.softwareheritage.org/', 'swh'),
'<a href="https://www.softwareheritage.org/">swh</a>')
- def test_gen_person_link(self):
- person_id = 8221896
- person_name = 'Antoine Lambert'
- person_url = reverse('browse-person',
- url_args={'person_id': person_id})
-
- self.assertEqual(utils.gen_person_link(person_id, person_name,
- link_attrs=None),
- '<a href="%s">%s</a>' % (person_url, person_name))
-
def test_gen_revision_link(self):
revision_id = '28a0bc4120d38a394499382ba21d6965a67a3703'
revision_url = reverse('browse-revision',
url_args={'sha1_git': revision_id})
self.assertEqual(utils.gen_revision_link(revision_id, link_text=None,
link_attrs=None),
'<a href="%s">%s</a>' % (revision_url, revision_id))
self.assertEqual(
utils.gen_revision_link(revision_id, shorten_id=True,
link_attrs=None),
'<a href="%s">%s</a>' % (revision_url, revision_id[:7]))
diff --git a/swh/web/tests/browse/views/test_person.py b/swh/web/tests/browse/views/test_person.py
deleted file mode 100644
index 414df8ef..00000000
--- a/swh/web/tests/browse/views/test_person.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright (C) 2017-2019 The Software Heritage developers
-# See the AUTHORS file at the top-level directory of this distribution
-# License: GNU Affero General Public License version 3, or any later version
-# See top-level LICENSE file for more information
-
-from hypothesis import given
-
-from swh.web.common.utils import reverse
-from swh.web.tests.strategies import person, unknown_person
-from swh.web.tests.testcase import WebTestCase
-
-
-class SwhBrowsePersonTest(WebTestCase):
-
- @given(person())
- def test_person_browse(self, person):
- test_person_data = self.person_get(person)
-
- url = reverse('browse-person', url_args={'person_id': person})
-
- resp = self.client.get(url)
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/person.html')
- self.assertContains(resp, '<pre>%s</pre>' % test_person_data['id'])
- self.assertContains(resp, '<pre>%s</pre>' % test_person_data['name'])
- self.assertContains(resp, '<pre><a href="mailto:%s">%s</a></pre>' %
- (test_person_data['email'],
- test_person_data['email']))
- self.assertContains(resp, '<pre>%s <<a href="mailto:%s">%s</a>></pre>' % # noqa
- (test_person_data['name'],
- test_person_data['email'],
- test_person_data['email']))
-
- @given(unknown_person())
- def test_person_request_error(self, unknown_person):
- url = reverse('browse-person', url_args={'person_id': unknown_person})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp,
- 'Person with id %s not found' % unknown_person,
- status_code=404)
diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py
index eddb6904..e4e1045b 100644
--- a/swh/web/tests/browse/views/test_release.py
+++ b/swh/web/tests/browse/views/test_release.py
@@ -1,113 +1,108 @@
# Copyright (C) 2018-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import random
from hypothesis import given
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id
)
from swh.web.tests.strategies import (
release, origin_with_release, unknown_release
)
from swh.web.tests.testcase import WebTestCase
class SwhBrowseReleaseTest(WebTestCase):
@given(release())
def test_release_browse(self, release):
url = reverse('browse-release',
url_args={'sha1_git': release})
release_data = self.release_get(release)
resp = self.client.get(url)
self._release_browse_checks(resp, release_data)
@given(origin_with_release())
def test_release_browse_with_origin(self, origin):
snapshot = self.snapshot_get_latest(origin['url'])
release = random.choice([b for b in snapshot['branches'].values()
if b['target_type'] == 'release'])
url = reverse('browse-release',
url_args={'sha1_git': release['target']},
query_params={'origin': origin['url']})
release_data = self.release_get(release['target'])
resp = self.client.get(url)
self._release_browse_checks(resp, release_data, origin)
@given(unknown_release())
def test_release_browse_not_found(self, unknown_release):
url = reverse('browse-release',
url_args={'sha1_git': unknown_release})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
err_msg = 'Release with sha1_git %s not found' % unknown_release
self.assertContains(resp, err_msg, status_code=404)
def _release_browse_checks(self, resp, release_data, origin_info=None):
query_params = {}
if origin_info:
query_params['origin'] = origin_info['url']
release_id = release_data['id']
release_name = release_data['name']
- author_id = release_data['author']['id']
author_name = release_data['author']['name']
- author_url = reverse('browse-person',
- url_args={'person_id': author_id},
- query_params=query_params)
release_date = release_data['date']
message = release_data['message']
target_type = release_data['target_type']
target = release_data['target']
target_url = reverse('browse-revision',
url_args={'sha1_git': target},
query_params=query_params)
message_lines = message.split('\n')
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/release.html')
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (author_url, author_name))
+ self.assertContains(resp, author_name)
self.assertContains(resp, format_utc_iso_date(release_date))
self.assertContains(resp,
'<h6>%s</h6>%s' % (message_lines[0] or 'None',
'\n'.join(message_lines[1:])))
self.assertContains(resp, release_id)
self.assertContains(resp, release_name)
self.assertContains(resp, target_type)
self.assertContains(resp, '<a href="%s">%s</a>' %
(target_url, target))
swh_rel_id = get_swh_persistent_id('release', release_id)
swh_rel_id_url = reverse('browse-swh-id',
url_args={'swh_id': swh_rel_id})
self.assertContains(resp, swh_rel_id)
self.assertContains(resp, swh_rel_id_url)
@given(release())
def test_release_uppercase(self, release):
url = reverse('browse-release-uppercase-checksum',
url_args={'sha1_git': release.upper()})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 302)
redirect_url = reverse('browse-release',
url_args={'sha1_git': release})
self.assertEqual(resp['location'], redirect_url)
diff --git a/swh/web/tests/browse/views/test_revision.py b/swh/web/tests/browse/views/test_revision.py
index 145299a5..9dc7c0fc 100644
--- a/swh/web/tests/browse/views/test_revision.py
+++ b/swh/web/tests/browse/views/test_revision.py
@@ -1,256 +1,247 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from django.utils.html import escape
from hypothesis import given
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id,
parse_timestamp
)
from swh.web.tests.strategies import (
origin, revision, unknown_revision, new_origin
)
from swh.web.tests.testcase import WebTestCase
class SwhBrowseRevisionTest(WebTestCase):
@given(revision())
def test_revision_browse(self, revision):
url = reverse('browse-revision',
url_args={'sha1_git': revision})
revision_data = self.revision_get(revision)
- author_id = revision_data['author']['id']
author_name = revision_data['author']['name']
- committer_id = revision_data['committer']['id']
committer_name = revision_data['committer']['name']
dir_id = revision_data['directory']
- author_url = reverse('browse-person',
- url_args={'person_id': author_id})
- committer_url = reverse('browse-person',
- url_args={'person_id': committer_id})
-
directory_url = reverse('browse-directory',
url_args={'sha1_git': dir_id})
history_url = reverse('browse-revision-log',
url_args={'sha1_git': revision})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision.html')
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (author_url, author_name))
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (committer_url, committer_name))
+ self.assertContains(resp, author_name)
+ self.assertContains(resp, committer_name)
self.assertContains(resp, directory_url)
self.assertContains(resp, history_url)
for parent in revision_data['parents']:
parent_url = reverse('browse-revision',
url_args={'sha1_git': parent})
self.assertContains(resp, '<a href="%s">%s</a>' %
(parent_url, parent))
author_date = revision_data['date']
committer_date = revision_data['committer_date']
message_lines = revision_data['message'].split('\n')
self.assertContains(resp, format_utc_iso_date(author_date))
self.assertContains(resp, format_utc_iso_date(committer_date))
self.assertContains(resp, escape(message_lines[0]))
self.assertContains(resp, escape('\n'.join(message_lines[1:])))
@given(origin())
def test_revision_origin_browse(self, origin):
snapshot = self.snapshot_get_latest(origin['url'])
revision = snapshot['branches']['HEAD']['target']
revision_data = self.revision_get(revision)
dir_id = revision_data['directory']
origin_revision_log_url = reverse('browse-origin-log',
url_args={'origin_url': origin['url']}, # noqa
query_params={'revision': revision})
url = reverse('browse-revision',
url_args={'sha1_git': revision},
query_params={'origin': origin['url']})
resp = self.client.get(url)
self.assertContains(resp, origin_revision_log_url)
for parent in revision_data['parents']:
parent_url = reverse('browse-revision',
url_args={'sha1_git': parent},
query_params={'origin': origin['url']})
self.assertContains(resp, '<a href="%s">%s</a>' %
(parent_url, parent))
self.assertContains(resp, 'vault-cook-directory')
self.assertContains(resp, 'vault-cook-revision')
swh_rev_id = get_swh_persistent_id('revision', revision)
swh_rev_id_url = reverse('browse-swh-id',
url_args={'swh_id': swh_rev_id})
self.assertContains(resp, swh_rev_id)
self.assertContains(resp, swh_rev_id_url)
swh_dir_id = get_swh_persistent_id('directory', dir_id)
swh_dir_id_url = reverse('browse-swh-id',
url_args={'swh_id': swh_dir_id})
self.assertContains(resp, swh_dir_id)
self.assertContains(resp, swh_dir_id_url)
self.assertContains(resp, 'swh-take-new-snapshot')
@given(revision())
def test_revision_log_browse(self, revision):
per_page = 10
revision_log = self.revision_log(revision)
revision_log_sorted = \
sorted(revision_log,
key=lambda rev: -parse_timestamp(
rev['committer_date']).timestamp())
url = reverse('browse-revision-log',
url_args={'sha1_git': revision},
query_params={'per_page': per_page})
resp = self.client.get(url)
next_page_url = reverse('browse-revision-log',
url_args={'sha1_git': revision},
query_params={'offset': per_page,
'per_page': per_page})
nb_log_entries = per_page
if len(revision_log_sorted) < per_page:
nb_log_entries = len(revision_log_sorted)
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision-log.html')
self.assertContains(resp, '<tr class="swh-revision-log-entry',
count=nb_log_entries)
self.assertContains(resp, '<a class="page-link">Newer</a>')
if len(revision_log_sorted) > per_page:
self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
escape(next_page_url))
for log in revision_log_sorted[:per_page]:
revision_url = reverse('browse-revision',
url_args={'sha1_git': log['id']})
self.assertContains(resp, log['id'][:7])
self.assertContains(resp, log['author']['name'])
self.assertContains(resp, format_utc_iso_date(log['date']))
self.assertContains(resp, escape(log['message']))
self.assertContains(resp, format_utc_iso_date(log['committer_date'])) # noqa
self.assertContains(resp, revision_url)
if len(revision_log_sorted) <= per_page:
return
resp = self.client.get(next_page_url)
prev_page_url = reverse('browse-revision-log',
url_args={'sha1_git': revision},
query_params={'per_page': per_page})
next_page_url = reverse('browse-revision-log',
url_args={'sha1_git': revision},
query_params={'offset': 2 * per_page,
'per_page': per_page})
nb_log_entries = len(revision_log_sorted) - per_page
if nb_log_entries > per_page:
nb_log_entries = per_page
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision-log.html')
self.assertContains(resp, '<tr class="swh-revision-log-entry',
count=nb_log_entries)
self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
escape(prev_page_url))
if len(revision_log_sorted) > 2 * per_page:
self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
escape(next_page_url))
if len(revision_log_sorted) <= 2 * per_page:
return
resp = self.client.get(next_page_url)
prev_page_url = reverse('browse-revision-log',
url_args={'sha1_git': revision},
query_params={'offset': per_page,
'per_page': per_page})
next_page_url = reverse('browse-revision-log',
url_args={'sha1_git': revision},
query_params={'offset': 3 * per_page,
'per_page': per_page})
nb_log_entries = len(revision_log_sorted) - 2 * per_page
if nb_log_entries > per_page:
nb_log_entries = per_page
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision-log.html')
self.assertContains(resp, '<tr class="swh-revision-log-entry',
count=nb_log_entries)
self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
escape(prev_page_url))
if len(revision_log_sorted) > 3 * per_page:
self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
escape(next_page_url))
@given(revision(), unknown_revision(), new_origin())
def test_revision_request_errors(self, revision, unknown_revision,
new_origin):
url = reverse('browse-revision',
url_args={'sha1_git': unknown_revision})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp,
'Revision with sha1_git %s not found' %
unknown_revision, status_code=404)
url = reverse('browse-revision',
url_args={'sha1_git': revision},
query_params={'origin_type': new_origin['type'],
'origin': new_origin['url']})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'the origin mentioned in your request'
' appears broken', status_code=404)
@given(revision())
def test_revision_uppercase(self, revision):
url = reverse('browse-revision-uppercase-checksum',
url_args={'sha1_git': revision.upper()})
resp = self.client.get(url)
self.assertEqual(resp.status_code, 302)
redirect_url = reverse('browse-revision',
url_args={'sha1_git': revision})
self.assertEqual(resp['location'], redirect_url)
diff --git a/swh/web/tests/common/test_service.py b/swh/web/tests/common/test_service.py
index b614db10..cc2af791 100644
--- a/swh/web/tests/common/test_service.py
+++ b/swh/web/tests/common/test_service.py
@@ -1,886 +1,877 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import itertools
import pytest
import random
from collections import defaultdict
from hypothesis import given
from swh.model.hashutil import hash_to_bytes, hash_to_hex
from swh.model.from_disk import DentryPerms
from swh.web.common import service
from swh.web.common.exc import BadInputExc, NotFoundExc
from swh.web.tests.data import random_sha1, random_content
from swh.web.tests.strategies import (
content, contents, unknown_contents,
contents_with_ctags, origin, new_origin, visit_dates, directory,
release, revision, unknown_revision, revisions,
ancestor_revisions, non_ancestor_revisions, invalid_sha1, sha256,
revision_with_submodules, empty_directory,
new_revision, new_origins
)
from swh.web.tests.testcase import (
WebTestCase, ctags_json_missing, fossology_missing
)
class ServiceTestCase(WebTestCase):
@given(contents())
def test_lookup_multiple_hashes_all_present(self, contents):
input_data = []
expected_output = []
for cnt in contents:
input_data.append({'sha1': cnt['sha1']})
expected_output.append({'sha1': cnt['sha1'],
'found': True})
self.assertEqual(service.lookup_multiple_hashes(input_data),
expected_output)
@given(contents(), unknown_contents())
def test_lookup_multiple_hashes_some_missing(self, contents,
unknown_contents):
input_contents = list(itertools.chain(contents, unknown_contents))
random.shuffle(input_contents)
input_data = []
expected_output = []
for cnt in input_contents:
input_data.append({'sha1': cnt['sha1']})
expected_output.append({'sha1': cnt['sha1'],
'found': cnt in contents})
self.assertEqual(service.lookup_multiple_hashes(input_data),
expected_output)
def test_lookup_hash_does_not_exist(self):
unknown_content_ = random_content()
actual_lookup = service.lookup_hash('sha1_git:%s' %
unknown_content_['sha1_git'])
self.assertEqual(actual_lookup, {'found': None,
'algo': 'sha1_git'})
@given(content())
def test_lookup_hash_exist(self, content):
actual_lookup = service.lookup_hash('sha1:%s' % content['sha1'])
content_metadata = self.content_get_metadata(content['sha1'])
self.assertEqual({'found': content_metadata,
'algo': 'sha1'}, actual_lookup)
def test_search_hash_does_not_exist(self):
unknown_content_ = random_content()
actual_lookup = service.search_hash('sha1_git:%s' %
unknown_content_['sha1_git'])
self.assertEqual({'found': False}, actual_lookup)
@given(content())
def test_search_hash_exist(self, content):
actual_lookup = service.search_hash('sha1:%s' % content['sha1'])
self.assertEqual({'found': True}, actual_lookup)
@pytest.mark.skipif(ctags_json_missing,
reason="requires ctags with json output support")
@given(contents_with_ctags())
def test_lookup_content_ctags(self, contents_with_ctags):
content_sha1 = random.choice(contents_with_ctags['sha1s'])
self.content_add_ctags(content_sha1)
actual_ctags = \
list(service.lookup_content_ctags('sha1:%s' % content_sha1))
expected_data = list(self.content_get_ctags(content_sha1))
for ctag in expected_data:
ctag['id'] = content_sha1
self.assertEqual(actual_ctags, expected_data)
def test_lookup_content_ctags_no_hash(self):
unknown_content_ = random_content()
actual_ctags = \
list(service.lookup_content_ctags('sha1:%s' %
unknown_content_['sha1']))
self.assertEqual(actual_ctags, [])
@given(content())
def test_lookup_content_filetype(self, content):
self.content_add_mimetype(content['sha1'])
actual_filetype = service.lookup_content_filetype(content['sha1'])
expected_filetype = self.content_get_mimetype(content['sha1'])
self.assertEqual(actual_filetype, expected_filetype)
@pytest.mark.xfail # Language indexer is disabled.
@given(content())
def test_lookup_content_language(self, content):
self.content_add_language(content['sha1'])
actual_language = service.lookup_content_language(content['sha1'])
expected_language = self.content_get_language(content['sha1'])
self.assertEqual(actual_language, expected_language)
@given(contents_with_ctags())
def test_lookup_expression(self, contents_with_ctags):
per_page = 10
expected_ctags = []
for content_sha1 in contents_with_ctags['sha1s']:
if len(expected_ctags) == per_page:
break
self.content_add_ctags(content_sha1)
for ctag in self.content_get_ctags(content_sha1):
if len(expected_ctags) == per_page:
break
if ctag['name'] == contents_with_ctags['symbol_name']:
del ctag['id']
ctag['sha1'] = content_sha1
expected_ctags.append(ctag)
actual_ctags = \
list(service.lookup_expression(contents_with_ctags['symbol_name'],
last_sha1=None, per_page=10))
self.assertEqual(actual_ctags, expected_ctags)
def test_lookup_expression_no_result(self):
expected_ctags = []
actual_ctags = \
list(service.lookup_expression('barfoo', last_sha1=None,
per_page=10))
self.assertEqual(actual_ctags, expected_ctags)
@pytest.mark.skipif(fossology_missing,
reason="requires fossology-nomossa installed")
@given(content())
def test_lookup_content_license(self, content):
self.content_add_license(content['sha1'])
actual_license = service.lookup_content_license(content['sha1'])
expected_license = self.content_get_license(content['sha1'])
self.assertEqual(actual_license, expected_license)
def test_stat_counters(self):
actual_stats = service.stat_counters()
self.assertEqual(actual_stats, self.storage.stat_counters())
@given(new_origin(), visit_dates())
def test_lookup_origin_visits(self, new_origin, visit_dates):
origin_id = self.storage.origin_add_one(new_origin)
for ts in visit_dates:
self.storage.origin_visit_add(origin_id, ts)
actual_origin_visits = list(
service.lookup_origin_visits(origin_id, per_page=100))
expected_visits = self.origin_visit_get(origin_id)
self.assertEqual(actual_origin_visits, expected_visits)
@given(new_origin(), visit_dates())
def test_lookup_origin_visit(self, new_origin, visit_dates):
origin_id = self.storage.origin_add_one(new_origin)
visits = []
for ts in visit_dates:
visits.append(self.storage.origin_visit_add(origin_id, ts))
visit = random.choice(visits)['visit']
actual_origin_visit = service.lookup_origin_visit(origin_id, visit)
expected_visit = dict(self.storage.origin_visit_get_by(origin_id,
visit))
expected_visit['date'] = expected_visit['date'].isoformat()
expected_visit['metadata'] = {}
self.assertEqual(actual_origin_visit, expected_visit)
@pytest.mark.origin_id
@given(new_origin())
def test_lookup_origin_by_id(self, new_origin):
origin_id = self.storage.origin_add_one(new_origin)
actual_origin = service.lookup_origin({'id': origin_id})
expected_origin = self.storage.origin_get({'id': origin_id})
self.assertEqual(actual_origin, expected_origin)
@given(new_origin())
def test_lookup_origin(self, new_origin):
self.storage.origin_add_one(new_origin)
actual_origin = service.lookup_origin({'type': new_origin['type'],
'url': new_origin['url']})
expected_origin = self.storage.origin_get({'type': new_origin['type'],
'url': new_origin['url']})
self.assertEqual(actual_origin, expected_origin)
@given(invalid_sha1())
def test_lookup_release_ko_id_checksum_not_a_sha1(self, invalid_sha1):
with self.assertRaises(BadInputExc) as cm:
service.lookup_release(invalid_sha1)
self.assertIn('invalid checksum', cm.exception.args[0].lower())
@given(sha256())
def test_lookup_release_ko_id_checksum_too_long(self, sha256):
with self.assertRaises(BadInputExc) as cm:
service.lookup_release(sha256)
self.assertEqual('Only sha1_git is supported.', cm.exception.args[0])
@given(directory())
def test_lookup_directory_with_path_not_found(self, directory):
path = 'some/invalid/path/here'
with self.assertRaises(NotFoundExc) as cm:
service.lookup_directory_with_path(directory, path)
self.assertEqual('Directory entry with path %s from %s '
'not found' % (path, directory),
cm.exception.args[0])
@given(directory())
def test_lookup_directory_with_path_found(self, directory):
directory_content = self.directory_ls(directory)
directory_entry = random.choice(directory_content)
path = directory_entry['name']
actual_result = service.lookup_directory_with_path(directory, path)
self.assertEqual(actual_result, directory_entry)
@given(release())
def test_lookup_release(self, release):
actual_release = service.lookup_release(release)
self.assertEqual(actual_release,
self.release_get(release))
@given(revision(), invalid_sha1(), sha256())
def test_lookup_revision_with_context_ko_not_a_sha1(self, revision,
invalid_sha1,
sha256):
sha1_git_root = revision
sha1_git = invalid_sha1
with self.assertRaises(BadInputExc) as cm:
service.lookup_revision_with_context(sha1_git_root, sha1_git)
self.assertIn('Invalid checksum query string', cm.exception.args[0])
sha1_git = sha256
with self.assertRaises(BadInputExc) as cm:
service.lookup_revision_with_context(sha1_git_root, sha1_git)
self.assertIn('Only sha1_git is supported', cm.exception.args[0])
@given(revision(), unknown_revision())
def test_lookup_revision_with_context_ko_sha1_git_does_not_exist(
self, revision, unknown_revision):
sha1_git_root = revision
sha1_git = unknown_revision
with self.assertRaises(NotFoundExc) as cm:
service.lookup_revision_with_context(sha1_git_root, sha1_git)
self.assertIn('Revision %s not found' % sha1_git, cm.exception.args[0])
@given(revision(), unknown_revision())
def test_lookup_revision_with_context_ko_root_sha1_git_does_not_exist(
self, revision, unknown_revision):
sha1_git_root = unknown_revision
sha1_git = revision
with self.assertRaises(NotFoundExc) as cm:
service.lookup_revision_with_context(sha1_git_root, sha1_git)
self.assertIn('Revision root %s not found' % sha1_git_root,
cm.exception.args[0])
@given(ancestor_revisions())
def test_lookup_revision_with_context(self, ancestor_revisions):
sha1_git = ancestor_revisions['sha1_git']
root_sha1_git = ancestor_revisions['sha1_git_root']
for sha1_git_root in (root_sha1_git,
{'id': hash_to_bytes(root_sha1_git)}):
actual_revision = \
service.lookup_revision_with_context(sha1_git_root,
sha1_git)
children = []
for rev in self.revision_log(root_sha1_git):
for p_rev in rev['parents']:
p_rev_hex = hash_to_hex(p_rev)
if p_rev_hex == sha1_git:
children.append(rev['id'])
expected_revision = self.revision_get(sha1_git)
expected_revision['children'] = children
self.assertEqual(actual_revision, expected_revision)
@given(non_ancestor_revisions())
def test_lookup_revision_with_context_ko(self, non_ancestor_revisions):
sha1_git = non_ancestor_revisions['sha1_git']
root_sha1_git = non_ancestor_revisions['sha1_git_root']
with self.assertRaises(NotFoundExc) as cm:
service.lookup_revision_with_context(root_sha1_git, sha1_git)
self.assertIn('Revision %s is not an ancestor of %s' %
(sha1_git, root_sha1_git), cm.exception.args[0])
def test_lookup_directory_with_revision_not_found(self):
unknown_revision_ = random_sha1()
with self.assertRaises(NotFoundExc) as cm:
service.lookup_directory_with_revision(unknown_revision_)
self.assertIn('Revision %s not found' % unknown_revision_,
cm.exception.args[0])
def test_lookup_directory_with_revision_unknown_content(self):
unknown_content_ = random_content()
unknown_revision_ = random_sha1()
unknown_directory_ = random_sha1()
dir_path = 'README.md'
# Create a revision that points to a directory
# Which points to unknown content
revision = {
'author': {
'name': b'abcd',
'email': b'abcd@company.org',
'fullname': b'abcd abcd'
},
'committer': {
'email': b'aaaa@company.org',
'fullname': b'aaaa aaa',
'name': b'aaa'
},
'committer_date': {
'negative_utc': False,
'offset': 0,
'timestamp': 1437511651
},
'date': {
'negative_utc': False,
'offset': 0,
'timestamp': 1437511651
},
'message': b'bleh',
'metadata': [],
'parents': [],
'synthetic': False,
'type': 'file',
'id': hash_to_bytes(unknown_revision_),
'directory': hash_to_bytes(unknown_directory_)
}
# A directory that points to unknown content
dir = {
'id': hash_to_bytes(unknown_directory_),
'entries': [{
'name': bytes(dir_path.encode('utf-8')),
'type': 'file',
'target': hash_to_bytes(unknown_content_['sha1_git']),
'perms': DentryPerms.content
}]
}
# Add the directory and revision in mem
self.storage.directory_add([dir])
self.storage.revision_add([revision])
with self.assertRaises(NotFoundExc) as cm:
service.lookup_directory_with_revision(
unknown_revision_, dir_path)
self.assertIn('Content not found for revision %s' %
unknown_revision_,
cm.exception.args[0])
@given(revision())
def test_lookup_directory_with_revision_ko_path_to_nowhere(
self, revision):
invalid_path = 'path/to/something/unknown'
with self.assertRaises(NotFoundExc) as cm:
service.lookup_directory_with_revision(revision, invalid_path)
exception_text = cm.exception.args[0].lower()
self.assertIn('directory or file', exception_text)
self.assertIn(invalid_path, exception_text)
self.assertIn('revision %s' % revision, exception_text)
self.assertIn('not found', exception_text)
@given(revision_with_submodules())
def test_lookup_directory_with_revision_submodules(
self, revision_with_submodules):
rev_sha1_git = revision_with_submodules['rev_sha1_git']
rev_dir_path = revision_with_submodules['rev_dir_rev_path']
actual_data = service.lookup_directory_with_revision(
rev_sha1_git, rev_dir_path)
revision = self.revision_get(revision_with_submodules['rev_sha1_git'])
directory = self.directory_ls(revision['directory'])
rev_entry = next(e for e in directory if e['name'] == rev_dir_path)
expected_data = {
'content': self.revision_get(rev_entry['target']),
'path': rev_dir_path,
'revision': rev_sha1_git,
'type': 'rev'
}
self.assertEqual(actual_data, expected_data)
@given(revision())
def test_lookup_directory_with_revision_without_path(self, revision):
actual_directory_entries = \
service.lookup_directory_with_revision(revision)
revision_data = self.revision_get(revision)
expected_directory_entries = \
self.directory_ls(revision_data['directory'])
self.assertEqual(actual_directory_entries['type'], 'dir')
self.assertEqual(actual_directory_entries['content'],
expected_directory_entries)
@given(revision())
def test_lookup_directory_with_revision_with_path(self, revision):
revision_data = self.revision_get(revision)
dir_entries = [e for e in self.directory_ls(revision_data['directory'])
if e['type'] in ('file', 'dir')]
expected_dir_entry = random.choice(dir_entries)
actual_dir_entry = \
service.lookup_directory_with_revision(revision,
expected_dir_entry['name'])
self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
self.assertEqual(actual_dir_entry['revision'], revision)
self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
if actual_dir_entry['type'] == 'file':
del actual_dir_entry['content']['checksums']['blake2s256']
for key in ('checksums', 'status', 'length'):
self.assertEqual(actual_dir_entry['content'][key],
expected_dir_entry[key])
else:
sub_dir_entries = self.directory_ls(expected_dir_entry['target'])
self.assertEqual(actual_dir_entry['content'], sub_dir_entries)
@given(revision())
def test_lookup_directory_with_revision_with_path_to_file_and_data(
self, revision):
revision_data = self.revision_get(revision)
dir_entries = [e for e in self.directory_ls(revision_data['directory'])
if e['type'] == 'file']
expected_dir_entry = random.choice(dir_entries)
expected_data = \
self.content_get(expected_dir_entry['checksums']['sha1'])
actual_dir_entry = \
service.lookup_directory_with_revision(revision,
expected_dir_entry['name'],
with_data=True)
self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
self.assertEqual(actual_dir_entry['revision'], revision)
self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
del actual_dir_entry['content']['checksums']['blake2s256']
for key in ('checksums', 'status', 'length'):
self.assertEqual(actual_dir_entry['content'][key],
expected_dir_entry[key])
self.assertEqual(actual_dir_entry['content']['data'],
expected_data['data'])
@given(revision())
def test_lookup_revision(self, revision):
actual_revision = service.lookup_revision(revision)
self.assertEqual(actual_revision, self.revision_get(revision))
@given(new_revision())
def test_lookup_revision_invalid_msg(self, new_revision):
new_revision['message'] = b'elegant fix for bug \xff'
self.storage.revision_add([new_revision])
revision = service.lookup_revision(hash_to_hex(new_revision['id']))
self.assertEqual(revision['message'], None)
self.assertEqual(revision['message_decoding_failed'], True)
@given(new_revision())
def test_lookup_revision_msg_ok(self, new_revision):
self.storage.revision_add([new_revision])
revision_message = service.lookup_revision_message(
hash_to_hex(new_revision['id']))
self.assertEqual(revision_message,
{'message': new_revision['message']})
@given(new_revision())
def test_lookup_revision_msg_absent(self, new_revision):
del new_revision['message']
self.storage.revision_add([new_revision])
new_revision_id = hash_to_hex(new_revision['id'])
with self.assertRaises(NotFoundExc) as cm:
service.lookup_revision_message(new_revision_id)
self.assertEqual(
cm.exception.args[0],
'No message for revision with sha1_git %s.' % new_revision_id
)
def test_lookup_revision_msg_no_rev(self):
unknown_revision_ = random_sha1()
with self.assertRaises(NotFoundExc) as cm:
service.lookup_revision_message(unknown_revision_)
self.assertEqual(
cm.exception.args[0],
'Revision with sha1_git %s not found.' % unknown_revision_
)
@given(revisions())
def test_lookup_revision_multiple(self, revisions):
actual_revisions = list(service.lookup_revision_multiple(revisions))
expected_revisions = []
for rev in revisions:
expected_revisions.append(self.revision_get(rev))
self.assertEqual(actual_revisions, expected_revisions)
def test_lookup_revision_multiple_none_found(self):
unknown_revisions_ = [random_sha1(), random_sha1(), random_sha1()]
actual_revisions = \
list(service.lookup_revision_multiple(unknown_revisions_))
self.assertEqual(actual_revisions, [None] * len(unknown_revisions_))
@given(revision())
def test_lookup_revision_log(self, revision):
actual_revision_log = \
list(service.lookup_revision_log(revision, limit=25))
expected_revision_log = self.revision_log(revision, limit=25)
self.assertEqual(actual_revision_log, expected_revision_log)
def _get_origin_branches(self, origin):
origin_visit = self.origin_visit_get(origin['url'])[-1]
snapshot = self.snapshot_get(origin_visit['snapshot'])
branches = {k: v for (k, v) in snapshot['branches'].items()
if v['target_type'] == 'revision'}
return branches
@given(origin())
def test_lookup_revision_log_by(self, origin):
branches = self._get_origin_branches(origin)
branch_name = random.choice(list(branches.keys()))
actual_log = \
list(service.lookup_revision_log_by(origin['url'], branch_name,
None, limit=25))
expected_log = \
self.revision_log(branches[branch_name]['target'], limit=25)
self.assertEqual(actual_log, expected_log)
@given(origin())
def test_lookup_revision_log_by_notfound(self, origin):
with self.assertRaises(NotFoundExc):
service.lookup_revision_log_by(
origin['url'], 'unknown_branch_name', None, limit=100)
def test_lookup_content_raw_not_found(self):
unknown_content_ = random_content()
with self.assertRaises(NotFoundExc) as cm:
service.lookup_content_raw('sha1:' + unknown_content_['sha1'])
self.assertIn(cm.exception.args[0],
'Content with %s checksum equals to %s not found!' %
('sha1', unknown_content_['sha1']))
@given(content())
def test_lookup_content_raw(self, content):
actual_content = service.lookup_content_raw(
'sha256:%s' % content['sha256'])
expected_content = self.content_get(content['sha1'])
self.assertEqual(actual_content, expected_content)
def test_lookup_content_not_found(self):
unknown_content_ = random_content()
with self.assertRaises(NotFoundExc) as cm:
service.lookup_content('sha1:%s' % unknown_content_['sha1'])
self.assertIn(cm.exception.args[0],
'Content with %s checksum equals to %s not found!' %
('sha1', unknown_content_['sha1']))
@given(content())
def test_lookup_content_with_sha1(self, content):
actual_content = service.lookup_content(
'sha1:%s' % content['sha1'])
expected_content = self.content_get_metadata(content['sha1'])
self.assertEqual(actual_content, expected_content)
@given(content())
def test_lookup_content_with_sha256(self, content):
actual_content = service.lookup_content(
'sha256:%s' % content['sha256'])
expected_content = self.content_get_metadata(content['sha1'])
self.assertEqual(actual_content, expected_content)
- @given(revision())
- def test_lookup_person(self, revision):
-
- rev_data = self.revision_get(revision)
-
- actual_person = service.lookup_person(rev_data['author']['id'])
-
- self.assertEqual(actual_person, rev_data['author'])
-
def test_lookup_directory_bad_checksum(self):
with self.assertRaises(BadInputExc):
service.lookup_directory('directory_id')
def test_lookup_directory_not_found(self):
unknown_directory_ = random_sha1()
with self.assertRaises(NotFoundExc) as cm:
service.lookup_directory(unknown_directory_)
self.assertIn('Directory with sha1_git %s not found'
% unknown_directory_, cm.exception.args[0])
@given(directory())
def test_lookup_directory(self, directory):
actual_directory_ls = list(service.lookup_directory(
directory))
expected_directory_ls = self.directory_ls(directory)
self.assertEqual(actual_directory_ls, expected_directory_ls)
@given(empty_directory())
def test_lookup_directory_empty(self, empty_directory):
actual_directory_ls = list(service.lookup_directory(empty_directory))
self.assertEqual(actual_directory_ls, [])
@given(origin())
def test_lookup_revision_by_nothing_found(self, origin):
with self.assertRaises(NotFoundExc):
service.lookup_revision_by(
origin['url'], 'invalid-branch-name')
@given(origin())
def test_lookup_revision_by(self, origin):
branches = self._get_origin_branches(origin)
branch_name = random.choice(list(branches.keys()))
actual_revision = \
service.lookup_revision_by(origin['url'], branch_name, None)
expected_revision = \
self.revision_get(branches[branch_name]['target'])
self.assertEqual(actual_revision, expected_revision)
@given(origin(), revision())
def test_lookup_revision_with_context_by_ko(self, origin, revision):
with self.assertRaises(NotFoundExc):
service.lookup_revision_with_context_by(origin['url'],
'invalid-branch-name',
None,
revision)
@given(origin())
def test_lookup_revision_with_context_by(self, origin):
branches = self._get_origin_branches(origin)
branch_name = random.choice(list(branches.keys()))
root_rev = branches[branch_name]['target']
root_rev_log = self.revision_log(root_rev)
children = defaultdict(list)
for rev in root_rev_log:
for rev_p in rev['parents']:
children[rev_p].append(rev['id'])
rev = root_rev_log[-1]['id']
actual_root_rev, actual_rev = service.lookup_revision_with_context_by(
origin['url'], branch_name, None, rev)
expected_root_rev = self.revision_get(root_rev)
expected_rev = self.revision_get(rev)
expected_rev['children'] = children[rev]
self.assertEqual(actual_root_rev, expected_root_rev)
self.assertEqual(actual_rev, expected_rev)
def test_lookup_revision_through_ko_not_implemented(self):
with self.assertRaises(NotImplementedError):
service.lookup_revision_through({
'something-unknown': 10,
})
@given(origin())
def test_lookup_revision_through_with_context_by(self, origin):
branches = self._get_origin_branches(origin)
branch_name = random.choice(list(branches.keys()))
root_rev = branches[branch_name]['target']
root_rev_log = self.revision_log(root_rev)
rev = root_rev_log[-1]['id']
self.assertEqual(service.lookup_revision_through({
'origin_url': origin['url'],
'branch_name': branch_name,
'ts': None,
'sha1_git': rev
}),
service.lookup_revision_with_context_by(
origin['url'], branch_name, None, rev)
)
@given(origin())
def test_lookup_revision_through_with_revision_by(self, origin):
branches = self._get_origin_branches(origin)
branch_name = random.choice(list(branches.keys()))
self.assertEqual(service.lookup_revision_through({
'origin_url': origin['url'],
'branch_name': branch_name,
'ts': None,
}),
service.lookup_revision_by(
origin['url'], branch_name, None)
)
@given(ancestor_revisions())
def test_lookup_revision_through_with_context(self, ancestor_revisions):
sha1_git = ancestor_revisions['sha1_git']
sha1_git_root = ancestor_revisions['sha1_git_root']
self.assertEqual(service.lookup_revision_through({
'sha1_git_root': sha1_git_root,
'sha1_git': sha1_git,
}),
service.lookup_revision_with_context(
sha1_git_root, sha1_git)
)
@given(revision())
def test_lookup_revision_through_with_revision(self, revision):
self.assertEqual(service.lookup_revision_through({
'sha1_git': revision
}),
service.lookup_revision(revision)
)
@given(revision())
def test_lookup_directory_through_revision_ko_not_found(self, revision):
with self.assertRaises(NotFoundExc):
service.lookup_directory_through_revision(
{'sha1_git': revision}, 'some/invalid/path')
@given(revision())
def test_lookup_directory_through_revision_ok(self, revision):
revision_data = self.revision_get(revision)
dir_entries = [e for e in self.directory_ls(revision_data['directory'])
if e['type'] == 'file']
dir_entry = random.choice(dir_entries)
self.assertEqual(
service.lookup_directory_through_revision({'sha1_git': revision},
dir_entry['name']),
(revision,
service.lookup_directory_with_revision(
revision, dir_entry['name']))
)
@given(revision())
def test_lookup_directory_through_revision_ok_with_data(self, revision):
revision_data = self.revision_get(revision)
dir_entries = [e for e in self.directory_ls(revision_data['directory'])
if e['type'] == 'file']
dir_entry = random.choice(dir_entries)
self.assertEqual(
service.lookup_directory_through_revision({'sha1_git': revision},
dir_entry['name'],
with_data=True),
(revision,
service.lookup_directory_with_revision(
revision, dir_entry['name'], with_data=True))
)
@pytest.mark.origin_id
@given(new_origins(20))
def test_lookup_origins(self, new_origins):
nb_origins = len(new_origins)
expected_origins = self.storage.origin_add(new_origins)
origin_from_idx = random.randint(1, nb_origins-1) - 1
origin_from = expected_origins[origin_from_idx]['id']
max_origin_idx = expected_origins[-1]['id']
origin_count = random.randint(1, max_origin_idx - origin_from)
actual_origins = list(service.lookup_origins(origin_from,
origin_count))
expected_origins = list(self.storage.origin_get_range(origin_from,
origin_count))
self.assertEqual(actual_origins, expected_origins)
diff --git a/swh/web/tests/testcase.py b/swh/web/tests/testcase.py
index 8cd5e899..3155ce2b 100644
--- a/swh/web/tests/testcase.py
+++ b/swh/web/tests/testcase.py
@@ -1,153 +1,149 @@
# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import shutil
from subprocess import run, PIPE
from django.core.cache import cache
from hypothesis.extra.django import TestCase
from swh.model.hashutil import hash_to_bytes
from swh.web.common import converters
from swh.web.tests.data import get_tests_data, override_storages
ctags_json_missing = \
shutil.which('ctags') is None or \
b'+json' not in run(['ctags', '--version'], stdout=PIPE).stdout
fossology_missing = shutil.which('nomossa') is None
class WebTestCase(TestCase):
"""Base TestCase class for swh-web.
It is initialized with references to in-memory storages containing
raw test data.
It also defines class methods to retrieve that test data in
a JSON-serializable format in order to ease test implementation.
"""
def _pre_setup(self):
cache.clear()
tests_data = get_tests_data(reset=True)
self.storage = tests_data['storage']
self.idx_storage = tests_data['idx_storage']
self.mimetype_indexer = tests_data['mimetype_indexer']
self.license_indexer = tests_data['license_indexer']
self.ctags_indexer = tests_data['ctags_indexer']
# Update swh-web configuration to use the in-memory storages
# instantiated in the tests.data module
override_storages(tests_data['storage'], tests_data['idx_storage'])
super()._pre_setup()
def content_add_mimetype(self, cnt_id):
self.mimetype_indexer.run([hash_to_bytes(cnt_id)],
'update-dups')
def content_get_mimetype(self, cnt_id):
mimetype = next(self.idx_storage.content_mimetype_get(
[hash_to_bytes(cnt_id)]))
return converters.from_filetype(mimetype)
def content_add_language(self, cnt_id):
raise NotImplementedError('Language indexer is disabled.')
self.language_indexer.run([hash_to_bytes(cnt_id)],
'update-dups')
def content_get_language(self, cnt_id):
lang = next(self.idx_storage.content_language_get(
[hash_to_bytes(cnt_id)]))
return converters.from_swh(lang, hashess={'id'})
def content_add_license(self, cnt_id):
self.license_indexer.run([hash_to_bytes(cnt_id)],
'update-dups')
def content_get_license(self, cnt_id):
cnt_id_bytes = hash_to_bytes(cnt_id)
lic = next(self.idx_storage.content_fossology_license_get(
[cnt_id_bytes]))
return converters.from_swh({'id': cnt_id_bytes,
'facts': lic[cnt_id_bytes]},
hashess={'id'})
def content_add_ctags(self, cnt_id):
self.ctags_indexer.run([hash_to_bytes(cnt_id)],
'update-dups')
def content_get_ctags(self, cnt_id):
cnt_id_bytes = hash_to_bytes(cnt_id)
ctags = self.idx_storage.content_ctags_get([cnt_id_bytes])
for ctag in ctags:
yield converters.from_swh(ctag, hashess={'id'})
def content_get_metadata(self, cnt_id):
cnt_id_bytes = hash_to_bytes(cnt_id)
metadata = next(self.storage.content_get_metadata([cnt_id_bytes]))
return converters.from_swh(metadata,
hashess={'sha1', 'sha1_git', 'sha256',
'blake2s256'})
def content_get(self, cnt_id):
cnt_id_bytes = hash_to_bytes(cnt_id)
cnt = next(self.storage.content_get([cnt_id_bytes]))
return converters.from_content(cnt)
def directory_ls(self, dir_id):
cnt_id_bytes = hash_to_bytes(dir_id)
dir_content = map(converters.from_directory_entry,
self.storage.directory_ls(cnt_id_bytes))
return list(dir_content)
def release_get(self, rel_id):
rel_id_bytes = hash_to_bytes(rel_id)
rel_data = next(self.storage.release_get([rel_id_bytes]))
return converters.from_release(rel_data)
def revision_get(self, rev_id):
rev_id_bytes = hash_to_bytes(rev_id)
rev_data = next(self.storage.revision_get([rev_id_bytes]))
return converters.from_revision(rev_data)
def revision_log(self, rev_id, limit=None):
rev_id_bytes = hash_to_bytes(rev_id)
return list(map(converters.from_revision,
self.storage.revision_log([rev_id_bytes], limit=limit)))
def snapshot_get_latest(self, origin_id):
snp = self.storage.snapshot_get_latest(origin_id)
return converters.from_snapshot(snp)
def origin_get(self, origin_info):
origin = self.storage.origin_get(origin_info)
return converters.from_origin(origin)
def origin_visit_get(self, origin_id):
visits = self.storage.origin_visit_get(origin_id)
return list(map(converters.from_origin_visit, visits))
def origin_visit_get_by(self, origin_id, visit_id):
visit = self.storage.origin_visit_get_by(origin_id, visit_id)
return converters.from_origin_visit(visit)
def snapshot_get(self, snapshot_id):
snp = self.storage.snapshot_get(hash_to_bytes(snapshot_id))
return converters.from_snapshot(snp)
def snapshot_get_branches(self, snapshot_id, branches_from='',
branches_count=1000, target_types=None):
snp = self.storage.snapshot_get_branches(
hash_to_bytes(snapshot_id), branches_from.encode(),
branches_count, target_types)
return converters.from_snapshot(snp)
-
- def person_get(self, person_id):
- person = next(self.storage.person_get([person_id]))
- return converters.from_person(person)
