Page MenuHomeSoftware Heritage

D2242.id7860.diff
No OneTemporary

D2242.id7860.diff

This file is larger than 256 KB, so syntax highlighting was skipped.
diff --git a/pytest.ini b/pytest.ini
--- a/pytest.ini
+++ b/pytest.ini
@@ -2,5 +2,3 @@
norecursedirs = docs node_modules .tox
DJANGO_SETTINGS_MODULE = swh.web.settings.tests
-markers =
- origin_id: execute tests using an origin id (deselect with '-m "not origin_id"')
diff --git a/swh/web/tests/admin/test_origin_save.py b/swh/web/tests/admin/test_origin_save.py
--- a/swh/web/tests/admin/test_origin_save.py
+++ b/swh/web/tests/admin/test_origin_save.py
@@ -5,9 +5,9 @@
from urllib.parse import unquote
+import pytest
from django.contrib.auth import get_user_model
-from unittest.mock import patch
from swh.web.common.models import (
SaveAuthorizedOrigin, SaveUnauthorizedOrigin, SaveOriginRequest
@@ -18,7 +18,6 @@
SAVE_REQUEST_REJECTED, SAVE_TASK_NOT_YET_SCHEDULED
)
from swh.web.common.utils import reverse
-from swh.web.tests.testcase import WebTestCase
_user_name = 'swh-web-admin'
_user_mail = 'admin@swh-web.org'
@@ -28,205 +27,197 @@
_unauthorized_origin_url = 'https://www.softwareheritage.org/'
-class OriginSaveAdminTestCase(WebTestCase):
-
- @classmethod
- def setUpTestData(cls): # noqa: N802
- User = get_user_model() # noqa: N806
- user = User.objects.create_user(_user_name, _user_mail, _user_password)
- user.is_staff = True
- user.save()
- SaveAuthorizedOrigin.objects.create(url=_authorized_origin_url)
- SaveUnauthorizedOrigin.objects.create(url=_unauthorized_origin_url)
+pytestmark = pytest.mark.django_db
- def check_not_login(self, url):
- login_url = reverse('login', query_params={'next': url})
- response = self.client.post(url)
- self.assertEqual(response.status_code, 302)
- self.assertEqual(unquote(response.url), login_url)
-
- def test_add_authorized_origin_url(self):
- authorized_url = 'https://scm.adullact.net/anonscm/'
- self.assertEqual(can_save_origin(authorized_url),
- SAVE_REQUEST_PENDING)
-
- url = reverse('admin-origin-save-add-authorized-url',
- url_args={'origin_url': authorized_url})
-
- self.check_not_login(url)
-
- self.assertEqual(can_save_origin(authorized_url),
- SAVE_REQUEST_PENDING)
-
- self.client.login(username=_user_name, password=_user_password)
- response = self.client.post(url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(can_save_origin(authorized_url),
- SAVE_REQUEST_ACCEPTED)
-
- def test_remove_authorized_origin_url(self):
- self.assertEqual(can_save_origin(_authorized_origin_url),
- SAVE_REQUEST_ACCEPTED)
-
- url = reverse('admin-origin-save-remove-authorized-url',
- url_args={'origin_url': _authorized_origin_url})
-
- self.check_not_login(url)
-
- self.assertEqual(can_save_origin(_authorized_origin_url),
- SAVE_REQUEST_ACCEPTED)
-
- self.client.login(username=_user_name, password=_user_password)
- response = self.client.post(url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(can_save_origin(_authorized_origin_url),
- SAVE_REQUEST_PENDING)
-
- def test_add_unauthorized_origin_url(self):
- unauthorized_url = 'https://www.yahoo./'
- self.assertEqual(can_save_origin(unauthorized_url),
- SAVE_REQUEST_PENDING)
-
- url = reverse('admin-origin-save-add-unauthorized-url',
- url_args={'origin_url': unauthorized_url})
-
- self.check_not_login(url)
-
- self.assertEqual(can_save_origin(unauthorized_url),
- SAVE_REQUEST_PENDING)
-
- self.client.login(username=_user_name, password=_user_password)
- response = self.client.post(url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(can_save_origin(unauthorized_url),
- SAVE_REQUEST_REJECTED)
-
- def test_remove_unauthorized_origin_url(self):
- self.assertEqual(can_save_origin(_unauthorized_origin_url),
- SAVE_REQUEST_REJECTED)
-
- url = reverse('admin-origin-save-remove-unauthorized-url',
- url_args={'origin_url': _unauthorized_origin_url})
-
- self.check_not_login(url)
-
- self.assertEqual(can_save_origin(_unauthorized_origin_url),
- SAVE_REQUEST_REJECTED)
-
- self.client.login(username=_user_name, password=_user_password)
- response = self.client.post(url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(can_save_origin(_unauthorized_origin_url),
- SAVE_REQUEST_PENDING)
-
- @patch('swh.web.common.origin_save.scheduler')
- def test_accept_pending_save_request(self, mock_scheduler):
- visit_type = 'git'
- origin_url = 'https://v2.pikacode.com/bthate/botlib.git'
- save_request_url = reverse('api-1-save-origin',
- url_args={'visit_type': visit_type,
- 'origin_url': origin_url})
- response = self.client.post(save_request_url, data={},
- content_type='application/x-www-form-urlencoded') # noqa
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data['save_request_status'],
- SAVE_REQUEST_PENDING)
-
- accept_request_url = reverse('admin-origin-save-request-accept',
- url_args={'visit_type': visit_type,
- 'origin_url': origin_url})
-
- self.check_not_login(accept_request_url)
-
- tasks_data = [
- {
- 'priority': 'high',
- 'policy': 'oneshot',
- 'type': 'load-git',
- 'arguments': {
- 'kwargs': {
- 'repo_url': origin_url
- },
- 'args': []
+
+@pytest.fixture(autouse=True)
+def populated_db():
+ User = get_user_model()
+ user = User.objects.create_user(_user_name, _user_mail, _user_password)
+ user.is_staff = True
+ user.save()
+ SaveAuthorizedOrigin.objects.create(url=_authorized_origin_url)
+ SaveUnauthorizedOrigin.objects.create(url=_unauthorized_origin_url)
+
+
+def check_not_login(client, url):
+ login_url = reverse('login', query_params={'next': url})
+ response = client.post(url)
+ assert response.status_code == 302
+ assert unquote(response.url) == login_url
+
+
+def test_add_authorized_origin_url(client):
+ authorized_url = 'https://scm.adullact.net/anonscm/'
+ assert can_save_origin(authorized_url) == SAVE_REQUEST_PENDING
+
+ url = reverse('admin-origin-save-add-authorized-url',
+ url_args={'origin_url': authorized_url})
+
+ check_not_login(client, url)
+
+ assert can_save_origin(authorized_url) == SAVE_REQUEST_PENDING
+
+ client.login(username=_user_name, password=_user_password)
+ response = client.post(url)
+ assert response.status_code == 200
+ assert can_save_origin(authorized_url) == SAVE_REQUEST_ACCEPTED
+
+
+def test_remove_authorized_origin_url(client):
+ assert can_save_origin(_authorized_origin_url) == SAVE_REQUEST_ACCEPTED
+
+ url = reverse('admin-origin-save-remove-authorized-url',
+ url_args={'origin_url': _authorized_origin_url})
+
+ check_not_login(client, url)
+
+ assert can_save_origin(_authorized_origin_url) == SAVE_REQUEST_ACCEPTED
+
+ client.login(username=_user_name, password=_user_password)
+ response = client.post(url)
+ assert response.status_code == 200
+ assert can_save_origin(_authorized_origin_url) == SAVE_REQUEST_PENDING
+
+
+def test_add_unauthorized_origin_url(client):
+ unauthorized_url = 'https://www.yahoo./'
+ assert can_save_origin(unauthorized_url) == SAVE_REQUEST_PENDING
+
+ url = reverse('admin-origin-save-add-unauthorized-url',
+ url_args={'origin_url': unauthorized_url})
+
+ check_not_login(client, url)
+
+ assert can_save_origin(unauthorized_url) == SAVE_REQUEST_PENDING
+
+ client.login(username=_user_name, password=_user_password)
+ response = client.post(url)
+ assert response.status_code == 200
+ assert can_save_origin(unauthorized_url) == SAVE_REQUEST_REJECTED
+
+
+def test_remove_unauthorized_origin_url(client):
+ assert can_save_origin(_unauthorized_origin_url) == SAVE_REQUEST_REJECTED
+
+ url = reverse('admin-origin-save-remove-unauthorized-url',
+ url_args={'origin_url': _unauthorized_origin_url})
+
+ check_not_login(client, url)
+
+ assert can_save_origin(_unauthorized_origin_url) == SAVE_REQUEST_REJECTED
+
+ client.login(username=_user_name, password=_user_password)
+ response = client.post(url)
+ assert response.status_code == 200
+ assert can_save_origin(_unauthorized_origin_url) == SAVE_REQUEST_PENDING
+
+
+def test_accept_pending_save_request(client, mocker):
+ mock_scheduler = mocker.patch('swh.web.common.origin_save.scheduler')
+ visit_type = 'git'
+ origin_url = 'https://v2.pikacode.com/bthate/botlib.git'
+ save_request_url = reverse('api-1-save-origin',
+ url_args={'visit_type': visit_type,
+ 'origin_url': origin_url})
+ response = client.post(save_request_url, data={},
+ content_type='application/x-www-form-urlencoded')
+ assert response.status_code == 200
+ assert response.data['save_request_status'] == SAVE_REQUEST_PENDING
+
+ accept_request_url = reverse('admin-origin-save-request-accept',
+ url_args={'visit_type': visit_type,
+ 'origin_url': origin_url})
+
+ check_not_login(client, accept_request_url)
+
+ tasks_data = [
+ {
+ 'priority': 'high',
+ 'policy': 'oneshot',
+ 'type': 'load-git',
+ 'arguments': {
+ 'kwargs': {
+ 'repo_url': origin_url
},
- 'status': 'next_run_not_scheduled',
- 'id': 1,
- }
- ]
-
- mock_scheduler.create_tasks.return_value = tasks_data
- mock_scheduler.get_tasks.return_value = tasks_data
-
- self.client.login(username=_user_name, password=_user_password)
- response = self.client.post(accept_request_url)
- self.assertEqual(response.status_code, 200)
-
- response = self.client.get(save_request_url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data[0]['save_request_status'],
- SAVE_REQUEST_ACCEPTED)
- self.assertEqual(response.data[0]['save_task_status'],
- SAVE_TASK_NOT_YET_SCHEDULED)
-
- @patch('swh.web.common.origin_save.scheduler')
- def test_reject_pending_save_request(self, mock_scheduler):
- visit_type = 'git'
- origin_url = 'https://wikipedia.com'
- save_request_url = reverse('api-1-save-origin',
- url_args={'visit_type': visit_type,
- 'origin_url': origin_url})
- response = self.client.post(save_request_url, data={},
- content_type='application/x-www-form-urlencoded') # noqa
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data['save_request_status'],
- SAVE_REQUEST_PENDING)
-
- reject_request_url = reverse('admin-origin-save-request-reject',
- url_args={'visit_type': visit_type,
- 'origin_url': origin_url})
-
- self.check_not_login(reject_request_url)
-
- self.client.login(username=_user_name, password=_user_password)
- response = self.client.post(reject_request_url)
- self.assertEqual(response.status_code, 200)
-
- tasks_data = [
- {
- 'priority': 'high',
- 'policy': 'oneshot',
- 'type': 'load-git',
- 'arguments': {
- 'kwargs': {
- 'repo_url': origin_url
- },
- 'args': []
+ 'args': []
+ },
+ 'status': 'next_run_not_scheduled',
+ 'id': 1,
+ }
+ ]
+
+ mock_scheduler.create_tasks.return_value = tasks_data
+ mock_scheduler.get_tasks.return_value = tasks_data
+
+ client.login(username=_user_name, password=_user_password)
+ response = client.post(accept_request_url)
+ assert response.status_code == 200
+
+ response = client.get(save_request_url)
+ assert response.status_code == 200
+ assert response.data[0]['save_request_status'] == SAVE_REQUEST_ACCEPTED
+ assert response.data[0]['save_task_status'] == SAVE_TASK_NOT_YET_SCHEDULED
+
+
+def test_reject_pending_save_request(client, mocker):
+ mock_scheduler = mocker.patch('swh.web.common.origin_save.scheduler')
+ visit_type = 'git'
+ origin_url = 'https://wikipedia.com'
+ save_request_url = reverse('api-1-save-origin',
+ url_args={'visit_type': visit_type,
+ 'origin_url': origin_url})
+ response = client.post(save_request_url, data={},
+ content_type='application/x-www-form-urlencoded')
+ assert response.status_code == 200
+ assert response.data['save_request_status'] == SAVE_REQUEST_PENDING
+
+ reject_request_url = reverse('admin-origin-save-request-reject',
+ url_args={'visit_type': visit_type,
+ 'origin_url': origin_url})
+
+ check_not_login(client, reject_request_url)
+
+ client.login(username=_user_name, password=_user_password)
+ response = client.post(reject_request_url)
+ assert response.status_code == 200
+
+ tasks_data = [
+ {
+ 'priority': 'high',
+ 'policy': 'oneshot',
+ 'type': 'load-git',
+ 'arguments': {
+ 'kwargs': {
+ 'repo_url': origin_url
},
- 'status': 'next_run_not_scheduled',
- 'id': 1,
- }
- ]
-
- mock_scheduler.create_tasks.return_value = tasks_data
- mock_scheduler.get_tasks.return_value = tasks_data
-
- response = self.client.get(save_request_url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data[0]['save_request_status'],
- SAVE_REQUEST_REJECTED)
-
- def test_remove_save_request(self):
- sor = SaveOriginRequest.objects.create(visit_type='git',
- origin_url='https://wikipedia.com', # noqa
- status=SAVE_REQUEST_PENDING)
- self.assertEqual(SaveOriginRequest.objects.count(), 1)
-
- remove_request_url = reverse('admin-origin-save-request-remove',
- url_args={'sor_id': sor.id})
-
- self.check_not_login(remove_request_url)
-
- self.client.login(username=_user_name, password=_user_password)
- response = self.client.post(remove_request_url)
- self.assertEqual(response.status_code, 200)
- self.assertEqual(SaveOriginRequest.objects.count(), 0)
+ 'args': []
+ },
+ 'status': 'next_run_not_scheduled',
+ 'id': 1,
+ }
+ ]
+
+ mock_scheduler.create_tasks.return_value = tasks_data
+ mock_scheduler.get_tasks.return_value = tasks_data
+
+ response = client.get(save_request_url)
+ assert response.status_code == 200
+ assert response.data[0]['save_request_status'] == SAVE_REQUEST_REJECTED
+
+
+def test_remove_save_request(client):
+ sor = SaveOriginRequest.objects.create(visit_type='git',
+ origin_url='https://wikipedia.com', # noqa
+ status=SAVE_REQUEST_PENDING)
+ assert SaveOriginRequest.objects.count() == 1
+
+ remove_request_url = reverse('admin-origin-save-request-remove',
+ url_args={'sor_id': sor.id})
+
+ check_not_login(client, remove_request_url)
+
+ client.login(username=_user_name, password=_user_password)
+ response = client.post(remove_request_url)
+ assert response.status_code == 200
+ assert SaveOriginRequest.objects.count() == 0
diff --git a/swh/web/tests/api/test_api_lookup.py b/swh/web/tests/api/test_api_lookup.py
--- a/swh/web/tests/api/test_api_lookup.py
+++ b/swh/web/tests/api/test_api_lookup.py
@@ -1,116 +1,115 @@
-# Copyright (C) 2015-2018 The Software Heritage developers
+# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
+import pytest
+
from swh.web.common.exc import NotFoundExc
from swh.web.api.views import utils
-from swh.web.tests.testcase import WebTestCase
-
-
-class ApiLookupTestCase(WebTestCase):
-
- def test_genericapi_lookup_nothing_is_found(self):
- # given
- def test_generic_lookup_fn(sha1, another_unused_arg):
- assert another_unused_arg == 'unused_arg'
- assert sha1 == 'sha1'
- return None
-
- # when
- with self.assertRaises(NotFoundExc) as cm:
- utils.api_lookup(
- test_generic_lookup_fn, 'sha1', 'unused_arg',
- notfound_msg='This will be raised because None is returned.')
-
- self.assertIn('This will be raised because None is returned.',
- cm.exception.args[0])
-
- def test_generic_api_map_are_enriched_and_transformed_to_list(self):
- # given
- def test_generic_lookup_fn_1(criteria0, param0, param1):
- assert criteria0 == 'something'
- return map(lambda x: x + 1, [1, 2, 3])
-
- # when
- actual_result = utils.api_lookup(
- test_generic_lookup_fn_1, 'something', 'some param 0',
- 'some param 1',
- notfound_msg=('This is not the error message you are looking for. '
- 'Move along.'),
- enrich_fn=lambda x: x * 2)
-
- self.assertEqual(actual_result, [4, 6, 8])
-
- def test_generic_api_list_are_enriched_too(self):
- # given
- def test_generic_lookup_fn_2(crit):
- assert crit == 'something'
- return ['a', 'b', 'c']
-
- # when
- actual_result = utils.api_lookup(
- test_generic_lookup_fn_2, 'something',
- notfound_msg=('Not the error message you are looking for, it is. '
- 'Along, you move!'),
- enrich_fn=lambda x: ''. join(['=', x, '=']))
-
- self.assertEqual(actual_result, ['=a=', '=b=', '=c='])
-
- def test_generic_api_generator_are_enriched_and_returned_as_list(self):
- # given
- def test_generic_lookup_fn_3(crit):
- assert crit == 'crit'
- return (i for i in [4, 5, 6])
-
- # when
- actual_result = utils.api_lookup(
- test_generic_lookup_fn_3, 'crit',
- notfound_msg='Move!',
- enrich_fn=lambda x: x - 1)
-
- self.assertEqual(actual_result, [3, 4, 5])
-
- def test_generic_api_simple_data_are_enriched_and_returned_too(self):
- # given
- def test_generic_lookup_fn_4(crit):
- assert crit == '123'
- return {'a': 10}
-
- def test_enrich_data(x):
- x['a'] = x['a'] * 10
- return x
-
- # when
- actual_result = utils.api_lookup(
- test_generic_lookup_fn_4, '123',
- notfound_msg='Nothing to do',
- enrich_fn=test_enrich_data)
-
- self.assertEqual(actual_result, {'a': 100})
-
- def test_api_lookup_not_found(self):
- # when
- with self.assertRaises(NotFoundExc) as e:
- utils.api_lookup(
- lambda x: None, 'something',
- notfound_msg='this is the error message raised as it is None')
-
- self.assertEqual(e.exception.args[0],
- 'this is the error message raised as it is None')
-
- def test_api_lookup_with_result(self):
- # when
- actual_result = utils.api_lookup(
- lambda x: x + '!', 'something',
- notfound_msg='this is the error which won\'t be used here')
-
- self.assertEqual(actual_result, 'something!')
-
- def test_api_lookup_with_result_as_map(self):
- # when
- actual_result = utils.api_lookup(
- lambda x: map(lambda y: y+1, x), [1, 2, 3],
- notfound_msg='this is the error which won\'t be used here')
-
- self.assertEqual(actual_result, [2, 3, 4])
+
+
+def test_genericapi_lookup_nothing_is_found():
+
+ def test_generic_lookup_fn(sha1, another_unused_arg):
+ assert another_unused_arg == 'unused_arg'
+ assert sha1 == 'sha1'
+ return None
+
+ notfound_msg = 'This will be raised because None is returned.'
+
+ with pytest.raises(NotFoundExc) as e:
+ utils.api_lookup(
+ test_generic_lookup_fn, 'sha1', 'unused_arg',
+ notfound_msg=notfound_msg)
+
+ assert e.match(notfound_msg)
+
+
+def test_generic_api_map_are_enriched_and_transformed_to_list():
+
+ def test_generic_lookup_fn_1(criteria0, param0, param1):
+ assert criteria0 == 'something'
+ return map(lambda x: x + 1, [1, 2, 3])
+
+ actual_result = utils.api_lookup(
+ test_generic_lookup_fn_1, 'something', 'some param 0',
+ 'some param 1',
+ notfound_msg=('This is not the error message you are looking for. '
+ 'Move along.'),
+ enrich_fn=lambda x: x * 2)
+
+ assert actual_result == [4, 6, 8]
+
+
+def test_generic_api_list_are_enriched_too():
+
+ def test_generic_lookup_fn_2(crit):
+ assert crit == 'something'
+ return ['a', 'b', 'c']
+
+ actual_result = utils.api_lookup(
+ test_generic_lookup_fn_2, 'something',
+ notfound_msg=('Not the error message you are looking for, it is. '
+ 'Along, you move!'),
+ enrich_fn=lambda x: ''. join(['=', x, '=']))
+
+ assert actual_result == ['=a=', '=b=', '=c=']
+
+
+def test_generic_api_generator_are_enriched_and_returned_as_list():
+
+ def test_generic_lookup_fn_3(crit):
+ assert crit == 'crit'
+ return (i for i in [4, 5, 6])
+
+ actual_result = utils.api_lookup(
+ test_generic_lookup_fn_3, 'crit',
+ notfound_msg='Move!',
+ enrich_fn=lambda x: x - 1)
+
+ assert actual_result == [3, 4, 5]
+
+
+def test_generic_api_simple_data_are_enriched_and_returned_too():
+
+ def test_generic_lookup_fn_4(crit):
+ assert crit == '123'
+ return {'a': 10}
+
+ def test_enrich_data(x):
+ x['a'] = x['a'] * 10
+ return x
+
+ actual_result = utils.api_lookup(
+ test_generic_lookup_fn_4, '123',
+ notfound_msg='Nothing to do',
+ enrich_fn=test_enrich_data)
+
+ assert actual_result == {'a': 100}
+
+
+def test_api_lookup_not_found():
+ notfound_msg = 'this is the error message raised as it is None'
+ with pytest.raises(NotFoundExc) as e:
+ utils.api_lookup(
+ lambda x: None, 'something',
+ notfound_msg=notfound_msg)
+
+ assert e.match(notfound_msg)
+
+
+def test_api_lookup_with_result():
+ actual_result = utils.api_lookup(
+ lambda x: x + '!', 'something',
+ notfound_msg='this is the error which won\'t be used here')
+
+ assert actual_result == 'something!'
+
+
+def test_api_lookup_with_result_as_map():
+ actual_result = utils.api_lookup(
+ lambda x: map(lambda y: y+1, x), [1, 2, 3],
+ notfound_msg='this is the error which won\'t be used here')
+
+ assert actual_result == [2, 3, 4]
diff --git a/swh/web/tests/api/test_apidoc.py b/swh/web/tests/api/test_apidoc.py
--- a/swh/web/tests/api/test_apidoc.py
+++ b/swh/web/tests/api/test_apidoc.py
@@ -1,9 +1,10 @@
-# Copyright (C) 2015-2018 The Software Heritage developers
+# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from rest_framework.test import APITestCase
+import pytest
+
from rest_framework.response import Response
from swh.storage.exc import StorageDBError, StorageAPIError
@@ -11,41 +12,47 @@
from swh.web.api.apidoc import api_doc, _parse_httpdomain_doc
from swh.web.api.apiurls import api_route
from swh.web.common.exc import BadInputExc, ForbiddenExc, NotFoundExc
-from swh.web.tests.testcase import WebTestCase
+from swh.web.tests.django_asserts import assert_template_used
-# flake8: noqa
httpdomain_doc = """
.. http:get:: /api/1/revision/(sha1_git)/
Get information about a revision in the archive.
- Revisions are identified by **sha1** checksums, compatible with Git commit identifiers.
- See :func:`swh.model.identifiers.revision_identifier` in our data model module for details
- about how they are computed.
+ Revisions are identified by **sha1** checksums, compatible with Git commit
+ identifiers.
+ See :func:`swh.model.identifiers.revision_identifier` in our data model
+ module for details about how they are computed.
- :param string sha1_git: hexadecimal representation of the revision **sha1_git** identifier
+ :param string sha1_git: hexadecimal representation of the revision
+ **sha1_git** identifier
:reqheader Accept: the requested response content type,
either ``application/json`` (default) or ``application/yaml``
- :resheader Content-Type: this depends on :http:header:`Accept` header of request
+ :resheader Content-Type: this depends on :http:header:`Accept` header
+ of request
:>json object author: information about the author of the revision
:>json object committer: information about the committer of the revision
- :>json string committer_date: ISO representation of the commit date (in UTC)
+ :>json string committer_date: ISO representation of the commit date
+ (in UTC)
:>json string date: ISO representation of the revision date (in UTC)
:>json string directory: the unique identifier that revision points to
- :>json string directory_url: link to :http:get:`/api/1/directory/(sha1_git)/[(path)/]`
- to get information about the directory associated to the revision
+ :>json string directory_url: link to
+ :http:get:`/api/1/directory/(sha1_git)/[(path)/]` to get information
+ about the directory associated to the revision
:>json string id: the revision unique identifier
- :>json boolean merge: whether or not the revision corresponds to a merge commit
+ :>json boolean merge: whether or not the revision corresponds to a merge
+ commit
:>json string message: the message associated to the revision
- :>json array parents: the parents of the revision, i.e. the previous revisions
- that head directly to it, each entry of that array contains an unique parent
- revision identifier but also a link to :http:get:`/api/1/revision/(sha1_git)/`
- to get more information about it
+ :>json array parents: the parents of the revision, i.e. the previous
+ revisions that head directly to it, each entry of that array contains
+ an unique parent revision identifier but also a link to
+ :http:get:`/api/1/revision/(sha1_git)/` to get more information
+ about it
:>json string type: the type of the revision
- **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`, :http:method:`options`
+ **Allowed HTTP Methods:** :http:method:`get`, :http:method:`head`
:statuscode 200: no error
:statuscode 400: an invalid **sha1_git** value has been provided
@@ -55,243 +62,252 @@
.. parsed-literal::
- $ curl -i :swh_web_api:`revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/`
+ :swh_web_api:`revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/`
"""
-class APIDocTestCase(WebTestCase, APITestCase):
+exception_http_code = {
+ BadInputExc: 400,
+ ForbiddenExc: 403,
+ NotFoundExc: 404,
+ Exception: 500,
+ StorageAPIError: 503,
+ StorageDBError: 503,
+}
- exception_http_code = {
- BadInputExc: 400,
- ForbiddenExc: 403,
- NotFoundExc: 404,
- Exception: 500,
- StorageAPIError: 503,
- StorageDBError: 503,
- }
- def test_apidoc_nodoc_failure(self):
- with self.assertRaises(Exception):
- @api_doc('/my/nodoc/url/')
- def apidoc_nodoc_tester(request, arga=0, argb=0):
- return Response(arga + argb)
-
- @staticmethod
- @api_route(r'/some/(?P<myarg>[0-9]+)/(?P<myotherarg>[0-9]+)/',
- 'some-doc-route')
- @api_doc('/some/doc/route/')
- def apidoc_route(request, myarg, myotherarg, akw=0):
- """
- Sample doc
- """
- return {'result': int(myarg) + int(myotherarg) + akw}
-
- def test_apidoc_route_doc(self):
- # when
- rv = self.client.get('/api/1/some/doc/route/')
-
- # then
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertTemplateUsed('api/apidoc.html')
-
- def test_apidoc_route_fn(self):
-
- # when
- rv = self.client.get('/api/1/some/1/1/')
-
- # then
- self.assertEqual(rv.status_code, 200, rv.data)
-
- @staticmethod
- @api_route(r'/test/error/(?P<exc_name>.+)/',
- 'test-error')
- @api_doc('/test/error/')
- def apidoc_test_error_route(request, exc_name):
- """
- Sample doc
- """
- for e in APIDocTestCase.exception_http_code.keys():
- if e.__name__ == exc_name:
- raise e('Error')
-
- def test_apidoc_error(self):
- for exc, code in self.exception_http_code.items():
- # when
- rv = self.client.get('/api/1/test/error/%s/' % exc.__name__)
-
- # then
- self.assertEqual(rv.status_code, code)
-
- @staticmethod
- @api_route(r'/some/full/(?P<myarg>[0-9]+)/(?P<myotherarg>[0-9]+)/',
- 'some-complete-doc-route')
- @api_doc('/some/complete/doc/route/')
- def apidoc_full_stack(request, myarg, myotherarg, akw=0):
- """
- Sample doc
- """
- return {'result': int(myarg) + int(myotherarg) + akw}
-
- def test_apidoc_full_stack_doc(self):
- # when
- rv = self.client.get('/api/1/some/complete/doc/route/')
-
- # then
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertTemplateUsed('api/apidoc.html')
-
- def test_apidoc_full_stack_fn(self):
- # when
- rv = self.client.get('/api/1/some/full/1/1/')
-
- # then
- self.assertEqual(rv.status_code, 200, rv.data)
-
- def test_api_doc_parse_httpdomain(self):
- doc_data = {
- 'description': '',
- 'urls': [],
- 'args': [],
- 'params': [],
- 'resheaders': [],
- 'reqheaders': [],
- 'return_type': '',
- 'returns': [],
- 'status_codes': [],
- 'examples': []
- }
+def test_apidoc_nodoc_failure():
+ with pytest.raises(Exception):
+ @api_doc('/my/nodoc/url/')
+ def apidoc_nodoc_tester(request, arga=0, argb=0):
+ return Response(arga + argb)
+
+
+@api_route(r'/some/(?P<myarg>[0-9]+)/(?P<myotherarg>[0-9]+)/',
+ 'some-doc-route')
+@api_doc('/some/doc/route/')
+def apidoc_route(request, myarg, myotherarg, akw=0):
+ """
+ Sample doc
+ """
+ return {'result': int(myarg) + int(myotherarg) + akw}
+
+# remove deprecation warnings related to docutils
+@pytest.mark.filterwarnings(
+ 'ignore:.*U.*mode is deprecated:DeprecationWarning')
+def test_apidoc_route_doc(client):
+ rv = client.get('/api/1/some/doc/route/', HTTP_ACCEPT='text/html')
+
+ assert rv.status_code == 200, rv.content
+ assert_template_used(rv, 'api/apidoc.html')
+
+
+def test_apidoc_route_fn(api_client):
+ rv = api_client.get('/api/1/some/1/1/')
+
+ assert rv.status_code == 200, rv.data
+
+
+@api_route(r'/test/error/(?P<exc_name>.+)/', 'test-error')
+@api_doc('/test/error/')
+def apidoc_test_error_route(request, exc_name):
+ """
+ Sample doc
+ """
+ for e in exception_http_code.keys():
+ if e.__name__ == exc_name:
+ raise e('Error')
+
+
+def test_apidoc_error(api_client):
+ for exc, code in exception_http_code.items():
+ rv = api_client.get('/api/1/test/error/%s/' % exc.__name__)
+
+ assert rv.status_code == code, rv.data
+
- _parse_httpdomain_doc(httpdomain_doc, doc_data)
+@api_route(r'/some/full/(?P<myarg>[0-9]+)/(?P<myotherarg>[0-9]+)/',
+ 'some-complete-doc-route')
+@api_doc('/some/complete/doc/route/')
+def apidoc_full_stack(request, myarg, myotherarg, akw=0):
+ """
+ Sample doc
+ """
+ return {'result': int(myarg) + int(myotherarg) + akw}
- expected_urls = [{
- 'rule': '/api/1/revision/ **\\(sha1_git\\)** /',
- 'methods': ['GET', 'HEAD', 'OPTIONS']
- }]
- self.assertIn('urls', doc_data)
- self.assertEqual(doc_data['urls'], expected_urls)
+# remove deprecation warnings related to docutils
+@pytest.mark.filterwarnings(
+ 'ignore:.*U.*mode is deprecated:DeprecationWarning')
+def test_apidoc_full_stack_doc(client):
+ rv = client.get('/api/1/some/complete/doc/route/', HTTP_ACCEPT='text/html')
+ assert rv.status_code == 200, rv.content
+ assert_template_used(rv, 'api/apidoc.html')
- expected_description = 'Get information about a revision in the archive. \
-Revisions are identified by **sha1** checksums, compatible with Git commit \
-identifiers. See **swh.model.identifiers.revision_identifier** in our data \
-model module for details about how they are computed.'
- self.assertIn('description', doc_data)
- self.assertEqual(doc_data['description'], expected_description)
- expected_args = [{
- 'name': 'sha1_git',
+def test_apidoc_full_stack_fn(api_client):
+ rv = api_client.get('/api/1/some/full/1/1/')
+
+ assert rv.status_code == 200, rv.data
+
+
+def test_api_doc_parse_httpdomain():
+ doc_data = {
+ 'description': '',
+ 'urls': [],
+ 'args': [],
+ 'params': [],
+ 'resheaders': [],
+ 'reqheaders': [],
+ 'return_type': '',
+ 'returns': [],
+ 'status_codes': [],
+ 'examples': []
+ }
+
+ _parse_httpdomain_doc(httpdomain_doc, doc_data)
+
+ expected_urls = [{
+ 'rule': '/api/1/revision/ **\\(sha1_git\\)** /',
+ 'methods': ['GET', 'HEAD']
+ }]
+
+ assert 'urls' in doc_data
+ assert doc_data['urls'] == expected_urls
+
+ expected_description = ('Get information about a revision in the archive. '
+ 'Revisions are identified by **sha1** checksums, '
+ 'compatible with Git commit identifiers. See '
+ '**swh.model.identifiers.revision_identifier** in '
+ 'our data model module for details about how they '
+ 'are computed.')
+
+ assert 'description' in doc_data
+ assert doc_data['description'] == expected_description
+
+ expected_args = [{
+ 'name': 'sha1_git',
+ 'type': 'string',
+ 'doc': ('hexadecimal representation of the revision '
+ '**sha1_git** identifier')
+ }]
+
+ assert 'args' in doc_data
+ assert doc_data['args'] == expected_args
+
+ expected_params = []
+ assert 'params' in doc_data
+ assert doc_data['params'] == expected_params
+
+ expected_reqheaders = [{
+ 'doc': ('the requested response content type, either '
+ '``application/json`` or ``application/yaml``'),
+ 'name': 'Accept'
+ }]
+
+ assert 'reqheaders' in doc_data
+ assert doc_data['reqheaders'] == expected_reqheaders
+
+ expected_resheaders = [{
+ 'doc': 'this depends on **Accept** header of request',
+ 'name': 'Content-Type'
+ }]
+
+ assert 'resheaders' in doc_data
+ assert doc_data['resheaders'] == expected_resheaders
+
+ expected_statuscodes = [
+ {
+ 'code': '200',
+ 'doc': 'no error'
+ },
+ {
+ 'code': '400',
+ 'doc': 'an invalid **sha1_git** value has been provided'
+ },
+ {
+ 'code': '404',
+ 'doc': 'requested revision can not be found in the archive'
+ }
+ ]
+
+ assert 'status_codes' in doc_data
+ assert doc_data['status_codes'] == expected_statuscodes
+
+ expected_return_type = 'object'
+
+ assert 'return_type' in doc_data
+ assert doc_data['return_type'] in expected_return_type
+
+ expected_returns = [
+ {
+ 'name': 'author',
+ 'type': 'object',
+ 'doc': 'information about the author of the revision'
+ },
+ {
+ 'name': 'committer',
+ 'type': 'object',
+ 'doc': 'information about the committer of the revision'
+ },
+ {
+ 'name': 'committer_date',
+ 'type': 'string',
+ 'doc': 'ISO representation of the commit date (in UTC)'
+ },
+ {
+ 'name': 'date',
+ 'type': 'string',
+ 'doc': 'ISO representation of the revision date (in UTC)'
+ },
+ {
+ 'name': 'directory',
'type': 'string',
- 'doc': 'hexadecimal representation of the revision **sha1_git** identifier'
- }]
-
- self.assertIn('args', doc_data)
- self.assertEqual(doc_data['args'], expected_args)
-
- expected_params = []
- self.assertIn('params', doc_data)
- self.assertEqual(doc_data['params'], expected_params)
-
- expected_reqheaders = [{
- 'doc': 'the requested response content type, either ``application/json`` or ``application/yaml``',
- 'name': 'Accept'
- }]
-
- self.assertIn('reqheaders', doc_data)
- self.assertEqual(doc_data['reqheaders'], expected_reqheaders)
-
- expected_resheaders = [{
- 'doc': 'this depends on **Accept** header of request',
- 'name': 'Content-Type'
- }]
-
- self.assertIn('resheaders', doc_data)
- self.assertEqual(doc_data['resheaders'], expected_resheaders)
-
- expected_statuscodes = [
- {
- 'code': '200',
- 'doc': 'no error'
- },
- {
- 'code': '400',
- 'doc': 'an invalid **sha1_git** value has been provided'
- },
- {
- 'code': '404',
- 'doc': 'requested revision can not be found in the archive'
- }
- ]
-
- self.assertIn('status_codes', doc_data)
- self.assertEqual(doc_data['status_codes'], expected_statuscodes)
-
- expected_return_type = 'object'
-
- self.assertIn('return_type', doc_data)
- self.assertEqual(doc_data['return_type'], expected_return_type)
-
- expected_returns = [
- {
- 'name': 'author',
- 'type': 'object',
- 'doc': 'information about the author of the revision'
- },
- {
- 'name': 'committer',
- 'type': 'object',
- 'doc': 'information about the committer of the revision'
- },
- {
- 'name': 'committer_date',
- 'type': 'string',
- 'doc': 'ISO representation of the commit date (in UTC)'
- },
- {
- 'name': 'date',
- 'type': 'string',
- 'doc': 'ISO representation of the revision date (in UTC)'
- },
- {
- 'name': 'directory',
- 'type': 'string',
- 'doc': 'the unique identifier that revision points to'
- },
- {
- 'name': 'directory_url',
- 'type': 'string',
- 'doc': 'link to `</api/1/directory/>`_ to get information about the directory associated to the revision'
- },
- {
- 'name': 'id',
- 'type': 'string',
- 'doc': 'the revision unique identifier'
- },
- {
- 'name': 'merge',
- 'type': 'boolean',
- 'doc': 'whether or not the revision corresponds to a merge commit'
- },
- {
- 'name': 'message',
- 'type': 'string',
- 'doc': 'the message associated to the revision'
- },
- {
- 'name': 'parents',
- 'type': 'array',
- 'doc': 'the parents of the revision, i.e. the previous revisions that head directly to it, each entry of that array contains an unique parent revision identifier but also a link to `</api/1/revision/>`_ to get more information about it'
- },
- {
- 'name': 'type',
- 'type': 'string',
- 'doc': 'the type of the revision'
- }
- ]
-
- self.assertIn('returns', doc_data)
- self.assertEqual(doc_data['returns'], expected_returns)
-
- expected_examples = ['/api/1/revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/']
-
- self.assertIn('examples', doc_data)
- self.assertEqual(doc_data['examples'], expected_examples)
+ 'doc': 'the unique identifier that revision points to'
+ },
+ {
+ 'name': 'directory_url',
+ 'type': 'string',
+ 'doc': ('link to `</api/1/directory/>`_ to get information about '
+ 'the directory associated to the revision')
+ },
+ {
+ 'name': 'id',
+ 'type': 'string',
+ 'doc': 'the revision unique identifier'
+ },
+ {
+ 'name': 'merge',
+ 'type': 'boolean',
+ 'doc': 'whether or not the revision corresponds to a merge commit'
+ },
+ {
+ 'name': 'message',
+ 'type': 'string',
+ 'doc': 'the message associated to the revision'
+ },
+ {
+ 'name': 'parents',
+ 'type': 'array',
+ 'doc': ('the parents of the revision, i.e. the previous revisions '
+ 'that head directly to it, each entry of that array '
+ 'contains an unique parent revision identifier but also a '
+ 'link to `</api/1/revision/>`_ to get more information '
+ 'about it')
+ },
+ {
+ 'name': 'type',
+ 'type': 'string',
+ 'doc': 'the type of the revision'
+ }
+ ]
+
+ assert 'returns' in doc_data
+ assert doc_data['returns'] == expected_returns
+
+ expected_examples = [
+ '/api/1/revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/'
+ ]
+
+ assert 'examples' in doc_data
+ assert doc_data['examples'] == expected_examples
diff --git a/swh/web/tests/api/test_apiresponse.py b/swh/web/tests/api/test_apiresponse.py
--- a/swh/web/tests/api/test_apiresponse.py
+++ b/swh/web/tests/api/test_apiresponse.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2015-2018 The Software Heritage developers
+# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
@@ -7,163 +7,146 @@
from rest_framework.test import APIRequestFactory
-from unittest.mock import patch
-
from swh.web.api.apiresponse import (
compute_link_header, transform, make_api_response,
filter_by_fields
)
-from swh.web.tests.testcase import WebTestCase
api_request_factory = APIRequestFactory()
-class SWHComputeLinkHeaderTest(WebTestCase):
- def test_compute_link_header(self):
- rv = {
- 'headers': {'link-next': 'foo', 'link-prev': 'bar'},
- 'results': [1, 2, 3]
- }
- options = {}
+def test_compute_link_header():
+ rv = {
+ 'headers': {'link-next': 'foo', 'link-prev': 'bar'},
+ 'results': [1, 2, 3]
+ }
+ options = {}
+
+ headers = compute_link_header(rv, options)
+
+ assert headers == {'Link': '<foo>; rel="next",<bar>; rel="previous"'}
+
+
+def test_compute_link_header_nothing_changed():
+ rv = {}
+ options = {}
+
+ headers = compute_link_header(rv, options)
+
+ assert headers == {}
+
- # when
- headers = compute_link_header(
- rv, options)
+def test_compute_link_header_nothing_changed_2():
+ rv = {'headers': {}}
+ options = {}
- self.assertEqual(headers, {
- 'Link': '<foo>; rel="next",<bar>; rel="previous"',
- })
+ headers = compute_link_header(rv, options)
- def test_compute_link_header_nothing_changed(self):
- rv = {}
- options = {}
+ assert headers == {}
- # when
- headers = compute_link_header(
- rv, options)
- self.assertEqual(headers, {})
+def test_transform_only_return_results_1():
+ rv = {'results': {'some-key': 'some-value'}}
- def test_compute_link_header_nothing_changed_2(self):
- rv = {'headers': {}}
- options = {}
+ assert transform(rv) == {'some-key': 'some-value'}
- # when
- headers = compute_link_header(
- rv, options)
- self.assertEqual(headers, {})
+def test_transform_only_return_results_2():
+ rv = {'headers': {'something': 'do changes'},
+ 'results': {'some-key': 'some-value'}}
+ assert transform(rv) == {'some-key': 'some-value'}
-class SWHTransformProcessorTest(WebTestCase):
- def test_transform_only_return_results_1(self):
- rv = {'results': {'some-key': 'some-value'}}
- self.assertEqual(transform(rv), {'some-key': 'some-value'})
+def test_transform_do_remove_headers():
+ rv = {'headers': {'something': 'do changes'},
+ 'some-key': 'some-value'}
- def test_transform_only_return_results_2(self):
- rv = {'headers': {'something': 'do changes'},
- 'results': {'some-key': 'some-value'}}
+ assert transform(rv) == {'some-key': 'some-value'}
- self.assertEqual(transform(rv), {'some-key': 'some-value'})
- def test_transform_do_remove_headers(self):
- rv = {'headers': {'something': 'do changes'},
- 'some-key': 'some-value'}
+def test_transform_do_nothing():
+ rv = {'some-key': 'some-value'}
- self.assertEqual(transform(rv), {'some-key': 'some-value'})
+ assert transform(rv) == {'some-key': 'some-value'}
- def test_transform_do_nothing(self):
- rv = {'some-key': 'some-value'}
- self.assertEqual(transform(rv), {'some-key': 'some-value'})
+def test_swh_multi_response_mimetype(mocker):
+ mock_shorten_path = mocker.patch('swh.web.api.apiresponse.shorten_path')
+ mock_filter = mocker.patch('swh.web.api.apiresponse.filter_by_fields')
+ mock_json = mocker.patch('swh.web.api.apiresponse.json')
+ data = {
+ 'data': [12, 34],
+ 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'
+ }
-class RendererTestCase(WebTestCase):
+ mock_filter.return_value = data
+ mock_shorten_path.return_value = 'my_short_path'
- @patch('swh.web.api.apiresponse.json')
- @patch('swh.web.api.apiresponse.filter_by_fields')
- @patch('swh.web.api.apiresponse.shorten_path')
- def test_swh_multi_response_mimetype(self, mock_shorten_path,
- mock_filter, mock_json):
- # given
- data = {
- 'data': [12, 34],
- 'id': 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'
- }
+ accepted_response_formats = {'html': 'text/html',
+ 'yaml': 'application/yaml',
+ 'json': 'application/json'}
- mock_filter.return_value = data
- mock_shorten_path.return_value = 'my_short_path'
+ for format in accepted_response_formats:
- accepted_response_formats = {'html': 'text/html',
- 'yaml': 'application/yaml',
- 'json': 'application/json'}
+ request = api_request_factory.get('/api/test/path/')
- for format in accepted_response_formats:
+ mime_type = accepted_response_formats[format]
+ setattr(request, 'accepted_media_type', mime_type)
- request = api_request_factory.get('/api/test/path/')
+ if mime_type == 'text/html':
- mime_type = accepted_response_formats[format]
- setattr(request, 'accepted_media_type', mime_type)
+ expected_data = {
+ 'response_data': json.dumps(data),
+ 'request': {
+ 'path': request.path,
+ 'method': request.method,
+ 'absolute_uri': request.build_absolute_uri()
+ },
+ 'headers_data': {},
+ 'heading': 'my_short_path',
+ 'status_code': 200
+ }
- if mime_type == 'text/html':
+ mock_json.dumps.return_value = json.dumps(data)
+ else:
+ expected_data = data
- expected_data = {
- 'response_data': json.dumps(data),
- 'request': {
- 'path': request.path,
- 'method': request.method,
- 'absolute_uri': request.build_absolute_uri()
- },
- 'headers_data': {},
- 'heading': 'my_short_path',
- 'status_code': 200
- }
+ rv = make_api_response(request, data)
- mock_json.dumps.return_value = json.dumps(data)
- else:
- expected_data = data
+ mock_filter.assert_called_with(request, data)
- # when
+ assert rv.status_code == 200, rv.data
+ assert rv.data == expected_data
+ if mime_type == 'text/html':
+ assert rv.template_name == 'api/apidoc.html'
- rv = make_api_response(request, data)
- # then
- mock_filter.assert_called_with(request, data)
- self.assertEqual(rv.data, expected_data)
- self.assertEqual(rv.status_code, 200, rv.data)
- if mime_type == 'text/html':
- self.assertEqual(rv.template_name, 'api/apidoc.html')
+def test_swh_filter_renderer_do_nothing():
+ input_data = {'a': 'some-data'}
- def test_swh_filter_renderer_do_nothing(self):
- # given
- input_data = {'a': 'some-data'}
+ request = api_request_factory.get('/api/test/path/', data={})
+ setattr(request, 'query_params', request.GET)
- request = api_request_factory.get('/api/test/path/', data={})
- setattr(request, 'query_params', request.GET)
+ actual_data = filter_by_fields(request, input_data)
- # when
- actual_data = filter_by_fields(request, input_data)
+ assert actual_data == input_data
- # then
- self.assertEqual(actual_data, input_data)
- @patch('swh.web.api.apiresponse.utils.filter_field_keys')
- def test_swh_filter_renderer_do_filter(self, mock_ffk):
- # given
- mock_ffk.return_value = {'a': 'some-data'}
+def test_swh_filter_renderer_do_filter(mocker):
+ mock_ffk = mocker.patch('swh.web.api.apiresponse.utils.filter_field_keys')
+ mock_ffk.return_value = {'a': 'some-data'}
- request = api_request_factory.get('/api/test/path/',
- data={'fields': 'a,c'})
- setattr(request, 'query_params', request.GET)
+ request = api_request_factory.get('/api/test/path/',
+ data={'fields': 'a,c'})
+ setattr(request, 'query_params', request.GET)
- input_data = {'a': 'some-data',
- 'b': 'some-other-data'}
+ input_data = {'a': 'some-data',
+ 'b': 'some-other-data'}
- # when
- actual_data = filter_by_fields(request, input_data)
+ actual_data = filter_by_fields(request, input_data)
- # then
- self.assertEqual(actual_data, {'a': 'some-data'})
+ assert actual_data == {'a': 'some-data'}
- mock_ffk.assert_called_once_with(input_data, {'a', 'c'})
+ mock_ffk.assert_called_once_with(input_data, {'a', 'c'})
diff --git a/swh/web/tests/api/test_utils.py b/swh/web/tests/api/test_utils.py
--- a/swh/web/tests/api/test_utils.py
+++ b/swh/web/tests/api/test_utils.py
@@ -3,586 +3,543 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from unittest.mock import patch, call
-
from swh.web.api import utils
-from swh.web.tests.testcase import WebTestCase
-
-
-class UtilsTestCase(WebTestCase):
- def setUp(self):
- self.maxDiff = None
- self.url_map = [dict(rule='/other/<slug>',
- methods=set(['GET', 'POST', 'HEAD']),
- endpoint='foo'),
- dict(rule='/some/old/url/<slug>',
- methods=set(['GET', 'POST']),
- endpoint='blablafn'),
- dict(rule='/other/old/url/<int:id>',
- methods=set(['GET', 'HEAD']),
- endpoint='bar'),
- dict(rule='/other',
- methods=set([]),
- endpoint=None),
- dict(rule='/other2',
- methods=set([]),
- endpoint=None)]
- self.sample_content_hashes = {
- 'blake2s256': ('791e07fcea240ade6dccd0a9309141673'
- 'c31242cae9c237cf3855e151abc78e9'),
- 'sha1': 'dc2830a9e72f23c1dfebef4413003221baa5fb62',
- 'sha1_git': 'fe95a46679d128ff167b7c55df5d02356c5a1ae1',
- 'sha256': ('b5c7fe0536f44ef60c8780b6065d30bca74a5cd06'
- 'd78a4a71ba1ad064770f0c9')
- }
- def test_filter_field_keys_dict_unknown_keys(self):
- # when
- actual_res = utils.filter_field_keys(
- {'directory': 1, 'file': 2, 'link': 3},
- {'directory1', 'file2'})
-
- # then
- self.assertEqual(actual_res, {})
-
- def test_filter_field_keys_dict(self):
- # when
- actual_res = utils.filter_field_keys(
- {'directory': 1, 'file': 2, 'link': 3},
- {'directory', 'link'})
-
- # then
- self.assertEqual(actual_res, {'directory': 1, 'link': 3})
-
- def test_filter_field_keys_list_unknown_keys(self):
- # when
- actual_res = utils.filter_field_keys(
- [{'directory': 1, 'file': 2, 'link': 3},
- {'1': 1, '2': 2, 'link': 3}],
- {'d'})
-
- # then
- self.assertEqual(actual_res, [{}, {}])
-
- def test_filter_field_keys_map(self):
- # when
- actual_res = utils.filter_field_keys(
- map(lambda x: {'i': x['i']+1, 'j': x['j']},
- [{'i': 1, 'j': None},
- {'i': 2, 'j': None},
- {'i': 3, 'j': None}]),
- {'i'})
-
- # then
- self.assertEqual(list(actual_res), [{'i': 2}, {'i': 3}, {'i': 4}])
-
- def test_filter_field_keys_list(self):
- # when
- actual_res = utils.filter_field_keys(
- [{'directory': 1, 'file': 2, 'link': 3},
- {'dir': 1, 'fil': 2, 'lin': 3}],
- {'directory', 'dir'})
-
- # then
- self.assertEqual(actual_res, [{'directory': 1}, {'dir': 1}])
-
- def test_filter_field_keys_other(self):
- # given
- input_set = {1, 2}
-
- # when
- actual_res = utils.filter_field_keys(input_set, {'a', '1'})
-
- # then
- self.assertEqual(actual_res, input_set)
-
- def test_person_to_string(self):
- self.assertEqual(utils.person_to_string(dict(name='raboof',
- email='foo@bar')),
- 'raboof <foo@bar>')
-
- def test_enrich_release_0(self):
- # when
- actual_release = utils.enrich_release({})
-
- # then
- self.assertEqual(actual_release, {})
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_release_1(self, mock_django_reverse):
- # given
-
- def reverse_test_context(view_name, url_args):
- if view_name == 'api-1-content':
- id = url_args['q']
- return '/api/1/content/%s/' % id
- else:
- raise ValueError(
- 'This should not happened so fail if it does.')
-
- mock_django_reverse.side_effect = reverse_test_context
-
- # when
- actual_release = utils.enrich_release({
- 'target': '123',
- 'target_type': 'content',
- 'author': {
- 'id': 100,
- 'name': 'author release name',
- 'email': 'author@email',
- },
- })
-
- # then
- self.assertEqual(actual_release, {
- 'target': '123',
- 'target_type': 'content',
- 'target_url': '/api/1/content/sha1_git:123/',
- 'author': {
- 'id': 100,
- 'name': 'author release name',
- 'email': 'author@email',
- },
- })
+
+url_map = [
+ {
+ 'rule': '/other/<slug>',
+ 'methods': set(['GET', 'POST', 'HEAD']),
+ 'endpoint': 'foo'
+ },
+ {
+ 'rule': '/some/old/url/<slug>',
+ 'methods': set(['GET', 'POST']),
+ 'endpoint': 'blablafn'
+ },
+ {
+ 'rule': '/other/old/url/<int:id>',
+ 'methods': set(['GET', 'HEAD']),
+ 'endpoint': 'bar'
+ },
+ {
+ 'rule': '/other',
+ 'methods': set([]),
+ 'endpoint': None
+ },
+ {
+ 'rule': '/other2',
+ 'methods': set([]),
+ 'endpoint': None
+ }
+]
+
+sample_content_hashes = {
+ 'blake2s256': ('791e07fcea240ade6dccd0a9309141673'
+ 'c31242cae9c237cf3855e151abc78e9'),
+ 'sha1': 'dc2830a9e72f23c1dfebef4413003221baa5fb62',
+ 'sha1_git': 'fe95a46679d128ff167b7c55df5d02356c5a1ae1',
+ 'sha256': ('b5c7fe0536f44ef60c8780b6065d30bca74a5cd06'
+ 'd78a4a71ba1ad064770f0c9')
+}
+
+
+def test_filter_field_keys_dict_unknown_keys():
+ actual_res = utils.filter_field_keys(
+ {'directory': 1, 'file': 2, 'link': 3},
+ {'directory1', 'file2'})
+
+ assert actual_res == {}
+
+
+def test_filter_field_keys_dict():
+ actual_res = utils.filter_field_keys(
+ {'directory': 1, 'file': 2, 'link': 3},
+ {'directory', 'link'})
+
+ assert actual_res == {'directory': 1, 'link': 3}
+
+
+def test_filter_field_keys_list_unknown_keys():
+ actual_res = utils.filter_field_keys(
+ [{'directory': 1, 'file': 2, 'link': 3},
+ {'1': 1, '2': 2, 'link': 3}], {'d'})
+
+ assert actual_res == [{}, {}]
+
+
+def test_filter_field_keys_map():
+ actual_res = utils.filter_field_keys(
+ map(lambda x: {'i': x['i']+1, 'j': x['j']},
+ [{'i': 1, 'j': None},
+ {'i': 2, 'j': None},
+ {'i': 3, 'j': None}]), {'i'})
+
+ assert list(actual_res) == [{'i': 2}, {'i': 3}, {'i': 4}]
+
+
+def test_filter_field_keys_list():
+ actual_res = utils.filter_field_keys(
+ [{'directory': 1, 'file': 2, 'link': 3},
+ {'dir': 1, 'fil': 2, 'lin': 3}],
+ {'directory', 'dir'})
+
+ assert actual_res == [{'directory': 1}, {'dir': 1}]
+
+
+def test_filter_field_keys_other():
+ input_set = {1, 2}
+
+ actual_res = utils.filter_field_keys(input_set, {'a', '1'})
+
+ assert actual_res == input_set
+
+
+def test_person_to_string():
+ assert utils.person_to_string({'name': 'raboof',
+ 'email': 'foo@bar'}) == 'raboof <foo@bar>'
+
+
+def test_enrich_release_0():
+ actual_release = utils.enrich_release({})
+
+ assert actual_release == {}
+
+
+def test_enrich_release_1(mocker):
+
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+
+ def reverse_test_context(view_name, url_args):
+ if view_name == 'api-1-content':
+ id = url_args['q']
+ return '/api/1/content/%s/' % id
+ else:
+ raise ValueError('This should not happened so fail if it does.')
+
+ mock_django_reverse.side_effect = reverse_test_context
+
+ actual_release = utils.enrich_release({
+ 'target': '123',
+ 'target_type': 'content',
+ 'author': {
+ 'id': 100,
+ 'name': 'author release name',
+ 'email': 'author@email',
+ },
+ })
+
+ assert actual_release == {
+ 'target': '123',
+ 'target_type': 'content',
+ 'target_url': '/api/1/content/sha1_git:123/',
+ 'author': {
+ 'id': 100,
+ 'name': 'author release name',
+ 'email': 'author@email',
+ },
+ }
+
+ mock_django_reverse.assert_has_calls([
+ mocker.call('api-1-content', url_args={'q': 'sha1_git:123'}),
+ ])
+
+
+def test_enrich_release_2(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.return_value = '/api/1/dir/23/'
+
+ actual_release = utils.enrich_release({'target': '23',
+ 'target_type': 'directory'})
+
+ assert actual_release == {
+ 'target': '23',
+ 'target_type': 'directory',
+ 'target_url': '/api/1/dir/23/'
+ }
+
+ mock_django_reverse.assert_called_once_with('api-1-directory',
+ url_args={'sha1_git': '23'})
+
+
+def test_enrich_release_3(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.return_value = '/api/1/rev/3/'
+
+ actual_release = utils.enrich_release({'target': '3',
+ 'target_type': 'revision'})
+
+ assert actual_release == {
+ 'target': '3',
+ 'target_type': 'revision',
+ 'target_url': '/api/1/rev/3/'
+ }
+
+ mock_django_reverse.assert_called_once_with('api-1-revision',
+ url_args={'sha1_git': '3'})
+
+
+def test_enrich_release_4(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.return_value = '/api/1/rev/4/'
+
+ actual_release = utils.enrich_release({'target': '4',
+ 'target_type': 'release'})
+
+ assert actual_release == {
+ 'target': '4',
+ 'target_type': 'release',
+ 'target_url': '/api/1/rev/4/'
+ }
+
+ mock_django_reverse.assert_called_once_with('api-1-release',
+ url_args={'sha1_git': '4'})
+
+
+def test_enrich_directory_no_type(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ assert utils.enrich_directory({'id': 'dir-id'}) == {'id': 'dir-id'}
+
+ mock_django_reverse.return_value = '/api/content/sha1_git:123/'
+
+ actual_directory = utils.enrich_directory({
+ 'id': 'dir-id',
+ 'type': 'file',
+ 'target': '123',
+ })
+
+ assert actual_directory == {
+ 'id': 'dir-id',
+ 'type': 'file',
+ 'target': '123',
+ 'target_url': '/api/content/sha1_git:123/',
+ }
+
+ mock_django_reverse.assert_called_once_with(
+ 'api-1-content', url_args={'q': 'sha1_git:123'})
+
+
+def test_enrich_directory_with_context_and_type_file(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.return_value = '/api/content/sha1_git:123/'
+
+ actual_directory = utils.enrich_directory({
+ 'id': 'dir-id',
+ 'type': 'file',
+ 'name': 'hy',
+ 'target': '789',
+ }, context_url='/api/revision/revsha1/directory/prefix/path/')
+
+ assert actual_directory == {
+ 'id': 'dir-id',
+ 'type': 'file',
+ 'name': 'hy',
+ 'target': '789',
+ 'target_url': '/api/content/sha1_git:123/',
+ 'file_url': '/api/revision/revsha1/directory'
+ '/prefix/path/hy/'
+ }
+
+ mock_django_reverse.assert_called_once_with(
+ 'api-1-content', url_args={'q': 'sha1_git:789'})
+
+
+def test_enrich_directory_with_context_and_type_dir(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.return_value = '/api/directory/456/'
+
+ actual_directory = utils.enrich_directory({
+ 'id': 'dir-id',
+ 'type': 'dir',
+ 'name': 'emacs-42',
+ 'target_type': 'file',
+ 'target': '456',
+ }, context_url='/api/revision/origin/2/directory/some/prefix/path/')
+
+ assert actual_directory == {
+ 'id': 'dir-id',
+ 'type': 'dir',
+ 'target_type': 'file',
+ 'name': 'emacs-42',
+ 'target': '456',
+ 'target_url': '/api/directory/456/',
+ 'dir_url': '/api/revision/origin/2/directory'
+ '/some/prefix/path/emacs-42/'
+ }
+
+ mock_django_reverse.assert_called_once_with('api-1-directory',
+ url_args={'sha1_git': '456'})
+
+
+def test_enrich_content_without_hashes():
+ assert utils.enrich_content({'id': '123'}) == {'id': '123'}
+
+
+def test_enrich_content_with_hashes(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ for algo, hash in sample_content_hashes.items():
+
+ query_string = '%s:%s' % (algo, hash)
+
+ mock_django_reverse.side_effect = [
+ '/api/content/%s/raw/' % query_string,
+ '/api/filetype/%s/' % query_string,
+ '/api/language/%s/' % query_string,
+ '/api/license/%s/' % query_string
+ ]
+
+ enriched_content = utils.enrich_content({algo: hash},
+ query_string=query_string)
+
+ assert enriched_content == {
+ algo: hash,
+ 'data_url': '/api/content/%s/raw/' % query_string,
+ 'filetype_url': '/api/filetype/%s/' % query_string,
+ 'language_url': '/api/language/%s/' % query_string,
+ 'license_url': '/api/license/%s/' % query_string,
+ }
mock_django_reverse.assert_has_calls([
- call('api-1-content', url_args={'q': 'sha1_git:123'}),
+ mocker.call('api-1-content-raw', url_args={'q': query_string}),
+ mocker.call('api-1-content-filetype',
+ url_args={'q': query_string}),
+ mocker.call('api-1-content-language',
+ url_args={'q': query_string}),
+ mocker.call('api-1-content-license',
+ url_args={'q': query_string}),
])
- @patch('swh.web.api.utils.reverse')
- def test_enrich_release_2(self, mock_django_reverse):
- # given
- mock_django_reverse.return_value = '/api/1/dir/23/'
-
- # when
- actual_release = utils.enrich_release({'target': '23',
- 'target_type': 'directory'})
-
- # then
- self.assertEqual(actual_release, {
- 'target': '23',
- 'target_type': 'directory',
- 'target_url': '/api/1/dir/23/'
- })
-
- mock_django_reverse.assert_called_once_with('api-1-directory',
- url_args={'sha1_git': '23'}) # noqa
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_release_3(self, mock_django_reverse):
- # given
- mock_django_reverse.return_value = '/api/1/rev/3/'
-
- # when
- actual_release = utils.enrich_release({'target': '3',
- 'target_type': 'revision'})
-
- # then
- self.assertEqual(actual_release, {
- 'target': '3',
- 'target_type': 'revision',
- 'target_url': '/api/1/rev/3/'
- })
-
- mock_django_reverse.assert_called_once_with('api-1-revision',
- url_args={'sha1_git': '3'})
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_release_4(self, mock_django_reverse):
- # given
- mock_django_reverse.return_value = '/api/1/rev/4/'
-
- # when
- actual_release = utils.enrich_release({'target': '4',
- 'target_type': 'release'})
-
- # then
- self.assertEqual(actual_release, {
- 'target': '4',
- 'target_type': 'release',
- 'target_url': '/api/1/rev/4/'
- })
-
- mock_django_reverse.assert_called_once_with('api-1-release',
- url_args={'sha1_git': '4'})
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_directory_no_type(self, mock_django_reverse):
- # when/then
- self.assertEqual(utils.enrich_directory({'id': 'dir-id'}),
- {'id': 'dir-id'})
-
- # given
- mock_django_reverse.return_value = '/api/content/sha1_git:123/'
-
- # when
- actual_directory = utils.enrich_directory({
- 'id': 'dir-id',
- 'type': 'file',
- 'target': '123',
- })
-
- # then
- self.assertEqual(actual_directory, {
- 'id': 'dir-id',
- 'type': 'file',
- 'target': '123',
- 'target_url': '/api/content/sha1_git:123/',
- })
-
- mock_django_reverse.assert_called_once_with(
- 'api-1-content', url_args={'q': 'sha1_git:123'})
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_directory_with_context_and_type_file(
- self, mock_django_reverse,
- ):
- # given
- mock_django_reverse.return_value = '/api/content/sha1_git:123/'
-
- # when
- actual_directory = utils.enrich_directory({
- 'id': 'dir-id',
- 'type': 'file',
- 'name': 'hy',
- 'target': '789',
- }, context_url='/api/revision/revsha1/directory/prefix/path/')
-
- # then
- self.assertEqual(actual_directory, {
- 'id': 'dir-id',
- 'type': 'file',
- 'name': 'hy',
- 'target': '789',
- 'target_url': '/api/content/sha1_git:123/',
- 'file_url': '/api/revision/revsha1/directory'
- '/prefix/path/hy/'
- })
-
- mock_django_reverse.assert_called_once_with(
- 'api-1-content', url_args={'q': 'sha1_git:789'})
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_directory_with_context_and_type_dir(
- self, mock_django_reverse,
- ):
- # given
- mock_django_reverse.return_value = '/api/directory/456/'
-
- # when
- actual_directory = utils.enrich_directory({
- 'id': 'dir-id',
- 'type': 'dir',
- 'name': 'emacs-42',
- 'target_type': 'file',
- 'target': '456',
- }, context_url='/api/revision/origin/2/directory/some/prefix/path/')
-
- # then
- self.assertEqual(actual_directory, {
- 'id': 'dir-id',
- 'type': 'dir',
- 'target_type': 'file',
- 'name': 'emacs-42',
- 'target': '456',
- 'target_url': '/api/directory/456/',
- 'dir_url': '/api/revision/origin/2/directory'
- '/some/prefix/path/emacs-42/'
- })
-
- mock_django_reverse.assert_called_once_with('api-1-directory',
- url_args={'sha1_git': '456'}) # noqa
-
- def test_enrich_content_without_hashes(self):
- # when/then
- self.assertEqual(utils.enrich_content({'id': '123'}),
- {'id': '123'})
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_content_with_hashes(self, mock_django_reverse):
-
- for algo, hash in self.sample_content_hashes.items():
-
- query_string = '%s:%s' % (algo, hash)
-
- # given
- mock_django_reverse.side_effect = [
- '/api/content/%s/raw/' % query_string,
- '/api/filetype/%s/' % query_string,
- '/api/language/%s/' % query_string,
- '/api/license/%s/' % query_string
- ]
-
- # when
- enriched_content = utils.enrich_content(
- {
- algo: hash,
- },
- query_string=query_string
- )
-
- # then
- self.assertEqual(
- enriched_content,
- {
- algo: hash,
- 'data_url': '/api/content/%s/raw/' % query_string,
- 'filetype_url': '/api/filetype/%s/' % query_string,
- 'language_url': '/api/language/%s/' % query_string,
- 'license_url': '/api/license/%s/' % query_string,
- }
- )
-
- mock_django_reverse.assert_has_calls([
- call('api-1-content-raw', url_args={'q': query_string}),
- call('api-1-content-filetype', url_args={'q': query_string}),
- call('api-1-content-language', url_args={'q': query_string}),
- call('api-1-content-license', url_args={'q': query_string}),
- ])
-
- mock_django_reverse.reset()
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_content_with_hashes_and_top_level_url(self,
- mock_django_reverse):
-
- for algo, hash in self.sample_content_hashes.items():
-
- query_string = '%s:%s' % (algo, hash)
-
- # given
- mock_django_reverse.side_effect = [
- '/api/content/%s/' % query_string,
- '/api/content/%s/raw/' % query_string,
- '/api/filetype/%s/' % query_string,
- '/api/language/%s/' % query_string,
- '/api/license/%s/' % query_string,
- ]
-
- # when
- enriched_content = utils.enrich_content(
- {
- algo: hash
- },
- top_url=True,
- query_string=query_string
- )
-
- # then
- self.assertEqual(
- enriched_content,
- {
- algo: hash,
- 'content_url': '/api/content/%s/' % query_string,
- 'data_url': '/api/content/%s/raw/' % query_string,
- 'filetype_url': '/api/filetype/%s/' % query_string,
- 'language_url': '/api/language/%s/' % query_string,
- 'license_url': '/api/license/%s/' % query_string,
- }
- )
-
- mock_django_reverse.assert_has_calls([
- call('api-1-content', url_args={'q': query_string}),
- call('api-1-content-raw', url_args={'q': query_string}),
- call('api-1-content-filetype', url_args={'q': query_string}),
- call('api-1-content-language', url_args={'q': query_string}),
- call('api-1-content-license', url_args={'q': query_string}),
- ])
-
- mock_django_reverse.reset()
-
- def _reverse_context_test(self, view_name, url_args):
- if view_name == 'api-1-revision':
- return '/api/revision/%s/' % url_args['sha1_git']
- elif view_name == 'api-1-revision-context':
- return '/api/revision/%s/prev/%s/' % (url_args['sha1_git'], url_args['context']) # noqa
- elif view_name == 'api-1-revision-log':
- if 'prev_sha1s' in url_args:
- return '/api/revision/%s/prev/%s/log/' % (url_args['sha1_git'], url_args['prev_sha1s']) # noqa
- else:
- return '/api/revision/%s/log/' % url_args['sha1_git']
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_revision_without_children_or_parent(
- self, mock_django_reverse,
- ):
- # given
- def reverse_test(view_name, url_args):
- if view_name == 'api-1-revision':
- return '/api/revision/' + url_args['sha1_git'] + '/'
- elif view_name == 'api-1-revision-log':
- return '/api/revision/' + url_args['sha1_git'] + '/log/'
- elif view_name == 'api-1-directory':
- return '/api/directory/' + url_args['sha1_git'] + '/'
-
- mock_django_reverse.side_effect = reverse_test
-
- # when
- actual_revision = utils.enrich_revision({
- 'id': 'rev-id',
- 'directory': '123',
- 'author': {'id': '1'},
- 'committer': {'id': '2'},
- })
-
- expected_revision = {
- 'id': 'rev-id',
- 'directory': '123',
- 'url': '/api/revision/rev-id/',
- 'history_url': '/api/revision/rev-id/log/',
- 'directory_url': '/api/directory/123/',
- 'author': {'id': '1'},
- 'committer': {'id': '2'},
- }
+ mock_django_reverse.reset()
- # then
- self.assertEqual(actual_revision, expected_revision)
-
- mock_django_reverse.assert_has_calls(
- [call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
- call('api-1-directory', url_args={'sha1_git': '123'})])
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_revision_with_children_and_parent_no_dir(
- self, mock_django_reverse,
- ):
- # given
- mock_django_reverse.side_effect = self._reverse_context_test
-
- # when
- actual_revision = utils.enrich_revision({
- 'id': 'rev-id',
- 'parents': ['123'],
- 'children': ['456'],
- })
-
- expected_revision = {
- 'id': 'rev-id',
- 'url': '/api/revision/rev-id/',
- 'history_url': '/api/revision/rev-id/log/',
- 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
- 'children': ['456'],
- 'children_urls': ['/api/revision/456/'],
- }
- # then
- self.assertEqual(actual_revision, expected_revision)
-
- mock_django_reverse.assert_has_calls(
- [call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision', url_args={'sha1_git': '123'}),
- call('api-1-revision', url_args={'sha1_git': '456'})])
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_revision_no_context(self, mock_django_reverse):
- # given
- mock_django_reverse.side_effect = self._reverse_context_test
-
- # when
- actual_revision = utils.enrich_revision({
- 'id': 'rev-id',
- 'parents': ['123'],
- 'children': ['456'],
- })
-
- expected_revision = {
- 'id': 'rev-id',
- 'url': '/api/revision/rev-id/',
- 'history_url': '/api/revision/rev-id/log/',
- 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
- 'children': ['456'],
- 'children_urls': ['/api/revision/456/']
+def test_enrich_content_with_hashes_and_top_level_url(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ for algo, hash in sample_content_hashes.items():
+
+ query_string = '%s:%s' % (algo, hash)
+
+ mock_django_reverse.side_effect = [
+ '/api/content/%s/' % query_string,
+ '/api/content/%s/raw/' % query_string,
+ '/api/filetype/%s/' % query_string,
+ '/api/language/%s/' % query_string,
+ '/api/license/%s/' % query_string,
+ ]
+
+ enriched_content = utils.enrich_content({algo: hash}, top_url=True,
+ query_string=query_string)
+
+ assert enriched_content == {
+ algo: hash,
+ 'content_url': '/api/content/%s/' % query_string,
+ 'data_url': '/api/content/%s/raw/' % query_string,
+ 'filetype_url': '/api/filetype/%s/' % query_string,
+ 'language_url': '/api/language/%s/' % query_string,
+ 'license_url': '/api/license/%s/' % query_string,
}
- # then
- self.assertEqual(actual_revision, expected_revision)
+ mock_django_reverse.assert_has_calls([
+ mocker.call('api-1-content', url_args={'q': query_string}),
+ mocker.call('api-1-content-raw', url_args={'q': query_string}),
+ mocker.call('api-1-content-filetype',
+ url_args={'q': query_string}),
+ mocker.call('api-1-content-language',
+ url_args={'q': query_string}),
+ mocker.call('api-1-content-license', url_args={'q': query_string}),
+ ])
+
+ mock_django_reverse.reset()
- mock_django_reverse.assert_has_calls(
- [call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision', url_args={'sha1_git': '123'}),
- call('api-1-revision', url_args={'sha1_git': '456'})])
- def _reverse_rev_message_test(self, view_name, url_args):
- if view_name == 'api-1-revision':
- return '/api/revision/%s/' % url_args['sha1_git']
- elif view_name == 'api-1-revision-log':
- if 'prev_sha1s' in url_args and url_args['prev_sha1s'] is not None:
- return '/api/revision/%s/prev/%s/log/' % (url_args['sha1_git'], url_args['prev_sha1s']) # noqa
- else:
- return '/api/revision/%s/log/' % url_args['sha1_git']
- elif view_name == 'api-1-revision-raw-message':
- return '/api/revision/' + url_args['sha1_git'] + '/raw/'
+def _reverse_context_test(view_name, url_args):
+ if view_name == 'api-1-revision':
+ return '/api/revision/%s/' % url_args['sha1_git']
+ elif view_name == 'api-1-revision-context':
+ return ('/api/revision/%s/prev/%s/' %
+ (url_args['sha1_git'], url_args['context']))
+ elif view_name == 'api-1-revision-log':
+ if 'prev_sha1s' in url_args:
+ return ('/api/revision/%s/prev/%s/log/' %
+ (url_args['sha1_git'], url_args['prev_sha1s']))
else:
- return '/api/revision/%s/prev/%s/' % (url_args['sha1_git'], url_args['context']) # noqa
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_revision_with_no_message(self, mock_django_reverse):
- # given
- mock_django_reverse.side_effect = self._reverse_rev_message_test
-
- # when
- expected_revision = {
- 'id': 'rev-id',
- 'url': '/api/revision/rev-id/',
- 'history_url': '/api/revision/rev-id/log/',
- 'message': None,
- 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
- 'children': ['456'],
- 'children_urls': ['/api/revision/456/'],
- }
+ return '/api/revision/%s/log/' % url_args['sha1_git']
- actual_revision = utils.enrich_revision({
- 'id': 'rev-id',
- 'message': None,
- 'parents': ['123'],
- 'children': ['456'],
- })
-
- # then
- self.assertEqual(actual_revision, expected_revision)
-
- mock_django_reverse.assert_has_calls(
- [call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision', url_args={'sha1_git': '123'}),
- call('api-1-revision', url_args={'sha1_git': '456'})]
- )
-
- @patch('swh.web.api.utils.reverse')
- def test_enrich_revision_with_invalid_message(self, mock_django_reverse):
- # given
- mock_django_reverse.side_effect = self._reverse_rev_message_test
-
- # when
- actual_revision = utils.enrich_revision({
- 'id': 'rev-id',
- 'message': None,
- 'message_decoding_failed': True,
- 'parents': ['123'],
- 'children': ['456'],
- })
-
- expected_revision = {
- 'id': 'rev-id',
- 'url': '/api/revision/rev-id/',
- 'history_url': '/api/revision/rev-id/log/',
- 'message': None,
- 'message_decoding_failed': True,
- 'message_url': '/api/revision/rev-id/raw/',
- 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
- 'children': ['456'],
- 'children_urls': ['/api/revision/456/'],
- }
- # then
- self.assertEqual(actual_revision, expected_revision)
+def test_enrich_revision_without_children_or_parent(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
- mock_django_reverse.assert_has_calls(
- [call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
- call('api-1-revision', url_args={'sha1_git': '123'}),
- call('api-1-revision', url_args={'sha1_git': '456'}),
- call('api-1-revision-raw-message', url_args={'sha1_git': 'rev-id'})]) # noqa
+ def reverse_test(view_name, url_args):
+ if view_name == 'api-1-revision':
+ return '/api/revision/' + url_args['sha1_git'] + '/'
+ elif view_name == 'api-1-revision-log':
+ return '/api/revision/' + url_args['sha1_git'] + '/log/'
+ elif view_name == 'api-1-directory':
+ return '/api/directory/' + url_args['sha1_git'] + '/'
+
+ mock_django_reverse.side_effect = reverse_test
+
+ actual_revision = utils.enrich_revision({
+ 'id': 'rev-id',
+ 'directory': '123',
+ 'author': {'id': '1'},
+ 'committer': {'id': '2'},
+ })
+
+ expected_revision = {
+ 'id': 'rev-id',
+ 'directory': '123',
+ 'url': '/api/revision/rev-id/',
+ 'history_url': '/api/revision/rev-id/log/',
+ 'directory_url': '/api/directory/123/',
+ 'author': {'id': '1'},
+ 'committer': {'id': '2'},
+ }
+
+ assert actual_revision == expected_revision
+
+ mock_django_reverse.assert_has_calls([
+ mocker.call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-directory', url_args={'sha1_git': '123'})
+ ])
+
+
+def test_enrich_revision_with_children_and_parent_no_dir(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.side_effect = _reverse_context_test
+
+ actual_revision = utils.enrich_revision({
+ 'id': 'rev-id',
+ 'parents': ['123'],
+ 'children': ['456'],
+ })
+
+ expected_revision = {
+ 'id': 'rev-id',
+ 'url': '/api/revision/rev-id/',
+ 'history_url': '/api/revision/rev-id/log/',
+ 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
+ 'children': ['456'],
+ 'children_urls': ['/api/revision/456/'],
+ }
+
+ assert actual_revision == expected_revision
+
+ mock_django_reverse.assert_has_calls([
+ mocker.call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '123'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '456'})
+ ])
+
+
+def test_enrich_revision_no_context(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.side_effect = _reverse_context_test
+
+ actual_revision = utils.enrich_revision({
+ 'id': 'rev-id',
+ 'parents': ['123'],
+ 'children': ['456'],
+ })
+
+ expected_revision = {
+ 'id': 'rev-id',
+ 'url': '/api/revision/rev-id/',
+ 'history_url': '/api/revision/rev-id/log/',
+ 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
+ 'children': ['456'],
+ 'children_urls': ['/api/revision/456/']
+ }
+
+ assert actual_revision == expected_revision
+
+ mock_django_reverse.assert_has_calls([
+ mocker.call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '123'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '456'})
+ ])
+
+
+def _reverse_rev_message_test(view_name, url_args):
+ if view_name == 'api-1-revision':
+ return '/api/revision/%s/' % url_args['sha1_git']
+ elif view_name == 'api-1-revision-log':
+ if 'prev_sha1s' in url_args and url_args['prev_sha1s'] is not None:
+ return ('/api/revision/%s/prev/%s/log/' %
+ (url_args['sha1_git'], url_args['prev_sha1s']))
+ else:
+ return '/api/revision/%s/log/' % url_args['sha1_git']
+ elif view_name == 'api-1-revision-raw-message':
+ return '/api/revision/' + url_args['sha1_git'] + '/raw/'
+ else:
+ return ('/api/revision/%s/prev/%s/' %
+ (url_args['sha1_git'], url_args['context']))
+
+
+def test_enrich_revision_with_no_message(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.side_effect = _reverse_rev_message_test
+
+ expected_revision = {
+ 'id': 'rev-id',
+ 'url': '/api/revision/rev-id/',
+ 'history_url': '/api/revision/rev-id/log/',
+ 'message': None,
+ 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
+ 'children': ['456'],
+ 'children_urls': ['/api/revision/456/'],
+ }
+
+ actual_revision = utils.enrich_revision({
+ 'id': 'rev-id',
+ 'message': None,
+ 'parents': ['123'],
+ 'children': ['456'],
+ })
+
+ assert actual_revision == expected_revision
+
+ mock_django_reverse.assert_has_calls([
+ mocker.call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '123'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '456'})
+ ])
+
+
+def test_enrich_revision_with_invalid_message(mocker):
+ mock_django_reverse = mocker.patch('swh.web.api.utils.reverse')
+ mock_django_reverse.side_effect = _reverse_rev_message_test
+
+ actual_revision = utils.enrich_revision({
+ 'id': 'rev-id',
+ 'message': None,
+ 'message_decoding_failed': True,
+ 'parents': ['123'],
+ 'children': ['456'],
+ })
+
+ expected_revision = {
+ 'id': 'rev-id',
+ 'url': '/api/revision/rev-id/',
+ 'history_url': '/api/revision/rev-id/log/',
+ 'message': None,
+ 'message_decoding_failed': True,
+ 'message_url': '/api/revision/rev-id/raw/',
+ 'parents': [{'id': '123', 'url': '/api/revision/123/'}],
+ 'children': ['456'],
+ 'children_urls': ['/api/revision/456/'],
+ }
+
+ assert actual_revision == expected_revision
+
+ mock_django_reverse.assert_has_calls([
+ mocker.call('api-1-revision', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision-log', url_args={'sha1_git': 'rev-id'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '123'}),
+ mocker.call('api-1-revision', url_args={'sha1_git': '456'}),
+ mocker.call('api-1-revision-raw-message',
+ url_args={'sha1_git': 'rev-id'})
+ ])
diff --git a/swh/web/tests/api/views/test_content.py b/swh/web/tests/api/views/test_content.py
--- a/swh/web/tests/api/views/test_content.py
+++ b/swh/web/tests/api/views/test_content.py
@@ -6,386 +6,375 @@
import pytest
from hypothesis import given
-from rest_framework.test import APITestCase
from swh.web.common.utils import reverse
from swh.web.tests.data import random_content
-from swh.web.tests.strategies import (
- content, contents_with_ctags
-)
-from swh.web.tests.testcase import (
- WebTestCase, ctags_json_missing, fossology_missing
-)
-
-
-class ContentApiTestCase(WebTestCase, APITestCase):
-
- @given(content())
- def test_api_content_filetype(self, content):
-
- self.content_add_mimetype(content['sha1'])
- url = reverse('api-1-content-filetype',
- url_args={'q': 'sha1_git:%s' % content['sha1_git']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- content_url = reverse('api-1-content',
- url_args={'q': 'sha1:%s' % content['sha1']})
- expected_data = self.content_get_mimetype(content['sha1'])
- expected_data['content_url'] = content_url
- self.assertEqual(rv.data, expected_data)
-
- def test_api_content_filetype_sha_not_found(self):
- unknown_content_ = random_content()
-
- url = reverse('api-1-content-filetype',
- url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'No filetype information found for content '
- 'sha1:%s.' % unknown_content_['sha1']
- })
-
- @pytest.mark.xfail # Language indexer is disabled
- @given(content())
- def test_api_content_language(self, content):
-
- self.content_add_language(content['sha1'])
- url = reverse('api-1-content-language',
- url_args={'q': 'sha1_git:%s' % content['sha1_git']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- content_url = reverse('api-1-content',
- url_args={'q': 'sha1:%s' % content['sha1']})
- expected_data = self.content_get_language(content['sha1'])
- expected_data['content_url'] = content_url
- self.assertEqual(rv.data, expected_data)
-
- def test_api_content_language_sha_not_found(self):
- unknown_content_ = random_content()
-
- url = reverse('api-1-content-language',
- url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'No language information found for content '
- 'sha1:%s.' % unknown_content_['sha1']
- })
-
- @pytest.mark.xfail # Language indexer is disabled
- @pytest.mark.skipif(ctags_json_missing,
- reason="requires ctags with json output support")
- @given(contents_with_ctags())
- def test_api_content_symbol(self, contents_with_ctags):
-
- expected_data = {}
- for content_sha1 in contents_with_ctags['sha1s']:
- self.content_add_ctags(content_sha1)
- for ctag in self.content_get_ctags(content_sha1):
- if ctag['name'] == contents_with_ctags['symbol_name']:
- expected_data[content_sha1] = ctag
- break
- url = reverse('api-1-content-symbol',
- url_args={'q': contents_with_ctags['symbol_name']},
- query_params={'per_page': 100})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- for entry in rv.data:
- content_sha1 = entry['sha1']
- expected_entry = expected_data[content_sha1]
- for key, view_name in (('content_url', 'api-1-content'),
- ('data_url', 'api-1-content-raw'),
- ('license_url', 'api-1-content-license'),
- ('language_url', 'api-1-content-language'),
- ('filetype_url', 'api-1-content-filetype')):
- expected_entry[key] = reverse(view_name,
- url_args={'q': 'sha1:%s' %
- content_sha1})
- expected_entry['sha1'] = content_sha1
- del expected_entry['id']
- self.assertEqual(entry, expected_entry)
- self.assertFalse('Link' in rv)
-
- url = reverse('api-1-content-symbol',
- url_args={'q': contents_with_ctags['symbol_name']},
- query_params={'per_page': 2})
- rv = self.client.get(url)
-
- next_url = reverse('api-1-content-symbol',
- url_args={'q': contents_with_ctags['symbol_name']},
- query_params={'last_sha1': rv.data[1]['sha1'],
- 'per_page': 2})
- self.assertEqual(rv['Link'], '<%s>; rel="next"' % next_url)
-
- def test_api_content_symbol_not_found(self):
-
- url = reverse('api-1-content-symbol', url_args={'q': 'bar'})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'No indexed raw content match expression \'bar\'.'
- })
- self.assertFalse('Link' in rv)
-
- @pytest.mark.skipif(ctags_json_missing,
- reason="requires ctags with json output support")
- @given(content())
- def test_api_content_ctags(self, content):
-
- self.content_add_ctags(content['sha1'])
- url = reverse('api-1-content-ctags',
- url_args={'q': 'sha1_git:%s' % content['sha1_git']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- content_url = reverse('api-1-content',
- url_args={'q': 'sha1:%s' % content['sha1']})
- expected_data = list(self.content_get_ctags(content['sha1']))
- for e in expected_data:
- e['content_url'] = content_url
- self.assertEqual(rv.data, expected_data)
-
- @pytest.mark.skipif(fossology_missing,
- reason="requires fossology-nomossa installed")
- @given(content())
- def test_api_content_license(self, content):
-
- self.content_add_license(content['sha1'])
- url = reverse('api-1-content-license',
- url_args={'q': 'sha1_git:%s' % content['sha1_git']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- content_url = reverse('api-1-content',
- url_args={'q': 'sha1:%s' % content['sha1']})
- expected_data = self.content_get_license(content['sha1'])
- expected_data['content_url'] = content_url
- self.assertEqual(rv.data, expected_data)
-
- def test_api_content_license_sha_not_found(self):
- unknown_content_ = random_content()
-
- url = reverse('api-1-content-license',
- url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'No license information found for content '
- 'sha1:%s.' % unknown_content_['sha1']
- })
-
- @given(content())
- def test_api_content_metadata(self, content):
-
- url = reverse('api-1-content', {'q': 'sha1:%s' % content['sha1']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- expected_data = self.content_get_metadata(content['sha1'])
- for key, view_name in (('data_url', 'api-1-content-raw'),
+from swh.web.tests.strategies import content, contents_with_ctags
+from swh.web.tests.conftest import ctags_json_missing, fossology_missing
+
+
+@given(content())
+def test_api_content_filetype(api_client, indexer_data, content):
+ indexer_data.content_add_mimetype(content['sha1'])
+ url = reverse('api-1-content-filetype',
+ url_args={'q': 'sha1_git:%s' % content['sha1_git']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ content_url = reverse('api-1-content',
+ url_args={'q': 'sha1:%s' % content['sha1']})
+ expected_data = indexer_data.content_get_mimetype(content['sha1'])
+ expected_data['content_url'] = content_url
+ assert rv.data == expected_data
+
+
+def test_api_content_filetype_sha_not_found(api_client):
+ unknown_content_ = random_content()
+
+ url = reverse('api-1-content-filetype',
+ url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'No filetype information found for content '
+ 'sha1:%s.' % unknown_content_['sha1']
+ }
+
+
+@pytest.mark.skip # Language indexer is disabled
+@given(content())
+def test_api_content_language(api_client, indexer_data, content):
+ indexer_data.content_add_language(content['sha1'])
+ url = reverse('api-1-content-language',
+ url_args={'q': 'sha1_git:%s' % content['sha1_git']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ content_url = reverse('api-1-content',
+ url_args={'q': 'sha1:%s' % content['sha1']})
+ expected_data = indexer_data.content_get_language(content['sha1'])
+ expected_data['content_url'] = content_url
+ assert rv.data == expected_data
+
+
+def test_api_content_language_sha_not_found(api_client):
+ unknown_content_ = random_content()
+
+ url = reverse('api-1-content-language',
+ url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'No language information found for content '
+ 'sha1:%s.' % unknown_content_['sha1']
+ }
+
+
+@pytest.mark.skip # Language indexer is disabled
+@pytest.mark.skipif(ctags_json_missing,
+ reason="requires ctags with json output support")
+@given(contents_with_ctags())
+def test_api_content_symbol(api_client, indexer_data, contents_with_ctags):
+ expected_data = {}
+ for content_sha1 in contents_with_ctags['sha1s']:
+ indexer_data.content_add_ctags(content_sha1)
+ for ctag in indexer_data.content_get_ctags(content_sha1):
+ if ctag['name'] == contents_with_ctags['symbol_name']:
+ expected_data[content_sha1] = ctag
+ break
+ url = reverse('api-1-content-symbol',
+ url_args={'q': contents_with_ctags['symbol_name']},
+ query_params={'per_page': 100})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ for entry in rv.data:
+ content_sha1 = entry['sha1']
+ expected_entry = expected_data[content_sha1]
+ for key, view_name in (('content_url', 'api-1-content'),
+ ('data_url', 'api-1-content-raw'),
('license_url', 'api-1-content-license'),
('language_url', 'api-1-content-language'),
('filetype_url', 'api-1-content-filetype')):
- expected_data[key] = reverse(view_name,
- url_args={'q': 'sha1:%s' %
- content['sha1']})
- self.assertEqual(rv.data, expected_data)
-
- def test_api_content_not_found_as_json(self):
- unknown_content_ = random_content()
-
- url = reverse('api-1-content',
- url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Content with sha1 checksum equals to %s not found!'
- % unknown_content_['sha1']
- })
-
- def test_api_content_not_found_as_yaml(self):
- unknown_content_ = random_content()
-
- url = reverse('api-1-content',
- url_args={'q': 'sha256:%s' % unknown_content_['sha256']})
- rv = self.client.get(url, HTTP_ACCEPT='application/yaml')
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertTrue('application/yaml' in rv['Content-Type'])
-
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Content with sha256 checksum equals to %s not found!' %
- unknown_content_['sha256']
- })
-
- def test_api_content_raw_ko_not_found(self):
- unknown_content_ = random_content()
-
- url = reverse('api-1-content-raw',
- url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Content with sha1 checksum equals to %s not found!' %
- unknown_content_['sha1']
- })
-
- @given(content())
- def test_api_content_raw_text(self, content):
-
- url = reverse('api-1-content-raw',
- url_args={'q': 'sha1:%s' % content['sha1']})
-
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200)
- self.assertEqual(rv['Content-Type'], 'application/octet-stream')
- self.assertEqual(
- rv['Content-disposition'],
- 'attachment; filename=content_sha1_%s_raw' % content['sha1'])
- self.assertEqual(
- rv['Content-Type'], 'application/octet-stream')
- expected_data = self.content_get(content['sha1'])
- self.assertEqual(rv.content, expected_data['data'])
-
- @given(content())
- def test_api_content_raw_text_with_filename(self, content):
-
- url = reverse('api-1-content-raw',
- url_args={'q': 'sha1:%s' % content['sha1']},
- query_params={'filename': 'filename.txt'})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200)
- self.assertEqual(rv['Content-Type'], 'application/octet-stream')
- self.assertEqual(
- rv['Content-disposition'],
- 'attachment; filename=filename.txt')
- self.assertEqual(
- rv['Content-Type'], 'application/octet-stream')
- expected_data = self.content_get(content['sha1'])
- self.assertEqual(rv.content, expected_data['data'])
-
- @given(content())
- def test_api_check_content_known(self, content):
-
- url = reverse('api-1-content-known',
- url_args={'q': content['sha1']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- self.assertEqual(rv.data, {
- 'search_res': [
- {
- 'found': True,
- 'sha1': content['sha1']
- }
- ],
- 'search_stats': {'nbfiles': 1, 'pct': 100.0}
- })
-
- @given(content())
- def test_api_check_content_known_as_yaml(self, content):
-
- url = reverse('api-1-content-known',
- url_args={'q': content['sha1']})
- rv = self.client.get(url, HTTP_ACCEPT='application/yaml')
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/yaml')
-
- self.assertEqual(rv.data, {
- 'search_res': [
- {
- 'found': True,
- 'sha1': content['sha1']
- }
- ],
- 'search_stats': {'nbfiles': 1, 'pct': 100.0}
- })
-
- @given(content())
- def test_api_check_content_known_post_as_yaml(self, content):
-
- url = reverse('api-1-content-known')
- rv = self.client.post(
- url,
- data={
- 'q': content['sha1']
- },
- HTTP_ACCEPT='application/yaml'
- )
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertTrue('application/yaml' in rv['Content-Type'])
- self.assertEqual(rv.data, {
- 'search_res': [
- {
- 'found': True,
- 'sha1': content['sha1']
- }
- ],
- 'search_stats': {'nbfiles': 1, 'pct': 100.0}
- })
-
- def test_api_check_content_known_not_found(self):
- unknown_content_ = random_content()
-
- url = reverse('api-1-content-known',
- url_args={'q': unknown_content_['sha1']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'search_res': [
- {
- 'found': False,
- 'sha1': unknown_content_['sha1']
- }
- ],
- 'search_stats': {'nbfiles': 1, 'pct': 0.0}
- })
-
- @given(content())
- def test_api_content_uppercase(self, content):
- url = reverse('api-1-content-uppercase-checksum',
- url_args={'q': content['sha1'].upper()})
-
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
-
- redirect_url = reverse('api-1-content',
- url_args={'q': content['sha1']})
-
- self.assertEqual(resp['location'], redirect_url)
+ expected_entry[key] = reverse(
+ view_name, url_args={'q': 'sha1:%s' % content_sha1})
+ expected_entry['sha1'] = content_sha1
+ del expected_entry['id']
+ assert entry == expected_entry
+ assert 'Link' not in rv
+
+ url = reverse('api-1-content-symbol',
+ url_args={'q': contents_with_ctags['symbol_name']},
+ query_params={'per_page': 2})
+ rv = api_client.get(url)
+
+ next_url = reverse('api-1-content-symbol',
+ url_args={'q': contents_with_ctags['symbol_name']},
+ query_params={'last_sha1': rv.data[1]['sha1'],
+ 'per_page': 2})
+ assert rv['Link'] == '<%s>; rel="next"' % next_url
+
+
+def test_api_content_symbol_not_found(api_client):
+ url = reverse('api-1-content-symbol', url_args={'q': 'bar'})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'No indexed raw content match expression \'bar\'.'
+ }
+ assert 'Link' not in rv
+
+
+@pytest.mark.skipif(ctags_json_missing,
+ reason="requires ctags with json output support")
+@given(content())
+def test_api_content_ctags(api_client, indexer_data, content):
+ indexer_data.content_add_ctags(content['sha1'])
+ url = reverse('api-1-content-ctags',
+ url_args={'q': 'sha1_git:%s' % content['sha1_git']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ content_url = reverse('api-1-content',
+ url_args={'q': 'sha1:%s' % content['sha1']})
+ expected_data = list(indexer_data.content_get_ctags(content['sha1']))
+ for e in expected_data:
+ e['content_url'] = content_url
+ assert rv.data == expected_data
+
+
+@pytest.mark.skipif(fossology_missing,
+ reason="requires fossology-nomossa installed")
+@given(content())
+def test_api_content_license(api_client, indexer_data, content):
+ indexer_data.content_add_license(content['sha1'])
+ url = reverse('api-1-content-license',
+ url_args={'q': 'sha1_git:%s' % content['sha1_git']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ content_url = reverse('api-1-content',
+ url_args={'q': 'sha1:%s' % content['sha1']})
+ expected_data = indexer_data.content_get_license(content['sha1'])
+ expected_data['content_url'] = content_url
+ assert rv.data == expected_data
+
+
+def test_api_content_license_sha_not_found(api_client):
+ unknown_content_ = random_content()
+
+ url = reverse('api-1-content-license',
+ url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'No license information found for content '
+ 'sha1:%s.' % unknown_content_['sha1']
+ }
+
+
+@given(content())
+def test_api_content_metadata(api_client, archive_data, content):
+ url = reverse('api-1-content', {'q': 'sha1:%s' % content['sha1']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ expected_data = archive_data.content_get_metadata(content['sha1'])
+ for key, view_name in (('data_url', 'api-1-content-raw'),
+ ('license_url', 'api-1-content-license'),
+ ('language_url', 'api-1-content-language'),
+ ('filetype_url', 'api-1-content-filetype')):
+ expected_data[key] = reverse(
+ view_name, url_args={'q': 'sha1:%s' % content['sha1']})
+ assert rv.data == expected_data
+
+
+def test_api_content_not_found_as_json(api_client):
+ unknown_content_ = random_content()
+
+ url = reverse('api-1-content',
+ url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
+ rv = api_client.get(url)
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Content with sha1 checksum equals to %s not found!'
+ % unknown_content_['sha1']
+ }
+
+
+def test_api_content_not_found_as_yaml(api_client):
+ unknown_content_ = random_content()
+
+ url = reverse('api-1-content',
+ url_args={'q': 'sha256:%s' % unknown_content_['sha256']})
+ rv = api_client.get(url, HTTP_ACCEPT='application/yaml')
+
+ assert rv.status_code == 404, rv.data
+ assert 'application/yaml' in rv['Content-Type']
+
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Content with sha256 checksum equals to %s not found!' %
+ unknown_content_['sha256']
+ }
+
+
+def test_api_content_raw_ko_not_found(api_client):
+ unknown_content_ = random_content()
+
+ url = reverse('api-1-content-raw',
+ url_args={'q': 'sha1:%s' % unknown_content_['sha1']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Content with sha1 checksum equals to %s not found!' %
+ unknown_content_['sha1']
+ }
+
+
+@given(content())
+def test_api_content_raw_text(api_client, archive_data, content):
+ url = reverse('api-1-content-raw',
+ url_args={'q': 'sha1:%s' % content['sha1']})
+
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/octet-stream'
+ assert rv['Content-disposition'] == \
+ 'attachment; filename=content_sha1_%s_raw' % content['sha1']
+ assert rv['Content-Type'] == 'application/octet-stream'
+ expected_data = archive_data.content_get(content['sha1'])
+ assert rv.content == expected_data['data']
+
+
+@given(content())
+def test_api_content_raw_text_with_filename(api_client, archive_data, content):
+ url = reverse('api-1-content-raw',
+ url_args={'q': 'sha1:%s' % content['sha1']},
+ query_params={'filename': 'filename.txt'})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/octet-stream'
+ assert rv['Content-disposition'] == \
+ 'attachment; filename=filename.txt'
+ assert rv['Content-Type'] == 'application/octet-stream'
+ expected_data = archive_data.content_get(content['sha1'])
+ assert rv.content == expected_data['data']
+
+
+@given(content())
+def test_api_check_content_known(api_client, content):
+ url = reverse('api-1-content-known',
+ url_args={'q': content['sha1']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ assert rv.data == {
+ 'search_res': [
+ {
+ 'found': True,
+ 'sha1': content['sha1']
+ }
+ ],
+ 'search_stats': {'nbfiles': 1, 'pct': 100.0}
+ }
+
+
+@given(content())
+def test_api_check_content_known_as_yaml(api_client, content):
+ url = reverse('api-1-content-known',
+ url_args={'q': content['sha1']})
+ rv = api_client.get(url, HTTP_ACCEPT='application/yaml')
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/yaml'
+
+ assert rv.data == {
+ 'search_res': [
+ {
+ 'found': True,
+ 'sha1': content['sha1']
+ }
+ ],
+ 'search_stats': {'nbfiles': 1, 'pct': 100.0}
+ }
+
+
+@given(content())
+def test_api_check_content_known_post_as_yaml(api_client, content):
+ url = reverse('api-1-content-known')
+ rv = api_client.post(url, data={'q': content['sha1']},
+ HTTP_ACCEPT='application/yaml')
+
+ assert rv.status_code == 200, rv.data
+ assert 'application/yaml' in rv['Content-Type']
+ assert rv.data == {
+ 'search_res': [
+ {
+ 'found': True,
+ 'sha1': content['sha1']
+ }
+ ],
+ 'search_stats': {'nbfiles': 1, 'pct': 100.0}
+ }
+
+
+def test_api_check_content_known_not_found(api_client):
+ unknown_content_ = random_content()
+
+ url = reverse('api-1-content-known',
+ url_args={'q': unknown_content_['sha1']})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'search_res': [
+ {
+ 'found': False,
+ 'sha1': unknown_content_['sha1']
+ }
+ ],
+ 'search_stats': {'nbfiles': 1, 'pct': 0.0}
+ }
+
+
+@given(content())
+def test_api_content_uppercase(api_client, content):
+ url = reverse('api-1-content-uppercase-checksum',
+ url_args={'q': content['sha1'].upper()})
+
+ rv = api_client.get(url)
+ assert rv.status_code == 302, rv.data
+
+ redirect_url = reverse('api-1-content',
+ url_args={'q': content['sha1']})
+
+ assert rv['location'] == redirect_url
diff --git a/swh/web/tests/api/views/test_directory.py b/swh/web/tests/api/views/test_directory.py
--- a/swh/web/tests/api/views/test_directory.py
+++ b/swh/web/tests/api/views/test_directory.py
@@ -6,101 +6,98 @@
import random
from hypothesis import given
-from rest_framework.test import APITestCase
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
from swh.web.tests.strategies import directory
-from swh.web.tests.testcase import WebTestCase
-class DirectoryApiTestCase(WebTestCase, APITestCase):
+@given(directory())
+def test_api_directory(api_client, archive_data, directory):
- @given(directory())
- def test_api_directory(self, directory):
+ url = reverse('api-1-directory', url_args={'sha1_git': directory})
+ rv = api_client.get(url)
- url = reverse('api-1-directory', url_args={'sha1_git': directory})
- rv = self.client.get(url)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
+ expected_data = list(map(_enrich_dir_data,
+ archive_data.directory_ls(directory)))
- expected_data = list(map(self._enrich_dir_data,
- self.directory_ls(directory)))
+ assert rv.data == expected_data
- self.assertEqual(rv.data, expected_data)
- def test_api_directory_not_found(self):
- unknown_directory_ = random_sha1()
+def test_api_directory_not_found(api_client):
+ unknown_directory_ = random_sha1()
- url = reverse('api-1-directory',
- url_args={'sha1_git': unknown_directory_})
- rv = self.client.get(url)
+ url = reverse('api-1-directory',
+ url_args={'sha1_git': unknown_directory_})
+ rv = api_client.get(url)
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Directory with sha1_git %s not found'
- % unknown_directory_})
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Directory with sha1_git %s not found' % unknown_directory_
+ }
- @given(directory())
- def test_api_directory_with_path_found(self, directory):
- directory_content = self.directory_ls(directory)
- path = random.choice(directory_content)
+@given(directory())
+def test_api_directory_with_path_found(api_client, archive_data, directory):
- url = reverse('api-1-directory',
- url_args={'sha1_git': directory,
- 'path': path['name']})
- rv = self.client.get(url)
+ directory_content = archive_data.directory_ls(directory)
+ path = random.choice(directory_content)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, self._enrich_dir_data(path))
+ url = reverse('api-1-directory',
+ url_args={'sha1_git': directory, 'path': path['name']})
+ rv = api_client.get(url)
- @given(directory())
- def test_api_directory_with_path_not_found(self, directory):
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == _enrich_dir_data(path)
- path = 'some/path/to/nonexistent/dir/'
- url = reverse('api-1-directory',
- url_args={'sha1_git': directory,
- 'path': path})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': ('Directory entry with path %s from %s not found'
- % (path, directory))})
+@given(directory())
+def test_api_directory_with_path_not_found(api_client, directory):
- @given(directory())
- def test_api_directory_uppercase(self, directory):
- url = reverse('api-1-directory-uppercase-checksum',
- url_args={'sha1_git': directory.upper()})
+ path = 'some/path/to/nonexistent/dir/'
+ url = reverse('api-1-directory',
+ url_args={'sha1_git': directory, 'path': path})
+ rv = api_client.get(url)
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': ('Directory entry with path %s from %s not found' %
+ (path, directory))
+ }
- redirect_url = reverse('api-1-directory',
- url_args={'sha1_git': directory})
- self.assertEqual(resp['location'], redirect_url)
+@given(directory())
+def test_api_directory_uppercase(api_client, directory):
+ url = reverse('api-1-directory-uppercase-checksum',
+ url_args={'sha1_git': directory.upper()})
- @classmethod
- def _enrich_dir_data(cls, dir_data):
- if dir_data['type'] == 'file':
- dir_data['target_url'] = \
- reverse('api-1-content',
- url_args={'q': 'sha1_git:%s' % dir_data['target']})
- elif dir_data['type'] == 'dir':
- dir_data['target_url'] = \
- reverse('api-1-directory',
- url_args={'sha1_git': dir_data['target']})
- elif dir_data['type'] == 'rev':
- dir_data['target_url'] = \
- reverse('api-1-revision',
- url_args={'sha1_git': dir_data['target']})
+ resp = api_client.get(url)
+ assert resp.status_code == 302
- return dir_data
+ redirect_url = reverse('api-1-directory', url_args={'sha1_git': directory})
+
+ assert resp['location'] == redirect_url
+
+
+def _enrich_dir_data(dir_data):
+ if dir_data['type'] == 'file':
+ dir_data['target_url'] = reverse(
+ 'api-1-content',
+ url_args={'q': 'sha1_git:%s' % dir_data['target']})
+ elif dir_data['type'] == 'dir':
+ dir_data['target_url'] = reverse(
+ 'api-1-directory',
+ url_args={'sha1_git': dir_data['target']})
+ elif dir_data['type'] == 'rev':
+ dir_data['target_url'] = reverse(
+ 'api-1-revision',
+ url_args={'sha1_git': dir_data['target']})
+ return dir_data
diff --git a/swh/web/tests/api/views/test_identifiers.py b/swh/web/tests/api/views/test_identifiers.py
--- a/swh/web/tests/api/views/test_identifiers.py
+++ b/swh/web/tests/api/views/test_identifiers.py
@@ -4,7 +4,6 @@
# See top-level LICENSE file for more information
from hypothesis import given
-from rest_framework.test import APITestCase
from swh.model.identifiers import (
CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT
@@ -16,76 +15,75 @@
unknown_content, unknown_directory, unknown_release,
unknown_revision, unknown_snapshot
)
-from swh.web.tests.testcase import WebTestCase
-
-
-class SwhIdsApiTestCase(WebTestCase, APITestCase):
-
- @given(origin(), content(), directory(), release(), revision(), snapshot())
- def test_swh_id_resolve_success(self, origin, content, directory,
- release, revision, snapshot):
-
- for obj_type_short, obj_type, obj_id in (
- ('cnt', CONTENT, content['sha1_git']),
- ('dir', DIRECTORY, directory),
- ('rel', RELEASE, release),
- ('rev', REVISION, revision),
- ('snp', SNAPSHOT, snapshot)):
-
- swh_id = 'swh:1:%s:%s;origin=%s' % (obj_type_short, obj_id,
- origin['url'])
- url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id})
-
- resp = self.client.get(url)
-
- if obj_type == CONTENT:
- url_args = {'query_string': 'sha1_git:%s' % obj_id}
- elif obj_type == SNAPSHOT:
- url_args = {'snapshot_id': obj_id}
- else:
- url_args = {'sha1_git': obj_id}
-
- browse_rev_url = reverse('browse-%s' % obj_type,
- url_args=url_args,
- query_params={'origin': origin['url']})
-
- expected_result = {
- 'browse_url': browse_rev_url,
- 'metadata': {'origin': origin['url']},
- 'namespace': 'swh',
- 'object_id': obj_id,
- 'object_type': obj_type,
- 'scheme_version': 1
- }
-
- self.assertEqual(resp.status_code, 200, resp.data)
- self.assertEqual(resp.data, expected_result)
-
- def test_swh_id_resolve_invalid(self):
- rev_id_invalid = '96db9023b8_foo_50d6c108e9a3'
- swh_id = 'swh:1:rev:%s' % rev_id_invalid
+
+
+@given(origin(), content(), directory(), release(), revision(), snapshot())
+def test_swh_id_resolve_success(api_client, origin, content, directory,
+ release, revision, snapshot):
+
+ for obj_type_short, obj_type, obj_id in (
+ ('cnt', CONTENT, content['sha1_git']),
+ ('dir', DIRECTORY, directory),
+ ('rel', RELEASE, release),
+ ('rev', REVISION, revision),
+ ('snp', SNAPSHOT, snapshot)):
+
+ swh_id = 'swh:1:%s:%s;origin=%s' % (obj_type_short, obj_id,
+ origin['url'])
url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id})
- resp = self.client.get(url)
+ resp = api_client.get(url)
+
+ if obj_type == CONTENT:
+ url_args = {'query_string': 'sha1_git:%s' % obj_id}
+ elif obj_type == SNAPSHOT:
+ url_args = {'snapshot_id': obj_id}
+ else:
+ url_args = {'sha1_git': obj_id}
+
+ browse_rev_url = reverse('browse-%s' % obj_type,
+ url_args=url_args,
+ query_params={'origin': origin['url']})
+
+ expected_result = {
+ 'browse_url': browse_rev_url,
+ 'metadata': {'origin': origin['url']},
+ 'namespace': 'swh',
+ 'object_id': obj_id,
+ 'object_type': obj_type,
+ 'scheme_version': 1
+ }
- self.assertEqual(resp.status_code, 400, resp.data)
+ assert resp.status_code == 200, resp.data
+ assert resp.data == expected_result
- @given(unknown_content(), unknown_directory(), unknown_release(),
- unknown_revision(), unknown_snapshot())
- def test_swh_id_resolve_not_found(self, unknown_content, unknown_directory,
- unknown_release, unknown_revision,
- unknown_snapshot):
- for obj_type_short, obj_id in (('cnt', unknown_content['sha1_git']),
- ('dir', unknown_directory),
- ('rel', unknown_release),
- ('rev', unknown_revision),
- ('snp', unknown_snapshot)):
+def test_swh_id_resolve_invalid(api_client):
+ rev_id_invalid = '96db9023b8_foo_50d6c108e9a3'
+ swh_id = 'swh:1:rev:%s' % rev_id_invalid
+ url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id})
- swh_id = 'swh:1:%s:%s' % (obj_type_short, obj_id)
+ resp = api_client.get(url)
- url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id})
+ assert resp.status_code == 400, resp.data
+
+
+@given(unknown_content(), unknown_directory(), unknown_release(),
+ unknown_revision(), unknown_snapshot())
+def test_swh_id_resolve_not_found(api_client, unknown_content,
+ unknown_directory, unknown_release,
+ unknown_revision, unknown_snapshot):
+
+ for obj_type_short, obj_id in (('cnt', unknown_content['sha1_git']),
+ ('dir', unknown_directory),
+ ('rel', unknown_release),
+ ('rev', unknown_revision),
+ ('snp', unknown_snapshot)):
+
+ swh_id = 'swh:1:%s:%s' % (obj_type_short, obj_id)
+
+ url = reverse('api-1-resolve-swh-pid', url_args={'swh_id': swh_id})
- resp = self.client.get(url)
+ resp = api_client.get(url)
- self.assertEqual(resp.status_code, 404, resp.data)
+ assert resp.status_code == 404, resp.data
diff --git a/swh/web/tests/api/views/test_origin.py b/swh/web/tests/api/views/test_origin.py
--- a/swh/web/tests/api/views/test_origin.py
+++ b/swh/web/tests/api/views/test_origin.py
@@ -3,12 +3,8 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from unittest.mock import patch
-
-from hypothesis import given, strategies
-import pytest
+from hypothesis import given
from requests.utils import parse_header_links
-from rest_framework.test import APITestCase
from swh.storage.exc import StorageDBError, StorageAPIError
@@ -18,295 +14,202 @@
from swh.web.tests.strategies import (
origin, new_origin, visit_dates, new_snapshots
)
-from swh.web.tests.testcase import WebTestCase
-from swh.web.tests.data import get_tests_data
-
-
-class OriginApiTestCase(WebTestCase, APITestCase):
- def _scroll_results(self, url):
- """Iterates through pages of results, and returns them all."""
- results = []
-
- while True:
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- results.extend(rv.data)
-
- if 'Link' in rv:
- for link in parse_header_links(rv['Link']):
- if link['rel'] == 'next':
- # Found link to next page of results
- url = link['url']
- break
- else:
- # No link with 'rel=next'
+
+
+def _scroll_results(api_client, url):
+ """Iterates through pages of results, and returns them all."""
+ results = []
+
+ while True:
+ rv = api_client.get(url)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ results.extend(rv.data)
+
+ if 'Link' in rv:
+ for link in parse_header_links(rv['Link']):
+ if link['rel'] == 'next':
+ # Found link to next page of results
+ url = link['url']
break
else:
- # No Link header
+ # No link with 'rel=next'
break
+ else:
+ # No Link header
+ break
- return results
+ return results
- @patch('swh.web.api.views.origin.get_origin_visits')
- def test_api_lookup_origin_visits_raise_error(
- self, mock_get_origin_visits,
- ):
- err_msg = 'voluntary error to check the bad request middleware.'
+def test_api_lookup_origin_visits_raise_error(api_client, mocker):
+ mock_get_origin_visits = mocker.patch(
+ 'swh.web.api.views.origin.get_origin_visits')
+ err_msg = 'voluntary error to check the bad request middleware.'
- mock_get_origin_visits.side_effect = BadInputExc(err_msg)
-
- url = reverse(
- 'api-1-origin-visits', url_args={'origin_url': 'http://foo'})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 400, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'BadInputExc',
- 'reason': err_msg})
-
- @patch('swh.web.api.views.origin.get_origin_visits')
- def test_api_lookup_origin_visits_raise_swh_storage_error_db(
- self, mock_get_origin_visits):
-
- err_msg = 'Storage exploded! Will be back online shortly!'
-
- mock_get_origin_visits.side_effect = StorageDBError(err_msg)
+ mock_get_origin_visits.side_effect = BadInputExc(err_msg)
- url = reverse(
- 'api-1-origin-visits', url_args={'origin_url': 'http://foo'})
- rv = self.client.get(url)
+ url = reverse('api-1-origin-visits', url_args={'origin_url': 'http://foo'})
+ rv = api_client.get(url)
- self.assertEqual(rv.status_code, 503, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'StorageDBError',
- 'reason':
- 'An unexpected error occurred in the backend: %s' % err_msg})
-
- @patch('swh.web.api.views.origin.get_origin_visits')
- def test_api_lookup_origin_visits_raise_swh_storage_error_api(
- self, mock_get_origin_visits):
-
- err_msg = 'Storage API dropped dead! Will resurrect asap!'
-
- mock_get_origin_visits.side_effect = StorageAPIError(err_msg)
-
- url = reverse(
- 'api-1-origin-visits', url_args={'origin_url': 'http://foo'})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 503, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'StorageAPIError',
- 'reason':
- 'An unexpected error occurred in the api backend: %s' % err_msg
- })
-
- @given(new_origin(), visit_dates(3), new_snapshots(3))
- def test_api_lookup_origin_visits(self, new_origin, visit_dates,
- new_snapshots):
-
- self.storage.origin_add_one(new_origin)
- for i, visit_date in enumerate(visit_dates):
- origin_visit = self.storage.origin_visit_add(
- new_origin['url'], visit_date, type='git')
- self.storage.snapshot_add([new_snapshots[i]])
- self.storage.origin_visit_update(
- new_origin['url'], origin_visit['visit'],
- snapshot=new_snapshots[i]['id'])
-
- all_visits = list(reversed(get_origin_visits(new_origin)))
-
- for last_visit, expected_visits in (
- (None, all_visits[:2]),
- (all_visits[1]['visit'], all_visits[2:4])):
-
- url = reverse('api-1-origin-visits',
- url_args={'origin_url': new_origin['url']},
- query_params={'per_page': 2,
- 'last_visit': last_visit})
-
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- for expected_visit in expected_visits:
- origin_visit_url = reverse(
- 'api-1-origin-visit',
- url_args={'origin_url': new_origin['url'],
- 'visit_id': expected_visit['visit']})
- snapshot_url = reverse(
- 'api-1-snapshot',
- url_args={'snapshot_id': expected_visit['snapshot']})
- expected_visit['origin'] = new_origin['url']
- expected_visit['origin_visit_url'] = origin_visit_url
- expected_visit['snapshot_url'] = snapshot_url
-
- self.assertEqual(rv.data, expected_visits)
-
- @given(new_origin(), visit_dates(3), new_snapshots(3))
- def test_api_lookup_origin_visits_by_id(self, new_origin, visit_dates,
- new_snapshots):
-
- self.storage.origin_add_one(new_origin)
- for i, visit_date in enumerate(visit_dates):
- origin_visit = self.storage.origin_visit_add(
- new_origin['url'], visit_date, type='git')
- self.storage.snapshot_add([new_snapshots[i]])
- self.storage.origin_visit_update(
- new_origin['url'], origin_visit['visit'],
- snapshot=new_snapshots[i]['id'])
-
- all_visits = list(reversed(get_origin_visits(new_origin)))
-
- for last_visit, expected_visits in (
- (None, all_visits[:2]),
- (all_visits[1]['visit'], all_visits[2:4])):
-
- url = reverse('api-1-origin-visits',
- url_args={'origin_url': new_origin['url']},
- query_params={'per_page': 2,
- 'last_visit': last_visit})
-
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- for expected_visit in expected_visits:
- origin_visit_url = reverse(
- 'api-1-origin-visit',
- url_args={'origin_url': new_origin['url'],
- 'visit_id': expected_visit['visit']})
- snapshot_url = reverse(
- 'api-1-snapshot',
- url_args={'snapshot_id': expected_visit['snapshot']})
- expected_visit['origin'] = new_origin['url']
- expected_visit['origin_visit_url'] = origin_visit_url
- expected_visit['snapshot_url'] = snapshot_url
-
- self.assertEqual(rv.data, expected_visits)
-
- @given(new_origin(), visit_dates(3), new_snapshots(3))
- def test_api_lookup_origin_visit(self, new_origin, visit_dates,
- new_snapshots):
-
- self.storage.origin_add_one(new_origin)
- for i, visit_date in enumerate(visit_dates):
- origin_visit = self.storage.origin_visit_add(
- new_origin['url'], visit_date, type='git')
- visit_id = origin_visit['visit']
- self.storage.snapshot_add([new_snapshots[i]])
- self.storage.origin_visit_update(
- new_origin['url'], origin_visit['visit'],
- snapshot=new_snapshots[i]['id'])
- url = reverse('api-1-origin-visit',
- url_args={'origin_url': new_origin['url'],
- 'visit_id': visit_id})
-
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- expected_visit = self.origin_visit_get_by(
- new_origin['url'], visit_id)
-
- origin_url = reverse('api-1-origin',
- url_args={'origin_url': new_origin['url']})
- snapshot_url = reverse(
- 'api-1-snapshot',
- url_args={'snapshot_id': expected_visit['snapshot']})
+ assert rv.status_code == 400, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'BadInputExc',
+ 'reason': err_msg
+ }
- expected_visit['origin'] = new_origin['url']
- expected_visit['origin_url'] = origin_url
- expected_visit['snapshot_url'] = snapshot_url
- self.assertEqual(rv.data, expected_visit)
+def test_api_lookup_origin_visits_raise_swh_storage_error_db(api_client,
+ mocker):
+ mock_get_origin_visits = mocker.patch(
+ 'swh.web.api.views.origin.get_origin_visits')
+ err_msg = 'Storage exploded! Will be back online shortly!'
- @given(new_origin())
- def test_api_lookup_origin_visit_latest_no_visit(self, new_origin):
+ mock_get_origin_visits.side_effect = StorageDBError(err_msg)
- self.storage.origin_add_one(new_origin)
+ url = reverse('api-1-origin-visits', url_args={'origin_url': 'http://foo'})
+ rv = api_client.get(url)
- url = reverse('api-1-origin-visit-latest',
- url_args={'origin_url': new_origin['url']})
+ assert rv.status_code == 503, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'StorageDBError',
+ 'reason':
+ 'An unexpected error occurred in the backend: %s' % err_msg
+ }
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'No visit for origin %s found' % new_origin['url']
- })
- @given(new_origin(), visit_dates(2), new_snapshots(1))
- def test_api_lookup_origin_visit_latest(
- self, new_origin, visit_dates, new_snapshots):
+def test_api_lookup_origin_visits_raise_swh_storage_error_api(api_client,
+ mocker):
+ mock_get_origin_visits = mocker.patch(
+ 'swh.web.api.views.origin.get_origin_visits')
+ err_msg = 'Storage API dropped dead! Will resurrect asap!'
- self.storage.origin_add_one(new_origin)
+ mock_get_origin_visits.side_effect = StorageAPIError(err_msg)
- visit_dates.sort()
- visit_ids = []
- for i, visit_date in enumerate(visit_dates):
- origin_visit = self.storage.origin_visit_add(
- new_origin['url'], visit_date, type='git')
- visit_ids.append(origin_visit['visit'])
+ url = reverse(
+ 'api-1-origin-visits', url_args={'origin_url': 'http://foo'})
+ rv = api_client.get(url)
- self.storage.snapshot_add([new_snapshots[0]])
- self.storage.origin_visit_update(
- new_origin['url'], visit_ids[0],
- snapshot=new_snapshots[0]['id'])
+ assert rv.status_code == 503, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'StorageAPIError',
+ 'reason':
+ 'An unexpected error occurred in the api backend: %s' % err_msg
+ }
- url = reverse('api-1-origin-visit-latest',
- url_args={'origin_url': new_origin['url']})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
+@given(new_origin(), visit_dates(3), new_snapshots(3))
+def test_api_lookup_origin_visits(api_client, archive_data, new_origin,
+ visit_dates, new_snapshots):
+ archive_data.origin_add_one(new_origin)
+ for i, visit_date in enumerate(visit_dates):
+ origin_visit = archive_data.origin_visit_add(
+ new_origin['url'], visit_date, type='git')
+ archive_data.snapshot_add([new_snapshots[i]])
+ archive_data.origin_visit_update(
+ new_origin['url'], origin_visit['visit'],
+ snapshot=new_snapshots[i]['id'])
- expected_visit = self.origin_visit_get_by(
- new_origin['url'], visit_ids[1])
+ all_visits = list(reversed(get_origin_visits(new_origin)))
- origin_url = reverse('api-1-origin',
- url_args={'origin_url': new_origin['url']})
+ for last_visit, expected_visits in (
+ (None, all_visits[:2]),
+ (all_visits[1]['visit'], all_visits[2:4])):
+
+ url = reverse('api-1-origin-visits',
+ url_args={'origin_url': new_origin['url']},
+ query_params={'per_page': 2,
+ 'last_visit': last_visit})
+
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ for expected_visit in expected_visits:
+ origin_visit_url = reverse(
+ 'api-1-origin-visit',
+ url_args={'origin_url': new_origin['url'],
+ 'visit_id': expected_visit['visit']})
+ snapshot_url = reverse(
+ 'api-1-snapshot',
+ url_args={'snapshot_id': expected_visit['snapshot']})
+ expected_visit['origin'] = new_origin['url']
+ expected_visit['origin_visit_url'] = origin_visit_url
+ expected_visit['snapshot_url'] = snapshot_url
+
+ assert rv.data == expected_visits
- expected_visit['origin'] = new_origin['url']
- expected_visit['origin_url'] = origin_url
- expected_visit['snapshot_url'] = None
- self.assertEqual(rv.data, expected_visit)
+@given(new_origin(), visit_dates(3), new_snapshots(3))
+def test_api_lookup_origin_visits_by_id(api_client, archive_data, new_origin,
+ visit_dates, new_snapshots):
+ archive_data.origin_add_one(new_origin)
+ for i, visit_date in enumerate(visit_dates):
+ origin_visit = archive_data.origin_visit_add(
+ new_origin['url'], visit_date, type='git')
+ archive_data.snapshot_add([new_snapshots[i]])
+ archive_data.origin_visit_update(
+ new_origin['url'], origin_visit['visit'],
+ snapshot=new_snapshots[i]['id'])
- @given(new_origin(), visit_dates(2), new_snapshots(1))
- def test_api_lookup_origin_visit_latest_with_snapshot(
- self, new_origin, visit_dates, new_snapshots):
- self.storage.origin_add_one(new_origin)
- visit_dates.sort()
- visit_ids = []
- for i, visit_date in enumerate(visit_dates):
- origin_visit = self.storage.origin_visit_add(
- new_origin['url'], visit_date, type='git')
- visit_ids.append(origin_visit['visit'])
+ all_visits = list(reversed(get_origin_visits(new_origin)))
- self.storage.snapshot_add([new_snapshots[0]])
- self.storage.origin_visit_update(
- new_origin['url'], visit_ids[0],
- snapshot=new_snapshots[0]['id'])
+ for last_visit, expected_visits in (
+ (None, all_visits[:2]),
+ (all_visits[1]['visit'], all_visits[2:4])):
- url = reverse('api-1-origin-visit-latest',
- url_args={'origin_url': new_origin['url']})
- url += '?require_snapshot=true'
+ url = reverse('api-1-origin-visits',
+ url_args={'origin_url': new_origin['url']},
+ query_params={'per_page': 2,
+ 'last_visit': last_visit})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
+ rv = api_client.get(url)
- expected_visit = self.origin_visit_get_by(
- new_origin['url'], visit_ids[0])
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ for expected_visit in expected_visits:
+ origin_visit_url = reverse(
+ 'api-1-origin-visit',
+ url_args={'origin_url': new_origin['url'],
+ 'visit_id': expected_visit['visit']})
+ snapshot_url = reverse(
+ 'api-1-snapshot',
+ url_args={'snapshot_id': expected_visit['snapshot']})
+ expected_visit['origin'] = new_origin['url']
+ expected_visit['origin_visit_url'] = origin_visit_url
+ expected_visit['snapshot_url'] = snapshot_url
+
+ assert rv.data == expected_visits
+
+
+@given(new_origin(), visit_dates(3), new_snapshots(3))
+def test_api_lookup_origin_visit(api_client, archive_data, new_origin,
+ visit_dates, new_snapshots):
+ archive_data.origin_add_one(new_origin)
+ for i, visit_date in enumerate(visit_dates):
+ origin_visit = archive_data.origin_visit_add(
+ new_origin['url'], visit_date, type='git')
+ visit_id = origin_visit['visit']
+ archive_data.snapshot_add([new_snapshots[i]])
+ archive_data.origin_visit_update(
+ new_origin['url'], origin_visit['visit'],
+ snapshot=new_snapshots[i]['id'])
+ url = reverse('api-1-origin-visit',
+ url_args={'origin_url': new_origin['url'],
+ 'visit_id': visit_id})
+
+ rv = api_client.get(url)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ expected_visit = archive_data.origin_visit_get_by(
+ new_origin['url'], visit_id)
origin_url = reverse('api-1-origin',
url_args={'origin_url': new_origin['url']})
@@ -318,331 +221,276 @@
expected_visit['origin_url'] = origin_url
expected_visit['snapshot_url'] = snapshot_url
- self.assertEqual(rv.data, expected_visit)
+ assert rv.data == expected_visit
- @given(origin())
- def test_api_lookup_origin_visit_not_found(self, origin):
- all_visits = list(reversed(get_origin_visits(origin)))
+@given(new_origin())
+def test_api_lookup_origin_visit_latest_no_visit(api_client, archive_data,
+ new_origin):
+ archive_data.origin_add_one(new_origin)
- max_visit_id = max([v['visit'] for v in all_visits])
+ url = reverse('api-1-origin-visit-latest',
+ url_args={'origin_url': new_origin['url']})
- url = reverse('api-1-origin-visit',
- url_args={'origin_url': origin['url'],
- 'visit_id': max_visit_id + 1})
-
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Origin %s or its visit with id %s not found!' %
- (origin['url'], max_visit_id+1)
- })
-
- @pytest.mark.origin_id
- def test_api_origins(self):
- origins = get_tests_data()['origins']
- origin_urls = {origin['url'] for origin in origins}
-
- # Get only one
- url = reverse('api-1-origins',
- query_params={'origin_count': 1})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), 1)
- self.assertLess({origin['url'] for origin in rv.data}, origin_urls)
-
- # Get all
- url = reverse('api-1-origins',
- query_params={'origin_count': len(origins)})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), len(origins))
- self.assertEqual({origin['url'] for origin in rv.data}, origin_urls)
-
- # Get "all + 10"
- url = reverse('api-1-origins',
- query_params={'origin_count': len(origins)+10})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), len(origins))
- self.assertEqual({origin['url'] for origin in rv.data}, origin_urls)
-
- @pytest.mark.origin_id
- @given(strategies.integers(min_value=1))
- def test_api_origins_scroll(self, origin_count):
- origins = get_tests_data()['origins']
- origin_urls = {origin['url'] for origin in origins}
-
- url = reverse('api-1-origins',
- query_params={'origin_count': origin_count})
-
- results = self._scroll_results(url)
-
- self.assertEqual(len(results), len(origins))
- self.assertEqual({origin['url'] for origin in results}, origin_urls)
-
- @given(origin())
- def test_api_origin_by_url(self, origin):
-
- url = reverse('api-1-origin',
- url_args={'origin_url': origin['url']})
- rv = self.client.get(url)
-
- expected_origin = self.origin_get(origin)
-
- origin_visits_url = reverse('api-1-origin-visits',
- url_args={'origin_url': origin['url']})
-
- expected_origin['origin_visits_url'] = origin_visits_url
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, expected_origin)
-
- @given(new_origin())
- def test_api_origin_not_found(self, new_origin):
-
- url = reverse('api-1-origin',
- url_args={'origin_url': new_origin['url']})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Origin with url %s not found!' % new_origin['url']
- })
-
- @pytest.mark.origin_id
- def test_api_origin_search(self):
- expected_origins = {
- 'https://github.com/wcoder/highlightjs-line-numbers.js',
- 'https://github.com/memononen/libtess2',
- }
+ rv = api_client.get(url)
+ assert rv.status_code == 404, rv.data
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'No visit for origin %s found' % new_origin['url']
+ }
- # Search for 'github.com', get only one
- url = reverse('api-1-origin-search',
- url_args={'url_pattern': 'github.com'},
- query_params={'limit': 1})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), 1)
- self.assertLess({origin['url'] for origin in rv.data},
- expected_origins)
-
- # Search for 'github.com', get all
- url = reverse('api-1-origin-search',
- url_args={'url_pattern': 'github.com'},
- query_params={'limit': 2})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual({origin['url'] for origin in rv.data},
- expected_origins)
-
- # Search for 'github.com', get more than available
- url = reverse('api-1-origin-search',
- url_args={'url_pattern': 'github.com'},
- query_params={'limit': 10})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual({origin['url'] for origin in rv.data},
- expected_origins)
-
- @pytest.mark.origin_id
- def test_api_origin_search_regexp(self):
- expected_origins = {
- 'https://github.com/memononen/libtess2',
- 'repo_with_submodules'
- }
- url = reverse('api-1-origin-search',
- url_args={'url_pattern': '(repo|libtess)'},
- query_params={'limit': 10,
- 'regexp': True})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual({origin['url'] for origin in rv.data},
- expected_origins)
-
- @pytest.mark.origin_id
- @given(strategies.integers(min_value=1))
- def test_api_origin_search_scroll(self, limit):
- expected_origins = {
- 'https://github.com/wcoder/highlightjs-line-numbers.js',
- 'https://github.com/memononen/libtess2',
+@given(new_origin(), visit_dates(2), new_snapshots(1))
+def test_api_lookup_origin_visit_latest(api_client, archive_data, new_origin,
+ visit_dates, new_snapshots):
+ archive_data.origin_add_one(new_origin)
+ visit_dates.sort()
+ visit_ids = []
+ for i, visit_date in enumerate(visit_dates):
+ origin_visit = archive_data.origin_visit_add(
+ new_origin['url'], visit_date, type='git')
+ visit_ids.append(origin_visit['visit'])
+
+ archive_data.snapshot_add([new_snapshots[0]])
+ archive_data.origin_visit_update(
+ new_origin['url'], visit_ids[0],
+ snapshot=new_snapshots[0]['id'])
+
+ url = reverse('api-1-origin-visit-latest',
+ url_args={'origin_url': new_origin['url']})
+
+ rv = api_client.get(url)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ expected_visit = archive_data.origin_visit_get_by(
+ new_origin['url'], visit_ids[1])
+
+ origin_url = reverse('api-1-origin',
+ url_args={'origin_url': new_origin['url']})
+
+ expected_visit['origin'] = new_origin['url']
+ expected_visit['origin_url'] = origin_url
+ expected_visit['snapshot_url'] = None
+
+ assert rv.data == expected_visit
+
+
+@given(new_origin(), visit_dates(2), new_snapshots(1))
+def test_api_lookup_origin_visit_latest_with_snapshot(api_client, archive_data,
+ new_origin, visit_dates,
+ new_snapshots):
+ archive_data.origin_add_one(new_origin)
+ visit_dates.sort()
+ visit_ids = []
+ for i, visit_date in enumerate(visit_dates):
+ origin_visit = archive_data.origin_visit_add(
+ new_origin['url'], visit_date, type='git')
+ visit_ids.append(origin_visit['visit'])
+
+ archive_data.snapshot_add([new_snapshots[0]])
+ archive_data.origin_visit_update(
+ new_origin['url'], visit_ids[0],
+ snapshot=new_snapshots[0]['id'])
+
+ url = reverse('api-1-origin-visit-latest',
+ url_args={'origin_url': new_origin['url']})
+ url += '?require_snapshot=true'
+
+ rv = api_client.get(url)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ expected_visit = archive_data.origin_visit_get_by(
+ new_origin['url'], visit_ids[0])
+
+ origin_url = reverse('api-1-origin',
+ url_args={'origin_url': new_origin['url']})
+ snapshot_url = reverse(
+ 'api-1-snapshot',
+ url_args={'snapshot_id': expected_visit['snapshot']})
+
+ expected_visit['origin'] = new_origin['url']
+ expected_visit['origin_url'] = origin_url
+ expected_visit['snapshot_url'] = snapshot_url
+
+ assert rv.data == expected_visit
+
+
+@given(origin())
+def test_api_lookup_origin_visit_not_found(api_client, origin):
+
+ all_visits = list(reversed(get_origin_visits(origin)))
+
+ max_visit_id = max([v['visit'] for v in all_visits])
+
+ url = reverse('api-1-origin-visit',
+ url_args={'origin_url': origin['url'],
+ 'visit_id': max_visit_id + 1})
+
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Origin %s or its visit with id %s not found!' %
+ (origin['url'], max_visit_id+1)
+ }
+
+
+def test_api_origin_search_limit(api_client, archive_data):
+ archive_data.origin_add([
+ {'url': 'http://foobar/{}'.format(i)}
+ for i in range(2000)
+ ])
+
+ url = reverse('api-1-origin-search',
+ url_args={'url_pattern': 'foobar'},
+ query_params={'limit': 1050})
+ rv = api_client.get(url)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert len(rv.data) == 1000
+
+
+@given(origin())
+def test_api_origin_metadata_search(api_client, mocker, origin):
+ mock_idx_storage = mocker.patch('swh.web.common.service.idx_storage')
+ oimsft = mock_idx_storage.origin_intrinsic_metadata_search_fulltext
+ oimsft.side_effect = lambda conjunction, limit: [{
+ 'from_revision': (
+ b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed '
+ b'\xf2U\xfa\x05B8'),
+ 'metadata': {'author': 'Jane Doe'},
+ 'id': origin['url'],
+ 'tool': {
+ 'configuration': {
+ 'context': ['NpmMapping', 'CodemetaMapping'],
+ 'type': 'local'
+ },
+ 'id': 3,
+ 'name': 'swh-metadata-detector',
+ 'version': '0.0.1'
+ }
+ }]
+
+ url = reverse('api-1-origin-metadata-search',
+ query_params={'fulltext': 'Jane Doe'})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.content
+ assert rv['Content-Type'] == 'application/json'
+ expected_data = [{
+ 'url': origin['url'],
+ 'metadata': {
+ 'metadata': {'author': 'Jane Doe'},
+ 'from_revision': (
+ '7026b7c1a2af56521e951c01ed20f255fa054238'),
+ 'tool': {
+ 'configuration': {
+ 'context': ['NpmMapping', 'CodemetaMapping'],
+ 'type': 'local'
+ },
+ 'id': 3,
+ 'name': 'swh-metadata-detector',
+ 'version': '0.0.1',
+ }
}
+ }]
+
+ assert rv.data == expected_data
+ oimsft.assert_called_with(conjunction=['Jane Doe'], limit=70)
+
+
+@given(origin())
+def test_api_origin_metadata_search_limit(api_client, mocker, origin):
+ mock_idx_storage = mocker.patch('swh.web.common.service.idx_storage')
+ oimsft = mock_idx_storage.origin_intrinsic_metadata_search_fulltext
+
+ oimsft.side_effect = lambda conjunction, limit: [{
+ 'from_revision': (
+ b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed '
+ b'\xf2U\xfa\x05B8'),
+ 'metadata': {'author': 'Jane Doe'},
+ 'id': origin['url'],
+ 'tool': {
+ 'configuration': {
+ 'context': ['NpmMapping', 'CodemetaMapping'],
+ 'type': 'local'
+ },
+ 'id': 3,
+ 'name': 'swh-metadata-detector',
+ 'version': '0.0.1'
+ }
+ }]
+
+ url = reverse('api-1-origin-metadata-search',
+ query_params={'fulltext': 'Jane Doe'})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.content
+ assert rv['Content-Type'] == 'application/json'
+ assert len(rv.data) == 1
+ oimsft.assert_called_with(conjunction=['Jane Doe'], limit=70)
+
+ url = reverse('api-1-origin-metadata-search',
+ query_params={'fulltext': 'Jane Doe',
+ 'limit': 10})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.content
+ assert rv['Content-Type'] == 'application/json'
+ assert len(rv.data) == 1
+ oimsft.assert_called_with(conjunction=['Jane Doe'], limit=10)
+
+ url = reverse('api-1-origin-metadata-search',
+ query_params={'fulltext': 'Jane Doe',
+ 'limit': 987})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.content
+ assert rv['Content-Type'] == 'application/json'
+ assert len(rv.data) == 1
+ oimsft.assert_called_with(conjunction=['Jane Doe'], limit=100)
+
+
+@given(origin())
+def test_api_origin_intrinsic_metadata(api_client, mocker, origin):
+ mock_idx_storage = mocker.patch('swh.web.common.service.idx_storage')
+ oimg = mock_idx_storage.origin_intrinsic_metadata_get
+ oimg.side_effect = lambda origin_urls: [{
+ 'from_revision': (
+ b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed '
+ b'\xf2U\xfa\x05B8'),
+ 'metadata': {'author': 'Jane Doe'},
+ 'id': origin['url'],
+ 'tool': {
+ 'configuration': {
+ 'context': ['NpmMapping', 'CodemetaMapping'],
+ 'type': 'local'
+ },
+ 'id': 3,
+ 'name': 'swh-metadata-detector',
+ 'version': '0.0.1'
+ }
+ }]
+
+ url = reverse('api-origin-intrinsic-metadata',
+ url_args={'origin_url': origin['url']})
+ rv = api_client.get(url)
+
+ oimg.assert_called_once_with([origin['url']])
+ assert rv.status_code == 200, rv.content
+ assert rv['Content-Type'] == 'application/json'
+ expected_data = {'author': 'Jane Doe'}
+ assert rv.data == expected_data
+
+
+def test_api_origin_metadata_search_invalid(api_client, mocker):
+ mock_idx_storage = mocker.patch('swh.web.common.service.idx_storage')
+ url = reverse('api-1-origin-metadata-search')
+ rv = api_client.get(url)
- url = reverse('api-1-origin-search',
- url_args={'url_pattern': 'github.com'},
- query_params={'limit': limit})
-
- results = self._scroll_results(url)
-
- self.assertEqual({origin['url'] for origin in results},
- expected_origins)
-
- def test_api_origin_search_limit(self):
- self.storage.origin_add([
- {'url': 'http://foobar/{}'.format(i)}
- for i in range(2000)
- ])
-
- url = reverse('api-1-origin-search',
- url_args={'url_pattern': 'foobar'},
- query_params={'limit': 1050})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), 1000)
-
- @given(origin())
- def test_api_origin_metadata_search(self, origin):
- with patch('swh.web.common.service.idx_storage') as mock_idx_storage:
- mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
- .side_effect = lambda conjunction, limit: [{
- 'from_revision': (
- b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed '
- b'\xf2U\xfa\x05B8'),
- 'metadata': {'author': 'Jane Doe'},
- 'id': origin['url'],
- 'tool': {
- 'configuration': {
- 'context': ['NpmMapping', 'CodemetaMapping'],
- 'type': 'local'
- },
- 'id': 3,
- 'name': 'swh-metadata-detector',
- 'version': '0.0.1'
- }
- }]
-
- url = reverse('api-1-origin-metadata-search',
- query_params={'fulltext': 'Jane Doe'})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.content)
- self.assertEqual(rv['Content-Type'], 'application/json')
- expected_data = [{
- 'url': origin['url'],
- 'metadata': {
- 'metadata': {'author': 'Jane Doe'},
- 'from_revision': (
- '7026b7c1a2af56521e951c01ed20f255fa054238'),
- 'tool': {
- 'configuration': {
- 'context': ['NpmMapping', 'CodemetaMapping'],
- 'type': 'local'
- },
- 'id': 3,
- 'name': 'swh-metadata-detector',
- 'version': '0.0.1',
- }
- }
- }]
- self.assertEqual(rv.data, expected_data)
- mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
- .assert_called_with(conjunction=['Jane Doe'], limit=70)
-
- @given(origin())
- def test_api_origin_metadata_search_limit(self, origin):
- with patch('swh.web.common.service.idx_storage') as mock_idx_storage:
- mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
- .side_effect = lambda conjunction, limit: [{
- 'from_revision': (
- b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed '
- b'\xf2U\xfa\x05B8'),
- 'metadata': {'author': 'Jane Doe'},
- 'id': origin['url'],
- 'tool': {
- 'configuration': {
- 'context': ['NpmMapping', 'CodemetaMapping'],
- 'type': 'local'
- },
- 'id': 3,
- 'name': 'swh-metadata-detector',
- 'version': '0.0.1'
- }
- }]
-
- url = reverse('api-1-origin-metadata-search',
- query_params={'fulltext': 'Jane Doe'})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.content)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), 1)
- mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
- .assert_called_with(conjunction=['Jane Doe'], limit=70)
-
- url = reverse('api-1-origin-metadata-search',
- query_params={'fulltext': 'Jane Doe',
- 'limit': 10})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.content)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), 1)
- mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
- .assert_called_with(conjunction=['Jane Doe'], limit=10)
-
- url = reverse('api-1-origin-metadata-search',
- query_params={'fulltext': 'Jane Doe',
- 'limit': 987})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.content)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(len(rv.data), 1)
- mock_idx_storage.origin_intrinsic_metadata_search_fulltext \
- .assert_called_with(conjunction=['Jane Doe'], limit=100)
-
- @given(origin())
- def test_api_origin_intrinsic_metadata(self, origin):
- with patch('swh.web.common.service.idx_storage') as mock_idx_storage:
- mock_idx_storage.origin_intrinsic_metadata_get \
- .side_effect = lambda origin_urls: [{
- 'from_revision': (
- b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed '
- b'\xf2U\xfa\x05B8'),
- 'metadata': {'author': 'Jane Doe'},
- 'id': origin['url'],
- 'tool': {
- 'configuration': {
- 'context': ['NpmMapping', 'CodemetaMapping'],
- 'type': 'local'
- },
- 'id': 3,
- 'name': 'swh-metadata-detector',
- 'version': '0.0.1'
- }
- }]
-
- url = reverse('api-origin-intrinsic-metadata',
- url_args={'origin_url': origin['url']})
- rv = self.client.get(url)
-
- mock_idx_storage.origin_intrinsic_metadata_get \
- .assert_called_once_with([origin['url']])
- self.assertEqual(rv.status_code, 200, rv.content)
- self.assertEqual(rv['Content-Type'], 'application/json')
- expected_data = {'author': 'Jane Doe'}
- self.assertEqual(rv.data, expected_data)
-
- @patch('swh.web.common.service.idx_storage')
- def test_api_origin_metadata_search_invalid(self, mock_idx_storage):
-
- url = reverse('api-1-origin-metadata-search')
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 400, rv.content)
- mock_idx_storage.assert_not_called()
+ assert rv.status_code == 400, rv.content
+ mock_idx_storage.assert_not_called()
diff --git a/swh/web/tests/api/views/test_origin_save.py b/swh/web/tests/api/views/test_origin_save.py
--- a/swh/web/tests/api/views/test_origin_save.py
+++ b/swh/web/tests/api/views/test_origin_save.py
@@ -3,12 +3,11 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
+import pytest
+
from datetime import datetime, timedelta
from django.utils import timezone
-from rest_framework.test import APITestCase
-from unittest.mock import patch
-
from swh.web.common.utils import reverse
from swh.web.common.models import (
SaveUnauthorizedOrigin, SaveOriginRequest,
@@ -19,243 +18,234 @@
SAVE_TASK_NOT_CREATED, SAVE_TASK_NOT_YET_SCHEDULED,
SAVE_TASK_SCHEDULED, SAVE_TASK_FAILED, SAVE_TASK_SUCCEED
)
-from swh.web.tests.testcase import WebTestCase
-
-
-class SaveApiTestCase(WebTestCase, APITestCase):
-
- @classmethod
- def setUpTestData(cls): # noqa: N802
- SaveUnauthorizedOrigin.objects.create(
- url='https://github.com/user/illegal_repo')
- SaveUnauthorizedOrigin.objects.create(
- url='https://gitlab.com/user_to_exclude')
-
- def test_invalid_visit_type(self):
- url = reverse('api-1-save-origin',
- url_args={'visit_type': 'foo',
- 'origin_url': 'https://github.com/torvalds/linux'}) # noqa
-
- response = self.client.post(url)
- self.assertEqual(response.status_code, 400)
-
- def test_invalid_origin_url(self):
- url = reverse('api-1-save-origin',
- url_args={'visit_type': 'git',
- 'origin_url': 'bar'})
-
- response = self.client.post(url)
- self.assertEqual(response.status_code, 400)
-
- def check_created_save_request_status(self, mock_scheduler, origin_url,
- scheduler_task_status,
- expected_request_status,
- expected_task_status=None,
- visit_date=None):
-
- if not scheduler_task_status:
- mock_scheduler.get_tasks.return_value = []
- else:
- mock_scheduler.get_tasks.return_value = \
- [{
- 'priority': 'high',
- 'policy': 'oneshot',
- 'type': 'load-git',
- 'arguments': {
- 'kwargs': {
- 'repo_url': origin_url
- },
- 'args': []
- },
- 'status': scheduler_task_status,
- 'id': 1,
- }]
-
- mock_scheduler.create_tasks.return_value = \
- [{
- 'priority': 'high',
- 'policy': 'oneshot',
- 'type': 'load-git',
- 'arguments': {
- 'kwargs': {
- 'repo_url': origin_url
- },
- 'args': []
- },
- 'status': 'next_run_not_scheduled',
- 'id': 1,
- }]
-
- url = reverse('api-1-save-origin',
- url_args={'visit_type': 'git',
- 'origin_url': origin_url})
-
- with patch('swh.web.common.origin_save._get_visit_info_for_save_request') as mock_visit_date: # noqa
- mock_visit_date.return_value = (visit_date, None)
- response = self.client.post(url)
-
- if expected_request_status != SAVE_REQUEST_REJECTED:
- self.assertEqual(response.status_code, 200)
- self.assertEqual(response.data['save_request_status'],
- expected_request_status)
- self.assertEqual(response.data['save_task_status'],
- expected_task_status)
-
- else:
- self.assertEqual(response.status_code, 403)
-
- def check_save_request_status(self, mock_scheduler, origin_url,
- expected_request_status,
- expected_task_status,
- scheduler_task_status='next_run_not_scheduled', # noqa
- visit_date=None):
-
- mock_scheduler.get_tasks.return_value = \
- [{
- 'priority': 'high',
- 'policy': 'oneshot',
- 'type': 'load-git',
- 'arguments': {
- 'kwargs': {
- 'repo_url': origin_url
- },
- 'args': []
+
+pytestmark = pytest.mark.django_db
+
+
+@pytest.fixture(autouse=True)
+def populated_db():
+ SaveUnauthorizedOrigin.objects.create(
+ url='https://github.com/user/illegal_repo')
+ SaveUnauthorizedOrigin.objects.create(
+ url='https://gitlab.com/user_to_exclude')
+
+
+def test_invalid_visit_type(api_client):
+ url = reverse('api-1-save-origin',
+ url_args={'visit_type': 'foo',
+ 'origin_url': 'https://github.com/torvalds/linux'})
+
+ response = api_client.post(url)
+ assert response.status_code == 400
+
+
+def test_invalid_origin_url(api_client):
+ url = reverse('api-1-save-origin',
+ url_args={'visit_type': 'git',
+ 'origin_url': 'bar'})
+
+ response = api_client.post(url)
+ assert response.status_code == 400
+
+
+def check_created_save_request_status(api_client, mocker, origin_url,
+ scheduler_task_status,
+ expected_request_status,
+ expected_task_status=None,
+ visit_date=None):
+
+ mock_scheduler = mocker.patch('swh.web.common.origin_save.scheduler')
+ if not scheduler_task_status:
+ mock_scheduler.get_tasks.return_value = []
+ else:
+ mock_scheduler.get_tasks.return_value = [{
+ 'priority': 'high',
+ 'policy': 'oneshot',
+ 'type': 'load-git',
+ 'arguments': {
+ 'kwargs': {
+ 'repo_url': origin_url
},
- 'status': scheduler_task_status,
- 'id': 1,
- }]
-
- url = reverse('api-1-save-origin',
- url_args={'visit_type': 'git',
- 'origin_url': origin_url})
-
- with patch('swh.web.common.origin_save._get_visit_info_for_save_request') as mock_visit_date: # noqa
- mock_visit_date.return_value = (visit_date, None)
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- save_request_data = response.data[0]
-
- self.assertEqual(save_request_data['save_request_status'],
- expected_request_status)
- self.assertEqual(save_request_data['save_task_status'],
- expected_task_status)
-
- # Check that save task status is still available when
- # the scheduler task has been archived
- mock_scheduler.get_tasks.return_value = []
- response = self.client.get(url)
- self.assertEqual(response.status_code, 200)
- save_request_data = response.data[0]
- self.assertEqual(save_request_data['save_task_status'],
- expected_task_status)
-
- @patch('swh.web.common.origin_save.scheduler')
- def test_save_request_rejected(self, mock_scheduler):
- origin_url = 'https://github.com/user/illegal_repo'
- self.check_created_save_request_status(mock_scheduler, origin_url,
- None, SAVE_REQUEST_REJECTED)
- self.check_save_request_status(mock_scheduler, origin_url,
- SAVE_REQUEST_REJECTED,
- SAVE_TASK_NOT_CREATED)
-
- @patch('swh.web.common.origin_save.scheduler')
- def test_save_request_pending(self, mock_scheduler):
- origin_url = 'https://unkwownforge.com/user/repo'
- self.check_created_save_request_status(mock_scheduler, origin_url,
- None, SAVE_REQUEST_PENDING,
- SAVE_TASK_NOT_CREATED)
- self.check_save_request_status(mock_scheduler, origin_url,
- SAVE_REQUEST_PENDING,
- SAVE_TASK_NOT_CREATED)
-
- @patch('swh.web.common.origin_save.scheduler')
- def test_save_request_succeed(self, mock_scheduler):
- origin_url = 'https://github.com/Kitware/CMake'
- self.check_created_save_request_status(mock_scheduler, origin_url,
- None, SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_NOT_YET_SCHEDULED)
- self.check_save_request_status(mock_scheduler, origin_url,
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_SCHEDULED,
- scheduler_task_status='next_run_scheduled') # noqa
- self.check_save_request_status(mock_scheduler, origin_url,
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_SUCCEED,
- scheduler_task_status='completed',
- visit_date=None) # noqa
- visit_date = datetime.now(tz=timezone.utc) + timedelta(hours=1)
- self.check_save_request_status(mock_scheduler, origin_url,
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_SUCCEED,
- scheduler_task_status='completed',
- visit_date=visit_date) # noqa
-
- @patch('swh.web.common.origin_save.scheduler')
- def test_save_request_failed(self, mock_scheduler):
- origin_url = 'https://gitlab.com/inkscape/inkscape'
- self.check_created_save_request_status(mock_scheduler, origin_url,
- None, SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_NOT_YET_SCHEDULED)
- self.check_save_request_status(mock_scheduler, origin_url,
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_SCHEDULED,
- scheduler_task_status='next_run_scheduled') # noqa
- self.check_save_request_status(mock_scheduler, origin_url,
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_FAILED,
- scheduler_task_status='disabled') # noqa
-
- @patch('swh.web.common.origin_save.scheduler')
- def test_create_save_request_only_when_needed(self, mock_scheduler):
- origin_url = 'https://github.com/webpack/webpack'
- SaveOriginRequest.objects.create(visit_type='git',
- origin_url=origin_url,
- status=SAVE_REQUEST_ACCEPTED, # noqa
- loading_task_id=56)
-
- self.check_created_save_request_status(mock_scheduler, origin_url,
- 'next_run_not_scheduled',
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_NOT_YET_SCHEDULED)
- sors = list(SaveOriginRequest.objects.filter(visit_type='git',
- origin_url=origin_url))
- self.assertEqual(len(sors), 1)
-
- self.check_created_save_request_status(mock_scheduler, origin_url,
- 'next_run_scheduled',
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_SCHEDULED)
- sors = list(SaveOriginRequest.objects.filter(visit_type='git',
- origin_url=origin_url))
- self.assertEqual(len(sors), 1)
-
- visit_date = datetime.now(tz=timezone.utc) + timedelta(hours=1)
- self.check_created_save_request_status(mock_scheduler, origin_url,
- 'completed',
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_NOT_YET_SCHEDULED,
- visit_date=visit_date)
- sors = list(SaveOriginRequest.objects.filter(visit_type='git',
- origin_url=origin_url))
- self.assertEqual(len(sors), 2)
-
- self.check_created_save_request_status(mock_scheduler, origin_url,
- 'disabled',
- SAVE_REQUEST_ACCEPTED,
- SAVE_TASK_NOT_YET_SCHEDULED)
- sors = list(SaveOriginRequest.objects.filter(visit_type='git',
- origin_url=origin_url))
- self.assertEqual(len(sors), 3)
-
- def test_get_save_requests_unknown_origin(self):
- unknown_origin_url = 'https://gitlab.com/foo/bar'
- url = reverse('api-1-save-origin',
- url_args={'visit_type': 'git',
- 'origin_url': unknown_origin_url})
- response = self.client.get(url)
- self.assertEqual(response.status_code, 404)
- self.assertEqual(response.data, {
- 'exception': 'NotFoundExc',
- 'reason': ('No save requests found for visit of type '
- 'git on origin with url %s.') % unknown_origin_url
- })
+ 'args': []
+ },
+ 'status': scheduler_task_status,
+ 'id': 1,
+ }]
+
+ mock_scheduler.create_tasks.return_value = [{
+ 'priority': 'high',
+ 'policy': 'oneshot',
+ 'type': 'load-git',
+ 'arguments': {
+ 'kwargs': {
+ 'repo_url': origin_url
+ },
+ 'args': []
+ },
+ 'status': 'next_run_not_scheduled',
+ 'id': 1,
+ }]
+
+ url = reverse('api-1-save-origin',
+ url_args={'visit_type': 'git',
+ 'origin_url': origin_url})
+
+ mock_visit_date = mocker.patch(('swh.web.common.origin_save.'
+ '_get_visit_info_for_save_request'))
+ mock_visit_date.return_value = (visit_date, None)
+ response = api_client.post(url)
+
+ if expected_request_status != SAVE_REQUEST_REJECTED:
+ assert response.status_code == 200, response.data
+ assert (response.data['save_request_status'] ==
+ expected_request_status)
+ assert response.data['save_task_status'] == expected_task_status
+ else:
+ assert response.status_code == 403, response.data
+
+
+def check_save_request_status(api_client, mocker, origin_url,
+ expected_request_status,
+ expected_task_status,
+ scheduler_task_status='next_run_not_scheduled',
+ visit_date=None):
+ mock_scheduler = mocker.patch('swh.web.common.origin_save.scheduler')
+ mock_scheduler.get_tasks.return_value = [{
+ 'priority': 'high',
+ 'policy': 'oneshot',
+ 'type': 'load-git',
+ 'arguments': {
+ 'kwargs': {
+ 'repo_url': origin_url
+ },
+ 'args': []
+ },
+ 'status': scheduler_task_status,
+ 'id': 1,
+ }]
+
+ url = reverse('api-1-save-origin',
+ url_args={'visit_type': 'git',
+ 'origin_url': origin_url})
+
+ mock_visit_date = mocker.patch(('swh.web.common.origin_save.'
+ '_get_visit_info_for_save_request'))
+ mock_visit_date.return_value = (visit_date, None)
+ response = api_client.get(url)
+ assert response.status_code == 200, response.data
+ save_request_data = response.data[0]
+
+ assert (save_request_data['save_request_status'] ==
+ expected_request_status)
+ assert save_request_data['save_task_status'] == expected_task_status
+
+ # Check that save task status is still available when
+ # the scheduler task has been archived
+ mock_scheduler.get_tasks.return_value = []
+ response = api_client.get(url)
+ assert response.status_code == 200
+ save_request_data = response.data[0]
+ assert save_request_data['save_task_status'] == expected_task_status
+
+
+def test_save_request_rejected(api_client, mocker):
+ origin_url = 'https://github.com/user/illegal_repo'
+ check_created_save_request_status(api_client, mocker, origin_url,
+ None, SAVE_REQUEST_REJECTED)
+ check_save_request_status(api_client, mocker, origin_url,
+ SAVE_REQUEST_REJECTED, SAVE_TASK_NOT_CREATED)
+
+
+def test_save_request_pending(api_client, mocker):
+ origin_url = 'https://unkwownforge.com/user/repo'
+ check_created_save_request_status(api_client, mocker,
+ origin_url, None, SAVE_REQUEST_PENDING,
+ SAVE_TASK_NOT_CREATED)
+ check_save_request_status(api_client, mocker, origin_url,
+ SAVE_REQUEST_PENDING, SAVE_TASK_NOT_CREATED)
+
+
+def test_save_request_succeed(api_client, mocker):
+ origin_url = 'https://github.com/Kitware/CMake'
+ check_created_save_request_status(api_client, mocker, origin_url,
+ None, SAVE_REQUEST_ACCEPTED,
+ SAVE_TASK_NOT_YET_SCHEDULED)
+ check_save_request_status(api_client, mocker, origin_url,
+ SAVE_REQUEST_ACCEPTED, SAVE_TASK_SCHEDULED,
+ scheduler_task_status='next_run_scheduled')
+ check_save_request_status(api_client, mocker, origin_url,
+ SAVE_REQUEST_ACCEPTED, SAVE_TASK_SUCCEED,
+ scheduler_task_status='completed',
+ visit_date=None)
+ visit_date = datetime.now(tz=timezone.utc) + timedelta(hours=1)
+ check_save_request_status(api_client, mocker, origin_url,
+ SAVE_REQUEST_ACCEPTED, SAVE_TASK_SUCCEED,
+ scheduler_task_status='completed',
+ visit_date=visit_date)
+
+
+def test_save_request_failed(api_client, mocker):
+ origin_url = 'https://gitlab.com/inkscape/inkscape'
+ check_created_save_request_status(api_client, mocker, origin_url,
+ None, SAVE_REQUEST_ACCEPTED,
+ SAVE_TASK_NOT_YET_SCHEDULED)
+ check_save_request_status(api_client, mocker, origin_url,
+ SAVE_REQUEST_ACCEPTED, SAVE_TASK_SCHEDULED,
+ scheduler_task_status='next_run_scheduled')
+ check_save_request_status(api_client, mocker, origin_url,
+ SAVE_REQUEST_ACCEPTED, SAVE_TASK_FAILED,
+ scheduler_task_status='disabled')
+
+
+def test_create_save_request_only_when_needed(api_client, mocker):
+ origin_url = 'https://github.com/webpack/webpack'
+ SaveOriginRequest.objects.create(visit_type='git', origin_url=origin_url,
+ status=SAVE_REQUEST_ACCEPTED,
+ loading_task_id=56)
+
+ check_created_save_request_status(api_client, mocker, origin_url,
+ 'next_run_not_scheduled',
+ SAVE_REQUEST_ACCEPTED,
+ SAVE_TASK_NOT_YET_SCHEDULED)
+
+ sors = list(SaveOriginRequest.objects.filter(visit_type='git',
+ origin_url=origin_url))
+ assert len(sors) == 1
+
+ check_created_save_request_status(api_client, mocker, origin_url,
+ 'next_run_scheduled',
+ SAVE_REQUEST_ACCEPTED,
+ SAVE_TASK_SCHEDULED)
+ sors = list(SaveOriginRequest.objects.filter(visit_type='git',
+ origin_url=origin_url))
+ assert len(sors) == 1
+
+ visit_date = datetime.now(tz=timezone.utc) + timedelta(hours=1)
+ check_created_save_request_status(api_client, mocker, origin_url,
+ 'completed', SAVE_REQUEST_ACCEPTED,
+ SAVE_TASK_NOT_YET_SCHEDULED,
+ visit_date=visit_date)
+ sors = list(SaveOriginRequest.objects.filter(visit_type='git',
+ origin_url=origin_url))
+ assert len(sors) == 2
+
+ check_created_save_request_status(api_client, mocker, origin_url,
+ 'disabled', SAVE_REQUEST_ACCEPTED,
+ SAVE_TASK_NOT_YET_SCHEDULED)
+ sors = list(SaveOriginRequest.objects.filter(visit_type='git',
+ origin_url=origin_url))
+ assert len(sors) == 3
+
+
+def test_get_save_requests_unknown_origin(api_client):
+ unknown_origin_url = 'https://gitlab.com/foo/bar'
+ url = reverse('api-1-save-origin',
+ url_args={'visit_type': 'git',
+ 'origin_url': unknown_origin_url})
+ response = api_client.get(url)
+ assert response.status_code == 404
+ assert response.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': ('No save requests found for visit of type '
+ 'git on origin with url %s.') % unknown_origin_url
+ }
diff --git a/swh/web/tests/api/views/test_release.py b/swh/web/tests/api/views/test_release.py
--- a/swh/web/tests/api/views/test_release.py
+++ b/swh/web/tests/api/views/test_release.py
@@ -5,7 +5,6 @@
from datetime import datetime
from hypothesis import given
-from rest_framework.test import APITestCase
from swh.model.hashutil import hash_to_bytes
from swh.web.common.utils import reverse
@@ -13,104 +12,103 @@
from swh.web.tests.strategies import (
release, sha1, content, directory
)
-from swh.web.tests.testcase import WebTestCase
-class ReleaseApiTestCase(WebTestCase, APITestCase):
+@given(release())
+def test_api_release(api_client, archive_data, release):
+ url = reverse('api-1-release', url_args={'sha1_git': release})
+
+ rv = api_client.get(url)
+
+ expected_release = archive_data.release_get(release)
+ target_revision = expected_release['target']
+ target_url = reverse('api-1-revision',
+ url_args={'sha1_git': target_revision})
+ expected_release['target_url'] = target_url
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == expected_release
+
+
+@given(sha1(), sha1(), sha1(), content(), directory(), release())
+def test_api_release_target_type_not_a_revision(api_client, archive_data,
+ new_rel1, new_rel2,
+ new_rel3, content,
+ directory, release):
+ for new_rel_id, target_type, target in (
+ (new_rel1, 'content', content),
+ (new_rel2, 'directory', directory),
+ (new_rel3, 'release', release)):
+
+ if target_type == 'content':
+ target = target['sha1_git']
+
+ sample_release = {
+ 'author': {
+ 'email': b'author@company.org',
+ 'fullname': b'author <author@company.org>',
+ 'name': b'author'
+ },
+ 'date': {
+ 'timestamp': int(datetime.now().timestamp()),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'id': hash_to_bytes(new_rel_id),
+ 'message': b'sample release message',
+ 'name': b'sample release',
+ 'synthetic': False,
+ 'target': hash_to_bytes(target),
+ 'target_type': target_type
+ }
+
+ archive_data.release_add([sample_release])
+
+ url = reverse('api-1-release', url_args={'sha1_git': new_rel_id})
+
+ rv = api_client.get(url)
+
+ expected_release = archive_data.release_get(new_rel_id)
+
+ if target_type == 'content':
+ url_args = {'q': 'sha1_git:%s' % target}
+ else:
+ url_args = {'sha1_git': target}
+
+ target_url = reverse('api-1-%s' % target_type,
+ url_args=url_args)
+ expected_release['target_url'] = target_url
- @given(release())
- def test_api_release(self, release):
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == expected_release
- url = reverse('api-1-release', url_args={'sha1_git': release})
- rv = self.client.get(url)
+def test_api_release_not_found(api_client):
+ unknown_release_ = random_sha1()
- expected_release = self.release_get(release)
- target_revision = expected_release['target']
- target_url = reverse('api-1-revision',
- url_args={'sha1_git': target_revision})
- expected_release['target_url'] = target_url
+ url = reverse('api-1-release', url_args={'sha1_git': unknown_release_})
+
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Release with sha1_git %s not found.' % unknown_release_
+ }
+
+
+@given(release())
+def test_api_release_uppercase(api_client, release):
+ url = reverse('api-1-release-uppercase-checksum',
+ url_args={'sha1_git': release.upper()})
+
+ resp = api_client.get(url)
+ assert resp.status_code == 302
+
+ redirect_url = reverse('api-1-release-uppercase-checksum',
+ url_args={'sha1_git': release})
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, expected_release)
-
- @given(sha1(), sha1(), sha1(), content(), directory(), release())
- def test_api_release_target_type_not_a_revision(self, new_rel1, new_rel2,
- new_rel3, content,
- directory, release):
-
- for new_rel_id, target_type, target in (
- (new_rel1, 'content', content),
- (new_rel2, 'directory', directory),
- (new_rel3, 'release', release)):
-
- if target_type == 'content':
- target = target['sha1_git']
-
- sample_release = {
- 'author': {
- 'email': b'author@company.org',
- 'fullname': b'author <author@company.org>',
- 'name': b'author'
- },
- 'date': {
- 'timestamp': int(datetime.now().timestamp()),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'id': hash_to_bytes(new_rel_id),
- 'message': b'sample release message',
- 'name': b'sample release',
- 'synthetic': False,
- 'target': hash_to_bytes(target),
- 'target_type': target_type
- }
-
- self.storage.release_add([sample_release])
-
- url = reverse('api-1-release', url_args={'sha1_git': new_rel_id})
-
- rv = self.client.get(url)
-
- expected_release = self.release_get(new_rel_id)
-
- if target_type == 'content':
- url_args = {'q': 'sha1_git:%s' % target}
- else:
- url_args = {'sha1_git': target}
-
- target_url = reverse('api-1-%s' % target_type,
- url_args=url_args)
- expected_release['target_url'] = target_url
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, expected_release)
-
- def test_api_release_not_found(self):
- unknown_release_ = random_sha1()
-
- url = reverse('api-1-release', url_args={'sha1_git': unknown_release_})
-
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Release with sha1_git %s not found.' % unknown_release_
- })
-
- @given(release())
- def test_api_release_uppercase(self, release):
- url = reverse('api-1-release-uppercase-checksum',
- url_args={'sha1_git': release.upper()})
-
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
-
- redirect_url = reverse('api-1-release-uppercase-checksum',
- url_args={'sha1_git': release})
-
- self.assertEqual(resp['location'], redirect_url)
+ assert resp['location'] == redirect_url
diff --git a/swh/web/tests/api/views/test_revision.py b/swh/web/tests/api/views/test_revision.py
--- a/swh/web/tests/api/views/test_revision.py
+++ b/swh/web/tests/api/views/test_revision.py
@@ -4,266 +4,268 @@
# See top-level LICENSE file for more information
from hypothesis import given
-from rest_framework.test import APITestCase
-from unittest.mock import patch
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
from swh.web.tests.strategies import revision
-from swh.web.tests.testcase import WebTestCase
-class RevisionApiTestCase(WebTestCase, APITestCase):
+@given(revision())
+def test_api_revision(api_client, archive_data, revision):
+ url = reverse('api-1-revision', url_args={'sha1_git': revision})
+ rv = api_client.get(url)
- @given(revision())
- def test_api_revision(self, revision):
+ expected_revision = archive_data.revision_get(revision)
- url = reverse('api-1-revision', url_args={'sha1_git': revision})
- rv = self.client.get(url)
+ _enrich_revision(expected_revision)
- expected_revision = self.revision_get(revision)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == expected_revision
- self._enrich_revision(expected_revision)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, expected_revision)
+def test_api_revision_not_found(api_client):
+ unknown_revision_ = random_sha1()
- def test_api_revision_not_found(self):
- unknown_revision_ = random_sha1()
+ url = reverse('api-1-revision',
+ url_args={'sha1_git': unknown_revision_})
+ rv = api_client.get(url)
- url = reverse('api-1-revision',
- url_args={'sha1_git': unknown_revision_})
- rv = self.client.get(url)
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Revision with sha1_git %s not found.' % unknown_revision_
+ }
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Revision with sha1_git %s not found.' %
- unknown_revision_})
- @given(revision())
- def test_api_revision_raw_ok(self, revision):
+@given(revision())
+def test_api_revision_raw_ok(api_client, archive_data, revision):
+ url = reverse('api-1-revision-raw-message',
+ url_args={'sha1_git': revision})
+ rv = api_client.get(url)
- url = reverse('api-1-revision-raw-message',
- url_args={'sha1_git': revision})
- rv = self.client.get(url)
+ expected_message = archive_data.revision_get(revision)['message']
- expected_message = self.revision_get(revision)['message']
+ assert rv.status_code == 200
+ assert rv['Content-Type'] == 'application/octet-stream'
+ assert rv.content == expected_message.encode()
- self.assertEqual(rv.status_code, 200)
- self.assertEqual(rv['Content-Type'], 'application/octet-stream')
- self.assertEqual(rv.content, expected_message.encode())
- def test_api_revision_raw_ko_no_rev(self):
- unknown_revision_ = random_sha1()
+def test_api_revision_raw_ko_no_rev(api_client):
+ unknown_revision_ = random_sha1()
- url = reverse('api-1-revision-raw-message',
- url_args={'sha1_git': unknown_revision_})
- rv = self.client.get(url)
+ url = reverse('api-1-revision-raw-message',
+ url_args={'sha1_git': unknown_revision_})
+ rv = api_client.get(url)
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Revision with sha1_git %s not found.' %
- unknown_revision_})
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Revision with sha1_git %s not found.' % unknown_revision_
+ }
- @given(revision())
- def test_api_revision_log(self, revision):
- per_page = 10
+@given(revision())
+def test_api_revision_log(api_client, archive_data, revision):
+ per_page = 10
- url = reverse('api-1-revision-log', url_args={'sha1_git': revision},
- query_params={'per_page': per_page})
+ url = reverse('api-1-revision-log', url_args={'sha1_git': revision},
+ query_params={'per_page': per_page})
- rv = self.client.get(url)
+ rv = api_client.get(url)
- expected_log = self.revision_log(revision, limit=per_page+1)
- expected_log = list(map(self._enrich_revision, expected_log))
-
- has_next = len(expected_log) > per_page
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data,
- expected_log[:-1] if has_next else expected_log)
-
- if has_next:
- self.assertIn('Link', rv)
- next_log_url = reverse(
- 'api-1-revision-log',
- url_args={'sha1_git': expected_log[-1]['id']},
- query_params={'per_page': per_page})
- self.assertIn(next_log_url, rv['Link'])
-
- def test_api_revision_log_not_found(self):
- unknown_revision_ = random_sha1()
-
- url = reverse('api-1-revision-log',
- url_args={'sha1_git': unknown_revision_})
-
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Revision with sha1_git %s not found.' %
- unknown_revision_})
- self.assertFalse(rv.has_header('Link'))
-
- @given(revision())
- def test_api_revision_log_context(self, revision):
-
- revisions = self.revision_log(revision, limit=4)
-
- prev_rev = revisions[0]['id']
- rev = revisions[-1]['id']
-
- per_page = 10
-
- url = reverse('api-1-revision-log',
- url_args={'sha1_git': rev,
- 'prev_sha1s': prev_rev},
- query_params={'per_page': per_page})
-
- rv = self.client.get(url)
-
- expected_log = self.revision_log(rev, limit=per_page)
- prev_revision = self.revision_get(prev_rev)
- expected_log.insert(0, prev_revision)
- expected_log = list(map(self._enrich_revision, expected_log))
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, expected_log)
-
- @patch('swh.web.api.views.revision._revision_directory_by')
- def test_api_revision_directory_ko_not_found(self, mock_rev_dir):
- # given
- mock_rev_dir.side_effect = NotFoundExc('Not found')
-
- # then
- rv = self.client.get('/api/1/revision/999/directory/some/path/to/dir/')
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'NotFoundExc',
- 'reason': 'Not found'})
-
- mock_rev_dir.assert_called_once_with(
- {'sha1_git': '999'},
- 'some/path/to/dir',
- '/api/1/revision/999/directory/some/path/to/dir/',
- with_data=False)
-
- @patch('swh.web.api.views.revision._revision_directory_by')
- def test_api_revision_directory_ok_returns_dir_entries(self, mock_rev_dir):
- stub_dir = {
- 'type': 'dir',
- 'revision': '999',
- 'content': [
- {
- 'sha1_git': '789',
- 'type': 'file',
- 'target': '101',
- 'target_url': '/api/1/content/sha1_git:101/',
- 'name': 'somefile',
- 'file_url': '/api/1/revision/999/directory/some/path/'
- 'somefile/'
- },
- {
- 'sha1_git': '123',
- 'type': 'dir',
- 'target': '456',
- 'target_url': '/api/1/directory/456/',
- 'name': 'to-subdir',
- 'dir_url': '/api/1/revision/999/directory/some/path/'
- 'to-subdir/',
- }]
- }
+ expected_log = archive_data.revision_log(revision, limit=per_page+1)
+ expected_log = list(map(_enrich_revision, expected_log))
+
+ has_next = len(expected_log) > per_page
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == (expected_log[:-1] if has_next else expected_log)
+
+ if has_next:
+ assert 'Link' in rv
+ next_log_url = reverse(
+ 'api-1-revision-log',
+ url_args={'sha1_git': expected_log[-1]['id']},
+ query_params={'per_page': per_page})
+ assert next_log_url in rv['Link']
+
+
+def test_api_revision_log_not_found(api_client):
+ unknown_revision_ = random_sha1()
+
+ url = reverse('api-1-revision-log',
+ url_args={'sha1_git': unknown_revision_})
+
+ rv = api_client.get(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Revision with sha1_git %s not found.' % unknown_revision_
+ }
+ assert not rv.has_header('Link')
+
+
+@given(revision())
+def test_api_revision_log_context(api_client, archive_data, revision):
+ revisions = archive_data.revision_log(revision, limit=4)
- # given
- mock_rev_dir.return_value = stub_dir
+ prev_rev = revisions[0]['id']
+ rev = revisions[-1]['id']
- # then
- rv = self.client.get('/api/1/revision/999/directory/some/path/')
+ per_page = 10
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, stub_dir)
+ url = reverse('api-1-revision-log',
+ url_args={'sha1_git': rev,
+ 'prev_sha1s': prev_rev},
+ query_params={'per_page': per_page})
- mock_rev_dir.assert_called_once_with(
- {'sha1_git': '999'},
- 'some/path',
- '/api/1/revision/999/directory/some/path/',
- with_data=False)
+ rv = api_client.get(url)
- @patch('swh.web.api.views.revision._revision_directory_by')
- def test_api_revision_directory_ok_returns_content(self, mock_rev_dir):
- stub_content = {
- 'type': 'file',
- 'revision': '999',
- 'content': {
+ expected_log = archive_data.revision_log(rev, limit=per_page)
+ prev_revision = archive_data.revision_get(prev_rev)
+ expected_log.insert(0, prev_revision)
+ expected_log = list(map(_enrich_revision, expected_log))
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == expected_log
+
+
+def test_api_revision_directory_ko_not_found(api_client, mocker):
+ mock_rev_dir = mocker.patch(
+ 'swh.web.api.views.revision._revision_directory_by')
+ mock_rev_dir.side_effect = NotFoundExc('Not found')
+
+ rv = api_client.get('/api/1/revision/999/directory/some/path/to/dir/')
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'NotFoundExc',
+ 'reason': 'Not found'
+ }
+
+ mock_rev_dir.assert_called_once_with(
+ {'sha1_git': '999'},
+ 'some/path/to/dir',
+ '/api/1/revision/999/directory/some/path/to/dir/',
+ with_data=False
+ )
+
+
+def test_api_revision_directory_ok_returns_dir_entries(api_client, mocker):
+ mock_rev_dir = mocker.patch(
+ 'swh.web.api.views.revision._revision_directory_by')
+ stub_dir = {
+ 'type': 'dir',
+ 'revision': '999',
+ 'content': [
+ {
'sha1_git': '789',
- 'sha1': '101',
- 'data_url': '/api/1/content/101/raw/',
+ 'type': 'file',
+ 'target': '101',
+ 'target_url': '/api/1/content/sha1_git:101/',
+ 'name': 'somefile',
+ 'file_url': '/api/1/revision/999/directory/some/path/'
+ 'somefile/'
+ },
+ {
+ 'sha1_git': '123',
+ 'type': 'dir',
+ 'target': '456',
+ 'target_url': '/api/1/directory/456/',
+ 'name': 'to-subdir',
+ 'dir_url': '/api/1/revision/999/directory/some/path/'
+ 'to-subdir/',
}
+ ]
+ }
+
+ mock_rev_dir.return_value = stub_dir
+
+ rv = api_client.get('/api/1/revision/999/directory/some/path/')
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == stub_dir
+
+ mock_rev_dir.assert_called_once_with(
+ {'sha1_git': '999'},
+ 'some/path',
+ '/api/1/revision/999/directory/some/path/',
+ with_data=False
+ )
+
+
+def test_api_revision_directory_ok_returns_content(api_client, mocker):
+ mock_rev_dir = mocker.patch(
+ 'swh.web.api.views.revision._revision_directory_by')
+ stub_content = {
+ 'type': 'file',
+ 'revision': '999',
+ 'content': {
+ 'sha1_git': '789',
+ 'sha1': '101',
+ 'data_url': '/api/1/content/101/raw/',
}
+ }
+
+ mock_rev_dir.return_value = stub_content
+
+ url = '/api/1/revision/666/directory/some/other/path/'
+ rv = api_client.get(url)
- # given
- mock_rev_dir.return_value = stub_content
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == stub_content
- # then
- url = '/api/1/revision/666/directory/some/other/path/'
- rv = self.client.get(url)
+ mock_rev_dir.assert_called_once_with(
+ {'sha1_git': '666'}, 'some/other/path', url, with_data=False)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, stub_content)
- mock_rev_dir.assert_called_once_with(
- {'sha1_git': '666'}, 'some/other/path', url, with_data=False)
+@given(revision())
+def test_api_revision_uppercase(api_client, revision):
+ url = reverse('api-1-revision-uppercase-checksum',
+ url_args={'sha1_git': revision.upper()})
- def _enrich_revision(self, revision):
- directory_url = reverse(
- 'api-1-directory',
- url_args={'sha1_git': revision['directory']})
+ resp = api_client.get(url)
+ assert resp.status_code == 302
- history_url = reverse('api-1-revision-log',
- url_args={'sha1_git': revision['id']})
+ redirect_url = reverse('api-1-revision',
+ url_args={'sha1_git': revision})
- parents_id_url = []
- for p in revision['parents']:
- parents_id_url.append({
- 'id': p,
- 'url': reverse('api-1-revision', url_args={'sha1_git': p})
- })
+ assert resp['location'] == redirect_url
- revision_url = reverse('api-1-revision',
- url_args={'sha1_git': revision['id']})
- revision['directory_url'] = directory_url
- revision['history_url'] = history_url
- revision['url'] = revision_url
- revision['parents'] = parents_id_url
+def _enrich_revision(revision):
+ directory_url = reverse(
+ 'api-1-directory',
+ url_args={'sha1_git': revision['directory']})
- return revision
+ history_url = reverse('api-1-revision-log',
+ url_args={'sha1_git': revision['id']})
- @given(revision())
- def test_api_revision_uppercase(self, revision):
- url = reverse('api-1-revision-uppercase-checksum',
- url_args={'sha1_git': revision.upper()})
+ parents_id_url = []
+ for p in revision['parents']:
+ parents_id_url.append({
+ 'id': p,
+ 'url': reverse('api-1-revision', url_args={'sha1_git': p})
+ })
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
+ revision_url = reverse('api-1-revision',
+ url_args={'sha1_git': revision['id']})
- redirect_url = reverse('api-1-revision',
- url_args={'sha1_git': revision})
+ revision['directory_url'] = directory_url
+ revision['history_url'] = history_url
+ revision['url'] = revision_url
+ revision['parents'] = parents_id_url
- self.assertEqual(resp['location'], redirect_url)
+ return revision
diff --git a/swh/web/tests/api/views/test_snapshot.py b/swh/web/tests/api/views/test_snapshot.py
--- a/swh/web/tests/api/views/test_snapshot.py
+++ b/swh/web/tests/api/views/test_snapshot.py
@@ -6,7 +6,6 @@
import random
from hypothesis import given
-from rest_framework.test import APITestCase
from swh.model.hashutil import hash_to_hex
from swh.web.common.utils import reverse
@@ -14,177 +13,182 @@
from swh.web.tests.strategies import (
snapshot, new_snapshot
)
-from swh.web.tests.testcase import WebTestCase
-class SnapshotApiTestCase(WebTestCase, APITestCase):
+@given(snapshot())
+def test_api_snapshot(api_client, archive_data, snapshot):
- @given(snapshot())
- def test_api_snapshot(self, snapshot):
+ url = reverse('api-1-snapshot',
+ url_args={'snapshot_id': snapshot})
+ rv = api_client.get(url)
- url = reverse('api-1-snapshot',
- url_args={'snapshot_id': snapshot})
- rv = self.client.get(url)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- expected_data = self.snapshot_get(snapshot)
- expected_data = self._enrich_snapshot(expected_data)
- self.assertEqual(rv.data, expected_data)
-
- @given(snapshot())
- def test_api_snapshot_paginated(self, snapshot):
-
- branches_offset = 0
- branches_count = 2
-
- snapshot_branches = []
-
- for k, v in sorted(self.snapshot_get(snapshot)['branches'].items()):
- snapshot_branches.append({
- 'name': k,
- 'target_type': v['target_type'],
- 'target': v['target']
- })
-
- whole_snapshot = {'id': snapshot, 'branches': {}, 'next_branch': None}
-
- while branches_offset < len(snapshot_branches):
- branches_from = snapshot_branches[branches_offset]['name']
- url = reverse('api-1-snapshot',
- url_args={'snapshot_id': snapshot},
- query_params={'branches_from': branches_from,
- 'branches_count': branches_count})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- expected_data = self.snapshot_get_branches(snapshot, branches_from,
- branches_count)
-
- expected_data = self._enrich_snapshot(expected_data)
-
- branches_offset += branches_count
- if branches_offset < len(snapshot_branches):
- next_branch = snapshot_branches[branches_offset]['name']
- expected_data['next_branch'] = next_branch
- else:
- expected_data['next_branch'] = None
-
- self.assertEqual(rv.data, expected_data)
- whole_snapshot['branches'].update(expected_data['branches'])
-
- if branches_offset < len(snapshot_branches):
- next_url = reverse(
- 'api-1-snapshot',
- url_args={'snapshot_id': snapshot},
- query_params={'branches_from': next_branch,
- 'branches_count': branches_count})
- self.assertEqual(rv['Link'], '<%s>; rel="next"' % next_url)
- else:
- self.assertFalse(rv.has_header('Link'))
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ expected_data = archive_data.snapshot_get(snapshot)
+ expected_data = _enrich_snapshot(archive_data, expected_data)
+ assert rv.data == expected_data
- url = reverse('api-1-snapshot',
- url_args={'snapshot_id': snapshot})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, whole_snapshot)
+@given(snapshot())
+def test_api_snapshot_paginated(api_client, archive_data, snapshot):
- @given(snapshot())
- def test_api_snapshot_filtered(self, snapshot):
+ branches_offset = 0
+ branches_count = 2
- snapshot_branches = []
+ snapshot_branches = []
- for k, v in sorted(self.snapshot_get(snapshot)['branches'].items()):
- snapshot_branches.append({
- 'name': k,
- 'target_type': v['target_type'],
- 'target': v['target']
- })
+ for k, v in sorted(
+ archive_data.snapshot_get(snapshot)['branches'].items()):
+ snapshot_branches.append({
+ 'name': k,
+ 'target_type': v['target_type'],
+ 'target': v['target']
+ })
- target_type = random.choice(snapshot_branches)['target_type']
+ whole_snapshot = {'id': snapshot, 'branches': {}, 'next_branch': None}
+ while branches_offset < len(snapshot_branches):
+ branches_from = snapshot_branches[branches_offset]['name']
url = reverse('api-1-snapshot',
url_args={'snapshot_id': snapshot},
- query_params={'target_types': target_type})
- rv = self.client.get(url)
-
- expected_data = self.snapshot_get_branches(
- snapshot, target_types=target_type)
- expected_data = self._enrich_snapshot(expected_data)
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, expected_data)
+ query_params={'branches_from': branches_from,
+ 'branches_count': branches_count})
+ rv = api_client.get(url)
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ expected_data = archive_data.snapshot_get_branches(
+ snapshot, branches_from, branches_count)
+
+ expected_data = _enrich_snapshot(archive_data, expected_data)
- def test_api_snapshot_errors(self):
- unknown_snapshot_ = random_sha1()
-
- url = reverse('api-1-snapshot',
- url_args={'snapshot_id': '63ce369'})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 400, rv.data)
-
- url = reverse('api-1-snapshot',
- url_args={'snapshot_id': unknown_snapshot_})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 404, rv.data)
-
- def _enrich_snapshot(self, snapshot):
- def _get_branch_url(target_type, target):
- url = None
- if target_type == 'revision':
- url = reverse('api-1-revision', url_args={'sha1_git': target})
- if target_type == 'release':
- url = reverse('api-1-release', url_args={'sha1_git': target})
- return url
-
- for branch in snapshot['branches'].keys():
- target = snapshot['branches'][branch]['target']
- target_type = snapshot['branches'][branch]['target_type']
- snapshot['branches'][branch]['target_url'] = \
- _get_branch_url(target_type, target)
- for branch in snapshot['branches'].keys():
- target = snapshot['branches'][branch]['target']
- target_type = snapshot['branches'][branch]['target_type']
- if target_type == 'alias':
- if target in snapshot['branches']:
- snapshot['branches'][branch]['target_url'] = \
- snapshot['branches'][target]['target_url']
- else:
- snp = self.snapshot_get_branches(snapshot['id'],
- branches_from=target,
- branches_count=1)
- alias_target = snp['branches'][target]['target']
- alias_target_type = snp['branches'][target]['target_type']
- snapshot['branches'][branch]['target_url'] = \
- _get_branch_url(alias_target_type, alias_target)
-
- return snapshot
-
- @given(snapshot())
- def test_api_snapshot_uppercase(self, snapshot):
- url = reverse('api-1-snapshot-uppercase-checksum',
- url_args={'snapshot_id': snapshot.upper()})
-
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
-
- redirect_url = reverse('api-1-snapshot-uppercase-checksum',
- url_args={'snapshot_id': snapshot})
-
- self.assertEqual(resp['location'], redirect_url)
-
- @given(new_snapshot(min_size=4))
- def test_api_snapshot_null_branch(self, new_snapshot):
- snp_dict = new_snapshot.to_dict()
- snp_id = hash_to_hex(snp_dict['id'])
- for branch in snp_dict['branches'].keys():
- snp_dict['branches'][branch] = None
- break
- self.storage.snapshot_add([snp_dict])
- url = reverse('api-1-snapshot',
- url_args={'snapshot_id': snp_id})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
+ branches_offset += branches_count
+ if branches_offset < len(snapshot_branches):
+ next_branch = snapshot_branches[branches_offset]['name']
+ expected_data['next_branch'] = next_branch
+ else:
+ expected_data['next_branch'] = None
+
+ assert rv.data == expected_data
+ whole_snapshot['branches'].update(expected_data['branches'])
+
+ if branches_offset < len(snapshot_branches):
+ next_url = reverse(
+ 'api-1-snapshot',
+ url_args={'snapshot_id': snapshot},
+ query_params={'branches_from': next_branch,
+ 'branches_count': branches_count})
+ assert rv['Link'] == '<%s>; rel="next"' % next_url
+ else:
+ assert not rv.has_header('Link')
+
+ url = reverse('api-1-snapshot',
+ url_args={'snapshot_id': snapshot})
+ rv = api_client.get(url)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == whole_snapshot
+
+
+@given(snapshot())
+def test_api_snapshot_filtered(api_client, archive_data, snapshot):
+
+ snapshot_branches = []
+
+ for k, v in sorted(
+ archive_data.snapshot_get(snapshot)['branches'].items()):
+ snapshot_branches.append({
+ 'name': k,
+ 'target_type': v['target_type'],
+ 'target': v['target']
+ })
+
+ target_type = random.choice(snapshot_branches)['target_type']
+
+ url = reverse('api-1-snapshot',
+ url_args={'snapshot_id': snapshot},
+ query_params={'target_types': target_type})
+ rv = api_client.get(url)
+
+ expected_data = archive_data.snapshot_get_branches(
+ snapshot, target_types=target_type)
+ expected_data = _enrich_snapshot(archive_data, expected_data)
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == expected_data
+
+
+def test_api_snapshot_errors(api_client):
+ unknown_snapshot_ = random_sha1()
+
+ url = reverse('api-1-snapshot',
+ url_args={'snapshot_id': '63ce369'})
+ rv = api_client.get(url)
+ assert rv.status_code == 400, rv.data
+
+ url = reverse('api-1-snapshot',
+ url_args={'snapshot_id': unknown_snapshot_})
+ rv = api_client.get(url)
+ assert rv.status_code == 404, rv.data
+
+
+@given(snapshot())
+def test_api_snapshot_uppercase(api_client, snapshot):
+ url = reverse('api-1-snapshot-uppercase-checksum',
+ url_args={'snapshot_id': snapshot.upper()})
+
+ resp = api_client.get(url)
+ assert resp.status_code == 302
+
+ redirect_url = reverse('api-1-snapshot-uppercase-checksum',
+ url_args={'snapshot_id': snapshot})
+
+ assert resp['location'] == redirect_url
+
+
+@given(new_snapshot(min_size=4))
+def test_api_snapshot_null_branch(api_client, archive_data, new_snapshot):
+ snp_dict = new_snapshot.to_dict()
+ snp_id = hash_to_hex(snp_dict['id'])
+ for branch in snp_dict['branches'].keys():
+ snp_dict['branches'][branch] = None
+ break
+ archive_data.snapshot_add([snp_dict])
+ url = reverse('api-1-snapshot',
+ url_args={'snapshot_id': snp_id})
+ rv = api_client.get(url)
+ assert rv.status_code == 200, rv.data
+
+
+def _enrich_snapshot(archive_data, snapshot):
+ def _get_branch_url(target_type, target):
+ url = None
+ if target_type == 'revision':
+ url = reverse('api-1-revision', url_args={'sha1_git': target})
+ if target_type == 'release':
+ url = reverse('api-1-release', url_args={'sha1_git': target})
+ return url
+
+ for branch in snapshot['branches'].keys():
+ target = snapshot['branches'][branch]['target']
+ target_type = snapshot['branches'][branch]['target_type']
+ snapshot['branches'][branch]['target_url'] = \
+ _get_branch_url(target_type, target)
+ for branch in snapshot['branches'].keys():
+ target = snapshot['branches'][branch]['target']
+ target_type = snapshot['branches'][branch]['target_type']
+ if target_type == 'alias':
+ if target in snapshot['branches']:
+ snapshot['branches'][branch]['target_url'] = \
+ snapshot['branches'][target]['target_url']
+ else:
+ snp = archive_data.snapshot_get_branches(snapshot['id'],
+ branches_from=target,
+ branches_count=1)
+ alias_target = snp['branches'][target]['target']
+ alias_target_type = snp['branches'][target]['target_type']
+ snapshot['branches'][branch]['target_url'] = \
+ _get_branch_url(alias_target_type, alias_target)
+
+ return snapshot
diff --git a/swh/web/tests/api/views/test_stat.py b/swh/web/tests/api/views/test_stat.py
--- a/swh/web/tests/api/views/test_stat.py
+++ b/swh/web/tests/api/views/test_stat.py
@@ -3,74 +3,68 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from rest_framework.test import APITestCase
-from unittest.mock import patch
-
from swh.storage.exc import StorageDBError, StorageAPIError
from swh.web.common.exc import BadInputExc
from swh.web.common.utils import reverse
-from swh.web.tests.testcase import WebTestCase
-
-class StatApiTestCase(WebTestCase, APITestCase):
- @patch('swh.web.api.views.stat.service')
- def test_api_1_stat_counters_raise_error(self, mock_service):
- mock_service.stat_counters.side_effect = BadInputExc(
- 'voluntary error to check the bad request middleware.')
+def test_api_1_stat_counters_raise_error(api_client, mocker):
+ mock_service = mocker.patch('swh.web.api.views.stat.service')
+ mock_service.stat_counters.side_effect = BadInputExc(
+ 'voluntary error to check the bad request middleware.')
- url = reverse('api-1-stat-counters')
- rv = self.client.get(url)
+ url = reverse('api-1-stat-counters')
+ rv = api_client.get(url)
- self.assertEqual(rv.status_code, 400, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'BadInputExc',
- 'reason': 'voluntary error to check the bad request middleware.'})
+ assert rv.status_code == 400, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'BadInputExc',
+ 'reason': 'voluntary error to check the bad request middleware.'}
- @patch('swh.web.api.views.stat.service')
- def test_api_1_stat_counters_raise_from_db(self, mock_service):
- mock_service.stat_counters.side_effect = StorageDBError(
- 'Storage exploded! Will be back online shortly!')
+def test_api_1_stat_counters_raise_from_db(api_client, mocker):
+ mock_service = mocker.patch('swh.web.api.views.stat.service')
+ mock_service.stat_counters.side_effect = StorageDBError(
+ 'Storage exploded! Will be back online shortly!')
- url = reverse('api-1-stat-counters')
- rv = self.client.get(url)
+ url = reverse('api-1-stat-counters')
+ rv = api_client.get(url)
- self.assertEqual(rv.status_code, 503, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'StorageDBError',
- 'reason':
- 'An unexpected error occurred in the backend: '
- 'Storage exploded! Will be back online shortly!'})
+ assert rv.status_code == 503, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'StorageDBError',
+ 'reason':
+ 'An unexpected error occurred in the backend: '
+ 'Storage exploded! Will be back online shortly!'}
- @patch('swh.web.api.views.stat.service')
- def test_api_1_stat_counters_raise_from_api(self, mock_service):
- mock_service.stat_counters.side_effect = StorageAPIError(
- 'Storage API dropped dead! Will resurrect from its ashes asap!'
- )
+def test_api_1_stat_counters_raise_from_api(api_client, mocker):
+ mock_service = mocker.patch('swh.web.api.views.stat.service')
+ mock_service.stat_counters.side_effect = StorageAPIError(
+ 'Storage API dropped dead! Will resurrect from its ashes asap!'
+ )
- url = reverse('api-1-stat-counters')
- rv = self.client.get(url)
+ url = reverse('api-1-stat-counters')
+ rv = api_client.get(url)
- self.assertEqual(rv.status_code, 503, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, {
- 'exception': 'StorageAPIError',
- 'reason':
- 'An unexpected error occurred in the api backend: '
- 'Storage API dropped dead! Will resurrect from its ashes asap!'
- })
+ assert rv.status_code == 503, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == {
+ 'exception': 'StorageAPIError',
+ 'reason':
+ 'An unexpected error occurred in the api backend: '
+ 'Storage API dropped dead! Will resurrect from its ashes asap!'
+ }
- def test_api_1_stat_counters(self):
- url = reverse('api-1-stat-counters')
+def test_api_1_stat_counters(api_client, archive_data):
+ url = reverse('api-1-stat-counters')
- rv = self.client.get(url)
+ rv = api_client.get(url)
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data, self.storage.stat_counters())
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data == archive_data.stat_counters()
diff --git a/swh/web/tests/api/views/test_vault.py b/swh/web/tests/api/views/test_vault.py
--- a/swh/web/tests/api/views/test_vault.py
+++ b/swh/web/tests/api/views/test_vault.py
@@ -3,120 +3,116 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from rest_framework.test import APITestCase
-from unittest.mock import patch
-
from swh.model import hashutil
-from swh.web.tests.testcase import WebTestCase
-
TEST_OBJ_ID = 'd4905454cc154b492bd6afed48694ae3c579345e'
OBJECT_TYPES = {'directory': ('directory', None),
'revision_gitfast': ('revision', 'gitfast')}
-class VaultApiTestCase(WebTestCase, APITestCase):
- @patch('swh.web.api.views.vault.service')
- def test_api_vault_cook(self, mock_service):
- stub_cook = {
- 'fetch_url': ('http://127.0.0.1:5004/api/1/vault/directory/{}/raw/'
- .format(TEST_OBJ_ID)),
- 'obj_id': TEST_OBJ_ID,
- 'obj_type': 'test_type',
- 'progress_message': None,
- 'status': 'done',
- 'task_uuid': 'de75c902-5ee5-4739-996e-448376a93eff',
- }
- stub_fetch = b'content'
-
- mock_service.vault_cook.return_value = stub_cook
- mock_service.vault_fetch.return_value = stub_fetch
-
- for obj_type, (obj_type_name, obj_type_format) in OBJECT_TYPES.items():
- url = '/api/1/vault/{}/{}/'.format(obj_type_name, TEST_OBJ_ID)
- if obj_type_format:
- url += '{}/'.format(obj_type_format)
- rv = self.client.post(url, {'email': 'test@test.mail'})
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- self.assertEqual(rv.data, stub_cook)
- mock_service.vault_cook.assert_called_with(
- obj_type,
- hashutil.hash_to_bytes(TEST_OBJ_ID),
- 'test@test.mail')
-
- rv = self.client.get(url + 'raw/')
-
- self.assertEqual(rv.status_code, 200)
- self.assertEqual(rv['Content-Type'], 'application/gzip')
- self.assertEqual(rv.content, stub_fetch)
- mock_service.vault_fetch.assert_called_with(
- obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID))
-
- @patch('swh.web.api.views.vault.service')
- def test_api_vault_cook_uppercase_hash(self, mock_service):
- stub_cook = {
- 'fetch_url': ('http://127.0.0.1:5004/api/1/vault/directory/{}/raw/'
- .format(TEST_OBJ_ID.upper())),
- 'obj_id': TEST_OBJ_ID.upper(),
- 'obj_type': 'test_type',
- 'progress_message': None,
- 'status': 'done',
- 'task_uuid': 'de75c902-5ee5-4739-996e-448376a93eff',
- }
- stub_fetch = b'content'
-
- mock_service.vault_cook.return_value = stub_cook
- mock_service.vault_fetch.return_value = stub_fetch
-
- for obj_type, (obj_type_name, obj_type_format) in OBJECT_TYPES.items():
- url = '/api/1/vault/{}/{}/'.format(obj_type_name, TEST_OBJ_ID)
- if obj_type_format:
- url += '{}/'.format(obj_type_format)
- rv = self.client.post(url, {'email': 'test@test.mail'})
-
- self.assertEqual(rv.status_code, 200, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- self.assertEqual(rv.data, stub_cook)
- mock_service.vault_cook.assert_called_with(
- obj_type,
- hashutil.hash_to_bytes(TEST_OBJ_ID),
- 'test@test.mail')
-
- rv = self.client.get(url + 'raw/')
-
- self.assertEqual(rv.status_code, 200)
- self.assertEqual(rv['Content-Type'], 'application/gzip')
- self.assertEqual(rv.content, stub_fetch)
- mock_service.vault_fetch.assert_called_with(
- obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID))
-
- @patch('swh.web.api.views.vault.service')
- def test_api_vault_cook_notfound(self, mock_service):
- mock_service.vault_cook.return_value = None
- mock_service.vault_fetch.return_value = None
-
- for obj_type, (obj_type_name, obj_type_format) in OBJECT_TYPES.items():
- url = '/api/1/vault/{}/{}/'.format(obj_type_name, TEST_OBJ_ID)
- if obj_type_format:
- url += '{}/'.format(obj_type_format)
- rv = self.client.post(url)
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
-
- self.assertEqual(rv.data['exception'], 'NotFoundExc')
- mock_service.vault_cook.assert_called_with(
- obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID), None)
-
- rv = self.client.get(url + 'raw/')
-
- self.assertEqual(rv.status_code, 404, rv.data)
- self.assertEqual(rv['Content-Type'], 'application/json')
- self.assertEqual(rv.data['exception'], 'NotFoundExc')
- mock_service.vault_fetch.assert_called_with(
- obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID))
+def test_api_vault_cook(api_client, mocker):
+ mock_service = mocker.patch('swh.web.api.views.vault.service')
+ stub_cook = {
+ 'fetch_url': ('http://127.0.0.1:5004/api/1/vault/directory/{}/raw/'
+ .format(TEST_OBJ_ID)),
+ 'obj_id': TEST_OBJ_ID,
+ 'obj_type': 'test_type',
+ 'progress_message': None,
+ 'status': 'done',
+ 'task_uuid': 'de75c902-5ee5-4739-996e-448376a93eff',
+ }
+ stub_fetch = b'content'
+
+ mock_service.vault_cook.return_value = stub_cook
+ mock_service.vault_fetch.return_value = stub_fetch
+
+ for obj_type, (obj_type_name, obj_type_format) in OBJECT_TYPES.items():
+ url = '/api/1/vault/{}/{}/'.format(obj_type_name, TEST_OBJ_ID)
+ if obj_type_format:
+ url += '{}/'.format(obj_type_format)
+ rv = api_client.post(url, {'email': 'test@test.mail'})
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ assert rv.data == stub_cook
+ mock_service.vault_cook.assert_called_with(
+ obj_type,
+ hashutil.hash_to_bytes(TEST_OBJ_ID),
+ 'test@test.mail')
+
+ rv = api_client.get(url + 'raw/')
+
+ assert rv.status_code == 200
+ assert rv['Content-Type'] == 'application/gzip'
+ assert rv.content == stub_fetch
+ mock_service.vault_fetch.assert_called_with(
+ obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID))
+
+
+def test_api_vault_cook_uppercase_hash(api_client, mocker):
+ mock_service = mocker.patch('swh.web.api.views.vault.service')
+ stub_cook = {
+ 'fetch_url': ('http://127.0.0.1:5004/api/1/vault/directory/{}/raw/'
+ .format(TEST_OBJ_ID.upper())),
+ 'obj_id': TEST_OBJ_ID.upper(),
+ 'obj_type': 'test_type',
+ 'progress_message': None,
+ 'status': 'done',
+ 'task_uuid': 'de75c902-5ee5-4739-996e-448376a93eff',
+ }
+ stub_fetch = b'content'
+
+ mock_service.vault_cook.return_value = stub_cook
+ mock_service.vault_fetch.return_value = stub_fetch
+
+ for obj_type, (obj_type_name, obj_type_format) in OBJECT_TYPES.items():
+ url = '/api/1/vault/{}/{}/'.format(obj_type_name, TEST_OBJ_ID)
+ if obj_type_format:
+ url += '{}/'.format(obj_type_format)
+ rv = api_client.post(url, {'email': 'test@test.mail'})
+
+ assert rv.status_code == 200, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ assert rv.data == stub_cook
+ mock_service.vault_cook.assert_called_with(
+ obj_type,
+ hashutil.hash_to_bytes(TEST_OBJ_ID),
+ 'test@test.mail')
+
+ rv = api_client.get(url + 'raw/')
+
+ assert rv.status_code == 200
+ assert rv['Content-Type'] == 'application/gzip'
+ assert rv.content == stub_fetch
+ mock_service.vault_fetch.assert_called_with(
+ obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID))
+
+
+def test_api_vault_cook_notfound(api_client, mocker):
+ mock_service = mocker.patch('swh.web.api.views.vault.service')
+ mock_service.vault_cook.return_value = None
+ mock_service.vault_fetch.return_value = None
+
+ for obj_type, (obj_type_name, obj_type_format) in OBJECT_TYPES.items():
+ url = '/api/1/vault/{}/{}/'.format(obj_type_name, TEST_OBJ_ID)
+ if obj_type_format:
+ url += '{}/'.format(obj_type_format)
+ rv = api_client.post(url)
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+
+ assert rv.data['exception'] == 'NotFoundExc'
+ mock_service.vault_cook.assert_called_with(
+ obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID), None)
+
+ rv = api_client.get(url + 'raw/')
+
+ assert rv.status_code == 404, rv.data
+ assert rv['Content-Type'] == 'application/json'
+ assert rv.data['exception'] == 'NotFoundExc'
+ mock_service.vault_fetch.assert_called_with(
+ obj_type, hashutil.hash_to_bytes(TEST_OBJ_ID))
diff --git a/swh/web/tests/browse/test_utils.py b/swh/web/tests/browse/test_utils.py
--- a/swh/web/tests/browse/test_utils.py
+++ b/swh/web/tests/browse/test_utils.py
@@ -8,134 +8,121 @@
from swh.web.browse import utils
from swh.web.common.utils import reverse, format_utc_iso_date
from swh.web.tests.strategies import origin_with_multiple_visits
-from swh.web.tests.testcase import WebTestCase
-
-
-class SwhBrowseUtilsTestCase(WebTestCase):
-
- def test_get_mimetype_and_encoding_for_content(self):
- text = b'Hello world!'
- self.assertEqual(utils.get_mimetype_and_encoding_for_content(text),
- ('text/plain', 'us-ascii'))
-
- @given(origin_with_multiple_visits())
- def test_get_origin_visit_snapshot_simple(self, origin):
-
- visits = self.origin_visit_get(origin['url'])
-
- for visit in visits:
-
- snapshot = self.snapshot_get(visit['snapshot'])
- branches = []
- releases = []
-
- def _process_branch_data(branch, branch_data):
- if branch_data['target_type'] == 'revision':
- rev_data = self.revision_get(branch_data['target'])
- branches.append({
- 'name': branch,
- 'revision': branch_data['target'],
- 'directory': rev_data['directory'],
- 'date': format_utc_iso_date(rev_data['date']),
- 'message': rev_data['message']
- })
- elif branch_data['target_type'] == 'release':
- rel_data = self.release_get(branch_data['target'])
- rev_data = self.revision_get(rel_data['target'])
- releases.append({
- 'name': rel_data['name'],
- 'branch_name': branch,
- 'date': format_utc_iso_date(rel_data['date']),
- 'id': rel_data['id'],
- 'message': rel_data['message'],
- 'target_type': rel_data['target_type'],
- 'target': rel_data['target'],
- 'directory': rev_data['directory']
- })
-
- for branch in sorted(snapshot['branches'].keys()):
- branch_data = snapshot['branches'][branch]
- if branch_data['target_type'] == 'alias':
- target_data = snapshot['branches'][branch_data['target']]
- _process_branch_data(branch, target_data)
- else:
- _process_branch_data(branch, branch_data)
-
- assert branches and releases, 'Incomplete test data.'
-
- origin_visit_branches = utils.get_origin_visit_snapshot(
- origin, visit_id=visit['visit'])
-
- self.assertEqual(origin_visit_branches, (branches, releases))
-
- def test_gen_link(self):
- self.assertEqual(
- utils.gen_link('https://www.softwareheritage.org/', 'swh'),
+
+
+def test_get_mimetype_and_encoding_for_content():
+ text = b'Hello world!'
+ assert (utils.get_mimetype_and_encoding_for_content(text) ==
+ ('text/plain', 'us-ascii'))
+
+
+@given(origin_with_multiple_visits())
+def test_get_origin_visit_snapshot_simple(archive_data, origin):
+ visits = archive_data.origin_visit_get(origin['url'])
+
+ for visit in visits:
+
+ snapshot = archive_data.snapshot_get(visit['snapshot'])
+ branches = []
+ releases = []
+
+ def _process_branch_data(branch, branch_data):
+ if branch_data['target_type'] == 'revision':
+ rev_data = archive_data.revision_get(branch_data['target'])
+ branches.append({
+ 'name': branch,
+ 'revision': branch_data['target'],
+ 'directory': rev_data['directory'],
+ 'date': format_utc_iso_date(rev_data['date']),
+ 'message': rev_data['message']
+ })
+ elif branch_data['target_type'] == 'release':
+ rel_data = archive_data.release_get(branch_data['target'])
+ rev_data = archive_data.revision_get(rel_data['target'])
+ releases.append({
+ 'name': rel_data['name'],
+ 'branch_name': branch,
+ 'date': format_utc_iso_date(rel_data['date']),
+ 'id': rel_data['id'],
+ 'message': rel_data['message'],
+ 'target_type': rel_data['target_type'],
+ 'target': rel_data['target'],
+ 'directory': rev_data['directory']
+ })
+
+ for branch in sorted(snapshot['branches'].keys()):
+ branch_data = snapshot['branches'][branch]
+ if branch_data['target_type'] == 'alias':
+ target_data = snapshot['branches'][branch_data['target']]
+ _process_branch_data(branch, target_data)
+ else:
+ _process_branch_data(branch, branch_data)
+
+ assert branches and releases, 'Incomplete test data.'
+
+ origin_visit_branches = utils.get_origin_visit_snapshot(
+ origin, visit_id=visit['visit'])
+
+ assert origin_visit_branches == (branches, releases)
+
+
+def test_gen_link():
+ assert (utils.gen_link('https://www.softwareheritage.org/', 'swh') ==
'<a href="https://www.softwareheritage.org/">swh</a>')
- def test_gen_revision_link(self):
- revision_id = '28a0bc4120d38a394499382ba21d6965a67a3703'
- revision_url = reverse('browse-revision',
- url_args={'sha1_git': revision_id})
-
- self.assertEqual(utils.gen_revision_link(revision_id, link_text=None,
- link_attrs=None),
- '<a href="%s">%s</a>' % (revision_url, revision_id))
- self.assertEqual(
- utils.gen_revision_link(revision_id, shorten_id=True,
- link_attrs=None),
+
+def test_gen_revision_link():
+ revision_id = '28a0bc4120d38a394499382ba21d6965a67a3703'
+ revision_url = reverse('browse-revision',
+ url_args={'sha1_git': revision_id})
+
+ assert (utils.gen_revision_link(revision_id, link_text=None,
+ link_attrs=None) ==
+ '<a href="%s">%s</a>' % (revision_url, revision_id))
+ assert (utils.gen_revision_link(revision_id, shorten_id=True,
+ link_attrs=None) ==
'<a href="%s">%s</a>' % (revision_url, revision_id[:7]))
- def test_gen_person_mail_link(self):
- person_full = {
- 'name': 'John Doe',
- 'email': 'john.doe@swh.org',
- 'fullname': 'John Doe <john.doe@swh.org>'
- }
- self.assertEqual(
- utils.gen_person_mail_link(person_full),
+def test_gen_person_mail_link():
+ person_full = {
+ 'name': 'John Doe',
+ 'email': 'john.doe@swh.org',
+ 'fullname': 'John Doe <john.doe@swh.org>'
+ }
+
+ assert (utils.gen_person_mail_link(person_full) ==
'<a href="mailto:%s">%s</a>' % (person_full['email'],
- person_full['name'])
- )
+ person_full['name']))
- link_text = 'Mail'
- self.assertEqual(
- utils.gen_person_mail_link(person_full, link_text=link_text),
+ link_text = 'Mail'
+ assert (utils.gen_person_mail_link(person_full, link_text=link_text) ==
'<a href="mailto:%s">%s</a>' % (person_full['email'],
- link_text)
- )
+ link_text))
- person_partial_email = {
- 'name': None,
- 'email': None,
- 'fullname': 'john.doe@swh.org'
- }
+ person_partial_email = {
+ 'name': None,
+ 'email': None,
+ 'fullname': 'john.doe@swh.org'
+ }
- self.assertEqual(
- utils.gen_person_mail_link(person_partial_email),
+ assert (utils.gen_person_mail_link(person_partial_email) ==
'<a href="mailto:%s">%s</a>' % (person_partial_email['fullname'],
- person_partial_email['fullname'])
- )
-
- person_partial = {
- 'name': None,
- 'email': None,
- 'fullname': 'John Doe <john.doe@swh.org>'
- }
-
- self.assertEqual(
- utils.gen_person_mail_link(person_partial),
- person_partial['fullname']
- )
-
- person_none = {
- 'name': None,
- 'email': None,
- 'fullname': None
- }
-
- self.assertEqual(
- utils.gen_person_mail_link(person_none),
- 'None'
- )
+ person_partial_email['fullname']))
+
+ person_partial = {
+ 'name': None,
+ 'email': None,
+ 'fullname': 'John Doe <john.doe@swh.org>'
+ }
+
+ assert (utils.gen_person_mail_link(person_partial) ==
+ person_partial['fullname'])
+
+ person_none = {
+ 'name': None,
+ 'email': None,
+ 'fullname': None
+ }
+
+ assert utils.gen_person_mail_link(person_none) == 'None'
diff --git a/swh/web/tests/browse/views/test_content.py b/swh/web/tests/browse/views/test_content.py
--- a/swh/web/tests/browse/views/test_content.py
+++ b/swh/web/tests/browse/views/test_content.py
@@ -3,8 +3,6 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from unittest.mock import patch
-
from django.utils.html import escape
from hypothesis import given
@@ -16,354 +14,356 @@
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import reverse, get_swh_persistent_id
from swh.web.common.utils import gen_path_info
+from swh.web.tests.django_asserts import (
+ assert_contains, assert_not_contains, assert_template_used
+)
from swh.web.tests.strategies import (
content, content_text_non_utf8, content_text_no_highlight,
content_image_type, content_text, invalid_sha1, unknown_content
)
-from swh.web.tests.testcase import WebTestCase
-
-class SwhBrowseContentTest(WebTestCase):
- @given(content_text())
- def test_content_view_text(self, content):
+@given(content_text())
+def test_content_view_text(client, archive_data, content):
+ sha1_git = content['sha1_git']
- sha1_git = content['sha1_git']
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']},
+ query_params={'path': content['path']})
- url = reverse('browse-content',
- url_args={'query_string': content['sha1']},
- query_params={'path': content['path']})
+ url_raw = reverse('browse-content-raw',
+ url_args={'query_string': content['sha1']})
- url_raw = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']})
+ resp = client.get(url)
- resp = self.client.get(url)
+ content_display = _process_content_for_display(archive_data, content)
+ mimetype = content_display['mimetype']
- content_display = self._process_content_for_display(content)
- mimetype = content_display['mimetype']
+ assert resp.status_code == 200
+ assert_template_used('browse/content.html')
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/content.html')
+ if mimetype.startswith('text/'):
+ assert_contains(resp, '<code class="%s">' %
+ content_display['language'])
+ assert_contains(resp, escape(content_display['content_data']))
+ assert_contains(resp, url_raw)
- if mimetype.startswith('text/'):
- self.assertContains(resp, '<code class="%s">' %
- content_display['language'])
- self.assertContains(resp, escape(content_display['content_data']))
- self.assertContains(resp, url_raw)
+ swh_cnt_id = get_swh_persistent_id('content', sha1_git)
+ swh_cnt_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_cnt_id})
+ assert_contains(resp, swh_cnt_id)
+ assert_contains(resp, swh_cnt_id_url)
- swh_cnt_id = get_swh_persistent_id('content', sha1_git)
- swh_cnt_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_cnt_id})
- self.assertContains(resp, swh_cnt_id)
- self.assertContains(resp, swh_cnt_id_url)
- @given(content_text_no_highlight())
- def test_content_view_text_no_highlight(self, content):
+@given(content_text_no_highlight())
+def test_content_view_text_no_highlight(client, archive_data, content):
+ sha1_git = content['sha1_git']
- sha1_git = content['sha1_git']
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']})
- url = reverse('browse-content',
+ url_raw = reverse('browse-content-raw',
url_args={'query_string': content['sha1']})
- url_raw = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']})
+ resp = client.get(url)
- resp = self.client.get(url)
+ content_display = _process_content_for_display(archive_data, content)
- content_display = self._process_content_for_display(content)
+ assert resp.status_code == 200
+ assert_template_used('browse/content.html')
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/content.html')
+ assert_contains(resp, '<code class="nohighlight">')
+ assert_contains(resp, escape(content_display['content_data']))
+ assert_contains(resp, url_raw)
- self.assertContains(resp, '<code class="nohighlight">')
- self.assertContains(resp, escape(content_display['content_data'])) # noqa
- self.assertContains(resp, url_raw)
+ swh_cnt_id = get_swh_persistent_id('content', sha1_git)
+ swh_cnt_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_cnt_id})
- swh_cnt_id = get_swh_persistent_id('content', sha1_git)
- swh_cnt_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_cnt_id})
+ assert_contains(resp, swh_cnt_id)
+ assert_contains(resp, swh_cnt_id_url)
- self.assertContains(resp, swh_cnt_id)
- self.assertContains(resp, swh_cnt_id_url)
- @given(content_text_non_utf8())
- def test_content_view_no_utf8_text(self, content):
+@given(content_text_non_utf8())
+def test_content_view_no_utf8_text(client, archive_data, content):
+ sha1_git = content['sha1_git']
- sha1_git = content['sha1_git']
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']})
- url = reverse('browse-content',
- url_args={'query_string': content['sha1']})
+ resp = client.get(url)
- resp = self.client.get(url)
+ content_display = _process_content_for_display(archive_data, content)
- content_display = self._process_content_for_display(content)
+ assert resp.status_code == 200
+ assert_template_used('browse/content.html')
+ swh_cnt_id = get_swh_persistent_id('content', sha1_git)
+ swh_cnt_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_cnt_id})
+ assert_contains(resp, swh_cnt_id_url)
+ assert_contains(resp, escape(content_display['content_data']))
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/content.html')
- swh_cnt_id = get_swh_persistent_id('content', sha1_git)
- swh_cnt_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_cnt_id})
- self.assertContains(resp, swh_cnt_id_url)
- self.assertContains(resp, escape(content_display['content_data']))
- @given(content_image_type())
- def test_content_view_image(self, content):
+@given(content_image_type())
+def test_content_view_image(client, archive_data, content):
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']})
- url = reverse('browse-content',
+ url_raw = reverse('browse-content-raw',
url_args={'query_string': content['sha1']})
- url_raw = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']})
-
- resp = self.client.get(url)
+ resp = client.get(url)
- content_display = self._process_content_for_display(content)
- mimetype = content_display['mimetype']
- content_data = content_display['content_data']
+ content_display = _process_content_for_display(archive_data, content)
+ mimetype = content_display['mimetype']
+ content_data = content_display['content_data']
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/content.html')
- self.assertContains(resp, '<img src="data:%s;base64,%s"/>'
- % (mimetype, content_data))
- self.assertContains(resp, url_raw)
+ assert resp.status_code == 200
+ assert_template_used('browse/content.html')
+ assert_contains(resp, '<img src="data:%s;base64,%s"/>'
+ % (mimetype, content_data))
+ assert_contains(resp, url_raw)
- @given(content_text())
- def test_content_view_text_with_path(self, content):
- path = content['path']
+@given(content_text())
+def test_content_view_text_with_path(client, archive_data, content):
+ path = content['path']
- url = reverse('browse-content',
- url_args={'query_string': content['sha1']},
- query_params={'path': path})
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']},
+ query_params={'path': path})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/content.html')
+ resp = client.get(url)
+ assert resp.status_code == 200
+ assert_template_used('browse/content.html')
- self.assertContains(resp, '<nav class="bread-crumbs')
+ assert_contains(resp, '<nav class="bread-crumbs')
- content_display = self._process_content_for_display(content)
- mimetype = content_display['mimetype']
+ content_display = _process_content_for_display(archive_data, content)
+ mimetype = content_display['mimetype']
- if mimetype.startswith('text/'):
- hljs_language = content['hljs_language']
- self.assertContains(resp, '<code class="%s">' % hljs_language)
- self.assertContains(resp, escape(content_display['content_data']))
+ if mimetype.startswith('text/'):
+ hljs_language = content['hljs_language']
+ assert_contains(resp, '<code class="%s">' % hljs_language)
+ assert_contains(resp, escape(content_display['content_data']))
- split_path = path.split('/')
+ split_path = path.split('/')
- root_dir_sha1 = split_path[0]
- filename = split_path[-1]
- path = path.replace(root_dir_sha1 + '/', '').replace(filename, '')
+ root_dir_sha1 = split_path[0]
+ filename = split_path[-1]
+ path = path.replace(root_dir_sha1 + '/', '').replace(filename, '')
- path_info = gen_path_info(path)
+ path_info = gen_path_info(path)
- root_dir_url = reverse('browse-directory',
- url_args={'sha1_git': root_dir_sha1})
+ root_dir_url = reverse('browse-directory',
+ url_args={'sha1_git': root_dir_sha1})
- self.assertContains(resp, '<li class="swh-path">',
- count=len(path_info)+1)
+ assert_contains(resp, '<li class="swh-path">',
+ count=len(path_info)+1)
- self.assertContains(resp, '<a href="' + root_dir_url + '">' +
- root_dir_sha1[:7] + '</a>')
+ assert_contains(resp, '<a href="' + root_dir_url + '">' +
+ root_dir_sha1[:7] + '</a>')
- for p in path_info:
- dir_url = reverse('browse-directory',
- url_args={'sha1_git': root_dir_sha1,
- 'path': p['path']})
- self.assertContains(resp, '<a href="' + dir_url + '">' +
- p['name'] + '</a>')
+ for p in path_info:
+ dir_url = reverse('browse-directory',
+ url_args={'sha1_git': root_dir_sha1,
+ 'path': p['path']})
+ assert_contains(resp, '<a href="' + dir_url + '">' +
+ p['name'] + '</a>')
- self.assertContains(resp, '<li>' + filename + '</li>')
+ assert_contains(resp, '<li>' + filename + '</li>')
- url_raw = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']},
- query_params={'filename': filename})
- self.assertContains(resp, url_raw)
-
- url = reverse('browse-content',
+ url_raw = reverse('browse-content-raw',
url_args={'query_string': content['sha1']},
- query_params={'path': filename})
+ query_params={'filename': filename})
+ assert_contains(resp, url_raw)
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/content.html')
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']},
+ query_params={'path': filename})
- self.assertNotContains(resp, '<nav class="bread-crumbs')
+ resp = client.get(url)
+ assert resp.status_code == 200
+ assert_template_used('browse/content.html')
- invalid_path = '%s/foo/bar/baz' % root_dir_sha1
- url = reverse('browse-content',
- url_args={'query_string': content['sha1']},
- query_params={'path': invalid_path})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
+ assert_not_contains(resp, '<nav class="bread-crumbs')
- @given(content_text())
- def test_content_raw_text(self, content):
+ invalid_path = '%s/foo/bar/baz' % root_dir_sha1
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']},
+ query_params={'path': invalid_path})
+ resp = client.get(url)
+ assert resp.status_code == 404
- url = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']})
- resp = self.client.get(url)
+@given(content_text())
+def test_content_raw_text(client, archive_data, content):
+ url = reverse('browse-content-raw',
+ url_args={'query_string': content['sha1']})
- content_data = self.content_get(content['sha1'])['data']
+ resp = client.get(url)
- self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp['Content-Type'], 'text/plain')
- self.assertEqual(resp['Content-disposition'],
- 'filename=%s_%s' % ('sha1', content['sha1']))
- self.assertEqual(resp.content, content_data)
+ content_data = archive_data.content_get(content['sha1'])['data']
- filename = content['path'].split('/')[-1]
+ assert resp.status_code == 200
+ assert resp['Content-Type'] == 'text/plain'
+ assert resp['Content-disposition'] == ('filename=%s_%s' %
+ ('sha1', content['sha1']))
+ assert resp.content == content_data
- url = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']}, # noqa
- query_params={'filename': filename})
+ filename = content['path'].split('/')[-1]
- resp = self.client.get(url)
+ url = reverse('browse-content-raw',
+ url_args={'query_string': content['sha1']},
+ query_params={'filename': filename})
- self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp['Content-Type'], 'text/plain')
- self.assertEqual(resp['Content-disposition'],
- 'filename=%s' % filename)
- self.assertEqual(resp.content, content_data)
+ resp = client.get(url)
- @given(content_text_non_utf8())
- def test_content_raw_no_utf8_text(self, content):
+ assert resp.status_code == 200
+ assert resp['Content-Type'] == 'text/plain'
+ assert resp['Content-disposition'] == 'filename=%s' % filename
+ assert resp.content == content_data
+
+
+@given(content_text_non_utf8())
+def test_content_raw_no_utf8_text(client, content):
+ url = reverse('browse-content-raw',
+ url_args={'query_string': content['sha1']})
- url = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']})
+ resp = client.get(url)
+ assert resp.status_code == 200
+ _, encoding = get_mimetype_and_encoding_for_content(resp.content)
+ assert encoding == content['encoding']
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 200)
- _, encoding = get_mimetype_and_encoding_for_content(resp.content)
- self.assertEqual(encoding, content['encoding'])
- @given(content_image_type())
- def test_content_raw_bin(self, content):
+@given(content_image_type())
+def test_content_raw_bin(client, archive_data, content):
+ url = reverse('browse-content-raw',
+ url_args={'query_string': content['sha1']})
- url = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']})
+ resp = client.get(url)
- resp = self.client.get(url)
+ filename = content['path'].split('/')[-1]
+ content_data = archive_data.content_get(content['sha1'])['data']
- filename = content['path'].split('/')[-1]
- content_data = self.content_get(content['sha1'])['data']
+ assert resp.status_code == 200
+ assert resp['Content-Type'] == 'application/octet-stream'
+ assert resp['Content-disposition'] == \
+ 'attachment; filename=%s_%s' % \
+ ('sha1', content['sha1'])
+ assert resp.content == content_data
- self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp['Content-Type'], 'application/octet-stream')
- self.assertEqual(resp['Content-disposition'],
- 'attachment; filename=%s_%s' %
- ('sha1', content['sha1']))
- self.assertEqual(resp.content, content_data)
+ url = reverse('browse-content-raw',
+ url_args={'query_string': content['sha1']},
+ query_params={'filename': filename})
- url = reverse('browse-content-raw',
- url_args={'query_string': content['sha1']},
- query_params={'filename': filename})
+ resp = client.get(url)
- resp = self.client.get(url)
+ assert resp.status_code == 200
+ assert resp['Content-Type'] == 'application/octet-stream'
+ assert resp['Content-disposition'] == \
+ 'attachment; filename=%s' % filename
+ assert resp.content == content_data
- self.assertEqual(resp.status_code, 200)
- self.assertEqual(resp['Content-Type'], 'application/octet-stream')
- self.assertEqual(resp['Content-disposition'],
- 'attachment; filename=%s' % filename)
- self.assertEqual(resp.content, content_data)
- @given(invalid_sha1(), unknown_content())
- def test_content_request_errors(self, invalid_sha1, unknown_content):
+@given(invalid_sha1(), unknown_content())
+def test_content_request_errors(client, invalid_sha1, unknown_content):
+ url = reverse('browse-content',
+ url_args={'query_string': invalid_sha1})
+ resp = client.get(url)
+ assert resp.status_code == 400
+ assert_template_used('error.html')
- url = reverse('browse-content',
- url_args={'query_string': invalid_sha1})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 400)
- self.assertTemplateUsed('error.html')
+ url = reverse('browse-content',
+ url_args={'query_string': unknown_content['sha1']})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
- url = reverse('browse-content',
- url_args={'query_string': unknown_content['sha1']})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- @patch('swh.web.browse.utils.service')
- @given(content())
- def test_content_bytes_missing(self, mock_service, content):
+@given(content())
+def test_content_bytes_missing(client, archive_data, mocker, content):
+ mock_service = mocker.patch('swh.web.browse.utils.service')
+ content_data = archive_data.content_get_metadata(content['sha1'])
+ content_data['data'] = None
- content_data = self.content_get_metadata(content['sha1'])
- content_data['data'] = None
+ mock_service.lookup_content.return_value = content_data
+ mock_service.lookup_content_filetype.side_effect = Exception()
+ mock_service.lookup_content_raw.side_effect = NotFoundExc(
+ 'Content bytes not available!')
- mock_service.lookup_content.return_value = content_data
- mock_service.lookup_content_filetype.side_effect = Exception()
- mock_service.lookup_content_raw.side_effect = NotFoundExc(
- 'Content bytes not available!')
+ url = reverse('browse-content',
+ url_args={'query_string': content['sha1']})
- url = reverse('browse-content',
- url_args={'query_string': content['sha1']})
+ resp = client.get(url)
+
+ assert resp.status_code == 404
+ assert_template_used('browse/content.html')
- resp = self.client.get(url)
-
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('browse/content.html')
-
- @patch('swh.web.browse.views.content.request_content')
- def test_content_too_large(self, mock_request_content):
- stub_content_too_large_data = {
- 'checksums': {
- 'sha1': '8624bcdae55baeef00cd11d5dfcfa60f68710a02',
- 'sha1_git': '94a9ed024d3859793618152ea559a168bbcbb5e2',
- 'sha256': ('8ceb4b9ee5adedde47b31e975c1d90c73ad27b6b16'
- '5a1dcd80c7c545eb65b903'),
- 'blake2s256': ('38702b7168c7785bfe748b51b45d9856070ba90'
- 'f9dc6d90f2ea75d4356411ffe')
- },
- 'length': 30000000,
- 'raw_data': None,
- 'mimetype': 'text/plain',
- 'encoding': 'us-ascii',
- 'language': 'not detected',
- 'licenses': 'GPL',
- 'error_code': 200,
- 'error_message': '',
- 'error_description': ''
- }
-
- content_sha1 = stub_content_too_large_data['checksums']['sha1']
-
- mock_request_content.return_value = stub_content_too_large_data
-
- url = reverse('browse-content',
+
+def test_content_too_large(client, mocker):
+ mock_request_content = mocker.patch(
+ 'swh.web.browse.views.content.request_content')
+ stub_content_too_large_data = {
+ 'checksums': {
+ 'sha1': '8624bcdae55baeef00cd11d5dfcfa60f68710a02',
+ 'sha1_git': '94a9ed024d3859793618152ea559a168bbcbb5e2',
+ 'sha256': ('8ceb4b9ee5adedde47b31e975c1d90c73ad27b6b16'
+ '5a1dcd80c7c545eb65b903'),
+ 'blake2s256': ('38702b7168c7785bfe748b51b45d9856070ba90'
+ 'f9dc6d90f2ea75d4356411ffe')
+ },
+ 'length': 30000000,
+ 'raw_data': None,
+ 'mimetype': 'text/plain',
+ 'encoding': 'us-ascii',
+ 'language': 'not detected',
+ 'licenses': 'GPL',
+ 'error_code': 200,
+ 'error_message': '',
+ 'error_description': ''
+ }
+
+ content_sha1 = stub_content_too_large_data['checksums']['sha1']
+
+ mock_request_content.return_value = stub_content_too_large_data
+
+ url = reverse('browse-content',
+ url_args={'query_string': content_sha1})
+
+ url_raw = reverse('browse-content-raw',
url_args={'query_string': content_sha1})
- url_raw = reverse('browse-content-raw',
- url_args={'query_string': content_sha1})
+ resp = client.get(url)
+
+ assert resp.status_code == 200
+ assert_template_used('browse/content.html')
- resp = self.client.get(url)
+ assert_contains(resp, 'Content is too large to be displayed')
+ assert_contains(resp, url_raw)
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/content.html')
- self.assertContains(resp, 'Content is too large to be displayed')
- self.assertContains(resp, url_raw)
+@given(content())
+def test_content_uppercase(client, content):
+ url = reverse('browse-content-uppercase-checksum',
+ url_args={'query_string': content['sha1'].upper()})
+ resp = client.get(url)
+ assert resp.status_code == 302
- def _process_content_for_display(self, content):
- content_data = self.content_get(content['sha1'])
+ redirect_url = reverse('browse-content',
+ url_args={'query_string': content['sha1']})
- mime_type, encoding = get_mimetype_and_encoding_for_content(
- content_data['data'])
+ assert resp['location'] == redirect_url
- mime_type, content_data = _re_encode_content(mime_type, encoding,
- content_data['data'])
- return prepare_content_for_display(content_data, mime_type,
- content['path'])
+def _process_content_for_display(archive_data, content):
+ content_data = archive_data.content_get(content['sha1'])
- @given(content())
- def test_content_uppercase(self, content):
- url = reverse('browse-content-uppercase-checksum',
- url_args={'query_string': content['sha1'].upper()})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
+ mime_type, encoding = get_mimetype_and_encoding_for_content(
+ content_data['data'])
- redirect_url = reverse('browse-content',
- url_args={'query_string': content['sha1']})
+ mime_type, content_data = _re_encode_content(mime_type, encoding,
+ content_data['data'])
- self.assertEqual(resp['location'], redirect_url)
+ return prepare_content_for_display(content_data, mime_type,
+ content['path'])
diff --git a/swh/web/tests/browse/views/test_directory.py b/swh/web/tests/browse/views/test_directory.py
--- a/swh/web/tests/browse/views/test_directory.py
+++ b/swh/web/tests/browse/views/test_directory.py
@@ -9,123 +9,125 @@
from swh.web.common.utils import reverse, get_swh_persistent_id
from swh.web.common.utils import gen_path_info
+from swh.web.tests.django_asserts import assert_contains, assert_template_used
from swh.web.tests.strategies import (
directory, directory_with_subdirs, invalid_sha1,
unknown_directory
)
-from swh.web.tests.testcase import WebTestCase
-class SwhBrowseDirectoryTest(WebTestCase):
+@given(directory())
+def test_root_directory_view(client, archive_data, directory):
+ _directory_view(client, directory, archive_data.directory_ls(directory))
- def directory_view(self, root_directory_sha1, directory_entries,
- path=None):
- dirs = [e for e in directory_entries if e['type'] in ('dir', 'rev')]
- files = [e for e in directory_entries if e['type'] == 'file']
- url_args = {'sha1_git': root_directory_sha1}
- if path:
- url_args['path'] = path
-
- url = reverse('browse-directory',
- url_args=url_args)
-
- root_dir_url = reverse('browse-directory',
- url_args={'sha1_git': root_directory_sha1})
-
- resp = self.client.get(url)
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/directory.html')
- self.assertContains(resp, '<a href="' + root_dir_url + '">' +
- root_directory_sha1[:7] + '</a>')
- self.assertContains(resp, '<td class="swh-directory">',
- count=len(dirs))
- self.assertContains(resp, '<td class="swh-content">',
- count=len(files))
-
- for d in dirs:
- if d['type'] == 'rev':
- dir_url = reverse('browse-revision',
- url_args={'sha1_git': d['target']})
- else:
- dir_path = d['name']
- if path:
- dir_path = "%s/%s" % (path, d['name'])
- dir_url = reverse('browse-directory',
- url_args={'sha1_git': root_directory_sha1,
- 'path': dir_path})
- self.assertContains(resp, dir_url)
-
- for f in files:
- file_path = "%s/%s" % (root_directory_sha1, f['name'])
- if path:
- file_path = "%s/%s/%s" % (root_directory_sha1, path, f['name'])
- query_string = 'sha1_git:' + f['target']
- file_url = reverse('browse-content',
- url_args={'query_string': query_string},
- query_params={'path': file_path})
- self.assertContains(resp, file_url)
+@given(directory_with_subdirs())
+def test_sub_directory_view(client, archive_data, directory):
+ dir_content = archive_data.directory_ls(directory)
+ subdir = random.choice([e for e in dir_content if e['type'] == 'dir'])
+ subdir_content = archive_data.directory_ls(subdir['target'])
+ _directory_view(client, directory, subdir_content, subdir['name'])
- path_info = gen_path_info(path)
- self.assertContains(resp, '<li class="swh-path">',
- count=len(path_info)+1)
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (root_dir_url, root_directory_sha1[:7]))
+@given(invalid_sha1(), unknown_directory())
+def test_directory_request_errors(client, invalid_sha1, unknown_directory):
+ dir_url = reverse('browse-directory',
+ url_args={'sha1_git': invalid_sha1})
- for p in path_info:
- dir_url = reverse('browse-directory',
- url_args={'sha1_git': root_directory_sha1,
- 'path': p['path']})
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (dir_url, p['name']))
+ resp = client.get(dir_url)
+ assert resp.status_code == 400
+ assert_template_used('browse/error.html')
- self.assertContains(resp, 'vault-cook-directory')
+ dir_url = reverse('browse-directory',
+ url_args={'sha1_git': unknown_directory})
- swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
- swh_dir_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_dir_id})
- self.assertContains(resp, swh_dir_id)
- self.assertContains(resp, swh_dir_id_url)
+ resp = client.get(dir_url)
+ assert resp.status_code == 404
+ assert_template_used('browse/error.html')
- @given(directory())
- def test_root_directory_view(self, directory):
- self.directory_view(directory, self.directory_ls(directory))
- @given(directory_with_subdirs())
- def test_sub_directory_view(self, directory):
- dir_content = self.directory_ls(directory)
- subdir = random.choice([e for e in dir_content if e['type'] == 'dir'])
- subdir_content = self.directory_ls(subdir['target'])
- self.directory_view(directory, subdir_content, subdir['name'])
+@given(directory())
+def test_directory_uppercase(client, directory):
+ url = reverse('browse-directory-uppercase-checksum',
+ url_args={'sha1_git': directory.upper()})
- @given(invalid_sha1(), unknown_directory())
- def test_directory_request_errors(self, invalid_sha1, unknown_directory):
+ resp = client.get(url)
+ assert resp.status_code == 302
- dir_url = reverse('browse-directory',
- url_args={'sha1_git': invalid_sha1})
+ redirect_url = reverse('browse-directory',
+ url_args={'sha1_git': directory})
- resp = self.client.get(dir_url)
- self.assertEqual(resp.status_code, 400)
- self.assertTemplateUsed('browse/error.html')
+ assert resp['location'] == redirect_url
- dir_url = reverse('browse-directory',
- url_args={'sha1_git': unknown_directory})
- resp = self.client.get(dir_url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('browse/error.html')
+def _directory_view(client, root_directory_sha1, directory_entries,
+ path=None):
+ dirs = [e for e in directory_entries if e['type'] in ('dir', 'rev')]
+ files = [e for e in directory_entries if e['type'] == 'file']
+
+ url_args = {'sha1_git': root_directory_sha1}
+ if path:
+ url_args['path'] = path
- @given(directory())
- def test_directory_uppercase(self, directory):
- url = reverse('browse-directory-uppercase-checksum',
- url_args={'sha1_git': directory.upper()})
+ url = reverse('browse-directory',
+ url_args=url_args)
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
+ root_dir_url = reverse('browse-directory',
+ url_args={'sha1_git': root_directory_sha1})
- redirect_url = reverse('browse-directory',
- url_args={'sha1_git': directory})
+ resp = client.get(url)
- self.assertEqual(resp['location'], redirect_url)
+ assert resp.status_code == 200
+ assert_template_used('browse/directory.html')
+ assert_contains(resp, '<a href="' + root_dir_url + '">' +
+ root_directory_sha1[:7] + '</a>')
+ assert_contains(resp, '<td class="swh-directory">',
+ count=len(dirs))
+ assert_contains(resp, '<td class="swh-content">',
+ count=len(files))
+
+ for d in dirs:
+ if d['type'] == 'rev':
+ dir_url = reverse('browse-revision',
+ url_args={'sha1_git': d['target']})
+ else:
+ dir_path = d['name']
+ if path:
+ dir_path = "%s/%s" % (path, d['name'])
+ dir_url = reverse('browse-directory',
+ url_args={'sha1_git': root_directory_sha1,
+ 'path': dir_path})
+ assert_contains(resp, dir_url)
+
+ for f in files:
+ file_path = "%s/%s" % (root_directory_sha1, f['name'])
+ if path:
+ file_path = "%s/%s/%s" % (root_directory_sha1, path, f['name'])
+ query_string = 'sha1_git:' + f['target']
+ file_url = reverse('browse-content',
+ url_args={'query_string': query_string},
+ query_params={'path': file_path})
+ assert_contains(resp, file_url)
+
+ path_info = gen_path_info(path)
+
+ assert_contains(resp, '<li class="swh-path">',
+ count=len(path_info)+1)
+ assert_contains(resp, '<a href="%s">%s</a>' %
+ (root_dir_url, root_directory_sha1[:7]))
+
+ for p in path_info:
+ dir_url = reverse('browse-directory',
+ url_args={'sha1_git': root_directory_sha1,
+ 'path': p['path']})
+ assert_contains(resp, '<a href="%s">%s</a>' %
+ (dir_url, p['name']))
+
+ assert_contains(resp, 'vault-cook-directory')
+
+ swh_dir_id = get_swh_persistent_id(
+ 'directory', directory_entries[0]['dir_id'])
+ swh_dir_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_dir_id})
+ assert_contains(resp, swh_dir_id)
+ assert_contains(resp, swh_dir_id_url)
diff --git a/swh/web/tests/browse/views/test_identifiers.py b/swh/web/tests/browse/views/test_identifiers.py
--- a/swh/web/tests/browse/views/test_identifiers.py
+++ b/swh/web/tests/browse/views/test_identifiers.py
@@ -9,151 +9,154 @@
from swh.web.tests.strategies import (
content, directory, revision, release, snapshot
)
-from swh.web.tests.testcase import WebTestCase
swh_id_prefix = 'swh:1:'
-class SwhBrowseIdTest(WebTestCase):
+@given(content())
+def test_content_id_browse(client, content):
+ cnt_sha1_git = content['sha1_git']
+ swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id})
- @given(content())
- def test_content_id_browse(self, content):
- cnt_sha1_git = content['sha1_git']
- swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id})
+ query_string = 'sha1_git:' + cnt_sha1_git
+ content_browse_url = reverse('browse-content',
+ url_args={'query_string': query_string})
- query_string = 'sha1_git:' + cnt_sha1_git
- content_browse_url = reverse('browse-content',
- url_args={'query_string': query_string})
+ resp = client.get(url)
- resp = self.client.get(url)
+ assert resp.status_code == 302
+ assert resp['location'] == content_browse_url
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], content_browse_url)
- @given(directory())
- def test_directory_id_browse(self, directory):
- swh_id = swh_id_prefix + 'dir:' + directory
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id})
+@given(directory())
+def test_directory_id_browse(client, directory):
+ swh_id = swh_id_prefix + 'dir:' + directory
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id})
- directory_browse_url = reverse('browse-directory',
- url_args={'sha1_git': directory})
+ directory_browse_url = reverse('browse-directory',
+ url_args={'sha1_git': directory})
- resp = self.client.get(url)
+ resp = client.get(url)
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], directory_browse_url)
+ assert resp.status_code == 302
+ assert resp['location'] == directory_browse_url
- @given(revision())
- def test_revision_id_browse(self, revision):
- swh_id = swh_id_prefix + 'rev:' + revision
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id})
- revision_browse_url = reverse('browse-revision',
- url_args={'sha1_git': revision})
+@given(revision())
+def test_revision_id_browse(client, revision):
+ swh_id = swh_id_prefix + 'rev:' + revision
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id})
- resp = self.client.get(url)
+ revision_browse_url = reverse('browse-revision',
+ url_args={'sha1_git': revision})
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], revision_browse_url)
+ resp = client.get(url)
- query_params = {'origin': 'https://github.com/user/repo'}
+ assert resp.status_code == 302
+ assert resp['location'] == revision_browse_url
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id},
- query_params=query_params)
+ query_params = {'origin': 'https://github.com/user/repo'}
- revision_browse_url = reverse('browse-revision',
- url_args={'sha1_git': revision},
- query_params=query_params)
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id},
+ query_params=query_params)
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], revision_browse_url)
+ revision_browse_url = reverse('browse-revision',
+ url_args={'sha1_git': revision},
+ query_params=query_params)
- @given(release())
- def test_release_id_browse(self, release):
- swh_id = swh_id_prefix + 'rel:' + release
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id})
+ resp = client.get(url)
+ assert resp.status_code == 302
+ assert resp['location'] == revision_browse_url
- release_browse_url = reverse('browse-release',
- url_args={'sha1_git': release})
- resp = self.client.get(url)
+@given(release())
+def test_release_id_browse(client, release):
+ swh_id = swh_id_prefix + 'rel:' + release
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id})
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], release_browse_url)
+ release_browse_url = reverse('browse-release',
+ url_args={'sha1_git': release})
- query_params = {'origin': 'https://github.com/user/repo'}
+ resp = client.get(url)
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id},
- query_params=query_params)
+ assert resp.status_code == 302
+ assert resp['location'] == release_browse_url
- release_browse_url = reverse('browse-release',
- url_args={'sha1_git': release},
- query_params=query_params)
+ query_params = {'origin': 'https://github.com/user/repo'}
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], release_browse_url)
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id},
+ query_params=query_params)
- @given(snapshot())
- def test_snapshot_id_browse(self, snapshot):
- swh_id = swh_id_prefix + 'snp:' + snapshot
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id})
+ release_browse_url = reverse('browse-release',
+ url_args={'sha1_git': release},
+ query_params=query_params)
- snapshot_browse_url = reverse('browse-snapshot',
- url_args={'snapshot_id': snapshot})
+ resp = client.get(url)
+ assert resp.status_code == 302
+ assert resp['location'] == release_browse_url
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], snapshot_browse_url)
+@given(snapshot())
+def test_snapshot_id_browse(client, snapshot):
+ swh_id = swh_id_prefix + 'snp:' + snapshot
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id})
- query_params = {'origin': 'https://github.com/user/repo'}
+ snapshot_browse_url = reverse('browse-snapshot',
+ url_args={'snapshot_id': snapshot})
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id},
- query_params=query_params)
+ resp = client.get(url)
- release_browse_url = reverse('browse-snapshot',
- url_args={'snapshot_id': snapshot},
- query_params=query_params)
+ assert resp.status_code == 302
+ assert resp['location'] == snapshot_browse_url
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], release_browse_url)
+ query_params = {'origin': 'https://github.com/user/repo'}
- @given(release())
- def test_bad_id_browse(self, release):
- swh_id = swh_id_prefix + 'foo:' + release
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id})
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id},
+ query_params=query_params)
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 400)
+ release_browse_url = reverse('browse-snapshot',
+ url_args={'snapshot_id': snapshot},
+ query_params=query_params)
- @given(content())
- def test_content_id_optional_parts_browse(self, content):
- cnt_sha1_git = content['sha1_git']
- optional_parts = ';lines=4-20;origin=https://github.com/user/repo'
- swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git + optional_parts
- url = reverse('browse-swh-id',
- url_args={'swh_id': swh_id})
+ resp = client.get(url)
+ assert resp.status_code == 302
+ assert resp['location'] == release_browse_url
- query_string = 'sha1_git:' + cnt_sha1_git
- content_browse_url = reverse(
- 'browse-content', url_args={'query_string': query_string},
- query_params={'origin': 'https://github.com/user/repo'})
- content_browse_url += '#L4-L20'
- resp = self.client.get(url)
+@given(release())
+def test_bad_id_browse(client, release):
+ swh_id = swh_id_prefix + 'foo:' + release
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id})
- self.assertEqual(resp.status_code, 302)
- self.assertEqual(resp['location'], content_browse_url)
+ resp = client.get(url)
+ assert resp.status_code == 400
+
+
+@given(content())
+def test_content_id_optional_parts_browse(client, content):
+ cnt_sha1_git = content['sha1_git']
+ optional_parts = ';lines=4-20;origin=https://github.com/user/repo'
+ swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git + optional_parts
+ url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_id})
+
+ query_string = 'sha1_git:' + cnt_sha1_git
+ content_browse_url = reverse(
+ 'browse-content', url_args={'query_string': query_string},
+ query_params={'origin': 'https://github.com/user/repo'})
+ content_browse_url += '#L4-L20'
+
+ resp = client.get(url)
+
+ assert resp.status_code == 302
+ assert resp['location'] == content_browse_url
diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py
--- a/swh/web/tests/browse/views/test_origin.py
+++ b/swh/web/tests/browse/views/test_origin.py
@@ -4,8 +4,7 @@
# See top-level LICENSE file for more information
import random
-
-from unittest.mock import patch
+import re
from django.utils.html import escape
@@ -19,896 +18,838 @@
parse_timestamp, get_swh_persistent_id
)
from swh.web.tests.data import get_content
+from swh.web.tests.django_asserts import assert_contains, assert_template_used
from swh.web.tests.strategies import (
origin, origin_with_multiple_visits, new_origin,
new_snapshot, visit_dates, revisions
)
-from swh.web.tests.testcase import WebTestCase
-class SwhBrowseOriginTest(WebTestCase):
+@given(origin_with_multiple_visits())
+def test_origin_visits_browse(client, archive_data, origin):
+ url = reverse('browse-origin-visits',
+ url_args={'origin_url': origin['url']})
+ resp = client.get(url)
- @given(origin_with_multiple_visits())
- def test_origin_visits_browse(self, origin):
+ assert resp.status_code == 200
+ assert_template_used('origin-visits.html')
- url = reverse('browse-origin-visits',
- url_args={'origin_url': origin['url']})
- resp = self.client.get(url)
+ url = reverse('browse-origin-visits',
+ url_args={'origin_url': origin['url']})
+ resp = client.get(url)
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('origin-visits.html')
+ assert resp.status_code == 200
+ assert_template_used('origin-visits.html')
- url = reverse('browse-origin-visits',
- url_args={'origin_url': origin['url']})
- resp = self.client.get(url)
+ visits = archive_data.origin_visit_get(origin['url'])
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('origin-visits.html')
+ for v in visits:
+ vdate = format_utc_iso_date(v['date'], '%Y-%m-%dT%H:%M:%SZ')
+ browse_dir_url = reverse('browse-origin-directory',
+ url_args={'origin_url': origin['url'],
+ 'timestamp': vdate})
+ assert_contains(resp, browse_dir_url)
- visits = self.origin_visit_get(origin['url'])
- for v in visits:
- vdate = format_utc_iso_date(v['date'], '%Y-%m-%dT%H:%M:%SZ')
- browse_dir_url = reverse('browse-origin-directory',
- url_args={'origin_url': origin['url'],
- 'timestamp': vdate})
- self.assertContains(resp, browse_dir_url)
+@given(origin_with_multiple_visits())
+def test_origin_content_view(client, archive_data, origin):
+ origin_visits = archive_data.origin_visit_get(origin['url'])
- def origin_content_view_helper(self, origin_info, origin_visits,
- origin_branches, origin_releases,
- root_dir_sha1, content,
- visit_id=None, timestamp=None):
+ def _get_archive_data(visit_idx):
+ snapshot = archive_data.snapshot_get(
+ origin_visits[visit_idx]['snapshot'])
+ head_rev_id = archive_data.snapshot_get_head(snapshot)
+ head_rev = archive_data.revision_get(head_rev_id)
+ dir_content = archive_data.directory_ls(head_rev['directory'])
+ dir_files = [e for e in dir_content if e['type'] == 'file']
+ dir_file = random.choice(dir_files)
+ branches, releases = process_snapshot_branches(snapshot)
+ return {
+ 'branches': branches,
+ 'releases': releases,
+ 'root_dir_sha1': head_rev['directory'],
+ 'content': get_content(dir_file['checksums']['sha1']),
+ 'visit': origin_visits[visit_idx]
+ }
- content_path = '/'.join(content['path'].split('/')[1:])
+ tdata = _get_archive_data(-1)
+
+ _origin_content_view_test_helper(client, origin, origin_visits,
+ tdata['branches'],
+ tdata['releases'],
+ tdata['root_dir_sha1'],
+ tdata['content'])
+
+ _origin_content_view_test_helper(client, origin, origin_visits,
+ tdata['branches'],
+ tdata['releases'],
+ tdata['root_dir_sha1'],
+ tdata['content'],
+ timestamp=tdata['visit']['date'])
+
+ visit_unix_ts = parse_timestamp(tdata['visit']['date']).timestamp()
+ visit_unix_ts = int(visit_unix_ts)
+
+ _origin_content_view_test_helper(client, origin, origin_visits,
+ tdata['branches'],
+ tdata['releases'],
+ tdata['root_dir_sha1'],
+ tdata['content'],
+ timestamp=visit_unix_ts)
+
+ tdata = _get_archive_data(0)
+
+ _origin_content_view_test_helper(client, origin, origin_visits,
+ tdata['branches'],
+ tdata['releases'],
+ tdata['root_dir_sha1'],
+ tdata['content'],
+ visit_id=tdata['visit']['visit'])
+
+
+@given(origin())
+def test_origin_root_directory_view(client, archive_data, origin):
+ origin_visits = archive_data.origin_visit_get(origin['url'])
+
+ visit = origin_visits[-1]
+ snapshot = archive_data.snapshot_get(visit['snapshot'])
+ head_rev_id = archive_data.snapshot_get_head(snapshot)
+ head_rev = archive_data.revision_get(head_rev_id)
+ root_dir_sha1 = head_rev['directory']
+ dir_content = archive_data.directory_ls(root_dir_sha1)
+ branches, releases = process_snapshot_branches(snapshot)
+ visit_unix_ts = parse_timestamp(visit['date']).timestamp()
+ visit_unix_ts = int(visit_unix_ts)
+
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content)
+
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content,
+ visit_id=visit['visit'])
+
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content,
+ timestamp=visit_unix_ts)
+
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content,
+ timestamp=visit['date'])
- url_args = {'origin_url': origin_info['url'],
- 'path': content_path}
+ origin = dict(origin)
+ del origin['type']
- if not visit_id:
- visit_id = origin_visits[-1]['visit']
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content)
- query_params = {}
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content,
+ visit_id=visit['visit'])
- if timestamp:
- url_args['timestamp'] = timestamp
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content,
+ timestamp=visit_unix_ts)
- if visit_id:
- query_params['visit_id'] = visit_id
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, dir_content,
+ timestamp=visit['date'])
- url = reverse('browse-origin-content',
- url_args=url_args,
- query_params=query_params)
- resp = self.client.get(url)
+@given(origin())
+def test_origin_sub_directory_view(client, archive_data, origin):
+ origin_visits = archive_data.origin_visit_get(origin['url'])
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('content.html')
+ visit = origin_visits[-1]
+ snapshot = archive_data.snapshot_get(visit['snapshot'])
+ head_rev_id = archive_data.snapshot_get_head(snapshot)
+ head_rev = archive_data.revision_get(head_rev_id)
+ root_dir_sha1 = head_rev['directory']
+ subdirs = [e for e in archive_data.directory_ls(root_dir_sha1)
+ if e['type'] == 'dir']
+ branches, releases = process_snapshot_branches(snapshot)
+ visit_unix_ts = parse_timestamp(visit['date']).timestamp()
+ visit_unix_ts = int(visit_unix_ts)
- self.assertContains(resp, '<code class="%s">' %
- content['hljs_language'])
- self.assertContains(resp, escape(content['data']))
+ if len(subdirs) == 0:
+ return
+
+ subdir = random.choice(subdirs)
+ subdir_content = archive_data.directory_ls(subdir['target'])
+ subdir_path = subdir['name']
+
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path)
+
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path,
+ visit_id=visit['visit'])
- split_path = content_path.split('/')
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path,
+ timestamp=visit_unix_ts)
- filename = split_path[-1]
- path = content_path.replace(filename, '')[:-1]
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path,
+ timestamp=visit['date'])
- path_info = gen_path_info(path)
+ origin = dict(origin)
+ del origin['type']
- del url_args['path']
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path)
- if timestamp:
- url_args['timestamp'] = \
- format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
- '%Y-%m-%dT%H:%M:%S')
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path,
+ visit_id=visit['visit'])
- root_dir_url = reverse('browse-origin-directory',
- url_args=url_args,
- query_params=query_params)
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path,
+ timestamp=visit_unix_ts)
- self.assertContains(resp, '<li class="swh-path">',
- count=len(path_info)+1)
+ _origin_directory_view_test_helper(client, origin, origin_visits, branches,
+ releases, root_dir_sha1, subdir_content,
+ path=subdir_path,
+ timestamp=visit['date'])
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (root_dir_url, root_dir_sha1[:7]))
- for p in path_info:
- url_args['path'] = p['path']
- dir_url = reverse('browse-origin-directory',
- url_args=url_args,
- query_params=query_params)
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (dir_url, p['name']))
+@given(origin())
+def test_origin_branches(client, archive_data, origin):
+ origin_visits = archive_data.origin_visit_get(origin['url'])
- self.assertContains(resp, '<li>%s</li>' % filename)
+ visit = origin_visits[-1]
+ snapshot = archive_data.snapshot_get(visit['snapshot'])
+ snapshot_content = process_snapshot_branches(snapshot)
- query_string = 'sha1_git:' + content['sha1_git']
+ _origin_branches_test_helper(client, origin, snapshot_content)
- url_raw = reverse('browse-content-raw',
- url_args={'query_string': query_string},
- query_params={'filename': filename})
- self.assertContains(resp, url_raw)
+ origin = dict(origin)
+ origin['type'] = None
- if 'args' in url_args:
- del url_args['path']
+ _origin_branches_test_helper(client, origin, snapshot_content)
- origin_branches_url = reverse('browse-origin-branches',
- url_args=url_args,
- query_params=query_params)
- self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
- (origin_branches_url, len(origin_branches)))
+@given(origin())
+def test_origin_releases(client, archive_data, origin):
+ origin_visits = archive_data.origin_visit_get(origin['url'])
+
+ visit = origin_visits[-1]
+ snapshot = archive_data.snapshot_get(visit['snapshot'])
+ snapshot_content = process_snapshot_branches(snapshot)
+
+ _origin_releases_test_helper(client, origin, snapshot_content)
+
+ origin = dict(origin)
+ origin['type'] = None
+
+ _origin_releases_test_helper(client, origin, snapshot_content)
- origin_releases_url = reverse('browse-origin-releases',
- url_args=url_args,
- query_params=query_params)
- self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
- (origin_releases_url, len(origin_releases)))
-
- self.assertContains(resp, '<li class="swh-branch">',
- count=len(origin_branches))
-
- url_args['path'] = content_path
-
- for branch in origin_branches:
- query_params['branch'] = branch['name']
- root_dir_branch_url = reverse('browse-origin-content',
- url_args=url_args,
- query_params=query_params)
-
- self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
-
- self.assertContains(resp, '<li class="swh-release">',
- count=len(origin_releases))
-
- query_params['branch'] = None
- for release in origin_releases:
- query_params['release'] = release['name']
- root_dir_release_url = reverse('browse-origin-content',
- url_args=url_args,
- query_params=query_params)
-
- self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
-
- url = reverse('browse-origin-content',
- url_args=url_args,
- query_params=query_params)
-
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('content.html')
-
- swh_cnt_id = get_swh_persistent_id('content', content['sha1_git'])
- swh_cnt_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_cnt_id})
- self.assertContains(resp, swh_cnt_id)
- self.assertContains(resp, swh_cnt_id_url)
-
- self.assertContains(resp, 'swh-take-new-snapshot')
-
- @given(origin_with_multiple_visits())
- def test_origin_content_view(self, origin):
-
- origin_visits = self.origin_visit_get(origin['url'])
-
- def _get_test_data(visit_idx):
- snapshot = self.snapshot_get(origin_visits[visit_idx]['snapshot'])
- head_rev_id = self.snapshot_get_head(snapshot)
- head_rev = self.revision_get(head_rev_id)
- dir_content = self.directory_ls(head_rev['directory'])
- dir_files = [e for e in dir_content if e['type'] == 'file']
- dir_file = random.choice(dir_files)
- branches, releases = process_snapshot_branches(snapshot)
- return {
- 'branches': branches,
- 'releases': releases,
- 'root_dir_sha1': head_rev['directory'],
- 'content': get_content(dir_file['checksums']['sha1']),
- 'visit': origin_visits[visit_idx]
+@given(new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(),
+ revisions(min_size=3, max_size=3))
+def test_origin_snapshot_null_branch(client, archive_data, new_origin,
+ new_snapshot, visit_dates, revisions):
+ snp_dict = new_snapshot.to_dict()
+ new_origin = archive_data.origin_add([new_origin])[0]
+ for i, branch in enumerate(snp_dict['branches'].keys()):
+ if i == 0:
+ snp_dict['branches'][branch] = None
+ else:
+ snp_dict['branches'][branch] = {
+ 'target_type': 'revision',
+ 'target': hash_to_bytes(revisions[i-1]),
}
- test_data = _get_test_data(-1)
-
- self.origin_content_view_helper(origin,
- origin_visits,
- test_data['branches'],
- test_data['releases'],
- test_data['root_dir_sha1'],
- test_data['content'])
-
- self.origin_content_view_helper(origin,
- origin_visits,
- test_data['branches'],
- test_data['releases'],
- test_data['root_dir_sha1'],
- test_data['content'],
- timestamp=test_data['visit']['date'])
-
- visit_unix_ts = parse_timestamp(test_data['visit']['date']).timestamp()
- visit_unix_ts = int(visit_unix_ts)
-
- self.origin_content_view_helper(origin,
- origin_visits,
- test_data['branches'],
- test_data['releases'],
- test_data['root_dir_sha1'],
- test_data['content'],
- timestamp=visit_unix_ts)
-
- test_data = _get_test_data(0)
-
- self.origin_content_view_helper(origin,
- origin_visits,
- test_data['branches'],
- test_data['releases'],
- test_data['root_dir_sha1'],
- test_data['content'],
- visit_id=test_data['visit']['visit'])
-
- def origin_directory_view_helper(self, origin_info, origin_visits,
+ archive_data.snapshot_add([snp_dict])
+ visit = archive_data.origin_visit_add(
+ new_origin['url'], visit_dates[0], type='git')
+ archive_data.origin_visit_update(new_origin['url'], visit['visit'],
+ status='partial',
+ snapshot=snp_dict['id'])
+
+ url = reverse('browse-origin-directory',
+ url_args={'origin_url': new_origin['url']})
+ rv = client.get(url)
+ assert rv.status_code == 200
+
+
+@given(new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(),
+ revisions(min_size=3, max_size=3))
+def test_origin_snapshot_invalid_branch(client, archive_data, new_origin,
+ new_snapshot, visit_dates, revisions):
+ snp_dict = new_snapshot.to_dict()
+ new_origin = archive_data.origin_add([new_origin])[0]
+ for i, branch in enumerate(snp_dict['branches'].keys()):
+ if i == 0:
+ invalid_branch = branch
+ else:
+ snp_dict['branches'][branch] = {
+ 'target_type': 'revision',
+ 'target': hash_to_bytes(revisions[i-1]),
+ }
+ del snp_dict['branches'][invalid_branch]
+
+ archive_data.snapshot_add([snp_dict])
+ visit = archive_data.origin_visit_add(
+ new_origin['url'], visit_dates[0], type='git')
+ archive_data.origin_visit_update(new_origin['url'], visit['visit'],
+ status='full',
+ snapshot=snp_dict['id'])
+
+ url = reverse('browse-origin-directory',
+ url_args={'origin_url': new_origin['url']},
+ query_params={'branch': invalid_branch})
+ rv = client.get(url)
+ assert rv.status_code == 404
+
+
+def test_origin_request_errors(client, archive_data, mocker):
+ mock_snapshot_service = mocker.patch(
+ 'swh.web.browse.views.utils.snapshot_context.service')
+ mock_origin_service = mocker.patch('swh.web.browse.views.origin.service')
+ mock_utils_service = mocker.patch('swh.web.browse.utils.service')
+ mock_get_origin_visit_snapshot = mocker.patch(
+ 'swh.web.browse.utils.get_origin_visit_snapshot')
+ mock_get_origin_visits = mocker.patch(
+ 'swh.web.common.origin_visits.get_origin_visits')
+ mock_request_content = mocker.patch(
+ 'swh.web.browse.views.utils.snapshot_context.request_content')
+ mock_origin_service.lookup_origin.side_effect = NotFoundExc(
+ 'origin not found')
+ url = reverse('browse-origin-visits',
+ url_args={'origin_url': 'bar'})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp, 'origin not found', status_code=404)
+
+ mock_origin_service.lookup_origin.side_effect = None
+ mock_origin_service.lookup_origin.return_value = {'type': 'foo',
+ 'url': 'bar',
+ 'id': 457}
+ mock_get_origin_visits.return_value = []
+ url = reverse('browse-origin-directory',
+ url_args={'origin_url': 'bar'})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp, "No visit", status_code=404)
+
+ mock_get_origin_visits.return_value = [{'visit': 1}]
+ mock_get_origin_visit_snapshot.side_effect = NotFoundExc('visit not found')
+ url = reverse('browse-origin-directory',
+ url_args={'origin_url': 'bar'},
+ query_params={'visit_id': 2})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert re.search('Visit.*not found', resp.content.decode('utf-8'))
+
+ mock_get_origin_visits.return_value = [{
+ 'date': '2015-09-26T09:30:52.373449+00:00',
+ 'metadata': {},
+ 'origin': 457,
+ 'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
+ 'status': 'full',
+ 'visit': 1
+ }]
+ mock_get_origin_visit_snapshot.side_effect = None
+ mock_get_origin_visit_snapshot.return_value = (
+ [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
+ 'name': 'HEAD',
+ 'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
+ 'date': '04 May 2017, 13:27 UTC',
+ 'message': ''}],
+ []
+ )
+ mock_utils_service.lookup_snapshot_size.return_value = {
+ 'revision': 1,
+ 'release': 0
+ }
+ mock_lookup_directory = mock_utils_service.lookup_directory
+ mock_lookup_directory.side_effect = NotFoundExc('Directory not found')
+ url = reverse('browse-origin-directory',
+ url_args={'origin_url': 'bar'})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp, 'Directory not found', status_code=404)
+
+ mock_origin_service.lookup_origin.side_effect = None
+ mock_origin_service.lookup_origin.return_value = {'type': 'foo',
+ 'url': 'bar',
+ 'id': 457}
+ mock_get_origin_visits.return_value = []
+ url = reverse('browse-origin-content',
+ url_args={'origin_url': 'bar',
+ 'path': 'foo'})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp, "No visit", status_code=404)
+
+ mock_get_origin_visits.return_value = [{'visit': 1}]
+ mock_get_origin_visit_snapshot.side_effect = NotFoundExc('visit not found')
+ url = reverse('browse-origin-content',
+ url_args={'origin_url': 'bar',
+ 'path': 'foo'},
+ query_params={'visit_id': 2})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert re.search('Visit.*not found', resp.content.decode('utf-8'))
+
+ mock_get_origin_visits.return_value = [{
+ 'date': '2015-09-26T09:30:52.373449+00:00',
+ 'metadata': {},
+ 'origin': 457,
+ 'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
+ 'status': 'full',
+ 'visit': 1
+ }]
+ mock_get_origin_visit_snapshot.side_effect = None
+ mock_get_origin_visit_snapshot.return_value = ([], [])
+ url = reverse('browse-origin-content',
+ url_args={'origin_url': 'bar',
+ 'path': 'baz'})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert re.search('Origin.*has an empty list of branches',
+ resp.content.decode('utf-8'))
+
+ mock_get_origin_visit_snapshot.return_value = (
+ [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
+ 'name': 'HEAD',
+ 'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
+ 'date': '04 May 2017, 13:27 UTC',
+ 'message': ''}],
+ []
+ )
+ mock_snapshot_service.lookup_directory_with_path.return_value = {
+ 'target': '5ecd9f37b7a2d2e9980d201acd6286116f2ba1f1'
+ }
+ mock_request_content.side_effect = NotFoundExc('Content not found')
+ url = reverse('browse-origin-content',
+ url_args={'origin_url': 'bar',
+ 'path': 'baz'})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp, 'Content not found', status_code=404)
+
+ mock_get_snapshot_context = mocker.patch(
+ 'swh.web.browse.views.utils.snapshot_context.get_snapshot_context')
+
+ mock_get_snapshot_context.side_effect = NotFoundExc('Snapshot not found')
+ url = reverse('browse-origin-directory',
+ url_args={'origin_url': 'bar'})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp, 'Snapshot not found', status_code=404)
+
+
+def test_origin_empty_snapshot(client, mocker):
+ mock_utils_service = mocker.patch('swh.web.browse.utils.service')
+ mock_get_origin_visit_snapshot = mocker.patch(
+ 'swh.web.browse.utils.get_origin_visit_snapshot')
+ mock_get_origin_visits = mocker.patch(
+ 'swh.web.common.origin_visits.get_origin_visits')
+ mock_get_origin_visits.return_value = [{
+ 'date': '2015-09-26T09:30:52.373449+00:00',
+ 'metadata': {},
+ 'origin': 457,
+ 'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
+ 'status': 'full',
+ 'type': 'git',
+ 'visit': 1
+ }]
+ mock_get_origin_visit_snapshot.return_value = ([], [])
+ mock_utils_service.lookup_snapshot_size.return_value = {
+ 'revision': 0,
+ 'release': 0
+ }
+ mock_utils_service.lookup_origin.return_value = {
+ 'id': 457,
+ 'url': 'https://github.com/foo/bar'
+ }
+ url = reverse('browse-origin-directory',
+ url_args={'origin_url': 'bar'})
+ resp = client.get(url)
+ assert resp.status_code == 200
+ assert_template_used('content.html')
+ assert re.search('snapshot.*is empty', resp.content.decode('utf-8'))
+
+
+def _origin_content_view_test_helper(client, origin_info, origin_visits,
origin_branches, origin_releases,
- root_directory_sha1, directory_entries,
- visit_id=None, timestamp=None, path=None):
+ root_dir_sha1, content,
+ visit_id=None, timestamp=None):
+ content_path = '/'.join(content['path'].split('/')[1:])
- dirs = [e for e in directory_entries
- if e['type'] in ('dir', 'rev')]
- files = [e for e in directory_entries
- if e['type'] == 'file']
+ url_args = {'origin_url': origin_info['url'],
+ 'path': content_path}
- if not visit_id:
- visit_id = origin_visits[-1]['visit']
+ if not visit_id:
+ visit_id = origin_visits[-1]['visit']
- url_args = {'origin_url': origin_info['url']}
+ query_params = {}
- query_params = {}
+ if timestamp:
+ url_args['timestamp'] = timestamp
- if timestamp:
- url_args['timestamp'] = timestamp
- else:
- query_params['visit_id'] = visit_id
+ if visit_id:
+ query_params['visit_id'] = visit_id
- if path:
- url_args['path'] = path
-
- url = reverse('browse-origin-directory',
- url_args=url_args,
- query_params=query_params)
-
- resp = self.client.get(url)
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('directory.html')
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('directory.html')
-
- self.assertContains(resp, '<td class="swh-directory">',
- count=len(dirs))
- self.assertContains(resp, '<td class="swh-content">',
- count=len(files))
-
- if timestamp:
- url_args['timestamp'] = \
- format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
- '%Y-%m-%dT%H:%M:%S')
-
- for d in dirs:
- if d['type'] == 'rev':
- dir_url = reverse('browse-revision',
- url_args={'sha1_git': d['target']})
- else:
- dir_path = d['name']
- if path:
- dir_path = "%s/%s" % (path, d['name'])
- dir_url_args = dict(url_args)
- dir_url_args['path'] = dir_path
- dir_url = reverse('browse-origin-directory',
- url_args=dir_url_args,
+ url = reverse('browse-origin-content',
+ url_args=url_args,
+ query_params=query_params)
+
+ resp = client.get(url)
+
+ assert resp.status_code == 200
+ assert_template_used('content.html')
+
+ assert_contains(resp, '<code class="%s">' %
+ content['hljs_language'])
+ assert_contains(resp, escape(content['data']))
+
+ split_path = content_path.split('/')
+
+ filename = split_path[-1]
+ path = content_path.replace(filename, '')[:-1]
+
+ path_info = gen_path_info(path)
+
+ del url_args['path']
+
+ if timestamp:
+ url_args['timestamp'] = format_utc_iso_date(
+ parse_timestamp(timestamp).isoformat(), '%Y-%m-%dT%H:%M:%S')
+
+ root_dir_url = reverse('browse-origin-directory',
+ url_args=url_args,
+ query_params=query_params)
+
+ assert_contains(resp, '<li class="swh-path">',
+ count=len(path_info)+1)
+
+ assert_contains(resp, '<a href="%s">%s</a>' %
+ (root_dir_url, root_dir_sha1[:7]))
+
+ for p in path_info:
+ url_args['path'] = p['path']
+ dir_url = reverse('browse-origin-directory',
+ url_args=url_args,
+ query_params=query_params)
+ assert_contains(resp, '<a href="%s">%s</a>' %
+ (dir_url, p['name']))
+
+ assert_contains(resp, '<li>%s</li>' % filename)
+
+ query_string = 'sha1_git:' + content['sha1_git']
+
+ url_raw = reverse('browse-content-raw',
+ url_args={'query_string': query_string},
+ query_params={'filename': filename})
+ assert_contains(resp, url_raw)
+
+ if 'args' in url_args:
+ del url_args['path']
+
+ origin_branches_url = reverse('browse-origin-branches',
+ url_args=url_args,
query_params=query_params)
- self.assertContains(resp, dir_url)
- for f in files:
- file_path = f['name']
- if path:
- file_path = "%s/%s" % (path, f['name'])
- file_url_args = dict(url_args)
- file_url_args['path'] = file_path
- file_url = reverse('browse-origin-content',
- url_args=file_url_args,
- query_params=query_params)
- self.assertContains(resp, file_url)
-
- if 'path' in url_args:
- del url_args['path']
-
- root_dir_branch_url = \
- reverse('browse-origin-directory',
- url_args=url_args,
- query_params=query_params)
-
- nb_bc_paths = 1
- if path:
- nb_bc_paths = len(path.split('/')) + 1
+ assert_contains(resp, '<a href="%s">Branches (%s)</a>' %
+ (origin_branches_url, len(origin_branches)))
+
+ origin_releases_url = reverse('browse-origin-releases',
+ url_args=url_args,
+ query_params=query_params)
- self.assertContains(resp, '<li class="swh-path">', count=nb_bc_paths)
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (root_dir_branch_url,
- root_directory_sha1[:7]))
+ assert_contains(resp, '<a href="%s">Releases (%s)</a>' %
+ (origin_releases_url, len(origin_releases)))
- origin_branches_url = reverse('browse-origin-branches',
+ assert_contains(resp, '<li class="swh-branch">',
+ count=len(origin_branches))
+
+ url_args['path'] = content_path
+
+ for branch in origin_branches:
+ query_params['branch'] = branch['name']
+ root_dir_branch_url = reverse('browse-origin-content',
url_args=url_args,
query_params=query_params)
- self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
- (origin_branches_url, len(origin_branches)))
+ assert_contains(resp, '<a href="%s">' % root_dir_branch_url)
+
+ assert_contains(resp, '<li class="swh-release">',
+ count=len(origin_releases))
+
+ query_params['branch'] = None
+ for release in origin_releases:
+ query_params['release'] = release['name']
+ root_dir_release_url = reverse('browse-origin-content',
+ url_args=url_args,
+ query_params=query_params)
+
+ assert_contains(resp, '<a href="%s">' % root_dir_release_url)
+
+ url = reverse('browse-origin-content',
+ url_args=url_args,
+ query_params=query_params)
+
+ resp = client.get(url)
+ assert resp.status_code == 200
+ assert_template_used('content.html')
+
+ swh_cnt_id = get_swh_persistent_id('content', content['sha1_git'])
+ swh_cnt_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_cnt_id})
+ assert_contains(resp, swh_cnt_id)
+ assert_contains(resp, swh_cnt_id_url)
+
+ assert_contains(resp, 'swh-take-new-snapshot')
+
+
+def _origin_directory_view_test_helper(client, origin_info, origin_visits,
+ origin_branches, origin_releases,
+ root_directory_sha1, directory_entries,
+ visit_id=None, timestamp=None,
+ path=None):
+ dirs = [e for e in directory_entries
+ if e['type'] in ('dir', 'rev')]
+ files = [e for e in directory_entries
+ if e['type'] == 'file']
+
+ if not visit_id:
+ visit_id = origin_visits[-1]['visit']
+
+ url_args = {'origin_url': origin_info['url']}
- origin_releases_url = reverse('browse-origin-releases',
+ query_params = {}
+
+ if timestamp:
+ url_args['timestamp'] = timestamp
+ else:
+ query_params['visit_id'] = visit_id
+
+ if path:
+ url_args['path'] = path
+
+ url = reverse('browse-origin-directory',
+ url_args=url_args,
+ query_params=query_params)
+
+ resp = client.get(url)
+
+ assert resp.status_code == 200
+ assert_template_used('directory.html')
+
+ assert resp.status_code == 200
+ assert_template_used('directory.html')
+
+ assert_contains(resp, '<td class="swh-directory">',
+ count=len(dirs))
+ assert_contains(resp, '<td class="swh-content">',
+ count=len(files))
+
+ if timestamp:
+ url_args['timestamp'] = format_utc_iso_date(
+ parse_timestamp(timestamp).isoformat(), '%Y-%m-%dT%H:%M:%S')
+
+ for d in dirs:
+ if d['type'] == 'rev':
+ dir_url = reverse('browse-revision',
+ url_args={'sha1_git': d['target']})
+ else:
+ dir_path = d['name']
+ if path:
+ dir_path = "%s/%s" % (path, d['name'])
+ dir_url_args = dict(url_args)
+ dir_url_args['path'] = dir_path
+ dir_url = reverse('browse-origin-directory',
+ url_args=dir_url_args,
+ query_params=query_params)
+ assert_contains(resp, dir_url)
+
+ for f in files:
+ file_path = f['name']
+ if path:
+ file_path = "%s/%s" % (path, f['name'])
+ file_url_args = dict(url_args)
+ file_url_args['path'] = file_path
+ file_url = reverse('browse-origin-content',
+ url_args=file_url_args,
+ query_params=query_params)
+ assert_contains(resp, file_url)
+
+ if 'path' in url_args:
+ del url_args['path']
+
+ root_dir_branch_url = reverse('browse-origin-directory',
+ url_args=url_args,
+ query_params=query_params)
+
+ nb_bc_paths = 1
+ if path:
+ nb_bc_paths = len(path.split('/')) + 1
+
+ assert_contains(resp, '<li class="swh-path">', count=nb_bc_paths)
+ assert_contains(resp, '<a href="%s">%s</a>' %
+ (root_dir_branch_url,
+ root_directory_sha1[:7]))
+
+ origin_branches_url = reverse('browse-origin-branches',
+ url_args=url_args,
+ query_params=query_params)
+
+ assert_contains(resp, '<a href="%s">Branches (%s)</a>' %
+ (origin_branches_url, len(origin_branches)))
+
+ origin_releases_url = reverse('browse-origin-releases',
+ url_args=url_args,
+ query_params=query_params)
+
+ nb_releases = len(origin_releases)
+ if nb_releases > 0:
+ assert_contains(resp, '<a href="%s">Releases (%s)</a>' %
+ (origin_releases_url, nb_releases))
+
+ if path:
+ url_args['path'] = path
+
+ assert_contains(resp, '<li class="swh-branch">',
+ count=len(origin_branches))
+
+ for branch in origin_branches:
+ query_params['branch'] = branch['name']
+ root_dir_branch_url = reverse('browse-origin-directory',
url_args=url_args,
query_params=query_params)
- nb_releases = len(origin_releases)
- if nb_releases > 0:
- self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
- (origin_releases_url, nb_releases))
+ assert_contains(resp, '<a href="%s">' % root_dir_branch_url)
- if path:
- url_args['path'] = path
+ assert_contains(resp, '<li class="swh-release">',
+ count=len(origin_releases))
- self.assertContains(resp, '<li class="swh-branch">',
- count=len(origin_branches))
+ query_params['branch'] = None
+ for release in origin_releases:
+ query_params['release'] = release['name']
+ root_dir_release_url = reverse('browse-origin-directory',
+ url_args=url_args,
+ query_params=query_params)
- for branch in origin_branches:
- query_params['branch'] = branch['name']
- root_dir_branch_url = \
- reverse('browse-origin-directory',
- url_args=url_args,
- query_params=query_params)
+ assert_contains(resp, '<a href="%s">' % root_dir_release_url)
- self.assertContains(resp, '<a href="%s">' % root_dir_branch_url)
+ assert_contains(resp, 'vault-cook-directory')
+ assert_contains(resp, 'vault-cook-revision')
- self.assertContains(resp, '<li class="swh-release">',
- count=len(origin_releases))
+ swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
+ swh_dir_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_dir_id})
+ assert_contains(resp, swh_dir_id)
+ assert_contains(resp, swh_dir_id_url)
- query_params['branch'] = None
- for release in origin_releases:
- query_params['release'] = release['name']
- root_dir_release_url = \
- reverse('browse-origin-directory',
- url_args=url_args,
- query_params=query_params)
+ assert_contains(resp, 'swh-take-new-snapshot')
- self.assertContains(resp, '<a href="%s">' % root_dir_release_url)
- self.assertContains(resp, 'vault-cook-directory')
- self.assertContains(resp, 'vault-cook-revision')
+def _origin_branches_test_helper(client, origin_info, origin_snapshot):
+ url_args = {'origin_url': origin_info['url']}
- swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
- swh_dir_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_dir_id})
- self.assertContains(resp, swh_dir_id)
- self.assertContains(resp, swh_dir_id_url)
+ url = reverse('browse-origin-branches',
+ url_args=url_args)
- self.assertContains(resp, 'swh-take-new-snapshot')
+ resp = client.get(url)
- @given(origin())
- def test_origin_root_directory_view(self, origin):
+ assert resp.status_code == 200
+ assert_template_used('branches.html')
- origin_visits = self.origin_visit_get(origin['url'])
+ origin_branches = origin_snapshot[0]
+ origin_releases = origin_snapshot[1]
- visit = origin_visits[-1]
- snapshot = self.snapshot_get(visit['snapshot'])
- head_rev_id = self.snapshot_get_head(snapshot)
- head_rev = self.revision_get(head_rev_id)
- root_dir_sha1 = head_rev['directory']
- dir_content = self.directory_ls(root_dir_sha1)
- branches, releases = process_snapshot_branches(snapshot)
- visit_unix_ts = parse_timestamp(visit['date']).timestamp()
- visit_unix_ts = int(visit_unix_ts)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content,
- visit_id=visit['visit'])
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content,
- timestamp=visit_unix_ts)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content,
- timestamp=visit['date'])
-
- origin = dict(origin)
- del origin['type']
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content,
- visit_id=visit['visit'])
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content,
- timestamp=visit_unix_ts)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- dir_content,
- timestamp=visit['date'])
-
- @given(origin())
- def test_origin_sub_directory_view(self, origin):
-
- origin_visits = self.origin_visit_get(origin['url'])
-
- visit = origin_visits[-1]
- snapshot = self.snapshot_get(visit['snapshot'])
- head_rev_id = self.snapshot_get_head(snapshot)
- head_rev = self.revision_get(head_rev_id)
- root_dir_sha1 = head_rev['directory']
- subdirs = [e for e in self.directory_ls(root_dir_sha1)
- if e['type'] == 'dir']
- branches, releases = process_snapshot_branches(snapshot)
- visit_unix_ts = parse_timestamp(visit['date']).timestamp()
- visit_unix_ts = int(visit_unix_ts)
-
- if len(subdirs) == 0:
- return
-
- subdir = random.choice(subdirs)
- subdir_content = self.directory_ls(subdir['target'])
- subdir_path = subdir['name']
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path,
- visit_id=visit['visit'])
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path,
- timestamp=visit_unix_ts)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path,
- timestamp=visit['date'])
-
- origin = dict(origin)
- del origin['type']
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path,
- visit_id=visit['visit'])
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path,
- timestamp=visit_unix_ts)
-
- self.origin_directory_view_helper(origin, origin_visits,
- branches,
- releases,
- root_dir_sha1,
- subdir_content,
- path=subdir_path,
- timestamp=visit['date'])
-
- def origin_branches_helper(self, origin_info, origin_snapshot):
- url_args = {'origin_url': origin_info['url']}
-
- url = reverse('browse-origin-branches',
- url_args=url_args)
-
- resp = self.client.get(url)
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('branches.html')
-
- origin_branches = origin_snapshot[0]
- origin_releases = origin_snapshot[1]
-
- origin_branches_url = reverse('browse-origin-branches',
- url_args=url_args)
-
- self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
- (origin_branches_url, len(origin_branches)))
-
- origin_releases_url = reverse('browse-origin-releases',
- url_args=url_args)
-
- nb_releases = len(origin_releases)
- if nb_releases > 0:
- self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
- (origin_releases_url, nb_releases))
-
- self.assertContains(resp, '<tr class="swh-branch-entry',
- count=len(origin_branches))
-
- for branch in origin_branches:
- browse_branch_url = reverse(
- 'browse-origin-directory',
- url_args={'origin_url': origin_info['url']},
- query_params={'branch': branch['name']})
- self.assertContains(resp, '<a href="%s">' %
- escape(browse_branch_url))
-
- browse_revision_url = reverse(
- 'browse-revision',
- url_args={'sha1_git': branch['revision']},
- query_params={'origin': origin_info['url']})
- self.assertContains(resp, '<a href="%s">' %
- escape(browse_revision_url))
-
- @given(origin())
- def test_origin_branches(self, origin):
-
- origin_visits = self.origin_visit_get(origin['url'])
-
- visit = origin_visits[-1]
- snapshot = self.snapshot_get(visit['snapshot'])
- snapshot_content = process_snapshot_branches(snapshot)
-
- self.origin_branches_helper(origin, snapshot_content)
-
- origin = dict(origin)
- origin['type'] = None
-
- self.origin_branches_helper(origin, snapshot_content)
-
- def origin_releases_helper(self, origin_info, origin_snapshot):
- url_args = {'origin_url': origin_info['url']}
-
- url = reverse('browse-origin-releases',
- url_args=url_args)
-
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('releases.html')
-
- origin_branches = origin_snapshot[0]
- origin_releases = origin_snapshot[1]
-
- origin_branches_url = reverse('browse-origin-branches',
- url_args=url_args)
-
- self.assertContains(resp, '<a href="%s">Branches (%s)</a>' %
- (origin_branches_url, len(origin_branches)))
-
- origin_releases_url = reverse('browse-origin-releases',
- url_args=url_args)
-
- nb_releases = len(origin_releases)
- if nb_releases > 0:
- self.assertContains(resp, '<a href="%s">Releases (%s)</a>' %
- (origin_releases_url, nb_releases))
-
- self.assertContains(resp, '<tr class="swh-release-entry',
- count=nb_releases)
-
- for release in origin_releases:
- browse_release_url = reverse(
- 'browse-release',
- url_args={'sha1_git': release['id']},
- query_params={'origin': origin_info['url']})
- browse_revision_url = reverse(
- 'browse-revision',
- url_args={'sha1_git': release['target']},
- query_params={'origin': origin_info['url']})
-
- self.assertContains(resp, '<a href="%s">' %
- escape(browse_release_url))
- self.assertContains(resp, '<a href="%s">' %
- escape(browse_revision_url))
-
- @given(origin())
- def test_origin_releases(self, origin):
-
- origin_visits = self.origin_visit_get(origin['url'])
-
- visit = origin_visits[-1]
- snapshot = self.snapshot_get(visit['snapshot'])
- snapshot_content = process_snapshot_branches(snapshot)
-
- self.origin_releases_helper(origin, snapshot_content)
-
- origin = dict(origin)
- origin['type'] = None
-
- self.origin_releases_helper(origin, snapshot_content)
-
- @given(new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(),
- revisions(min_size=3, max_size=3))
- def test_origin_snapshot_null_branch(self, new_origin, new_snapshot,
- visit_dates, revisions):
- snp_dict = new_snapshot.to_dict()
- new_origin = self.storage.origin_add([new_origin])[0]
- for i, branch in enumerate(snp_dict['branches'].keys()):
- if i == 0:
- snp_dict['branches'][branch] = None
- else:
- snp_dict['branches'][branch] = {
- 'target_type': 'revision',
- 'target': hash_to_bytes(revisions[i-1]),
- }
-
- self.storage.snapshot_add([snp_dict])
- visit = self.storage.origin_visit_add(
- new_origin['url'], visit_dates[0], type='git')
- self.storage.origin_visit_update(new_origin['url'], visit['visit'],
- status='partial',
- snapshot=snp_dict['id'])
-
- url = reverse('browse-origin-directory',
- url_args={'origin_url': new_origin['url']})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 200)
-
- @given(new_origin(), new_snapshot(min_size=4, max_size=4), visit_dates(),
- revisions(min_size=3, max_size=3))
- def test_origin_snapshot_invalid_branch(self, new_origin, new_snapshot,
- visit_dates, revisions):
- snp_dict = new_snapshot.to_dict()
- new_origin = self.storage.origin_add([new_origin])[0]
- for i, branch in enumerate(snp_dict['branches'].keys()):
- if i == 0:
- invalid_branch = branch
- else:
- snp_dict['branches'][branch] = {
- 'target_type': 'revision',
- 'target': hash_to_bytes(revisions[i-1]),
- }
- del snp_dict['branches'][invalid_branch]
-
- self.storage.snapshot_add([snp_dict])
- visit = self.storage.origin_visit_add(
- new_origin['url'], visit_dates[0], type='git')
- self.storage.origin_visit_update(new_origin['url'], visit['visit'],
- status='full',
- snapshot=snp_dict['id'])
-
- url = reverse('browse-origin-directory',
- url_args={'origin_url': new_origin['url']},
- query_params={'branch': invalid_branch})
- rv = self.client.get(url)
- self.assertEqual(rv.status_code, 404)
-
- @patch('swh.web.browse.views.utils.snapshot_context.request_content')
- @patch('swh.web.common.origin_visits.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_snapshot')
- @patch('swh.web.browse.utils.service')
- @patch('swh.web.browse.views.origin.service')
- @patch('swh.web.browse.views.utils.snapshot_context.service')
- def test_origin_request_errors(self,
- mock_snapshot_service,
- mock_origin_service,
- mock_utils_service,
- mock_get_origin_visit_snapshot,
- mock_get_origin_visits,
- mock_request_content):
-
- mock_origin_service.lookup_origin.side_effect = NotFoundExc(
- 'origin not found')
- url = reverse('browse-origin-visits',
- url_args={'origin_url': 'bar'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp, 'origin not found', status_code=404)
-
- mock_origin_service.lookup_origin.side_effect = None
- mock_origin_service.lookup_origin.return_value = {'type': 'foo',
- 'url': 'bar',
- 'id': 457}
- mock_get_origin_visits.return_value = []
- url = reverse('browse-origin-directory',
- url_args={'origin_url': 'bar'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp, "No visit", status_code=404)
-
- mock_get_origin_visits.return_value = [{'visit': 1}]
- mock_get_origin_visit_snapshot.side_effect = \
- NotFoundExc('visit not found')
- url = reverse('browse-origin-directory',
- url_args={'origin_url': 'bar'},
- query_params={'visit_id': 2})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
-
- mock_get_origin_visits.return_value = [{
- 'date': '2015-09-26T09:30:52.373449+00:00',
- 'metadata': {},
- 'origin': 457,
- 'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
- 'status': 'full',
- 'visit': 1
- }]
- mock_get_origin_visit_snapshot.side_effect = None
- mock_get_origin_visit_snapshot.return_value = (
- [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
- 'name': 'HEAD',
- 'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
- 'date': '04 May 2017, 13:27 UTC',
- 'message': ''}],
- []
- )
- mock_utils_service.lookup_snapshot_size.return_value = {
- 'revision': 1,
- 'release': 0
- }
- mock_utils_service.lookup_directory.side_effect = \
- NotFoundExc('Directory not found')
- url = reverse('browse-origin-directory',
- url_args={'origin_url': 'bar'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp, 'Directory not found', status_code=404)
-
- with patch('swh.web.browse.views.utils.snapshot_context.'
- 'get_snapshot_context') as mock_get_snapshot_context:
- mock_get_snapshot_context.side_effect = \
- NotFoundExc('Snapshot not found')
- url = reverse('browse-origin-directory',
- url_args={'origin_url': 'bar'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp, 'Snapshot not found', status_code=404)
-
- mock_origin_service.lookup_origin.side_effect = None
- mock_origin_service.lookup_origin.return_value = {'type': 'foo',
- 'url': 'bar',
- 'id': 457}
- mock_get_origin_visits.return_value = []
- url = reverse('browse-origin-content',
- url_args={'origin_url': 'bar',
- 'path': 'foo'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp, "No visit", status_code=404)
-
- mock_get_origin_visits.return_value = [{'visit': 1}]
- mock_get_origin_visit_snapshot.side_effect = \
- NotFoundExc('visit not found')
- url = reverse('browse-origin-content',
- url_args={'origin_url': 'bar',
- 'path': 'foo'},
- query_params={'visit_id': 2})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
-
- mock_get_origin_visits.return_value = [{
- 'date': '2015-09-26T09:30:52.373449+00:00',
- 'metadata': {},
- 'origin': 457,
- 'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
- 'status': 'full',
- 'visit': 1
- }]
- mock_get_origin_visit_snapshot.side_effect = None
- mock_get_origin_visit_snapshot.return_value = ([], [])
- url = reverse('browse-origin-content',
- url_args={'origin_url': 'bar',
- 'path': 'baz'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertRegex(resp.content.decode('utf-8'),
- 'Origin.*has an empty list of branches')
-
- mock_get_origin_visit_snapshot.return_value = (
- [{'directory': 'ae59ceecf46367e8e4ad800e231fc76adc3afffb',
- 'name': 'HEAD',
- 'revision': '7bc08e1aa0b08cb23e18715a32aa38517ad34672',
- 'date': '04 May 2017, 13:27 UTC',
- 'message': ''}],
- []
- )
- mock_snapshot_service.lookup_directory_with_path.return_value = \
- {'target': '5ecd9f37b7a2d2e9980d201acd6286116f2ba1f1'}
- mock_request_content.side_effect = \
- NotFoundExc('Content not found')
- url = reverse('browse-origin-content',
- url_args={'origin_url': 'bar',
- 'path': 'baz'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp, 'Content not found', status_code=404)
-
- @patch('swh.web.common.origin_visits.get_origin_visits')
- @patch('swh.web.browse.utils.get_origin_visit_snapshot')
- @patch('swh.web.browse.utils.service')
- def test_origin_empty_snapshot(self, mock_utils_service,
- mock_get_origin_visit_snapshot,
- mock_get_origin_visits):
-
- mock_get_origin_visits.return_value = [{
- 'date': '2015-09-26T09:30:52.373449+00:00',
- 'metadata': {},
- 'origin': 457,
- 'snapshot': 'bdaf9ac436488a8c6cda927a0f44e172934d3f65',
- 'status': 'full',
- 'type': 'git',
- 'visit': 1
- }]
- mock_get_origin_visit_snapshot.return_value = ([], [])
- mock_utils_service.lookup_snapshot_size.return_value = {
- 'revision': 0,
- 'release': 0
- }
- mock_utils_service.lookup_origin.return_value = {
- 'id': 457,
- 'url': 'https://github.com/foo/bar'
- }
- url = reverse('browse-origin-directory',
- url_args={'origin_url': 'bar'})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('content.html')
- self.assertRegex(resp.content.decode('utf-8'), 'snapshot.*is empty')
+ origin_branches_url = reverse('browse-origin-branches',
+ url_args=url_args)
+
+ assert_contains(resp, '<a href="%s">Branches (%s)</a>' %
+ (origin_branches_url, len(origin_branches)))
+
+ origin_releases_url = reverse('browse-origin-releases',
+ url_args=url_args)
+
+ nb_releases = len(origin_releases)
+ if nb_releases > 0:
+ assert_contains(resp, '<a href="%s">Releases (%s)</a>' %
+ (origin_releases_url, nb_releases))
+
+ assert_contains(resp, '<tr class="swh-branch-entry',
+ count=len(origin_branches))
+
+ for branch in origin_branches:
+ browse_branch_url = reverse(
+ 'browse-origin-directory',
+ url_args={'origin_url': origin_info['url']},
+ query_params={'branch': branch['name']})
+ assert_contains(resp, '<a href="%s">' %
+ escape(browse_branch_url))
+
+ browse_revision_url = reverse(
+ 'browse-revision',
+ url_args={'sha1_git': branch['revision']},
+ query_params={'origin': origin_info['url']})
+ assert_contains(resp, '<a href="%s">' %
+ escape(browse_revision_url))
+
+
+def _origin_releases_test_helper(client, origin_info, origin_snapshot):
+ url_args = {'origin_url': origin_info['url']}
+
+ url = reverse('browse-origin-releases',
+ url_args=url_args)
+
+ resp = client.get(url)
+ assert resp.status_code == 200
+ assert_template_used('releases.html')
+
+ origin_branches = origin_snapshot[0]
+ origin_releases = origin_snapshot[1]
+
+ origin_branches_url = reverse('browse-origin-branches',
+ url_args=url_args)
+
+ assert_contains(resp, '<a href="%s">Branches (%s)</a>' %
+ (origin_branches_url, len(origin_branches)))
+
+ origin_releases_url = reverse('browse-origin-releases',
+ url_args=url_args)
+
+ nb_releases = len(origin_releases)
+ if nb_releases > 0:
+ assert_contains(resp, '<a href="%s">Releases (%s)</a>' %
+ (origin_releases_url, nb_releases))
+
+ assert_contains(resp, '<tr class="swh-release-entry',
+ count=nb_releases)
+
+ for release in origin_releases:
+ browse_release_url = reverse(
+ 'browse-release',
+ url_args={'sha1_git': release['id']},
+ query_params={'origin': origin_info['url']})
+ browse_revision_url = reverse(
+ 'browse-revision',
+ url_args={'sha1_git': release['target']},
+ query_params={'origin': origin_info['url']})
+
+ assert_contains(resp, '<a href="%s">' %
+ escape(browse_release_url))
+ assert_contains(resp, '<a href="%s">' %
+ escape(browse_revision_url))
diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py
--- a/swh/web/tests/browse/views/test_release.py
+++ b/swh/web/tests/browse/views/test_release.py
@@ -10,99 +10,95 @@
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id
)
+from swh.web.tests.django_asserts import assert_contains, assert_template_used
from swh.web.tests.strategies import (
release, origin_with_release, unknown_release
)
-from swh.web.tests.testcase import WebTestCase
-class SwhBrowseReleaseTest(WebTestCase):
+@given(release())
+def test_release_browse(client, archive_data, release):
+ url = reverse('browse-release',
+ url_args={'sha1_git': release})
- @given(release())
- def test_release_browse(self, release):
+ release_data = archive_data.release_get(release)
- url = reverse('browse-release',
- url_args={'sha1_git': release})
+ resp = client.get(url)
- release_data = self.release_get(release)
+ _release_browse_checks(resp, release_data)
- resp = self.client.get(url)
- self._release_browse_checks(resp, release_data)
+@given(origin_with_release())
+def test_release_browse_with_origin(client, archive_data, origin):
+ snapshot = archive_data.snapshot_get_latest(origin['url'])
+ release = random.choice([b for b in snapshot['branches'].values()
+ if b['target_type'] == 'release'])
+ url = reverse('browse-release',
+ url_args={'sha1_git': release['target']},
+ query_params={'origin': origin['url']})
- @given(origin_with_release())
- def test_release_browse_with_origin(self, origin):
- snapshot = self.snapshot_get_latest(origin['url'])
- release = random.choice([b for b in snapshot['branches'].values()
- if b['target_type'] == 'release'])
- url = reverse('browse-release',
- url_args={'sha1_git': release['target']},
- query_params={'origin': origin['url']})
+ release_data = archive_data.release_get(release['target'])
- release_data = self.release_get(release['target'])
+ resp = client.get(url)
- resp = self.client.get(url)
+ _release_browse_checks(resp, release_data, origin)
- self._release_browse_checks(resp, release_data, origin)
- @given(unknown_release())
- def test_release_browse_not_found(self, unknown_release):
- url = reverse('browse-release',
- url_args={'sha1_git': unknown_release})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- err_msg = 'Release with sha1_git %s not found' % unknown_release
- self.assertContains(resp, err_msg, status_code=404)
+@given(unknown_release())
+def test_release_browse_not_found(client, archive_data, unknown_release):
+ url = reverse('browse-release',
+ url_args={'sha1_git': unknown_release})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ err_msg = 'Release with sha1_git %s not found' % unknown_release
+ assert_contains(resp, err_msg, status_code=404)
- def _release_browse_checks(self, resp, release_data, origin_info=None):
- query_params = {}
- if origin_info:
- query_params['origin'] = origin_info['url']
+@given(release())
+def test_release_uppercase(client, release):
+ url = reverse('browse-release-uppercase-checksum',
+ url_args={'sha1_git': release.upper()})
- release_id = release_data['id']
- release_name = release_data['name']
- author_name = release_data['author']['name']
+ resp = client.get(url)
+ assert resp.status_code == 302
- release_date = release_data['date']
- message = release_data['message']
- target_type = release_data['target_type']
- target = release_data['target']
+ redirect_url = reverse('browse-release',
+ url_args={'sha1_git': release})
- target_url = reverse('browse-revision',
- url_args={'sha1_git': target},
- query_params=query_params)
- message_lines = message.split('\n')
+ assert resp['location'] == redirect_url
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/release.html')
- self.assertContains(resp, author_name)
- self.assertContains(resp, format_utc_iso_date(release_date))
- self.assertContains(resp,
- '<h6>%s</h6>%s' % (message_lines[0] or 'None',
- '\n'.join(message_lines[1:])))
- self.assertContains(resp, release_id)
- self.assertContains(resp, release_name)
- self.assertContains(resp, target_type)
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (target_url, target))
- swh_rel_id = get_swh_persistent_id('release', release_id)
- swh_rel_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_rel_id})
- self.assertContains(resp, swh_rel_id)
- self.assertContains(resp, swh_rel_id_url)
+def _release_browse_checks(resp, release_data, origin_info=None):
+ query_params = {}
+ if origin_info:
+ query_params['origin'] = origin_info['url']
- @given(release())
- def test_release_uppercase(self, release):
- url = reverse('browse-release-uppercase-checksum',
- url_args={'sha1_git': release.upper()})
+ release_id = release_data['id']
+ release_name = release_data['name']
+ author_name = release_data['author']['name']
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
+ release_date = release_data['date']
+ message = release_data['message']
+ target_type = release_data['target_type']
+ target = release_data['target']
- redirect_url = reverse('browse-release',
- url_args={'sha1_git': release})
+ target_url = reverse('browse-revision', url_args={'sha1_git': target},
+ query_params=query_params)
+ message_lines = message.split('\n')
- self.assertEqual(resp['location'], redirect_url)
+ assert resp.status_code == 200
+ assert_template_used('browse/release.html')
+ assert_contains(resp, author_name)
+ assert_contains(resp, format_utc_iso_date(release_date))
+ assert_contains(resp, '<h6>%s</h6>%s' % (message_lines[0] or 'None',
+ '\n'.join(message_lines[1:])))
+ assert_contains(resp, release_id)
+ assert_contains(resp, release_name)
+ assert_contains(resp, target_type)
+ assert_contains(resp, '<a href="%s">%s</a>' % (target_url, target))
+
+ swh_rel_id = get_swh_persistent_id('release', release_id)
+ swh_rel_id_url = reverse('browse-swh-id', url_args={'swh_id': swh_rel_id})
+ assert_contains(resp, swh_rel_id)
+ assert_contains(resp, swh_rel_id_url)
diff --git a/swh/web/tests/browse/views/test_revision.py b/swh/web/tests/browse/views/test_revision.py
--- a/swh/web/tests/browse/views/test_revision.py
+++ b/swh/web/tests/browse/views/test_revision.py
@@ -10,237 +10,236 @@
reverse, format_utc_iso_date, get_swh_persistent_id,
parse_timestamp
)
+from swh.web.tests.django_asserts import assert_contains, assert_template_used
from swh.web.tests.strategies import (
origin, revision, unknown_revision, new_origin
)
-from swh.web.tests.testcase import WebTestCase
-class SwhBrowseRevisionTest(WebTestCase):
+@given(revision())
+def test_revision_browse(client, archive_data, revision):
+ url = reverse('browse-revision',
+ url_args={'sha1_git': revision})
- @given(revision())
- def test_revision_browse(self, revision):
+ revision_data = archive_data.revision_get(revision)
- url = reverse('browse-revision',
- url_args={'sha1_git': revision})
+ author_name = revision_data['author']['name']
+ committer_name = revision_data['committer']['name']
+ dir_id = revision_data['directory']
- revision_data = self.revision_get(revision)
+ directory_url = reverse('browse-directory',
+ url_args={'sha1_git': dir_id})
- author_name = revision_data['author']['name']
- committer_name = revision_data['committer']['name']
- dir_id = revision_data['directory']
+ history_url = reverse('browse-revision-log',
+ url_args={'sha1_git': revision})
- directory_url = reverse('browse-directory',
- url_args={'sha1_git': dir_id})
+ resp = client.get(url)
- history_url = reverse('browse-revision-log',
- url_args={'sha1_git': revision})
+ assert resp.status_code == 200
+ assert_template_used('browse/revision.html')
+ assert_contains(resp, author_name)
+ assert_contains(resp, committer_name)
+ assert_contains(resp, directory_url)
+ assert_contains(resp, history_url)
- resp = self.client.get(url)
+ for parent in revision_data['parents']:
+ parent_url = reverse('browse-revision',
+ url_args={'sha1_git': parent})
+ assert_contains(resp, '<a href="%s">%s</a>' %
+ (parent_url, parent))
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/revision.html')
- self.assertContains(resp, author_name)
- self.assertContains(resp, committer_name)
- self.assertContains(resp, directory_url)
- self.assertContains(resp, history_url)
+ author_date = revision_data['date']
+ committer_date = revision_data['committer_date']
- for parent in revision_data['parents']:
- parent_url = reverse('browse-revision',
- url_args={'sha1_git': parent})
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (parent_url, parent))
+ message_lines = revision_data['message'].split('\n')
- author_date = revision_data['date']
- committer_date = revision_data['committer_date']
+ assert_contains(resp, format_utc_iso_date(author_date))
+ assert_contains(resp, format_utc_iso_date(committer_date))
+ assert_contains(resp, escape(message_lines[0]))
+ assert_contains(resp, escape('\n'.join(message_lines[1:])))
- message_lines = revision_data['message'].split('\n')
- self.assertContains(resp, format_utc_iso_date(author_date))
- self.assertContains(resp, format_utc_iso_date(committer_date))
- self.assertContains(resp, escape(message_lines[0]))
- self.assertContains(resp, escape('\n'.join(message_lines[1:])))
+@given(origin())
+def test_revision_origin_browse(client, archive_data, origin):
+ snapshot = archive_data.snapshot_get_latest(origin['url'])
+ revision = archive_data.snapshot_get_head(snapshot)
+ revision_data = archive_data.revision_get(revision)
+ dir_id = revision_data['directory']
- @given(origin())
- def test_revision_origin_browse(self, origin):
+ origin_revision_log_url = reverse('browse-origin-log',
+ url_args={'origin_url': origin['url']}, # noqa
+ query_params={'revision': revision})
- snapshot = self.snapshot_get_latest(origin['url'])
- revision = self.snapshot_get_head(snapshot)
- revision_data = self.revision_get(revision)
- dir_id = revision_data['directory']
+ url = reverse('browse-revision',
+ url_args={'sha1_git': revision},
+ query_params={'origin': origin['url']})
- origin_revision_log_url = reverse('browse-origin-log',
- url_args={'origin_url': origin['url']}, # noqa
- query_params={'revision': revision})
+ resp = client.get(url)
- url = reverse('browse-revision',
- url_args={'sha1_git': revision},
- query_params={'origin': origin['url']})
+ assert_contains(resp, origin_revision_log_url)
- resp = self.client.get(url)
+ for parent in revision_data['parents']:
+ parent_url = reverse('browse-revision',
+ url_args={'sha1_git': parent},
+ query_params={'origin': origin['url']})
+ assert_contains(resp, '<a href="%s">%s</a>' %
+ (parent_url, parent))
- self.assertContains(resp, origin_revision_log_url)
+ assert_contains(resp, 'vault-cook-directory')
+ assert_contains(resp, 'vault-cook-revision')
- for parent in revision_data['parents']:
- parent_url = reverse('browse-revision',
- url_args={'sha1_git': parent},
- query_params={'origin': origin['url']})
- self.assertContains(resp, '<a href="%s">%s</a>' %
- (parent_url, parent))
+ swh_rev_id = get_swh_persistent_id('revision', revision)
+ swh_rev_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_rev_id})
+ assert_contains(resp, swh_rev_id)
+ assert_contains(resp, swh_rev_id_url)
- self.assertContains(resp, 'vault-cook-directory')
- self.assertContains(resp, 'vault-cook-revision')
+ swh_dir_id = get_swh_persistent_id('directory', dir_id)
+ swh_dir_id_url = reverse('browse-swh-id',
+ url_args={'swh_id': swh_dir_id})
+ assert_contains(resp, swh_dir_id)
+ assert_contains(resp, swh_dir_id_url)
- swh_rev_id = get_swh_persistent_id('revision', revision)
- swh_rev_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_rev_id})
- self.assertContains(resp, swh_rev_id)
- self.assertContains(resp, swh_rev_id_url)
+ assert_contains(resp, 'swh-take-new-snapshot')
- swh_dir_id = get_swh_persistent_id('directory', dir_id)
- swh_dir_id_url = reverse('browse-swh-id',
- url_args={'swh_id': swh_dir_id})
- self.assertContains(resp, swh_dir_id)
- self.assertContains(resp, swh_dir_id_url)
- self.assertContains(resp, 'swh-take-new-snapshot')
+@given(revision())
+def test_revision_log_browse(client, archive_data, revision):
+ per_page = 10
- @given(revision())
- def test_revision_log_browse(self, revision):
- per_page = 10
+ revision_log = archive_data.revision_log(revision)
- revision_log = self.revision_log(revision)
+ revision_log_sorted = \
+ sorted(revision_log,
+ key=lambda rev: -parse_timestamp(
+ rev['committer_date']).timestamp())
- revision_log_sorted = \
- sorted(revision_log,
- key=lambda rev: -parse_timestamp(
- rev['committer_date']).timestamp())
+ url = reverse('browse-revision-log',
+ url_args={'sha1_git': revision},
+ query_params={'per_page': per_page})
- url = reverse('browse-revision-log',
- url_args={'sha1_git': revision},
- query_params={'per_page': per_page})
+ resp = client.get(url)
- resp = self.client.get(url)
+ next_page_url = reverse('browse-revision-log',
+ url_args={'sha1_git': revision},
+ query_params={'offset': per_page,
+ 'per_page': per_page})
+
+ nb_log_entries = per_page
+ if len(revision_log_sorted) < per_page:
+ nb_log_entries = len(revision_log_sorted)
+
+ assert resp.status_code == 200
+ assert_template_used('browse/revision-log.html')
+ assert_contains(resp, '<tr class="swh-revision-log-entry',
+ count=nb_log_entries)
+ assert_contains(resp, '<a class="page-link">Newer</a>')
+
+ if len(revision_log_sorted) > per_page:
+ assert_contains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
+ escape(next_page_url))
+
+ for log in revision_log_sorted[:per_page]:
+ revision_url = reverse('browse-revision',
+ url_args={'sha1_git': log['id']})
+ assert_contains(resp, log['id'][:7])
+ assert_contains(resp, log['author']['name'])
+ assert_contains(resp, format_utc_iso_date(log['date']))
+ assert_contains(resp, escape(log['message']))
+ assert_contains(resp, format_utc_iso_date(log['committer_date'])) # noqa
+ assert_contains(resp, revision_url)
+
+ if len(revision_log_sorted) <= per_page:
+ return
+
+ resp = client.get(next_page_url)
+
+ prev_page_url = reverse('browse-revision-log',
+ url_args={'sha1_git': revision},
+ query_params={'per_page': per_page})
+ next_page_url = reverse('browse-revision-log',
+ url_args={'sha1_git': revision},
+ query_params={'offset': 2 * per_page,
+ 'per_page': per_page})
+
+ nb_log_entries = len(revision_log_sorted) - per_page
+ if nb_log_entries > per_page:
+ nb_log_entries = per_page
+
+ assert resp.status_code == 200
+ assert_template_used('browse/revision-log.html')
+ assert_contains(resp, '<tr class="swh-revision-log-entry',
+ count=nb_log_entries)
+
+ assert_contains(resp, '<a class="page-link" href="%s">Newer</a>' %
+ escape(prev_page_url))
+
+ if len(revision_log_sorted) > 2 * per_page:
+ assert_contains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
+ escape(next_page_url))
- next_page_url = reverse('browse-revision-log',
- url_args={'sha1_git': revision},
- query_params={'offset': per_page,
- 'per_page': per_page})
+ if len(revision_log_sorted) <= 2 * per_page:
+ return
+ resp = client.get(next_page_url)
+
+ prev_page_url = reverse('browse-revision-log',
+ url_args={'sha1_git': revision},
+ query_params={'offset': per_page,
+ 'per_page': per_page})
+ next_page_url = reverse('browse-revision-log',
+ url_args={'sha1_git': revision},
+ query_params={'offset': 3 * per_page,
+ 'per_page': per_page})
+
+ nb_log_entries = len(revision_log_sorted) - 2 * per_page
+ if nb_log_entries > per_page:
nb_log_entries = per_page
- if len(revision_log_sorted) < per_page:
- nb_log_entries = len(revision_log_sorted)
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/revision-log.html')
- self.assertContains(resp, '<tr class="swh-revision-log-entry',
- count=nb_log_entries)
- self.assertContains(resp, '<a class="page-link">Newer</a>')
-
- if len(revision_log_sorted) > per_page:
- self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
- escape(next_page_url))
-
- for log in revision_log_sorted[:per_page]:
- revision_url = reverse('browse-revision',
- url_args={'sha1_git': log['id']})
- self.assertContains(resp, log['id'][:7])
- self.assertContains(resp, log['author']['name'])
- self.assertContains(resp, format_utc_iso_date(log['date']))
- self.assertContains(resp, escape(log['message']))
- self.assertContains(resp, format_utc_iso_date(log['committer_date'])) # noqa
- self.assertContains(resp, revision_url)
-
- if len(revision_log_sorted) <= per_page:
- return
-
- resp = self.client.get(next_page_url)
-
- prev_page_url = reverse('browse-revision-log',
- url_args={'sha1_git': revision},
- query_params={'per_page': per_page})
- next_page_url = reverse('browse-revision-log',
- url_args={'sha1_git': revision},
- query_params={'offset': 2 * per_page,
- 'per_page': per_page})
-
- nb_log_entries = len(revision_log_sorted) - per_page
- if nb_log_entries > per_page:
- nb_log_entries = per_page
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/revision-log.html')
- self.assertContains(resp, '<tr class="swh-revision-log-entry',
- count=nb_log_entries)
-
- self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
- escape(prev_page_url))
-
- if len(revision_log_sorted) > 2 * per_page:
- self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
- escape(next_page_url))
-
- if len(revision_log_sorted) <= 2 * per_page:
- return
-
- resp = self.client.get(next_page_url)
-
- prev_page_url = reverse('browse-revision-log',
- url_args={'sha1_git': revision},
- query_params={'offset': per_page,
- 'per_page': per_page})
- next_page_url = reverse('browse-revision-log',
- url_args={'sha1_git': revision},
- query_params={'offset': 3 * per_page,
- 'per_page': per_page})
-
- nb_log_entries = len(revision_log_sorted) - 2 * per_page
- if nb_log_entries > per_page:
- nb_log_entries = per_page
-
- self.assertEqual(resp.status_code, 200)
- self.assertTemplateUsed('browse/revision-log.html')
- self.assertContains(resp, '<tr class="swh-revision-log-entry',
- count=nb_log_entries)
- self.assertContains(resp, '<a class="page-link" href="%s">Newer</a>' %
- escape(prev_page_url))
-
- if len(revision_log_sorted) > 3 * per_page:
- self.assertContains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
- escape(next_page_url))
-
- @given(revision(), unknown_revision(), new_origin())
- def test_revision_request_errors(self, revision, unknown_revision,
- new_origin):
-
- url = reverse('browse-revision',
- url_args={'sha1_git': unknown_revision})
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp,
- 'Revision with sha1_git %s not found' %
- unknown_revision, status_code=404)
-
- url = reverse('browse-revision',
- url_args={'sha1_git': revision},
- query_params={'origin': new_origin['url']})
-
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 404)
- self.assertTemplateUsed('error.html')
- self.assertContains(resp, 'the origin mentioned in your request'
- ' appears broken', status_code=404)
-
- @given(revision())
- def test_revision_uppercase(self, revision):
- url = reverse('browse-revision-uppercase-checksum',
- url_args={'sha1_git': revision.upper()})
-
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
-
- redirect_url = reverse('browse-revision',
- url_args={'sha1_git': revision})
-
- self.assertEqual(resp['location'], redirect_url)
+
+ assert resp.status_code == 200
+ assert_template_used('browse/revision-log.html')
+ assert_contains(resp, '<tr class="swh-revision-log-entry',
+ count=nb_log_entries)
+ assert_contains(resp, '<a class="page-link" href="%s">Newer</a>' %
+ escape(prev_page_url))
+
+ if len(revision_log_sorted) > 3 * per_page:
+ assert_contains(resp, '<a class="page-link" href="%s">Older</a>' % # noqa
+ escape(next_page_url))
+
+
+@given(revision(), unknown_revision(), new_origin())
+def test_revision_request_errors(client, revision, unknown_revision,
+ new_origin):
+ url = reverse('browse-revision',
+ url_args={'sha1_git': unknown_revision})
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp,
+ 'Revision with sha1_git %s not found' %
+ unknown_revision, status_code=404)
+
+ url = reverse('browse-revision',
+ url_args={'sha1_git': revision},
+ query_params={'origin': new_origin['url']})
+
+ resp = client.get(url)
+ assert resp.status_code == 404
+ assert_template_used('error.html')
+ assert_contains(resp, 'the origin mentioned in your request'
+ ' appears broken', status_code=404)
+
+
+@given(revision())
+def test_revision_uppercase(client, revision):
+ url = reverse('browse-revision-uppercase-checksum',
+ url_args={'sha1_git': revision.upper()})
+
+ resp = client.get(url)
+ assert resp.status_code == 302
+
+ redirect_url = reverse('browse-revision',
+ url_args={'sha1_git': revision})
+
+ assert resp['location'] == redirect_url
diff --git a/swh/web/tests/common/test_converters.py b/swh/web/tests/common/test_converters.py
--- a/swh/web/tests/common/test_converters.py
+++ b/swh/web/tests/common/test_converters.py
@@ -1,237 +1,192 @@
-# Copyright (C) 2015-2018 The Software Heritage developers
+# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
-
from swh.model import hashutil
-
from swh.web.common import converters
-from swh.web.tests.testcase import WebTestCase
-
-
-class ConvertersTestCase(WebTestCase):
-
- def test_fmap(self):
- self.assertEqual([2, 3, None, 4],
- converters.fmap(lambda x: x+1, [1, 2, None, 3]))
- self.assertEqual([11, 12, 13],
- list(converters.fmap(lambda x: x+10,
- map(lambda x: x, [1, 2, 3]))))
- self.assertEqual({'a': 2, 'b': 4},
- converters.fmap(lambda x: x*2, {'a': 1, 'b': 2}))
- self.assertEqual(100,
- converters.fmap(lambda x: x*10, 10))
- self.assertEqual({'a': [2, 6], 'b': 4},
- converters.fmap(lambda x: x*2, {'a': [1, 3], 'b': 2})) # noqa
-
- self.assertIsNone(converters.fmap(lambda x: x, None))
-
- def test_from_swh(self):
- some_input = {
- 'a': 'something',
- 'b': 'someone',
- 'c': b'sharp-0.3.4.tgz',
- 'd': hashutil.hash_to_bytes(
- 'b04caf10e9535160d90e874b45aa426de762f19f'),
- 'e': b'sharp.html/doc_002dS_005fISREG.html',
- 'g': [b'utf-8-to-decode', b'another-one'],
- 'h': 'something filtered',
- 'i': {'e': b'something'},
- 'j': {
- 'k': {
- 'l': [b'bytes thing', b'another thingy', b''],
- 'n': 'dont care either'
- },
- 'm': 'dont care'
- },
- 'o': 'something',
- 'p': b'foo',
- 'q': {'extra-headers': [['a', b'intact']]},
- 'w': None,
- 'r': {'p': 'also intact',
- 'q': 'bar'},
- 's': {
- 'timestamp': 42,
- 'offset': -420,
- 'negative_utc': None,
- },
- 's1': {
- 'timestamp': {'seconds': 42, 'microseconds': 0},
- 'offset': -420,
- 'negative_utc': None,
- },
- 's2': datetime.datetime(
- 2013, 7, 1, 20, 0, 0,
- tzinfo=datetime.timezone.utc),
- 't': None,
- 'u': None,
- 'v': None,
- 'x': None,
- }
-
- expected_output = {
- 'a': 'something',
- 'b': 'someone',
- 'c': 'sharp-0.3.4.tgz',
- 'd': 'b04caf10e9535160d90e874b45aa426de762f19f',
- 'e': 'sharp.html/doc_002dS_005fISREG.html',
- 'g': ['utf-8-to-decode', 'another-one'],
- 'i': {'e': 'something'},
- 'j': {
- 'k': {
- 'l': ['bytes thing', 'another thingy', '']
- }
- },
- 'p': 'foo',
- 'q': {'extra-headers': [['a', 'intact']]},
- 'w': {},
- 'r': {'p': 'also intact',
- 'q': 'bar'},
- 's': '1969-12-31T17:00:42-07:00',
- 's1': '1969-12-31T17:00:42-07:00',
- 's2': '2013-07-01T20:00:00+00:00',
- 'u': {},
- 'v': [],
- 'x': None,
- }
-
- actual_output = converters.from_swh(
- some_input,
- hashess={'d', 'o', 'x'},
- bytess={'c', 'e', 'g', 'l'},
- dates={'s', 's1', 's2'},
- blacklist={'h', 'm', 'n', 'o'},
- removables_if_empty={'t'},
- empty_dict={'u'},
- empty_list={'v'},
- convert={'p', 'q', 'w'},
- convert_fn=converters.convert_revision_metadata)
-
- self.assertEqual(expected_output, actual_output)
-
- def test_from_swh_edge_cases_do_no_conversion_if_none_or_not_bytes(self):
- some_input = {
- 'a': 'something',
- 'b': None,
- 'c': 'someone',
- 'd': None,
- 'e': None
- }
-
- expected_output = {
- 'a': 'something',
- 'b': None,
- 'c': 'someone',
- 'd': None,
- 'e': None
- }
-
- actual_output = converters.from_swh(some_input,
- hashess={'a', 'b'},
- bytess={'c', 'd'},
- dates={'e'})
-
- self.assertEqual(expected_output, actual_output)
-
- def test_from_swh_edge_cases_convert_invalid_utf8_bytes(self):
- some_input = {
- 'a': 'something',
- 'b': 'someone',
- 'c': b'a name \xff',
- 'd': b'an email \xff',
- }
- expected_output = {
- 'a': 'something',
- 'b': 'someone',
- 'c': 'a name \\xff',
- 'd': 'an email \\xff',
- 'decoding_failures': ['c', 'd']
- }
-
- actual_output = converters.from_swh(some_input,
- hashess={'a', 'b'},
- bytess={'c', 'd'})
- for v in ['a', 'b', 'c', 'd']:
- self.assertEqual(expected_output[v], actual_output[v])
- self.assertEqual(len(expected_output['decoding_failures']),
- len(actual_output['decoding_failures']))
- for v in expected_output['decoding_failures']:
- self.assertTrue(v in actual_output['decoding_failures'])
-
- def test_from_swh_empty(self):
- # when
- self.assertEqual({}, converters.from_swh({}))
-
- def test_from_swh_none(self):
- # when
- self.assertIsNone(converters.from_swh(None))
-
- def test_from_origin(self):
- # given
- origin_input = {
- 'id': 9,
- 'type': 'ftp',
- 'url': 'rsync://ftp.gnu.org/gnu/octave',
- }
-
- expected_origin = {
- 'id': 9,
- 'type': 'ftp',
- 'url': 'rsync://ftp.gnu.org/gnu/octave',
- }
- # when
- actual_origin = converters.from_origin(origin_input)
-
- # then
- self.assertEqual(actual_origin, expected_origin)
-
- def test_from_origin_visit(self):
- snap_hash = 'b5f0b7f716735ebffe38505c60145c4fd9da6ca3'
-
- for snap in [snap_hash, None]:
- # given
- visit = {
- 'date': {
- 'timestamp': datetime.datetime(
- 2015, 1, 1, 22, 0, 0,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'origin': 10,
- 'visit': 100,
- 'metadata': None,
- 'status': 'full',
- 'snapshot': hashutil.hash_to_bytes(snap) if snap else snap,
+def test_fmap():
+ assert [2, 3, None, 4] == converters.fmap(lambda x: x+1, [1, 2, None, 3])
+ assert [11, 12, 13] == list(converters.fmap(lambda x: x+10,
+ map(lambda x: x, [1, 2, 3])))
+ assert {'a': 2, 'b': 4} == converters.fmap(lambda x: x*2, {'a': 1, 'b': 2})
+ assert 100 == converters.fmap(lambda x: x*10, 10)
+ assert {'a': [2, 6], 'b': 4} == converters.fmap(lambda x: x*2,
+ {'a': [1, 3], 'b': 2})
+ assert converters.fmap(lambda x: x, None) is None
+
+
+def test_from_swh():
+ some_input = {
+ 'a': 'something',
+ 'b': 'someone',
+ 'c': b'sharp-0.3.4.tgz',
+ 'd': hashutil.hash_to_bytes(
+ 'b04caf10e9535160d90e874b45aa426de762f19f'),
+ 'e': b'sharp.html/doc_002dS_005fISREG.html',
+ 'g': [b'utf-8-to-decode', b'another-one'],
+ 'h': 'something filtered',
+ 'i': {'e': b'something'},
+ 'j': {
+ 'k': {
+ 'l': [b'bytes thing', b'another thingy', b''],
+ 'n': 'dont care either'
+ },
+ 'm': 'dont care'
+ },
+ 'o': 'something',
+ 'p': b'foo',
+ 'q': {'extra-headers': [['a', b'intact']]},
+ 'w': None,
+ 'r': {'p': 'also intact',
+ 'q': 'bar'},
+ 's': {
+ 'timestamp': 42,
+ 'offset': -420,
+ 'negative_utc': None,
+ },
+ 's1': {
+ 'timestamp': {'seconds': 42, 'microseconds': 0},
+ 'offset': -420,
+ 'negative_utc': None,
+ },
+ 's2': datetime.datetime(
+ 2013, 7, 1, 20, 0, 0,
+ tzinfo=datetime.timezone.utc),
+ 't': None,
+ 'u': None,
+ 'v': None,
+ 'x': None,
+ }
+
+ expected_output = {
+ 'a': 'something',
+ 'b': 'someone',
+ 'c': 'sharp-0.3.4.tgz',
+ 'd': 'b04caf10e9535160d90e874b45aa426de762f19f',
+ 'e': 'sharp.html/doc_002dS_005fISREG.html',
+ 'g': ['utf-8-to-decode', 'another-one'],
+ 'i': {'e': 'something'},
+ 'j': {
+ 'k': {
+ 'l': ['bytes thing', 'another thingy', '']
}
-
- expected_visit = {
- 'date': '2015-01-01T22:00:00+00:00',
- 'origin': 10,
- 'visit': 100,
- 'metadata': {},
- 'status': 'full',
- 'snapshot': snap_hash if snap else snap
- }
-
- # when
- actual_visit = converters.from_origin_visit(visit)
-
- # then
- self.assertEqual(actual_visit, expected_visit)
-
- def test_from_release(self):
- release_input = {
- 'id': hashutil.hash_to_bytes(
- 'aad23fa492a0c5fed0708a6703be875448c86884'),
- 'target': hashutil.hash_to_bytes(
- '5e46d564378afc44b31bb89f99d5675195fbdf67'),
- 'target_type': 'revision',
+ },
+ 'p': 'foo',
+ 'q': {'extra-headers': [['a', 'intact']]},
+ 'w': {},
+ 'r': {'p': 'also intact',
+ 'q': 'bar'},
+ 's': '1969-12-31T17:00:42-07:00',
+ 's1': '1969-12-31T17:00:42-07:00',
+ 's2': '2013-07-01T20:00:00+00:00',
+ 'u': {},
+ 'v': [],
+ 'x': None,
+ }
+
+ actual_output = converters.from_swh(
+ some_input,
+ hashess={'d', 'o', 'x'},
+ bytess={'c', 'e', 'g', 'l'},
+ dates={'s', 's1', 's2'},
+ blacklist={'h', 'm', 'n', 'o'},
+ removables_if_empty={'t'},
+ empty_dict={'u'},
+ empty_list={'v'},
+ convert={'p', 'q', 'w'},
+ convert_fn=converters.convert_revision_metadata)
+
+ assert expected_output == actual_output
+
+
+def test_from_swh_edge_cases_do_no_conversion_if_none_or_not_bytes():
+ some_input = {
+ 'a': 'something',
+ 'b': None,
+ 'c': 'someone',
+ 'd': None,
+ 'e': None
+ }
+
+ expected_output = {
+ 'a': 'something',
+ 'b': None,
+ 'c': 'someone',
+ 'd': None,
+ 'e': None
+ }
+
+ actual_output = converters.from_swh(some_input,
+ hashess={'a', 'b'},
+ bytess={'c', 'd'},
+ dates={'e'})
+
+ assert expected_output == actual_output
+
+
+def test_from_swh_edge_cases_convert_invalid_utf8_bytes():
+ some_input = {
+ 'a': 'something',
+ 'b': 'someone',
+ 'c': b'a name \xff',
+ 'd': b'an email \xff',
+ }
+
+ expected_output = {
+ 'a': 'something',
+ 'b': 'someone',
+ 'c': 'a name \\xff',
+ 'd': 'an email \\xff',
+ 'decoding_failures': ['c', 'd']
+ }
+
+ actual_output = converters.from_swh(some_input,
+ hashess={'a', 'b'},
+ bytess={'c', 'd'})
+ for v in ['a', 'b', 'c', 'd']:
+ assert expected_output[v] == actual_output[v]
+ assert (len(expected_output['decoding_failures']) ==
+ len(actual_output['decoding_failures']))
+ for v in expected_output['decoding_failures']:
+ assert v in actual_output['decoding_failures']
+
+
+def test_from_swh_empty():
+ assert {} == converters.from_swh({})
+
+
+def test_from_swh_none():
+ assert converters.from_swh(None) is None
+
+
+def test_from_origin():
+ origin_input = {
+ 'id': 9,
+ 'type': 'ftp',
+ 'url': 'rsync://ftp.gnu.org/gnu/octave',
+ }
+
+ expected_origin = {
+ 'id': 9,
+ 'type': 'ftp',
+ 'url': 'rsync://ftp.gnu.org/gnu/octave',
+ }
+
+ actual_origin = converters.from_origin(origin_input)
+
+ assert actual_origin == expected_origin
+
+
+def test_from_origin_visit():
+ snap_hash = 'b5f0b7f716735ebffe38505c60145c4fd9da6ca3'
+
+ for snap in [snap_hash, None]:
+ visit = {
'date': {
'timestamp': datetime.datetime(
2015, 1, 1, 22, 0, 0,
@@ -239,521 +194,546 @@
'offset': 0,
'negative_utc': False,
},
- 'author': {
- 'name': b'author name',
- 'fullname': b'Author Name author@email',
- 'email': b'author@email',
- },
- 'name': b'v0.0.1',
- 'message': b'some comment on release',
- 'synthetic': True,
+ 'origin': 10,
+ 'visit': 100,
+ 'metadata': None,
+ 'status': 'full',
+ 'snapshot': hashutil.hash_to_bytes(snap) if snap else snap,
}
- expected_release = {
- 'id': 'aad23fa492a0c5fed0708a6703be875448c86884',
- 'target': '5e46d564378afc44b31bb89f99d5675195fbdf67',
- 'target_type': 'revision',
+ expected_visit = {
'date': '2015-01-01T22:00:00+00:00',
- 'author': {
- 'name': 'author name',
- 'fullname': 'Author Name author@email',
- 'email': 'author@email',
- },
- 'name': 'v0.0.1',
- 'message': 'some comment on release',
- 'target_type': 'revision',
- 'synthetic': True,
- }
-
- # when
- actual_release = converters.from_release(release_input)
-
- # then
- self.assertEqual(actual_release, expected_release)
-
- def test_from_release_no_revision(self):
- release_input = {
- 'id': hashutil.hash_to_bytes(
- 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e'),
- 'target': None,
- 'date': {
- 'timestamp': datetime.datetime(
- 2016, 3, 2, 10, 0, 0,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': True,
-
- },
- 'name': b'v0.1.1',
- 'message': b'comment on release',
- 'synthetic': False,
- 'author': {
- 'name': b'bob',
- 'fullname': b'Bob bob@alice.net',
- 'email': b'bob@alice.net',
- },
- }
-
- expected_release = {
- 'id': 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e',
- 'target': None,
- 'date': '2016-03-02T10:00:00-00:00',
- 'name': 'v0.1.1',
- 'message': 'comment on release',
- 'synthetic': False,
- 'author': {
- 'name': 'bob',
- 'fullname': 'Bob bob@alice.net',
- 'email': 'bob@alice.net',
- },
- }
-
- # when
- actual_release = converters.from_release(release_input)
-
- # then
- self.assertEqual(actual_release, expected_release)
-
- def test_from_revision(self):
- revision_input = {
- 'id': hashutil.hash_to_bytes(
- '18d8be353ed3480476f032475e7c233eff7371d5'),
- 'directory': hashutil.hash_to_bytes(
- '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
- 'author': {
- 'name': b'Software Heritage',
- 'fullname': b'robot robot@softwareheritage.org',
- 'email': b'robot@softwareheritage.org',
- },
- 'committer': {
- 'name': b'Software Heritage',
- 'fullname': b'robot robot@softwareheritage.org',
- 'email': b'robot@softwareheritage.org',
- },
- 'message': b'synthetic revision message',
- 'date': {
- 'timestamp': datetime.datetime(
- 2000, 1, 17, 11, 23, 54,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'committer_date': {
- 'timestamp': datetime.datetime(
- 2000, 1, 17, 11, 23, 54,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'synthetic': True,
- 'type': 'tar',
- 'parents': [
- hashutil.hash_to_bytes(
- '29d8be353ed3480476f032475e7c244eff7371d5'),
- hashutil.hash_to_bytes(
- '30d8be353ed3480476f032475e7c244eff7371d5')
- ],
- 'children': [
- hashutil.hash_to_bytes(
- '123546353ed3480476f032475e7c244eff7371d5'),
- ],
- 'metadata': {
- 'extra_headers': [['gpgsig', b'some-signature']],
- 'original_artifact': [{
- 'archive_type': 'tar',
- 'name': 'webbase-5.7.0.tar.gz',
- 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
- 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
- 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
- '309d36484e7edf7bb912',
-
- }]
- },
- }
-
- expected_revision = {
- 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
- 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
- 'author': {
- 'name': 'Software Heritage',
- 'fullname': 'robot robot@softwareheritage.org',
- 'email': 'robot@softwareheritage.org',
- },
- 'committer': {
- 'name': 'Software Heritage',
- 'fullname': 'robot robot@softwareheritage.org',
- 'email': 'robot@softwareheritage.org',
- },
- 'message': 'synthetic revision message',
- 'date': "2000-01-17T11:23:54+00:00",
- 'committer_date': "2000-01-17T11:23:54+00:00",
- 'children': [
- '123546353ed3480476f032475e7c244eff7371d5'
- ],
- 'parents': [
- '29d8be353ed3480476f032475e7c244eff7371d5',
- '30d8be353ed3480476f032475e7c244eff7371d5'
- ],
- 'type': 'tar',
- 'synthetic': True,
- 'metadata': {
- 'extra_headers': [['gpgsig', 'some-signature']],
- 'original_artifact': [{
- 'archive_type': 'tar',
- 'name': 'webbase-5.7.0.tar.gz',
- 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
- 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
- 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
- '309d36484e7edf7bb912'
- }]
- },
- 'merge': True
- }
-
- # when
- actual_revision = converters.from_revision(revision_input)
-
- # then
- self.assertEqual(actual_revision, expected_revision)
-
- def test_from_revision_nomerge(self):
- revision_input = {
- 'id': hashutil.hash_to_bytes(
- '18d8be353ed3480476f032475e7c233eff7371d5'),
- 'parents': [
- hashutil.hash_to_bytes(
- '29d8be353ed3480476f032475e7c244eff7371d5')
- ]
+ 'origin': 10,
+ 'visit': 100,
+ 'metadata': {},
+ 'status': 'full',
+ 'snapshot': snap_hash if snap else snap
}
- expected_revision = {
- 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
- 'parents': [
- '29d8be353ed3480476f032475e7c244eff7371d5'
- ],
- 'merge': False
- }
-
- # when
- actual_revision = converters.from_revision(revision_input)
-
- # then
- self.assertEqual(actual_revision, expected_revision)
-
- def test_from_revision_noparents(self):
- revision_input = {
- 'id': hashutil.hash_to_bytes(
- '18d8be353ed3480476f032475e7c233eff7371d5'),
- 'directory': hashutil.hash_to_bytes(
- '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
- 'author': {
- 'name': b'Software Heritage',
- 'fullname': b'robot robot@softwareheritage.org',
- 'email': b'robot@softwareheritage.org',
- },
- 'committer': {
- 'name': b'Software Heritage',
- 'fullname': b'robot robot@softwareheritage.org',
- 'email': b'robot@softwareheritage.org',
- },
- 'message': b'synthetic revision message',
- 'date': {
- 'timestamp': datetime.datetime(
- 2000, 1, 17, 11, 23, 54,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'committer_date': {
- 'timestamp': datetime.datetime(
- 2000, 1, 17, 11, 23, 54,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'synthetic': True,
- 'type': 'tar',
- 'children': [
- hashutil.hash_to_bytes(
- '123546353ed3480476f032475e7c244eff7371d5'),
- ],
- 'metadata': {
- 'original_artifact': [{
- 'archive_type': 'tar',
- 'name': 'webbase-5.7.0.tar.gz',
- 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
- 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
- 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
- '309d36484e7edf7bb912',
-
- }]
- },
- }
-
- expected_revision = {
- 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
- 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
- 'author': {
- 'name': 'Software Heritage',
- 'fullname': 'robot robot@softwareheritage.org',
- 'email': 'robot@softwareheritage.org',
- },
- 'committer': {
- 'name': 'Software Heritage',
- 'fullname': 'robot robot@softwareheritage.org',
- 'email': 'robot@softwareheritage.org',
- },
- 'message': 'synthetic revision message',
- 'date': "2000-01-17T11:23:54+00:00",
- 'committer_date': "2000-01-17T11:23:54+00:00",
- 'children': [
- '123546353ed3480476f032475e7c244eff7371d5'
- ],
- 'type': 'tar',
- 'synthetic': True,
- 'metadata': {
- 'original_artifact': [{
- 'archive_type': 'tar',
- 'name': 'webbase-5.7.0.tar.gz',
- 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
- 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
- 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
- '309d36484e7edf7bb912'
- }]
- }
- }
-
- # when
- actual_revision = converters.from_revision(revision_input)
-
- # then
- self.assertEqual(actual_revision, expected_revision)
-
- def test_from_revision_invalid(self):
- revision_input = {
- 'id': hashutil.hash_to_bytes(
- '18d8be353ed3480476f032475e7c233eff7371d5'),
- 'directory': hashutil.hash_to_bytes(
- '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
- 'author': {
- 'name': b'Software Heritage',
- 'fullname': b'robot robot@softwareheritage.org',
- 'email': b'robot@softwareheritage.org',
- },
- 'committer': {
- 'name': b'Software Heritage',
- 'fullname': b'robot robot@softwareheritage.org',
- 'email': b'robot@softwareheritage.org',
- },
- 'message': b'invalid message \xff',
- 'date': {
- 'timestamp': datetime.datetime(
- 2000, 1, 17, 11, 23, 54,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'committer_date': {
- 'timestamp': datetime.datetime(
- 2000, 1, 17, 11, 23, 54,
- tzinfo=datetime.timezone.utc).timestamp(),
- 'offset': 0,
- 'negative_utc': False,
- },
- 'synthetic': True,
- 'type': 'tar',
- 'parents': [
- hashutil.hash_to_bytes(
- '29d8be353ed3480476f032475e7c244eff7371d5'),
- hashutil.hash_to_bytes(
- '30d8be353ed3480476f032475e7c244eff7371d5')
- ],
- 'children': [
- hashutil.hash_to_bytes(
- '123546353ed3480476f032475e7c244eff7371d5'),
- ],
- 'metadata': {
- 'original_artifact': [{
- 'archive_type': 'tar',
- 'name': 'webbase-5.7.0.tar.gz',
- 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
- 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
- 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
- '309d36484e7edf7bb912',
-
- }]
- },
- }
-
- expected_revision = {
- 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
- 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
- 'author': {
- 'name': 'Software Heritage',
- 'fullname': 'robot robot@softwareheritage.org',
- 'email': 'robot@softwareheritage.org',
- },
- 'committer': {
- 'name': 'Software Heritage',
- 'fullname': 'robot robot@softwareheritage.org',
- 'email': 'robot@softwareheritage.org',
- },
- 'message': None,
- 'message_decoding_failed': True,
- 'date': "2000-01-17T11:23:54+00:00",
- 'committer_date': "2000-01-17T11:23:54+00:00",
- 'children': [
- '123546353ed3480476f032475e7c244eff7371d5'
- ],
- 'parents': [
- '29d8be353ed3480476f032475e7c244eff7371d5',
- '30d8be353ed3480476f032475e7c244eff7371d5'
- ],
- 'type': 'tar',
- 'synthetic': True,
- 'metadata': {
- 'original_artifact': [{
- 'archive_type': 'tar',
- 'name': 'webbase-5.7.0.tar.gz',
- 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
- 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
- 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
- '309d36484e7edf7bb912'
- }]
- },
- 'merge': True
- }
-
- # when
- actual_revision = converters.from_revision(revision_input)
-
- # then
- self.assertEqual(actual_revision, expected_revision)
-
- def test_from_content_none(self):
- self.assertIsNone(converters.from_content(None))
-
- def test_from_content(self):
- content_input = {
- 'sha1': hashutil.hash_to_bytes(
- '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
- 'sha256': hashutil.hash_to_bytes(
- '39007420ca5de7cb3cfc15196335507e'
- 'e76c98930e7e0afa4d2747d3bf96c926'),
- 'blake2s256': hashutil.hash_to_bytes(
- '49007420ca5de7cb3cfc15196335507e'
- 'e76c98930e7e0afa4d2747d3bf96c926'),
- 'sha1_git': hashutil.hash_to_bytes(
- '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
- 'ctime': 'something-which-is-filtered-out',
- 'data': b'data in bytes',
- 'length': 10,
- 'status': 'hidden',
- }
-
- # 'status' is filtered
- expected_content = {
- 'checksums': {
- 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
- 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
- '930e7e0afa4d2747d3bf96c926',
- 'blake2s256': '49007420ca5de7cb3cfc15196335507ee7'
- '6c98930e7e0afa4d2747d3bf96c926',
- 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
- },
- 'data': b'data in bytes',
- 'length': 10,
- 'status': 'absent',
- }
-
- # when
- actual_content = converters.from_content(content_input)
-
- # then
- self.assertEqual(actual_content, expected_content)
-
- def test_from_person(self):
- person_input = {
- 'id': 10,
- 'anything': 'else',
+ actual_visit = converters.from_origin_visit(visit)
+
+ assert actual_visit == expected_visit
+
+
+def test_from_release():
+ release_input = {
+ 'id': hashutil.hash_to_bytes(
+ 'aad23fa492a0c5fed0708a6703be875448c86884'),
+ 'target': hashutil.hash_to_bytes(
+ '5e46d564378afc44b31bb89f99d5675195fbdf67'),
+ 'target_type': 'revision',
+ 'date': {
+ 'timestamp': datetime.datetime(
+ 2015, 1, 1, 22, 0, 0,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'author': {
+ 'name': b'author name',
+ 'fullname': b'Author Name author@email',
+ 'email': b'author@email',
+ },
+ 'name': b'v0.0.1',
+ 'message': b'some comment on release',
+ 'synthetic': True,
+ }
+
+ expected_release = {
+ 'id': 'aad23fa492a0c5fed0708a6703be875448c86884',
+ 'target': '5e46d564378afc44b31bb89f99d5675195fbdf67',
+ 'target_type': 'revision',
+ 'date': '2015-01-01T22:00:00+00:00',
+ 'author': {
+ 'name': 'author name',
+ 'fullname': 'Author Name author@email',
+ 'email': 'author@email',
+ },
+ 'name': 'v0.0.1',
+ 'message': 'some comment on release',
+ 'target_type': 'revision',
+ 'synthetic': True,
+ }
+
+ actual_release = converters.from_release(release_input)
+
+ assert actual_release == expected_release
+
+
+def test_from_release_no_revision():
+ release_input = {
+ 'id': hashutil.hash_to_bytes(
+ 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e'),
+ 'target': None,
+ 'date': {
+ 'timestamp': datetime.datetime(
+ 2016, 3, 2, 10, 0, 0,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': True,
+
+ },
+ 'name': b'v0.1.1',
+ 'message': b'comment on release',
+ 'synthetic': False,
+ 'author': {
'name': b'bob',
- 'fullname': b'bob bob@alice.net',
- 'email': b'bob@foo.alice',
- }
-
- expected_person = {
- 'id': 10,
- 'anything': 'else',
- 'name': 'bob',
- 'fullname': 'bob bob@alice.net',
- 'email': 'bob@foo.alice',
- }
-
- # when
- actual_person = converters.from_person(person_input)
-
- # then
- self.assertEqual(actual_person, expected_person)
-
- def test_from_directory_entries(self):
- dir_entries_input = {
- 'sha1': hashutil.hash_to_bytes(
- '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
- 'sha256': hashutil.hash_to_bytes(
- '39007420ca5de7cb3cfc15196335507e'
- 'e76c98930e7e0afa4d2747d3bf96c926'),
- 'sha1_git': hashutil.hash_to_bytes(
- '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
- 'blake2s256': hashutil.hash_to_bytes(
- '685395c5dc57cada459364f0946d3dd45bad5fcbab'
- 'c1048edb44380f1d31d0aa'),
- 'target': hashutil.hash_to_bytes(
- '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
- 'dir_id': hashutil.hash_to_bytes(
- '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
- 'name': b'bob',
- 'type': 10,
- 'status': 'hidden',
- }
-
- expected_dir_entries = {
- 'checksums': {
- 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
- 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
- '930e7e0afa4d2747d3bf96c926',
- 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
- 'blake2s256': '685395c5dc57cada459364f0946d3dd45bad5f'
- 'cbabc1048edb44380f1d31d0aa',
- },
- 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
- 'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
+ 'fullname': b'Bob bob@alice.net',
+ 'email': b'bob@alice.net',
+ },
+ }
+
+ expected_release = {
+ 'id': 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e',
+ 'target': None,
+ 'date': '2016-03-02T10:00:00-00:00',
+ 'name': 'v0.1.1',
+ 'message': 'comment on release',
+ 'synthetic': False,
+ 'author': {
'name': 'bob',
- 'type': 10,
- 'status': 'absent',
+ 'fullname': 'Bob bob@alice.net',
+ 'email': 'bob@alice.net',
+ },
+ }
+
+ actual_release = converters.from_release(release_input)
+
+ assert actual_release == expected_release
+
+
+def test_from_revision():
+ revision_input = {
+ 'id': hashutil.hash_to_bytes(
+ '18d8be353ed3480476f032475e7c233eff7371d5'),
+ 'directory': hashutil.hash_to_bytes(
+ '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
+ 'author': {
+ 'name': b'Software Heritage',
+ 'fullname': b'robot robot@softwareheritage.org',
+ 'email': b'robot@softwareheritage.org',
+ },
+ 'committer': {
+ 'name': b'Software Heritage',
+ 'fullname': b'robot robot@softwareheritage.org',
+ 'email': b'robot@softwareheritage.org',
+ },
+ 'message': b'synthetic revision message',
+ 'date': {
+ 'timestamp': datetime.datetime(
+ 2000, 1, 17, 11, 23, 54,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'committer_date': {
+ 'timestamp': datetime.datetime(
+ 2000, 1, 17, 11, 23, 54,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'synthetic': True,
+ 'type': 'tar',
+ 'parents': [
+ hashutil.hash_to_bytes(
+ '29d8be353ed3480476f032475e7c244eff7371d5'),
+ hashutil.hash_to_bytes(
+ '30d8be353ed3480476f032475e7c244eff7371d5')
+ ],
+ 'children': [
+ hashutil.hash_to_bytes(
+ '123546353ed3480476f032475e7c244eff7371d5'),
+ ],
+ 'metadata': {
+ 'extra_headers': [['gpgsig', b'some-signature']],
+ 'original_artifact': [{
+ 'archive_type': 'tar',
+ 'name': 'webbase-5.7.0.tar.gz',
+ 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
+ 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
+ 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
+ '309d36484e7edf7bb912',
+
+ }]
+ },
+ }
+
+ expected_revision = {
+ 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
+ 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
+ 'author': {
+ 'name': 'Software Heritage',
+ 'fullname': 'robot robot@softwareheritage.org',
+ 'email': 'robot@softwareheritage.org',
+ },
+ 'committer': {
+ 'name': 'Software Heritage',
+ 'fullname': 'robot robot@softwareheritage.org',
+ 'email': 'robot@softwareheritage.org',
+ },
+ 'message': 'synthetic revision message',
+ 'date': "2000-01-17T11:23:54+00:00",
+ 'committer_date': "2000-01-17T11:23:54+00:00",
+ 'children': [
+ '123546353ed3480476f032475e7c244eff7371d5'
+ ],
+ 'parents': [
+ '29d8be353ed3480476f032475e7c244eff7371d5',
+ '30d8be353ed3480476f032475e7c244eff7371d5'
+ ],
+ 'type': 'tar',
+ 'synthetic': True,
+ 'metadata': {
+ 'extra_headers': [['gpgsig', 'some-signature']],
+ 'original_artifact': [{
+ 'archive_type': 'tar',
+ 'name': 'webbase-5.7.0.tar.gz',
+ 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
+ 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
+ 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
+ '309d36484e7edf7bb912'
+ }]
+ },
+ 'merge': True
+ }
+
+ actual_revision = converters.from_revision(revision_input)
+
+ assert actual_revision == expected_revision
+
+
+def test_from_revision_nomerge():
+ revision_input = {
+ 'id': hashutil.hash_to_bytes(
+ '18d8be353ed3480476f032475e7c233eff7371d5'),
+ 'parents': [
+ hashutil.hash_to_bytes(
+ '29d8be353ed3480476f032475e7c244eff7371d5')
+ ]
+ }
+
+ expected_revision = {
+ 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
+ 'parents': [
+ '29d8be353ed3480476f032475e7c244eff7371d5'
+ ],
+ 'merge': False
+ }
+
+ actual_revision = converters.from_revision(revision_input)
+
+ assert actual_revision == expected_revision
+
+
+def test_from_revision_noparents():
+ revision_input = {
+ 'id': hashutil.hash_to_bytes(
+ '18d8be353ed3480476f032475e7c233eff7371d5'),
+ 'directory': hashutil.hash_to_bytes(
+ '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
+ 'author': {
+ 'name': b'Software Heritage',
+ 'fullname': b'robot robot@softwareheritage.org',
+ 'email': b'robot@softwareheritage.org',
+ },
+ 'committer': {
+ 'name': b'Software Heritage',
+ 'fullname': b'robot robot@softwareheritage.org',
+ 'email': b'robot@softwareheritage.org',
+ },
+ 'message': b'synthetic revision message',
+ 'date': {
+ 'timestamp': datetime.datetime(
+ 2000, 1, 17, 11, 23, 54,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'committer_date': {
+ 'timestamp': datetime.datetime(
+ 2000, 1, 17, 11, 23, 54,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'synthetic': True,
+ 'type': 'tar',
+ 'children': [
+ hashutil.hash_to_bytes(
+ '123546353ed3480476f032475e7c244eff7371d5'),
+ ],
+ 'metadata': {
+ 'original_artifact': [{
+ 'archive_type': 'tar',
+ 'name': 'webbase-5.7.0.tar.gz',
+ 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
+ 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
+ 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
+ '309d36484e7edf7bb912',
+
+ }]
+ },
+ }
+
+ expected_revision = {
+ 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
+ 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
+ 'author': {
+ 'name': 'Software Heritage',
+ 'fullname': 'robot robot@softwareheritage.org',
+ 'email': 'robot@softwareheritage.org',
+ },
+ 'committer': {
+ 'name': 'Software Heritage',
+ 'fullname': 'robot robot@softwareheritage.org',
+ 'email': 'robot@softwareheritage.org',
+ },
+ 'message': 'synthetic revision message',
+ 'date': "2000-01-17T11:23:54+00:00",
+ 'committer_date': "2000-01-17T11:23:54+00:00",
+ 'children': [
+ '123546353ed3480476f032475e7c244eff7371d5'
+ ],
+ 'type': 'tar',
+ 'synthetic': True,
+ 'metadata': {
+ 'original_artifact': [{
+ 'archive_type': 'tar',
+ 'name': 'webbase-5.7.0.tar.gz',
+ 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
+ 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
+ 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
+ '309d36484e7edf7bb912'
+ }]
}
-
- # when
- actual_dir_entries = converters.from_directory_entry(dir_entries_input)
-
- # then
- self.assertEqual(actual_dir_entries, expected_dir_entries)
-
- def test_from_filetype(self):
- content_filetype = {
- 'id': hashutil.hash_to_bytes(
- '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
- 'encoding': 'utf-8',
- 'mimetype': 'text/plain',
- }
-
- expected_content_filetype = {
- 'id': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
- 'encoding': 'utf-8',
- 'mimetype': 'text/plain',
- }
-
- # when
- actual_content_filetype = converters.from_filetype(content_filetype)
-
- # then
- self.assertEqual(actual_content_filetype, expected_content_filetype)
+ }
+
+ actual_revision = converters.from_revision(revision_input)
+
+ assert actual_revision == expected_revision
+
+
+def test_from_revision_invalid():
+ revision_input = {
+ 'id': hashutil.hash_to_bytes(
+ '18d8be353ed3480476f032475e7c233eff7371d5'),
+ 'directory': hashutil.hash_to_bytes(
+ '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
+ 'author': {
+ 'name': b'Software Heritage',
+ 'fullname': b'robot robot@softwareheritage.org',
+ 'email': b'robot@softwareheritage.org',
+ },
+ 'committer': {
+ 'name': b'Software Heritage',
+ 'fullname': b'robot robot@softwareheritage.org',
+ 'email': b'robot@softwareheritage.org',
+ },
+ 'message': b'invalid message \xff',
+ 'date': {
+ 'timestamp': datetime.datetime(
+ 2000, 1, 17, 11, 23, 54,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'committer_date': {
+ 'timestamp': datetime.datetime(
+ 2000, 1, 17, 11, 23, 54,
+ tzinfo=datetime.timezone.utc).timestamp(),
+ 'offset': 0,
+ 'negative_utc': False,
+ },
+ 'synthetic': True,
+ 'type': 'tar',
+ 'parents': [
+ hashutil.hash_to_bytes(
+ '29d8be353ed3480476f032475e7c244eff7371d5'),
+ hashutil.hash_to_bytes(
+ '30d8be353ed3480476f032475e7c244eff7371d5')
+ ],
+ 'children': [
+ hashutil.hash_to_bytes(
+ '123546353ed3480476f032475e7c244eff7371d5'),
+ ],
+ 'metadata': {
+ 'original_artifact': [{
+ 'archive_type': 'tar',
+ 'name': 'webbase-5.7.0.tar.gz',
+ 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
+ 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
+ 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
+ '309d36484e7edf7bb912',
+
+ }]
+ },
+ }
+
+ expected_revision = {
+ 'id': '18d8be353ed3480476f032475e7c233eff7371d5',
+ 'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
+ 'author': {
+ 'name': 'Software Heritage',
+ 'fullname': 'robot robot@softwareheritage.org',
+ 'email': 'robot@softwareheritage.org',
+ },
+ 'committer': {
+ 'name': 'Software Heritage',
+ 'fullname': 'robot robot@softwareheritage.org',
+ 'email': 'robot@softwareheritage.org',
+ },
+ 'message': None,
+ 'message_decoding_failed': True,
+ 'date': "2000-01-17T11:23:54+00:00",
+ 'committer_date': "2000-01-17T11:23:54+00:00",
+ 'children': [
+ '123546353ed3480476f032475e7c244eff7371d5'
+ ],
+ 'parents': [
+ '29d8be353ed3480476f032475e7c244eff7371d5',
+ '30d8be353ed3480476f032475e7c244eff7371d5'
+ ],
+ 'type': 'tar',
+ 'synthetic': True,
+ 'metadata': {
+ 'original_artifact': [{
+ 'archive_type': 'tar',
+ 'name': 'webbase-5.7.0.tar.gz',
+ 'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
+ 'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
+ 'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
+ '309d36484e7edf7bb912'
+ }]
+ },
+ 'merge': True
+ }
+
+ actual_revision = converters.from_revision(revision_input)
+
+ assert actual_revision == expected_revision
+
+
+def test_from_content_none():
+ assert converters.from_content(None) is None
+
+
+def test_from_content():
+ content_input = {
+ 'sha1': hashutil.hash_to_bytes(
+ '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
+ 'sha256': hashutil.hash_to_bytes(
+ '39007420ca5de7cb3cfc15196335507e'
+ 'e76c98930e7e0afa4d2747d3bf96c926'),
+ 'blake2s256': hashutil.hash_to_bytes(
+ '49007420ca5de7cb3cfc15196335507e'
+ 'e76c98930e7e0afa4d2747d3bf96c926'),
+ 'sha1_git': hashutil.hash_to_bytes(
+ '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
+ 'ctime': 'something-which-is-filtered-out',
+ 'data': b'data in bytes',
+ 'length': 10,
+ 'status': 'hidden',
+ }
+
+ # 'status' is filtered
+ expected_content = {
+ 'checksums': {
+ 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
+ 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
+ '930e7e0afa4d2747d3bf96c926',
+ 'blake2s256': '49007420ca5de7cb3cfc15196335507ee7'
+ '6c98930e7e0afa4d2747d3bf96c926',
+ 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
+ },
+ 'data': b'data in bytes',
+ 'length': 10,
+ 'status': 'absent',
+ }
+
+ actual_content = converters.from_content(content_input)
+
+ assert actual_content == expected_content
+
+
+def test_from_person():
+ person_input = {
+ 'id': 10,
+ 'anything': 'else',
+ 'name': b'bob',
+ 'fullname': b'bob bob@alice.net',
+ 'email': b'bob@foo.alice',
+ }
+
+ expected_person = {
+ 'id': 10,
+ 'anything': 'else',
+ 'name': 'bob',
+ 'fullname': 'bob bob@alice.net',
+ 'email': 'bob@foo.alice',
+ }
+
+ actual_person = converters.from_person(person_input)
+
+ assert actual_person == expected_person
+
+
+def test_from_directory_entries():
+ dir_entries_input = {
+ 'sha1': hashutil.hash_to_bytes(
+ '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
+ 'sha256': hashutil.hash_to_bytes(
+ '39007420ca5de7cb3cfc15196335507e'
+ 'e76c98930e7e0afa4d2747d3bf96c926'),
+ 'sha1_git': hashutil.hash_to_bytes(
+ '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
+ 'blake2s256': hashutil.hash_to_bytes(
+ '685395c5dc57cada459364f0946d3dd45bad5fcbab'
+ 'c1048edb44380f1d31d0aa'),
+ 'target': hashutil.hash_to_bytes(
+ '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
+ 'dir_id': hashutil.hash_to_bytes(
+ '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
+ 'name': b'bob',
+ 'type': 10,
+ 'status': 'hidden',
+ }
+
+ expected_dir_entries = {
+ 'checksums': {
+ 'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
+ 'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
+ '930e7e0afa4d2747d3bf96c926',
+ 'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
+ 'blake2s256': '685395c5dc57cada459364f0946d3dd45bad5f'
+ 'cbabc1048edb44380f1d31d0aa',
+ },
+ 'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
+ 'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
+ 'name': 'bob',
+ 'type': 10,
+ 'status': 'absent',
+ }
+
+ actual_dir_entries = converters.from_directory_entry(dir_entries_input)
+
+ assert actual_dir_entries == expected_dir_entries
+
+
+def test_from_filetype():
+ content_filetype = {
+ 'id': hashutil.hash_to_bytes(
+ '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
+ 'encoding': 'utf-8',
+ 'mimetype': 'text/plain',
+ }
+
+ expected_content_filetype = {
+ 'id': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
+ 'encoding': 'utf-8',
+ 'mimetype': 'text/plain',
+ }
+
+ actual_content_filetype = converters.from_filetype(content_filetype)
+
+ assert actual_content_filetype == expected_content_filetype
diff --git a/swh/web/tests/common/test_highlightjs.py b/swh/web/tests/common/test_highlightjs.py
--- a/swh/web/tests/common/test_highlightjs.py
+++ b/swh/web/tests/common/test_highlightjs.py
@@ -1,126 +1,62 @@
-# Copyright (C) 2017-2018 The Software Heritage developers
+# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.common import highlightjs
-from swh.web.tests.testcase import WebTestCase
-class HighlightJsTestCase(WebTestCase):
+def test_get_hljs_language_from_mime_type():
+ lang = highlightjs.get_hljs_language_from_mime_type('text/plain')
+ assert lang is None
- def test_get_hljs_language_from_mime_type(self):
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-c')
+ assert lang == 'cpp'
- lang = highlightjs.get_hljs_language_from_mime_type('text/plain')
- self.assertEqual(lang, None)
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-c++')
+ assert lang == 'cpp'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-c')
- self.assertEqual(lang, 'cpp')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-perl')
+ assert lang == 'perl'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-c++')
- self.assertEqual(lang, 'cpp')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-python')
+ assert lang == 'python'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-perl')
- self.assertEqual(lang, 'perl')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-msdos-batch')
+ assert lang == 'dos'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-python')
- self.assertEqual(lang, 'python')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-tex')
+ assert lang == 'tex'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-msdos-batch') # noqa
- self.assertEqual(lang, 'dos')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-lisp')
+ assert lang == 'lisp'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-tex')
- self.assertEqual(lang, 'tex')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-java')
+ assert lang == 'java'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-lisp')
- self.assertEqual(lang, 'lisp')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-makefile')
+ assert lang == 'makefile'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-java')
- self.assertEqual(lang, 'java')
+ lang = highlightjs.get_hljs_language_from_mime_type('text/x-shellscript')
+ assert lang == 'bash'
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-makefile')
- self.assertEqual(lang, 'makefile')
+ lang = highlightjs.get_hljs_language_from_mime_type('image/png')
+ assert lang is None
- lang = highlightjs.get_hljs_language_from_mime_type('text/x-shellscript') # noqa
- self.assertEqual(lang, 'bash')
- lang = highlightjs.get_hljs_language_from_mime_type('image/png')
- self.assertEqual(lang, None)
+def test_get_hljs_language_from_filename():
- def test_get_hljs_language_from_filename(self):
-
- lang = highlightjs.get_hljs_language_from_filename('foo')
- self.assertEqual(lang, None)
-
- lang = highlightjs.get_hljs_language_from_filename('foo.h')
- self.assertEqual(lang, 'cpp')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.c')
- self.assertEqual(lang, 'cpp')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.c.in')
- self.assertEqual(lang, 'cpp')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.cpp')
- self.assertEqual(lang, 'cpp')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.pl')
- self.assertEqual(lang, 'perl')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.py')
- self.assertEqual(lang, 'python')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.md')
- self.assertEqual(lang, 'markdown')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.js')
- self.assertEqual(lang, 'javascript')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.bat')
- self.assertEqual(lang, 'dos')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.json')
- self.assertEqual(lang, 'json')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.yml')
- self.assertEqual(lang, 'yaml')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.ini')
- self.assertEqual(lang, 'ini')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.cfg')
- self.assertEqual(lang, 'ini')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.hy')
- self.assertEqual(lang, 'hy')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.lisp')
- self.assertEqual(lang, 'lisp')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.java')
- self.assertEqual(lang, 'java')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.sh')
- self.assertEqual(lang, 'bash')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.cmake')
- self.assertEqual(lang, 'cmake')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.ml')
- self.assertEqual(lang, 'ocaml')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.mli')
- self.assertEqual(lang, 'ocaml')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.rb')
- self.assertEqual(lang, 'ruby')
-
- lang = highlightjs.get_hljs_language_from_filename('foo.jl')
- self.assertEqual(lang, 'julia')
-
- lang = highlightjs.get_hljs_language_from_filename('Makefile')
- self.assertEqual(lang, 'makefile')
-
- lang = highlightjs.get_hljs_language_from_filename('CMakeLists.txt')
- self.assertEqual(lang, 'cmake')
+ for filename, language in (
+ ('foo', None), ('foo.h', 'cpp'), ('foo.c', 'cpp'), ('foo.c.in', 'cpp'),
+ ('foo.cpp', 'cpp'), ('foo.pl', 'perl'), ('foo.py', 'python'),
+ ('foo.md', 'markdown'), ('foo.js', 'javascript'), ('foo.bat', 'dos'),
+ ('foo.json', 'json'), ('foo.yml', 'yaml'), ('foo.ini', 'ini'),
+ ('foo.cfg', 'ini'), ('foo.hy', 'hy'), ('foo.lisp', 'lisp'),
+ ('foo.java', 'java'), ('foo.sh', 'bash'), ('foo.cmake', 'cmake'),
+ ('foo.ml', 'ocaml'), ('foo.mli', 'ocaml'), ('foo.rb', 'ruby'),
+ ('foo.jl', 'julia'), ('Makefile', 'makefile'),
+ ('CMakeLists.txt', 'cmake')
+ ):
+ lang = highlightjs.get_hljs_language_from_filename(filename)
+ assert lang == language
diff --git a/swh/web/tests/common/test_origin_save.py b/swh/web/tests/common/test_origin_save.py
--- a/swh/web/tests/common/test_origin_save.py
+++ b/swh/web/tests/common/test_origin_save.py
@@ -3,30 +3,52 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-import json
-import os
+import re
from datetime import datetime, timedelta, timezone
+from functools import partial
import pytest
-import requests_mock
+import requests
-from swh.web.common.models import (
- SaveOriginRequest
-)
+from swh.core.pytest_plugin import get_response_cb
+
+from swh.web.common.models import SaveOriginRequest
from swh.web.common.origin_save import get_save_origin_task_info
from swh.web.config import get_config
-_RESOURCES_PATH = os.path.join(os.path.dirname(__file__), '../resources')
-
_es_url = 'http://esnode1.internal.softwareheritage.org:9200'
_es_workers_index_url = '%s/swh_workers-*' % _es_url
+@pytest.fixture(autouse=True)
+def requests_mock_datadir(datadir, requests_mock_datadir):
+ """Override default behavior to deal with post method
+
+ """
+ cb = partial(get_response_cb, datadir=datadir)
+ requests_mock_datadir.post(re.compile('https?://'), body=cb)
+ return requests_mock_datadir
+
+
+@pytest.mark.django_db
+def test_get_save_origin_archived_task_info(mocker):
+ _get_save_origin_task_info_test(mocker, task_archived=True)
+
+
+@pytest.mark.django_db
+def test_get_save_origin_task_info_with_es(mocker):
+ _get_save_origin_task_info_test(mocker, es_available=True)
+
+
+@pytest.mark.django_db
+def test_get_save_origin_task_info_without_es(mocker):
+ _get_save_origin_task_info_test(mocker, es_available=False)
+
+
def _get_save_origin_task_info_test(mocker, task_archived=False,
es_available=True):
-
swh_web_config = get_config()
if es_available:
@@ -79,18 +101,11 @@
}
mock_scheduler.get_task_runs.return_value = [task_run]
- es_response = os.path.join(_RESOURCES_PATH,
- 'json/es_task_info_response.json')
- with open(es_response) as json_fd:
- es_response = json.load(json_fd)
+ es_response = requests.post('%s/_search' % _es_workers_index_url).json()
task_exec_data = es_response['hits']['hits'][-1]['_source']
- with requests_mock.Mocker() as requests_mocker:
- requests_mocker.register_uri('POST', _es_workers_index_url+'/_search',
- json=es_response)
-
- sor_task_info = get_save_origin_task_info(sor_id)
+ sor_task_info = get_save_origin_task_info(sor_id)
expected_result = {
'type': task['type'],
@@ -110,18 +125,3 @@
})
assert sor_task_info == expected_result
-
-
-@pytest.mark.django_db
-def test_get_save_origin_archived_task_info(mocker):
- _get_save_origin_task_info_test(mocker, task_archived=True)
-
-
-@pytest.mark.django_db
-def test_get_save_origin_task_info_with_es(mocker):
- _get_save_origin_task_info_test(mocker, es_available=True)
-
-
-@pytest.mark.django_db
-def test_get_save_origin_task_info_without_es(mocker):
- _get_save_origin_task_info_test(mocker, es_available=False)
diff --git a/swh/web/tests/common/test_origin_visits.py b/swh/web/tests/common/test_origin_visits.py
--- a/swh/web/tests/common/test_origin_visits.py
+++ b/swh/web/tests/common/test_origin_visits.py
@@ -1,114 +1,133 @@
-# Copyright (C) 2018 The Software Heritage developers
+# Copyright (C) 2018-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from unittest.mock import patch
+import pytest
from swh.web.common.exc import NotFoundExc
from swh.web.common.origin_visits import (
get_origin_visits, get_origin_visit
)
-from swh.web.tests.testcase import WebTestCase
-
-
-class OriginVisitsTestCase(WebTestCase):
- @patch('swh.web.common.service')
- def test_get_origin_visits(self, mock_service):
- mock_service.MAX_LIMIT = 2
-
- def _lookup_origin_visits(*args, **kwargs):
- if kwargs['last_visit'] is None:
- return [{'visit': 1,
- 'date': '2017-05-06T00:59:10+00:00',
- 'metadata': {}},
- {'visit': 2,
- 'date': '2017-08-06T00:59:10+00:00',
- 'metadata': {}}
- ]
- else:
- return [{'visit': 3,
- 'date': '2017-09-06T00:59:10+00:00',
- 'metadata': {}}
- ]
-
- mock_service.lookup_origin_visits.side_effect = _lookup_origin_visits
-
- origin_info = {
- 'id': 1,
- 'type': 'git',
- 'url': 'https://github.com/foo/bar',
+
+
+def test_get_origin_visits(mocker):
+ mock_service = mocker.patch('swh.web.common.service')
+ mock_service.MAX_LIMIT = 2
+
+ def _lookup_origin_visits(*args, **kwargs):
+ if kwargs['last_visit'] is None:
+ return [
+ {
+ 'visit': 1,
+ 'date': '2017-05-06T00:59:10+00:00',
+ 'metadata': {}
+ },
+ {
+ 'visit': 2,
+ 'date': '2017-08-06T00:59:10+00:00',
+ 'metadata': {}
+ }
+ ]
+ else:
+ return [
+ {
+ 'visit': 3,
+ 'date': '2017-09-06T00:59:10+00:00',
+ 'metadata': {}
+ }
+ ]
+
+ mock_service.lookup_origin_visits.side_effect = _lookup_origin_visits
+
+ origin_info = {
+ 'id': 1,
+ 'type': 'git',
+ 'url': 'https://github.com/foo/bar',
+ }
+
+ origin_visits = get_origin_visits(origin_info)
+
+ assert len(origin_visits) == 3
+
+
+def test_get_origin_visit(mocker):
+ mock_origin_visits = mocker.patch(
+ 'swh.web.common.origin_visits.get_origin_visits')
+ origin_info = {
+ 'id': 2,
+ 'type': 'git',
+ 'url': 'https://github.com/foo/bar',
+ }
+ visits = [
+ {
+ 'status': 'full',
+ 'date': '2015-07-09T21:09:24+00:00',
+ 'visit': 1,
+ 'origin': origin_info['id']
+ },
+ {
+ 'status': 'full',
+ 'date': '2016-02-23T18:05:23.312045+00:00',
+ 'visit': 2,
+ 'origin': origin_info['id']
+ },
+ {
+ 'status': 'full',
+ 'date': '2016-03-28T01:35:06.554111+00:00',
+ 'visit': 3,
+ 'origin': origin_info['id']
+ },
+ {
+ 'status': 'full',
+ 'date': '2016-06-18T01:22:24.808485+00:00',
+ 'visit': 4,
+ 'origin': origin_info['id']
+ },
+ {
+ 'status': 'full',
+ 'date': '2016-08-14T12:10:00.536702+00:00',
+ 'visit': 5,
+ 'origin': origin_info['id']
}
+ ]
+ mock_origin_visits.return_value = visits
- origin_visits = get_origin_visits(origin_info)
+ visit_id = 12
+ with pytest.raises(NotFoundExc) as e:
+ visit = get_origin_visit(origin_info,
+ visit_id=visit_id)
- self.assertEqual(len(origin_visits), 3)
+ assert e.match('Visit with id %s' % visit_id)
+ assert e.match('url %s' % origin_info['url'])
- @patch('swh.web.common.origin_visits.get_origin_visits')
- def test_get_origin_visit(self, mock_origin_visits):
- origin_info = {
- 'id': 2,
- 'type': 'git',
- 'url': 'https://github.com/foo/bar',
- }
- visits = \
- [{'status': 'full',
- 'date': '2015-07-09T21:09:24+00:00',
- 'visit': 1,
- 'origin': origin_info['id']},
- {'status': 'full',
- 'date': '2016-02-23T18:05:23.312045+00:00',
- 'visit': 2,
- 'origin': origin_info['id']},
- {'status': 'full',
- 'date': '2016-03-28T01:35:06.554111+00:00',
- 'visit': 3,
- 'origin': origin_info['id']},
- {'status': 'full',
- 'date': '2016-06-18T01:22:24.808485+00:00',
- 'visit': 4,
- 'origin': origin_info['id']},
- {'status': 'full',
- 'date': '2016-08-14T12:10:00.536702+00:00',
- 'visit': 5,
- 'origin': origin_info['id']}]
- mock_origin_visits.return_value = visits
-
- visit_id = 12
- with self.assertRaises(NotFoundExc) as cm:
- visit = get_origin_visit(origin_info,
- visit_id=visit_id)
- exception_text = cm.exception.args[0]
- self.assertIn('Visit with id %s' % visit_id, exception_text)
- self.assertIn('url %s' % origin_info['url'], exception_text)
-
- visit = get_origin_visit(origin_info, visit_id=2)
- self.assertEqual(visit, visits[1])
-
- visit = get_origin_visit(
- origin_info, visit_ts='2016-02-23T18:05:23.312045+00:00')
- self.assertEqual(visit, visits[1])
-
- visit = get_origin_visit(
- origin_info, visit_ts='2016-02-20')
- self.assertEqual(visit, visits[1])
-
- visit = get_origin_visit(
- origin_info, visit_ts='2016-06-18T01:22')
- self.assertEqual(visit, visits[3])
-
- visit = get_origin_visit(
- origin_info, visit_ts='2016-06-18 01:22')
- self.assertEqual(visit, visits[3])
-
- visit = get_origin_visit(
- origin_info, visit_ts=1466208000)
- self.assertEqual(visit, visits[3])
-
- visit = get_origin_visit(
- origin_info, visit_ts='2014-01-01')
- self.assertEqual(visit, visits[0])
-
- visit = get_origin_visit(
- origin_info, visit_ts='2018-01-01')
- self.assertEqual(visit, visits[-1])
+ visit = get_origin_visit(origin_info, visit_id=2)
+ assert visit == visits[1]
+
+ visit = get_origin_visit(
+ origin_info, visit_ts='2016-02-23T18:05:23.312045+00:00')
+ assert visit == visits[1]
+
+ visit = get_origin_visit(
+ origin_info, visit_ts='2016-02-20')
+ assert visit == visits[1]
+
+ visit = get_origin_visit(
+ origin_info, visit_ts='2016-06-18T01:22')
+ assert visit == visits[3]
+
+ visit = get_origin_visit(
+ origin_info, visit_ts='2016-06-18 01:22')
+ assert visit == visits[3]
+
+ visit = get_origin_visit(
+ origin_info, visit_ts=1466208000)
+ assert visit == visits[3]
+
+ visit = get_origin_visit(
+ origin_info, visit_ts='2014-01-01')
+ assert visit == visits[0]
+
+ visit = get_origin_visit(
+ origin_info, visit_ts='2018-01-01')
+ assert visit == visits[-1]
diff --git a/swh/web/tests/common/test_query.py b/swh/web/tests/common/test_query.py
--- a/swh/web/tests/common/test_query.py
+++ b/swh/web/tests/common/test_query.py
@@ -1,124 +1,125 @@
-# Copyright (C) 2015-2018 The Software Heritage developers
+# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from unittest.mock import patch
-
from swh.model import hashutil
from swh.web.common import query
from swh.web.common.exc import BadInputExc
-from swh.web.tests.testcase import WebTestCase
-
-
-class QueryTestCase(WebTestCase):
- def test_parse_hash_malformed_query_with_more_than_2_parts(self):
- with self.assertRaises(BadInputExc):
- query.parse_hash('sha1:1234567890987654:other-stuff')
-
- def test_parse_hash_guess_sha1(self):
- h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
- r = query.parse_hash(h)
- self.assertEqual(r, ('sha1', hashutil.hash_to_bytes(h)))
-
- def test_parse_hash_guess_sha256(self):
- h = '084C799CD551DD1D8D5C5F9A5D593B2' \
- 'E931F5E36122ee5c793c1d08a19839cc0'
- r = query.parse_hash(h)
- self.assertEqual(r, ('sha256', hashutil.hash_to_bytes(h)))
-
- def test_parse_hash_guess_algo_malformed_hash(self):
- with self.assertRaises(BadInputExc):
- query.parse_hash('1234567890987654')
-
- def test_parse_hash_check_sha1(self):
- h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
- r = query.parse_hash('sha1:' + h)
- self.assertEqual(r, ('sha1', hashutil.hash_to_bytes(h)))
-
- def test_parse_hash_check_sha1_git(self):
- h = 'e1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
- r = query.parse_hash('sha1_git:' + h)
- self.assertEqual(r, ('sha1_git', hashutil.hash_to_bytes(h)))
-
- def test_parse_hash_check_sha256(self):
- h = '084C799CD551DD1D8D5C5F9A5D593B2E931F5E36122ee5c793c1d08a19839cc0'
- r = query.parse_hash('sha256:' + h)
- self.assertEqual(r, ('sha256', hashutil.hash_to_bytes(h)))
-
- def test_parse_hash_check_algo_malformed_sha1_hash(self):
- with self.assertRaises(BadInputExc):
- query.parse_hash('sha1:1234567890987654')
-
- def test_parse_hash_check_algo_malformed_sha1_git_hash(self):
- with self.assertRaises(BadInputExc):
- query.parse_hash('sha1_git:1234567890987654')
-
- def test_parse_hash_check_algo_malformed_sha256_hash(self):
- with self.assertRaises(BadInputExc):
- query.parse_hash('sha256:1234567890987654')
-
- def test_parse_hash_check_algo_unknown_one(self):
- with self.assertRaises(BadInputExc):
- query.parse_hash('sha2:1234567890987654')
-
- @patch('swh.web.common.query.parse_hash')
- def test_parse_hash_with_algorithms_or_throws_bad_query(self, mock_hash):
- # given
- mock_hash.side_effect = BadInputExc('Error input')
-
- # when
- with self.assertRaises(BadInputExc) as cm:
- query.parse_hash_with_algorithms_or_throws(
- 'sha1:blah',
- ['sha1'],
- 'useless error message for this use case')
- self.assertIn('Error input', cm.exception.args[0])
-
- mock_hash.assert_called_once_with('sha1:blah')
-
- @patch('swh.web.common.query.parse_hash')
- def test_parse_hash_with_algorithms_or_throws_bad_algo(self, mock_hash):
- # given
- mock_hash.return_value = 'sha1', '123'
-
- # when
- with self.assertRaises(BadInputExc) as cm:
- query.parse_hash_with_algorithms_or_throws(
- 'sha1:431',
- ['sha1_git'],
- 'Only sha1_git!')
- self.assertIn('Only sha1_git!', cm.exception.args[0])
-
- mock_hash.assert_called_once_with('sha1:431')
-
- @patch('swh.web.common.query.parse_hash')
- def test_parse_hash_with_algorithms(self, mock_hash):
- # given
- mock_hash.return_value = ('sha256', b'123')
-
- # when
- algo, sha = query.parse_hash_with_algorithms_or_throws(
- 'sha256:123',
- ['sha256', 'sha1_git'],
+import pytest
+
+
+def test_parse_hash_malformed_query_with_more_than_2_parts():
+ with pytest.raises(BadInputExc):
+ query.parse_hash('sha1:1234567890987654:other-stuff')
+
+
+def test_parse_hash_guess_sha1():
+ h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
+ r = query.parse_hash(h)
+ assert r == ('sha1', hashutil.hash_to_bytes(h))
+
+
+def test_parse_hash_guess_sha256():
+ h = '084C799CD551DD1D8D5C5F9A5D593B2E931F5E36122ee5c793c1d08a19839cc0'
+ r = query.parse_hash(h)
+ assert r == ('sha256', hashutil.hash_to_bytes(h))
+
+
+def test_parse_hash_guess_algo_malformed_hash():
+ with pytest.raises(BadInputExc):
+ query.parse_hash('1234567890987654')
+
+
+def test_parse_hash_check_sha1():
+ h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
+ r = query.parse_hash('sha1:' + h)
+ assert r == ('sha1', hashutil.hash_to_bytes(h))
+
+
+def test_parse_hash_check_sha1_git():
+ h = 'e1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
+ r = query.parse_hash('sha1_git:' + h)
+ assert r == ('sha1_git', hashutil.hash_to_bytes(h))
+
+
+def test_parse_hash_check_sha256():
+ h = '084C799CD551DD1D8D5C5F9A5D593B2E931F5E36122ee5c793c1d08a19839cc0'
+ r = query.parse_hash('sha256:' + h)
+ assert r == ('sha256', hashutil.hash_to_bytes(h))
+
+
+def test_parse_hash_check_algo_malformed_sha1_hash():
+ with pytest.raises(BadInputExc):
+ query.parse_hash('sha1:1234567890987654')
+
+
+def test_parse_hash_check_algo_malformed_sha1_git_hash():
+ with pytest.raises(BadInputExc):
+ query.parse_hash('sha1_git:1234567890987654')
+
+
+def test_parse_hash_check_algo_malformed_sha256_hash():
+ with pytest.raises(BadInputExc):
+ query.parse_hash('sha256:1234567890987654')
+
+
+def test_parse_hash_check_algo_unknown_one():
+ with pytest.raises(BadInputExc):
+ query.parse_hash('sha2:1234567890987654')
+
+
+def test_parse_hash_with_algorithms_or_throws_bad_query(mocker):
+ mock_hash = mocker.patch('swh.web.common.query.parse_hash')
+ mock_hash.side_effect = BadInputExc('Error input')
+
+ with pytest.raises(BadInputExc) as e:
+ query.parse_hash_with_algorithms_or_throws(
+ 'sha1:blah',
+ ['sha1'],
'useless error message for this use case')
+ assert e.match('Error input')
+
+ mock_hash.assert_called_once_with('sha1:blah')
+
+
+def test_parse_hash_with_algorithms_or_throws_bad_algo(mocker):
+ mock_hash = mocker.patch('swh.web.common.query.parse_hash')
+ mock_hash.return_value = 'sha1', '123'
+
+ with pytest.raises(BadInputExc) as e:
+ query.parse_hash_with_algorithms_or_throws(
+ 'sha1:431',
+ ['sha1_git'],
+ 'Only sha1_git!')
+ assert e.match('Only sha1_git!')
+
+ mock_hash.assert_called_once_with('sha1:431')
+
+
+def test_parse_hash_with_algorithms(mocker):
+ mock_hash = mocker.patch('swh.web.common.query.parse_hash')
+ mock_hash.return_value = ('sha256', b'123')
+
+ algo, sha = query.parse_hash_with_algorithms_or_throws(
+ 'sha256:123',
+ ['sha256', 'sha1_git'],
+ 'useless error message for this use case')
+
+ assert algo == 'sha256'
+ assert sha == b'123'
+
+ mock_hash.assert_called_once_with('sha256:123')
- self.assertEqual(algo, 'sha256')
- self.assertEqual(sha, b'123')
- mock_hash.assert_called_once_with('sha256:123')
+def test_parse_uuid4():
+ actual_uuid = query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42cec')
- def test_parse_uuid4(self):
- # when
- actual_uuid = query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42cec')
+ assert actual_uuid == '7c33636b-8f11-4bda-89d9-ba8b76a42cec'
- # then
- self.assertEqual(actual_uuid, '7c33636b-8f11-4bda-89d9-ba8b76a42cec')
- def test_parse_uuid4_ko(self):
- # when
- with self.assertRaises(BadInputExc) as cm:
- query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42')
- self.assertIn('badly formed hexadecimal UUID string',
- cm.exception.args[0])
+def test_parse_uuid4_ko():
+ with pytest.raises(BadInputExc) as e:
+ query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42')
+ assert e.match('badly formed hexadecimal UUID string')
diff --git a/swh/web/tests/common/test_service.py b/swh/web/tests/common/test_service.py
--- a/swh/web/tests/common/test_service.py
+++ b/swh/web/tests/common/test_service.py
@@ -24,813 +24,779 @@
revision_with_submodules, empty_directory,
new_revision
)
-from swh.web.tests.testcase import (
- WebTestCase, ctags_json_missing, fossology_missing
-)
+from swh.web.tests.conftest import ctags_json_missing, fossology_missing
+
+
+@given(contents())
+def test_lookup_multiple_hashes_all_present(contents):
+ input_data = []
+ expected_output = []
+ for cnt in contents:
+ input_data.append({'sha1': cnt['sha1']})
+ expected_output.append({'sha1': cnt['sha1'],
+ 'found': True})
+ assert service.lookup_multiple_hashes(input_data) == expected_output
-class ServiceTestCase(WebTestCase):
- @given(contents())
- def test_lookup_multiple_hashes_all_present(self, contents):
- input_data = []
- expected_output = []
- for cnt in contents:
- input_data.append({'sha1': cnt['sha1']})
- expected_output.append({'sha1': cnt['sha1'],
- 'found': True})
+@given(contents(), unknown_contents())
+def test_lookup_multiple_hashes_some_missing(contents, unknown_contents):
+ input_contents = list(itertools.chain(contents, unknown_contents))
+ random.shuffle(input_contents)
- self.assertEqual(service.lookup_multiple_hashes(input_data),
- expected_output)
+ input_data = []
+ expected_output = []
+ for cnt in input_contents:
+ input_data.append({'sha1': cnt['sha1']})
+ expected_output.append({'sha1': cnt['sha1'],
+ 'found': cnt in contents})
- @given(contents(), unknown_contents())
- def test_lookup_multiple_hashes_some_missing(self, contents,
- unknown_contents):
- input_contents = list(itertools.chain(contents, unknown_contents))
- random.shuffle(input_contents)
+ assert service.lookup_multiple_hashes(input_data) == expected_output
- input_data = []
- expected_output = []
- for cnt in input_contents:
- input_data.append({'sha1': cnt['sha1']})
- expected_output.append({'sha1': cnt['sha1'],
- 'found': cnt in contents})
- self.assertEqual(service.lookup_multiple_hashes(input_data),
- expected_output)
+def test_lookup_hash_does_not_exist():
+ unknown_content_ = random_content()
- def test_lookup_hash_does_not_exist(self):
- unknown_content_ = random_content()
+ actual_lookup = service.lookup_hash('sha1_git:%s' %
+ unknown_content_['sha1_git'])
- actual_lookup = service.lookup_hash('sha1_git:%s' %
- unknown_content_['sha1_git'])
+ assert actual_lookup == {'found': None, 'algo': 'sha1_git'}
- self.assertEqual(actual_lookup, {'found': None,
- 'algo': 'sha1_git'})
- @given(content())
- def test_lookup_hash_exist(self, content):
+@given(content())
+def test_lookup_hash_exist(archive_data, content):
+ actual_lookup = service.lookup_hash('sha1:%s' % content['sha1'])
- actual_lookup = service.lookup_hash('sha1:%s' % content['sha1'])
+ content_metadata = archive_data.content_get_metadata(content['sha1'])
- content_metadata = self.content_get_metadata(content['sha1'])
+ assert {'found': content_metadata, 'algo': 'sha1'} == actual_lookup
- self.assertEqual({'found': content_metadata,
- 'algo': 'sha1'}, actual_lookup)
- def test_search_hash_does_not_exist(self):
- unknown_content_ = random_content()
+def test_search_hash_does_not_exist():
+ unknown_content_ = random_content()
- actual_lookup = service.search_hash('sha1_git:%s' %
- unknown_content_['sha1_git'])
+ actual_lookup = service.search_hash('sha1_git:%s' %
+ unknown_content_['sha1_git'])
- self.assertEqual({'found': False}, actual_lookup)
+ assert {'found': False} == actual_lookup
- @given(content())
- def test_search_hash_exist(self, content):
- actual_lookup = service.search_hash('sha1:%s' % content['sha1'])
+@given(content())
+def test_search_hash_exist(content):
+ actual_lookup = service.search_hash('sha1:%s' % content['sha1'])
- self.assertEqual({'found': True}, actual_lookup)
+ assert {'found': True} == actual_lookup
- @pytest.mark.skipif(ctags_json_missing,
- reason="requires ctags with json output support")
- @given(contents_with_ctags())
- def test_lookup_content_ctags(self, contents_with_ctags):
- content_sha1 = random.choice(contents_with_ctags['sha1s'])
- self.content_add_ctags(content_sha1)
- actual_ctags = \
- list(service.lookup_content_ctags('sha1:%s' % content_sha1))
+@pytest.mark.skipif(ctags_json_missing,
+ reason="requires ctags with json output support")
+@given(contents_with_ctags())
+def test_lookup_content_ctags(indexer_data, contents_with_ctags):
+ content_sha1 = random.choice(contents_with_ctags['sha1s'])
+ indexer_data.content_add_ctags(content_sha1)
+ actual_ctags = list(service.lookup_content_ctags('sha1:%s' % content_sha1))
- expected_data = list(self.content_get_ctags(content_sha1))
- for ctag in expected_data:
- ctag['id'] = content_sha1
+ expected_data = list(indexer_data.content_get_ctags(content_sha1))
+ for ctag in expected_data:
+ ctag['id'] = content_sha1
- self.assertEqual(actual_ctags, expected_data)
+ assert actual_ctags == expected_data
- def test_lookup_content_ctags_no_hash(self):
- unknown_content_ = random_content()
- actual_ctags = \
- list(service.lookup_content_ctags('sha1:%s' %
- unknown_content_['sha1']))
+def test_lookup_content_ctags_no_hash():
+ unknown_content_ = random_content()
- self.assertEqual(actual_ctags, [])
+ actual_ctags = list(service.lookup_content_ctags('sha1:%s' %
+ unknown_content_['sha1']))
- @given(content())
- def test_lookup_content_filetype(self, content):
+ assert actual_ctags == []
- self.content_add_mimetype(content['sha1'])
- actual_filetype = service.lookup_content_filetype(content['sha1'])
- expected_filetype = self.content_get_mimetype(content['sha1'])
- self.assertEqual(actual_filetype, expected_filetype)
+@given(content())
+def test_lookup_content_filetype(indexer_data, content):
+ indexer_data.content_add_mimetype(content['sha1'])
+ actual_filetype = service.lookup_content_filetype(content['sha1'])
- @pytest.mark.xfail # Language indexer is disabled.
- @given(content())
- def test_lookup_content_language(self, content):
+ expected_filetype = indexer_data.content_get_mimetype(content['sha1'])
+ assert actual_filetype == expected_filetype
- self.content_add_language(content['sha1'])
- actual_language = service.lookup_content_language(content['sha1'])
- expected_language = self.content_get_language(content['sha1'])
- self.assertEqual(actual_language, expected_language)
+@pytest.mark.skip # Language indexer is disabled.
+@given(content())
+def test_lookup_content_language(indexer_data, content):
+ indexer_data.content_add_language(content['sha1'])
+ actual_language = service.lookup_content_language(content['sha1'])
- @given(contents_with_ctags())
- def test_lookup_expression(self, contents_with_ctags):
+ expected_language = indexer_data.content_get_language(content['sha1'])
+ assert actual_language == expected_language
- per_page = 10
- expected_ctags = []
- for content_sha1 in contents_with_ctags['sha1s']:
+@given(contents_with_ctags())
+def test_lookup_expression(indexer_data, contents_with_ctags):
+ per_page = 10
+ expected_ctags = []
+
+ for content_sha1 in contents_with_ctags['sha1s']:
+ if len(expected_ctags) == per_page:
+ break
+ indexer_data.content_add_ctags(content_sha1)
+ for ctag in indexer_data.content_get_ctags(content_sha1):
if len(expected_ctags) == per_page:
break
- self.content_add_ctags(content_sha1)
- for ctag in self.content_get_ctags(content_sha1):
- if len(expected_ctags) == per_page:
- break
- if ctag['name'] == contents_with_ctags['symbol_name']:
- del ctag['id']
- ctag['sha1'] = content_sha1
- expected_ctags.append(ctag)
+ if ctag['name'] == contents_with_ctags['symbol_name']:
+ del ctag['id']
+ ctag['sha1'] = content_sha1
+ expected_ctags.append(ctag)
- actual_ctags = \
- list(service.lookup_expression(contents_with_ctags['symbol_name'],
- last_sha1=None, per_page=10))
+ actual_ctags = list(
+ service.lookup_expression(contents_with_ctags['symbol_name'],
+ last_sha1=None, per_page=10))
- self.assertEqual(actual_ctags, expected_ctags)
+ assert actual_ctags == expected_ctags
- def test_lookup_expression_no_result(self):
- expected_ctags = []
+def test_lookup_expression_no_result():
+ expected_ctags = []
- actual_ctags = \
- list(service.lookup_expression('barfoo', last_sha1=None,
- per_page=10))
- self.assertEqual(actual_ctags, expected_ctags)
+ actual_ctags = list(service.lookup_expression('barfoo', last_sha1=None,
+ per_page=10))
+ assert actual_ctags == expected_ctags
- @pytest.mark.skipif(fossology_missing,
- reason="requires fossology-nomossa installed")
- @given(content())
- def test_lookup_content_license(self, content):
- self.content_add_license(content['sha1'])
- actual_license = service.lookup_content_license(content['sha1'])
+@pytest.mark.skipif(fossology_missing,
+ reason="requires fossology-nomossa installed")
+@given(content())
+def test_lookup_content_license(indexer_data, content):
+ indexer_data.content_add_license(content['sha1'])
+ actual_license = service.lookup_content_license(content['sha1'])
- expected_license = self.content_get_license(content['sha1'])
- self.assertEqual(actual_license, expected_license)
+ expected_license = indexer_data.content_get_license(content['sha1'])
+ assert actual_license == expected_license
- def test_stat_counters(self):
- actual_stats = service.stat_counters()
- self.assertEqual(actual_stats, self.storage.stat_counters())
- @given(new_origin(), visit_dates())
- def test_lookup_origin_visits(self, new_origin, visit_dates):
+def test_stat_counters(archive_data):
+ actual_stats = service.stat_counters()
+ assert actual_stats == archive_data.stat_counters()
- self.storage.origin_add_one(new_origin)
- for ts in visit_dates:
- self.storage.origin_visit_add(new_origin['url'], ts, type='git')
- actual_origin_visits = list(
- service.lookup_origin_visits(new_origin['url'], per_page=100))
+@given(new_origin(), visit_dates())
+def test_lookup_origin_visits(archive_data, new_origin, visit_dates):
+ archive_data.origin_add_one(new_origin)
+ for ts in visit_dates:
+ archive_data.origin_visit_add(
+ new_origin['url'], ts, type='git')
- expected_visits = self.origin_visit_get(new_origin['url'])
- for expected_visit in expected_visits:
- expected_visit['origin'] = new_origin['url']
+ actual_origin_visits = list(
+ service.lookup_origin_visits(new_origin['url'], per_page=100))
- self.assertEqual(actual_origin_visits, expected_visits)
+ expected_visits = archive_data.origin_visit_get(new_origin['url'])
+ for expected_visit in expected_visits:
+ expected_visit['origin'] = new_origin['url']
- @given(new_origin(), visit_dates())
- def test_lookup_origin_visit(self, new_origin, visit_dates):
- self.storage.origin_add_one(new_origin)
- visits = []
- for ts in visit_dates:
- visits.append(self.storage.origin_visit_add(
- new_origin['url'], ts, type='git'))
+ assert actual_origin_visits == expected_visits
- visit = random.choice(visits)['visit']
- actual_origin_visit = service.lookup_origin_visit(
- new_origin['url'], visit)
- expected_visit = dict(self.storage.origin_visit_get_by(
- new_origin['url'], visit))
- expected_visit['date'] = expected_visit['date'].isoformat()
- expected_visit['metadata'] = {}
- expected_visit['origin'] = new_origin['url']
+@given(new_origin(), visit_dates())
+def test_lookup_origin_visit(archive_data, new_origin, visit_dates):
+ archive_data.origin_add_one(new_origin)
+ visits = []
+ for ts in visit_dates:
+ visits.append(archive_data.origin_visit_add(
+ new_origin['url'], ts, type='git'))
+
+ visit = random.choice(visits)['visit']
+ actual_origin_visit = service.lookup_origin_visit(
+ new_origin['url'], visit)
+
+ expected_visit = dict(archive_data.origin_visit_get_by(
+ new_origin['url'], visit))
+
+ assert actual_origin_visit == expected_visit
+
+
+@given(new_origin())
+def test_lookup_origin(archive_data, new_origin):
+ archive_data.origin_add_one(new_origin)
+
+ actual_origin = service.lookup_origin({'url': new_origin['url']})
+ expected_origin = archive_data.origin_get(
+ {'url': new_origin['url']})
+ assert actual_origin == expected_origin
+
+
+@given(invalid_sha1())
+def test_lookup_release_ko_id_checksum_not_a_sha1(invalid_sha1):
+ with pytest.raises(BadInputExc) as e:
+ service.lookup_release(invalid_sha1)
+ assert e.match('Invalid checksum')
+
+
+@given(sha256())
+def test_lookup_release_ko_id_checksum_too_long(sha256):
+ with pytest.raises(BadInputExc) as e:
+ service.lookup_release(sha256)
+ assert e.match('Only sha1_git is supported.')
+
+
+@given(directory())
+def test_lookup_directory_with_path_not_found(directory):
+ path = 'some/invalid/path/here'
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_directory_with_path(directory, path)
+ assert e.match('Directory entry with path %s from %s not found' %
+ (path, directory))
+
+
+@given(directory())
+def test_lookup_directory_with_path_found(archive_data, directory):
+ directory_content = archive_data.directory_ls(directory)
+ directory_entry = random.choice(directory_content)
+ path = directory_entry['name']
+ actual_result = service.lookup_directory_with_path(directory, path)
+ assert actual_result == directory_entry
+
+
+@given(release())
+def test_lookup_release(archive_data, release):
+ actual_release = service.lookup_release(release)
- self.assertEqual(actual_origin_visit, expected_visit)
-
- @given(new_origin())
- def test_lookup_origin(self, new_origin):
- self.storage.origin_add_one(new_origin)
-
- actual_origin = service.lookup_origin({'url': new_origin['url']})
- expected_origin = self.storage.origin_get({'url': new_origin['url']})
- self.assertEqual(actual_origin, expected_origin)
-
- @given(invalid_sha1())
- def test_lookup_release_ko_id_checksum_not_a_sha1(self, invalid_sha1):
- with self.assertRaises(BadInputExc) as cm:
- service.lookup_release(invalid_sha1)
- self.assertIn('invalid checksum', cm.exception.args[0].lower())
-
- @given(sha256())
- def test_lookup_release_ko_id_checksum_too_long(self, sha256):
- with self.assertRaises(BadInputExc) as cm:
- service.lookup_release(sha256)
- self.assertEqual('Only sha1_git is supported.', cm.exception.args[0])
-
- @given(directory())
- def test_lookup_directory_with_path_not_found(self, directory):
- path = 'some/invalid/path/here'
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_directory_with_path(directory, path)
- self.assertEqual('Directory entry with path %s from %s '
- 'not found' % (path, directory),
- cm.exception.args[0])
-
- @given(directory())
- def test_lookup_directory_with_path_found(self, directory):
- directory_content = self.directory_ls(directory)
- directory_entry = random.choice(directory_content)
- path = directory_entry['name']
- actual_result = service.lookup_directory_with_path(directory, path)
- self.assertEqual(actual_result, directory_entry)
-
- @given(release())
- def test_lookup_release(self, release):
- actual_release = service.lookup_release(release)
-
- self.assertEqual(actual_release,
- self.release_get(release))
-
- @given(revision(), invalid_sha1(), sha256())
- def test_lookup_revision_with_context_ko_not_a_sha1(self, revision,
- invalid_sha1,
- sha256):
- sha1_git_root = revision
- sha1_git = invalid_sha1
-
- with self.assertRaises(BadInputExc) as cm:
- service.lookup_revision_with_context(sha1_git_root, sha1_git)
- self.assertIn('Invalid checksum query string', cm.exception.args[0])
-
- sha1_git = sha256
-
- with self.assertRaises(BadInputExc) as cm:
- service.lookup_revision_with_context(sha1_git_root, sha1_git)
- self.assertIn('Only sha1_git is supported', cm.exception.args[0])
-
- @given(revision(), unknown_revision())
- def test_lookup_revision_with_context_ko_sha1_git_does_not_exist(
- self, revision, unknown_revision):
- sha1_git_root = revision
- sha1_git = unknown_revision
-
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_revision_with_context(sha1_git_root, sha1_git)
- self.assertIn('Revision %s not found' % sha1_git, cm.exception.args[0])
-
- @given(revision(), unknown_revision())
- def test_lookup_revision_with_context_ko_root_sha1_git_does_not_exist(
- self, revision, unknown_revision):
- sha1_git_root = unknown_revision
- sha1_git = revision
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_revision_with_context(sha1_git_root, sha1_git)
- self.assertIn('Revision root %s not found' % sha1_git_root,
- cm.exception.args[0])
-
- @given(ancestor_revisions())
- def test_lookup_revision_with_context(self, ancestor_revisions):
- sha1_git = ancestor_revisions['sha1_git']
- root_sha1_git = ancestor_revisions['sha1_git_root']
- for sha1_git_root in (root_sha1_git,
- {'id': hash_to_bytes(root_sha1_git)}):
- actual_revision = \
- service.lookup_revision_with_context(sha1_git_root,
- sha1_git)
-
- children = []
- for rev in self.revision_log(root_sha1_git):
- for p_rev in rev['parents']:
- p_rev_hex = hash_to_hex(p_rev)
- if p_rev_hex == sha1_git:
- children.append(rev['id'])
-
- expected_revision = self.revision_get(sha1_git)
- expected_revision['children'] = children
- self.assertEqual(actual_revision, expected_revision)
-
- @given(non_ancestor_revisions())
- def test_lookup_revision_with_context_ko(self, non_ancestor_revisions):
- sha1_git = non_ancestor_revisions['sha1_git']
- root_sha1_git = non_ancestor_revisions['sha1_git_root']
-
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_revision_with_context(root_sha1_git, sha1_git)
- self.assertIn('Revision %s is not an ancestor of %s' %
- (sha1_git, root_sha1_git), cm.exception.args[0])
-
- def test_lookup_directory_with_revision_not_found(self):
- unknown_revision_ = random_sha1()
-
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_directory_with_revision(unknown_revision_)
- self.assertIn('Revision %s not found' % unknown_revision_,
- cm.exception.args[0])
-
- def test_lookup_directory_with_revision_unknown_content(self):
- unknown_content_ = random_content()
- unknown_revision_ = random_sha1()
- unknown_directory_ = random_sha1()
-
- dir_path = 'README.md'
- # Create a revision that points to a directory
- # Which points to unknown content
- revision = {
- 'author': {
- 'name': b'abcd',
- 'email': b'abcd@company.org',
- 'fullname': b'abcd abcd'
- },
- 'committer': {
- 'email': b'aaaa@company.org',
- 'fullname': b'aaaa aaa',
- 'name': b'aaa'
- },
- 'committer_date': {
- 'negative_utc': False,
- 'offset': 0,
- 'timestamp': 1437511651
- },
- 'date': {
- 'negative_utc': False,
- 'offset': 0,
- 'timestamp': 1437511651
- },
- 'message': b'bleh',
- 'metadata': [],
- 'parents': [],
- 'synthetic': False,
- 'type': 'git',
- 'id': hash_to_bytes(unknown_revision_),
- 'directory': hash_to_bytes(unknown_directory_)
- }
- # A directory that points to unknown content
- dir = {
- 'id': hash_to_bytes(unknown_directory_),
- 'entries': [{
- 'name': bytes(dir_path.encode('utf-8')),
- 'type': 'file',
- 'target': hash_to_bytes(unknown_content_['sha1_git']),
- 'perms': DentryPerms.content
- }]
- }
- # Add the directory and revision in mem
- self.storage.directory_add([dir])
- self.storage.revision_add([revision])
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_directory_with_revision(
- unknown_revision_, dir_path)
- self.assertIn('Content not found for revision %s' %
- unknown_revision_,
- cm.exception.args[0])
-
- @given(revision())
- def test_lookup_directory_with_revision_ko_path_to_nowhere(
- self, revision):
- invalid_path = 'path/to/something/unknown'
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_directory_with_revision(revision, invalid_path)
- exception_text = cm.exception.args[0].lower()
- self.assertIn('directory or file', exception_text)
- self.assertIn(invalid_path, exception_text)
- self.assertIn('revision %s' % revision, exception_text)
- self.assertIn('not found', exception_text)
-
- @given(revision_with_submodules())
- def test_lookup_directory_with_revision_submodules(
- self, revision_with_submodules):
-
- rev_sha1_git = revision_with_submodules['rev_sha1_git']
- rev_dir_path = revision_with_submodules['rev_dir_rev_path']
-
- actual_data = service.lookup_directory_with_revision(
- rev_sha1_git, rev_dir_path)
-
- revision = self.revision_get(revision_with_submodules['rev_sha1_git'])
- directory = self.directory_ls(revision['directory'])
- rev_entry = next(e for e in directory if e['name'] == rev_dir_path)
-
- expected_data = {
- 'content': self.revision_get(rev_entry['target']),
- 'path': rev_dir_path,
- 'revision': rev_sha1_git,
- 'type': 'rev'
- }
-
- self.assertEqual(actual_data, expected_data)
-
- @given(revision())
- def test_lookup_directory_with_revision_without_path(self, revision):
-
- actual_directory_entries = \
- service.lookup_directory_with_revision(revision)
-
- revision_data = self.revision_get(revision)
- expected_directory_entries = \
- self.directory_ls(revision_data['directory'])
-
- self.assertEqual(actual_directory_entries['type'], 'dir')
- self.assertEqual(actual_directory_entries['content'],
- expected_directory_entries)
-
- @given(revision())
- def test_lookup_directory_with_revision_with_path(self, revision):
-
- revision_data = self.revision_get(revision)
- dir_entries = [e for e in self.directory_ls(revision_data['directory'])
- if e['type'] in ('file', 'dir')]
- expected_dir_entry = random.choice(dir_entries)
-
- actual_dir_entry = \
- service.lookup_directory_with_revision(revision,
- expected_dir_entry['name'])
-
- self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
- self.assertEqual(actual_dir_entry['revision'], revision)
- self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
- if actual_dir_entry['type'] == 'file':
- del actual_dir_entry['content']['checksums']['blake2s256']
- for key in ('checksums', 'status', 'length'):
- self.assertEqual(actual_dir_entry['content'][key],
- expected_dir_entry[key])
- else:
- sub_dir_entries = self.directory_ls(expected_dir_entry['target'])
- self.assertEqual(actual_dir_entry['content'], sub_dir_entries)
-
- @given(revision())
- def test_lookup_directory_with_revision_with_path_to_file_and_data(
- self, revision):
-
- revision_data = self.revision_get(revision)
- dir_entries = [e for e in self.directory_ls(revision_data['directory'])
- if e['type'] == 'file']
- expected_dir_entry = random.choice(dir_entries)
- expected_data = \
- self.content_get(expected_dir_entry['checksums']['sha1'])
-
- actual_dir_entry = \
- service.lookup_directory_with_revision(revision,
- expected_dir_entry['name'],
- with_data=True)
-
- self.assertEqual(actual_dir_entry['type'], expected_dir_entry['type'])
- self.assertEqual(actual_dir_entry['revision'], revision)
- self.assertEqual(actual_dir_entry['path'], expected_dir_entry['name'])
+ assert actual_release == archive_data.release_get(release)
+
+
+@given(revision(), invalid_sha1(), sha256())
+def test_lookup_revision_with_context_ko_not_a_sha1(revision,
+ invalid_sha1,
+ sha256):
+ sha1_git_root = revision
+ sha1_git = invalid_sha1
+
+ with pytest.raises(BadInputExc) as e:
+ service.lookup_revision_with_context(sha1_git_root, sha1_git)
+ assert e.match('Invalid checksum query string')
+
+ sha1_git = sha256
+
+ with pytest.raises(BadInputExc) as e:
+ service.lookup_revision_with_context(sha1_git_root, sha1_git)
+ assert e.match('Only sha1_git is supported')
+
+
+@given(revision(), unknown_revision())
+def test_lookup_revision_with_context_ko_sha1_git_does_not_exist(
+ revision, unknown_revision):
+ sha1_git_root = revision
+ sha1_git = unknown_revision
+
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_revision_with_context(sha1_git_root, sha1_git)
+ assert e.match('Revision %s not found' % sha1_git)
+
+
+@given(revision(), unknown_revision())
+def test_lookup_revision_with_context_ko_root_sha1_git_does_not_exist(
+ revision, unknown_revision):
+ sha1_git_root = unknown_revision
+ sha1_git = revision
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_revision_with_context(sha1_git_root, sha1_git)
+ assert e.match('Revision root %s not found' % sha1_git_root)
+
+
+@given(ancestor_revisions())
+def test_lookup_revision_with_context(archive_data, ancestor_revisions):
+ sha1_git = ancestor_revisions['sha1_git']
+ root_sha1_git = ancestor_revisions['sha1_git_root']
+ for sha1_git_root in (root_sha1_git,
+ {'id': hash_to_bytes(root_sha1_git)}):
+ actual_revision = service.lookup_revision_with_context(sha1_git_root,
+ sha1_git)
+
+ children = []
+ for rev in archive_data.revision_log(root_sha1_git):
+ for p_rev in rev['parents']:
+ p_rev_hex = hash_to_hex(p_rev)
+ if p_rev_hex == sha1_git:
+ children.append(rev['id'])
+
+ expected_revision = archive_data.revision_get(sha1_git)
+ expected_revision['children'] = children
+ assert actual_revision == expected_revision
+
+
+@given(non_ancestor_revisions())
+def test_lookup_revision_with_context_ko(non_ancestor_revisions):
+ sha1_git = non_ancestor_revisions['sha1_git']
+ root_sha1_git = non_ancestor_revisions['sha1_git_root']
+
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_revision_with_context(root_sha1_git, sha1_git)
+ assert e.match('Revision %s is not an ancestor of %s' %
+ (sha1_git, root_sha1_git))
+
+
+def test_lookup_directory_with_revision_not_found():
+ unknown_revision_ = random_sha1()
+
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_directory_with_revision(unknown_revision_)
+ assert e.match('Revision %s not found' % unknown_revision_)
+
+
+def test_lookup_directory_with_revision_unknown_content(archive_data):
+ unknown_content_ = random_content()
+ unknown_revision_ = random_sha1()
+ unknown_directory_ = random_sha1()
+
+ dir_path = 'README.md'
+ # Create a revision that points to a directory
+ # Which points to unknown content
+ revision = {
+ 'author': {
+ 'name': b'abcd',
+ 'email': b'abcd@company.org',
+ 'fullname': b'abcd abcd'
+ },
+ 'committer': {
+ 'email': b'aaaa@company.org',
+ 'fullname': b'aaaa aaa',
+ 'name': b'aaa'
+ },
+ 'committer_date': {
+ 'negative_utc': False,
+ 'offset': 0,
+ 'timestamp': 1437511651
+ },
+ 'date': {
+ 'negative_utc': False,
+ 'offset': 0,
+ 'timestamp': 1437511651
+ },
+ 'message': b'bleh',
+ 'metadata': [],
+ 'parents': [],
+ 'synthetic': False,
+ 'type': 'git',
+ 'id': hash_to_bytes(unknown_revision_),
+ 'directory': hash_to_bytes(unknown_directory_)
+ }
+ # A directory that points to unknown content
+ dir = {
+ 'id': hash_to_bytes(unknown_directory_),
+ 'entries': [{
+ 'name': bytes(dir_path.encode('utf-8')),
+ 'type': 'file',
+ 'target': hash_to_bytes(unknown_content_['sha1_git']),
+ 'perms': DentryPerms.content
+ }]
+ }
+ # Add the directory and revision in mem
+ archive_data.directory_add([dir])
+ archive_data.revision_add([revision])
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_directory_with_revision(unknown_revision_, dir_path)
+ assert e.match('Content not found for revision %s' % unknown_revision_)
+
+
+@given(revision())
+def test_lookup_directory_with_revision_ko_path_to_nowhere(revision):
+ invalid_path = 'path/to/something/unknown'
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_directory_with_revision(revision, invalid_path)
+ assert e.match('Directory or File')
+ assert e.match(invalid_path)
+ assert e.match('revision %s' % revision)
+ assert e.match('not found')
+
+
+@given(revision_with_submodules())
+def test_lookup_directory_with_revision_submodules(archive_data,
+ revision_with_submodules):
+ rev_sha1_git = revision_with_submodules['rev_sha1_git']
+ rev_dir_path = revision_with_submodules['rev_dir_rev_path']
+
+ actual_data = service.lookup_directory_with_revision(
+ rev_sha1_git, rev_dir_path)
+
+ revision = archive_data.revision_get(
+ revision_with_submodules['rev_sha1_git'])
+ directory = archive_data.directory_ls(revision['directory'])
+ rev_entry = next(e for e in directory if e['name'] == rev_dir_path)
+
+ expected_data = {
+ 'content': archive_data.revision_get(rev_entry['target']),
+ 'path': rev_dir_path,
+ 'revision': rev_sha1_git,
+ 'type': 'rev'
+ }
+
+ assert actual_data == expected_data
+
+
+@given(revision())
+def test_lookup_directory_with_revision_without_path(archive_data, revision):
+ actual_directory_entries = service.lookup_directory_with_revision(revision)
+
+ revision_data = archive_data.revision_get(revision)
+ expected_directory_entries = archive_data.directory_ls(
+ revision_data['directory'])
+
+ assert actual_directory_entries['type'] == 'dir'
+ assert actual_directory_entries['content'] == expected_directory_entries
+
+
+@given(revision())
+def test_lookup_directory_with_revision_with_path(archive_data, revision):
+ rev_data = archive_data.revision_get(revision)
+ dir_entries = [e for e in archive_data.directory_ls(rev_data['directory'])
+ if e['type'] in ('file', 'dir')]
+ expected_dir_entry = random.choice(dir_entries)
+
+ actual_dir_entry = service.lookup_directory_with_revision(
+ revision, expected_dir_entry['name'])
+
+ assert actual_dir_entry['type'] == expected_dir_entry['type']
+ assert actual_dir_entry['revision'] == revision
+ assert actual_dir_entry['path'] == expected_dir_entry['name']
+ if actual_dir_entry['type'] == 'file':
del actual_dir_entry['content']['checksums']['blake2s256']
for key in ('checksums', 'status', 'length'):
- self.assertEqual(actual_dir_entry['content'][key],
- expected_dir_entry[key])
- self.assertEqual(actual_dir_entry['content']['data'],
- expected_data['data'])
+ assert actual_dir_entry['content'][key] == expected_dir_entry[key]
+ else:
+ sub_dir_entries = archive_data.directory_ls(
+ expected_dir_entry['target'])
+ assert actual_dir_entry['content'] == sub_dir_entries
+
+
+@given(revision())
+def test_lookup_directory_with_revision_with_path_to_file_and_data(
+ archive_data, revision):
+ rev_data = archive_data.revision_get(revision)
+ dir_entries = [e for e in archive_data.directory_ls(rev_data['directory'])
+ if e['type'] == 'file']
+ expected_dir_entry = random.choice(dir_entries)
+ expected_data = archive_data.content_get(
+ expected_dir_entry['checksums']['sha1'])
+
+ actual_dir_entry = service.lookup_directory_with_revision(
+ revision, expected_dir_entry['name'], with_data=True)
+
+ assert actual_dir_entry['type'] == expected_dir_entry['type']
+ assert actual_dir_entry['revision'] == revision
+ assert actual_dir_entry['path'] == expected_dir_entry['name']
+ del actual_dir_entry['content']['checksums']['blake2s256']
+ for key in ('checksums', 'status', 'length'):
+ assert actual_dir_entry['content'][key] == expected_dir_entry[key]
+ assert actual_dir_entry['content']['data'] == expected_data['data']
+
+
+@given(revision())
+def test_lookup_revision(archive_data, revision):
+ actual_revision = service.lookup_revision(revision)
+ assert actual_revision == archive_data.revision_get(revision)
+
+
+@given(new_revision())
+def test_lookup_revision_invalid_msg(archive_data, new_revision):
+ new_revision['message'] = b'elegant fix for bug \xff'
+ archive_data.revision_add([new_revision])
+
+ revision = service.lookup_revision(hash_to_hex(new_revision['id']))
+ assert revision['message'] is None
+ assert revision['message_decoding_failed'] is True
+
+
+@given(new_revision())
+def test_lookup_revision_msg_ok(archive_data, new_revision):
+ archive_data.revision_add([new_revision])
+
+ revision_message = service.lookup_revision_message(
+ hash_to_hex(new_revision['id']))
+
+ assert revision_message == {'message': new_revision['message']}
+
+
+def test_lookup_revision_msg_no_rev():
+ unknown_revision_ = random_sha1()
+
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_revision_message(unknown_revision_)
+
+ assert e.match('Revision with sha1_git %s not found.' % unknown_revision_)
+
+
+@given(revisions())
+def test_lookup_revision_multiple(archive_data, revisions):
+ actual_revisions = list(service.lookup_revision_multiple(revisions))
+
+ expected_revisions = []
+ for rev in revisions:
+ expected_revisions.append(archive_data.revision_get(rev))
- @given(revision())
- def test_lookup_revision(self, revision):
- actual_revision = service.lookup_revision(revision)
- self.assertEqual(actual_revision, self.revision_get(revision))
+ assert actual_revisions == expected_revisions
- @given(new_revision())
- def test_lookup_revision_invalid_msg(self, new_revision):
- new_revision['message'] = b'elegant fix for bug \xff'
- self.storage.revision_add([new_revision])
+def test_lookup_revision_multiple_none_found():
+ unknown_revisions_ = [random_sha1(), random_sha1(), random_sha1()]
- revision = service.lookup_revision(hash_to_hex(new_revision['id']))
- self.assertEqual(revision['message'], None)
- self.assertEqual(revision['message_decoding_failed'], True)
+ actual_revisions = list(
+ service.lookup_revision_multiple(unknown_revisions_))
- @given(new_revision())
- def test_lookup_revision_msg_ok(self, new_revision):
+ assert actual_revisions == [None] * len(unknown_revisions_)
- self.storage.revision_add([new_revision])
- revision_message = service.lookup_revision_message(
- hash_to_hex(new_revision['id']))
+@given(revision())
+def test_lookup_revision_log(archive_data, revision):
+ actual_revision_log = list(service.lookup_revision_log(revision, limit=25))
+ expected_revision_log = archive_data.revision_log(revision, limit=25)
- self.assertEqual(revision_message,
- {'message': new_revision['message']})
+ assert actual_revision_log == expected_revision_log
- def test_lookup_revision_msg_no_rev(self):
- unknown_revision_ = random_sha1()
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_revision_message(unknown_revision_)
+def _get_origin_branches(archive_data, origin):
+ origin_visit = archive_data.origin_visit_get(origin['url'])[-1]
+ snapshot = archive_data.snapshot_get(origin_visit['snapshot'])
+ branches = {k: v for (k, v) in snapshot['branches'].items()
+ if v['target_type'] == 'revision'}
+ return branches
- self.assertEqual(
- cm.exception.args[0],
- 'Revision with sha1_git %s not found.' % unknown_revision_
- )
- @given(revisions())
- def test_lookup_revision_multiple(self, revisions):
+@given(origin())
+def test_lookup_revision_log_by(archive_data, origin):
+ branches = _get_origin_branches(archive_data, origin)
+ branch_name = random.choice(list(branches.keys()))
- actual_revisions = list(service.lookup_revision_multiple(revisions))
+ actual_log = list(
+ service.lookup_revision_log_by(origin['url'], branch_name,
+ None, limit=25))
- expected_revisions = []
- for rev in revisions:
- expected_revisions.append(self.revision_get(rev))
+ expected_log = archive_data.revision_log(
+ branches[branch_name]['target'], limit=25)
- self.assertEqual(actual_revisions, expected_revisions)
+ assert actual_log == expected_log
- def test_lookup_revision_multiple_none_found(self):
- unknown_revisions_ = [random_sha1(), random_sha1(), random_sha1()]
- actual_revisions = \
- list(service.lookup_revision_multiple(unknown_revisions_))
+@given(origin())
+def test_lookup_revision_log_by_notfound(origin):
+ with pytest.raises(NotFoundExc):
+ service.lookup_revision_log_by(
+ origin['url'], 'unknown_branch_name', None, limit=100)
- self.assertEqual(actual_revisions, [None] * len(unknown_revisions_))
- @given(revision())
- def test_lookup_revision_log(self, revision):
+def test_lookup_content_raw_not_found():
+ unknown_content_ = random_content()
- actual_revision_log = \
- list(service.lookup_revision_log(revision, limit=25))
- expected_revision_log = self.revision_log(revision, limit=25)
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_content_raw('sha1:' + unknown_content_['sha1'])
- self.assertEqual(actual_revision_log, expected_revision_log)
+ assert e.match('Content with %s checksum equals to %s not found!' %
+ ('sha1', unknown_content_['sha1']))
- def _get_origin_branches(self, origin):
- origin_visit = self.origin_visit_get(origin['url'])[-1]
- snapshot = self.snapshot_get(origin_visit['snapshot'])
- branches = {k: v for (k, v) in snapshot['branches'].items()
- if v['target_type'] == 'revision'}
- return branches
- @given(origin())
- def test_lookup_revision_log_by(self, origin):
+@given(content())
+def test_lookup_content_raw(archive_data, content):
+ actual_content = service.lookup_content_raw(
+ 'sha256:%s' % content['sha256'])
- branches = self._get_origin_branches(origin)
- branch_name = random.choice(list(branches.keys()))
+ expected_content = archive_data.content_get(content['sha1'])
- actual_log = \
- list(service.lookup_revision_log_by(origin['url'], branch_name,
- None, limit=25))
+ assert actual_content == expected_content
- expected_log = \
- self.revision_log(branches[branch_name]['target'], limit=25)
- self.assertEqual(actual_log, expected_log)
+def test_lookup_content_not_found():
+ unknown_content_ = random_content()
- @given(origin())
- def test_lookup_revision_log_by_notfound(self, origin):
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_content('sha1:%s' % unknown_content_['sha1'])
- with self.assertRaises(NotFoundExc):
- service.lookup_revision_log_by(
- origin['url'], 'unknown_branch_name', None, limit=100)
+ assert e.match('Content with %s checksum equals to %s not found!' %
+ ('sha1', unknown_content_['sha1']))
- def test_lookup_content_raw_not_found(self):
- unknown_content_ = random_content()
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_content_raw('sha1:' + unknown_content_['sha1'])
+@given(content())
+def test_lookup_content_with_sha1(archive_data, content):
+ actual_content = service.lookup_content('sha1:%s' % content['sha1'])
- self.assertIn(cm.exception.args[0],
- 'Content with %s checksum equals to %s not found!' %
- ('sha1', unknown_content_['sha1']))
+ expected_content = archive_data.content_get_metadata(content['sha1'])
- @given(content())
- def test_lookup_content_raw(self, content):
+ assert actual_content == expected_content
- actual_content = service.lookup_content_raw(
- 'sha256:%s' % content['sha256'])
- expected_content = self.content_get(content['sha1'])
+@given(content())
+def test_lookup_content_with_sha256(archive_data, content):
+ actual_content = service.lookup_content('sha256:%s' % content['sha256'])
- self.assertEqual(actual_content, expected_content)
+ expected_content = archive_data.content_get_metadata(content['sha1'])
- def test_lookup_content_not_found(self):
- unknown_content_ = random_content()
+ assert actual_content == expected_content
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_content('sha1:%s' % unknown_content_['sha1'])
- self.assertIn(cm.exception.args[0],
- 'Content with %s checksum equals to %s not found!' %
- ('sha1', unknown_content_['sha1']))
+def test_lookup_directory_bad_checksum():
+ with pytest.raises(BadInputExc):
+ service.lookup_directory('directory_id')
- @given(content())
- def test_lookup_content_with_sha1(self, content):
- actual_content = service.lookup_content(
- 'sha1:%s' % content['sha1'])
+def test_lookup_directory_not_found():
+ unknown_directory_ = random_sha1()
- expected_content = self.content_get_metadata(content['sha1'])
+ with pytest.raises(NotFoundExc) as e:
+ service.lookup_directory(unknown_directory_)
- self.assertEqual(actual_content, expected_content)
+ assert e.match('Directory with sha1_git %s not found' % unknown_directory_)
- @given(content())
- def test_lookup_content_with_sha256(self, content):
- actual_content = service.lookup_content(
- 'sha256:%s' % content['sha256'])
+@given(directory())
+def test_lookup_directory(archive_data, directory):
+ actual_directory_ls = list(service.lookup_directory(directory))
- expected_content = self.content_get_metadata(content['sha1'])
+ expected_directory_ls = archive_data.directory_ls(directory)
- self.assertEqual(actual_content, expected_content)
+ assert actual_directory_ls == expected_directory_ls
- def test_lookup_directory_bad_checksum(self):
- with self.assertRaises(BadInputExc):
- service.lookup_directory('directory_id')
+@given(empty_directory())
+def test_lookup_directory_empty(empty_directory):
+ actual_directory_ls = list(service.lookup_directory(empty_directory))
- def test_lookup_directory_not_found(self):
- unknown_directory_ = random_sha1()
+ assert actual_directory_ls == []
- with self.assertRaises(NotFoundExc) as cm:
- service.lookup_directory(unknown_directory_)
- self.assertIn('Directory with sha1_git %s not found'
- % unknown_directory_, cm.exception.args[0])
+@given(origin())
+def test_lookup_revision_by_nothing_found(origin):
+ with pytest.raises(NotFoundExc):
+ service.lookup_revision_by(origin['url'], 'invalid-branch-name')
- @given(directory())
- def test_lookup_directory(self, directory):
- actual_directory_ls = list(service.lookup_directory(
- directory))
+@given(origin())
+def test_lookup_revision_by(archive_data, origin):
+ branches = _get_origin_branches(archive_data, origin)
+ branch_name = random.choice(list(branches.keys()))
- expected_directory_ls = self.directory_ls(directory)
+ actual_revision = service.lookup_revision_by(origin['url'], branch_name)
- self.assertEqual(actual_directory_ls, expected_directory_ls)
+ expected_revision = archive_data.revision_get(
+ branches[branch_name]['target'])
- @given(empty_directory())
- def test_lookup_directory_empty(self, empty_directory):
+ assert actual_revision == expected_revision
- actual_directory_ls = list(service.lookup_directory(empty_directory))
- self.assertEqual(actual_directory_ls, [])
+@given(origin(), revision())
+def test_lookup_revision_with_context_by_ko(origin, revision):
+ with pytest.raises(NotFoundExc):
+ service.lookup_revision_with_context_by(origin['url'],
+ 'invalid-branch-name',
+ None, revision)
- @given(origin())
- def test_lookup_revision_by_nothing_found(self, origin):
- with self.assertRaises(NotFoundExc):
- service.lookup_revision_by(
- origin['url'], 'invalid-branch-name')
+@given(origin())
+def test_lookup_revision_with_context_by(archive_data, origin):
+ branches = _get_origin_branches(archive_data, origin)
+ branch_name = random.choice(list(branches.keys()))
- @given(origin())
- def test_lookup_revision_by(self, origin):
+ root_rev = branches[branch_name]['target']
+ root_rev_log = archive_data.revision_log(root_rev)
- branches = self._get_origin_branches(origin)
- branch_name = random.choice(list(branches.keys()))
+ children = defaultdict(list)
- actual_revision = \
- service.lookup_revision_by(origin['url'], branch_name, None)
+ for rev in root_rev_log:
+ for rev_p in rev['parents']:
+ children[rev_p].append(rev['id'])
- expected_revision = \
- self.revision_get(branches[branch_name]['target'])
+ rev = root_rev_log[-1]['id']
- self.assertEqual(actual_revision, expected_revision)
+ actual_root_rev, actual_rev = service.lookup_revision_with_context_by(
+ origin['url'], branch_name, None, rev)
- @given(origin(), revision())
- def test_lookup_revision_with_context_by_ko(self, origin, revision):
+ expected_root_rev = archive_data.revision_get(root_rev)
+ expected_rev = archive_data.revision_get(rev)
+ expected_rev['children'] = children[rev]
- with self.assertRaises(NotFoundExc):
- service.lookup_revision_with_context_by(origin['url'],
- 'invalid-branch-name',
- None,
- revision)
+ assert actual_root_rev == expected_root_rev
+ assert actual_rev == expected_rev
- @given(origin())
- def test_lookup_revision_with_context_by(self, origin):
- branches = self._get_origin_branches(origin)
- branch_name = random.choice(list(branches.keys()))
+def test_lookup_revision_through_ko_not_implemented():
+ with pytest.raises(NotImplementedError):
+ service.lookup_revision_through({'something-unknown': 10})
- root_rev = branches[branch_name]['target']
- root_rev_log = self.revision_log(root_rev)
- children = defaultdict(list)
+@given(origin())
+def test_lookup_revision_through_with_context_by(archive_data, origin):
+ branches = _get_origin_branches(archive_data, origin)
+ branch_name = random.choice(list(branches.keys()))
- for rev in root_rev_log:
- for rev_p in rev['parents']:
- children[rev_p].append(rev['id'])
+ root_rev = branches[branch_name]['target']
+ root_rev_log = archive_data.revision_log(root_rev)
+ rev = root_rev_log[-1]['id']
- rev = root_rev_log[-1]['id']
+ assert service.lookup_revision_through({
+ 'origin_url': origin['url'],
+ 'branch_name': branch_name,
+ 'ts': None,
+ 'sha1_git': rev
+ }) == service.lookup_revision_with_context_by(origin['url'], branch_name,
+ None, rev)
- actual_root_rev, actual_rev = service.lookup_revision_with_context_by(
- origin['url'], branch_name, None, rev)
- expected_root_rev = self.revision_get(root_rev)
- expected_rev = self.revision_get(rev)
- expected_rev['children'] = children[rev]
+@given(origin())
+def test_lookup_revision_through_with_revision_by(archive_data, origin):
+ branches = _get_origin_branches(archive_data, origin)
+ branch_name = random.choice(list(branches.keys()))
- self.assertEqual(actual_root_rev, expected_root_rev)
- self.assertEqual(actual_rev, expected_rev)
+ assert service.lookup_revision_through({
+ 'origin_url': origin['url'],
+ 'branch_name': branch_name,
+ 'ts': None,
+ }) == service.lookup_revision_by(origin['url'], branch_name, None)
- def test_lookup_revision_through_ko_not_implemented(self):
- with self.assertRaises(NotImplementedError):
- service.lookup_revision_through({
- 'something-unknown': 10,
- })
+@given(ancestor_revisions())
+def test_lookup_revision_through_with_context(ancestor_revisions):
+ sha1_git = ancestor_revisions['sha1_git']
+ sha1_git_root = ancestor_revisions['sha1_git_root']
- @given(origin())
- def test_lookup_revision_through_with_context_by(self, origin):
+ assert service.lookup_revision_through({
+ 'sha1_git_root': sha1_git_root,
+ 'sha1_git': sha1_git,
+ }) == service.lookup_revision_with_context(sha1_git_root, sha1_git)
- branches = self._get_origin_branches(origin)
- branch_name = random.choice(list(branches.keys()))
- root_rev = branches[branch_name]['target']
- root_rev_log = self.revision_log(root_rev)
- rev = root_rev_log[-1]['id']
+@given(revision())
+def test_lookup_revision_through_with_revision(revision):
+ assert service.lookup_revision_through({
+ 'sha1_git': revision
+ }) == service.lookup_revision(revision)
- self.assertEqual(service.lookup_revision_through({
- 'origin_url': origin['url'],
- 'branch_name': branch_name,
- 'ts': None,
- 'sha1_git': rev
- }),
- service.lookup_revision_with_context_by(
- origin['url'], branch_name, None, rev)
- )
- @given(origin())
- def test_lookup_revision_through_with_revision_by(self, origin):
+@given(revision())
+def test_lookup_directory_through_revision_ko_not_found(revision):
+ with pytest.raises(NotFoundExc):
+ service.lookup_directory_through_revision(
+ {'sha1_git': revision}, 'some/invalid/path')
- branches = self._get_origin_branches(origin)
- branch_name = random.choice(list(branches.keys()))
- self.assertEqual(service.lookup_revision_through({
- 'origin_url': origin['url'],
- 'branch_name': branch_name,
- 'ts': None,
- }),
- service.lookup_revision_by(
- origin['url'], branch_name, None)
- )
+@given(revision())
+def test_lookup_directory_through_revision_ok(archive_data, revision):
+ rev_data = archive_data.revision_get(revision)
+ dir_entries = [e for e in archive_data.directory_ls(rev_data['directory'])
+ if e['type'] == 'file']
+ dir_entry = random.choice(dir_entries)
- @given(ancestor_revisions())
- def test_lookup_revision_through_with_context(self, ancestor_revisions):
+ assert service.lookup_directory_through_revision(
+ {'sha1_git': revision}, dir_entry['name']
+ ) == (revision,
+ service.lookup_directory_with_revision(revision, dir_entry['name']))
- sha1_git = ancestor_revisions['sha1_git']
- sha1_git_root = ancestor_revisions['sha1_git_root']
- self.assertEqual(service.lookup_revision_through({
- 'sha1_git_root': sha1_git_root,
- 'sha1_git': sha1_git,
- }),
- service.lookup_revision_with_context(
- sha1_git_root, sha1_git)
+@given(revision())
+def test_lookup_directory_through_revision_ok_with_data(
+ archive_data, revision):
+ rev_data = archive_data.revision_get(revision)
+ dir_entries = [e for e in archive_data.directory_ls(rev_data['directory'])
+ if e['type'] == 'file']
+ dir_entry = random.choice(dir_entries)
- )
-
- @given(revision())
- def test_lookup_revision_through_with_revision(self, revision):
-
- self.assertEqual(service.lookup_revision_through({
- 'sha1_git': revision
- }),
- service.lookup_revision(revision)
- )
-
- @given(revision())
- def test_lookup_directory_through_revision_ko_not_found(self, revision):
-
- with self.assertRaises(NotFoundExc):
- service.lookup_directory_through_revision(
- {'sha1_git': revision}, 'some/invalid/path')
-
- @given(revision())
- def test_lookup_directory_through_revision_ok(self, revision):
-
- revision_data = self.revision_get(revision)
- dir_entries = [e for e in self.directory_ls(revision_data['directory'])
- if e['type'] == 'file']
- dir_entry = random.choice(dir_entries)
-
- self.assertEqual(
- service.lookup_directory_through_revision({'sha1_git': revision},
- dir_entry['name']),
- (revision,
- service.lookup_directory_with_revision(
- revision, dir_entry['name']))
- )
-
- @given(revision())
- def test_lookup_directory_through_revision_ok_with_data(self, revision):
-
- revision_data = self.revision_get(revision)
- dir_entries = [e for e in self.directory_ls(revision_data['directory'])
- if e['type'] == 'file']
- dir_entry = random.choice(dir_entries)
-
- self.assertEqual(
- service.lookup_directory_through_revision({'sha1_git': revision},
- dir_entry['name'],
- with_data=True),
- (revision,
- service.lookup_directory_with_revision(
- revision, dir_entry['name'], with_data=True))
- )
+ assert service.lookup_directory_through_revision(
+ {'sha1_git': revision}, dir_entry['name'], with_data=True
+ ) == (revision,
+ service.lookup_directory_with_revision(revision, dir_entry['name'],
+ with_data=True))
diff --git a/swh/web/tests/common/test_templatetags.py b/swh/web/tests/common/test_templatetags.py
--- a/swh/web/tests/common/test_templatetags.py
+++ b/swh/web/tests/common/test_templatetags.py
@@ -1,63 +1,62 @@
-# Copyright (C) 2015-2018 The Software Heritage developers
+# Copyright (C) 2015-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import pytest
-from swh.web.common import swh_templatetags
-from swh.web.tests.testcase import WebTestCase
-
-
-class SWHTemplateTagsTest(WebTestCase):
- def test_urlize_api_links_api(self):
- # update api link with html links content with links
- content = '{"url": "/api/1/abc/"}'
- expected_content = ('{"url": "<a href="/api/1/abc/">/api/1/abc/</a>"}')
-
- self.assertEqual(swh_templatetags.urlize_links_and_mails(content),
- expected_content)
-
- def test_urlize_api_links_browse(self):
- # update /browse link with html links content with links
- content = '{"url": "/browse/def/"}'
- expected_content = ('{"url": "<a href="/browse/def/">'
- '/browse/def/</a>"}')
- self.assertEqual(swh_templatetags.urlize_links_and_mails(content),
- expected_content)
-
- def test_urlize_header_links(self):
- # update api link with html links content with links
- content = """</api/1/abc/>; rel="next"
-</api/1/def/>; rel="prev"
-"""
- expected_content = """<<a href="/api/1/abc/">/api/1/abc/</a>>; rel="next"
-<<a href="/api/1/def/">/api/1/def/</a>>; rel="prev"
-"""
-
- self.assertEqual(swh_templatetags.urlize_header_links(content),
- expected_content)
-
- # remove deprecation warnings related to docutils
- @pytest.mark.filterwarnings('ignore:.*U.*mode is deprecated:DeprecationWarning') # noqa
- def test_safe_docstring_display(self):
- # update api link with html links content with links
- docstring = """This is my list header:
-
- - Here is item 1, with a continuation
- line right here
- - Here is item 2
-
- Here is something that is not part of the list"""
-
- expected_docstring = """<p>This is my list header:</p>
-<ul class="docstring">
-<li>Here is item 1, with a continuation
-line right here</li>
-<li>Here is item 2</li>
-</ul>
-<p>Here is something that is not part of the list</p>
-"""
-
- self.assertEqual(swh_templatetags.safe_docstring_display(docstring),
- expected_docstring)
+from swh.web.common.swh_templatetags import (
+ urlize_links_and_mails, urlize_header_links, safe_docstring_display
+)
+
+
+def test_urlize_api_links_api():
+    # an API URL inside JSON content should be wrapped in an HTML link
+ content = '{"url": "/api/1/abc/"}'
+ expected_content = ('{"url": "<a href="/api/1/abc/">/api/1/abc/</a>"}')
+
+ assert urlize_links_and_mails(content) == expected_content
+
+
+def test_urlize_api_links_browse():
+    # a /browse URL inside JSON content should be wrapped in an HTML link
+ content = '{"url": "/browse/def/"}'
+ expected_content = ('{"url": "<a href="/browse/def/">'
+ '/browse/def/</a>"}')
+ assert urlize_links_and_mails(content) == expected_content
+
+
+def test_urlize_header_links():
+    # URLs in Link response headers should be turned into HTML links
+ content = '</api/1/abc/>; rel="next"\n</api/1/def/>; rel="prev"'
+
+ expected_content = ('<<a href="/api/1/abc/">/api/1/abc/</a>>; rel="next"\n'
+ '<<a href="/api/1/def/">/api/1/def/</a>>; rel="prev"')
+
+ assert urlize_header_links(content) == expected_content
+
+
+# remove deprecation warnings related to docutils
+@pytest.mark.filterwarnings(
+ 'ignore:.*U.*mode is deprecated:DeprecationWarning')
+def test_safe_docstring_display():
+    # a ReST docstring should be rendered as sanitized HTML
+ docstring = (
+ 'This is my list header:\n\n'
+ ' - Here is item 1, with a continuation\n'
+ ' line right here\n'
+ ' - Here is item 2\n\n'
+ ' Here is something that is not part of the list'
+ )
+
+ expected_docstring = (
+ '<p>This is my list header:</p>\n'
+ '<ul class="docstring">\n'
+ '<li>Here is item 1, with a continuation\n'
+ 'line right here</li>\n'
+ '<li>Here is item 2</li>\n'
+ '</ul>\n'
+ '<p>Here is something that is not part of the list</p>\n'
+ )
+
+ assert safe_docstring_display(docstring) == expected_docstring
diff --git a/swh/web/tests/common/test_throttling.py b/swh/web/tests/common/test_throttling.py
--- a/swh/web/tests/common/test_throttling.py
+++ b/swh/web/tests/common/test_throttling.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2017-2018 The Software Heritage developers
+# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
@@ -10,19 +10,14 @@
)
from django.conf.urls import url
-from django.core.cache import cache
from django.test.utils import override_settings
from rest_framework.views import APIView
from rest_framework.response import Response
-from rest_framework.test import APIRequestFactory
from rest_framework.decorators import api_view
-from swh.web.common.throttling import (
- SwhWebRateThrottle, throttle_scope
-)
-from swh.web.tests.testcase import WebTestCase
+from swh.web.common.throttling import SwhWebRateThrottle, throttle_scope
class MockViewScope1(APIView):
@@ -73,84 +68,81 @@
]
+def check_response(response, status_code,
+ limit=None, remaining=None):
+ assert response.status_code == status_code
+ if limit is not None:
+ assert response['X-RateLimit-Limit'] == str(limit)
+ else:
+ assert 'X-RateLimit-Limit' not in response
+ if remaining is not None:
+ assert response['X-RateLimit-Remaining'] == str(remaining)
+ else:
+ assert 'X-RateLimit-Remaining' not in response
+
+
+@override_settings(ROOT_URLCONF=__name__)
+def test_scope1_requests_are_throttled(api_client):
+ """
+ Ensure request rate is limited in scope1
+ """
+ for i in range(scope1_limiter_rate):
+ response = api_client.get('/scope1_class')
+ check_response(response, 200, scope1_limiter_rate,
+ scope1_limiter_rate - i - 1)
+
+ response = api_client.get('/scope1_class')
+ check_response(response, 429, scope1_limiter_rate, 0)
+
+ for i in range(scope1_limiter_rate_post):
+ response = api_client.post('/scope1_class')
+ check_response(response, 200, scope1_limiter_rate_post,
+ scope1_limiter_rate_post - i - 1)
+
+ response = api_client.post('/scope1_class')
+ check_response(response, 429, scope1_limiter_rate_post, 0)
+
+
+@override_settings(ROOT_URLCONF=__name__)
+def test_scope2_requests_are_throttled(api_client):
+ """
+ Ensure request rate is limited in scope2
+ """
+ for i in range(scope2_limiter_rate):
+ response = api_client.get('/scope2_func')
+ check_response(response, 200, scope2_limiter_rate,
+ scope2_limiter_rate - i - 1)
+
+ response = api_client.get('/scope2_func')
+ check_response(response, 429, scope2_limiter_rate, 0)
+
+ for i in range(scope2_limiter_rate_post):
+ response = api_client.post('/scope2_func')
+ check_response(response, 200, scope2_limiter_rate_post,
+ scope2_limiter_rate_post - i - 1)
+
+ response = api_client.post('/scope2_func')
+ check_response(response, 429, scope2_limiter_rate_post, 0)
+
+
@override_settings(ROOT_URLCONF=__name__)
-class ThrottlingTests(WebTestCase):
- def setUp(self):
- """
- Reset the cache so that no throttles will be active
- """
- cache.clear()
- self.factory = APIRequestFactory()
-
- def check_response(self, response, status_code,
- limit=None, remaining=None):
- assert response.status_code == status_code
- if limit is not None:
- assert response['X-RateLimit-Limit'] == str(limit)
- else:
- assert 'X-RateLimit-Limit' not in response
- if remaining is not None:
- assert response['X-RateLimit-Remaining'] == str(remaining)
- else:
- assert 'X-RateLimit-Remaining' not in response
-
- def test_scope1_requests_are_throttled(self):
- """
- Ensure request rate is limited in scope1
- """
- for i in range(scope1_limiter_rate):
- response = self.client.get('/scope1_class')
- self.check_response(response, 200, scope1_limiter_rate,
- scope1_limiter_rate - i - 1)
-
- response = self.client.get('/scope1_class')
- self.check_response(response, 429, scope1_limiter_rate, 0)
-
- for i in range(scope1_limiter_rate_post):
- response = self.client.post('/scope1_class')
- self.check_response(response, 200, scope1_limiter_rate_post,
- scope1_limiter_rate_post - i - 1)
-
- response = self.client.post('/scope1_class')
- self.check_response(response, 429, scope1_limiter_rate_post, 0)
-
- def test_scope2_requests_are_throttled(self):
- """
- Ensure request rate is limited in scope2
- """
- for i in range(scope2_limiter_rate):
- response = self.client.get('/scope2_func')
- self.check_response(response, 200, scope2_limiter_rate,
- scope2_limiter_rate - i - 1)
-
- response = self.client.get('/scope2_func')
- self.check_response(response, 429, scope2_limiter_rate, 0)
-
- for i in range(scope2_limiter_rate_post):
- response = self.client.post('/scope2_func')
- self.check_response(response, 200, scope2_limiter_rate_post,
- scope2_limiter_rate_post - i - 1)
-
- response = self.client.post('/scope2_func')
- self.check_response(response, 429, scope2_limiter_rate_post, 0)
-
- def test_scope3_requests_are_throttled_exempted(self):
- """
- Ensure request rate is not limited in scope3 as
- requests coming from localhost are exempted from rate limit.
- """
- for _ in range(scope3_limiter_rate+1):
- response = self.client.get('/scope3_class')
- self.check_response(response, 200)
-
- for _ in range(scope3_limiter_rate_post+1):
- response = self.client.post('/scope3_class')
- self.check_response(response, 200)
-
- for _ in range(scope3_limiter_rate+1):
- response = self.client.get('/scope3_func')
- self.check_response(response, 200)
-
- for _ in range(scope3_limiter_rate_post+1):
- response = self.client.post('/scope3_func')
- self.check_response(response, 200)
+def test_scope3_requests_are_throttled_exempted(api_client):
+ """
+ Ensure request rate is not limited in scope3 as
+ requests coming from localhost are exempted from rate limit.
+ """
+ for _ in range(scope3_limiter_rate+1):
+ response = api_client.get('/scope3_class')
+ check_response(response, 200)
+
+ for _ in range(scope3_limiter_rate_post+1):
+ response = api_client.post('/scope3_class')
+ check_response(response, 200)
+
+ for _ in range(scope3_limiter_rate+1):
+ response = api_client.get('/scope3_func')
+ check_response(response, 200)
+
+ for _ in range(scope3_limiter_rate_post+1):
+ response = api_client.post('/scope3_func')
+ check_response(response, 200)
diff --git a/swh/web/tests/common/test_utils.py b/swh/web/tests/common/test_utils.py
--- a/swh/web/tests/common/test_utils.py
+++ b/swh/web/tests/common/test_utils.py
@@ -1,117 +1,114 @@
-# Copyright (C) 2017-2018 The Software Heritage developers
+# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
+import pytest
+
from swh.web.common import utils
from swh.web.common.exc import BadInputExc
-from swh.web.tests.testcase import WebTestCase
-
-
-class UtilsTestCase(WebTestCase):
- def test_shorten_path_noop(self):
- noops = [
- '/api/',
- '/browse/',
- '/content/symbol/foobar/'
- ]
-
- for noop in noops:
- self.assertEqual(
- utils.shorten_path(noop),
- noop
- )
-
- def test_shorten_path_sha1(self):
- sha1 = 'aafb16d69fd30ff58afdd69036a26047f3aebdc6'
- short_sha1 = sha1[:8] + '...'
-
- templates = [
- '/api/1/content/sha1:%s/',
- '/api/1/content/sha1_git:%s/',
- '/api/1/directory/%s/',
- '/api/1/content/sha1:%s/ctags/',
- ]
-
- for template in templates:
- self.assertEqual(
- utils.shorten_path(template % sha1),
- template % short_sha1
- )
-
- def test_shorten_path_sha256(self):
- sha256 = ('aafb16d69fd30ff58afdd69036a26047'
- '213add102934013a014dfca031c41aef')
- short_sha256 = sha256[:8] + '...'
-
- templates = [
- '/api/1/content/sha256:%s/',
- '/api/1/directory/%s/',
- '/api/1/content/sha256:%s/filetype/',
- ]
-
- for template in templates:
- self.assertEqual(
- utils.shorten_path(template % sha256),
- template % short_sha256
- )
-
- def test_parse_timestamp(self):
- input_timestamps = [
- None,
- '2016-01-12',
- '2016-01-12T09:19:12+0100',
- 'Today is January 1, 2047 at 8:21:00AM',
- '1452591542',
- ]
-
- output_dates = [
- None,
- datetime.datetime(2016, 1, 12, 0, 0),
- datetime.datetime(2016, 1, 12, 8, 19, 12,
- tzinfo=datetime.timezone.utc),
- datetime.datetime(2047, 1, 1, 8, 21),
- datetime.datetime(2016, 1, 12, 9, 39, 2,
- tzinfo=datetime.timezone.utc),
- ]
-
- for ts, exp_date in zip(input_timestamps, output_dates):
- self.assertEqual(utils.parse_timestamp(ts), exp_date)
-
- def test_format_utc_iso_date(self):
- self.assertEqual(utils.format_utc_iso_date('2017-05-04T13:27:13+02:00'), # noqa
- '04 May 2017, 11:27 UTC')
-
- def test_gen_path_info(self):
- input_path = '/home/user/swh-environment/swh-web/'
- expected_result = [
- {'name': 'home', 'path': 'home'},
- {'name': 'user', 'path': 'home/user'},
- {'name': 'swh-environment', 'path': 'home/user/swh-environment'},
- {'name': 'swh-web', 'path': 'home/user/swh-environment/swh-web'}
- ]
- path_info = utils.gen_path_info(input_path)
- self.assertEqual(path_info, expected_result)
-
- input_path = 'home/user/swh-environment/swh-web'
- path_info = utils.gen_path_info(input_path)
- self.assertEqual(path_info, expected_result)
-
- def test_get_swh_persistent_id(self):
- swh_object_type = 'content'
- sha1_git = 'aafb16d69fd30ff58afdd69036a26047f3aebdc6'
-
- expected_swh_id = 'swh:1:cnt:' + sha1_git
-
- self.assertEqual(utils.get_swh_persistent_id(swh_object_type, sha1_git), # noqa
- expected_swh_id)
-
- with self.assertRaises(BadInputExc) as cm:
- utils.get_swh_persistent_id('foo', sha1_git)
- self.assertIn('Invalid object', cm.exception.args[0])
-
- with self.assertRaises(BadInputExc) as cm:
- utils.get_swh_persistent_id(swh_object_type, 'not a valid id')
- self.assertIn('Invalid object', cm.exception.args[0])
+
+
+def test_shorten_path_noop():
+ noops = [
+ '/api/',
+ '/browse/',
+ '/content/symbol/foobar/'
+ ]
+
+ for noop in noops:
+ assert utils.shorten_path(noop) == noop
+
+
+def test_shorten_path_sha1():
+ sha1 = 'aafb16d69fd30ff58afdd69036a26047f3aebdc6'
+ short_sha1 = sha1[:8] + '...'
+
+ templates = [
+ '/api/1/content/sha1:%s/',
+ '/api/1/content/sha1_git:%s/',
+ '/api/1/directory/%s/',
+ '/api/1/content/sha1:%s/ctags/',
+ ]
+
+ for template in templates:
+ assert utils.shorten_path(template % sha1) == template % short_sha1
+
+
+def test_shorten_path_sha256():
+ sha256 = ('aafb16d69fd30ff58afdd69036a26047'
+ '213add102934013a014dfca031c41aef')
+ short_sha256 = sha256[:8] + '...'
+
+ templates = [
+ '/api/1/content/sha256:%s/',
+ '/api/1/directory/%s/',
+ '/api/1/content/sha256:%s/filetype/',
+ ]
+
+ for template in templates:
+ assert utils.shorten_path(template % sha256) == template % short_sha256
+
+
+def test_parse_timestamp():
+ input_timestamps = [
+ None,
+ '2016-01-12',
+ '2016-01-12T09:19:12+0100',
+ 'Today is January 1, 2047 at 8:21:00AM',
+ '1452591542',
+ ]
+
+ output_dates = [
+ None,
+ datetime.datetime(2016, 1, 12, 0, 0),
+ datetime.datetime(2016, 1, 12, 8, 19, 12,
+ tzinfo=datetime.timezone.utc),
+ datetime.datetime(2047, 1, 1, 8, 21),
+ datetime.datetime(2016, 1, 12, 9, 39, 2,
+ tzinfo=datetime.timezone.utc),
+ ]
+
+ for ts, exp_date in zip(input_timestamps, output_dates):
+ assert utils.parse_timestamp(ts) == exp_date
+
+
+def test_format_utc_iso_date():
+ assert (utils.format_utc_iso_date('2017-05-04T13:27:13+02:00') ==
+ '04 May 2017, 11:27 UTC')
+
+
+def test_gen_path_info():
+ input_path = '/home/user/swh-environment/swh-web/'
+ expected_result = [
+ {'name': 'home', 'path': 'home'},
+ {'name': 'user', 'path': 'home/user'},
+ {'name': 'swh-environment', 'path': 'home/user/swh-environment'},
+ {'name': 'swh-web', 'path': 'home/user/swh-environment/swh-web'}
+ ]
+ path_info = utils.gen_path_info(input_path)
+ assert path_info == expected_result
+
+ input_path = 'home/user/swh-environment/swh-web'
+ path_info = utils.gen_path_info(input_path)
+ assert path_info == expected_result
+
+
+def test_get_swh_persistent_id():
+ swh_object_type = 'content'
+ sha1_git = 'aafb16d69fd30ff58afdd69036a26047f3aebdc6'
+
+ expected_swh_id = 'swh:1:cnt:' + sha1_git
+
+ assert (utils.get_swh_persistent_id(swh_object_type, sha1_git) ==
+ expected_swh_id)
+
+ with pytest.raises(BadInputExc) as e:
+ utils.get_swh_persistent_id('foo', sha1_git)
+ assert e.match('Invalid object')
+
+ with pytest.raises(BadInputExc) as e:
+ utils.get_swh_persistent_id(swh_object_type, 'not a valid id')
+ assert e.match('Invalid object')
diff --git a/swh/web/tests/conftest.py b/swh/web/tests/conftest.py
--- a/swh/web/tests/conftest.py
+++ b/swh/web/tests/conftest.py
@@ -3,13 +3,32 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
+import inspect
import json
import os
+import shutil
+from subprocess import run, PIPE
+
+import pytest
+
+from django.core.cache import cache
from hypothesis import settings, HealthCheck
+from rest_framework.test import APIClient
-# Register some hypothesis profiles
+from swh.model.hashutil import hash_to_bytes
+from swh.web.common import converters
+from swh.web.tests.data import get_tests_data, override_storages
+
+# Used to skip some tests
+ctags_json_missing = (
+ shutil.which('ctags') is None or
+ b'+json' not in run(['ctags', '--version'], stdout=PIPE).stdout
+)
+fossology_missing = shutil.which('nomossa') is None
+
+# Register some hypothesis profiles
settings.register_profile('default', settings())
settings.register_profile(
@@ -56,3 +75,205 @@
with open(webpack_stats, 'w') as outfile:
json.dump(mock_webpack_stats, outfile)
+
+
+# Clear Django cache before each test
+@pytest.fixture(autouse=True)
+def django_clear_cache():
+ cache.clear()
+
+
+# Fixture to get test client from Django REST Framework
+@pytest.fixture(scope='module')
+def api_client():
+ return APIClient()
+
+
+# Fixture to manipulate data from a sample archive used in the tests
+@pytest.fixture(scope='module')
+def archive_data():
+ return _ArchiveData()
+
+
+# Fixture to manipulate indexer data from a sample archive used in the tests
+@pytest.fixture(scope='module')
+def indexer_data():
+ return _IndexerData()
+
+
+# Custom data directory for requests_mock
+@pytest.fixture
+def datadir():
+ return os.path.join(os.path.abspath(os.path.dirname(__file__)),
+ 'resources')
+
+
+# Initialize tests data
+_tests_data = get_tests_data(reset=True)
+
+
+class _ArchiveData:
+ """
+ Helper class to manage data from a sample test archive.
+
+ It is initialized with a reference to an in-memory storage
+ containing raw tests data.
+
+    It is basically a proxy to the Storage interface but overrides some methods
+ to retrieve those tests data in a json serializable format in order to ease
+ tests implementation.
+ """
+
+ def __init__(self):
+ self.storage = _tests_data['storage']
+
+ # Update swh-web configuration to use the in-memory storages
+ # instantiated in the tests.data module
+ override_storages(self.storage, _tests_data['idx_storage'])
+
+ def _call_storage_method(method):
+ def call_storage_method(*args, **kwargs):
+ return method(*args, **kwargs)
+
+ return call_storage_method
+
+ # Forward calls to non overridden Storage methods to wrapped
+ # storage instance
+ for method_name, method in inspect.getmembers(
+ self.storage, predicate=inspect.ismethod):
+ if (not hasattr(self, method_name) and
+ not method_name.startswith('_')):
+ setattr(self, method_name, _call_storage_method(method))
+
+ def content_get_metadata(self, cnt_id):
+ cnt_id_bytes = hash_to_bytes(cnt_id)
+ metadata = next(self.storage.content_get_metadata([cnt_id_bytes]))
+ return converters.from_swh(metadata,
+ hashess={'sha1', 'sha1_git', 'sha256',
+ 'blake2s256'})
+
+ def content_get(self, cnt_id):
+ cnt_id_bytes = hash_to_bytes(cnt_id)
+ cnt = next(self.storage.content_get([cnt_id_bytes]))
+ return converters.from_content(cnt)
+
+ def directory_ls(self, dir_id):
+ cnt_id_bytes = hash_to_bytes(dir_id)
+ dir_content = map(converters.from_directory_entry,
+ self.storage.directory_ls(cnt_id_bytes))
+ return list(dir_content)
+
+ def release_get(self, rel_id):
+ rel_id_bytes = hash_to_bytes(rel_id)
+ rel_data = next(self.storage.release_get([rel_id_bytes]))
+ return converters.from_release(rel_data)
+
+ def revision_get(self, rev_id):
+ rev_id_bytes = hash_to_bytes(rev_id)
+ rev_data = next(self.storage.revision_get([rev_id_bytes]))
+ return converters.from_revision(rev_data)
+
+ def revision_log(self, rev_id, limit=None):
+ rev_id_bytes = hash_to_bytes(rev_id)
+ return list(map(converters.from_revision,
+ self.storage.revision_log([rev_id_bytes],
+ limit=limit)))
+
+ def snapshot_get_latest(self, origin_url):
+ snp = self.storage.snapshot_get_latest(origin_url)
+ return converters.from_snapshot(snp)
+
+ def origin_get(self, origin_info):
+ origin = self.storage.origin_get(origin_info)
+ return converters.from_origin(origin)
+
+ def origin_visit_get(self, origin_url):
+ visits = self.storage.origin_visit_get(origin_url)
+ return list(map(converters.from_origin_visit, visits))
+
+ def origin_visit_get_by(self, origin_url, visit_id):
+ visit = self.storage.origin_visit_get_by(origin_url, visit_id)
+ return converters.from_origin_visit(visit)
+
+ def snapshot_get(self, snapshot_id):
+ snp = self.storage.snapshot_get(hash_to_bytes(snapshot_id))
+ return converters.from_snapshot(snp)
+
+ def snapshot_get_branches(self, snapshot_id, branches_from='',
+ branches_count=1000, target_types=None):
+ snp = self.storage.snapshot_get_branches(
+ hash_to_bytes(snapshot_id), branches_from.encode(),
+ branches_count, target_types)
+ return converters.from_snapshot(snp)
+
+ def snapshot_get_head(self, snapshot):
+ if snapshot['branches']['HEAD']['target_type'] == 'alias':
+ target = snapshot['branches']['HEAD']['target']
+ head = snapshot['branches'][target]['target']
+ else:
+ head = snapshot['branches']['HEAD']['target']
+ return head
+
+
+class _IndexerData:
+ """
+ Helper class to manage indexer tests data
+
+ It is initialized with a reference to an in-memory indexer storage
+ containing raw tests data.
+
+    It also defines methods to retrieve those tests data in
+ a json serializable format in order to ease tests implementation.
+
+ """
+
+ def __init__(self):
+ self.idx_storage = _tests_data['idx_storage']
+ self.mimetype_indexer = _tests_data['mimetype_indexer']
+ self.license_indexer = _tests_data['license_indexer']
+ self.ctags_indexer = _tests_data['ctags_indexer']
+
+ # Update swh-web configuration to use the in-memory storages
+ # instantiated in the tests.data module
+ override_storages(_tests_data['storage'], self.idx_storage)
+
+ def content_add_mimetype(self, cnt_id):
+ self.mimetype_indexer.run([hash_to_bytes(cnt_id)],
+ 'update-dups')
+
+ def content_get_mimetype(self, cnt_id):
+ mimetype = next(self.idx_storage.content_mimetype_get(
+ [hash_to_bytes(cnt_id)]))
+ return converters.from_filetype(mimetype)
+
+ def content_add_language(self, cnt_id):
+ raise NotImplementedError('Language indexer is disabled.')
+ self.language_indexer.run([hash_to_bytes(cnt_id)],
+ 'update-dups')
+
+ def content_get_language(self, cnt_id):
+ lang = next(self.idx_storage.content_language_get(
+ [hash_to_bytes(cnt_id)]))
+ return converters.from_swh(lang, hashess={'id'})
+
+ def content_add_license(self, cnt_id):
+ self.license_indexer.run([hash_to_bytes(cnt_id)],
+ 'update-dups')
+
+ def content_get_license(self, cnt_id):
+ cnt_id_bytes = hash_to_bytes(cnt_id)
+ lic = next(self.idx_storage.content_fossology_license_get(
+ [cnt_id_bytes]))
+ return converters.from_swh({'id': cnt_id_bytes,
+ 'facts': lic[cnt_id_bytes]},
+ hashess={'id'})
+
+ def content_add_ctags(self, cnt_id):
+ self.ctags_indexer.run([hash_to_bytes(cnt_id)],
+ 'update-dups')
+
+ def content_get_ctags(self, cnt_id):
+ cnt_id_bytes = hash_to_bytes(cnt_id)
+ ctags = self.idx_storage.content_ctags_get([cnt_id_bytes])
+ for ctag in ctags:
+ yield converters.from_swh(ctag, hashess={'id'})
diff --git a/swh/web/tests/django_asserts.py b/swh/web/tests/django_asserts.py
new file mode 100644
--- /dev/null
+++ b/swh/web/tests/django_asserts.py
@@ -0,0 +1,21 @@
+# Copyright (C) 2019 The Software Heritage developers
+# See the AUTHORS file at the top-level directory of this distribution
+# License: GNU Affero General Public License version 3, or any later version
+# See top-level LICENSE file for more information
+
+# https://github.com/pytest-dev/pytest-django/pull/709 proposes a more
+# generic way to expose all asserts but it makes mypy unhappy.
+# So explicitly expose the assertions we need for swh-web tests to
+# avoid mypy errors
+
+"""
+Expose some Django assertions to be used with pytest
+"""
+
+from django.test import TestCase
+
+_test_case = TestCase('run')
+
+assert_template_used = _test_case.assertTemplateUsed
+assert_contains = _test_case.assertContains
+assert_not_contains = _test_case.assertNotContains
diff --git a/swh/web/tests/misc/test_origin_save.py b/swh/web/tests/misc/test_origin_save.py
--- a/swh/web/tests/misc/test_origin_save.py
+++ b/swh/web/tests/misc/test_origin_save.py
@@ -3,84 +3,87 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
+import pytest
from datetime import datetime
-from unittest.mock import patch
-from rest_framework.test import APITestCase, APIClient
+from django.test import Client
from swh.web.common.origin_save import (
SAVE_REQUEST_ACCEPTED, SAVE_TASK_NOT_YET_SCHEDULED
)
from swh.web.common.utils import reverse
from swh.web.settings.tests import save_origin_rate_post
-from swh.web.tests.testcase import WebTestCase
-
-
-class SwhOriginSaveTest(WebTestCase, APITestCase):
-
- def setUp(self):
- self.client = APIClient(enforce_csrf_checks=True)
- self.visit_type = 'git'
- self.origin = {
- 'url': 'https://github.com/python/cpython'
- }
-
- @patch('swh.web.misc.origin_save.create_save_origin_request')
- def test_save_request_form_csrf_token(
- self, mock_create_save_origin_request):
-
- self._mock_create_save_origin_request(mock_create_save_origin_request)
-
- url = reverse('origin-save-request',
- url_args={'visit_type': self.visit_type,
- 'origin_url': self.origin['url']})
-
- resp = self.client.post(url)
- self.assertEqual(resp.status_code, 403)
-
- data = self._get_csrf_token(reverse('origin-save'))
- resp = self.client.post(url, data=data)
- self.assertEqual(resp.status_code, 200)
-
- @patch('swh.web.misc.origin_save.create_save_origin_request')
- def test_save_request_form_rate_limit(
- self, mock_create_save_origin_request):
-
- self._mock_create_save_origin_request(mock_create_save_origin_request)
-
- url = reverse('origin-save-request',
- url_args={'visit_type': self.visit_type,
- 'origin_url': self.origin['url']})
-
- data = self._get_csrf_token(reverse('origin-save'))
- for _ in range(save_origin_rate_post):
- resp = self.client.post(url, data=data)
- self.assertEqual(resp.status_code, 200)
-
- resp = self.client.post(url, data=data)
- self.assertEqual(resp.status_code, 429)
-
- def test_old_save_url_redirection(self):
- url = reverse('browse-origin-save')
- resp = self.client.get(url)
- self.assertEqual(resp.status_code, 302)
- redirect_url = reverse('origin-save')
- self.assertEqual(resp['location'], redirect_url)
-
- def _get_csrf_token(self, url):
- resp = self.client.get(url)
- return {
- 'csrfmiddlewaretoken': resp.cookies['csrftoken'].value
- }
-
- def _mock_create_save_origin_request(self, mock):
- expected_data = {
- 'visit_type': self.visit_type,
- 'origin_url': self.origin['url'],
- 'save_request_date': datetime.now().isoformat(),
- 'save_request_status': SAVE_REQUEST_ACCEPTED,
- 'save_task_status': SAVE_TASK_NOT_YET_SCHEDULED,
- 'visit_date': None
- }
- mock.return_value = expected_data
+
+
+visit_type = 'git'
+origin = {
+ 'url': 'https://github.com/python/cpython'
+}
+
+
+@pytest.fixture
+def client():
+ return Client(enforce_csrf_checks=True)
+
+
+def test_save_request_form_csrf_token(client, mocker):
+ mock_create_save_origin_request = mocker.patch(
+ 'swh.web.misc.origin_save.create_save_origin_request')
+ _mock_create_save_origin_request(mock_create_save_origin_request)
+
+ url = reverse('origin-save-request',
+ url_args={'visit_type': visit_type,
+ 'origin_url': origin['url']})
+
+ resp = client.post(url)
+ assert resp.status_code == 403
+
+ data = _get_csrf_token(client, reverse('origin-save'))
+ resp = client.post(url, data=data)
+ assert resp.status_code == 200
+
+
+def test_save_request_form_rate_limit(client, mocker):
+ mock_create_save_origin_request = mocker.patch(
+ 'swh.web.misc.origin_save.create_save_origin_request')
+ _mock_create_save_origin_request(mock_create_save_origin_request)
+
+ url = reverse('origin-save-request',
+ url_args={'visit_type': visit_type,
+ 'origin_url': origin['url']})
+
+ data = _get_csrf_token(client, reverse('origin-save'))
+ for _ in range(save_origin_rate_post):
+ resp = client.post(url, data=data)
+ assert resp.status_code == 200
+
+ resp = client.post(url, data=data)
+ assert resp.status_code == 429
+
+
+def test_old_save_url_redirection(client):
+ url = reverse('browse-origin-save')
+ resp = client.get(url)
+ assert resp.status_code == 302
+ redirect_url = reverse('origin-save')
+ assert resp['location'] == redirect_url
+
+
+def _get_csrf_token(client, url):
+ resp = client.get(url)
+ return {
+ 'csrfmiddlewaretoken': resp.cookies['csrftoken'].value
+ }
+
+
+def _mock_create_save_origin_request(mock):
+ expected_data = {
+ 'visit_type': visit_type,
+ 'origin_url': origin['url'],
+ 'save_request_date': datetime.now().isoformat(),
+ 'save_request_status': SAVE_REQUEST_ACCEPTED,
+ 'save_task_status': SAVE_TASK_NOT_YET_SCHEDULED,
+ 'visit_date': None
+ }
+ mock.return_value = expected_data
diff --git a/swh/web/tests/resources/json/es_task_info_response.json b/swh/web/tests/resources/http_esnode1.internal.softwareheritage.org/swh_workers-*__search
rename from swh/web/tests/resources/json/es_task_info_response.json
rename to swh/web/tests/resources/http_esnode1.internal.softwareheritage.org/swh_workers-*__search
diff --git a/swh/web/tests/testcase.py b/swh/web/tests/testcase.py
deleted file mode 100644
--- a/swh/web/tests/testcase.py
+++ /dev/null
@@ -1,157 +0,0 @@
-# Copyright (C) 2015-2019 The Software Heritage developers
-# See the AUTHORS file at the top-level directory of this distribution
-# License: GNU Affero General Public License version 3, or any later version
-# See top-level LICENSE file for more information
-
-import shutil
-from subprocess import run, PIPE
-
-from django.core.cache import cache
-from hypothesis.extra.django import TestCase
-
-from swh.model.hashutil import hash_to_bytes
-from swh.web.common import converters
-from swh.web.tests.data import get_tests_data, override_storages
-
-
-ctags_json_missing = \
- shutil.which('ctags') is None or \
- b'+json' not in run(['ctags', '--version'], stdout=PIPE).stdout
-
-fossology_missing = shutil.which('nomossa') is None
-
-
-class WebTestCase(TestCase):
- """Base TestCase class for swh-web.
-
- It is initialized with references to in-memory storages containing
- raw tests data.
-
- It also defines class methods to retrieve those tests data in
- a json serializable format in order to ease tests implementation.
-
- """
- def _pre_setup(self):
- cache.clear()
-
- tests_data = get_tests_data(reset=True)
- self.storage = tests_data['storage']
- self.idx_storage = tests_data['idx_storage']
- self.mimetype_indexer = tests_data['mimetype_indexer']
- self.license_indexer = tests_data['license_indexer']
- self.ctags_indexer = tests_data['ctags_indexer']
-
- # Update swh-web configuration to use the in-memory storages
- # instantiated in the tests.data module
- override_storages(tests_data['storage'], tests_data['idx_storage'])
-
- super()._pre_setup()
-
- def content_add_mimetype(self, cnt_id):
- self.mimetype_indexer.run([hash_to_bytes(cnt_id)],
- 'update-dups')
-
- def content_get_mimetype(self, cnt_id):
- mimetype = next(self.idx_storage.content_mimetype_get(
- [hash_to_bytes(cnt_id)]))
- return converters.from_filetype(mimetype)
-
- def content_add_language(self, cnt_id):
- raise NotImplementedError('Language indexer is disabled.')
- self.language_indexer.run([hash_to_bytes(cnt_id)],
- 'update-dups')
-
- def content_get_language(self, cnt_id):
- lang = next(self.idx_storage.content_language_get(
- [hash_to_bytes(cnt_id)]))
- return converters.from_swh(lang, hashess={'id'})
-
- def content_add_license(self, cnt_id):
- self.license_indexer.run([hash_to_bytes(cnt_id)],
- 'update-dups')
-
- def content_get_license(self, cnt_id):
- cnt_id_bytes = hash_to_bytes(cnt_id)
- lic = next(self.idx_storage.content_fossology_license_get(
- [cnt_id_bytes]))
- return converters.from_swh({'id': cnt_id_bytes,
- 'facts': lic[cnt_id_bytes]},
- hashess={'id'})
-
- def content_add_ctags(self, cnt_id):
- self.ctags_indexer.run([hash_to_bytes(cnt_id)],
- 'update-dups')
-
- def content_get_ctags(self, cnt_id):
- cnt_id_bytes = hash_to_bytes(cnt_id)
- ctags = self.idx_storage.content_ctags_get([cnt_id_bytes])
- for ctag in ctags:
- yield converters.from_swh(ctag, hashess={'id'})
-
- def content_get_metadata(self, cnt_id):
- cnt_id_bytes = hash_to_bytes(cnt_id)
- metadata = next(self.storage.content_get_metadata([cnt_id_bytes]))
- return converters.from_swh(metadata,
- hashess={'sha1', 'sha1_git', 'sha256',
- 'blake2s256'})
-
- def content_get(self, cnt_id):
- cnt_id_bytes = hash_to_bytes(cnt_id)
- cnt = next(self.storage.content_get([cnt_id_bytes]))
- return converters.from_content(cnt)
-
- def directory_ls(self, dir_id):
- cnt_id_bytes = hash_to_bytes(dir_id)
- dir_content = map(converters.from_directory_entry,
- self.storage.directory_ls(cnt_id_bytes))
- return list(dir_content)
-
- def release_get(self, rel_id):
- rel_id_bytes = hash_to_bytes(rel_id)
- rel_data = next(self.storage.release_get([rel_id_bytes]))
- return converters.from_release(rel_data)
-
- def revision_get(self, rev_id):
- rev_id_bytes = hash_to_bytes(rev_id)
- rev_data = next(self.storage.revision_get([rev_id_bytes]))
- return converters.from_revision(rev_data)
-
- def revision_log(self, rev_id, limit=None):
- rev_id_bytes = hash_to_bytes(rev_id)
- return list(map(converters.from_revision,
- self.storage.revision_log([rev_id_bytes], limit=limit)))
-
- def snapshot_get_latest(self, origin_url):
- snp = self.storage.snapshot_get_latest(origin_url)
- return converters.from_snapshot(snp)
-
- def origin_get(self, origin_info):
- origin = self.storage.origin_get(origin_info)
- return converters.from_origin(origin)
-
- def origin_visit_get(self, origin_url):
- visits = self.storage.origin_visit_get(origin_url)
- return list(map(converters.from_origin_visit, visits))
-
- def origin_visit_get_by(self, origin_url, visit_id):
- visit = self.storage.origin_visit_get_by(origin_url, visit_id)
- return converters.from_origin_visit(visit)
-
- def snapshot_get(self, snapshot_id):
- snp = self.storage.snapshot_get(hash_to_bytes(snapshot_id))
- return converters.from_snapshot(snp)
-
- def snapshot_get_branches(self, snapshot_id, branches_from='',
- branches_count=1000, target_types=None):
- snp = self.storage.snapshot_get_branches(
- hash_to_bytes(snapshot_id), branches_from.encode(),
- branches_count, target_types)
- return converters.from_snapshot(snp)
-
- def snapshot_get_head(self, snapshot):
- if snapshot['branches']['HEAD']['target_type'] == 'alias':
- target = snapshot['branches']['HEAD']['target']
- head = snapshot['branches'][target]['target']
- else:
- head = snapshot['branches']['HEAD']['target']
- return head

File Metadata

Mime Type
text/plain
Expires
Thu, Jan 30, 4:16 PM (6 d, 21 h ago)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
3222825

Event Timeline