')
self.assertContains(resp, escape(stub_content_text_data['raw_data']))
self.assertContains(resp, url_raw)
swh_cnt_id = get_swh_persistent_id('content', sha1_git)
swh_cnt_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_cnt_id})
self.assertContains(resp, swh_cnt_id)
self.assertContains(resp, swh_cnt_id_url)
@patch('swh.web.browse.views.content.request_content')
def test_content_view_text_no_highlight(self, mock_request_content):
mock_request_content.return_value = stub_content_text_no_highlight_data
sha1_git = stub_content_text_no_highlight_data['checksums']['sha1_git']
url = reverse('browse-content',
kwargs={'query_string': stub_content_text_no_highlight_data['checksums']['sha1']}) # noqa
url_raw = reverse('browse-content-raw',
kwargs={'query_string': stub_content_text_no_highlight_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/content.html')
self.assertContains(resp, '')
self.assertContains(resp, escape(stub_content_text_no_highlight_data['raw_data'])) # noqa
self.assertContains(resp, url_raw)
swh_cnt_id = get_swh_persistent_id('content', sha1_git)
swh_cnt_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_cnt_id})
self.assertContains(resp, swh_cnt_id)
self.assertContains(resp, swh_cnt_id_url)
@patch('swh.web.browse.utils.service')
def test_content_view_no_utf8_text(self, mock_service):
mock_service.lookup_content.return_value = \
non_utf8_encoded_content_data
mock_service.lookup_content_raw.return_value = \
{'data': non_utf8_encoded_content}
mock_service.lookup_content_filetype.return_value = None
mock_service.lookup_content_language.return_value = None
mock_service.lookup_content_license.return_value = None
sha1_git = non_utf8_encoded_content_data['checksums']['sha1_git']
url = reverse('browse-content',
kwargs={'query_string': non_utf8_encoded_content_data['checksums']['sha1']}) # noqa
try:
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/content.html')
swh_cnt_id = get_swh_persistent_id('content', sha1_git)
swh_cnt_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_cnt_id})
self.assertContains(resp, swh_cnt_id_url)
self.assertContains(resp, escape(non_utf8_encoded_content.decode(non_utf8_encoding).encode('utf-8'))) # noqa
except DjangoUnicodeDecodeError:
self.fail('Textual content is not encoded in utf-8')
@patch('swh.web.browse.views.content.request_content')
def test_content_view_image(self, mock_request_content):
mime_type = 'image/png'
mock_request_content.return_value = stub_content_bin_data
url = reverse('browse-content',
kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}) # noqa
url_raw = reverse('browse-content-raw',
kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/content.html')
png_encoded = base64.b64encode(stub_content_bin_data['raw_data']) \
.decode('utf-8')
self.assertContains(resp, '
'
% (mime_type, png_encoded))
self.assertContains(resp, url_raw)
@patch('swh.web.browse.views.content.request_content')
def test_content_view_with_path(self, mock_request_content):
mock_request_content.return_value = stub_content_text_data
url = reverse('browse-content',
kwargs={'query_string': stub_content_text_data['checksums']['sha1']}, # noqa
query_params={'path': stub_content_text_path_with_root_dir}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/content.html')
self.assertContains(resp, '')
self.assertContains(resp, escape(stub_content_text_data['raw_data']))
split_path = stub_content_text_path_with_root_dir.split('/')
root_dir_sha1 = split_path[0]
filename = split_path[-1]
path = stub_content_text_path_with_root_dir \
.replace(root_dir_sha1 + '/', '') \
.replace(filename, '')
path_info = gen_path_info(path)
root_dir_url = reverse('browse-directory',
kwargs={'sha1_git': root_dir_sha1})
self.assertContains(resp, '',
count=len(path_info)+1)
self.assertContains(resp, '' +
root_dir_sha1[:7] + '')
for p in path_info:
dir_url = reverse('browse-directory',
kwargs={'sha1_git': root_dir_sha1,
'path': p['path']})
self.assertContains(resp, '' +
p['name'] + '')
self.assertContains(resp, ' ' + filename + ' ')
url_raw = reverse('browse-content-raw',
kwargs={'query_string': stub_content_text_data['checksums']['sha1']}, # noqa
query_params={'filename': filename})
self.assertContains(resp, url_raw)
@patch('swh.web.browse.views.content.request_content')
def test_content_raw_text(self, mock_request_content):
mock_request_content.return_value = stub_content_text_data
url = reverse('browse-content-raw',
kwargs={'query_string': stub_content_text_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertEqual(resp['Content-Type'], 'text/plain')
self.assertEqual(resp['Content-disposition'],
'filename=%s_%s' % ('sha1', stub_content_text_data['checksums']['sha1'])) # noqa
self.assertEqual(resp.content, stub_content_text_data['raw_data'])
filename = stub_content_text_path_with_root_dir.split('/')[-1]
url = reverse('browse-content-raw',
kwargs={'query_string': stub_content_text_data['checksums']['sha1']}, # noqa
query_params={'filename': filename})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertEqual(resp['Content-Type'], 'text/plain')
self.assertEqual(resp['Content-disposition'],
'filename=%s' % filename)
self.assertEqual(resp.content, stub_content_text_data['raw_data'])
@patch('swh.web.browse.utils.service')
def test_content_raw_no_utf8_text(self, mock_service):
mock_service.lookup_content.return_value = \
non_utf8_encoded_content_data
mock_service.lookup_content_raw.return_value = \
{'data': non_utf8_encoded_content}
mock_service.lookup_content_filetype.return_value = None
mock_service.lookup_content_language.return_value = None
mock_service.lookup_content_license.return_value = None
url = reverse('browse-content-raw',
kwargs={'query_string': non_utf8_encoded_content_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
_, encoding = get_mimetype_and_encoding_for_content(resp.content)
self.assertEqual(encoding, non_utf8_encoding)
@patch('swh.web.browse.views.content.request_content')
def test_content_raw_bin(self, mock_request_content):
mock_request_content.return_value = stub_content_bin_data
url = reverse('browse-content-raw',
kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertEqual(resp['Content-Type'], 'application/octet-stream')
self.assertEqual(resp['Content-disposition'],
'attachment; filename=%s_%s' %
('sha1', stub_content_bin_data['checksums']['sha1']))
self.assertEqual(resp.content, stub_content_bin_data['raw_data'])
url = reverse('browse-content-raw',
kwargs={'query_string': stub_content_bin_data['checksums']['sha1']}, # noqa
query_params={'filename': stub_content_bin_filename})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertEqual(resp['Content-Type'], 'application/octet-stream')
self.assertEqual(resp['Content-disposition'],
'attachment; filename=%s' % stub_content_bin_filename)
self.assertEqual(resp.content, stub_content_bin_data['raw_data'])
@patch('swh.web.browse.views.content.request_content')
def test_content_request_errors(self, mock_request_content):
url = reverse('browse-content', kwargs={'query_string': '123456'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 400)
+ self.assertEqual(resp.status_code, 400)
self.assertTemplateUsed('error.html')
mock_request_content.side_effect = NotFoundExc('content not found')
url = reverse('browse-content',
kwargs={'query_string': stub_content_text_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
@patch('swh.web.browse.utils.service')
def test_content_bytes_missing(self, mock_service):
content_data = dict(stub_content_text_data)
content_data['raw_data'] = None
mock_service.lookup_content.return_value = content_data
mock_service.lookup_content_raw.side_effect = NotFoundExc('Content bytes not available!') # noqa
url = reverse('browse-content',
kwargs={'query_string': content_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('browse/content.html')
@patch('swh.web.browse.views.content.request_content')
def test_content_too_large(self, mock_request_content):
mock_request_content.return_value = stub_content_too_large_data
url = reverse('browse-content',
kwargs={'query_string': stub_content_too_large_data['checksums']['sha1']}) # noqa
url_raw = reverse('browse-content-raw',
kwargs={'query_string': stub_content_too_large_data['checksums']['sha1']}) # noqa
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/content.html')
self.assertContains(resp, 'Content is too large to be displayed')
self.assertContains(resp, url_raw)
diff --git a/swh/web/tests/browse/views/test_directory.py b/swh/web/tests/browse/views/test_directory.py
index 6b90d9ca9..e7df1b919 100644
--- a/swh/web/tests/browse/views/test_directory.py
+++ b/swh/web/tests/browse/views/test_directory.py
@@ -1,135 +1,135 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from unittest.mock import patch
from swh.web.common.exc import BadInputExc, NotFoundExc
from swh.web.common.utils import reverse, get_swh_persistent_id
from swh.web.common.utils import gen_path_info
from swh.web.tests.testcase import SWHWebTestCase
from .data.directory_test_data import (
stub_root_directory_sha1, stub_root_directory_data,
stub_sub_directory_path, stub_sub_directory_data
)
class SwhBrowseDirectoryTest(SWHWebTestCase):
def directory_view(self, root_directory_sha1, directory_entries,
path=None):
dirs = [e for e in directory_entries if e['type'] in ('dir', 'rev')]
files = [e for e in directory_entries if e['type'] == 'file']
url_args = {'sha1_git': root_directory_sha1}
if path:
url_args['path'] = path
url = reverse('browse-directory',
kwargs=url_args)
root_dir_url = reverse('browse-directory',
kwargs={'sha1_git': root_directory_sha1})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/directory.html')
self.assertContains(resp, '' +
root_directory_sha1[:7] + '')
self.assertContains(resp, '',
count=len(dirs))
self.assertContains(resp, ' ',
count=len(files))
for d in dirs:
if d['type'] == 'rev':
dir_url = reverse('browse-revision',
kwargs={'sha1_git': d['target']})
else:
dir_path = d['name']
if path:
dir_path = "%s/%s" % (path, d['name'])
dir_url = reverse('browse-directory',
kwargs={'sha1_git': root_directory_sha1,
'path': dir_path})
self.assertContains(resp, dir_url)
for f in files:
file_path = "%s/%s" % (root_directory_sha1, f['name'])
if path:
file_path = "%s/%s/%s" % (root_directory_sha1, path, f['name'])
query_string = 'sha1_git:' + f['target']
file_url = reverse('browse-content',
kwargs={'query_string': query_string},
query_params={'path': file_path})
self.assertContains(resp, file_url)
path_info = gen_path_info(path)
self.assertContains(resp, '',
count=len(path_info)+1)
self.assertContains(resp, '%s' %
(root_dir_url, root_directory_sha1[:7]))
for p in path_info:
dir_url = reverse('browse-directory',
kwargs={'sha1_git': root_directory_sha1,
'path': p['path']})
self.assertContains(resp, '%s' %
(dir_url, p['name']))
self.assertContains(resp, 'vault-cook-directory')
swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
swh_dir_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_dir_id})
self.assertContains(resp, swh_dir_id)
self.assertContains(resp, swh_dir_id_url)
@patch('swh.web.browse.utils.service')
def test_root_directory_view(self, mock_service):
mock_service.lookup_directory.return_value = \
stub_root_directory_data
self.directory_view(stub_root_directory_sha1, stub_root_directory_data)
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.directory.service')
def test_sub_directory_view(self, mock_directory_service,
mock_utils_service):
mock_utils_service.lookup_directory.return_value = \
stub_sub_directory_data
mock_directory_service.lookup_directory_with_path.return_value = \
{'target': stub_sub_directory_data[0]['dir_id'],
'type': 'dir'}
self.directory_view(stub_root_directory_sha1, stub_sub_directory_data,
stub_sub_directory_path)
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.directory.service')
def test_directory_request_errors(self, mock_directory_service,
mock_utils_service):
mock_utils_service.lookup_directory.side_effect = \
BadInputExc('directory not found')
dir_url = reverse('browse-directory',
kwargs={'sha1_git': '1253456'})
resp = self.client.get(dir_url)
- self.assertEquals(resp.status_code, 400)
+ self.assertEqual(resp.status_code, 400)
self.assertTemplateUsed('browse/error.html')
mock_utils_service.lookup_directory.side_effect = \
NotFoundExc('directory not found')
dir_url = reverse('browse-directory',
kwargs={'sha1_git': '1253456'})
resp = self.client.get(dir_url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('browse/error.html')
diff --git a/swh/web/tests/browse/views/test_identifiers.py b/swh/web/tests/browse/views/test_identifiers.py
index 844f38653..a2d8ea178 100644
--- a/swh/web/tests/browse/views/test_identifiers.py
+++ b/swh/web/tests/browse/views/test_identifiers.py
@@ -1,136 +1,136 @@
# Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from swh.web.common.exc import BadInputExc
from swh.web.common.utils import reverse
from swh.web.tests.testcase import SWHWebTestCase
from .data.content_test_data import stub_content_text_data
from .data.directory_test_data import stub_root_directory_sha1
from .data.revision_test_data import revision_id_test
from .data.release_test_data import stub_release
swh_id_prefix = 'swh:1:'
class SwhBrowseIdTest(SWHWebTestCase):
def test_content_id_browse(self):
cnt_sha1_git = stub_content_text_data['checksums']['sha1_git']
swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
query_string = 'sha1_git:' + cnt_sha1_git
content_browse_url = reverse('browse-content',
kwargs={'query_string': query_string})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 302)
+ self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'], content_browse_url)
def test_directory_id_browse(self):
swh_id = swh_id_prefix + 'dir:' + stub_root_directory_sha1
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
directory_browse_url = reverse('browse-directory',
kwargs={'sha1_git': stub_root_directory_sha1})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 302)
+ self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'], directory_browse_url)
def test_revision_id_browse(self):
swh_id = swh_id_prefix + 'rev:' + revision_id_test
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
revision_browse_url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 302)
+ self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'], revision_browse_url)
query_params = {'origin_type': 'git',
'origin': 'https://github.com/webpack/webpack'}
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id},
query_params=query_params)
revision_browse_url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test},
query_params=query_params)
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 302)
+ self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'], revision_browse_url)
def test_release_id_browse(self):
swh_id = swh_id_prefix + 'rel:' + stub_release['id']
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
release_browse_url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 302)
+ self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'], release_browse_url)
query_params = {'origin_type': 'git',
'origin': 'https://github.com/python/cpython'}
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id},
query_params=query_params)
release_browse_url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']},
query_params=query_params)
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 302)
+ self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'], release_browse_url)
def test_bad_id_browse(self):
swh_id = swh_id_prefix + 'foo:' + stub_release['id']
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 400)
+ self.assertEqual(resp.status_code, 400)
def test_content_id_optional_parts_browse(self):
cnt_sha1_git = stub_content_text_data['checksums']['sha1_git']
optional_parts = ';lines=4-20;origin=https://github.com/user/repo'
swh_id = swh_id_prefix + 'cnt:' + cnt_sha1_git + optional_parts
url = reverse('browse-swh-id',
kwargs={'swh_id': swh_id})
query_string = 'sha1_git:' + cnt_sha1_git
content_browse_url = reverse('browse-content',
kwargs={'query_string': query_string},
query_params={'origin' : 'https://github.com/user/repo'})
content_browse_url += '#L4-L20'
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 302)
+ self.assertEqual(resp.status_code, 302)
self.assertEqual(resp['location'], content_browse_url)
diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py
index 70642553d..c70005cd6 100644
--- a/swh/web/tests/browse/views/test_origin.py
+++ b/swh/web/tests/browse/views/test_origin.py
@@ -1,883 +1,883 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from django.utils.html import escape
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, gen_path_info, format_utc_iso_date,
parse_timestamp, get_swh_persistent_id
)
from swh.web.tests.testcase import SWHWebTestCase
from .data.origin_test_data import (
origin_info_test_data,
origin_visits_test_data,
stub_content_origin_info, stub_content_origin_visit_id,
stub_content_origin_visit_unix_ts, stub_content_origin_visit_iso_date,
stub_content_origin_branch,
stub_content_origin_visits, stub_content_origin_snapshot,
stub_origin_info, stub_visit_id,
stub_origin_visits, stub_origin_snapshot,
stub_origin_root_directory_entries, stub_origin_master_branch,
stub_origin_root_directory_sha1, stub_origin_sub_directory_path,
stub_origin_sub_directory_entries, stub_visit_unix_ts, stub_visit_iso_date
)
from .data.content_test_data import (
stub_content_root_dir,
stub_content_text_data,
stub_content_text_path
)
stub_origin_info_no_type = dict(stub_origin_info)
stub_origin_info_no_type['type'] = None
def _to_snapshot_dict(branches=None, releases=None):
snp = {'branches': {}}
if branches:
for b in branches:
snp['branches'][b['name']] = {
'target': b['revision'],
'target_type': 'revision'
}
if releases:
for r in releases:
snp['branches'][r['branch_name']] = {
'target': r['id'],
'target_type': 'release'
}
return snp
class SwhBrowseOriginTest(SWHWebTestCase):
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_info')
@patch('swh.web.browse.views.origin.get_origin_info')
@patch('swh.web.browse.views.origin.get_origin_visits')
@patch('swh.web.browse.views.origin.service')
def test_origin_visits_browse(self, mock_service, mock_get_origin_visits,
mock_get_origin_info, mock_get_origin_info_utils,
mock_get_origin_visits_utils,
mock_get_origin_visit_snapshot,
mock_utils_service):
mock_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_info.return_value = origin_info_test_data
mock_get_origin_info_utils.return_value = origin_info_test_data
mock_get_origin_visits.return_value = origin_visits_test_data
mock_get_origin_visits_utils.return_value = origin_visits_test_data
mock_get_origin_visit_snapshot.return_value = stub_content_origin_snapshot
mock_utils_service.lookup_snapshot_size.return_value = {
'revision': len(stub_content_origin_snapshot[0]),
'release': len(stub_content_origin_snapshot[1])
}
url = reverse('browse-origin-visits',
kwargs={'origin_type': origin_info_test_data['type'],
'origin_url': origin_info_test_data['url']})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('origin-visits.html')
url = reverse('browse-origin-visits',
kwargs={'origin_url': origin_info_test_data['url']})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('origin-visits.html')
def origin_content_view_helper(self, origin_info, origin_visits,
origin_branches, origin_releases,
origin_branch,
root_dir_sha1, content_sha1, content_sha1_git,
content_path, content_data,
content_language,
visit_id=None, timestamp=None):
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url'],
'path': content_path}
if not visit_id:
visit_id = origin_visits[-1]['visit']
query_params = {}
if timestamp:
url_args['timestamp'] = timestamp
if visit_id:
query_params['visit_id'] = visit_id
url = reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('content.html')
self.assertContains(resp, '' % content_language)
self.assertContains(resp, escape(content_data))
split_path = content_path.split('/')
filename = split_path[-1]
path = content_path.replace(filename, '')[:-1]
path_info = gen_path_info(path)
del url_args['path']
if timestamp:
url_args['timestamp'] = \
format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
'%Y-%m-%dT%H:%M:%S')
root_dir_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '',
count=len(path_info)+1)
self.assertContains(resp, '%s' %
(root_dir_url, root_dir_sha1[:7]))
for p in path_info:
url_args['path'] = p['path']
dir_url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '%s' %
(dir_url, p['name']))
self.assertContains(resp, ' %s ' % filename)
query_string = 'sha1_git:' + content_sha1
url_raw = reverse('browse-content-raw',
kwargs={'query_string': query_string},
query_params={'filename': filename})
self.assertContains(resp, url_raw)
del url_args['path']
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, 'Branches (%s)' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, 'Releases (%s)' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, '',
count=len(origin_branches))
url_args['path'] = content_path
for branch in origin_branches:
query_params['branch'] = branch['name']
root_dir_branch_url = \
reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '' % root_dir_branch_url)
self.assertContains(resp, '',
count=len(origin_releases))
query_params['branch'] = None
for release in origin_releases:
query_params['release'] = release['name']
root_dir_release_url = \
reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '' % root_dir_release_url)
del url_args['origin_type']
url = reverse('browse-origin-content',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('content.html')
swh_cnt_id = get_swh_persistent_id('content', content_sha1_git)
swh_cnt_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_cnt_id})
self.assertContains(resp, swh_cnt_id)
self.assertContains(resp, swh_cnt_id_url)
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.views.utils.snapshot_context.service')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.utils.snapshot_context.request_content')
def test_origin_content_view(self, mock_request_content, mock_utils_service,
mock_service, mock_get_origin_visit_snapshot,
mock_get_origin_visits):
stub_content_text_sha1 = stub_content_text_data['checksums']['sha1']
stub_content_text_sha1_git = stub_content_text_data['checksums']['sha1_git']
mock_get_origin_visits.return_value = stub_content_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_content_origin_snapshot
mock_service.lookup_directory_with_path.return_value = \
{'target': stub_content_text_sha1}
mock_request_content.return_value = stub_content_text_data
mock_utils_service.lookup_origin.return_value = stub_content_origin_info
mock_utils_service.lookup_snapshot_size.return_value = {
'revision': len(stub_content_origin_snapshot[0]),
'release': len(stub_content_origin_snapshot[1])
}
self.origin_content_view_helper(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp')
self.origin_content_view_helper(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
visit_id=stub_content_origin_visit_id)
self.origin_content_view_helper(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
timestamp=stub_content_origin_visit_unix_ts)
self.origin_content_view_helper(stub_content_origin_info,
stub_content_origin_visits,
stub_content_origin_snapshot[0],
stub_content_origin_snapshot[1],
stub_content_origin_branch,
stub_content_root_dir,
stub_content_text_sha1,
stub_content_text_sha1_git,
stub_content_text_path,
stub_content_text_data['raw_data'],
'cpp',
timestamp=stub_content_origin_visit_iso_date)
def origin_directory_view_helper(self, origin_info, origin_visits,
origin_branches, origin_releases, origin_branch,
root_directory_sha1, directory_entries,
visit_id=None, timestamp=None, path=None):
dirs = [e for e in directory_entries
if e['type'] in ('dir', 'rev')]
files = [e for e in directory_entries
if e['type'] == 'file']
if not visit_id:
visit_id = origin_visits[-1]['visit']
url_args = {'origin_url': origin_info['url']}
query_params = {}
if timestamp:
url_args['timestamp'] = timestamp
else:
query_params['visit_id'] = visit_id
if path:
url_args['path'] = path
url = reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('directory.html')
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('directory.html')
self.assertContains(resp, '',
count=len(dirs))
self.assertContains(resp, ' ',
count=len(files))
if timestamp:
url_args['timestamp'] = \
format_utc_iso_date(parse_timestamp(timestamp).isoformat(),
'%Y-%m-%dT%H:%M:%S')
for d in dirs:
if d['type'] == 'rev':
dir_url = reverse('browse-revision',
kwargs={'sha1_git': d['target']})
else:
dir_path = d['name']
if path:
dir_path = "%s/%s" % (path, d['name'])
dir_url_args = dict(url_args)
dir_url_args['path'] = dir_path
dir_url = reverse('browse-origin-directory',
kwargs=dir_url_args,
query_params=query_params)
self.assertContains(resp, dir_url)
for f in files:
file_path = f['name']
if path:
file_path = "%s/%s" % (path, f['name'])
file_url_args = dict(url_args)
file_url_args['path'] = file_path
file_url = reverse('browse-origin-content',
kwargs=file_url_args,
query_params=query_params)
self.assertContains(resp, file_url)
if 'path' in url_args:
del url_args['path']
root_dir_branch_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
nb_bc_paths = 1
if path:
nb_bc_paths = len(path.split('/')) + 1
self.assertContains(resp, '', count=nb_bc_paths)
self.assertContains(resp, '%s' %
(root_dir_branch_url,
root_directory_sha1[:7]))
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, 'Branches (%s)' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, 'Releases (%s)' %
(origin_releases_url, len(origin_releases)))
if path:
url_args['path'] = path
self.assertContains(resp, ' ',
count=len(origin_branches))
for branch in origin_branches:
query_params['branch'] = branch['name']
root_dir_branch_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '' % root_dir_branch_url)
self.assertContains(resp, '',
count=len(origin_releases))
query_params['branch'] = None
for release in origin_releases:
query_params['release'] = release['name']
root_dir_release_url = \
reverse('browse-origin-directory',
kwargs=url_args,
query_params=query_params)
self.assertContains(resp, '' % root_dir_release_url)
self.assertContains(resp, 'vault-cook-directory')
self.assertContains(resp, 'vault-cook-revision')
swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id']) # noqa
swh_dir_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_dir_id})
self.assertContains(resp, swh_dir_id)
self.assertContains(resp, swh_dir_id_url)
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
def test_origin_root_directory_view(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.return_value = \
stub_origin_root_directory_entries
mock_utils_service.lookup_origin.return_value = stub_origin_info
mock_utils_service.lookup_snapshot_size.return_value = {
'revision': len(stub_origin_snapshot[0]),
'release': len(stub_origin_snapshot[1])
}
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries)
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
visit_id=stub_visit_id)
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_unix_ts)
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_iso_date)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
visit_id=stub_visit_id)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_unix_ts)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_root_directory_entries,
timestamp=stub_visit_iso_date)
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.utils.snapshot_context.service')
def test_origin_sub_directory_view(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.return_value = \
stub_origin_sub_directory_entries
mock_origin_service.lookup_directory_with_path.return_value = \
{'target': stub_origin_sub_directory_entries[0]['dir_id'],
'type' : 'dir'}
mock_utils_service.lookup_origin.return_value = stub_origin_info
mock_utils_service.lookup_snapshot_size.return_value = {
'revision': len(stub_origin_snapshot[0]),
'release': len(stub_origin_snapshot[1])
}
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
path=stub_origin_sub_directory_path)
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
visit_id=stub_visit_id,
path=stub_origin_sub_directory_path)
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_unix_ts,
path=stub_origin_sub_directory_path)
self.origin_directory_view_helper(stub_origin_info, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_iso_date,
path=stub_origin_sub_directory_path)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
path=stub_origin_sub_directory_path)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
visit_id=stub_visit_id,
path=stub_origin_sub_directory_path)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_unix_ts,
path=stub_origin_sub_directory_path)
self.origin_directory_view_helper(stub_origin_info_no_type, stub_origin_visits,
stub_origin_snapshot[0],
stub_origin_snapshot[1],
stub_origin_master_branch,
stub_origin_root_directory_sha1,
stub_origin_sub_directory_entries,
timestamp=stub_visit_iso_date,
path=stub_origin_sub_directory_path)
@patch('swh.web.browse.views.utils.snapshot_context.request_content')
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
@patch('swh.web.browse.views.utils.snapshot_context.service')
@patch('swh.web.browse.views.origin.get_origin_info')
def test_origin_request_errors(self, mock_get_origin_info,
mock_snapshot_service,
mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits,
mock_request_content):
mock_get_origin_info.side_effect = \
NotFoundExc('origin not found')
url = reverse('browse-origin-visits',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'origin not found', status_code=404)
mock_utils_service.lookup_origin.side_effect = None
mock_utils_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_visits.return_value = []
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, "No SWH visit", status_code=404)
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = \
NotFoundExc('visit not found')
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'},
query_params={'visit_id': len(stub_origin_visits)+1})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = None
mock_get_origin_visit_snapshot.return_value = ([], [])
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'),
'Origin.*has an empty list of branches')
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_directory.side_effect = \
NotFoundExc('Directory not found')
url = reverse('browse-origin-directory',
kwargs={'origin_type': 'foo',
'origin_url': 'bar'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Directory not found', status_code=404)
mock_origin_service.lookup_origin.side_effect = None
mock_origin_service.lookup_origin.return_value = origin_info_test_data
mock_get_origin_visits.return_value = []
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'foo'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, "No SWH visit", status_code=404)
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = \
NotFoundExc('visit not found')
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'foo'},
query_params={'visit_id': len(stub_origin_visits)+1})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'), 'Visit.*not found')
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.side_effect = None
mock_get_origin_visit_snapshot.return_value = ([], [])
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'baz'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertRegex(resp.content.decode('utf-8'),
'Origin.*has an empty list of branches')
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_snapshot_service.lookup_directory_with_path.return_value = \
{'target': stub_content_text_data['checksums']['sha1']}
mock_request_content.side_effect = \
NotFoundExc('Content not found')
url = reverse('browse-origin-content',
kwargs={'origin_type': 'foo',
'origin_url': 'bar',
'path': 'baz'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Content not found', status_code=404)
def origin_branches_helper(self, origin_info, origin_snapshot):
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
url = reverse('browse-origin-branches',
kwargs=url_args)
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('branches.html')
origin_branches = origin_snapshot[0]
origin_releases = origin_snapshot[1]
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args)
self.assertContains(resp, 'Branches (%s)' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args)
self.assertContains(resp, 'Releases (%s)' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, '',
count=len(origin_branches))
for branch in origin_branches:
browse_branch_url = reverse('browse-origin-directory',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']},
query_params={'branch': branch['name']})
self.assertContains(resp, '%s' % (escape(browse_branch_url), branch['name']))
browse_revision_url = reverse('browse-revision',
kwargs={'sha1_git': branch['revision']},
query_params={'origin_type': origin_info['type'],
'origin': origin_info['url']})
self.assertContains(resp, '%s' % (escape(browse_revision_url), branch['revision'][:7]))
@patch('swh.web.browse.views.utils.snapshot_context.process_snapshot_branches')
@patch('swh.web.browse.views.utils.snapshot_context.service')
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
def test_origin_branches(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits,
mock_snp_ctx_service,
mock_snp_ctx_process_branches):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_origin.return_value = stub_origin_info
mock_utils_service.lookup_snapshot_size.return_value = \
{'revision': len(stub_origin_snapshot[0]), 'release': len(stub_origin_snapshot[1])}
mock_snp_ctx_service.lookup_snapshot.return_value = \
_to_snapshot_dict(branches=stub_origin_snapshot[0])
mock_snp_ctx_process_branches.return_value = stub_origin_snapshot
self.origin_branches_helper(stub_origin_info, stub_origin_snapshot)
self.origin_branches_helper(stub_origin_info_no_type, stub_origin_snapshot)
def origin_releases_helper(self, origin_info, origin_snapshot):
url_args = {'origin_type': origin_info['type'],
'origin_url': origin_info['url']}
url = reverse('browse-origin-releases',
kwargs=url_args)
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('releases.html')
origin_branches = origin_snapshot[0]
origin_releases = origin_snapshot[1]
origin_branches_url = \
reverse('browse-origin-branches',
kwargs=url_args)
self.assertContains(resp, 'Branches (%s)' %
(origin_branches_url, len(origin_branches)))
origin_releases_url = \
reverse('browse-origin-releases',
kwargs=url_args)
self.assertContains(resp, 'Releases (%s)' %
(origin_releases_url, len(origin_releases)))
self.assertContains(resp, ' ',
count=len(origin_releases))
for release in origin_releases:
browse_release_url = reverse('browse-release',
kwargs={'sha1_git': release['id']},
query_params={'origin_type': origin_info['type'],
'origin': origin_info['url']})
self.assertContains(resp, '%s' % (escape(browse_release_url), release['name']))
@patch('swh.web.browse.views.utils.snapshot_context.process_snapshot_branches')
@patch('swh.web.browse.views.utils.snapshot_context.service')
@patch('swh.web.browse.utils.get_origin_visits')
@patch('swh.web.browse.utils.get_origin_visit_snapshot')
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.origin.service')
def test_origin_releases(self, mock_origin_service,
mock_utils_service,
mock_get_origin_visit_snapshot,
mock_get_origin_visits,
mock_snp_ctx_service,
mock_snp_ctx_process_branches):
mock_get_origin_visits.return_value = stub_origin_visits
mock_get_origin_visit_snapshot.return_value = stub_origin_snapshot
mock_utils_service.lookup_origin.return_value = stub_origin_info
mock_utils_service.lookup_snapshot_size.return_value = \
{'revision': len(stub_origin_snapshot[0]), 'release': len(stub_origin_snapshot[1])}
mock_snp_ctx_service.lookup_snapshot.return_value = \
_to_snapshot_dict(releases=stub_origin_snapshot[1])
mock_snp_ctx_process_branches.return_value = stub_origin_snapshot
self.origin_releases_helper(stub_origin_info, stub_origin_snapshot)
self.origin_releases_helper(stub_origin_info_no_type, stub_origin_snapshot)
diff --git a/swh/web/tests/browse/views/test_person.py b/swh/web/tests/browse/views/test_person.py
index 12e7b80c9..c0bb3e571 100644
--- a/swh/web/tests/browse/views/test_person.py
+++ b/swh/web/tests/browse/views/test_person.py
@@ -1,52 +1,52 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from unittest.mock import patch
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import reverse
from swh.web.tests.testcase import SWHWebTestCase
class SwhBrowsePersonTest(SWHWebTestCase):
@patch('swh.web.browse.views.person.service')
def test_person_browse(self, mock_service):
test_person_data = \
{
"email": "j.adams440@gmail.com",
"fullname": "oysterCrusher ",
"id": 457587,
"name": "oysterCrusher"
}
mock_service.lookup_person.return_value = test_person_data
url = reverse('browse-person', kwargs={'person_id': 457587})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/person.html')
self.assertContains(resp, '%s
' % test_person_data['id'])
self.assertContains(resp, '%s
' % test_person_data['name'])
self.assertContains(resp, '%s
' %
(test_person_data['email'],
test_person_data['email']))
self.assertContains(resp, '%s <%s>
' % # noqa
(test_person_data['name'],
test_person_data['email'],
test_person_data['email']))
@patch('swh.web.browse.views.person.service')
def test_person_request_error(self, mock_service):
mock_service.lookup_person.side_effect = \
NotFoundExc('Person not found')
url = reverse('browse-person', kwargs={'person_id': 457587})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Person not found', status_code=404)
diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py
index 4de4cc04b..8c6a39a2a 100644
--- a/swh/web/tests/browse/views/test_release.py
+++ b/swh/web/tests/browse/views/test_release.py
@@ -1,111 +1,111 @@
# Copyright (C) 2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id
)
from swh.web.tests.testcase import SWHWebTestCase
from .data.release_test_data import (
stub_release
)
from .data.origin_test_data import stub_origin_visits
class SwhBrowseReleaseTest(SWHWebTestCase):
@patch('swh.web.browse.views.release.service')
@patch('swh.web.browse.utils.service')
@patch('swh.web.common.utils.service')
def test_release_browse(self, mock_service_common, mock_service_utils,
mock_service):
mock_service.lookup_release.return_value = stub_release
url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']})
release_id = stub_release['id']
release_name = stub_release['name']
author_id = stub_release['author']['id']
author_name = stub_release['author']['name']
author_url = reverse('browse-person',
kwargs={'person_id': author_id})
release_date = stub_release['date']
message = stub_release['message']
target_type = stub_release['target_type']
target = stub_release['target']
target_url = reverse('browse-revision', kwargs={'sha1_git': target})
message_lines = stub_release['message'].split('\n')
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/release.html')
self.assertContains(resp, '%s' %
(author_url, author_name))
self.assertContains(resp, format_utc_iso_date(release_date))
self.assertContains(resp, '%s
%s' % (message_lines[0],
'\n'.join(message_lines[1:])))
self.assertContains(resp, release_id)
self.assertContains(resp, release_name)
self.assertContains(resp, target_type)
self.assertContains(resp, '%s' %
(target_url, target))
swh_rel_id = get_swh_persistent_id('release', release_id)
swh_rel_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_rel_id})
self.assertContains(resp, swh_rel_id)
self.assertContains(resp, swh_rel_id_url)
origin_info = {
'id': 13706355,
'type': 'git',
'url': 'https://github.com/python/cpython'
}
mock_service_utils.lookup_origin.return_value = origin_info
mock_service_common.lookup_origin_visits.return_value = stub_origin_visits
mock_service_common.MAX_LIMIT = 20
url = reverse('browse-release',
kwargs={'sha1_git': stub_release['id']},
query_params={'origin': origin_info['url']})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/release.html')
self.assertContains(resp, author_url)
self.assertContains(resp, author_name)
self.assertContains(resp, format_utc_iso_date(release_date))
self.assertContains(resp, '%s
%s' % (message_lines[0],
'\n'.join(message_lines[1:])))
self.assertContains(resp, release_id)
self.assertContains(resp, release_name)
self.assertContains(resp, target_type)
target_url = reverse('browse-revision', kwargs={'sha1_git': target},
query_params={'origin': origin_info['url']})
self.assertContains(resp, '%s' % (target_url, target))
mock_service.lookup_release.side_effect = \
NotFoundExc('Release not found')
url = reverse('browse-release',
kwargs={'sha1_git': 'ffff'})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Release not found', status_code=404)
diff --git a/swh/web/tests/browse/views/test_revision.py b/swh/web/tests/browse/views/test_revision.py
index 88be9be19..ddf9b3b1e 100644
--- a/swh/web/tests/browse/views/test_revision.py
+++ b/swh/web/tests/browse/views/test_revision.py
@@ -1,273 +1,273 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
# flake8: noqa
from unittest.mock import patch
from django.utils.html import escape
from swh.web.common.exc import NotFoundExc
from swh.web.common.utils import (
reverse, format_utc_iso_date, get_swh_persistent_id
)
from swh.web.tests.testcase import SWHWebTestCase
from .data.revision_test_data import (
revision_id_test, revision_metadata_test,
revision_history_log_test
)
from .data.origin_test_data import stub_origin_visits
class SwhBrowseRevisionTest(SWHWebTestCase):
@patch('swh.web.browse.views.revision.service')
@patch('swh.web.browse.utils.service')
@patch('swh.web.common.utils.service')
def test_revision_browse(self, mock_service_common, mock_service_utils,
mock_service):
mock_service.lookup_revision.return_value = revision_metadata_test
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test})
author_id = revision_metadata_test['author']['id']
author_name = revision_metadata_test['author']['name']
committer_id = revision_metadata_test['committer']['id']
committer_name = revision_metadata_test['committer']['name']
dir_id = revision_metadata_test['directory']
author_url = reverse('browse-person',
kwargs={'person_id': author_id})
committer_url = reverse('browse-person',
kwargs={'person_id': committer_id})
directory_url = reverse('browse-directory',
kwargs={'sha1_git': dir_id})
history_url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision.html')
self.assertContains(resp, '%s' %
(author_url, author_name))
self.assertContains(resp, '%s' %
(committer_url, committer_name))
self.assertContains(resp, directory_url)
self.assertContains(resp, history_url)
for parent in revision_metadata_test['parents']:
parent_url = reverse('browse-revision',
kwargs={'sha1_git': parent})
self.assertContains(resp, '%s' %
(parent_url, parent))
author_date = revision_metadata_test['date']
committer_date = revision_metadata_test['committer_date']
message_lines = revision_metadata_test['message'].split('\n')
self.assertContains(resp, format_utc_iso_date(author_date))
self.assertContains(resp, format_utc_iso_date(committer_date))
self.assertContains(resp, message_lines[0])
self.assertContains(resp, '\n'.join(message_lines[1:]))
origin_info = {
'id': '7416001',
'type': 'git',
'url': 'https://github.com/webpack/webpack'
}
mock_service_utils.lookup_origin.return_value = origin_info
mock_service_common.lookup_origin_visits.return_value = stub_origin_visits
mock_service_common.MAX_LIMIT = 20
origin_directory_url = reverse('browse-origin-directory',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']},
query_params={'revision': revision_id_test})
origin_revision_log_url = reverse('browse-origin-log',
kwargs={'origin_type': origin_info['type'],
'origin_url': origin_info['url']},
query_params={'revision': revision_id_test})
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test},
query_params={'origin_type': origin_info['type'],
'origin': origin_info['url']})
resp = self.client.get(url)
self.assertContains(resp, origin_directory_url)
self.assertContains(resp, origin_revision_log_url)
for parent in revision_metadata_test['parents']:
parent_url = reverse('browse-revision',
kwargs={'sha1_git': parent},
query_params={'origin_type': origin_info['type'],
'origin': origin_info['url']})
self.assertContains(resp, '%s' %
(parent_url, parent))
self.assertContains(resp, 'vault-cook-directory')
self.assertContains(resp, 'vault-cook-revision')
swh_rev_id = get_swh_persistent_id('revision', revision_id_test)
swh_rev_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_rev_id})
self.assertContains(resp, swh_rev_id)
self.assertContains(resp, swh_rev_id_url)
swh_dir_id = get_swh_persistent_id('directory', dir_id)
swh_dir_id_url = reverse('browse-swh-id',
kwargs={'swh_id': swh_dir_id})
self.assertContains(resp, swh_dir_id)
self.assertContains(resp, swh_dir_id_url)
@patch('swh.web.browse.views.revision.service')
def test_revision_log_browse(self, mock_service):
per_page = 10
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[:per_page+1]
url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test},
query_params={'per_page': per_page})
resp = self.client.get(url)
prev_rev = revision_history_log_test[per_page]['id']
next_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_rev},
query_params={'revs_breadcrumb': revision_id_test,
'per_page': per_page})
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision-log.html')
self.assertContains(resp, '',
count=per_page)
self.assertContains(resp, 'Newer ')
self.assertContains(resp, 'Older ' %
escape(next_page_url))
for log in revision_history_log_test[:per_page]:
author_url = reverse('browse-person',
kwargs={'person_id': log['author']['id']})
revision_url = reverse('browse-revision',
kwargs={'sha1_git': log['id']})
directory_url = reverse('browse-directory',
kwargs={'sha1_git': log['directory']})
self.assertContains(resp, '%s' %
(author_url, log['author']['name']))
self.assertContains(resp, '%s' %
(revision_url, log['id'][:7]))
self.assertContains(resp, directory_url)
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[per_page:2*per_page+1]
resp = self.client.get(next_page_url)
prev_prev_rev = revision_history_log_test[2*per_page]['id']
prev_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test},
query_params={'per_page': per_page})
next_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_prev_rev},
query_params={'revs_breadcrumb': revision_id_test + '/' + prev_rev,
'per_page': per_page})
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision-log.html')
self.assertContains(resp, ' ',
count=per_page)
self.assertContains(resp, 'Newer ' %
escape(prev_page_url))
self.assertContains(resp, 'Older ' %
escape(next_page_url))
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[2*per_page:3*per_page+1]
resp = self.client.get(next_page_url)
prev_prev_prev_rev = revision_history_log_test[3*per_page]['id']
prev_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_rev},
query_params={'revs_breadcrumb': revision_id_test,
'per_page': per_page})
next_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_prev_prev_rev},
query_params={'revs_breadcrumb': revision_id_test + '/' + prev_rev + '/' + prev_prev_rev,
'per_page': per_page})
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision-log.html')
self.assertContains(resp, ' ',
count=per_page)
self.assertContains(resp, 'Newer ' %
escape(prev_page_url))
self.assertContains(resp, 'Older ' %
escape(next_page_url))
mock_service.lookup_revision_log.return_value = \
revision_history_log_test[3*per_page:3*per_page+per_page//2]
resp = self.client.get(next_page_url)
prev_page_url = reverse('browse-revision-log',
kwargs={'sha1_git': prev_prev_rev},
query_params={'revs_breadcrumb': revision_id_test + '/' + prev_rev,
'per_page': per_page})
- self.assertEquals(resp.status_code, 200)
+ self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed('browse/revision-log.html')
self.assertContains(resp, ' ',
count=per_page//2)
self.assertContains(resp, 'Older ')
self.assertContains(resp, 'Newer ' %
escape(prev_page_url))
@patch('swh.web.browse.utils.service')
@patch('swh.web.browse.views.revision.service')
def test_revision_request_errors(self, mock_service, mock_utils_service):
mock_service.lookup_revision.side_effect = \
NotFoundExc('Revision not found')
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Revision not found', status_code=404)
mock_service.lookup_revision_log.side_effect = \
NotFoundExc('Revision not found')
url = reverse('browse-revision-log',
kwargs={'sha1_git': revision_id_test})
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Revision not found', status_code=404)
url = reverse('browse-revision',
kwargs={'sha1_git': revision_id_test},
query_params={'origin_type': 'git',
'origin': 'https://github.com/foo/bar'})
mock_service.lookup_revision.side_effect = None
mock_utils_service.lookup_origin.side_effect = \
NotFoundExc('Origin not found')
resp = self.client.get(url)
- self.assertEquals(resp.status_code, 404)
+ self.assertEqual(resp.status_code, 404)
self.assertTemplateUsed('error.html')
self.assertContains(resp, 'Origin not found', status_code=404)
diff --git a/swh/web/tests/common/test_converters.py b/swh/web/tests/common/test_converters.py
index 6310b430d..25ad6840f 100644
--- a/swh/web/tests/common/test_converters.py
+++ b/swh/web/tests/common/test_converters.py
@@ -1,785 +1,785 @@
# Copyright (C) 2015-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
from swh.model import hashutil
from swh.web.common import converters
from swh.web.tests.testcase import SWHWebTestCase
class ConvertersTestCase(SWHWebTestCase):
def test_fmap(self):
- self.assertEquals([2, 3, None, 4],
- converters.fmap(lambda x: x+1, [1, 2, None, 3]))
- self.assertEquals([11, 12, 13],
- list(converters.fmap(lambda x: x+10,
- map(lambda x: x, [1, 2, 3]))))
- self.assertEquals({'a': 2, 'b': 4},
- converters.fmap(lambda x: x*2, {'a': 1, 'b': 2}))
- self.assertEquals(100,
- converters.fmap(lambda x: x*10, 10))
- self.assertEquals({'a': [2, 6], 'b': 4},
+ self.assertEqual([2, 3, None, 4],
+ converters.fmap(lambda x: x+1, [1, 2, None, 3]))
+ self.assertEqual([11, 12, 13],
+ list(converters.fmap(lambda x: x+10,
+ map(lambda x: x, [1, 2, 3]))))
+ self.assertEqual({'a': 2, 'b': 4},
+ converters.fmap(lambda x: x*2, {'a': 1, 'b': 2}))
+ self.assertEqual(100,
+ converters.fmap(lambda x: x*10, 10))
+ self.assertEqual({'a': [2, 6], 'b': 4},
converters.fmap(lambda x: x*2, {'a': [1, 3], 'b': 2})) # noqa
self.assertIsNone(converters.fmap(lambda x: x, None))
def test_from_swh(self):
some_input = {
'a': 'something',
'b': 'someone',
'c': b'sharp-0.3.4.tgz',
'd': hashutil.hash_to_bytes(
'b04caf10e9535160d90e874b45aa426de762f19f'),
'e': b'sharp.html/doc_002dS_005fISREG.html',
'g': [b'utf-8-to-decode', b'another-one'],
'h': 'something filtered',
'i': {'e': b'something'},
'j': {
'k': {
'l': [b'bytes thing', b'another thingy', b''],
'n': 'dont care either'
},
'm': 'dont care'
},
'o': 'something',
'p': b'foo',
'q': {'extra-headers': [['a', b'intact']]},
'w': None,
'r': {'p': 'also intact',
'q': 'bar'},
's': {
'timestamp': 42,
'offset': -420,
'negative_utc': None,
},
's1': {
'timestamp': {'seconds': 42, 'microseconds': 0},
'offset': -420,
'negative_utc': None,
},
's2': datetime.datetime(
2013, 7, 1, 20, 0, 0,
tzinfo=datetime.timezone.utc),
't': None,
'u': None,
'v': None,
'x': None,
}
expected_output = {
'a': 'something',
'b': 'someone',
'c': 'sharp-0.3.4.tgz',
'd': 'b04caf10e9535160d90e874b45aa426de762f19f',
'e': 'sharp.html/doc_002dS_005fISREG.html',
'g': ['utf-8-to-decode', 'another-one'],
'i': {'e': 'something'},
'j': {
'k': {
'l': ['bytes thing', 'another thingy', '']
}
},
'p': 'foo',
'q': {'extra-headers': [['a', 'intact']]},
'w': {},
'r': {'p': 'also intact',
'q': 'bar'},
's': '1969-12-31T17:00:42-07:00',
's1': '1969-12-31T17:00:42-07:00',
's2': '2013-07-01T20:00:00+00:00',
'u': {},
'v': [],
'x': None,
}
actual_output = converters.from_swh(
some_input,
hashess={'d', 'o', 'x'},
bytess={'c', 'e', 'g', 'l'},
dates={'s', 's1', 's2'},
blacklist={'h', 'm', 'n', 'o'},
removables_if_empty={'t'},
empty_dict={'u'},
empty_list={'v'},
convert={'p', 'q', 'w'},
convert_fn=converters.convert_revision_metadata)
- self.assertEquals(expected_output, actual_output)
+ self.assertEqual(expected_output, actual_output)
def test_from_swh_edge_cases_do_no_conversion_if_none_or_not_bytes(self):
some_input = {
'a': 'something',
'b': None,
'c': 'someone',
'd': None,
'e': None
}
expected_output = {
'a': 'something',
'b': None,
'c': 'someone',
'd': None,
'e': None
}
actual_output = converters.from_swh(some_input,
hashess={'a', 'b'},
bytess={'c', 'd'},
dates={'e'})
- self.assertEquals(expected_output, actual_output)
+ self.assertEqual(expected_output, actual_output)
def test_from_swh_edge_cases_convert_invalid_utf8_bytes(self):
some_input = {
'a': 'something',
'b': 'someone',
'c': b'a name \xff',
'd': b'an email \xff',
}
expected_output = {
'a': 'something',
'b': 'someone',
'c': 'a name \\xff',
'd': 'an email \\xff',
'decoding_failures': ['c', 'd']
}
actual_output = converters.from_swh(some_input,
hashess={'a', 'b'},
bytess={'c', 'd'})
for v in ['a', 'b', 'c', 'd']:
self.assertEqual(expected_output[v], actual_output[v])
self.assertEqual(len(expected_output['decoding_failures']),
len(actual_output['decoding_failures']))
for v in expected_output['decoding_failures']:
self.assertTrue(v in actual_output['decoding_failures'])
def test_from_swh_empty(self):
# when
- self.assertEquals({}, converters.from_swh({}))
+ self.assertEqual({}, converters.from_swh({}))
def test_from_swh_none(self):
# when
self.assertIsNone(converters.from_swh(None))
def test_from_provenance(self):
# given
input_provenance = {
'origin': 10,
'visit': 1,
'content': hashutil.hash_to_bytes(
'321caf10e9535160d90e874b45aa426de762f19f'),
'revision': hashutil.hash_to_bytes(
'123caf10e9535160d90e874b45aa426de762f19f'),
'path': b'octave-3.4.0/doc/interpreter/octave/doc_002dS_005fISREG'
}
expected_provenance = {
'origin': 10,
'visit': 1,
'content': '321caf10e9535160d90e874b45aa426de762f19f',
'revision': '123caf10e9535160d90e874b45aa426de762f19f',
'path': 'octave-3.4.0/doc/interpreter/octave/doc_002dS_005fISREG'
}
# when
actual_provenance = converters.from_provenance(input_provenance)
# then
self.assertEqual(actual_provenance, expected_provenance)
def test_from_origin(self):
# given
origin_input = {
'id': 9,
'type': 'ftp',
'url': 'rsync://ftp.gnu.org/gnu/octave',
}
expected_origin = {
'id': 9,
'type': 'ftp',
'url': 'rsync://ftp.gnu.org/gnu/octave',
}
# when
actual_origin = converters.from_origin(origin_input)
# then
self.assertEqual(actual_origin, expected_origin)
def test_from_origin_visit(self):
snap_hash = 'b5f0b7f716735ebffe38505c60145c4fd9da6ca3'
for snap in [snap_hash, None]:
# given
visit = {
'date': {
'timestamp': datetime.datetime(
2015, 1, 1, 22, 0, 0,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'origin': 10,
'visit': 100,
'metadata': None,
'status': 'full',
'snapshot': hashutil.hash_to_bytes(snap) if snap else snap,
}
expected_visit = {
'date': '2015-01-01T22:00:00+00:00',
'origin': 10,
'visit': 100,
'metadata': {},
'status': 'full',
'snapshot': snap_hash if snap else snap
}
# when
actual_visit = converters.from_origin_visit(visit)
# then
self.assertEqual(actual_visit, expected_visit)
def test_from_release(self):
release_input = {
'id': hashutil.hash_to_bytes(
'aad23fa492a0c5fed0708a6703be875448c86884'),
'target': hashutil.hash_to_bytes(
'5e46d564378afc44b31bb89f99d5675195fbdf67'),
'target_type': 'revision',
'date': {
'timestamp': datetime.datetime(
2015, 1, 1, 22, 0, 0,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'author': {
'name': b'author name',
'fullname': b'Author Name author@email',
'email': b'author@email',
},
'name': b'v0.0.1',
'message': b'some comment on release',
'synthetic': True,
}
expected_release = {
'id': 'aad23fa492a0c5fed0708a6703be875448c86884',
'target': '5e46d564378afc44b31bb89f99d5675195fbdf67',
'target_type': 'revision',
'date': '2015-01-01T22:00:00+00:00',
'author': {
'name': 'author name',
'fullname': 'Author Name author@email',
'email': 'author@email',
},
'name': 'v0.0.1',
'message': 'some comment on release',
'target_type': 'revision',
'synthetic': True,
}
# when
actual_release = converters.from_release(release_input)
# then
self.assertEqual(actual_release, expected_release)
def test_from_release_no_revision(self):
release_input = {
'id': hashutil.hash_to_bytes(
'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e'),
'target': None,
'date': {
'timestamp': datetime.datetime(
2016, 3, 2, 10, 0, 0,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': True,
},
'name': b'v0.1.1',
'message': b'comment on release',
'synthetic': False,
'author': {
'name': b'bob',
'fullname': b'Bob bob@alice.net',
'email': b'bob@alice.net',
},
}
expected_release = {
'id': 'b2171ee2bdf119cd99a7ec7eff32fa8013ef9a4e',
'target': None,
'date': '2016-03-02T10:00:00-00:00',
'name': 'v0.1.1',
'message': 'comment on release',
'synthetic': False,
'author': {
'name': 'bob',
'fullname': 'Bob bob@alice.net',
'email': 'bob@alice.net',
},
}
# when
actual_release = converters.from_release(release_input)
# then
self.assertEqual(actual_release, expected_release)
def test_from_revision(self):
revision_input = {
'id': hashutil.hash_to_bytes(
'18d8be353ed3480476f032475e7c233eff7371d5'),
'directory': hashutil.hash_to_bytes(
'7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
'author': {
'name': b'Software Heritage',
'fullname': b'robot robot@softwareheritage.org',
'email': b'robot@softwareheritage.org',
},
'committer': {
'name': b'Software Heritage',
'fullname': b'robot robot@softwareheritage.org',
'email': b'robot@softwareheritage.org',
},
'message': b'synthetic revision message',
'date': {
'timestamp': datetime.datetime(
2000, 1, 17, 11, 23, 54,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'committer_date': {
'timestamp': datetime.datetime(
2000, 1, 17, 11, 23, 54,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'synthetic': True,
'type': 'tar',
'parents': [
hashutil.hash_to_bytes(
'29d8be353ed3480476f032475e7c244eff7371d5'),
hashutil.hash_to_bytes(
'30d8be353ed3480476f032475e7c244eff7371d5')
],
'children': [
hashutil.hash_to_bytes(
'123546353ed3480476f032475e7c244eff7371d5'),
],
'metadata': {
'extra_headers': [['gpgsig', b'some-signature']],
'original_artifact': [{
'archive_type': 'tar',
'name': 'webbase-5.7.0.tar.gz',
'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
'309d36484e7edf7bb912',
}]
},
}
expected_revision = {
'id': '18d8be353ed3480476f032475e7c233eff7371d5',
'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
'author': {
'name': 'Software Heritage',
'fullname': 'robot robot@softwareheritage.org',
'email': 'robot@softwareheritage.org',
},
'committer': {
'name': 'Software Heritage',
'fullname': 'robot robot@softwareheritage.org',
'email': 'robot@softwareheritage.org',
},
'message': 'synthetic revision message',
'date': "2000-01-17T11:23:54+00:00",
'committer_date': "2000-01-17T11:23:54+00:00",
'children': [
'123546353ed3480476f032475e7c244eff7371d5'
],
'parents': [
'29d8be353ed3480476f032475e7c244eff7371d5',
'30d8be353ed3480476f032475e7c244eff7371d5'
],
'type': 'tar',
'synthetic': True,
'metadata': {
'extra_headers': [['gpgsig', 'some-signature']],
'original_artifact': [{
'archive_type': 'tar',
'name': 'webbase-5.7.0.tar.gz',
'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
'309d36484e7edf7bb912'
}]
},
'merge': True
}
# when
actual_revision = converters.from_revision(revision_input)
# then
self.assertEqual(actual_revision, expected_revision)
def test_from_revision_nomerge(self):
revision_input = {
'id': hashutil.hash_to_bytes(
'18d8be353ed3480476f032475e7c233eff7371d5'),
'parents': [
hashutil.hash_to_bytes(
'29d8be353ed3480476f032475e7c244eff7371d5')
]
}
expected_revision = {
'id': '18d8be353ed3480476f032475e7c233eff7371d5',
'parents': [
'29d8be353ed3480476f032475e7c244eff7371d5'
],
'merge': False
}
# when
actual_revision = converters.from_revision(revision_input)
# then
self.assertEqual(actual_revision, expected_revision)
def test_from_revision_noparents(self):
revision_input = {
'id': hashutil.hash_to_bytes(
'18d8be353ed3480476f032475e7c233eff7371d5'),
'directory': hashutil.hash_to_bytes(
'7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
'author': {
'name': b'Software Heritage',
'fullname': b'robot robot@softwareheritage.org',
'email': b'robot@softwareheritage.org',
},
'committer': {
'name': b'Software Heritage',
'fullname': b'robot robot@softwareheritage.org',
'email': b'robot@softwareheritage.org',
},
'message': b'synthetic revision message',
'date': {
'timestamp': datetime.datetime(
2000, 1, 17, 11, 23, 54,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'committer_date': {
'timestamp': datetime.datetime(
2000, 1, 17, 11, 23, 54,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'synthetic': True,
'type': 'tar',
'children': [
hashutil.hash_to_bytes(
'123546353ed3480476f032475e7c244eff7371d5'),
],
'metadata': {
'original_artifact': [{
'archive_type': 'tar',
'name': 'webbase-5.7.0.tar.gz',
'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
'309d36484e7edf7bb912',
}]
},
}
expected_revision = {
'id': '18d8be353ed3480476f032475e7c233eff7371d5',
'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
'author': {
'name': 'Software Heritage',
'fullname': 'robot robot@softwareheritage.org',
'email': 'robot@softwareheritage.org',
},
'committer': {
'name': 'Software Heritage',
'fullname': 'robot robot@softwareheritage.org',
'email': 'robot@softwareheritage.org',
},
'message': 'synthetic revision message',
'date': "2000-01-17T11:23:54+00:00",
'committer_date': "2000-01-17T11:23:54+00:00",
'children': [
'123546353ed3480476f032475e7c244eff7371d5'
],
'type': 'tar',
'synthetic': True,
'metadata': {
'original_artifact': [{
'archive_type': 'tar',
'name': 'webbase-5.7.0.tar.gz',
'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
'309d36484e7edf7bb912'
}]
}
}
# when
actual_revision = converters.from_revision(revision_input)
# then
self.assertEqual(actual_revision, expected_revision)
def test_from_revision_invalid(self):
revision_input = {
'id': hashutil.hash_to_bytes(
'18d8be353ed3480476f032475e7c233eff7371d5'),
'directory': hashutil.hash_to_bytes(
'7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'),
'author': {
'name': b'Software Heritage',
'fullname': b'robot robot@softwareheritage.org',
'email': b'robot@softwareheritage.org',
},
'committer': {
'name': b'Software Heritage',
'fullname': b'robot robot@softwareheritage.org',
'email': b'robot@softwareheritage.org',
},
'message': b'invalid message \xff',
'date': {
'timestamp': datetime.datetime(
2000, 1, 17, 11, 23, 54,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'committer_date': {
'timestamp': datetime.datetime(
2000, 1, 17, 11, 23, 54,
tzinfo=datetime.timezone.utc).timestamp(),
'offset': 0,
'negative_utc': False,
},
'synthetic': True,
'type': 'tar',
'parents': [
hashutil.hash_to_bytes(
'29d8be353ed3480476f032475e7c244eff7371d5'),
hashutil.hash_to_bytes(
'30d8be353ed3480476f032475e7c244eff7371d5')
],
'children': [
hashutil.hash_to_bytes(
'123546353ed3480476f032475e7c244eff7371d5'),
],
'metadata': {
'original_artifact': [{
'archive_type': 'tar',
'name': 'webbase-5.7.0.tar.gz',
'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
'309d36484e7edf7bb912',
}]
},
}
expected_revision = {
'id': '18d8be353ed3480476f032475e7c233eff7371d5',
'directory': '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6',
'author': {
'name': 'Software Heritage',
'fullname': 'robot robot@softwareheritage.org',
'email': 'robot@softwareheritage.org',
},
'committer': {
'name': 'Software Heritage',
'fullname': 'robot robot@softwareheritage.org',
'email': 'robot@softwareheritage.org',
},
'message': None,
'message_decoding_failed': True,
'date': "2000-01-17T11:23:54+00:00",
'committer_date': "2000-01-17T11:23:54+00:00",
'children': [
'123546353ed3480476f032475e7c244eff7371d5'
],
'parents': [
'29d8be353ed3480476f032475e7c244eff7371d5',
'30d8be353ed3480476f032475e7c244eff7371d5'
],
'type': 'tar',
'synthetic': True,
'metadata': {
'original_artifact': [{
'archive_type': 'tar',
'name': 'webbase-5.7.0.tar.gz',
'sha1': '147f73f369733d088b7a6fa9c4e0273dcd3c7ccd',
'sha1_git': '6a15ea8b881069adedf11feceec35588f2cfe8f1',
'sha256': '401d0df797110bea805d358b85bcc1ced29549d3d73f'
'309d36484e7edf7bb912'
}]
},
'merge': True
}
# when
actual_revision = converters.from_revision(revision_input)
# then
self.assertEqual(actual_revision, expected_revision)
def test_from_content_none(self):
self.assertIsNone(converters.from_content(None))
def test_from_content(self):
content_input = {
'sha1': hashutil.hash_to_bytes(
'5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
'sha256': hashutil.hash_to_bytes(
'39007420ca5de7cb3cfc15196335507e'
'e76c98930e7e0afa4d2747d3bf96c926'),
'blake2s256': hashutil.hash_to_bytes(
'49007420ca5de7cb3cfc15196335507e'
'e76c98930e7e0afa4d2747d3bf96c926'),
'sha1_git': hashutil.hash_to_bytes(
'40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
'ctime': 'something-which-is-filtered-out',
'data': b'data in bytes',
'length': 10,
'status': 'hidden',
}
# 'status' is filtered
expected_content = {
'checksums': {
'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
'930e7e0afa4d2747d3bf96c926',
'blake2s256': '49007420ca5de7cb3cfc15196335507ee7'
'6c98930e7e0afa4d2747d3bf96c926',
'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
},
'data': b'data in bytes',
'length': 10,
'status': 'absent',
}
# when
actual_content = converters.from_content(content_input)
# then
self.assertEqual(actual_content, expected_content)
def test_from_person(self):
person_input = {
'id': 10,
'anything': 'else',
'name': b'bob',
'fullname': b'bob bob@alice.net',
'email': b'bob@foo.alice',
}
expected_person = {
'id': 10,
'anything': 'else',
'name': 'bob',
'fullname': 'bob bob@alice.net',
'email': 'bob@foo.alice',
}
# when
actual_person = converters.from_person(person_input)
# then
self.assertEqual(actual_person, expected_person)
def test_from_directory_entries(self):
dir_entries_input = {
'sha1': hashutil.hash_to_bytes(
'5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
'sha256': hashutil.hash_to_bytes(
'39007420ca5de7cb3cfc15196335507e'
'e76c98930e7e0afa4d2747d3bf96c926'),
'sha1_git': hashutil.hash_to_bytes(
'40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
'blake2s256': hashutil.hash_to_bytes(
'685395c5dc57cada459364f0946d3dd45bad5fcbab'
'c1048edb44380f1d31d0aa'),
'target': hashutil.hash_to_bytes(
'40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
'dir_id': hashutil.hash_to_bytes(
'40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
'name': b'bob',
'type': 10,
'status': 'hidden',
}
expected_dir_entries = {
'checksums': {
'sha1': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
'sha256': '39007420ca5de7cb3cfc15196335507ee76c98'
'930e7e0afa4d2747d3bf96c926',
'sha1_git': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
'blake2s256': '685395c5dc57cada459364f0946d3dd45bad5f'
'cbabc1048edb44380f1d31d0aa',
},
'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
'dir_id': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
'name': 'bob',
'type': 10,
'status': 'absent',
}
# when
actual_dir_entries = converters.from_directory_entry(dir_entries_input)
# then
self.assertEqual(actual_dir_entries, expected_dir_entries)
def test_from_filetype(self):
content_filetype = {
'id': hashutil.hash_to_bytes(
'5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5'),
'encoding': b'utf-8',
'mimetype': b'text/plain',
}
expected_content_filetype = {
'id': '5c6f0e2750f48fa0bd0c4cf5976ba0b9e02ebda5',
'encoding': 'utf-8',
'mimetype': 'text/plain',
}
# when
actual_content_filetype = converters.from_filetype(content_filetype)
# then
self.assertEqual(actual_content_filetype, expected_content_filetype)
diff --git a/swh/web/tests/common/test_highlightjs.py b/swh/web/tests/common/test_highlightjs.py
index e51844357..fb5f26f09 100644
--- a/swh/web/tests/common/test_highlightjs.py
+++ b/swh/web/tests/common/test_highlightjs.py
@@ -1,126 +1,126 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.common import highlightjs
from swh.web.tests.testcase import SWHWebTestCase
class HighlightJsTestCase(SWHWebTestCase):
def test_get_hljs_language_from_mime_type(self):
lang = highlightjs.get_hljs_language_from_mime_type('text/plain')
- self.assertEquals(lang, None)
+ self.assertEqual(lang, None)
lang = highlightjs.get_hljs_language_from_mime_type('text/x-c')
- self.assertEquals(lang, 'cpp')
+ self.assertEqual(lang, 'cpp')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-c++')
- self.assertEquals(lang, 'cpp')
+ self.assertEqual(lang, 'cpp')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-perl')
- self.assertEquals(lang, 'perl')
+ self.assertEqual(lang, 'perl')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-python')
- self.assertEquals(lang, 'python')
+ self.assertEqual(lang, 'python')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-msdos-batch') # noqa
- self.assertEquals(lang, 'dos')
+ self.assertEqual(lang, 'dos')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-tex')
- self.assertEquals(lang, 'tex')
+ self.assertEqual(lang, 'tex')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-lisp')
- self.assertEquals(lang, 'lisp')
+ self.assertEqual(lang, 'lisp')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-java')
- self.assertEquals(lang, 'java')
+ self.assertEqual(lang, 'java')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-makefile')
- self.assertEquals(lang, 'makefile')
+ self.assertEqual(lang, 'makefile')
lang = highlightjs.get_hljs_language_from_mime_type('text/x-shellscript') # noqa
- self.assertEquals(lang, 'bash')
+ self.assertEqual(lang, 'bash')
lang = highlightjs.get_hljs_language_from_mime_type('image/png')
- self.assertEquals(lang, None)
+ self.assertEqual(lang, None)
def test_get_hljs_language_from_filename(self):
lang = highlightjs.get_hljs_language_from_filename('foo')
- self.assertEquals(lang, None)
+ self.assertEqual(lang, None)
lang = highlightjs.get_hljs_language_from_filename('foo.h')
- self.assertEquals(lang, 'cpp')
+ self.assertEqual(lang, 'cpp')
lang = highlightjs.get_hljs_language_from_filename('foo.c')
- self.assertEquals(lang, 'cpp')
+ self.assertEqual(lang, 'cpp')
lang = highlightjs.get_hljs_language_from_filename('foo.c.in')
- self.assertEquals(lang, 'cpp')
+ self.assertEqual(lang, 'cpp')
lang = highlightjs.get_hljs_language_from_filename('foo.cpp')
- self.assertEquals(lang, 'cpp')
+ self.assertEqual(lang, 'cpp')
lang = highlightjs.get_hljs_language_from_filename('foo.pl')
- self.assertEquals(lang, 'perl')
+ self.assertEqual(lang, 'perl')
lang = highlightjs.get_hljs_language_from_filename('foo.py')
- self.assertEquals(lang, 'python')
+ self.assertEqual(lang, 'python')
lang = highlightjs.get_hljs_language_from_filename('foo.md')
- self.assertEquals(lang, 'markdown')
+ self.assertEqual(lang, 'markdown')
lang = highlightjs.get_hljs_language_from_filename('foo.js')
- self.assertEquals(lang, 'javascript')
+ self.assertEqual(lang, 'javascript')
lang = highlightjs.get_hljs_language_from_filename('foo.bat')
- self.assertEquals(lang, 'dos')
+ self.assertEqual(lang, 'dos')
lang = highlightjs.get_hljs_language_from_filename('foo.json')
- self.assertEquals(lang, 'json')
+ self.assertEqual(lang, 'json')
lang = highlightjs.get_hljs_language_from_filename('foo.yml')
- self.assertEquals(lang, 'yaml')
+ self.assertEqual(lang, 'yaml')
lang = highlightjs.get_hljs_language_from_filename('foo.ini')
- self.assertEquals(lang, 'ini')
+ self.assertEqual(lang, 'ini')
lang = highlightjs.get_hljs_language_from_filename('foo.cfg')
- self.assertEquals(lang, 'ini')
+ self.assertEqual(lang, 'ini')
lang = highlightjs.get_hljs_language_from_filename('foo.hy')
- self.assertEquals(lang, 'hy')
+ self.assertEqual(lang, 'hy')
lang = highlightjs.get_hljs_language_from_filename('foo.lisp')
- self.assertEquals(lang, 'lisp')
+ self.assertEqual(lang, 'lisp')
lang = highlightjs.get_hljs_language_from_filename('foo.java')
- self.assertEquals(lang, 'java')
+ self.assertEqual(lang, 'java')
lang = highlightjs.get_hljs_language_from_filename('foo.sh')
- self.assertEquals(lang, 'bash')
+ self.assertEqual(lang, 'bash')
lang = highlightjs.get_hljs_language_from_filename('foo.cmake')
- self.assertEquals(lang, 'cmake')
+ self.assertEqual(lang, 'cmake')
lang = highlightjs.get_hljs_language_from_filename('foo.ml')
- self.assertEquals(lang, 'ocaml')
+ self.assertEqual(lang, 'ocaml')
lang = highlightjs.get_hljs_language_from_filename('foo.mli')
- self.assertEquals(lang, 'ocaml')
+ self.assertEqual(lang, 'ocaml')
lang = highlightjs.get_hljs_language_from_filename('foo.rb')
- self.assertEquals(lang, 'ruby')
+ self.assertEqual(lang, 'ruby')
lang = highlightjs.get_hljs_language_from_filename('foo.jl')
- self.assertEquals(lang, 'julia')
+ self.assertEqual(lang, 'julia')
lang = highlightjs.get_hljs_language_from_filename('Makefile')
- self.assertEquals(lang, 'makefile')
+ self.assertEqual(lang, 'makefile')
lang = highlightjs.get_hljs_language_from_filename('CMakeLists.txt')
- self.assertEquals(lang, 'cmake')
+ self.assertEqual(lang, 'cmake')
diff --git a/swh/web/tests/common/test_query.py b/swh/web/tests/common/test_query.py
index 3341f900b..e3086ed36 100644
--- a/swh/web/tests/common/test_query.py
+++ b/swh/web/tests/common/test_query.py
@@ -1,124 +1,124 @@
# Copyright (C) 2015-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
from unittest.mock import patch
from swh.model import hashutil
from swh.web.common import query
from swh.web.common.exc import BadInputExc
from swh.web.tests.testcase import SWHWebTestCase
class QueryTestCase(SWHWebTestCase):
def test_parse_hash_malformed_query_with_more_than_2_parts(self):
with self.assertRaises(BadInputExc):
query.parse_hash('sha1:1234567890987654:other-stuff')
def test_parse_hash_guess_sha1(self):
h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
r = query.parse_hash(h)
- self.assertEquals(r, ('sha1', hashutil.hash_to_bytes(h)))
+ self.assertEqual(r, ('sha1', hashutil.hash_to_bytes(h)))
def test_parse_hash_guess_sha256(self):
h = '084C799CD551DD1D8D5C5F9A5D593B2' \
'E931F5E36122ee5c793c1d08a19839cc0'
r = query.parse_hash(h)
- self.assertEquals(r, ('sha256', hashutil.hash_to_bytes(h)))
+ self.assertEqual(r, ('sha256', hashutil.hash_to_bytes(h)))
def test_parse_hash_guess_algo_malformed_hash(self):
with self.assertRaises(BadInputExc):
query.parse_hash('1234567890987654')
def test_parse_hash_check_sha1(self):
h = 'f1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
r = query.parse_hash('sha1:' + h)
- self.assertEquals(r, ('sha1', hashutil.hash_to_bytes(h)))
+ self.assertEqual(r, ('sha1', hashutil.hash_to_bytes(h)))
def test_parse_hash_check_sha1_git(self):
h = 'e1d2d2f924e986ac86fdf7b36c94bcdf32beec15'
r = query.parse_hash('sha1_git:' + h)
- self.assertEquals(r, ('sha1_git', hashutil.hash_to_bytes(h)))
+ self.assertEqual(r, ('sha1_git', hashutil.hash_to_bytes(h)))
def test_parse_hash_check_sha256(self):
h = '084C799CD551DD1D8D5C5F9A5D593B2E931F5E36122ee5c793c1d08a19839cc0'
r = query.parse_hash('sha256:' + h)
- self.assertEquals(r, ('sha256', hashutil.hash_to_bytes(h)))
+ self.assertEqual(r, ('sha256', hashutil.hash_to_bytes(h)))
def test_parse_hash_check_algo_malformed_sha1_hash(self):
with self.assertRaises(BadInputExc):
query.parse_hash('sha1:1234567890987654')
def test_parse_hash_check_algo_malformed_sha1_git_hash(self):
with self.assertRaises(BadInputExc):
query.parse_hash('sha1_git:1234567890987654')
def test_parse_hash_check_algo_malformed_sha256_hash(self):
with self.assertRaises(BadInputExc):
query.parse_hash('sha256:1234567890987654')
def test_parse_hash_check_algo_unknown_one(self):
with self.assertRaises(BadInputExc):
query.parse_hash('sha2:1234567890987654')
@patch('swh.web.common.query.parse_hash')
def test_parse_hash_with_algorithms_or_throws_bad_query(self, mock_hash):
# given
mock_hash.side_effect = BadInputExc('Error input')
# when
with self.assertRaises(BadInputExc) as cm:
query.parse_hash_with_algorithms_or_throws(
'sha1:blah',
['sha1'],
'useless error message for this use case')
self.assertIn('Error input', cm.exception.args[0])
mock_hash.assert_called_once_with('sha1:blah')
@patch('swh.web.common.query.parse_hash')
def test_parse_hash_with_algorithms_or_throws_bad_algo(self, mock_hash):
# given
mock_hash.return_value = 'sha1', '123'
# when
with self.assertRaises(BadInputExc) as cm:
query.parse_hash_with_algorithms_or_throws(
'sha1:431',
['sha1_git'],
'Only sha1_git!')
self.assertIn('Only sha1_git!', cm.exception.args[0])
mock_hash.assert_called_once_with('sha1:431')
@patch('swh.web.common.query.parse_hash')
def test_parse_hash_with_algorithms(self, mock_hash):
# given
mock_hash.return_value = ('sha256', b'123')
# when
algo, sha = query.parse_hash_with_algorithms_or_throws(
'sha256:123',
['sha256', 'sha1_git'],
'useless error message for this use case')
- self.assertEquals(algo, 'sha256')
- self.assertEquals(sha, b'123')
+ self.assertEqual(algo, 'sha256')
+ self.assertEqual(sha, b'123')
mock_hash.assert_called_once_with('sha256:123')
def test_parse_uuid4(self):
# when
actual_uuid = query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42cec')
# then
- self.assertEquals(actual_uuid, '7c33636b-8f11-4bda-89d9-ba8b76a42cec')
+ self.assertEqual(actual_uuid, '7c33636b-8f11-4bda-89d9-ba8b76a42cec')
def test_parse_uuid4_ko(self):
# when
with self.assertRaises(BadInputExc) as cm:
query.parse_uuid4('7c33636b-8f11-4bda-89d9-ba8b76a42')
self.assertIn('badly formed hexadecimal UUID string',
cm.exception.args[0])
diff --git a/swh/web/tests/common/test_service.py b/swh/web/tests/common/test_service.py
index 3ec5f8c16..b7cca942f 100644
--- a/swh/web/tests/common/test_service.py
+++ b/swh/web/tests/common/test_service.py
@@ -1,2011 +1,2011 @@
# Copyright (C) 2015-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
from unittest.mock import MagicMock, patch, call
from swh.model.hashutil import hash_to_bytes, hash_to_hex
from swh.web.common import service
from swh.web.common.exc import BadInputExc, NotFoundExc
from swh.web.tests.testcase import SWHWebTestCase
class ServiceTestCase(SWHWebTestCase):
def setUp(self):
self.BLAKE2S256_SAMPLE = ('685395c5dc57cada459364f0946d3dd45b'
'ad5fcbabc1048edb44380f1d31d0aa')
self.BLAKE2S256_SAMPLE_BIN = hash_to_bytes(self.BLAKE2S256_SAMPLE)
self.SHA1_SAMPLE = '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'
self.SHA1_SAMPLE_BIN = hash_to_bytes(self.SHA1_SAMPLE)
self.SHA256_SAMPLE = ('8abb0aa566452620ecce816eecdef4792d77a'
'293ad8ea82a4d5ecb4d36f7e560')
self.SHA256_SAMPLE_BIN = hash_to_bytes(self.SHA256_SAMPLE)
self.SHA1GIT_SAMPLE = '25d1a2e8f32937b0f498a5ca87f823d8df013c01'
self.SHA1GIT_SAMPLE_BIN = hash_to_bytes(self.SHA1GIT_SAMPLE)
self.DIRECTORY_ID = '7834ef7e7c357ce2af928115c6c6a42b7e2a44e6'
self.DIRECTORY_ID_BIN = hash_to_bytes(self.DIRECTORY_ID)
self.AUTHOR_ID_BIN = {
'name': b'author',
'email': b'author@company.org',
}
self.AUTHOR_ID = {
'name': 'author',
'email': 'author@company.org',
}
self.COMMITTER_ID_BIN = {
'name': b'committer',
'email': b'committer@corp.org',
}
self.COMMITTER_ID = {
'name': 'committer',
'email': 'committer@corp.org',
}
self.SAMPLE_DATE_RAW = {
'timestamp': datetime.datetime(
2000, 1, 17, 11, 23, 54,
tzinfo=datetime.timezone.utc,
).timestamp(),
'offset': 0,
'negative_utc': False,
}
self.SAMPLE_DATE = '2000-01-17T11:23:54+00:00'
self.SAMPLE_MESSAGE_BIN = b'elegant fix for bug 31415957'
self.SAMPLE_MESSAGE = 'elegant fix for bug 31415957'
self.SAMPLE_REVISION = {
'id': self.SHA1_SAMPLE,
'directory': self.DIRECTORY_ID,
'author': self.AUTHOR_ID,
'committer': self.COMMITTER_ID,
'message': self.SAMPLE_MESSAGE,
'date': self.SAMPLE_DATE,
'committer_date': self.SAMPLE_DATE,
'synthetic': False,
'type': 'git',
'parents': [],
'metadata': {},
'merge': False
}
self.SAMPLE_REVISION_RAW = {
'id': self.SHA1_SAMPLE_BIN,
'directory': self.DIRECTORY_ID_BIN,
'author': self.AUTHOR_ID_BIN,
'committer': self.COMMITTER_ID_BIN,
'message': self.SAMPLE_MESSAGE_BIN,
'date': self.SAMPLE_DATE_RAW,
'committer_date': self.SAMPLE_DATE_RAW,
'synthetic': False,
'type': 'git',
'parents': [],
'metadata': [],
}
self.SAMPLE_CONTENT = {
'checksums': {
'blake2s256': self.BLAKE2S256_SAMPLE,
'sha1': self.SHA1_SAMPLE,
'sha256': self.SHA256_SAMPLE,
'sha1_git': self.SHA1GIT_SAMPLE,
},
'length': 190,
'status': 'absent'
}
self.SAMPLE_CONTENT_RAW = {
'blake2s256': self.BLAKE2S256_SAMPLE_BIN,
'sha1': self.SHA1_SAMPLE_BIN,
'sha256': self.SHA256_SAMPLE_BIN,
'sha1_git': self.SHA1GIT_SAMPLE_BIN,
'length': 190,
'status': 'hidden'
}
self.date_origin_visit1 = datetime.datetime(
2015, 1, 1, 22, 0, 0,
tzinfo=datetime.timezone.utc)
self.origin_visit1 = {
'date': self.date_origin_visit1,
'origin': 1,
'visit': 1
}
@patch('swh.web.common.service.storage')
def test_lookup_multiple_hashes_ball_missing(self, mock_storage):
# given
mock_storage.content_missing_per_sha1 = MagicMock(return_value=[])
# when
actual_lookup = service.lookup_multiple_hashes(
[{'filename': 'a',
'sha1': '456caf10e9535160d90e874b45aa426de762f19f'},
{'filename': 'b',
'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}])
# then
- self.assertEquals(actual_lookup, [
+ self.assertEqual(actual_lookup, [
{'filename': 'a',
'sha1': '456caf10e9535160d90e874b45aa426de762f19f',
'found': True},
{'filename': 'b',
'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865',
'found': True}
])
@patch('swh.web.common.service.storage')
def test_lookup_multiple_hashes_some_missing(self, mock_storage):
    """A hash reported missing by storage comes back with found=False.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given: the first sha1 is missing from storage
    mock_storage.content_missing_per_sha1 = MagicMock(return_value=[
        hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')
    ])
    # when
    actual_lookup = service.lookup_multiple_hashes(
        [{'filename': 'a',
          'sha1': '456caf10e9535160d90e874b45aa426de762f19f'},
         {'filename': 'b',
          'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865'}])
    # then
    self.assertEqual(actual_lookup, [
        {'filename': 'a',
         'sha1': '456caf10e9535160d90e874b45aa426de762f19f',
         'found': False},
        {'filename': 'b',
         'sha1': '745bab676c8f3cec8016e0c39ea61cf57e518865',
         'found': True}
    ])
@patch('swh.web.common.service.storage')
def test_lookup_hash_does_not_exist(self, mock_storage):
    """lookup_hash returns found=None for an unknown hash.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given
    mock_storage.content_find = MagicMock(return_value=None)
    # when
    actual_lookup = service.lookup_hash(
        'sha1_git:123caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual({'found': None,
                      'algo': 'sha1_git'}, actual_lookup)
    # check the function has been called with parameters
    mock_storage.content_find.assert_called_with(
        {'sha1_git':
         hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')})
@patch('swh.web.common.service.storage')
def test_lookup_hash_exist(self, mock_storage):
    """lookup_hash returns the storage row under 'found' when it exists.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given
    stub_content = {
        'sha1': hash_to_bytes(
            '456caf10e9535160d90e874b45aa426de762f19f')
    }
    mock_storage.content_find = MagicMock(return_value=stub_content)
    # when
    actual_lookup = service.lookup_hash(
        'sha1:456caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual({'found': stub_content,
                      'algo': 'sha1'}, actual_lookup)
    mock_storage.content_find.assert_called_with(
        {'sha1':
         hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')}
    )
@patch('swh.web.common.service.storage')
def test_search_hash_does_not_exist(self, mock_storage):
    """search_hash answers found=False for an unknown hash.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given
    mock_storage.content_find = MagicMock(return_value=None)
    # when
    actual_lookup = service.search_hash(
        'sha1_git:123caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual({'found': False}, actual_lookup)
    # check the function has been called with parameters
    mock_storage.content_find.assert_called_with(
        {'sha1_git':
         hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')})
@patch('swh.web.common.service.storage')
def test_search_hash_exist(self, mock_storage):
    """search_hash answers found=True for a known hash.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given
    stub_content = {
        'sha1': hash_to_bytes(
            '456caf10e9535160d90e874b45aa426de762f19f')
    }
    mock_storage.content_find = MagicMock(return_value=stub_content)
    # when
    actual_lookup = service.search_hash(
        'sha1:456caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual({'found': True}, actual_lookup)
    mock_storage.content_find.assert_called_with(
        {'sha1':
         hash_to_bytes('456caf10e9535160d90e874b45aa426de762f19f')},
    )
@patch('swh.web.common.service.idx_storage')
def test_lookup_content_ctags(self, mock_idx_storage):
    """A ctags row is returned with its binary id hex-encoded."""
    # given: one indexed ctags entry with a binary id
    raw_entry = {
        'id': hash_to_bytes(
            '123caf10e9535160d90e874b45aa426de762f19f'),
        'line': 100,
        'name': 'hello',
        'kind': 'function',
        'tool_name': 'ctags',
        'tool_version': 'some-version',
    }
    mock_idx_storage.content_ctags_get = MagicMock(
        return_value=[raw_entry])
    # the expected entry is the same row with a hex id
    expected_ctags = [dict(
        raw_entry, id='123caf10e9535160d90e874b45aa426de762f19f')]
    # when
    actual_ctags = list(service.lookup_content_ctags(
        'sha1:123caf10e9535160d90e874b45aa426de762f19f'))
    # then
    self.assertEqual(actual_ctags, expected_ctags)
    mock_idx_storage.content_ctags_get.assert_called_with(
        [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
@patch('swh.web.common.service.idx_storage')
def test_lookup_content_ctags_no_hash(self, mock_idx_storage):
    """No indexed ctags for the hash yields an empty result."""
    # given
    mock_idx_storage.content_ctags_get = MagicMock(return_value=[])
    # when
    actual_ctags = service.lookup_content_ctags(
        'sha1:123caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(list(actual_ctags), [])
@patch('swh.web.common.service.idx_storage')
def test_lookup_content_filetype(self, mock_idx_storage):
    """Binary id/mimetype/encoding of a filetype row are decoded."""
    # given
    sha1_bin = hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')
    mock_idx_storage.content_mimetype_get = MagicMock(
        return_value=[{
            'id': sha1_bin,
            'mimetype': b'text/x-c++',
            'encoding': b'us-ascii',
        }])
    # when
    actual_filetype = service.lookup_content_filetype(
        'sha1:123caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(actual_filetype, {
        'id': '123caf10e9535160d90e874b45aa426de762f19f',
        'mimetype': 'text/x-c++',
        'encoding': 'us-ascii',
    })
    mock_idx_storage.content_mimetype_get.assert_called_with([sha1_bin])
@patch('swh.web.common.service.idx_storage')
@patch('swh.web.common.service.storage')
def test_lookup_content_filetype_2(self, mock_storage, mock_idx_storage):
    """A sha1_git query is resolved to a sha1 before the mimetype lookup.

    Fix: the original *invoked* ``mock_storage.content_find(...)``
    instead of asserting on it, which verified nothing; replaced with a
    real assertion.
    """
    # given
    mock_storage.content_find = MagicMock(
        return_value={
            'sha1': hash_to_bytes(
                '123caf10e9535160d90e874b45aa426de762f19f')
        }
    )
    mock_idx_storage.content_mimetype_get = MagicMock(
        return_value=[{
            'id': hash_to_bytes(
                '123caf10e9535160d90e874b45aa426de762f19f'),
            'mimetype': b'text/x-python',
            'encoding': b'us-ascii',
        }]
    )
    expected_filetype = {
        'id': '123caf10e9535160d90e874b45aa426de762f19f',
        'mimetype': 'text/x-python',
        'encoding': 'us-ascii',
    }
    # when
    actual_filetype = service.lookup_content_filetype(
        'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(actual_filetype, expected_filetype)
    # the sha1_git must have been resolved through content_find
    # TODO(review): tighten to assert_called_with once the exact call
    # signature used by the service is confirmed.
    self.assertTrue(mock_storage.content_find.called)
    mock_idx_storage.content_mimetype_get.assert_called_with(
        [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
@patch('swh.web.common.service.idx_storage')
def test_lookup_content_language(self, mock_idx_storage):
    """The binary id of a language row is hex-encoded in the result."""
    # given
    sha1_bin = hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')
    mock_idx_storage.content_language_get = MagicMock(
        return_value=[{
            'id': sha1_bin,
            'lang': 'python',
        }])
    # when
    actual_language = service.lookup_content_language(
        'sha1:123caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(actual_language, {
        'id': '123caf10e9535160d90e874b45aa426de762f19f',
        'lang': 'python',
    })
    mock_idx_storage.content_language_get.assert_called_with([sha1_bin])
@patch('swh.web.common.service.idx_storage')
@patch('swh.web.common.service.storage')
def test_lookup_content_language_2(self, mock_storage, mock_idx_storage):
    """A sha1_git query is resolved to a sha1 before the language lookup.

    Fix: the original *invoked* ``mock_storage.content_find(...)``
    instead of asserting on it, which verified nothing; replaced with a
    real assertion.
    """
    # given
    mock_storage.content_find = MagicMock(
        return_value={
            'sha1': hash_to_bytes(
                '123caf10e9535160d90e874b45aa426de762f19f')
        }
    )
    mock_idx_storage.content_language_get = MagicMock(
        return_value=[{
            'id': hash_to_bytes(
                '123caf10e9535160d90e874b45aa426de762f19f'),
            'lang': 'haskell',
        }]
    )
    expected_language = {
        'id': '123caf10e9535160d90e874b45aa426de762f19f',
        'lang': 'haskell',
    }
    # when
    actual_language = service.lookup_content_language(
        'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(actual_language, expected_language)
    # the sha1_git must have been resolved through content_find
    # TODO(review): tighten to assert_called_with once the exact call
    # signature used by the service is confirmed.
    self.assertTrue(mock_storage.content_find.called)
    mock_idx_storage.content_language_get.assert_called_with(
        [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
@patch('swh.web.common.service.idx_storage')
def test_lookup_expression(self, mock_idx_storage):
    """ctags search results get 'id' renamed to a hex 'sha1'."""
    # given: fields shared between the stored row and the expected one
    common_fields = {
        'name': 'foobar',
        'kind': 'variable',
        'lang': 'C',
        'line': 10
    }
    mock_idx_storage.content_ctags_search = MagicMock(
        return_value=[dict(
            common_fields,
            id=hash_to_bytes(
                '123caf10e9535160d90e874b45aa426de762f19f'))])
    expected_ctags = [dict(
        common_fields,
        sha1='123caf10e9535160d90e874b45aa426de762f19f')]
    # when
    actual_ctags = list(service.lookup_expression(
        'foobar', last_sha1='hash', per_page=10))
    # then
    self.assertEqual(actual_ctags, expected_ctags)
    mock_idx_storage.content_ctags_search.assert_called_with(
        'foobar', last_sha1='hash', limit=10)
@patch('swh.web.common.service.idx_storage')
def test_lookup_expression_no_result(self, mock_idx_storage):
    """An expression with no ctags hits yields an empty result."""
    # given
    mock_idx_storage.content_ctags_search = MagicMock(
        return_value=[])
    # when
    actual_ctags = service.lookup_expression(
        'barfoo', last_sha1='hash', per_page=10)
    # then
    self.assertEqual(list(actual_ctags), [])
    mock_idx_storage.content_ctags_search.assert_called_with(
        'barfoo', last_sha1='hash', limit=10)
@patch('swh.web.common.service.idx_storage')
def test_lookup_content_license(self, mock_idx_storage):
    """License facts are returned under the hex-encoded content id."""
    # given
    sha1_bin = hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')
    license_facts = [{
        'licenses': ['GPL-3.0+'],
        'tool': {}
    }]
    mock_idx_storage.content_fossology_license_get = MagicMock(
        return_value=[{sha1_bin: license_facts}])
    # when
    actual_license = service.lookup_content_license(
        'sha1:123caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(actual_license, {
        'id': '123caf10e9535160d90e874b45aa426de762f19f',
        'facts': license_facts,
    })
    mock_idx_storage.content_fossology_license_get.assert_called_with(
        [sha1_bin])
@patch('swh.web.common.service.idx_storage')
@patch('swh.web.common.service.storage')
def test_lookup_content_license_2(self, mock_storage, mock_idx_storage):
    """A sha1_git query is resolved to a sha1 before the license lookup.

    Fix: the original *invoked* ``mock_storage.content_find(...)``
    instead of asserting on it, which verified nothing; replaced with a
    real assertion.
    """
    # given
    mock_storage.content_find = MagicMock(
        return_value={
            'sha1': hash_to_bytes(
                '123caf10e9535160d90e874b45aa426de762f19f')
        }
    )
    mock_idx_storage.content_fossology_license_get = MagicMock(
        return_value=[{
            hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f'): [{
                'licenses': ['BSD-2-Clause'],
                'tool': {}
            }]
        }]
    )
    expected_license = {
        'id': '123caf10e9535160d90e874b45aa426de762f19f',
        'facts': [{
            'licenses': ['BSD-2-Clause'],
            'tool': {}
        }]
    }
    # when
    actual_license = service.lookup_content_license(
        'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(actual_license, expected_license)
    # the sha1_git must have been resolved through content_find
    # TODO(review): tighten to assert_called_with once the exact call
    # signature used by the service is confirmed.
    self.assertTrue(mock_storage.content_find.called)
    mock_idx_storage.content_fossology_license_get.assert_called_with(
        [hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')])
@patch('swh.web.common.service.storage')
def test_lookup_content_provenance(self, mock_storage):
    """Provenance rows get their binary hashes and path decoded."""
    # given: storage yields one provenance row lazily
    mock_storage.content_find_provenance = MagicMock(
        return_value=iter([{
            'content': hash_to_bytes(
                '123caf10e9535160d90e874b45aa426de762f19f'),
            'revision': hash_to_bytes(
                '456caf10e9535160d90e874b45aa426de762f19f'),
            'origin': 100,
            'visit': 1,
            'path': b'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html'
        }]))
    expected_provenances = [{
        'content': '123caf10e9535160d90e874b45aa426de762f19f',
        'revision': '456caf10e9535160d90e874b45aa426de762f19f',
        'origin': 100,
        'visit': 1,
        'path': 'octavio-3.4.0/octave.html/doc_002dS_005fISREG.html'
    }]
    # when
    actual_provenances = service.lookup_content_provenance(
        'sha1_git:123caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertEqual(list(actual_provenances), expected_provenances)
    mock_storage.content_find_provenance.assert_called_with(
        {'sha1_git':
         hash_to_bytes('123caf10e9535160d90e874b45aa426de762f19f')})
@patch('swh.web.common.service.storage')
def test_lookup_content_provenance_not_found(self, mock_storage):
    """Unknown content yields a None provenance."""
    # given
    sha1_git_bin = hash_to_bytes(
        '456caf10e9535160d90e874b45aa426de762f19f')
    mock_storage.content_find_provenance = MagicMock(return_value=None)
    # when
    actual_provenances = service.lookup_content_provenance(
        'sha1_git:456caf10e9535160d90e874b45aa426de762f19f')
    # then
    self.assertIsNone(actual_provenances)
    mock_storage.content_find_provenance.assert_called_with(
        {'sha1_git': sha1_git_bin})
@patch('swh.web.common.service.storage')
def test_stat_counters(self, mock_storage):
    """stat_counters passes the storage counters through unchanged."""
    # given
    input_stats = {
        "content": 1770830,
        "directory": 211683,
        "directory_entry_dir": 209167,
        "directory_entry_file": 1807094,
        "directory_entry_rev": 0,
        "entity": 0,
        "entity_history": 0,
        "origin": 1096,
        "person": 0,
        "release": 8584,
        "revision": 7792,
        "revision_history": 0,
        "skipped_content": 0
    }
    mock_storage.stat_counters = MagicMock(return_value=input_stats)
    # when
    actual_stats = service.stat_counters()
    # then: the service must not alter the counters
    self.assertEqual(actual_stats, input_stats)
    mock_storage.stat_counters.assert_called_with()
@patch('swh.web.common.service._lookup_origin_visits')
def test_lookup_origin_visits(self, mock_lookup_visits):
    """Visits are serialized: ISO dates, hex target, decoded branch,
    and a None metadata becomes an empty dict."""
    # given
    visit2_date = datetime.datetime(
        2013, 7, 1, 20, 0, 0,
        tzinfo=datetime.timezone.utc)
    visit3_date = datetime.datetime(
        2015, 1, 1, 21, 0, 0,
        tzinfo=datetime.timezone.utc)
    mock_lookup_visits.return_value = [self.origin_visit1, {
        'date': visit2_date,
        'origin': 1,
        'visit': 2,
        'target': hash_to_bytes(
            '65a55bbdf3629f916219feb3dcc7393ded1bc8db'),
        'branch': b'master',
        'target_type': 'release',
        'metadata': None,
    }, {
        'date': visit3_date,
        'origin': 1,
        'visit': 3
    }]
    expected_origin_visits = [{
        'date': self.origin_visit1['date'].isoformat(),
        'origin': self.origin_visit1['origin'],
        'visit': self.origin_visit1['visit']
    }, {
        'date': visit2_date.isoformat(),
        'origin': 1,
        'visit': 2,
        'target': '65a55bbdf3629f916219feb3dcc7393ded1bc8db',
        'branch': 'master',
        'target_type': 'release',
        'metadata': {},
    }, {
        'date': visit3_date.isoformat(),
        'origin': 1,
        'visit': 3
    }]
    # when
    actual_origin_visits = service.lookup_origin_visits(6)
    # then
    self.assertEqual(list(actual_origin_visits), expected_origin_visits)
    mock_lookup_visits.assert_called_once_with(
        6, last_visit=None, limit=10)
@patch('swh.web.common.service.storage')
def test_lookup_origin_visit(self, mock_storage):
    """A single visit is serialized with an ISO-formatted date."""
    # given
    mock_storage.origin_visit_get_by.return_value = self.origin_visit1
    expected_origin_visit = {
        'date': self.origin_visit1['date'].isoformat(),
        'origin': self.origin_visit1['origin'],
        'visit': self.origin_visit1['visit']
    }
    # when
    actual_origin_visit = service.lookup_origin_visit(1, 1)
    # then
    self.assertEqual(actual_origin_visit, expected_origin_visit)
    mock_storage.origin_visit_get_by.assert_called_once_with(1, 1)
@patch('swh.web.common.service.storage')
def test_lookup_origin(self, mock_storage):
    """lookup_origin returns the storage row for the requested origin."""
    # given
    origin_info = {'id': 'origin-id',
                   'url': 'ftp://some/url/to/origin',
                   'type': 'ftp'}
    mock_storage.origin_get = MagicMock(return_value=origin_info)
    # when
    actual_origin = service.lookup_origin({'id': 'origin-id'})
    # then
    self.assertEqual(actual_origin, origin_info)
    mock_storage.origin_get.assert_called_with({'id': 'origin-id'})
@patch('swh.web.common.service.storage')
def test_lookup_release_ko_id_checksum_not_a_sha1(self, mock_storage):
    """A non-hexadecimal release id is rejected with BadInputExc.

    Fix: the original *assigned* ``mock_storage.release_get.called =
    False`` (a no-op as a check); replaced with a real assertion that
    storage was never queried.
    """
    # given
    mock_storage.release_get = MagicMock()
    with self.assertRaises(BadInputExc) as cm:
        # when
        service.lookup_release('not-a-sha1')
    self.assertIn('invalid checksum', cm.exception.args[0].lower())
    self.assertFalse(mock_storage.release_get.called)
@patch('swh.web.common.service.storage')
def test_lookup_release_ko_id_checksum_too_long(self, mock_storage):
    """A sha256-length release id is rejected with BadInputExc.

    Fix: the original *assigned* ``mock_storage.release_get.called =
    False`` (a no-op as a check); replaced with a real assertion that
    storage was never queried.
    """
    # given
    mock_storage.release_get = MagicMock()
    # when
    with self.assertRaises(BadInputExc) as cm:
        service.lookup_release(
            '13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4daf5'
            '1aea892abe')
    self.assertEqual('Only sha1_git is supported.', cm.exception.args[0])
    self.assertFalse(mock_storage.release_get.called)
@patch('swh.web.common.service.storage')
def test_lookup_directory_with_path_not_found(self, mock_storage):
    # NOTE(review): this test calls the MagicMock attribute directly and
    # never invokes the service under test, so it only verifies mock
    # behavior. It should probably call
    # service.lookup_directory_with_path instead — confirm the intended
    # target and its signature before changing it.
    # given
    mock_storage.lookup_directory_with_path = MagicMock(return_value=None)
    sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
    # when: invokes the mock itself, not the service
    actual_directory = mock_storage.lookup_directory_with_path(
        sha1_git, 'some/path/here')
    self.assertIsNone(actual_directory)
@patch('swh.web.common.service.storage')
def test_lookup_directory_with_path_found(self, mock_storage):
    # NOTE(review): like its _not_found sibling, this test exercises the
    # MagicMock directly rather than the service; it asserts only that
    # the mock returns what it was configured to return. Confirm the
    # intended service entry point before rewriting.
    # given
    sha1_git = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
    entry = {'id': 'dir-id',
             'type': 'dir',
             'name': 'some/path/foo'}
    mock_storage.lookup_directory_with_path = MagicMock(return_value=entry)
    # when: invokes the mock itself, not the service
    actual_directory = mock_storage.lookup_directory_with_path(
        sha1_git, 'some/path/here')
    self.assertEqual(entry, actual_directory)
@patch('swh.web.common.service.storage')
def test_lookup_release(self, mock_storage):
    """Release fields are decoded and the date rendered with the
    negative UTC offset ('-00:00')."""
    # given
    release_ts = datetime.datetime(
        2015, 1, 1, 22, 0, 0,
        tzinfo=datetime.timezone.utc).timestamp()
    mock_storage.release_get = MagicMock(return_value=[{
        'id': hash_to_bytes('65a55bbdf3629f916219feb3dcc7393ded1bc8db'),
        'target': None,
        'date': {
            'timestamp': release_ts,
            'offset': 0,
            'negative_utc': True,
        },
        'name': b'v0.0.1',
        'message': b'synthetic release',
        'synthetic': True,
    }])
    # when
    actual_release = service.lookup_release(
        '65a55bbdf3629f916219feb3dcc7393ded1bc8db')
    # then
    self.assertEqual(actual_release, {
        'id': '65a55bbdf3629f916219feb3dcc7393ded1bc8db',
        'target': None,
        'date': '2015-01-01T22:00:00-00:00',
        'name': 'v0.0.1',
        'message': 'synthetic release',
        'synthetic': True,
    })
    mock_storage.release_get.assert_called_with(
        [hash_to_bytes('65a55bbdf3629f916219feb3dcc7393ded1bc8db')])
def test_lookup_revision_with_context_ko_not_a_sha1_1(self):
    """A sha256-length target id is rejected with BadInputExc."""
    # given: 64 hex chars — too long for sha1_git
    sha1_git = ('13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f62d4'
                'daf51aea892abe')
    sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
    # when
    with self.assertRaises(BadInputExc) as cm:
        service.lookup_revision_with_context(sha1_git_root, sha1_git)
    self.assertIn('Only sha1_git is supported', cm.exception.args[0])
def test_lookup_revision_with_context_ko_not_a_sha1_2(self):
    """A sha256-length target id is rejected regardless of the root."""
    # given: 64 hex chars — too long for sha1_git
    sha1_git_root = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
    sha1_git = ('13c1d34d138ec13b5ebad226dc2528dc7506c956e4646f6'
                '2d4daf51aea892abe')
    # when
    with self.assertRaises(BadInputExc) as cm:
        service.lookup_revision_with_context(sha1_git_root, sha1_git)
    self.assertIn('Only sha1_git is supported', cm.exception.args[0])
@patch('swh.web.common.service.storage')
def test_lookup_revision_with_context_ko_sha1_git_does_not_exist(
        self,
        mock_storage):
    """An unknown target revision raises NotFoundExc."""
    # given
    root_hex = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
    target_hex = '777777bdf3629f916219feb3dcc7393ded1bc8db'
    mock_storage.revision_get.return_value = None
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_revision_with_context(root_hex, target_hex)
    self.assertIn('Revision 777777bdf3629f916219feb3dcc7393ded1bc8db'
                  ' not found', cm.exception.args[0])
    mock_storage.revision_get.assert_called_once_with(
        [hash_to_bytes(target_hex)])
@patch('swh.web.common.service.storage')
def test_lookup_revision_with_context_ko_root_sha1_git_does_not_exist(
        self,
        mock_storage):
    """A known target but unknown root revision raises NotFoundExc."""
    # given
    root_hex = '65a55bbdf3629f916219feb3dcc7393ded1bc8db'
    target_hex = '777777bdf3629f916219feb3dcc7393ded1bc8db'
    # first lookup (target) resolves, second (root) does not
    mock_storage.revision_get.side_effect = ['foo', None]
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_revision_with_context(root_hex, target_hex)
    self.assertIn('Revision root 65a55bbdf3629f916219feb3dcc7393ded1bc8db'
                  ' not found', cm.exception.args[0])
    mock_storage.revision_get.assert_has_calls(
        [call([hash_to_bytes(target_hex)]),
         call([hash_to_bytes(root_hex)])])
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_revision_with_context(self, mock_query, mock_storage):
    """The target revision is returned with its children computed from
    the root revision's log.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given
    sha1_git_root = '666'
    sha1_git = '883'
    sha1_git_root_bin = b'666'
    sha1_git_bin = b'883'
    sha1_git_root_dict = {
        'id': sha1_git_root_bin,
        'parents': [b'999'],
    }
    sha1_git_dict = {
        'id': sha1_git_bin,
        'parents': [],
        'directory': b'278',
    }
    # log of the root: 883 is a parent of 999 and 777
    stub_revisions = [
        sha1_git_root_dict,
        {
            'id': b'999',
            'parents': [b'777', b'883', b'888'],
        },
        {
            'id': b'777',
            'parents': [b'883'],
        },
        sha1_git_dict,
        {
            'id': b'888',
            'parents': [b'889'],
        },
        {
            'id': b'889',
            'parents': [],
        },
    ]
    # inputs ok
    mock_query.parse_hash_with_algorithms_or_throws.side_effect = [
        ('sha1', sha1_git_bin),
        ('sha1', sha1_git_root_bin)
    ]
    # lookup revision first 883, then 666 (both exists)
    mock_storage.revision_get.return_value = [
        sha1_git_dict,
        sha1_git_root_dict
    ]
    mock_storage.revision_log = MagicMock(
        return_value=stub_revisions)
    # when
    actual_revision = service.lookup_revision_with_context(
        sha1_git_root,
        sha1_git)
    # then
    self.assertEqual(actual_revision, {
        'id': hash_to_hex(sha1_git_bin),
        'parents': [],
        'children': [hash_to_hex(b'999'), hash_to_hex(b'777')],
        'directory': hash_to_hex(b'278'),
        'merge': False
    })
    mock_query.parse_hash_with_algorithms_or_throws.assert_has_calls(
        [call(sha1_git, ['sha1'], 'Only sha1_git is supported.'),
         call(sha1_git_root, ['sha1'], 'Only sha1_git is supported.')])
    mock_storage.revision_log.assert_called_with(
        [sha1_git_root_bin], 100)
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_revision_with_context_retrieved_as_dict(
        self, mock_query, mock_storage):
    """The root revision may be passed as an already-retrieved dict, in
    which case only the target is parsed and looked up.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given
    sha1_git = '883'
    sha1_git_root_bin = b'666'
    sha1_git_bin = b'883'
    sha1_git_root_dict = {
        'id': sha1_git_root_bin,
        'parents': [b'999'],
    }
    sha1_git_dict = {
        'id': sha1_git_bin,
        'parents': [],
        'directory': b'278',
    }
    stub_revisions = [
        sha1_git_root_dict,
        {
            'id': b'999',
            'parents': [b'777', b'883', b'888'],
        },
        {
            'id': b'777',
            'parents': [b'883'],
        },
        sha1_git_dict,
        {
            'id': b'888',
            'parents': [b'889'],
        },
        {
            'id': b'889',
            'parents': [],
        },
    ]
    # inputs ok
    mock_query.parse_hash_with_algorithms_or_throws.return_value = (
        'sha1', sha1_git_bin)
    # lookup only on sha1
    mock_storage.revision_get.return_value = [sha1_git_dict]
    mock_storage.revision_log.return_value = stub_revisions
    # when
    actual_revision = service.lookup_revision_with_context(
        {'id': sha1_git_root_bin},
        sha1_git)
    # then
    self.assertEqual(actual_revision, {
        'id': hash_to_hex(sha1_git_bin),
        'parents': [],
        'children': [hash_to_hex(b'999'), hash_to_hex(b'777')],
        'directory': hash_to_hex(b'278'),
        'merge': False
    })
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        sha1_git, ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([sha1_git_bin])
    mock_storage.revision_log.assert_called_with(
        [sha1_git_root_bin], 100)
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_with_revision_not_found(self,
                                                  mock_query,
                                                  mock_storage):
    """A missing revision makes lookup_directory_with_revision raise
    NotFoundExc.

    Fix: the original split the ``assert_called_once_with`` attribute
    access and its argument tuple into two no-op statements, so nothing
    was asserted; joined them into a real call.
    """
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                    b'123')
    mock_storage.revision_get.return_value = None
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_directory_with_revision('123')
    self.assertIn('Revision 123 not found', cm.exception.args[0])
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        '123', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([b'123'])
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_with_revision_ko_revision_with_path_to_nowhere(
        self,
        mock_query,
        mock_storage):
    """A path that resolves to nothing in the revision's directory
    raises NotFoundExc.

    Fix: joined the split (no-op) ``assert_called_once_with`` attribute
    access and bare argument tuple into a real assertion.
    """
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                    b'123')
    dir_id = b'dir-id-as-sha1'
    mock_storage.revision_get.return_value = [{
        'directory': dir_id,
    }]
    mock_storage.directory_entry_get_by_path.return_value = None
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_directory_with_revision(
            '123',
            'path/to/something/unknown')
    exception_text = cm.exception.args[0].lower()
    self.assertIn('directory or file', exception_text)
    self.assertIn('path/to/something/unknown', exception_text)
    self.assertIn('revision 123', exception_text)
    self.assertIn('not found', exception_text)
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        '123', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([b'123'])
    mock_storage.directory_entry_get_by_path.assert_called_once_with(
        b'dir-id-as-sha1', [b'path', b'to', b'something', b'unknown'])
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_with_revision_ko_type_not_implemented(
        self,
        mock_query,
        mock_storage):
    """A path entry of type 'rev' is unsupported and raises
    NotImplementedError.

    Fix: joined the split (no-op) ``assert_called_once_with`` attribute
    access and bare argument tuple into a real assertion.
    """
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                    b'123')
    dir_id = b'dir-id-as-sha1'
    mock_storage.revision_get.return_value = [{
        'directory': dir_id,
    }]
    mock_storage.directory_entry_get_by_path.return_value = {
        'type': 'rev',
        'name': b'some/path/to/rev',
        'target': b'456'
    }
    stub_content = {
        'id': b'12',
        'type': 'file'
    }
    mock_storage.content_get.return_value = stub_content
    # when
    with self.assertRaises(NotImplementedError) as cm:
        service.lookup_directory_with_revision(
            '123',
            'some/path/to/rev')
    self.assertIn("Entity of type rev not implemented.",
                  cm.exception.args[0])
    # then
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        '123', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([b'123'])
    mock_storage.directory_entry_get_by_path.assert_called_once_with(
        b'dir-id-as-sha1', [b'some', b'path', b'to', b'rev'])
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_with_revision_revision_without_path(
        self, mock_query, mock_storage,
):
    """Without a path, the revision's root directory listing is
    returned.

    Fix: joined the split (no-op) ``assert_called_once_with`` attribute
    access and bare argument tuple into a real assertion.
    """
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                    b'123')
    dir_id = b'dir-id-as-sha1'
    mock_storage.revision_get.return_value = [{
        'directory': dir_id,
    }]
    stub_dir_entries = [{
        'id': b'123',
        'type': 'dir'
    }, {
        'id': b'456',
        'type': 'file'
    }]
    mock_storage.directory_ls.return_value = stub_dir_entries
    # when
    actual_directory_entries = service.lookup_directory_with_revision(
        '123')
    self.assertEqual(actual_directory_entries['type'], 'dir')
    self.assertEqual(list(actual_directory_entries['content']),
                     stub_dir_entries)
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        '123', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([b'123'])
    mock_storage.directory_ls.assert_called_once_with(dir_id)
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_with_revision_with_path_to_dir(self,
                                                         mock_query,
                                                         mock_storage):
    """A path resolving to a sub-directory returns that directory's
    listing along with revision and path metadata.

    Fix: joined the split (no-op) ``assert_called_once_with`` attribute
    access and bare argument tuple into a real assertion.
    """
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                    b'123')
    dir_id = b'dir-id-as-sha1'
    mock_storage.revision_get.return_value = [{
        'directory': dir_id,
    }]
    stub_dir_entries = [{
        'id': b'12',
        'type': 'dir'
    }, {
        'id': b'34',
        'type': 'file'
    }]
    mock_storage.directory_entry_get_by_path.return_value = {
        'type': 'dir',
        'name': b'some/path',
        'target': b'456'
    }
    mock_storage.directory_ls.return_value = stub_dir_entries
    # when
    actual_directory_entries = service.lookup_directory_with_revision(
        '123',
        'some/path')
    self.assertEqual(actual_directory_entries['type'], 'dir')
    self.assertEqual(actual_directory_entries['revision'], '123')
    self.assertEqual(actual_directory_entries['path'], 'some/path')
    self.assertEqual(list(actual_directory_entries['content']),
                     stub_dir_entries)
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        '123', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([b'123'])
    mock_storage.directory_entry_get_by_path.assert_called_once_with(
        dir_id,
        [b'some', b'path'])
    mock_storage.directory_ls.assert_called_once_with(b'456')
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_with_revision_with_path_to_file_wo_data(
        self,
        mock_query,
        mock_storage):
    """A path resolving to a file returns its metadata without raw data
    when with_data is not requested.

    Fix: joined the split (no-op) ``assert_called_once_with`` attribute
    access and bare argument tuple into a real assertion.
    """
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                    b'123')
    dir_id = b'dir-id-as-sha1'
    mock_storage.revision_get.return_value = [{
        'directory': dir_id,
    }]
    mock_storage.directory_entry_get_by_path.return_value = {
        'type': 'file',
        'name': b'some/path/to/file',
        'target': b'789'
    }
    stub_content = {
        'status': 'visible',
    }
    mock_storage.content_find.return_value = stub_content
    # when
    actual_content = service.lookup_directory_with_revision(
        '123',
        'some/path/to/file')
    # then
    self.assertEqual(actual_content, {'type': 'file',
                                      'revision': '123',
                                      'path': 'some/path/to/file',
                                      'content': stub_content})
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        '123', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([b'123'])
    mock_storage.directory_entry_get_by_path.assert_called_once_with(
        b'dir-id-as-sha1', [b'some', b'path', b'to', b'file'])
    mock_storage.content_find.assert_called_once_with({'sha1_git': b'789'})
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_with_revision_with_path_to_file_w_data(
        self,
        mock_query,
        mock_storage):
    """With with_data=True, the file's raw data and checksums are
    included in the result.

    Fix: joined the split (no-op) ``assert_called_once_with`` attribute
    access and bare argument tuple into a real assertion.
    """
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = ('sha1',
                                                                    b'123')
    dir_id = b'dir-id-as-sha1'
    mock_storage.revision_get.return_value = [{
        'directory': dir_id,
    }]
    mock_storage.directory_entry_get_by_path.return_value = {
        'type': 'file',
        'name': b'some/path/to/file',
        'target': b'789'
    }
    stub_content = {
        'status': 'visible',
        'sha1': b'content-sha1'
    }
    mock_storage.content_find.return_value = stub_content
    mock_storage.content_get.return_value = [{
        'sha1': b'content-sha1',
        'data': b'some raw data'
    }]
    expected_content = {
        'status': 'visible',
        'checksums': {
            'sha1': hash_to_hex(b'content-sha1'),
        },
        'data': b'some raw data'
    }
    # when
    actual_content = service.lookup_directory_with_revision(
        '123',
        'some/path/to/file',
        with_data=True)
    # then
    self.assertEqual(actual_content, {'type': 'file',
                                      'revision': '123',
                                      'path': 'some/path/to/file',
                                      'content': expected_content})
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_once_with(  # noqa
        '123', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.revision_get.assert_called_once_with([b'123'])
    mock_storage.directory_entry_get_by_path.assert_called_once_with(
        b'dir-id-as-sha1', [b'some', b'path', b'to', b'file'])
    mock_storage.content_find.assert_called_once_with({'sha1_git': b'789'})
    mock_storage.content_get.assert_called_once_with([b'content-sha1'])
@patch('swh.web.common.service.storage')
def test_lookup_revision(self, mock_storage):
    """A raw revision row is converted to its serializable form."""
    # given
    mock_storage.revision_get = MagicMock(
        return_value=[self.SAMPLE_REVISION_RAW])
    # when
    actual_revision = service.lookup_revision(self.SHA1_SAMPLE)
    # then
    self.assertEqual(actual_revision, self.SAMPLE_REVISION)
    mock_storage.revision_get.assert_called_with([self.SHA1_SAMPLE_BIN])
@patch('swh.web.common.service.storage')
def test_lookup_revision_invalid_msg(self, mock_storage):
    """An undecodable message comes back as None with a failure flag."""
    # given: message bytes that are not valid UTF-8
    stub_rev = self.SAMPLE_REVISION_RAW
    stub_rev['message'] = b'elegant fix for bug \xff'
    mock_storage.revision_get = MagicMock(return_value=[stub_rev])
    expected_revision = self.SAMPLE_REVISION
    expected_revision['message'] = None
    expected_revision['message_decoding_failed'] = True
    # when
    actual_revision = service.lookup_revision(self.SHA1_SAMPLE)
    # then
    self.assertEqual(actual_revision, expected_revision)
    mock_storage.revision_get.assert_called_with([self.SHA1_SAMPLE_BIN])
@patch('swh.web.common.service.storage')
def test_lookup_revision_msg_ok(self, mock_storage):
    """lookup_revision_message returns the raw message bytes.

    Fix: resolved leftover diff markers (``-``/``+`` lines) that made
    this method invalid Python, keeping ``assertEqual``.
    """
    # given
    mock_storage.revision_get.return_value = [self.SAMPLE_REVISION_RAW]
    # when
    rv = service.lookup_revision_message(
        self.SHA1_SAMPLE)
    # then
    self.assertEqual(rv, {'message': self.SAMPLE_MESSAGE_BIN})
    mock_storage.revision_get.assert_called_with(
        [self.SHA1_SAMPLE_BIN])
@patch('swh.web.common.service.storage')
def test_lookup_revision_msg_absent(self, mock_storage):
    """A revision without a 'message' key raises NotFoundExc."""
    # given: drop the message from the stub revision
    stub_revision = self.SAMPLE_REVISION_RAW
    del stub_revision['message']
    mock_storage.revision_get.return_value = stub_revision
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_revision_message(self.SHA1_SAMPLE)
    # then
    self.assertEqual(
        cm.exception.args[0],
        'No message for revision with sha1_git %s.' % self.SHA1_SAMPLE,
    )
    mock_storage.revision_get.assert_called_with([self.SHA1_SAMPLE_BIN])
@patch('swh.web.common.service.storage')
def test_lookup_revision_msg_norev(self, mock_storage):
    """Asking for the message of an unknown revision raises
    NotFoundExc."""
    # given
    mock_storage.revision_get.return_value = None
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_revision_message(self.SHA1_SAMPLE)
    # then
    self.assertEqual(
        cm.exception.args[0],
        'Revision with sha1_git %s not found.' % self.SHA1_SAMPLE,
    )
    mock_storage.revision_get.assert_called_with([self.SHA1_SAMPLE_BIN])
@patch('swh.web.common.service.storage')
def test_lookup_revision_multiple(self, mock_storage):
    """lookup_revision_multiple converts every raw revision it is given."""
    # given
    sha1 = self.SHA1_SAMPLE
    sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'
    stub_revisions = [
        self.SAMPLE_REVISION_RAW,
        {
            'id': hash_to_bytes(sha1_other),
            'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5',
            'author': {
                'name': b'name',
                'email': b'name@surname.org',
            },
            'committer': {
                'name': b'name',
                'email': b'name@surname.org',
            },
            'message': b'ugly fix for bug 42',
            'date': {
                'timestamp': datetime.datetime(
                    2000, 1, 12, 5, 23, 54,
                    tzinfo=datetime.timezone.utc).timestamp(),
                'offset': 0,
                'negative_utc': False
            },
            'date_offset': 0,
            'committer_date': {
                'timestamp': datetime.datetime(
                    2000, 1, 12, 5, 23, 54,
                    tzinfo=datetime.timezone.utc).timestamp(),
                'offset': 0,
                'negative_utc': False
            },
            'committer_date_offset': 0,
            'synthetic': False,
            'type': 'git',
            'parents': [],
            'metadata': [],
        }
    ]
    mock_storage.revision_get.return_value = stub_revisions
    # when
    actual_revisions = service.lookup_revision_multiple(
        [sha1, sha1_other])
    # then: bytes fields are decoded, dates rendered ISO, merge flag added
    self.assertEqual(list(actual_revisions), [
        self.SAMPLE_REVISION,
        {
            'id': sha1_other,
            'directory': 'abcdbe353ed3480476f032475e7c233eff7371d5',
            'author': {
                'name': 'name',
                'email': 'name@surname.org',
            },
            'committer': {
                'name': 'name',
                'email': 'name@surname.org',
            },
            'message': 'ugly fix for bug 42',
            'date': '2000-01-12T05:23:54+00:00',
            'date_offset': 0,
            'committer_date': '2000-01-12T05:23:54+00:00',
            'committer_date_offset': 0,
            'synthetic': False,
            'type': 'git',
            'parents': [],
            'metadata': {},
            'merge': False
        }
    ])
    self.assertEqual(
        list(mock_storage.revision_get.call_args[0][0]),
        [hash_to_bytes(sha1),
         hash_to_bytes(sha1_other)])

@patch('swh.web.common.service.storage')
def test_lookup_revision_multiple_none_found(self, mock_storage):
    """When storage returns nothing, lookup_revision_multiple yields nothing."""
    # given
    sha1_bin = self.SHA1_SAMPLE
    sha1_other = 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'
    mock_storage.revision_get.return_value = []
    # then
    actual_revisions = service.lookup_revision_multiple(
        [sha1_bin, sha1_other])
    self.assertEqual(list(actual_revisions), [])
    self.assertEqual(
        list(mock_storage.revision_get.call_args[0][0]),
        [hash_to_bytes(self.SHA1_SAMPLE),
         hash_to_bytes(sha1_other)])
@patch('swh.web.common.service.storage')
def test_lookup_revision_log(self, mock_storage):
    """lookup_revision_log forwards the limit and converts each log entry."""
    # given
    stub_revision_log = [self.SAMPLE_REVISION_RAW]
    mock_storage.revision_log = MagicMock(return_value=stub_revision_log)
    # when
    actual_revision = service.lookup_revision_log(
        'abcdbe353ed3480476f032475e7c233eff7371d5',
        limit=25)
    # then
    self.assertEqual(list(actual_revision), [self.SAMPLE_REVISION])
    mock_storage.revision_log.assert_called_with(
        [hash_to_bytes('abcdbe353ed3480476f032475e7c233eff7371d5')], 25)

@patch('swh.web.common.service.storage')
def test_lookup_revision_log_by(self, mock_storage):
    """lookup_revision_log_by resolves a log through origin/branch."""
    # given
    stub_revision_log = [self.SAMPLE_REVISION_RAW]
    mock_storage.revision_log_by = MagicMock(
        return_value=stub_revision_log)
    # when
    actual_log = service.lookup_revision_log_by(
        1, 'refs/heads/master', None, limit=100)
    # then
    self.assertEqual(list(actual_log), [self.SAMPLE_REVISION])
    mock_storage.revision_log_by.assert_called_with(
        1, 'refs/heads/master', None, limit=100)

@patch('swh.web.common.service.storage')
def test_lookup_revision_log_by_nolog(self, mock_storage):
    """A missing log maps to None rather than raising."""
    # given
    mock_storage.revision_log_by = MagicMock(return_value=None)
    # when
    res = service.lookup_revision_log_by(
        1, 'refs/heads/master', None, limit=100)
    # then
    self.assertEqual(res, None)
    mock_storage.revision_log_by.assert_called_with(
        1, 'refs/heads/master', None, limit=100)
@patch('swh.web.common.service.storage')
def test_lookup_content_raw_not_found(self, mock_storage):
    """An unknown checksum makes lookup_content_raw raise NotFoundExc."""
    # given
    mock_storage.content_find = MagicMock(return_value=None)
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_content_raw('sha1:' + self.SHA1_SAMPLE)
    # then (cm.exception is only populated once the with block exits)
    self.assertIn(cm.exception.args[0],
                  'Content with %s checksum equals to %s not found!' %
                  ('sha1', self.SHA1_SAMPLE))
    mock_storage.content_find.assert_called_with(
        {'sha1': hash_to_bytes(self.SHA1_SAMPLE)})

@patch('swh.web.common.service.storage')
def test_lookup_content_raw(self, mock_storage):
    """lookup_content_raw finds by sha256, then fetches data by sha1."""
    # given
    mock_storage.content_find = MagicMock(return_value={
        'sha1': self.SHA1_SAMPLE,
    })
    mock_storage.content_get = MagicMock(return_value=[{
        'data': b'binary data'}])
    # when
    actual_content = service.lookup_content_raw(
        'sha256:%s' % self.SHA256_SAMPLE)
    # then
    self.assertEqual(actual_content, {'data': b'binary data'})
    mock_storage.content_find.assert_called_once_with(
        {'sha256': self.SHA256_SAMPLE_BIN})
    mock_storage.content_get.assert_called_once_with(
        [self.SHA1_SAMPLE])

@patch('swh.web.common.service.storage')
def test_lookup_content_not_found(self, mock_storage):
    """An unknown checksum makes lookup_content raise NotFoundExc."""
    # given
    mock_storage.content_find = MagicMock(return_value=None)
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_content('sha1:%s' % self.SHA1_SAMPLE)
    # then
    self.assertIn(cm.exception.args[0],
                  'Content with %s checksum equals to %s not found!' %
                  ('sha1', self.SHA1_SAMPLE))
    mock_storage.content_find.assert_called_with(
        {'sha1': self.SHA1_SAMPLE_BIN})

@patch('swh.web.common.service.storage')
def test_lookup_content_with_sha1(self, mock_storage):
    """lookup_content accepts a sha1-qualified query string."""
    # given
    mock_storage.content_find = MagicMock(
        return_value=self.SAMPLE_CONTENT_RAW)
    # when
    actual_content = service.lookup_content(
        'sha1:%s' % self.SHA1_SAMPLE)
    # then
    self.assertEqual(actual_content, self.SAMPLE_CONTENT)
    mock_storage.content_find.assert_called_with(
        {'sha1': hash_to_bytes(self.SHA1_SAMPLE)})

@patch('swh.web.common.service.storage')
def test_lookup_content_with_sha256(self, mock_storage):
    """lookup_content accepts sha256 and preserves the content status."""
    # given
    stub_content = self.SAMPLE_CONTENT_RAW
    stub_content['status'] = 'visible'
    expected_content = self.SAMPLE_CONTENT
    expected_content['status'] = 'visible'
    mock_storage.content_find = MagicMock(
        return_value=stub_content)
    # when
    actual_content = service.lookup_content(
        'sha256:%s' % self.SHA256_SAMPLE)
    # then
    self.assertEqual(actual_content, expected_content)
    mock_storage.content_find.assert_called_with(
        {'sha256': self.SHA256_SAMPLE_BIN})
@patch('swh.web.common.service.storage')
def test_lookup_person(self, mock_storage):
    """lookup_person decodes the bytes name/email fields of a person."""
    # given
    mock_storage.person_get = MagicMock(return_value=[{
        'id': 'person_id',
        'name': b'some_name',
        'email': b'some-email',
    }])
    # when
    actual_person = service.lookup_person('person_id')
    # then
    self.assertEqual(actual_person, {
        'id': 'person_id',
        'name': 'some_name',
        'email': 'some-email',
    })
    mock_storage.person_get.assert_called_with(['person_id'])

@patch('swh.web.common.service.storage')
def test_lookup_directory_bad_checksum(self, mock_storage):
    """An invalid checksum fails validation before storage is queried."""
    # given
    mock_storage.directory_ls = MagicMock()
    # when
    with self.assertRaises(BadInputExc):
        service.lookup_directory('directory_id')
    # then
    mock_storage.directory_ls.called = False

@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory_not_found(self, mock_query, mock_storage):
    """An empty directory listing for a valid hash raises NotFoundExc."""
    # given
    mock_query.parse_hash_with_algorithms_or_throws.return_value = (
        'sha1',
        'directory-id-bin')
    mock_storage.directory_ls.return_value = []
    # when
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_directory('directory_id')
    self.assertIn('Directory with sha1_git directory_id not found',
                  cm.exception.args[0])
    # then
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_with(
        'directory_id', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.directory_ls.assert_called_with('directory-id-bin')
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_directory(self, mock_query, mock_storage):
    """lookup_directory hex-encodes hashes and groups them under 'checksums'."""
    mock_query.parse_hash_with_algorithms_or_throws.return_value = (
        'sha1',
        'directory-sha1-bin')
    # given
    stub_dir_entries = [{
        'sha1': self.SHA1_SAMPLE_BIN,
        'sha256': self.SHA256_SAMPLE_BIN,
        'sha1_git': self.SHA1GIT_SAMPLE_BIN,
        'blake2s256': self.BLAKE2S256_SAMPLE_BIN,
        'target': hash_to_bytes(
            '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03'),
        'dir_id': self.DIRECTORY_ID_BIN,
        'name': b'bob',
        'type': 10,
    }]
    expected_dir_entries = [{
        'checksums': {
            'sha1': self.SHA1_SAMPLE,
            'sha256': self.SHA256_SAMPLE,
            'sha1_git': self.SHA1GIT_SAMPLE,
            'blake2s256': self.BLAKE2S256_SAMPLE
        },
        'target': '40e71b8614fcd89ccd17ca2b1d9e66c5b00a6d03',
        'dir_id': self.DIRECTORY_ID,
        'name': 'bob',
        'type': 10,
    }]
    mock_storage.directory_ls.return_value = stub_dir_entries
    # when
    actual_directory_ls = list(service.lookup_directory(
        'directory-sha1'))
    # then
    self.assertEqual(actual_directory_ls, expected_dir_entries)
    mock_query.parse_hash_with_algorithms_or_throws.assert_called_with(
        'directory-sha1', ['sha1'], 'Only sha1_git is supported.')
    mock_storage.directory_ls.assert_called_with(
        'directory-sha1-bin')

@patch('swh.web.common.service.storage')
def test_lookup_directory_empty(self, mock_storage):
    """The well-known empty-directory hash short-circuits without a query."""
    empty_dir_sha1 = '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
    mock_storage.directory_ls.return_value = []
    # when
    actual_directory_ls = list(service.lookup_directory(empty_dir_sha1))
    # then
    self.assertEqual(actual_directory_ls, [])
    self.assertFalse(mock_storage.directory_ls.called)
@patch('swh.web.common.service.storage')
def test_lookup_revision_by_nothing_found(self, mock_storage):
    """lookup_revision_by raises NotFoundExc when storage yields nothing."""
    # given
    mock_storage.revision_get_by.return_value = None
    # when
    with self.assertRaises(NotFoundExc):
        service.lookup_revision_by(1)
    # then
    mock_storage.revision_get_by.assert_called_with(1, 'refs/heads/master', # noqa
                                                    limit=1,
                                                    timestamp=None)

@patch('swh.web.common.service.storage')
def test_lookup_revision_by(self, mock_storage):
    """lookup_revision_by forwards branch and timestamp to storage."""
    # given
    stub_rev = self.SAMPLE_REVISION_RAW
    expected_rev = self.SAMPLE_REVISION
    mock_storage.revision_get_by.return_value = [stub_rev]
    # when
    actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
    # then
    self.assertEqual(actual_revision, expected_rev)
    mock_storage.revision_get_by.assert_called_with(10, 'master2',
                                                    limit=1,
                                                    timestamp='some-ts')

@patch('swh.web.common.service.storage')
def test_lookup_revision_by_nomerge(self, mock_storage):
    """A single-parent revision is not flagged as a merge."""
    # given
    stub_rev = self.SAMPLE_REVISION_RAW
    stub_rev['parents'] = [
        hash_to_bytes('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc')]
    expected_rev = self.SAMPLE_REVISION
    expected_rev['parents'] = ['adc83b19e793491b1c6ea0fd8b46cd9f32e592fc']
    mock_storage.revision_get_by.return_value = [stub_rev]
    # when
    actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
    # then
    self.assertEqual(actual_revision, expected_rev)
    mock_storage.revision_get_by.assert_called_with(10, 'master2',
                                                    limit=1,
                                                    timestamp='some-ts')

@patch('swh.web.common.service.storage')
def test_lookup_revision_by_merge(self, mock_storage):
    """A revision with two parents gets merge=True in its API form."""
    # given
    stub_rev = self.SAMPLE_REVISION_RAW
    stub_rev['parents'] = [
        hash_to_bytes('adc83b19e793491b1c6ea0fd8b46cd9f32e592fc'),
        hash_to_bytes('ffff3b19e793491b1c6db0fd8b46cd9f32e592fc')
    ]
    expected_rev = self.SAMPLE_REVISION
    expected_rev['parents'] = [
        'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc',
        'ffff3b19e793491b1c6db0fd8b46cd9f32e592fc'
    ]
    expected_rev['merge'] = True
    mock_storage.revision_get_by.return_value = [stub_rev]
    # when
    actual_revision = service.lookup_revision_by(10, 'master2', 'some-ts')
    # then
    self.assertEqual(actual_revision, expected_rev)
    mock_storage.revision_get_by.assert_called_with(10, 'master2',
                                                    limit=1,
                                                    timestamp='some-ts')
@patch('swh.web.common.service.storage')
def test_lookup_revision_with_context_by_ko(self, mock_storage):
    """A missing root revision raises NotFoundExc with a descriptive message."""
    # given
    mock_storage.revision_get_by.return_value = None
    # when
    origin_id = 1
    branch_name = 'master3'
    ts = None
    with self.assertRaises(NotFoundExc) as cm:
        service.lookup_revision_with_context_by(origin_id, branch_name, ts,
                                                'sha1')
    # then
    self.assertIn(
        'Revision with (origin_id: %s, branch_name: %s'
        ', ts: %s) not found.' % (origin_id,
                                  branch_name,
                                  ts), cm.exception.args[0])
    mock_storage.revision_get_by.assert_called_once_with(
        origin_id, branch_name, limit=1, timestamp=ts)

@patch('swh.web.common.service.lookup_revision_with_context')
@patch('swh.web.common.service.storage')
def test_lookup_revision_with_context_by(
        self, mock_storage, mock_lookup_revision_with_context,
):
    """The root revision and contextual revision are returned as a pair."""
    # given
    stub_root_rev = {'id': 'root-rev-id'}
    mock_storage.revision_get_by.return_value = [{'id': 'root-rev-id'}]
    stub_rev = {'id': 'rev-found'}
    mock_lookup_revision_with_context.return_value = stub_rev
    # when
    origin_id = 1
    branch_name = 'master3'
    ts = None
    sha1_git = 'sha1'
    actual_root_rev, actual_rev = service.lookup_revision_with_context_by(
        origin_id, branch_name, ts, sha1_git)
    # then
    self.assertEqual(actual_root_rev, stub_root_rev)
    self.assertEqual(actual_rev, stub_rev)
    mock_storage.revision_get_by.assert_called_once_with(
        origin_id, branch_name, limit=1, timestamp=ts)
    mock_lookup_revision_with_context.assert_called_once_with(
        stub_root_rev, sha1_git, 100)
@patch('swh.web.common.service.storage')
@patch('swh.web.common.service.query')
def test_lookup_entity_by_uuid(self, mock_query, mock_storage):
    """lookup_entity_by_uuid validates the uuid, then queries storage."""
    # given
    uuid_test = 'correct-uuid'
    mock_query.parse_uuid4.return_value = uuid_test
    stub_entities = [{'uuid': uuid_test}]
    mock_storage.entity_get.return_value = stub_entities
    # when
    actual_entities = list(service.lookup_entity_by_uuid(uuid_test))
    # then
    self.assertEqual(actual_entities, stub_entities)
    mock_query.parse_uuid4.assert_called_once_with(uuid_test)
    mock_storage.entity_get.assert_called_once_with(uuid_test)
def test_lookup_revision_through_ko_not_implemented(self):
    """Unknown criteria keys make lookup_revision_through raise."""
    # then
    with self.assertRaises(NotImplementedError):
        service.lookup_revision_through({
            'something-unknown': 10,
        })

@patch('swh.web.common.service.lookup_revision_with_context_by')
def test_lookup_revision_through_with_context_by(self, mock_lookup):
    """origin_id+branch+ts+sha1_git dispatches to lookup_revision_with_context_by."""
    # given
    stub_rev = {'id': 'rev'}
    mock_lookup.return_value = stub_rev
    # when
    actual_revision = service.lookup_revision_through({
        'origin_id': 1,
        'branch_name': 'master',
        'ts': None,
        'sha1_git': 'sha1-git'
    }, limit=1000)
    # then
    self.assertEqual(actual_revision, stub_rev)
    mock_lookup.assert_called_once_with(
        1, 'master', None, 'sha1-git', 1000)

@patch('swh.web.common.service.lookup_revision_by')
def test_lookup_revision_through_with_revision_by(self, mock_lookup):
    """origin_id+branch+ts dispatches to lookup_revision_by (limit dropped)."""
    # given
    stub_rev = {'id': 'rev'}
    mock_lookup.return_value = stub_rev
    # when
    actual_revision = service.lookup_revision_through({
        'origin_id': 2,
        'branch_name': 'master2',
        'ts': 'some-ts',
    }, limit=10)
    # then
    self.assertEqual(actual_revision, stub_rev)
    mock_lookup.assert_called_once_with(
        2, 'master2', 'some-ts')

@patch('swh.web.common.service.lookup_revision_with_context')
def test_lookup_revision_through_with_context(self, mock_lookup):
    """sha1_git_root+sha1_git dispatches to lookup_revision_with_context."""
    # given
    stub_rev = {'id': 'rev'}
    mock_lookup.return_value = stub_rev
    # when
    actual_revision = service.lookup_revision_through({
        'sha1_git_root': 'some-sha1-root',
        'sha1_git': 'some-sha1',
    })
    # then
    self.assertEqual(actual_revision, stub_rev)
    mock_lookup.assert_called_once_with(
        'some-sha1-root', 'some-sha1', 100)

@patch('swh.web.common.service.lookup_revision')
def test_lookup_revision_through_with_revision(self, mock_lookup):
    """A lone sha1_git dispatches to plain lookup_revision."""
    # given
    stub_rev = {'id': 'rev'}
    mock_lookup.return_value = stub_rev
    # when
    actual_revision = service.lookup_revision_through({
        'sha1_git': 'some-sha1',
    })
    # then
    self.assertEqual(actual_revision, stub_rev)
    mock_lookup.assert_called_once_with(
        'some-sha1')
@patch('swh.web.common.service.lookup_revision_through')
def test_lookup_directory_through_revision_ko_not_found(
        self, mock_lookup_rev):
    """A missing revision makes lookup_directory_through_revision raise."""
    # given
    mock_lookup_rev.return_value = None
    # when
    with self.assertRaises(NotFoundExc):
        service.lookup_directory_through_revision(
            {'id': 'rev'}, 'some/path', 100)
    mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100)

@patch('swh.web.common.service.lookup_revision_through')
@patch('swh.web.common.service.lookup_directory_with_revision')
def test_lookup_directory_through_revision_ok_with_data(
        self, mock_lookup_dir, mock_lookup_rev):
    """Without with_data, the directory lookup is done with with_data=False."""
    # given
    mock_lookup_rev.return_value = {'id': 'rev-id'}
    mock_lookup_dir.return_value = {'type': 'dir',
                                    'content': []}
    # when
    rev_id, dir_result = service.lookup_directory_through_revision(
        {'id': 'rev'}, 'some/path', 100)
    # then
    self.assertEqual(rev_id, 'rev-id')
    self.assertEqual(dir_result, {'type': 'dir',
                                  'content': []})
    mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 100)
    mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', False)

@patch('swh.web.common.service.lookup_revision_through')
@patch('swh.web.common.service.lookup_directory_with_revision')
def test_lookup_directory_through_revision_ok_with_content(
        self, mock_lookup_dir, mock_lookup_rev):
    """with_data=True is forwarded so file content is fetched too."""
    # given
    mock_lookup_rev.return_value = {'id': 'rev-id'}
    stub_result = {'type': 'file',
                   'revision': 'rev-id',
                   'content': {'data': b'blah',
                               'sha1': 'sha1'}}
    mock_lookup_dir.return_value = stub_result
    # when
    rev_id, dir_result = service.lookup_directory_through_revision(
        {'id': 'rev'}, 'some/path', 10, with_data=True)
    # then
    self.assertEqual(rev_id, 'rev-id')
    self.assertEqual(dir_result, stub_result)
    mock_lookup_rev.assert_called_once_with({'id': 'rev'}, 10)
    mock_lookup_dir.assert_called_once_with('rev-id', 'some/path', True)
diff --git a/swh/web/tests/common/test_templatetags.py b/swh/web/tests/common/test_templatetags.py
index 29daaf55f..6b77d5ffd 100644
--- a/swh/web/tests/common/test_templatetags.py
+++ b/swh/web/tests/common/test_templatetags.py
@@ -1,60 +1,63 @@
# Copyright (C) 2015-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
+import pytest
from swh.web.common import swh_templatetags
from swh.web.tests.testcase import SWHWebTestCase
class SWHTemplateTagsTest(SWHWebTestCase):
    """Tests for the swh_templatetags URL/mail/docstring rendering helpers."""

    # NOTE(review): the expected_content literals below appear to have had
    # their HTML anchor markup stripped during extraction — confirm the
    # expected strings against the canonical test file before relying on them.

    def test_urlize_api_links_api(self):
        """api URLs inside content are turned into links."""
        content = '{"url": "/api/1/abc/"}'
        expected_content = ('{"url": "/api/1/abc/"}')
        self.assertEqual(swh_templatetags.urlize_links_and_mails(content),
                         expected_content)

    def test_urlize_api_links_browse(self):
        """/browse URLs inside content are turned into links."""
        content = '{"url": "/browse/def/"}'
        expected_content = ('{"url": "'
                            '/browse/def/"}')
        self.assertEqual(swh_templatetags.urlize_links_and_mails(content),
                         expected_content)

    def test_urlize_header_links(self):
        """Link headers get their URLs wrapped for display."""
        content = """; rel="next"
; rel="prev"
"""
        expected_content = """</api/1/abc/>; rel="next"
</api/1/def/>; rel="prev"
"""
        self.assertEqual(swh_templatetags.urlize_header_links(content),
                         expected_content)

    # remove deprecation warnings related to docutils
    @pytest.mark.filterwarnings('ignore:.*U.*mode is deprecated:DeprecationWarning') # noqa
    def test_safe_docstring_display(self):
        """ReST docstrings render with list structure preserved."""
        docstring = """This is my list header:

    - Here is item 1, with a continuation
      line right here
    - Here is item 2

    Here is something that is not part of the list"""
        expected_docstring = """This is my list header:

    - Here is item 1, with a continuation
      line right here
    - Here is item 2

    Here is something that is not part of the list
"""
        self.assertEqual(swh_templatetags.safe_docstring_display(docstring),
                         expected_docstring)
diff --git a/swh/web/tests/common/test_utils.py b/swh/web/tests/common/test_utils.py
index 78c653789..ecb480eea 100644
--- a/swh/web/tests/common/test_utils.py
+++ b/swh/web/tests/common/test_utils.py
@@ -1,150 +1,150 @@
# Copyright (C) 2017-2018 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
from unittest.mock import patch
from swh.web.common import utils
from swh.web.common.exc import BadInputExc
from swh.web.tests.testcase import SWHWebTestCase
class UtilsTestCase(SWHWebTestCase):
def test_shorten_path_noop(self):
    """Paths without embedded hashes are returned unchanged."""
    noops = [
        '/api/',
        '/browse/',
        '/content/symbol/foobar/'
    ]
    for noop in noops:
        self.assertEqual(
            utils.shorten_path(noop),
            noop
        )

def test_shorten_path_sha1(self):
    """sha1 hex digests in paths are abbreviated to 8 chars + ellipsis."""
    sha1 = 'aafb16d69fd30ff58afdd69036a26047f3aebdc6'
    short_sha1 = sha1[:8] + '...'
    templates = [
        '/api/1/content/sha1:%s/',
        '/api/1/content/sha1_git:%s/',
        '/api/1/directory/%s/',
        '/api/1/content/sha1:%s/ctags/',
    ]
    for template in templates:
        self.assertEqual(
            utils.shorten_path(template % sha1),
            template % short_sha1
        )

def test_shorten_path_sha256(self):
    """sha256 hex digests in paths are abbreviated to 8 chars + ellipsis."""
    sha256 = ('aafb16d69fd30ff58afdd69036a26047'
              '213add102934013a014dfca031c41aef')
    short_sha256 = sha256[:8] + '...'
    templates = [
        '/api/1/content/sha256:%s/',
        '/api/1/directory/%s/',
        '/api/1/content/sha256:%s/filetype/',
    ]
    for template in templates:
        self.assertEqual(
            utils.shorten_path(template % sha256),
            template % short_sha256
        )
def test_parse_timestamp(self):
    """parse_timestamp handles None, ISO dates, free text and epoch strings."""
    input_timestamps = [
        None,
        '2016-01-12',
        '2016-01-12T09:19:12+0100',
        'Today is January 1, 2047 at 8:21:00AM',
        '1452591542',
    ]
    output_dates = [
        None,
        datetime.datetime(2016, 1, 12, 0, 0),
        datetime.datetime(2016, 1, 12, 8, 19, 12,
                          tzinfo=datetime.timezone.utc),
        datetime.datetime(2047, 1, 1, 8, 21),
        datetime.datetime(2016, 1, 12, 9, 39, 2,
                          tzinfo=datetime.timezone.utc),
    ]
    for ts, exp_date in zip(input_timestamps, output_dates):
        self.assertEqual(utils.parse_timestamp(ts), exp_date)

def test_format_utc_iso_date(self):
    """ISO dates with offsets are normalized to a UTC display string."""
    self.assertEqual(utils.format_utc_iso_date('2017-05-04T13:27:13+02:00'), # noqa
                     '04 May 2017, 11:27 UTC')

def test_gen_path_info(self):
    """gen_path_info yields cumulative breadcrumbs, with or without slashes."""
    input_path = '/home/user/swh-environment/swh-web/'
    expected_result = [
        {'name': 'home', 'path': 'home'},
        {'name': 'user', 'path': 'home/user'},
        {'name': 'swh-environment', 'path': 'home/user/swh-environment'},
        {'name': 'swh-web', 'path': 'home/user/swh-environment/swh-web'}
    ]
    path_info = utils.gen_path_info(input_path)
    self.assertEqual(path_info, expected_result)
    # same result without leading/trailing slashes
    input_path = 'home/user/swh-environment/swh-web'
    path_info = utils.gen_path_info(input_path)
    self.assertEqual(path_info, expected_result)
@patch('swh.web.common.utils.service')
def test_get_origin_visits(self, mock_service):
    """get_origin_visits pages through lookup_origin_visits until exhausted."""
    mock_service.MAX_LIMIT = 2

    def _lookup_origin_visits(*args, **kwargs):
        # First page (no last_visit): two visits; second page: one visit.
        if kwargs['last_visit'] is None:
            return [{'visit': 1,
                     'date': '2017-05-06T00:59:10+00:00',
                     'metadata': {}},
                    {'visit': 2,
                     'date': '2017-08-06T00:59:10+00:00',
                     'metadata': {}}
                    ]
        else:
            return [{'visit': 3,
                     'date': '2017-09-06T00:59:10+00:00',
                     'metadata': {}}
                    ]

    mock_service.lookup_origin_visits.side_effect = _lookup_origin_visits
    origin_info = {
        'id': 1,
        'type': 'git',
        'url': 'https://github.com/foo/bar',
    }
    origin_visits = utils.get_origin_visits(origin_info)
    self.assertEqual(len(origin_visits), 3)
def test_get_swh_persisent_id(self):
    """get_swh_persistent_id builds swh:1:cnt:... ids and rejects bad input."""
    swh_object_type = 'content'
    sha1_git = 'aafb16d69fd30ff58afdd69036a26047f3aebdc6'
    expected_swh_id = 'swh:1:cnt:' + sha1_git
    self.assertEqual(utils.get_swh_persistent_id(swh_object_type, sha1_git), # noqa
                     expected_swh_id)
    with self.assertRaises(BadInputExc) as cm:
        utils.get_swh_persistent_id('foo', sha1_git)
    self.assertIn('Invalid object', cm.exception.args[0])
    with self.assertRaises(BadInputExc) as cm:
        utils.get_swh_persistent_id(swh_object_type, 'not a valid id')
    self.assertIn('Invalid object', cm.exception.args[0])