diff --git a/swh/web/api/views/origin.py b/swh/web/api/views/origin.py --- a/swh/web/api/views/origin.py +++ b/swh/web/api/views/origin.py @@ -53,7 +53,7 @@ def _enrich_origin(origin): - if 'id' in origin: + if 'url' in origin: o = origin.copy() o['origin_visits_url'] = reverse( 'api-1-origin-visits', url_args={'origin_url': origin['url']}) diff --git a/swh/web/common/service.py b/swh/web/common/service.py --- a/swh/web/common/service.py +++ b/swh/web/common/service.py @@ -279,15 +279,15 @@ for match in matches: match['from_revision'] = hashutil.hash_to_hex(match['from_revision']) result = converters.from_origin( - storage.origin_get({'id': match.pop('id')})) + storage.origin_get({'url': match.pop('origin_url')})) result['metadata'] = match results.append(result) return results def lookup_origin_intrinsic_metadata(origin_dict): - """Return intrinsic metadata for origin whose origin_id matches given - origin_id. + """Return intrinsic metadata for origin whose origin matches given + origin. Args: origin_dict: origin's dict with keys ('type' AND 'url') @@ -302,9 +302,9 @@ (origin_dict['type'], origin_dict['url']) raise NotFoundExc(msg) - origin_ids = [origin_info['id']] + origins = [origin_info['url']] match = _first_element( - idx_storage.origin_intrinsic_metadata_get(origin_ids)) + idx_storage.origin_intrinsic_metadata_get(origins)) result = {} if match: result = match['metadata'] @@ -502,7 +502,7 @@ return res -def _lookup_revision_id_by(origin_id, branch_name, timestamp): +def _lookup_revision_id_by(origin, branch_name, timestamp): def _get_snapshot_branch(snapshot, branch_name): snapshot = lookup_snapshot(visit['snapshot'], branches_from=branch_name, @@ -512,7 +512,14 @@ branch = snapshot['branches'][branch_name] return branch - visit = get_origin_visit({'id': origin_id}, visit_ts=timestamp) + if isinstance(origin, int): + origin = {'id': origin} + elif isinstance(origin, str): + origin = {'url': origin} + else: + raise TypeError('"origin" must be an int or a string.') + + visit = get_origin_visit(origin, visit_ts=timestamp) branch = _get_snapshot_branch(visit['snapshot'], branch_name) rev_id = None if branch and branch['target_type'] == 'revision': @@ -524,21 +531,21 @@ if not rev_id: raise NotFoundExc('Revision for origin %s and branch %s not found.' - % (origin_id, branch_name)) + % (origin.get('url') or origin['id'], branch_name)) return rev_id -def lookup_revision_by(origin_id, +def lookup_revision_by(origin, branch_name='HEAD', timestamp=None): - """Lookup revision by origin id, snapshot branch name and visit timestamp. + """Lookup revision by origin, snapshot branch name and visit timestamp. If branch_name is not provided, lookup using 'HEAD' as default. If timestamp is not provided, use the most recent. Args: - origin_id (int): origin of the revision + origin (Union[int,str]): origin of the revision branch_name (str): snapshot branch name timestamp (str/int): origin visit time frame @@ -549,7 +556,7 @@ NotFoundExc if no revision corresponds to the criterion """ - rev_id = _lookup_revision_id_by(origin_id, branch_name, timestamp) + rev_id = _lookup_revision_id_by(origin, branch_name, timestamp) return lookup_revision(rev_id) @@ -574,11 +581,11 @@ return map(converters.from_revision, revision_entries) -def lookup_revision_log_by(origin_id, branch_name, timestamp, limit): - """Lookup revision by origin id, snapshot branch name and visit timestamp. 
+def lookup_revision_log_by(origin, branch_name, timestamp, limit): + """Lookup revision by origin, snapshot branch name and visit timestamp. Args: - origin_id (int): origin of the revision + origin (Union[int,str]): origin of the revision branch_name (str): snapshot branch timestamp (str/int): origin visit time frame limit (int): the maximum number of revisions returned @@ -590,21 +597,21 @@ NotFoundExc: if no revision corresponds to the criterion """ - rev_id = _lookup_revision_id_by(origin_id, branch_name, timestamp) + rev_id = _lookup_revision_id_by(origin, branch_name, timestamp) return lookup_revision_log(rev_id, limit) -def lookup_revision_with_context_by(origin_id, branch_name, timestamp, +def lookup_revision_with_context_by(origin, branch_name, timestamp, sha1_git, limit=100): """Return information about revision sha1_git, limited to the sub-graph of all transitive parents of sha1_git_root. - sha1_git_root being resolved through the lookup of a revision by origin_id, + sha1_git_root being resolved through the lookup of a revision by origin, branch_name and ts. In other words, sha1_git is an ancestor of sha1_git_root. Args: - - origin_id: origin of the revision. + - origin: origin of the revision. - branch_name: revision's branch. - timestamp: revision's time frame. - sha1_git: one of sha1_git_root's ancestors. @@ -621,7 +628,7 @@ ancestor of sha1_git_root. """ - rev_root_id = _lookup_revision_id_by(origin_id, branch_name, timestamp) + rev_root_id = _lookup_revision_id_by(origin, branch_name, timestamp) rev_root_id_bin = hashutil.hash_to_bytes(rev_root_id) @@ -811,7 +818,7 @@ def _lookup_origin_visits(origin_url, last_visit=None, limit=10): - """Yields the origin origin_ids' visits. + """Yields the origin origins' visits. Args: origin_url (str): origin to list visits for @@ -829,24 +836,24 @@ yield visit -def lookup_origin_visits(origin_id, last_visit=None, per_page=10): - """Yields the origin origin_ids' visits. +def lookup_origin_visits(origin, last_visit=None, per_page=10): + """Yields the origin origins' visits. Args: - origin_id: origin to list visits for + origin: origin to list visits for Yields: Dictionaries of origin_visit for that origin """ - visits = _lookup_origin_visits(origin_id, last_visit=last_visit, + visits = _lookup_origin_visits(origin, last_visit=last_visit, limit=per_page) for visit in visits: yield converters.from_origin_visit(visit) def lookup_origin_visit(origin_url, visit_id): - """Return information about visit visit_id with origin origin_id. + """Return information about visit visit_id with origin origin. Args: origin (str): origin concerned by the visit @@ -911,14 +918,14 @@ return converters.from_snapshot(snapshot) -def lookup_latest_origin_snapshot(origin_id, allowed_statuses=None): +def lookup_latest_origin_snapshot(origin, allowed_statuses=None): """Return information about the latest snapshot of an origin. .. warning:: At most 1000 branches contained in the snapshot will be returned for performance reasons. Args: - origin_id: integer identifier of the origin + origin: URL or integer identifier of the origin allowed_statuses: list of visit statuses considered to find the latest snapshot for the visit. For instance, ``allowed_statuses=['full']`` will only consider visits that @@ -927,7 +934,7 @@ Returns: A dict filled with the snapshot content. 
""" - snapshot = storage.snapshot_get_latest(origin_id, allowed_statuses) + snapshot = storage.snapshot_get_latest(origin, allowed_statuses) return converters.from_snapshot(snapshot) @@ -939,6 +946,8 @@ Here are the supported combination of possible values: - origin_id, branch_name, ts, sha1_git - origin_id, branch_name, ts + - origin_url, branch_name, ts, sha1_git + - origin_url, branch_name, ts - sha1_git_root, sha1_git - sha1_git @@ -946,6 +955,15 @@ None if the revision is not found or the actual revision. """ + if 'origin_url' in revision and \ + 'branch_name' in revision and \ + 'ts' in revision and \ + 'sha1_git' in revision: + return lookup_revision_with_context_by(revision['origin_url'], + revision['branch_name'], + revision['ts'], + revision['sha1_git'], + limit) if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision and \ @@ -955,6 +973,12 @@ revision['ts'], revision['sha1_git'], limit) + if 'origin_url' in revision and \ + 'branch_name' in revision and \ + 'ts' in revision: + return lookup_revision_by(revision['origin_url'], + revision['branch_name'], + revision['ts']) if 'origin_id' in revision and \ 'branch_name' in revision and \ 'ts' in revision: diff --git a/swh/web/tests/api/views/test_origin.py b/swh/web/tests/api/views/test_origin.py --- a/swh/web/tests/api/views/test_origin.py +++ b/swh/web/tests/api/views/test_origin.py @@ -6,6 +6,7 @@ import random from hypothesis import given +import pytest from rest_framework.test import APITestCase from unittest.mock import patch @@ -194,6 +195,7 @@ self.assertEqual(rv.data, expected_visit) + @pytest.mark.origin_id @given(new_origin(), visit_dates(3), new_snapshots(3)) def test_api_lookup_origin_visit_by_id(self, new_origin, visit_dates, new_snapshots): @@ -248,6 +250,7 @@ (origin['url'], max_visit_id+1) }) + @pytest.mark.origin_id @given(origin()) def test_api_lookup_origin_visit_not_found_by_id(self, origin): @@ -269,6 +272,7 @@ (origin['url'], max_visit_id+1) }) + @pytest.mark.origin_id @given(origin()) def test_api_origin_by_id(self, origin): @@ -348,7 +352,7 @@ b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed ' b'\xf2U\xfa\x05B8'), 'metadata': {'author': 'Jane Doe'}, - 'id': origin['id'], + 'origin_url': origin['url'], 'tool': { 'configuration': { 'context': ['NpmMapping', 'CodemetaMapping'], @@ -367,7 +371,6 @@ self.assertEqual(rv.status_code, 200, rv.content) self.assertEqual(rv['Content-Type'], 'application/json') expected_data = [{ - 'id': origin['id'], 'type': origin['type'], 'url': origin['url'], 'metadata': { @@ -385,6 +388,10 @@ } } }] + actual_data = rv.data + for d in actual_data: + if 'id' in d: + del d['id'] self.assertEqual(rv.data, expected_data) mock_idx_storage.origin_intrinsic_metadata_search_fulltext \ .assert_called_with(conjunction=['Jane Doe'], limit=70) @@ -399,7 +406,7 @@ b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed ' b'\xf2U\xfa\x05B8'), 'metadata': {'author': 'Jane Doe'}, - 'id': origin['id'], + 'origin_url': origin['url'], 'tool': { 'configuration': { 'context': ['NpmMapping', 'CodemetaMapping'], @@ -452,7 +459,7 @@ b'p&\xb7\xc1\xa2\xafVR\x1e\x95\x1c\x01\xed ' b'\xf2U\xfa\x05B8'), 'metadata': {'author': 'Jane Doe'}, - 'id': origin['id'], + 'origin_url': origin['url'], 'tool': { 'configuration': { 'context': ['NpmMapping', 'CodemetaMapping'], @@ -470,7 +477,7 @@ rv = self.client.get(url) mock_idx_storage.origin_intrinsic_metadata_get \ - .assert_called_once_with([origin['id']]) + .assert_called_once_with([origin['url']]) self.assertEqual(rv.status_code, 200, rv.content) 
self.assertEqual(rv['Content-Type'], 'application/json') expected_data = {'author': 'Jane Doe'} @@ -485,6 +492,7 @@ self.assertEqual(rv.status_code, 400, rv.content) mock_idx_storage.assert_not_called() + @pytest.mark.origin_id @given(new_origins(10)) def test_api_lookup_origins(self, new_origins): diff --git a/swh/web/tests/api/views/test_revision.py b/swh/web/tests/api/views/test_revision.py --- a/swh/web/tests/api/views/test_revision.py +++ b/swh/web/tests/api/views/test_revision.py @@ -6,6 +6,7 @@ import random from hypothesis import given +import pytest from rest_framework.test import APITestCase from unittest.mock import patch @@ -98,28 +99,30 @@ 'reason': 'Revision with sha1_git %s not found.' % unknown_revision_}) - def test_api_revision_with_origin_not_found(self): - unknown_origin_id_ = random.randint(1000, 1000000) + @pytest.mark.origin_id + def test_api_revision_with_origin_id_not_found(self): + unknown_origin_id = random.randint(1000, 1000000) url = reverse('api-1-revision-origin', - url_args={'origin_id': unknown_origin_id_}) + url_args={'origin_id': unknown_origin_id}) rv = self.client.get(url) self.assertEqual(rv.status_code, 404, rv.data) - self.assertEqual(rv['Content-Type'], 'application/json') + self.assertEqual(rv['content-type'], 'application/json') self.assertEqual(rv.data, { 'exception': 'NotFoundExc', 'reason': 'Origin %s not found!' % - unknown_origin_id_}) + unknown_origin_id}) + @pytest.mark.origin_id @given(origin()) - def test_api_revision_with_origin(self, origin): + def test_api_revision_with_origin_id(self, origin): url = reverse('api-1-revision-origin', url_args={'origin_id': origin['id']}) rv = self.client.get(url) - snapshot = self.snapshot_get_latest(origin['id']) + snapshot = self.snapshot_get_latest(origin['url']) expected_revision = self.revision_get( snapshot['branches']['HEAD']['target']) @@ -129,10 +132,11 @@ self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_revision) + @pytest.mark.origin_id @given(origin()) - def test_api_revision_with_origin_and_branch_name(self, origin): + def test_api_revision_with_origin_id_and_branch_name(self, origin): - snapshot = self.snapshot_get_latest(origin['id']) + snapshot = self.snapshot_get_latest(origin['url']) branch_name = random.choice( list(b for b in snapshot['branches'].keys() @@ -150,13 +154,14 @@ self._enrich_revision(expected_revision) self.assertEqual(rv.status_code, 200, rv.data) - self.assertEqual(rv['Content-Type'], 'application/json') + self.assertEqual(rv['content-type'], 'application/json') self.assertEqual(rv.data, expected_revision) + @pytest.mark.origin_id @given(origin_with_multiple_visits()) - def test_api_revision_with_origin_and_branch_name_and_ts(self, origin): + def test_api_revision_with_origin_id_and_branch_name_and_ts(self, origin): - visit = random.choice(self.origin_visit_get(origin['id'])) + visit = random.choice(self.origin_visit_get(origin['url'])) snapshot = self.snapshot_get(visit['snapshot']) @@ -180,10 +185,11 @@ self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_revision) + @pytest.mark.origin_id @given(origin_with_multiple_visits()) - def test_api_revision_with_origin_and_branch_name_and_ts_escapes(self, - origin): - visit = random.choice(self.origin_visit_get(origin['id'])) + def test_api_revision_with_origin_id_and_branch_name_and_ts_escapes( + self, origin): + visit = random.choice(self.origin_visit_get(origin['url'])) snapshot = self.snapshot_get(visit['snapshot']) @@ -211,7 +217,8 @@ 
self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_revision) - def test_api_directory_through_revision_origin_ko(self): + @pytest.mark.origin_id + def test_api_directory_through_revision_origin_id_ko(self): unknown_origin_id_ = random.randint(1000, 1000000) url = reverse('api-1-revision-origin-directory', @@ -226,8 +233,9 @@ unknown_origin_id_ }) + @pytest.mark.origin_id @given(origin()) - def test_api_directory_through_revision_origin(self, origin): + def test_api_directory_through_revision_origin_id(self, origin): url = reverse('api-1-revision-origin-directory', url_args={'origin_id': origin['id']}) @@ -348,8 +356,9 @@ self.assertEqual(rv['Content-Type'], 'application/json') self.assertEqual(rv.data, expected_log) + @pytest.mark.origin_id @given(origin()) - def test_api_revision_log_by(self, origin): + def test_api_revision_log_by_origin_id(self, origin): per_page = 10 @@ -359,7 +368,7 @@ rv = self.client.get(url) - snapshot = self.snapshot_get_latest(origin['id']) + snapshot = self.snapshot_get_latest(origin['url']) expected_log = self.revision_log( snapshot['branches']['HEAD']['target'], limit=per_page+1) @@ -382,6 +391,7 @@ 'sha1_git': expected_log[-1]['id']}) self.assertIn(next_log_url, rv['Link']) + @pytest.mark.origin_id @given(origin()) def test_api_revision_log_by_ko(self, origin): @@ -402,6 +412,27 @@ 'reason': 'Revision for origin %s and branch %s not found.' % (origin['id'], invalid_branch_name)}) + @pytest.mark.origin_id + @given(origin()) + def test_api_revision_log_by_origin_id_ko(self, origin): + + invalid_branch_name = 'foobar' + + url = reverse('api-1-revision-origin-log', + url_args={'origin_id': origin['id'], + 'branch_name': invalid_branch_name}) + + rv = self.client.get(url) + + self.assertEqual(rv.status_code, 404, rv.data) + self.assertEqual(rv['Content-Type'], 'application/json') + self.assertFalse(rv.has_header('Link')) + self.assertEqual( + rv.data, + {'exception': 'NotFoundExc', + 'reason': 'Revision for origin %s and branch %s not found.' 
% + (origin['id'], invalid_branch_name)}) + @patch('swh.web.api.views.revision._revision_directory_by') def test_api_revision_directory_ko_not_found(self, mock_rev_dir): # given diff --git a/swh/web/tests/browse/test_utils.py b/swh/web/tests/browse/test_utils.py --- a/swh/web/tests/browse/test_utils.py +++ b/swh/web/tests/browse/test_utils.py @@ -21,7 +21,7 @@ @given(origin_with_multiple_visits()) def test_get_origin_visit_snapshot_simple(self, origin): - visits = self.origin_visit_get(origin['id']) + visits = self.origin_visit_get(origin['url']) for visit in visits: diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py --- a/swh/web/tests/browse/views/test_origin.py +++ b/swh/web/tests/browse/views/test_origin.py @@ -46,7 +46,7 @@ self.assertEqual(resp.status_code, 200) self.assertTemplateUsed('origin-visits.html') - visits = self.origin_visit_get(origin['id']) + visits = self.origin_visit_get(origin['url']) for v in visits: vdate = format_utc_iso_date(v['date'], '%Y-%m-%dT%H:%M:%SZ') @@ -194,7 +194,7 @@ @given(origin_with_multiple_visits()) def test_origin_content_view(self, origin): - origin_visits = self.origin_visit_get(origin['id']) + origin_visits = self.origin_visit_get(origin['url']) def _get_test_data(visit_idx): snapshot = self.snapshot_get(origin_visits[visit_idx]['snapshot']) @@ -398,7 +398,7 @@ @given(origin()) def test_origin_root_directory_view(self, origin): - origin_visits = self.origin_visit_get(origin['id']) + origin_visits = self.origin_visit_get(origin['url']) visit = origin_visits[-1] snapshot = self.snapshot_get(visit['snapshot']) @@ -470,7 +470,7 @@ @given(origin()) def test_origin_sub_directory_view(self, origin): - origin_visits = self.origin_visit_get(origin['id']) + origin_visits = self.origin_visit_get(origin['url']) visit = origin_visits[-1] snapshot = self.snapshot_get(visit['snapshot']) @@ -607,7 +607,7 @@ @given(origin()) def test_origin_branches(self, origin): - origin_visits = self.origin_visit_get(origin['id']) + origin_visits = self.origin_visit_get(origin['url']) visit = origin_visits[-1] snapshot = self.snapshot_get(visit['snapshot']) @@ -669,7 +669,7 @@ @given(origin()) def test_origin_releases(self, origin): - origin_visits = self.origin_visit_get(origin['id']) + origin_visits = self.origin_visit_get(origin['url']) visit = origin_visits[-1] snapshot = self.snapshot_get(visit['snapshot']) @@ -696,8 +696,9 @@ snp_dict['branches'][branch]['target'] = hash_to_bytes( revisions[i-1]) self.storage.snapshot_add([snp_dict]) - visit = self.storage.origin_visit_add(new_origin['id'], visit_dates[0]) - self.storage.origin_visit_update(new_origin['id'], visit['visit'], + visit = self.storage.origin_visit_add( + new_origin['url'], visit_dates[0]) + self.storage.origin_visit_update(new_origin['url'], visit['visit'], status='partial', snapshot=snp_dict['id']) diff --git a/swh/web/tests/browse/views/test_release.py b/swh/web/tests/browse/views/test_release.py --- a/swh/web/tests/browse/views/test_release.py +++ b/swh/web/tests/browse/views/test_release.py @@ -32,7 +32,7 @@ @given(origin_with_release()) def test_release_browse_with_origin(self, origin): - snapshot = self.snapshot_get_latest(origin['id']) + snapshot = self.snapshot_get_latest(origin['url']) release = random.choice([b for b in snapshot['branches'].values() if b['target_type'] == 'release']) url = reverse('browse-release', diff --git a/swh/web/tests/browse/views/test_revision.py b/swh/web/tests/browse/views/test_revision.py --- 
a/swh/web/tests/browse/views/test_revision.py +++ b/swh/web/tests/browse/views/test_revision.py @@ -73,7 +73,7 @@ @given(origin()) def test_revision_origin_browse(self, origin): - snapshot = self.snapshot_get_latest(origin['id']) + snapshot = self.snapshot_get_latest(origin['url']) revision = snapshot['branches']['HEAD']['target'] revision_data = self.revision_get(revision) dir_id = revision_data['directory'] diff --git a/swh/web/tests/common/test_service.py b/swh/web/tests/common/test_service.py --- a/swh/web/tests/common/test_service.py +++ b/swh/web/tests/common/test_service.py @@ -216,14 +216,19 @@ self.assertEqual(actual_origin_visit, expected_visit) + @pytest.mark.origin_id @given(new_origin()) - def test_lookup_origin(self, new_origin): + def test_lookup_origin_by_id(self, new_origin): origin_id = self.storage.origin_add_one(new_origin) actual_origin = service.lookup_origin({'id': origin_id}) expected_origin = self.storage.origin_get({'id': origin_id}) self.assertEqual(actual_origin, expected_origin) + @given(new_origin()) + def test_lookup_origin(self, new_origin): + self.storage.origin_add_one(new_origin) + actual_origin = service.lookup_origin({'type': new_origin['type'], 'url': new_origin['url']}) expected_origin = self.storage.origin_get({'type': new_origin['type'], @@ -580,7 +585,7 @@ self.assertEqual(actual_revision_log, expected_revision_log) def _get_origin_branches(self, origin): - origin_visit = self.origin_visit_get(origin['id'])[-1] + origin_visit = self.origin_visit_get(origin['url'])[-1] snapshot = self.snapshot_get(origin_visit['snapshot']) branches = {k: v for (k, v) in snapshot['branches'].items() if v['target_type'] == 'revision'} @@ -593,7 +598,7 @@ branch_name = random.choice(list(branches.keys())) actual_log = \ - list(service.lookup_revision_log_by(origin['id'], branch_name, + list(service.lookup_revision_log_by(origin['url'], branch_name, None, limit=25)) expected_log = \ @@ -606,7 +611,7 @@ with self.assertRaises(NotFoundExc): service.lookup_revision_log_by( - origin['id'], 'unknown_branch_name', None, limit=100) + origin['url'], 'unknown_branch_name', None, limit=100) def test_lookup_content_raw_not_found(self): unknown_content_ = random_content() @@ -702,7 +707,8 @@ def test_lookup_revision_by_nothing_found(self, origin): with self.assertRaises(NotFoundExc): - service.lookup_revision_by(origin['id'], 'invalid-branch-name') + service.lookup_revision_by( + origin['url'], 'invalid-branch-name') @given(origin()) def test_lookup_revision_by(self, origin): @@ -711,7 +717,7 @@ branch_name = random.choice(list(branches.keys())) actual_revision = \ - service.lookup_revision_by(origin['id'], branch_name, None) + service.lookup_revision_by(origin['url'], branch_name, None) expected_revision = \ self.revision_get(branches[branch_name]['target']) @@ -722,7 +728,7 @@ def test_lookup_revision_with_context_by_ko(self, origin, revision): with self.assertRaises(NotFoundExc): - service.lookup_revision_with_context_by(origin['id'], + service.lookup_revision_with_context_by(origin['url'], 'invalid-branch-name', None, revision) @@ -745,7 +751,7 @@ rev = root_rev_log[-1]['id'] actual_root_rev, actual_rev = service.lookup_revision_with_context_by( - origin['id'], branch_name, None, rev) + origin['url'], branch_name, None, rev) expected_root_rev = self.revision_get(root_rev) expected_rev = self.revision_get(rev) @@ -772,13 +778,13 @@ rev = root_rev_log[-1]['id'] self.assertEqual(service.lookup_revision_through({ - 'origin_id': origin['id'], + 'origin_url': origin['url'], 
'branch_name': branch_name, 'ts': None, 'sha1_git': rev }), service.lookup_revision_with_context_by( - origin['id'], branch_name, None, rev) + origin['url'], branch_name, None, rev) ) @given(origin()) @@ -788,12 +794,12 @@ branch_name = random.choice(list(branches.keys())) self.assertEqual(service.lookup_revision_through({ - 'origin_id': origin['id'], + 'origin_url': origin['url'], 'branch_name': branch_name, 'ts': None, }), service.lookup_revision_by( - origin['id'], branch_name, None) + origin['url'], branch_name, None) ) @given(ancestor_revisions()) @@ -860,6 +866,7 @@ revision, dir_entry['name'], with_data=True)) ) + @pytest.mark.origin_id @given(new_origins(20)) def test_lookup_origins(self, new_origins): diff --git a/swh/web/tests/data.py b/swh/web/tests/data.py --- a/swh/web/tests/data.py +++ b/swh/web/tests/data.py @@ -138,20 +138,17 @@ # input data for tests _TEST_ORIGINS = [ { - 'id': 1, 'type': 'git', 'url': 'https://github.com/wcoder/highlightjs-line-numbers.js', 'archives': ['highlightjs-line-numbers.js.zip', 'highlightjs-line-numbers.js_visit2.zip'] }, { - 'id': 2, 'type': 'git', 'url': 'https://github.com/memononen/libtess2', 'archives': ['libtess2.zip'] }, { - 'id': 3, 'type': 'git', 'url': 'repo_with_submodules', 'archives': ['repo_with_submodules.tgz'] @@ -178,6 +175,7 @@ loader.load(origin['url'], origin_repo_archive, None) if nb_visits > 1 and i != nb_visits - 1: time.sleep(1) + origin.update(storage.origin_get(origin)) # add an 'id' key if enabled contents = set() directories = set() @@ -190,7 +188,7 @@ # Get all objects loaded into the test archive for origin in _TEST_ORIGINS: - snp = storage.snapshot_get_latest(origin['id']) + snp = storage.snapshot_get_latest(origin['url']) snapshots.add(hash_to_hex(snp['id'])) for branch_name, branch_data in snp['branches'].items(): if branch_data['target_type'] == 'revision': diff --git a/swh/web/tests/strategies.py b/swh/web/tests/strategies.py --- a/swh/web/tests/strategies.py +++ b/swh/web/tests/strategies.py @@ -212,7 +212,7 @@ ret = [] tests_data = get_tests_data() for origin in tests_data['origins']: - visits = list(tests_data['storage'].origin_visit_get(origin['id'])) + visits = list(tests_data['storage'].origin_visit_get(origin['url'])) if len(visits) > 1: ret.append(origin) return sampled_from(ret) @@ -226,7 +226,7 @@ ret = [] tests_data = get_tests_data() for origin in tests_data['origins']: - snapshot = tests_data['storage'].snapshot_get_latest(origin['id']) + snapshot = tests_data['storage'].snapshot_get_latest(origin['url']) if any([b['target_type'] == 'release' for b in snapshot['branches'].values()]): ret.append(origin) @@ -424,7 +424,7 @@ tests_data = get_tests_data() storage = tests_data['storage'] origin = random.choice(tests_data['origins'][:-1]) - snapshot = storage.snapshot_get_latest(origin['id']) + snapshot = storage.snapshot_get_latest(origin['url']) head = snapshot['branches'][b'HEAD']['target'] return get_revisions_walker('dfs', storage, head) diff --git a/tox.ini b/tox.ini --- a/tox.ini +++ b/tox.ini @@ -9,6 +9,16 @@ commands = pytest --hypothesis-profile=swh-web-fast --cov {envsitepackagesdir}/swh/web --cov-branch {posargs} {envsitepackagesdir}/swh/web +[testenv:py3-no-origin-ids] +deps = + .[testing] + pytest-cov + pytest-django +setenv = + SWH_STORAGE_IN_MEMORY_ENABLE_ORIGIN_IDS=false +commands = + pytest --hypothesis-profile=swh-web-fast --cov {envsitepackagesdir}/swh/web --cov-branch {posargs} {envsitepackagesdir}/swh/web -m "not origin_id" + [testenv:py3-slow] deps = .[testing]
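
For reference, after this change the `origin` argument accepted by `_lookup_revision_id_by` (and by the `lookup_revision_by`, `lookup_revision_log_by` and `lookup_revision_with_context_by` wrappers) can be either a legacy integer id or an origin URL. A minimal standalone sketch of the normalization added in the service.py hunk above; it mirrors the `isinstance()` dispatch but does not import swh.web, so it runs on its own:

```python
from typing import Dict, Union


def normalize_origin(origin: Union[int, str]) -> Dict[str, Union[int, str]]:
    # Mirror of the dispatch added to _lookup_revision_id_by: an int is
    # treated as a legacy origin id, a str as an origin URL.
    if isinstance(origin, int):
        return {'id': origin}
    elif isinstance(origin, str):
        return {'url': origin}
    raise TypeError('"origin" must be an int or a string.')


assert normalize_origin(1) == {'id': 1}
assert normalize_origin('https://github.com/memononen/libtess2') == \
    {'url': 'https://github.com/memononen/libtess2'}
```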
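The new `origin_url` branches in `lookup_revision_through` are checked before the legacy `origin_id` ones. A simplified, standalone sketch of the resulting criterion dispatch, with the key combinations taken from the docstring above (the real function forwards to the service helpers instead of returning a name, and the remaining `sha1_git_root`/`sha1_git` combinations are omitted here):

```python
def dispatch_revision_lookup(revision):
    # revision is the criterion dict passed to lookup_revision_through()
    keys = revision.keys()
    if {'origin_url', 'branch_name', 'ts', 'sha1_git'} <= keys:
        return 'lookup_revision_with_context_by(revision["origin_url"], ...)'
    if {'origin_id', 'branch_name', 'ts', 'sha1_git'} <= keys:
        return 'lookup_revision_with_context_by(revision["origin_id"], ...)'
    if {'origin_url', 'branch_name', 'ts'} <= keys:
        return 'lookup_revision_by(revision["origin_url"], ...)'
    if {'origin_id', 'branch_name', 'ts'} <= keys:
        return 'lookup_revision_by(revision["origin_id"], ...)'
    if 'sha1_git' in keys:
        return 'lookup_revision(revision["sha1_git"])'
    raise NotImplementedError('unsupported combination of criteria')


assert dispatch_revision_lookup({
    'origin_url': 'https://github.com/memononen/libtess2',
    'branch_name': 'refs/heads/master',
    'ts': None,
}) == 'lookup_revision_by(revision["origin_url"], ...)'
```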
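The new py3-no-origin-ids tox environment deselects the tests tagged with the `origin_id` pytest marker added throughout the test modules. If that marker is not already declared elsewhere in the test configuration, a hypothetical conftest.py fragment along these lines (not part of this diff) would keep pytest from warning about an unregistered mark:

```python
# Hypothetical conftest.py addition: declare the "origin_id" marker used by
# the -m "not origin_id" selection in the py3-no-origin-ids tox environment.
def pytest_configure(config):
    config.addinivalue_line(
        'markers',
        'origin_id: tests relying on legacy integer origin ids, deselected '
        'when SWH_STORAGE_IN_MEMORY_ENABLE_ORIGIN_IDS=false')
```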