diff --git a/swh/web/browse/views/origin.py b/swh/web/browse/views/origin.py
--- a/swh/web/browse/views/origin.py
+++ b/swh/web/browse/views/origin.py
@@ -21,11 +21,11 @@
 )
 
 
-@browse_route(r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/',
-              r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)'
-              '/directory/(?P<path>.+)/',
-              r'origin/(?P<origin_url>.+)/directory/',
+@browse_route(r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)'
+              r'/directory/(?P<path>.+)/',
               r'origin/(?P<origin_url>.+)/directory/(?P<path>.+)/',
+              r'origin/(?P<origin_url>.+)/visit/(?P<timestamp>.+)/directory/',
+              r'origin/(?P<origin_url>.+)/directory/',
               view_name='browse-origin-directory')
 def origin_directory_browse(request, origin_url, timestamp=None, path=None):
diff --git a/swh/web/browse/views/snapshot.py b/swh/web/browse/views/snapshot.py
--- a/swh/web/browse/views/snapshot.py
+++ b/swh/web/browse/views/snapshot.py
@@ -30,8 +30,8 @@
     return redirect(browse_snapshot_url)
 
 
-@browse_route(r'snapshot/(?P<snapshot_id>[0-9a-f]+)/directory/',
-              r'snapshot/(?P<snapshot_id>[0-9a-f]+)/directory/(?P<path>.+)/',
+@browse_route(r'snapshot/(?P<snapshot_id>[0-9a-f]+)/directory/(?P<path>.+)/',
+              r'snapshot/(?P<snapshot_id>[0-9a-f]+)/directory/',
               view_name='browse-snapshot-directory',
               checksum_args=['snapshot_id'])
 def snapshot_directory_browse(request, snapshot_id, path=None):
diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py
--- a/swh/web/tests/browse/views/test_origin.py
+++ b/swh/web/tests/browse/views/test_origin.py
@@ -12,6 +12,7 @@
 
 from hypothesis import given
 
+
 from swh.model.hashutil import hash_to_bytes
 from swh.web.browse.utils import process_snapshot_branches
 from swh.web.common.exc import NotFoundExc
@@ -23,7 +24,8 @@
 from swh.web.tests.django_asserts import assert_contains, assert_template_used
 from swh.web.tests.strategies import (
     origin, origin_with_multiple_visits, new_origin,
-    new_snapshot, visit_dates, revisions, origin_with_releases
+    new_snapshot, visit_dates, revisions, origin_with_releases,
+    origin_with_directory_named_directory
 )
 
 
@@ -535,6 +537,17 @@
                     resp.content.decode('utf-8'))
 
 
+@given(origin_with_directory_named_directory())
+def test_origin_browse_directory_named_directory(client, origin_and_path):
+    url = reverse('browse-origin-directory',
+                  url_args={'origin_url': origin_and_path['origin'],
+                            'path': origin_and_path['path']})
+
+    resp = client.get(url)
+    assert resp.status_code == 200
+    assert_template_used('directory.html')
+
+
 def _origin_content_view_test_helper(client, origin_info, origin_visits,
                                      origin_branches, origin_releases,
                                      root_dir_sha1, content,
@@ -800,11 +813,13 @@
     assert_contains(resp, 'vault-cook-directory')
     assert_contains(resp, 'vault-cook-revision')
 
-    swh_dir_id = get_swh_persistent_id('directory', directory_entries[0]['dir_id'])  # noqa
-    swh_dir_id_url = reverse('browse-swh-id',
-                             url_args={'swh_id': swh_dir_id})
-    assert_contains(resp, swh_dir_id)
-    assert_contains(resp, swh_dir_id_url)
+    if directory_entries:
+        swh_dir_id = get_swh_persistent_id('directory',
+                                           directory_entries[0]['dir_id'])
+        swh_dir_id_url = reverse('browse-swh-id',
+                                 url_args={'swh_id': swh_dir_id})
+        assert_contains(resp, swh_dir_id)
+        assert_contains(resp, swh_dir_id_url)
 
     assert_contains(resp, 'swh-take-new-snapshot')
 
diff --git a/swh/web/tests/data.py b/swh/web/tests/data.py
--- a/swh/web/tests/data.py
+++ b/swh/web/tests/data.py
@@ -7,6 +7,7 @@
 import random
 
 from copy import deepcopy
+from datetime import datetime
 from typing import Dict
 
 from rest_framework.decorators import api_view
@@ -16,9 +17,14 @@
 from swh.indexer.mimetype import MimetypeIndexer
 from swh.indexer.ctags import CtagsIndexer
 from swh.indexer.storage import get_indexer_storage
-from swh.model.from_disk import Directory
+from swh.model import from_disk
+from swh.model.from_disk import DentryPerms
 from swh.model.hashutil import hash_to_hex, hash_to_bytes, DEFAULT_ALGORITHMS
 from swh.model.identifiers import directory_identifier
+from swh.model.model import (
+    Directory, DirectoryEntry, Revision, Snapshot, TimestampWithTimezone,
+    Person, Timestamp, SnapshotBranch, TargetType
+)
 from swh.loader.git.from_disk import GitLoaderFromArchive
 from swh.storage.algos.dir_iterators import dir_iterator
 from swh.web import config
@@ -247,6 +253,50 @@
     empty_dir_id_bin = hash_to_bytes(empty_dir_id)
     storage.directory_add([{'id': empty_dir_id_bin, 'entries': []}])
 
+    # Generate an origin whose root directory contains a single
+    # directory named 'directory'
+    origin = {'url': 'https://git-server-domain/project-name'}
+    visit_date = datetime.now()
+
+    dir_entry = DirectoryEntry(name=b'directory', type='dir',
+                               target=empty_dir_id_bin,
+                               perms=DentryPerms.directory)
+    dir = Directory(entries=[dir_entry])
+
+    person = Person(name=b'abcd', email=b'abcd@company.org',
+                    fullname=b'abcd <abcd@company.org>')
+    timestamp = visit_date.timestamp()
+    date = TimestampWithTimezone(negative_utc=False, offset=0,
+                                 timestamp=Timestamp(seconds=int(timestamp),
+                                                     microseconds=0))
+    rev = Revision(author=person, committer=person, date=date,
+                   committer_date=date, message=b'Initial commit',
+                   metadata=[], parents=[], synthetic=False, type='git',
+                   directory=dir.id)
+
+    snp = Snapshot(branches={
+        b'master': SnapshotBranch(target_type=TargetType.REVISION,
+                                  target=rev.id),
+        b'HEAD': SnapshotBranch(target_type=TargetType.ALIAS,
+                                target=b'master')
+    })
+
+    storage.directory_add([dir.to_dict()])
+    storage.revision_add([rev.to_dict()])
+    storage.snapshot_add([snp.to_dict()])
+
+    storage.origin_add_one(origin)
+    origin_visit = storage.origin_visit_add(origin['url'], visit_date,
+                                            type='git')
+    storage.origin_visit_update(origin['url'], origin_visit['visit'],
+                                snapshot=snp.id)
+
+    _TEST_ORIGINS.append({
+        'type': 'git',
+        'url': origin['url'],
+        'visit_date': [visit_date.isoformat()]
+    })
+
     # Return tests data
     return {
         'storage': storage,
@@ -345,8 +395,8 @@
     """
     test_contents_dir = os.path.join(
         os.path.dirname(__file__), data_path).encode('utf-8')
-    directory = Directory.from_disk(path=test_contents_dir, data=True,
-                                    save_path=True)
+    directory = from_disk.Directory.from_disk(path=test_contents_dir,
+                                              data=True, save_path=True)
     objects = directory.collect()
     for c in objects['content'].values():
         c['status'] = 'visible'
diff --git a/swh/web/tests/strategies.py b/swh/web/tests/strategies.py
--- a/swh/web/tests/strategies.py
+++ b/swh/web/tests/strategies.py
@@ -16,6 +16,7 @@
 
 from swh.model.hashutil import hash_to_hex, hash_to_bytes
 from swh.model.identifiers import directory_identifier
+from swh.storage.algos.dir_iterators import dir_iterator
 from swh.storage.algos.revisions_walker import get_revisions_walker
 from swh.model.hypothesis_strategies import (
     origins as new_origin_strategy, snapshots as new_snapshot
@@ -166,7 +167,7 @@
     """
     return new_content().filter(
         lambda c: next(get_tests_data()['storage'].content_get(
-                [hash_to_bytes(c['sha1'])])) is None)
+            [hash_to_bytes(c['sha1'])])) is None)
 
 
 def unknown_contents():
@@ -256,8 +257,8 @@
     into the test archive.
""" return new_origin_strategy().map(lambda origin: origin.to_dict()).filter( - lambda origin: get_tests_data()['storage'].origin_get( - [origin])[0] is None) + lambda origin: get_tests_data()['storage'].origin_get( + [origin])[0] is None) def new_origins(nb_origins=None): @@ -413,16 +414,21 @@ hash_to_bytes(s)) is None) -def _get_origin_dfs_revisions_walker(): - tests_data = get_tests_data() - storage = tests_data['storage'] - origin = random.choice(tests_data['origins'][:-1]) - snapshot = storage.snapshot_get_latest(origin['url']) +def _get_snapshot_head_revision(snapshot): if snapshot['branches'][b'HEAD']['target_type'] == 'alias': target = snapshot['branches'][b'HEAD']['target'] head = snapshot['branches'][target]['target'] else: head = snapshot['branches'][b'HEAD']['target'] + return head + + +def _get_origin_dfs_revisions_walker(): + tests_data = get_tests_data() + storage = tests_data['storage'] + origin = random.choice(tests_data['origins'][:-2]) + snapshot = storage.snapshot_get_latest(origin['url']) + head = _get_snapshot_head_revision(snapshot) return get_revisions_walker('dfs', storage, head) @@ -531,3 +537,21 @@ 'rev_dir_sha1_git': 'd92a21446387fa28410e5a74379c934298f39ae2', 'rev_dir_rev_path': 'libtess2' }) + + +def origin_with_directory_named_directory(): + tests_data = get_tests_data() + storage = tests_data['storage'] + + origins = [] + for origin in tests_data['origins']: + snapshot = storage.snapshot_get_latest(origin['url']) + head = _get_snapshot_head_revision(snapshot) + head_data = list(storage.revision_get([head]))[0] + for entry in dir_iterator(storage, head_data['directory']): + if entry['type'] == 'dir' and entry['name'] == b'directory': + origins.append({ + 'origin': origin['url'], + 'path': entry['path'].decode('utf-8') + }) + return sampled_from(origins)