D417.id1292.diff
diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py
--- a/swh/web/browse/utils.py
+++ b/swh/web/browse/utils.py
@@ -4,6 +4,7 @@
 # See top-level LICENSE file for more information
 
 import base64
+from collections import defaultdict
 import magic
 import math
 import pypandoc
@@ -370,57 +371,75 @@
     if cache_entry:
         return cache_entry['branches'], cache_entry['releases']
 
-    branches = []
-    releases = []
+    branches = {}
+    releases = {}
 
     if snapshot_id:
-        revision_ids = []
-        releases_ids = []
+        revision_to_branch = defaultdict(set)
+        revision_to_release = defaultdict(set)
+        release_to_branch = defaultdict(set)
         snapshot = service.lookup_snapshot(snapshot_id)
         snapshot_branches = snapshot['branches']
-        for key in sorted(snapshot_branches.keys()):
-            if not snapshot_branches[key]:
+        for branch_name, target in snapshot_branches.items():
+            if not target:
+                # FIXME: display branches with an unknown target anyway
                 continue
-            if snapshot_branches[key]['target_type'] == 'revision':
-                branches.append({'name': key,
-                                 'revision': snapshot_branches[key]['target']})
-                revision_ids.append(snapshot_branches[key]['target'])
-            elif snapshot_branches[key]['target_type'] == 'release':
-                releases_ids.append(snapshot_branches[key]['target'])
-
-        releases_info = service.lookup_release_multiple(releases_ids)
+            target_id = target['target']
+            target_type = target['target_type']
+            if target_type == 'revision':
+                branches[branch_name] = {
+                    'name': branch_name,
+                    'revision': target_id,
+                }
+                revision_to_branch[target_id].add(branch_name)
+            elif target_type == 'release':
+                release_to_branch[target_id].add(branch_name)
+            # FIXME: handle pointers to other object types
+            # FIXME: handle branch aliases
+
+        releases_info = service.lookup_release_multiple(
+            release_to_branch.keys()
+        )
         for release in releases_info:
-            releases.append({'name': release['name'],
-                             'date': format_utc_iso_date(release['date']),
-                             'id': release['id'],
-                             'message': release['message'],
-                             'target_type': release['target_type'],
-                             'target': release['target']})
-            revision_ids.append(release['target'])
-
-        revisions = service.lookup_revision_multiple(revision_ids)
-
-        branches_to_remove = []
-
-        for idx, revision in enumerate(revisions):
-            if idx < len(branches):
-                if revision:
-                    branches[idx]['directory'] = revision['directory']
-                    branches[idx]['date'] = format_utc_iso_date(revision['date'])  # noqa
-                    branches[idx]['message'] = revision['message']
-                else:
-                    branches_to_remove.append(branches[idx])
-            else:
-                rel_idx = idx - len(branches)
-                if revision:
-                    releases[rel_idx]['directory'] = revision['directory']
-
-        for b in branches_to_remove:
-            branches.remove(b)
-
-    cache.set(cache_entry_id, {'branches': branches, 'releases': releases})
-
-    return branches, releases
+            branches_to_update = release_to_branch[release['id']]
+            for branch in branches_to_update:
+                releases[branch] = {
+                    'name': release['name'],
+                    'date': format_utc_iso_date(release['date']),
+                    'id': release['id'],
+                    'message': release['message'],
+                    'target_type': release['target_type'],
+                    'target': release['target'],
+                }
+            if release['target_type'] == 'revision':
+                revision_to_release[release['target']].update(
+                    branches_to_update
+                )
+
+        revisions = service.lookup_revision_multiple(
+            set(revision_to_branch.keys()) | set(revision_to_release.keys())
+        )
+
+        for revision in revisions:
+            revision_data = {
+                'directory': revision['directory'],
+                'date': format_utc_iso_date(revision['date']),
+                'message': revision['message'],
+            }
+            for branch in revision_to_branch[revision['id']]:
+                branches[branch].update(revision_data)
+            for release in revision_to_release[revision['id']]:
+                releases[release]['directory'] = revision['directory']
+
+    ret_branches = list(sorted(branches.values(), key=lambda b: b['name']))
+    ret_releases = list(sorted(releases.values(), key=lambda b: b['name']))
+
+    cache.set(cache_entry_id, {
+        'branches': ret_branches,
+        'releases': ret_releases,
+    })
+
+    return ret_branches, ret_releases
 
 
 def get_origin_visit_snapshot(origin_info, visit_ts=None, visit_id=None,
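Note on the hunk above: the positional branch/release lists are replaced by dicts keyed by branch name, plus three reverse mappings built with defaultdict(set), so that the batched release and revision lookups can be folded back onto every branch that shares a target. What follows is a minimal standalone sketch of that flow, not the real swh.web code: the function name, the in-memory RELEASES/REVISIONS stand-ins for service.lookup_release_multiple / service.lookup_revision_multiple, and all ids are illustrative.

from collections import defaultdict

# Illustrative stand-ins for the batched service lookups (ids are made up):
RELEASES = {
    'rel1': {'id': 'rel1', 'name': 'v0.1', 'message': 'v0.1\n',
             'date': '2015-08-04', 'target_type': 'revision', 'target': 'rev2'},
}
REVISIONS = {
    'rev1': {'id': 'rev1', 'directory': 'dir1', 'date': '2015-08-04', 'message': 'tip'},
    'rev2': {'id': 'rev2', 'directory': 'dir2', 'date': '2015-07-01', 'message': 'tagged'},
}


def process_snapshot_branches(snapshot_branches):
    branches, releases = {}, {}
    revision_to_branch = defaultdict(set)   # revision id -> branch names to update
    revision_to_release = defaultdict(set)  # revision id -> release entries to update
    release_to_branch = defaultdict(set)    # release id -> branch names pointing at it

    # Pass 1: index every branch by name and record the reverse mappings.
    for branch_name, target in snapshot_branches.items():
        if not target:
            continue
        if target['target_type'] == 'revision':
            branches[branch_name] = {'name': branch_name, 'revision': target['target']}
            revision_to_branch[target['target']].add(branch_name)
        elif target['target_type'] == 'release':
            release_to_branch[target['target']].add(branch_name)

    # Pass 2: one batched release lookup, fanned out to every pointing branch.
    for release_id in release_to_branch:
        release = RELEASES[release_id]
        for branch in release_to_branch[release_id]:
            releases[branch] = dict(release)
            if release['target_type'] == 'revision':
                revision_to_release[release['target']].add(branch)

    # Pass 3: one batched revision lookup over the deduplicated id set.
    for revision_id in set(revision_to_branch) | set(revision_to_release):
        revision = REVISIONS[revision_id]
        for branch in revision_to_branch[revision_id]:
            branches[branch].update({'directory': revision['directory'],
                                     'date': revision['date'],
                                     'message': revision['message']})
        for name in revision_to_release[revision_id]:
            releases[name]['directory'] = revision['directory']

    return (sorted(branches.values(), key=lambda b: b['name']),
            sorted(releases.values(), key=lambda r: r['name']))


# Usage: two branches sharing one revision, one tag pointing at a release.
snapshot_branches = {
    'HEAD': {'target_type': 'revision', 'target': 'rev1'},
    'refs/heads/master': {'target_type': 'revision', 'target': 'rev1'},
    'refs/tags/v0.1': {'target_type': 'release', 'target': 'rel1'},
}
branches, releases = process_snapshot_branches(snapshot_branches)
assert [b['name'] for b in branches] == ['HEAD', 'refs/heads/master']
assert releases[0]['directory'] == 'dir2'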
diff --git a/swh/web/tests/browse/test_utils.py b/swh/web/tests/browse/test_utils.py
--- a/swh/web/tests/browse/test_utils.py
+++ b/swh/web/tests/browse/test_utils.py
@@ -115,6 +115,11 @@
 
         mock_service.lookup_snapshot.return_value = \
             {'branches': {
+                'HEAD': {
+                    'target': '9fbd21adbac36be869514e82e2e98505dc47219c',
+                    'target_type': 'revision',
+                    'target_url': '/api/1/revision/9fbd21adbac36be869514e82e2e98505dc47219c/'
+                },
                 'refs/heads/master': {
                     'target': '9fbd21adbac36be869514e82e2e98505dc47219c',
                     'target_type': 'revision',
@@ -162,7 +167,12 @@
               'message': '0.10.1\n'}]
 
         expected_result = (
-            [{'name': 'refs/heads/master',
+            [{'name': 'HEAD',
+              'message': 'Merge pull request #678 from algernon',
+              'date': '04 August 2015, 10:16 UTC',
+              'revision': '9fbd21adbac36be869514e82e2e98505dc47219c',
+              'directory': '828da2b80e41aa958b2c98526f4a1d2cc7d298b7'},
+             {'name': 'refs/heads/master',
               'message': 'Merge pull request #678 from algernon',
               'date': '04 August 2015, 10:16 UTC',
               'revision': '9fbd21adbac36be869514e82e2e98505dc47219c',
@@ -196,7 +206,7 @@
 
         self.assertEqual(len(lookup_release_calls), 1)
         # Check that we looked up the two expected releases
-        self.assertCountEqual(lookup_release_calls[0][0][0], {
+        self.assertCountEqual(set(lookup_release_calls[0][0][0]), {
             '7045404f3d1c54e6473c71bbb716529fbad4be24',
             'c893f4549c367e68288b0eb74595050410aa0de7',
         })
@@ -205,7 +215,7 @@
 
         self.assertEqual(len(lookup_revision_calls), 1)
         # Check that we looked up the three expected revisions
-        self.assertCountEqual(lookup_revision_calls[0][0][0], {
+        self.assertCountEqual(set(lookup_revision_calls[0][0][0]), {
             '9fbd21adbac36be869514e82e2e98505dc47219c',
             '6072557b6c10cd9a21145781e26ad1f978ed14b9',
             'ecc003b43433e5b46511157598e4857a761007bf',
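A note on the two assertCountEqual changes: the reworked code passes deduplicated collections of target ids to lookup_release_multiple and lookup_revision_multiple, so the recorded call arguments are normalized with set() before comparison. A small illustrative check using the ids from this test's mocked data; the tag branch names below are hypothetical placeholders, not taken from the test.

# Branches pointing at the same revision collapse to a single lookup id.
revision_to_branch = {
    '9fbd21adbac36be869514e82e2e98505dc47219c': {'HEAD', 'refs/heads/master'},
}
# Revisions reached through the two mocked releases (tag names illustrative).
revision_to_release = {
    '6072557b6c10cd9a21145781e26ad1f978ed14b9': {'refs/tags/0.10.0'},
    'ecc003b43433e5b46511157598e4857a761007bf': {'refs/tags/0.10.1'},
}
looked_up = set(revision_to_branch.keys()) | set(revision_to_release.keys())
assert looked_up == {
    '9fbd21adbac36be869514e82e2e98505dc47219c',
    '6072557b6c10cd9a21145781e26ad1f978ed14b9',
    'ecc003b43433e5b46511157598e4857a761007bf',
}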
Attached To: D417: Rework the way we process snapshots and branches