Changeset View
Changeset View
Standalone View
Standalone View
swh/loader/package/nixguix/tests/test_functional.py
- This file was moved from swh/loader/package/functional/tests/test_functional.py.
# Copyright (C) 2020  The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import pytest | import pytest | ||||
from json.decoder import JSONDecodeError | from json.decoder import JSONDecodeError | ||||
from swh.loader.package.functional.loader import ( | from swh.loader.package.nixguix.loader import ( | ||||
FunctionalLoader, retrieve_sources | NixGuixLoader, retrieve_sources | ||||
) | ) | ||||
from swh.loader.package.tests.common import ( | from swh.loader.package.tests.common import ( | ||||
get_stats, check_snapshot | get_stats, check_snapshot | ||||
) | ) | ||||
sources_url = 'https://nix-community.github.io/nixpkgs-swh/sources.json' | sources_url = 'https://nix-community.github.io/nixpkgs-swh/sources.json' | ||||
def test_retrieve_sources(swh_config, requests_mock_datadir): | def test_retrieve_sources(swh_config, requests_mock_datadir): | ||||
j = retrieve_sources(sources_url) | j = retrieve_sources(sources_url) | ||||
assert "sources" in j.keys() | assert "sources" in j.keys() | ||||
assert len(j["sources"]) == 2 | assert len(j["sources"]) == 2 | ||||
def test_retrieve_non_existing(swh_config, requests_mock_datadir): | def test_retrieve_non_existing(swh_config, requests_mock_datadir): | ||||
with pytest.raises(ValueError): | with pytest.raises(ValueError): | ||||
FunctionalLoader('https://non-existing-url') | NixGuixLoader('https://non-existing-url') | ||||
def test_retrieve_non_json(swh_config, requests_mock_datadir): | def test_retrieve_non_json(swh_config, requests_mock_datadir): | ||||
with pytest.raises(JSONDecodeError): | with pytest.raises(JSONDecodeError): | ||||
FunctionalLoader('https://example.com/file.txt') | NixGuixLoader('https://example.com/file.txt') | ||||
def test_loader_one_visit(swh_config, requests_mock_datadir): | def test_loader_one_visit(swh_config, requests_mock_datadir): | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
res = loader.load() | res = loader.load() | ||||
assert res['status'] == 'eventful' | assert res['status'] == 'eventful' | ||||
stats = get_stats(loader.storage) | stats = get_stats(loader.storage) | ||||
assert { | assert { | ||||
'content': 1, | 'content': 1, | ||||
'directory': 3, | 'directory': 3, | ||||
'origin': 1, | 'origin': 1, | ||||
'origin_visit': 1, | 'origin_visit': 1, | ||||
'person': 1, | 'person': 1, | ||||
'release': 0, | 'release': 0, | ||||
'revision': 1, | 'revision': 1, | ||||
'skipped_content': 0, | 'skipped_content': 0, | ||||
'snapshot': 1 | 'snapshot': 1 | ||||
} == stats | } == stats | ||||
origin_visit = loader.storage.origin_visit_get_latest(sources_url) | origin_visit = loader.storage.origin_visit_get_latest(sources_url) | ||||
# The visit is partial because urls pointing to non tarball file | # The visit is partial because urls pointing to non tarball file | ||||
# are not handled yet | # are not handled yet | ||||
assert origin_visit['status'] == 'partial' | assert origin_visit['status'] == 'partial' | ||||
assert origin_visit['type'] == 'functional' | assert origin_visit['type'] == 'nixguix' | ||||
def test_uncompress_failure(swh_config, requests_mock_datadir): | def test_uncompress_failure(swh_config, requests_mock_datadir): | ||||
"""Non tarball files are currently not supported and the uncompress | """Non tarball files are currently not supported and the uncompress | ||||
function fails on such kind of files. | function fails on such kind of files. | ||||
However, even in this case of failure (because of the url | However, even in this case of failure (because of the url | ||||
https://example.com/file.txt), a snapshot and a visit has to be | https://example.com/file.txt), a snapshot and a visit has to be | ||||
created (with a status partial since all files are not archived). | created (with a status partial since all files are not archived). | ||||
""" | """ | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
loader_status = loader.load() | loader_status = loader.load() | ||||
urls = [s['url'][0] for s in loader.sources] | urls = [s['url'][0] for s in loader.sources] | ||||
assert "https://example.com/file.txt" in urls | assert "https://example.com/file.txt" in urls | ||||
assert loader_status['status'] == 'eventful' | assert loader_status['status'] == 'eventful' | ||||
origin_visit = loader.storage.origin_visit_get_latest(sources_url) | origin_visit = loader.storage.origin_visit_get_latest(sources_url) | ||||
# The visit is partial because urls pointing to non tarball files | # The visit is partial because urls pointing to non tarball files | ||||
# are not handled yet | # are not handled yet | ||||
assert origin_visit['status'] == 'partial' | assert origin_visit['status'] == 'partial' | ||||
def test_loader_incremental(swh_config, requests_mock_datadir): | def test_loader_incremental(swh_config, requests_mock_datadir): | ||||
"""Ensure a second visit do not download artifact already | """Ensure a second visit do not download artifact already | ||||
downloaded by the previous visit. | downloaded by the previous visit. | ||||
""" | """ | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
load_status = loader.load() | load_status = loader.load() | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
loader.load() | loader.load() | ||||
expected_snapshot_id = '0c5881c74283793ebe9a09a105a9381e41380383' | expected_snapshot_id = '0c5881c74283793ebe9a09a105a9381e41380383' | ||||
assert load_status == { | assert load_status == { | ||||
'status': 'eventful', | 'status': 'eventful', | ||||
'snapshot_id': expected_snapshot_id | 'snapshot_id': expected_snapshot_id | ||||
} | } | ||||
expected_branches = { | expected_branches = { | ||||
'evaluation': { | 'evaluation': { | ||||
Show All 25 Lines | def test_loader_two_visits(swh_config, requests_mock_datadir_visits): | ||||
"""To ensure there is only one origin, but two visits, two revisions | """To ensure there is only one origin, but two visits, two revisions | ||||
and two snapshots are created. | and two snapshots are created. | ||||
The first visit creates a snapshot containing one tarball. The | The first visit creates a snapshot containing one tarball. The | ||||
second visit creates a snapshot containing the same tarball and | second visit creates a snapshot containing the same tarball and | ||||
another tarball. | another tarball. | ||||
""" | """ | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
load_status = loader.load() | load_status = loader.load() | ||||
expected_snapshot_id = '0c5881c74283793ebe9a09a105a9381e41380383' | expected_snapshot_id = '0c5881c74283793ebe9a09a105a9381e41380383' | ||||
assert load_status == { | assert load_status == { | ||||
'status': 'eventful', | 'status': 'eventful', | ||||
'snapshot_id': expected_snapshot_id | 'snapshot_id': expected_snapshot_id | ||||
} | } | ||||
expected_branches = { | expected_branches = { | ||||
Show All 21 Lines | assert { | ||||
'origin_visit': 1, | 'origin_visit': 1, | ||||
'person': 1, | 'person': 1, | ||||
'release': 0, | 'release': 0, | ||||
'revision': 1, | 'revision': 1, | ||||
'skipped_content': 0, | 'skipped_content': 0, | ||||
'snapshot': 1 | 'snapshot': 1 | ||||
} == stats | } == stats | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
load_status = loader.load() | load_status = loader.load() | ||||
expected_snapshot_id = 'b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97' | expected_snapshot_id = 'b0bfa75cbd0cc90aac3b9e95fb0f59c731176d97' | ||||
assert load_status == { | assert load_status == { | ||||
'status': 'eventful', | 'status': 'eventful', | ||||
'snapshot_id': expected_snapshot_id | 'snapshot_id': expected_snapshot_id | ||||
} | } | ||||
# This ensures visits are incremental. Indeed, if we request a | # This ensures visits are incremental. Indeed, if we request a | ||||
Show All 30 Lines | assert { | ||||
'release': 0, | 'release': 0, | ||||
'revision': 2, | 'revision': 2, | ||||
'skipped_content': 0, | 'skipped_content': 0, | ||||
'snapshot': 2 | 'snapshot': 2 | ||||
} == stats | } == stats | ||||
def test_resolve_revision_from(swh_config, requests_mock_datadir): | def test_resolve_revision_from(swh_config, requests_mock_datadir): | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
known_artifacts = { | known_artifacts = { | ||||
'id1': {'extrinsic': {'raw': {'url': "url1"}}}, | 'id1': {'extrinsic': {'raw': {'url': "url1"}}}, | ||||
'id2': {'extrinsic': {'raw': {'url': "url2"}}} | 'id2': {'extrinsic': {'raw': {'url': "url2"}}} | ||||
} | } | ||||
metadata = {'url': 'url1'} | metadata = {'url': 'url1'} | ||||
assert loader.resolve_revision_from(known_artifacts, metadata) == 'id1' | assert loader.resolve_revision_from(known_artifacts, metadata) == 'id1' | ||||
metadata = {'url': 'url3'} | metadata = {'url': 'url3'} | ||||
assert loader.resolve_revision_from(known_artifacts, metadata) == None # noqa | assert loader.resolve_revision_from(known_artifacts, metadata) == None # noqa | ||||
def test_evaluation_branch(swh_config, requests_mock_datadir): | def test_evaluation_branch(swh_config, requests_mock_datadir): | ||||
loader = FunctionalLoader(sources_url) | loader = NixGuixLoader(sources_url) | ||||
res = loader.load() | res = loader.load() | ||||
assert res['status'] == 'eventful' | assert res['status'] == 'eventful' | ||||
expected_branches = { | expected_branches = { | ||||
'https://github.com/owner-1/repository-1/revision-1.tgz': { | 'https://github.com/owner-1/repository-1/revision-1.tgz': { | ||||
'target': '488ad4e7b8e2511258725063cf43a2b897c503b4', | 'target': '488ad4e7b8e2511258725063cf43a2b897c503b4', | ||||
'target_type': 'revision', | 'target_type': 'revision', | ||||
}, | }, | ||||
Show All 13 Lines | |||||
def test_eoferror(swh_config, requests_mock_datadir): | def test_eoferror(swh_config, requests_mock_datadir): | ||||
"""Load a truncated archive which is invalid to make the uncompress | """Load a truncated archive which is invalid to make the uncompress | ||||
function raising the exception EOFError. We then check if a | function raising the exception EOFError. We then check if a | ||||
snapshot is created, meaning this error is well managed. | snapshot is created, meaning this error is well managed. | ||||
""" | """ | ||||
sources = "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa | sources = "https://nix-community.github.io/nixpkgs-swh/sources-EOFError.json" # noqa | ||||
loader = FunctionalLoader(sources) | loader = NixGuixLoader(sources) | ||||
loader.load() | loader.load() | ||||
expected_branches = { | expected_branches = { | ||||
'evaluation': { | 'evaluation': { | ||||
'target': 'cc4e04c26672dd74e5fd0fecb78b435fb55368f7', | 'target': 'cc4e04c26672dd74e5fd0fecb78b435fb55368f7', | ||||
'target_type': 'revision', | 'target_type': 'revision', | ||||
}, | }, | ||||
} | } | ||||
expected_snapshot = { | expected_snapshot = { | ||||
'id': '4257fa2350168c6bfec726a06452ea27a2c0cb33', | 'id': '4257fa2350168c6bfec726a06452ea27a2c0cb33', | ||||
'branches': expected_branches, | 'branches': expected_branches, | ||||
} | } | ||||
check_snapshot(expected_snapshot, storage=loader.storage) | check_snapshot(expected_snapshot, storage=loader.storage) |