D6455.diff
tests: Turn content* hypothesis strategies into pytest fixtures
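
This patch removes the content* Hypothesis strategies from swh/web/tests/strategies.py
and replaces them with pytest fixtures defined in swh/web/tests/conftest.py, so a test
receives a random archived object through a plain fixture argument instead of drawing it
with @given. Below is a minimal sketch of the conversion pattern, not part of the patch:
the fixture body mirrors the one added to conftest.py in the hunks that follow, and the
test shown (test_lookup_hash_exist from test_archive.py) is used only as an illustration.

    # Before this patch: the object is drawn by a Hypothesis strategy.
    from hypothesis import given
    from swh.web.tests.strategies import content

    @given(content())
    def test_lookup_hash_exist(archive_data, content):
        ...

    # After this patch: a function-scoped fixture picks one random object
    # from the data ingested into the test archive (the tests_data fixture).
    import random

    import pytest

    @pytest.fixture(scope="function")
    def content(tests_data):
        """Fixture returning a random content ingested into the test archive."""
        return random.choice(_known_swh_objects(tests_data, "contents"))

    # Tests then simply name the fixture as an argument.
    def test_lookup_hash_exist(archive_data, content):
        ...
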
diff --git a/swh/web/tests/api/test_apiresponse.py b/swh/web/tests/api/test_apiresponse.py
--- a/swh/web/tests/api/test_apiresponse.py
+++ b/swh/web/tests/api/test_apiresponse.py
@@ -22,7 +22,7 @@
from swh.web.common.identifiers import gen_swhid
from swh.web.common.utils import reverse
from swh.web.tests.django_asserts import assert_contains
-from swh.web.tests.strategies import content, directory, revision
+from swh.web.tests.strategies import directory, revision
from swh.web.tests.utils import check_http_get_response, check_http_post_response
@@ -153,7 +153,7 @@
assert "Traceback" in resp.data["traceback"]
-@given(content(), directory(), revision())
+@given(directory(), revision())
def test_api_endpoints_have_cors_headers(client, content, directory, revision):
url = reverse("api-1-stat-counters")
diff --git a/swh/web/tests/api/test_utils.py b/swh/web/tests/api/test_utils.py
--- a/swh/web/tests/api/test_utils.py
+++ b/swh/web/tests/api/test_utils.py
@@ -11,14 +11,7 @@
from swh.web.api import utils
from swh.web.common.origin_visits import get_origin_visits
from swh.web.common.utils import resolve_branch_alias, reverse
-from swh.web.tests.strategies import (
- content,
- directory,
- origin,
- release,
- revision,
- snapshot,
-)
+from swh.web.tests.strategies import directory, origin, release, revision, snapshot
url_map = [
{
@@ -227,7 +220,6 @@
assert utils.enrich_content({"id": "123"}) == {"id": "123"}
-@given(content())
def test_enrich_content_with_hashes(api_request_factory, content):
for algo in DEFAULT_ALGORITHMS:
@@ -262,7 +254,6 @@
assert enriched_content == content_data
-@given(content())
def test_enrich_content_with_hashes_and_top_level_url(api_request_factory, content):
for algo in DEFAULT_ALGORITHMS:
diff --git a/swh/web/tests/api/views/test_content.py b/swh/web/tests/api/views/test_content.py
--- a/swh/web/tests/api/views/test_content.py
+++ b/swh/web/tests/api/views/test_content.py
@@ -3,13 +3,11 @@
# License: GNU Affero General Public License version 3, or any later version
# See top-level LICENSE file for more information
-from hypothesis import given
import pytest
from swh.web.common.utils import reverse
from swh.web.tests.conftest import ctags_json_missing, fossology_missing
from swh.web.tests.data import random_content
-from swh.web.tests.strategies import content, contents_with_ctags
from swh.web.tests.utils import (
check_api_get_responses,
check_api_post_responses,
@@ -17,7 +15,6 @@
)
-@given(content())
def test_api_content_filetype(api_client, indexer_data, content):
indexer_data.content_add_mimetype(content["sha1"])
url = reverse(
@@ -68,7 +65,6 @@
@pytest.mark.skipif(
ctags_json_missing, reason="requires ctags with json output support"
)
-@given(contents_with_ctags())
def test_api_content_symbol(api_client, indexer_data, contents_with_ctags):
expected_data = {}
for content_sha1 in contents_with_ctags["sha1s"]:
@@ -136,7 +132,6 @@
@pytest.mark.skipif(
ctags_json_missing, reason="requires ctags with json output support"
)
-@given(content())
def test_api_content_ctags(api_client, indexer_data, content):
indexer_data.content_add_ctags(content["sha1"])
url = reverse(
@@ -155,7 +150,6 @@
@pytest.mark.skipif(fossology_missing, reason="requires fossology-nomossa installed")
-@given(content())
def test_api_content_license(api_client, indexer_data, content):
indexer_data.content_add_license(content["sha1"])
url = reverse(
@@ -191,7 +185,6 @@
}
-@given(content())
def test_api_content_metadata(api_client, archive_data, content):
url = reverse("api-1-content", {"q": "sha1:%s" % content["sha1"]})
rv = check_api_get_responses(api_client, url, status_code=200)
@@ -236,7 +229,6 @@
}
-@given(content())
def test_api_content_raw_text(api_client, archive_data, content):
url = reverse("api-1-content-raw", url_args={"q": "sha1:%s" % content["sha1"]})
@@ -250,7 +242,6 @@
assert rv.content == expected_data["data"]
-@given(content())
def test_api_content_raw_text_with_filename(api_client, archive_data, content):
url = reverse(
"api-1-content-raw",
@@ -264,7 +255,6 @@
assert rv.content == expected_data["data"]
-@given(content())
def test_api_check_content_known(api_client, content):
url = reverse("api-1-content-known", url_args={"q": content["sha1"]})
rv = check_api_get_responses(api_client, url, status_code=200)
@@ -274,7 +264,6 @@
}
-@given(content())
def test_api_check_content_known_post(api_client, content):
url = reverse("api-1-content-known")
rv = check_api_post_responses(
@@ -298,7 +287,6 @@
}
-@given(content())
def test_api_content_uppercase(api_client, content):
url = reverse(
"api-1-content-uppercase-checksum", url_args={"q": content["sha1"].upper()}
diff --git a/swh/web/tests/api/views/test_identifiers.py b/swh/web/tests/api/views/test_identifiers.py
--- a/swh/web/tests/api/views/test_identifiers.py
+++ b/swh/web/tests/api/views/test_identifiers.py
@@ -10,7 +10,6 @@
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
from swh.web.tests.strategies import (
- content,
directory,
origin,
release,
@@ -25,9 +24,9 @@
from swh.web.tests.utils import check_api_get_responses, check_api_post_responses
-@given(origin(), content(), directory(), release(), revision(), snapshot())
+@given(origin(), directory(), release(), revision(), snapshot())
def test_swhid_resolve_success(
- api_client, client, origin, content, directory, release, revision, snapshot
+ api_client, client, content, origin, directory, release, revision, snapshot
):
for obj_type, obj_id in (
@@ -116,7 +115,7 @@
check_api_get_responses(api_client, url, status_code=400)
-@given(content(), directory(), release(), revision(), snapshot())
+@given(directory(), release(), revision(), snapshot())
def test_api_known_swhid_all_present(
api_client, content, directory, release, revision, snapshot
):
@@ -135,7 +134,7 @@
assert resp.data == {swhid: {"known": True} for swhid in input_swhids}
-@given(content(), directory())
+@given(directory())
def test_api_known_swhid_some_present(api_client, content, directory):
content_ = gen_swhid(ObjectType.CONTENT, content["sha1_git"])
directory_ = gen_swhid(ObjectType.DIRECTORY, directory)
diff --git a/swh/web/tests/api/views/test_release.py b/swh/web/tests/api/views/test_release.py
--- a/swh/web/tests/api/views/test_release.py
+++ b/swh/web/tests/api/views/test_release.py
@@ -17,7 +17,7 @@
)
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
-from swh.web.tests.strategies import content, directory, release
+from swh.web.tests.strategies import directory, release
from swh.web.tests.utils import check_api_get_responses, check_http_get_response
@@ -39,7 +39,7 @@
assert rv.data == expected_release
-@given(content(), directory(), release())
+@given(directory(), release())
def test_api_release_target_type_not_a_revision(
api_client, archive_data, content, directory, release
):
diff --git a/swh/web/tests/api/views/test_revision.py b/swh/web/tests/api/views/test_revision.py
--- a/swh/web/tests/api/views/test_revision.py
+++ b/swh/web/tests/api/views/test_revision.py
@@ -17,7 +17,7 @@
from swh.web.api.utils import enrich_content, enrich_directory_entry, enrich_revision
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
-from swh.web.tests.strategies import content, new_person, new_swh_date, revision
+from swh.web.tests.strategies import new_person, new_swh_date, revision
from swh.web.tests.utils import check_api_get_responses, check_http_get_response
@@ -132,7 +132,7 @@
}
-@given(content(), new_person(), new_swh_date())
+@given(new_person(), new_swh_date())
def test_api_revision_directory_ok_returns_content(
api_client, archive_data, content, person, date
):
diff --git a/swh/web/tests/browse/views/test_content.py b/swh/web/tests/browse/views/test_content.py
--- a/swh/web/tests/browse/views/test_content.py
+++ b/swh/web/tests/browse/views/test_content.py
@@ -21,14 +21,6 @@
from swh.web.common.utils import gen_path_info, reverse
from swh.web.tests.django_asserts import assert_contains, assert_not_contains
from swh.web.tests.strategies import (
- content,
- content_application_no_highlight,
- content_image_type,
- content_text,
- content_text_no_highlight,
- content_text_non_utf8,
- content_unsupported_image_type_rendering,
- content_utf8_detected_as_binary,
invalid_sha1,
origin_with_multiple_visits,
unknown_content,
@@ -36,23 +28,24 @@
from swh.web.tests.utils import check_html_get_response, check_http_get_response
-@given(content_text())
-def test_content_view_text(client, archive_data, content):
- sha1_git = content["sha1_git"]
+def test_content_view_text(client, archive_data, content_text):
+ sha1_git = content_text["sha1_git"]
url = reverse(
"browse-content",
- url_args={"query_string": content["sha1"]},
- query_params={"path": content["path"]},
+ url_args={"query_string": content_text["sha1"]},
+ query_params={"path": content_text["path"]},
)
- url_raw = reverse("browse-content-raw", url_args={"query_string": content["sha1"]})
+ url_raw = reverse(
+ "browse-content-raw", url_args={"query_string": content_text["sha1"]}
+ )
resp = check_html_get_response(
client, url, status_code=200, template_used="browse/content.html"
)
- content_display = _process_content_for_display(archive_data, content)
+ content_display = _process_content_for_display(archive_data, content_text)
mimetype = content_display["mimetype"]
if mimetype.startswith("text/"):
@@ -67,9 +60,10 @@
assert_not_contains(resp, "swh-metadata-popover")
-@given(content_application_no_highlight(), content_text_no_highlight())
-def test_content_view_no_highlight(client, archive_data, content_app, content_text):
- for content_ in (content_app, content_text):
+def test_content_view_no_highlight(
+ client, archive_data, content_application_no_highlight, content_text_no_highlight
+):
+ for content_ in (content_application_no_highlight, content_text_no_highlight):
content = content_
sha1_git = content["sha1_git"]
@@ -96,17 +90,18 @@
assert_contains(resp, swh_cnt_id_url)
-@given(content_text_non_utf8())
-def test_content_view_no_utf8_text(client, archive_data, content):
- sha1_git = content["sha1_git"]
+def test_content_view_no_utf8_text(client, archive_data, content_text_non_utf8):
+ sha1_git = content_text_non_utf8["sha1_git"]
- url = reverse("browse-content", url_args={"query_string": content["sha1"]})
+ url = reverse(
+ "browse-content", url_args={"query_string": content_text_non_utf8["sha1"]}
+ )
resp = check_html_get_response(
client, url, status_code=200, template_used="browse/content.html"
)
- content_display = _process_content_for_display(archive_data, content)
+ content_display = _process_content_for_display(archive_data, content_text_non_utf8)
swh_cnt_id = gen_swhid(ObjectType.CONTENT, sha1_git)
swh_cnt_id_url = reverse("browse-swhid", url_args={"swhid": swh_cnt_id})
@@ -114,33 +109,40 @@
assert_contains(resp, escape(content_display["content_data"]))
-@given(content_image_type())
-def test_content_view_image(client, archive_data, content):
- url = reverse("browse-content", url_args={"query_string": content["sha1"]})
+def test_content_view_image(client, archive_data, content_image_type):
+ url = reverse(
+ "browse-content", url_args={"query_string": content_image_type["sha1"]}
+ )
- url_raw = reverse("browse-content-raw", url_args={"query_string": content["sha1"]})
+ url_raw = reverse(
+ "browse-content-raw", url_args={"query_string": content_image_type["sha1"]}
+ )
resp = check_html_get_response(
client, url, status_code=200, template_used="browse/content.html"
)
- content_display = _process_content_for_display(archive_data, content)
+ content_display = _process_content_for_display(archive_data, content_image_type)
mimetype = content_display["mimetype"]
content_data = content_display["content_data"]
assert_contains(resp, '<img src="data:%s;base64,%s"/>' % (mimetype, content_data))
assert_contains(resp, url_raw)
-@given(content_unsupported_image_type_rendering())
-def test_content_view_image_no_rendering(client, archive_data, content):
- url = reverse("browse-content", url_args={"query_string": content["sha1"]})
+def test_content_view_image_no_rendering(
+ client, archive_data, content_unsupported_image_type_rendering
+):
+ url = reverse(
+ "browse-content",
+ url_args={"query_string": content_unsupported_image_type_rendering["sha1"]},
+ )
resp = check_html_get_response(
client, url, status_code=200, template_used="browse/content.html"
)
- mimetype = content["mimetype"]
- encoding = content["encoding"]
+ mimetype = content_unsupported_image_type_rendering["mimetype"]
+ encoding = content_unsupported_image_type_rendering["encoding"]
assert_contains(
resp,
(
@@ -150,13 +152,12 @@
)
-@given(content_text())
-def test_content_view_text_with_path(client, archive_data, content):
- path = content["path"]
+def test_content_view_text_with_path(client, archive_data, content_text):
+ path = content_text["path"]
url = reverse(
"browse-content",
- url_args={"query_string": content["sha1"]},
+ url_args={"query_string": content_text["sha1"]},
query_params={"path": path},
)
@@ -166,11 +167,11 @@
assert_contains(resp, '<nav class="bread-crumbs')
- content_display = _process_content_for_display(archive_data, content)
+ content_display = _process_content_for_display(archive_data, content_text)
mimetype = content_display["mimetype"]
if mimetype.startswith("text/"):
- hljs_language = content["hljs_language"]
+ hljs_language = content_text["hljs_language"]
assert_contains(resp, '<code class="%s">' % hljs_language)
assert_contains(resp, escape(content_display["content_data"]))
@@ -186,7 +187,7 @@
}
swh_cnt_id = gen_swhid(
- ObjectType.CONTENT, content["sha1_git"], metadata=swhid_context
+ ObjectType.CONTENT, content_text["sha1_git"], metadata=swhid_context
)
swh_cnt_id_url = reverse("browse-swhid", url_args={"swhid": swh_cnt_id})
assert_contains(resp, swh_cnt_id)
@@ -214,14 +215,14 @@
url_raw = reverse(
"browse-content-raw",
- url_args={"query_string": content["sha1"]},
+ url_args={"query_string": content_text["sha1"]},
query_params={"filename": filename},
)
assert_contains(resp, url_raw)
url = reverse(
"browse-content",
- url_args={"query_string": content["sha1"]},
+ url_args={"query_string": content_text["sha1"]},
query_params={"path": filename},
)
@@ -234,7 +235,7 @@
invalid_path = "%s/foo/bar/baz" % root_dir_sha1
url = reverse(
"browse-content",
- url_args={"query_string": content["sha1"]},
+ url_args={"query_string": content_text["sha1"]},
query_params={"path": invalid_path},
)
@@ -243,25 +244,26 @@
)
-@given(content_text())
-def test_content_raw_text(client, archive_data, content):
- url = reverse("browse-content-raw", url_args={"query_string": content["sha1"]})
+def test_content_raw_text(client, archive_data, content_text):
+ url = reverse("browse-content-raw", url_args={"query_string": content_text["sha1"]})
resp = check_http_get_response(
client, url, status_code=200, content_type="text/plain"
)
- content_data = archive_data.content_get_data(content["sha1"])["data"]
+ content_data = archive_data.content_get_data(content_text["sha1"])["data"]
assert resp["Content-Type"] == "text/plain"
- assert resp["Content-disposition"] == ("filename=%s_%s" % ("sha1", content["sha1"]))
+ assert resp["Content-disposition"] == (
+ "filename=%s_%s" % ("sha1", content_text["sha1"])
+ )
assert resp.content == content_data
- filename = content["path"].split("/")[-1]
+ filename = content_text["path"].split("/")[-1]
url = reverse(
"browse-content-raw",
- url_args={"query_string": content["sha1"]},
+ url_args={"query_string": content_text["sha1"]},
query_params={"filename": filename},
)
@@ -274,38 +276,40 @@
assert resp.content == content_data
-@given(content_text_non_utf8())
-def test_content_raw_no_utf8_text(client, content):
- url = reverse("browse-content-raw", url_args={"query_string": content["sha1"]})
+def test_content_raw_no_utf8_text(client, content_text_non_utf8):
+ url = reverse(
+ "browse-content-raw", url_args={"query_string": content_text_non_utf8["sha1"]}
+ )
resp = check_http_get_response(
client, url, status_code=200, content_type="text/plain"
)
_, encoding = get_mimetype_and_encoding_for_content(resp.content)
- assert encoding == content["encoding"]
+ assert encoding == content_text_non_utf8["encoding"]
-@given(content_image_type())
-def test_content_raw_bin(client, archive_data, content):
- url = reverse("browse-content-raw", url_args={"query_string": content["sha1"]})
+def test_content_raw_bin(client, archive_data, content_image_type):
+ url = reverse(
+ "browse-content-raw", url_args={"query_string": content_image_type["sha1"]}
+ )
resp = check_http_get_response(
client, url, status_code=200, content_type="application/octet-stream"
)
- filename = content["path"].split("/")[-1]
- content_data = archive_data.content_get_data(content["sha1"])["data"]
+ filename = content_image_type["path"].split("/")[-1]
+ content_data = archive_data.content_get_data(content_image_type["sha1"])["data"]
assert resp["Content-Type"] == "application/octet-stream"
assert resp["Content-disposition"] == "attachment; filename=%s_%s" % (
"sha1",
- content["sha1"],
+ content_image_type["sha1"],
)
assert resp.content == content_data
url = reverse(
"browse-content-raw",
- url_args={"query_string": content["sha1"]},
+ url_args={"query_string": content_image_type["sha1"]},
query_params={"filename": filename},
)
@@ -329,7 +333,6 @@
)
-@given(content())
def test_content_bytes_missing(client, archive_data, mocker, content):
mock_archive = mocker.patch("swh.web.browse.utils.archive")
content_data = archive_data.content_get(content["sha1"])
@@ -387,7 +390,6 @@
assert_contains(resp, url_raw)
-@given(content())
def test_content_uppercase(client, content):
url = reverse(
"browse-content-uppercase-checksum",
@@ -401,15 +403,21 @@
assert resp["location"] == redirect_url
-@given(content_utf8_detected_as_binary())
-def test_content_utf8_detected_as_binary_display(client, archive_data, content):
- url = reverse("browse-content", url_args={"query_string": content["sha1"]})
+def test_content_utf8_detected_as_binary_display(
+ client, archive_data, content_utf8_detected_as_binary
+):
+ url = reverse(
+ "browse-content",
+ url_args={"query_string": content_utf8_detected_as_binary["sha1"]},
+ )
resp = check_html_get_response(
client, url, status_code=200, template_used="browse/content.html"
)
- content_display = _process_content_for_display(archive_data, content)
+ content_display = _process_content_for_display(
+ archive_data, content_utf8_detected_as_binary
+ )
assert_contains(resp, escape(content_display["content_data"]))
diff --git a/swh/web/tests/browse/views/test_identifiers.py b/swh/web/tests/browse/views/test_identifiers.py
--- a/swh/web/tests/browse/views/test_identifiers.py
+++ b/swh/web/tests/browse/views/test_identifiers.py
@@ -13,18 +13,10 @@
from swh.web.common.identifiers import gen_swhid
from swh.web.common.utils import reverse
from swh.web.tests.django_asserts import assert_contains
-from swh.web.tests.strategies import (
- content,
- directory,
- origin,
- release,
- revision,
- snapshot,
-)
+from swh.web.tests.strategies import directory, origin, release, revision, snapshot
from swh.web.tests.utils import check_html_get_response
-@given(content())
def test_content_id_browse(client, content):
cnt_sha1_git = content["sha1_git"]
swhid = gen_swhid(ObjectType.CONTENT, cnt_sha1_git)
@@ -149,7 +141,6 @@
check_html_get_response(client, url, status_code=400)
-@given(content())
def test_content_id_optional_parts_browse(client, archive_data, content):
cnt_sha1_git = content["sha1_git"]
origin_url = "https://github.com/user/repo"
diff --git a/swh/web/tests/common/test_archive.py b/swh/web/tests/common/test_archive.py
--- a/swh/web/tests/common/test_archive.py
+++ b/swh/web/tests/common/test_archive.py
@@ -31,11 +31,7 @@
from swh.web.tests.data import random_content, random_sha1
from swh.web.tests.strategies import (
ancestor_revisions,
- content,
- contents,
- contents_with_ctags,
directory,
- empty_content,
empty_directory,
invalid_sha1,
new_origin,
@@ -59,7 +55,6 @@
)
-@given(contents())
def test_lookup_multiple_hashes_all_present(contents):
input_data = []
expected_output = []
@@ -70,7 +65,7 @@
assert archive.lookup_multiple_hashes(input_data) == expected_output
-@given(contents(), unknown_contents())
+@given(unknown_contents())
def test_lookup_multiple_hashes_some_missing(contents, unknown_contents):
input_contents = list(itertools.chain(contents, unknown_contents))
random.shuffle(input_contents)
@@ -92,7 +87,6 @@
assert actual_lookup == {"found": None, "algo": "sha1_git"}
-@given(content())
def test_lookup_hash_exist(archive_data, content):
actual_lookup = archive.lookup_hash("sha1:%s" % content["sha1"])
@@ -109,7 +103,6 @@
assert {"found": False} == actual_lookup
-@given(content())
def test_search_hash_exist(content):
actual_lookup = archive.search_hash("sha1:%s" % content["sha1"])
@@ -119,7 +112,6 @@
@pytest.mark.skipif(
ctags_json_missing, reason="requires ctags with json output support"
)
-@given(contents_with_ctags())
def test_lookup_content_ctags(indexer_data, contents_with_ctags):
content_sha1 = random.choice(contents_with_ctags["sha1s"])
indexer_data.content_add_ctags(content_sha1)
@@ -142,7 +134,6 @@
assert actual_ctags == []
-@given(content())
def test_lookup_content_filetype(indexer_data, content):
indexer_data.content_add_mimetype(content["sha1"])
actual_filetype = archive.lookup_content_filetype(content["sha1"])
@@ -151,7 +142,6 @@
assert actual_filetype == expected_filetype
-@given(contents_with_ctags())
def test_lookup_expression(indexer_data, contents_with_ctags):
per_page = 10
expected_ctags = []
@@ -187,7 +177,6 @@
@pytest.mark.skipif(fossology_missing, reason="requires fossology-nomossa installed")
-@given(content())
def test_lookup_content_license(indexer_data, content):
indexer_data.content_add_license(content["sha1"])
actual_license = archive.lookup_content_license(content["sha1"])
@@ -627,7 +616,6 @@
)
-@given(content())
def test_lookup_content_raw(archive_data, content):
actual_content = archive.lookup_content_raw("sha256:%s" % content["sha256"])
@@ -636,8 +624,7 @@
assert actual_content == expected_content
-@given(empty_content())
-def test_lookup_empty_content_raw(archive_data, empty_content):
+def test_lookup_empty_content_raw(empty_content):
content_raw = archive.lookup_content_raw(f"sha1_git:{empty_content['sha1_git']}")
assert content_raw["data"] == b""
@@ -654,7 +641,6 @@
)
-@given(content())
def test_lookup_content_with_sha1(archive_data, content):
actual_content = archive.lookup_content(f"sha1:{content['sha1']}")
@@ -663,7 +649,6 @@
assert actual_content == expected_content
-@given(content())
def test_lookup_content_with_sha256(archive_data, content):
actual_content = archive.lookup_content(f"sha256:{content['sha256']}")
@@ -850,7 +835,7 @@
)
-@given(content(), directory(), release(), revision(), snapshot())
+@given(directory(), release(), revision(), snapshot())
def test_lookup_known_objects(
archive_data, content, directory, release, revision, snapshot
):
@@ -955,8 +940,8 @@
}
-@given(content(), directory())
-def test_lookup_missing_hashes_some_present(archive_data, content, directory):
+@given(directory())
+def test_lookup_missing_hashes_some_present(content, directory):
missing_rev = random_sha1()
missing_rel = random_sha1()
missing_snp = random_sha1()
diff --git a/swh/web/tests/common/test_identifiers.py b/swh/web/tests/common/test_identifiers.py
--- a/swh/web/tests/common/test_identifiers.py
+++ b/swh/web/tests/common/test_identifiers.py
@@ -26,7 +26,6 @@
from swh.web.common.utils import reverse
from swh.web.tests.data import random_sha1
from swh.web.tests.strategies import (
- content,
directory,
directory_with_files,
directory_with_subdirs,
@@ -38,7 +37,6 @@
)
-@given(content())
def test_gen_swhid(content):
swh_object_type = ObjectType.CONTENT
sha1_git = content["sha1_git"]
@@ -74,7 +72,7 @@
assert e.match("Invalid swh object type")
-@given(content(), directory(), release(), revision(), snapshot())
+@given(directory(), release(), revision(), snapshot())
def test_resolve_swhid_legacy(content, directory, release, revision, snapshot):
for obj_type, obj_id in (
(ObjectType.CONTENT, content["sha1_git"]),
@@ -111,7 +109,7 @@
resolve_swhid(f"swh:1:ori:{random_sha1()}")
-@given(content(), directory(), release(), revision(), snapshot())
+@given(directory(), release(), revision(), snapshot())
def test_get_swhid(content, directory, release, revision, snapshot):
for obj_type, obj_id in (
(ObjectType.CONTENT, content["sha1_git"]),
@@ -130,7 +128,7 @@
get_swhid("foo")
-@given(content(), directory(), release(), revision(), snapshot())
+@given(directory(), release(), revision(), snapshot())
def test_group_swhids(content, directory, release, revision, snapshot):
swhids = []
expected = {}
diff --git a/swh/web/tests/conftest.py b/swh/web/tests/conftest.py
--- a/swh/web/tests/conftest.py
+++ b/swh/web/tests/conftest.py
@@ -6,6 +6,7 @@
from datetime import timedelta
import json
import os
+import random
import shutil
from subprocess import PIPE, run
import sys
@@ -20,7 +21,14 @@
from django.test.utils import setup_databases # type: ignore
from rest_framework.test import APIClient, APIRequestFactory
-from swh.model.hashutil import ALGORITHMS, hash_to_bytes
+from swh.model.hashutil import (
+ ALGORITHMS,
+ DEFAULT_ALGORITHMS,
+ hash_to_bytes,
+ hash_to_hex,
+)
+from swh.model.model import Content
+from swh.model.swhids import ObjectType
from swh.scheduler.tests.common import TASK_TYPES
from swh.storage.algos.origin import origin_get_latest_visit_status
from swh.storage.algos.snapshot import snapshot_get_all_branches, snapshot_get_latest
@@ -28,6 +36,7 @@
from swh.web.common import converters
from swh.web.common.origin_save import get_scheduler_load_task_types
from swh.web.common.typing import OriginVisitInfo
+from swh.web.common.utils import browsers_supported_image_mimes
from swh.web.config import get_config
from swh.web.tests.data import get_tests_data, override_storages
@@ -155,6 +164,197 @@
return data
+def _known_swh_objects(tests_data, object_type):
+ return tests_data[object_type]
+
+
+@pytest.fixture(scope="function")
+def content(tests_data):
+ """Fixture returning a random content ingested into the test archive.
+ """
+ return random.choice(_known_swh_objects(tests_data, "contents"))
+
+
+@pytest.fixture(scope="function")
+def contents(tests_data):
+ """Fixture returning random contents ingested into the test archive.
+ """
+ return random.choices(
+ _known_swh_objects(tests_data, "contents"), k=random.randint(2, 8)
+ )
+
+
+@pytest.fixture(scope="function")
+def empty_content():
+ """Fixture returning the empty content ingested into the test archive.
+ """
+ empty_content = Content.from_data(data=b"").to_dict()
+ for algo in DEFAULT_ALGORITHMS:
+ empty_content[algo] = hash_to_hex(empty_content[algo])
+ return empty_content
+
+
+@pytest.fixture(scope="function")
+def content_text(tests_data):
+ """
+ Fixture returning a random textual content ingested into the test archive.
+ """
+ return random.choice(
+ list(
+ filter(
+ lambda c: c["mimetype"].startswith("text/"),
+ _known_swh_objects(tests_data, "contents"),
+ )
+ )
+ )
+
+
+@pytest.fixture(scope="function")
+def content_text_non_utf8(tests_data):
+ """Fixture returning a random textual content not encoded to UTF-8 ingested
+ into the test archive.
+ """
+ return random.choice(
+ list(
+ filter(
+ lambda c: c["mimetype"].startswith("text/")
+ and c["encoding"] not in ("utf-8", "us-ascii"),
+ _known_swh_objects(tests_data, "contents"),
+ )
+ )
+ )
+
+
+@pytest.fixture(scope="function")
+def content_application_no_highlight(tests_data):
+ """Fixture returning a random textual content with mimetype
+ starting with application/ and no detected programming language to
+ highlight ingested into the test archive.
+ """
+ return random.choice(
+ list(
+ filter(
+ lambda c: c["mimetype"].startswith("application/")
+ and c["encoding"] != "binary"
+ and c["hljs_language"] == "nohighlight",
+ _known_swh_objects(tests_data, "contents"),
+ )
+ )
+ )
+
+
+@pytest.fixture(scope="function")
+def content_text_no_highlight(tests_data):
+ """Fixture returning a random textual content with no detected
+ programming language to highlight ingested into the test archive.
+ """
+ return random.choice(
+ list(
+ filter(
+ lambda c: c["mimetype"].startswith("text/")
+ and c["hljs_language"] == "nohighlight",
+ _known_swh_objects(tests_data, "contents"),
+ )
+ )
+ )
+
+
+@pytest.fixture(scope="function")
+def content_image_type(tests_data):
+ """Fixture returning a random image content ingested into the test archive.
+ """
+ return random.choice(
+ list(
+ filter(
+ lambda c: c["mimetype"] in browsers_supported_image_mimes,
+ _known_swh_objects(tests_data, "contents"),
+ )
+ )
+ )
+
+
+@pytest.fixture(scope="function")
+def content_unsupported_image_type_rendering(tests_data):
+ """Fixture returning a random image content ingested into the test archive that
+    cannot be rendered by browsers.
+ """
+ return random.choice(
+ list(
+ filter(
+ lambda c: c["mimetype"].startswith("image/")
+ and c["mimetype"] not in browsers_supported_image_mimes,
+ _known_swh_objects(tests_data, "contents"),
+ )
+ )
+ )
+
+
+@pytest.fixture(scope="function")
+def content_utf8_detected_as_binary(tests_data):
+ """Fixture returning a random textual content detected as binary
+ by libmagic while they are valid UTF-8 encoded files.
+ """
+
+ def utf8_binary_detected(content):
+ if content["encoding"] != "binary":
+ return False
+ try:
+ content["raw_data"].decode("utf-8")
+ except Exception:
+ return False
+ else:
+ return True
+
+ return random.choice(
+ list(filter(utf8_binary_detected, _known_swh_objects(tests_data, "contents")))
+ )
+
+
+@pytest.fixture(scope="function")
+def contents_with_ctags():
+ """
+ Fixture returning contents ingested into the test archive.
+    Those contents are ctags compatible, that is, running ctags on them yields results.
+ """
+ return {
+ "sha1s": [
+ "0ab37c02043ebff946c1937523f60aadd0844351",
+ "15554cf7608dde6bfefac7e3d525596343a85b6f",
+ "2ce837f1489bdfb8faf3ebcc7e72421b5bea83bd",
+ "30acd0b47fc25e159e27a980102ddb1c4bea0b95",
+ "4f81f05aaea3efb981f9d90144f746d6b682285b",
+ "5153aa4b6e4455a62525bc4de38ed0ff6e7dd682",
+ "59d08bafa6a749110dfb65ba43a61963d5a5bf9f",
+ "7568285b2d7f31ae483ae71617bd3db873deaa2c",
+ "7ed3ee8e94ac52ba983dd7690bdc9ab7618247b4",
+ "8ed7ef2e7ff9ed845e10259d08e4145f1b3b5b03",
+ "9b3557f1ab4111c8607a4f2ea3c1e53c6992916c",
+ "9c20da07ed14dc4fcd3ca2b055af99b2598d8bdd",
+ "c20ceebd6ec6f7a19b5c3aebc512a12fbdc9234b",
+ "e89e55a12def4cd54d5bff58378a3b5119878eb7",
+ "e8c0654fe2d75ecd7e0b01bee8a8fc60a130097e",
+ "eb6595e559a1d34a2b41e8d4835e0e4f98a5d2b5",
+ ],
+ "symbol_name": "ABS",
+ }
+
+
+@pytest.fixture(scope="function")
+def content_swhid(tests_data):
+ """
+ Fixture returning a qualified SWHID for a random content object
+ ingested into the test archive.
+ """
+ return random.choice(
+ list(
+ filter(
+ lambda swhid: swhid.object_type == ObjectType.CONTENT,
+ _known_swh_objects(tests_data, "swhids"),
+ )
+ )
+ )
+
+
# Fixture to manipulate data from a sample archive used in the tests
@pytest.fixture(scope="function")
def archive_data(tests_data):
diff --git a/swh/web/tests/misc/test_badges.py b/swh/web/tests/misc/test_badges.py
--- a/swh/web/tests/misc/test_badges.py
+++ b/swh/web/tests/misc/test_badges.py
@@ -14,7 +14,6 @@
from swh.web.misc.badges import _badge_config, _get_logo_data
from swh.web.tests.django_asserts import assert_contains
from swh.web.tests.strategies import (
- content,
directory,
invalid_sha1,
new_origin,
@@ -31,7 +30,6 @@
from swh.web.tests.utils import check_http_get_response
-@given(content())
def test_content_badge(client, content):
_test_badge_endpoints(client, "content", content["sha1_git"])
diff --git a/swh/web/tests/misc/test_iframe.py b/swh/web/tests/misc/test_iframe.py
--- a/swh/web/tests/misc/test_iframe.py
+++ b/swh/web/tests/misc/test_iframe.py
@@ -8,16 +8,10 @@
from swh.model.hashutil import hash_to_bytes
from swh.model.swhids import CoreSWHID, ObjectType
from swh.web.common.utils import reverse
-from swh.web.tests.strategies import (
- content_swhid,
- directory_swhid,
- revision_swhid,
- unknown_directory,
-)
+from swh.web.tests.strategies import directory_swhid, revision_swhid, unknown_directory
from swh.web.tests.utils import check_html_get_response
-@given(content_swhid())
def test_content_swhid_iframe(client, content_swhid):
url = reverse("swhid-iframe", url_args={"swhid": str(content_swhid)})
check_html_get_response(
@@ -25,7 +19,6 @@
)
-@given(content_swhid())
def test_content_core_swhid_iframe(client, content_swhid):
content_core_swhid = CoreSWHID(
object_type=content_swhid.object_type, object_id=content_swhid.object_id
@@ -74,7 +67,6 @@
)
-@given(content_swhid())
def test_swhid_iframe_unknown_error(client, mocker, content_swhid):
mocker.patch("swh.web.misc.iframe.get_swhid").side_effect = Exception("Error")
url = reverse("swhid-iframe", url_args={"swhid": str(content_swhid)})
diff --git a/swh/web/tests/strategies.py b/swh/web/tests/strategies.py
--- a/swh/web/tests/strategies.py
+++ b/swh/web/tests/strategies.py
@@ -20,11 +20,10 @@
text,
)
-from swh.model.hashutil import DEFAULT_ALGORITHMS, hash_to_bytes, hash_to_hex
+from swh.model.hashutil import hash_to_bytes, hash_to_hex
from swh.model.hypothesis_strategies import origins as new_origin_strategy
from swh.model.hypothesis_strategies import snapshots as new_snapshot
from swh.model.model import (
- Content,
Directory,
Person,
Revision,
@@ -34,7 +33,6 @@
from swh.model.swhids import ObjectType
from swh.storage.algos.revisions_walker import get_revisions_walker
from swh.storage.algos.snapshot import snapshot_get_latest
-from swh.web.common.utils import browsers_supported_image_mimes
from swh.web.tests.data import get_tests_data
# Module dedicated to the generation of input data for tests through
@@ -76,114 +74,6 @@
return binary(min_size=32, max_size=32).map(hash_to_hex)
-def content():
- """
- Hypothesis strategy returning a random content ingested
- into the test archive.
- """
- return _known_swh_object("contents")
-
-
-def contents():
- """
- Hypothesis strategy returning random contents ingested
- into the test archive.
- """
- return lists(content(), min_size=2, max_size=8)
-
-
-def empty_content():
- """
- Hypothesis strategy returning the empty content ingested
- into the test archive.
- """
- empty_content = Content.from_data(data=b"").to_dict()
- for algo in DEFAULT_ALGORITHMS:
- empty_content[algo] = hash_to_hex(empty_content[algo])
- return just(empty_content)
-
-
-def content_text():
- """
- Hypothesis strategy returning random textual contents ingested
- into the test archive.
- """
- return content().filter(lambda c: c["mimetype"].startswith("text/"))
-
-
-def content_text_non_utf8():
- """
- Hypothesis strategy returning random textual contents not encoded
- to UTF-8 ingested into the test archive.
- """
- return content().filter(
- lambda c: c["mimetype"].startswith("text/")
- and c["encoding"] not in ("utf-8", "us-ascii")
- )
-
-
-def content_application_no_highlight():
- """
- Hypothesis strategy returning random textual contents with mimetype
- starting with application/ and no detected programming language to
- highlight ingested into the test archive.
- """
- return content().filter(
- lambda c: c["mimetype"].startswith("application/")
- and c["encoding"] != "binary"
- and c["hljs_language"] == "nohighlight"
- )
-
-
-def content_text_no_highlight():
- """
- Hypothesis strategy returning random textual contents with no detected
- programming language to highlight ingested into the test archive.
- """
- return content().filter(
- lambda c: c["mimetype"].startswith("text/")
- and c["hljs_language"] == "nohighlight"
- )
-
-
-def content_image_type():
- """
- Hypothesis strategy returning random image contents ingested
- into the test archive.
- """
- return content().filter(lambda c: c["mimetype"] in browsers_supported_image_mimes)
-
-
-def content_unsupported_image_type_rendering():
- """
- Hypothesis strategy returning random image contents ingested
- into the test archive that can not be rendered by browsers.
- """
- return content().filter(
- lambda c: c["mimetype"].startswith("image/")
- and c["mimetype"] not in browsers_supported_image_mimes
- )
-
-
-def content_utf8_detected_as_binary():
- """
- Hypothesis strategy returning random textual contents detected as binary
- by libmagic while they are valid UTF-8 encoded files.
- """
-
- def utf8_binary_detected(content):
- if content["encoding"] != "binary":
- return False
- try:
- content["raw_data"].decode("utf-8")
- except Exception:
- return False
- else:
- return True
-
- return content().filter(utf8_binary_detected)
-
-
@composite
def new_content(draw):
blake2s256_hex = draw(sha256())
@@ -604,37 +494,6 @@
# that can not be generated and thus are hardcoded.
-def contents_with_ctags():
- """
- Hypothesis strategy returning contents ingested into the test
- archive. Those contents are ctags compatible, that is running
- ctags on those lay results.
- """
- return just(
- {
- "sha1s": [
- "0ab37c02043ebff946c1937523f60aadd0844351",
- "15554cf7608dde6bfefac7e3d525596343a85b6f",
- "2ce837f1489bdfb8faf3ebcc7e72421b5bea83bd",
- "30acd0b47fc25e159e27a980102ddb1c4bea0b95",
- "4f81f05aaea3efb981f9d90144f746d6b682285b",
- "5153aa4b6e4455a62525bc4de38ed0ff6e7dd682",
- "59d08bafa6a749110dfb65ba43a61963d5a5bf9f",
- "7568285b2d7f31ae483ae71617bd3db873deaa2c",
- "7ed3ee8e94ac52ba983dd7690bdc9ab7618247b4",
- "8ed7ef2e7ff9ed845e10259d08e4145f1b3b5b03",
- "9b3557f1ab4111c8607a4f2ea3c1e53c6992916c",
- "9c20da07ed14dc4fcd3ca2b055af99b2598d8bdd",
- "c20ceebd6ec6f7a19b5c3aebc512a12fbdc9234b",
- "e89e55a12def4cd54d5bff58378a3b5119878eb7",
- "e8c0654fe2d75ecd7e0b01bee8a8fc60a130097e",
- "eb6595e559a1d34a2b41e8d4835e0e4f98a5d2b5",
- ],
- "symbol_name": "ABS",
- }
- )
-
-
def revision_with_submodules():
"""
Hypothesis strategy returning a revision that is known to
@@ -657,14 +516,6 @@
return _known_swh_object("swhids")
-def content_swhid():
- """
- Hypothesis strategy returning a qualified SWHID for a content object
- ingested into the test archive.
- """
- return swhid().filter(lambda swhid: swhid.object_type == ObjectType.CONTENT)
-
-
def directory_swhid():
"""
Hypothesis strategy returning a qualified SWHID for a directory object