diff --git a/docs/developers-info.rst b/docs/developers-info.rst --- a/docs/developers-info.rst +++ b/docs/developers-info.rst @@ -62,14 +62,14 @@ """""""""""""""" * :mod:`swh.web.common.converters`: conversion module used to transform raw data - to serializable ones. It is used by :mod:`swh.web.common.service`: to convert data + to serializable ones. It is used by :mod:`swh.web.common.archive`: to convert data before transmitting then to Django views. * :mod:`swh.web.common.exc`: module defining exceptions used in the web applications. * :mod:`swh.web.common.highlightjs`: utility module to ease the use of the highlightjs_ library in produced Django views. * :mod:`swh.web.common.query`: Utilities to parse data from HTTP endpoints. It is used - by :mod:`swh.web.common.service`. - * :mod:`swh.web.common.service`: Orchestration layer used by views module + by :mod:`swh.web.common.archive`. + * :mod:`swh.web.common.archive`: Orchestration layer used by views module in charge of communication with :mod:`swh.storage` to retrieve information and perform conversion for the upper layer. * :mod:`swh.web.common.swh_templatetags`: Custom Django template tags library for swh. 
diff --git a/swh/web/api/views/content.py b/swh/web/api/views/content.py --- a/swh/web/api/views/content.py +++ b/swh/web/api/views/content.py @@ -11,7 +11,7 @@ from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import NotFoundExc from swh.web.common.utils import reverse @@ -58,7 +58,7 @@ :swh_web_api:`content/sha1:dc2830a9e72f23c1dfebef4413003221baa5fb62/filetype/` """ return api_lookup( - service.lookup_content_filetype, + archive.lookup_content_filetype, q, notfound_msg="No filetype information found for content {}.".format(q), enrich_fn=utils.enrich_metadata_endpoint, @@ -109,7 +109,7 @@ :swh_web_api:`content/sha1:dc2830a9e72f23c1dfebef4413003221baa5fb62/language/` """ return api_lookup( - service.lookup_content_language, + archive.lookup_content_language, q, notfound_msg="No language information found for content {}.".format(q), enrich_fn=utils.enrich_metadata_endpoint, @@ -157,7 +157,7 @@ :swh_web_api:`content/sha1:dc2830a9e72f23c1dfebef4413003221baa5fb62/license/` """ return api_lookup( - service.lookup_content_license, + archive.lookup_content_license, q, notfound_msg="No license information found for content {}.".format(q), enrich_fn=utils.enrich_metadata_endpoint, @@ -173,7 +173,7 @@ symbols defined in a content object. """ return api_lookup( - service.lookup_content_ctags, + archive.lookup_content_ctags, q, notfound_msg="No ctags symbol found for content {}.".format(q), enrich_fn=utils.enrich_metadata_endpoint, @@ -218,7 +218,7 @@ def generate(content): yield content["data"] - content_raw = service.lookup_content_raw(q) + content_raw = archive.lookup_content_raw(q) if not content_raw: raise NotFoundExc("Content %s is not found." 
% q) @@ -245,7 +245,7 @@ per_page = int(request.query_params.get("per_page", "10")) def lookup_exp(exp, last_sha1=last_sha1, per_page=per_page): - exp = list(service.lookup_expression(exp, last_sha1, per_page)) + exp = list(archive.lookup_expression(exp, last_sha1, per_page)) return exp if exp else None symbols = api_lookup( @@ -334,7 +334,7 @@ queries.append({"filename": k, "sha1": v}) if queries: - lookup = service.lookup_multiple_hashes(queries) + lookup = archive.lookup_multiple_hashes(queries) result = [] nb_queries = len(queries) for el in lookup: @@ -401,7 +401,7 @@ :swh_web_api:`content/sha1_git:fe95a46679d128ff167b7c55df5d02356c5a1ae1/` """ return api_lookup( - service.lookup_content, + archive.lookup_content, q, notfound_msg="Content with {} not found.".format(q), enrich_fn=functools.partial(utils.enrich_content, query_string=q), diff --git a/swh/web/api/views/directory.py b/swh/web/api/views/directory.py --- a/swh/web/api/views/directory.py +++ b/swh/web/api/views/directory.py @@ -7,7 +7,7 @@ from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -from swh.web.common import service +from swh.web.common import archive @api_route( @@ -75,7 +75,7 @@ "Entry with path %s relative to directory " "with sha1_git %s not found." ) % (path, sha1_git) return api_lookup( - service.lookup_directory_with_path, + archive.lookup_directory_with_path, sha1_git, path, notfound_msg=error_msg_path, @@ -85,7 +85,7 @@ else: error_msg_nopath = "Directory with sha1_git %s not found." 
% sha1_git return api_lookup( - service.lookup_directory, + archive.lookup_directory, sha1_git, notfound_msg=error_msg_nopath, enrich_fn=utils.enrich_directory, diff --git a/swh/web/api/views/graph.py b/swh/web/api/views/graph.py --- a/swh/web/api/views/graph.py +++ b/swh/web/api/views/graph.py @@ -17,7 +17,7 @@ from swh.web.api.apidoc import api_doc from swh.web.api.apiurls import api_route from swh.web.api.renderers import PlainTextRenderer -from swh.web.common import service +from swh.web.common import archive from swh.web.config import get_config API_GRAPH_PERM = "swh.web.api.graph" @@ -33,7 +33,7 @@ return origin_urls[parsed_swhid.object_id] else: origin_info = list( - service.lookup_origins_by_sha1s([parsed_swhid.object_id]) + archive.lookup_origins_by_sha1s([parsed_swhid.object_id]) )[0] assert origin_info is not None origin_urls[parsed_swhid.object_id] = origin_info["url"] @@ -94,7 +94,7 @@ special user permission in order to be able to request it. :param string graph_query: query to forward to the Software Heritage Graph - service (see its `documentation + service (see its `documentation `_) :query boolean resolve_origins: extra parameter defined by that proxy enabling to resolve origin urls from their sha1 representations diff --git a/swh/web/api/views/identifiers.py b/swh/web/api/views/identifiers.py --- a/swh/web/api/views/identifiers.py +++ b/swh/web/api/views/identifiers.py @@ -5,7 +5,7 @@ from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import LargePayloadExc from swh.web.common.identifiers import get_swhid, group_swhids, resolve_swhid @@ -55,7 +55,7 @@ swhid_parsed = swhid_resolved["swhid_parsed"] object_type = swhid_parsed.object_type object_id = swhid_parsed.object_id - service.lookup_object(object_type, object_id) + archive.lookup_object(object_type, object_id) # id is well-formed and the pointed object
exists swhid_data = swhid_parsed.to_dict() swhid_data["browse_url"] = request.build_absolute_uri(swhid_resolved["browse_url"]) @@ -103,7 +103,7 @@ # group swhids by their type swhids_by_type = group_swhids(swhids) # search for hashes not present in the storage - missing_hashes = service.lookup_missing_hashes(swhids_by_type) + missing_hashes = archive.lookup_missing_hashes(swhids_by_type) for swhid in swhids: if swhid.object_id not in missing_hashes: diff --git a/swh/web/api/views/origin.py b/swh/web/api/views/origin.py --- a/swh/web/api/views/origin.py +++ b/swh/web/api/views/origin.py @@ -10,7 +10,7 @@ from swh.web.api.apiurls import api_route from swh.web.api.utils import enrich_origin, enrich_origin_visit from swh.web.api.views.utils import api_lookup -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import BadInputExc from swh.web.common.origin_visits import get_origin_visits from swh.web.common.utils import reverse @@ -89,7 +89,7 @@ page_token = request.query_params.get("page_token", None) limit = min(int(request.query_params.get("origin_count", "100")), 10000) - page_result = service.lookup_origins(page_token, limit) + page_result = archive.lookup_origins(page_token, limit) origins = [enrich_origin(o, request=request) for o in page_result.results] next_page_token = page_result.next_page_token @@ -133,7 +133,7 @@ error_msg = "Origin with url %s not found." 
% ori_dict["url"] return api_lookup( - service.lookup_origin, + archive.lookup_origin, ori_dict, notfound_msg=error_msg, enrich_fn=enrich_origin, @@ -188,7 +188,7 @@ with_visit = request.query_params.get("with_visit", "false") (results, page_token) = api_lookup( - service.search_origin, + archive.search_origin, url_pattern, limit, bool(strtobool(with_visit)), @@ -252,7 +252,7 @@ raise BadInputExc(content) results = api_lookup( - service.search_origin_metadata, fulltext, limit, request=request + archive.search_origin_metadata, fulltext, limit, request=request ) return { @@ -382,7 +382,7 @@ """ require_snapshot = request.query_params.get("require_snapshot", "false") return api_lookup( - service.lookup_origin_visit_latest, + archive.lookup_origin_visit_latest, origin_url, bool(strtobool(require_snapshot)), notfound_msg=("No visit for origin {} found".format(origin_url)), @@ -422,7 +422,7 @@ :swh_web_api:`origin/https://github.com/hylang/hy/visit/1/` """ return api_lookup( - service.lookup_origin_visit, + archive.lookup_origin_visit, origin_url, int(visit_id), notfound_msg=("No visit {} for origin {} found".format(visit_id, origin_url)), @@ -460,7 +460,7 @@ :swh_web_api:`origin/https://github.com/python/cpython/intrinsic-metadata` """ return api_lookup( - service.lookup_origin_intrinsic_metadata, + archive.lookup_origin_intrinsic_metadata, origin_url, notfound_msg=f"Origin with url {origin_url} not found", enrich_fn=enrich_origin, diff --git a/swh/web/api/views/release.py b/swh/web/api/views/release.py --- a/swh/web/api/views/release.py +++ b/swh/web/api/views/release.py @@ -7,7 +7,7 @@ from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -from swh.web.common import service +from swh.web.common import archive @api_route( @@ -52,7 +52,7 @@ """ error_msg = "Release with sha1_git %s not found." 
% sha1_git return api_lookup( - service.lookup_release, + archive.lookup_release, sha1_git, notfound_msg=error_msg, enrich_fn=utils.enrich_release, diff --git a/swh/web/api/views/revision.py b/swh/web/api/views/revision.py --- a/swh/web/api/views/revision.py +++ b/swh/web/api/views/revision.py @@ -9,7 +9,7 @@ from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -from swh.web.common import service +from swh.web.common import archive DOC_RETURN_REVISION = """ :>json object author: information about the author of the revision @@ -56,7 +56,7 @@ def enrich_directory_local(dir, context_url=request_path): return utils.enrich_directory(dir, context_url) - rev_id, result = service.lookup_directory_through_revision( + rev_id, result = archive.lookup_directory_through_revision( revision, path, limit=limit, with_data=with_data ) @@ -104,7 +104,7 @@ :swh_web_api:`revision/aafb16d69fd30ff58afdd69036a26047f3aebdc6/` """ return api_lookup( - service.lookup_revision, + archive.lookup_revision, sha1_git, notfound_msg="Revision with sha1_git {} not found.".format(sha1_git), enrich_fn=utils.enrich_revision, @@ -121,7 +121,7 @@ def api_revision_raw_message(request, sha1_git): """Return the raw data of the message of revision identified by sha1_git """ - raw = service.lookup_revision_message(sha1_git) + raw = archive.lookup_revision_message(sha1_git) response = HttpResponse(raw["message"], content_type="application/octet-stream") response["Content-disposition"] = "attachment;filename=rev_%s_raw" % sha1_git return response @@ -226,7 +226,7 @@ error_msg = "Revision with sha1_git %s not found." 
% sha1_git revisions = api_lookup( - service.lookup_revision_log, + archive.lookup_revision_log, sha1_git, limit, notfound_msg=error_msg, diff --git a/swh/web/api/views/snapshot.py b/swh/web/api/views/snapshot.py --- a/swh/web/api/views/snapshot.py +++ b/swh/web/api/views/snapshot.py @@ -7,7 +7,7 @@ from swh.web.api.apiurls import api_route from swh.web.api.utils import enrich_snapshot from swh.web.api.views.utils import api_lookup -from swh.web.common import service +from swh.web.common import archive from swh.web.common.utils import reverse from swh.web.config import get_config @@ -76,7 +76,7 @@ target_types = target_types.split(",") if target_types else None results = api_lookup( - service.lookup_snapshot, + archive.lookup_snapshot, snapshot_id, branches_from, branches_count, diff --git a/swh/web/api/views/stat.py b/swh/web/api/views/stat.py --- a/swh/web/api/views/stat.py +++ b/swh/web/api/views/stat.py @@ -5,7 +5,7 @@ from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route -from swh.web.common import service +from swh.web.common import archive @api_route(r"/stat/counters/", "api-1-stat-counters") @@ -47,4 +47,4 @@ :swh_web_api:`stat/counters/` """ - return service.stat_counters() + return archive.stat_counters() diff --git a/swh/web/api/views/vault.py b/swh/web/api/views/vault.py --- a/swh/web/api/views/vault.py +++ b/swh/web/api/views/vault.py @@ -11,7 +11,7 @@ from swh.web.api.apidoc import api_doc, format_docstring from swh.web.api.apiurls import api_route from swh.web.api.views.utils import api_lookup -from swh.web.common import query, service +from swh.web.common import archive, query from swh.web.common.utils import reverse @@ -21,7 +21,7 @@ object_name = obj_type.split("_")[0] if request.method == "GET": return api_lookup( - service.vault_progress, + archive.vault_progress, obj_type, obj_id, notfound_msg=( @@ -32,7 +32,7 @@ elif request.method == "POST": email = request.POST.get("email", 
request.GET.get("email", None)) return api_lookup( - service.vault_cook, + archive.vault_cook, obj_type, obj_id, email, @@ -135,7 +135,7 @@ dir_id, ["sha1"], "Only sha1_git is supported." ) res = api_lookup( - service.vault_fetch, + archive.vault_fetch, "directory", obj_id, notfound_msg="Cooked archive for directory '{}' not found.".format(dir_id), @@ -242,7 +242,7 @@ rev_id, ["sha1"], "Only sha1_git is supported." ) res = api_lookup( - service.vault_fetch, + archive.vault_fetch, "revision_gitfast", obj_id, notfound_msg="Cooked archive for revision '{}' not found.".format(rev_id), diff --git a/swh/web/browse/snapshot_context.py b/swh/web/browse/snapshot_context.py --- a/swh/web/browse/snapshot_context.py +++ b/swh/web/browse/snapshot_context.py @@ -38,7 +38,7 @@ prepare_content_for_display, request_content, ) -from swh.web.common import highlightjs, service +from swh.web.common import archive, highlightjs from swh.web.common.exc import BadInputExc, NotFoundExc, handle_view_exception from swh.web.common.identifiers import get_swhids_info from swh.web.common.origin_visits import get_origin_visit @@ -80,7 +80,7 @@ return branches[0] else: # case where a large branches list has been truncated - snp = service.lookup_snapshot( + snp = archive.lookup_snapshot( snapshot_id, branches_from=branch_name, branches_count=1, @@ -104,14 +104,14 @@ # case where a large branches list has been truncated try: # git origins have specific branches for releases - snp = service.lookup_snapshot( + snp = archive.lookup_snapshot( snapshot_id, branches_from=f"refs/tags/{release_name}", branches_count=1, target_types=["release"], ) except NotFoundExc: - snp = service.lookup_snapshot( + snp = archive.lookup_snapshot( snapshot_id, branches_from=release_name, branches_count=1, @@ -187,7 +187,7 @@ Args: snapshot: A dict describing a snapshot as returned for instance by - :func:`swh.web.common.service.lookup_snapshot` + :func:`swh.web.common.archive.lookup_snapshot` Returns: A tuple whose first 
member is the sorted list of branches @@ -246,7 +246,7 @@ url=None, ) - releases_info = service.lookup_release_multiple(release_to_branch.keys()) + releases_info = archive.lookup_release_multiple(release_to_branch.keys()) for release in releases_info: if release is None: continue @@ -256,7 +256,7 @@ if release["target_type"] == "revision": revision_to_release[release["target"]].update(branches_to_update) - revisions = service.lookup_revision_multiple( + revisions = archive.lookup_revision_multiple( set(revision_to_branch.keys()) | set(revision_to_release.keys()) ) @@ -272,7 +272,7 @@ if branch_target in branches: branches[branch_alias] = copy(branches[branch_target]) else: - snp = service.lookup_snapshot( + snp = archive.lookup_snapshot( snapshot["id"], branches_from=branch_target, branches_count=1 ) if snp and branch_target in snp["branches"]: @@ -284,10 +284,10 @@ target = snp["branches"][branch_target]["target"] if target_type == "revision": branches[branch_alias] = snp["branches"][branch_target] - revision = service.lookup_revision(target) + revision = archive.lookup_revision(target) _add_branch_info(branch_alias, revision) elif target_type == "release": - release = service.lookup_release(target) + release = archive.lookup_release(target) _add_release_info(branch_alias, release) if branch_alias in branches: @@ -333,7 +333,7 @@ snapshot_content_max_size = get_config()["snapshot_content_max_size"] if snapshot_id: - snapshot = service.lookup_snapshot( + snapshot = archive.lookup_snapshot( snapshot_id, branches_count=snapshot_content_max_size ) branches, releases = process_snapshot_branches(snapshot) @@ -442,7 +442,7 @@ elif snapshot_id is not None: query_params["snapshot"] = snapshot_id - origin_info = service.lookup_origin({"url": origin_url}) + origin_info = archive.lookup_origin({"url": origin_url}) visit_info = get_origin_visit(origin_info, timestamp, visit_id, snapshot_id) formatted_date = format_utc_iso_date(visit_info["date"]) @@ -492,7 +492,7 @@ releases = 
list(reversed(releases)) - snapshot_sizes = service.lookup_snapshot_sizes(snapshot_id) + snapshot_sizes = archive.lookup_snapshot_sizes(snapshot_id) is_empty = sum(snapshot_sizes.values()) == 0 @@ -515,7 +515,7 @@ query_params["path"] = path if snapshot_total_size and revision_id is not None: - revision = service.lookup_revision(revision_id) + revision = archive.lookup_revision(revision_id) root_directory = revision["directory"] branches.append( SnapshotBranchInfo( @@ -587,7 +587,7 @@ revision_info = None if revision_id: try: - revision_info = service.lookup_revision(revision_id) + revision_info = archive.lookup_revision(revision_id) except NotFoundExc: pass else: @@ -696,7 +696,7 @@ root_directory = snapshot_context["root_directory"] sha1_git = root_directory if root_directory and path: - dir_info = service.lookup_directory_with_path(root_directory, path) + dir_info = archive.lookup_directory_with_path(root_directory, path) sha1_git = dir_info["target"] dirs = [] @@ -783,7 +783,7 @@ revision_found = True if sha1_git is None and revision_id is not None: try: - service.lookup_revision(revision_id) + archive.lookup_revision(revision_id) except NotFoundExc: revision_found = False @@ -915,13 +915,13 @@ filename = split_path[-1] filepath = path[: -len(filename)] if root_directory: - content_info = service.lookup_directory_with_path(root_directory, path) + content_info = archive.lookup_directory_with_path(root_directory, path) sha1_git = content_info["target"] query_string = "sha1_git:" + sha1_git content_data = request_content(query_string, raise_if_unavailable=False) if filepath: - dir_info = service.lookup_directory_with_path(root_directory, filepath) + dir_info = archive.lookup_directory_with_path(root_directory, filepath) directory_id = dir_info["target"] else: directory_id = root_directory @@ -1102,7 +1102,7 @@ revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: - revs_walker = service.get_revisions_walker( + revs_walker = 
archive.get_revisions_walker( revs_ordering, revision_id, max_revs=offset + per_page + 1, @@ -1112,7 +1112,7 @@ revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] - revision_log = service.lookup_revision_multiple(revs) + revision_log = archive.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, @@ -1242,7 +1242,7 @@ else: browse_view_name = "browse-snapshot-directory" - snapshot = service.lookup_snapshot( + snapshot = archive.lookup_snapshot( snapshot_context["snapshot_id"], branches_from, PER_PAGE + 1, @@ -1351,7 +1351,7 @@ url_args = snapshot_context["url_args"] query_params = snapshot_context["query_params"] - snapshot = service.lookup_snapshot( + snapshot = archive.lookup_snapshot( snapshot_context["snapshot_id"], rel_from, PER_PAGE + 1, diff --git a/swh/web/browse/utils.py b/swh/web/browse/utils.py --- a/swh/web/browse/utils.py +++ b/swh/web/browse/utils.py @@ -15,7 +15,7 @@ from django.utils.html import escape from django.utils.safestring import mark_safe -from swh.web.common import highlightjs, service +from swh.web.common import archive, highlightjs from swh.web.common.exc import http_status_code_message from swh.web.common.utils import ( browsers_supported_image_mimes, @@ -49,7 +49,7 @@ if cache_entry: return cache_entry - entries = list(service.lookup_directory(sha1_git)) + entries = list(archive.lookup_directory(sha1_git)) for e in entries: e["perms"] = stat.filemode(e["perms"]) if e["type"] == "rev": @@ -149,14 +149,14 @@ Raises: NotFoundExc if the content is not found """ - content_data = service.lookup_content(query_string) + content_data = archive.lookup_content(query_string) filetype = None language = None # requests to the indexer db may fail so properly handle # those cases in order to avoid content display errors try: - filetype = service.lookup_content_filetype(query_string) - language = service.lookup_content_language(query_string) + filetype = 
archive.lookup_content_filetype(query_string) + language = archive.lookup_content_language(query_string) except Exception as exc: sentry_sdk.capture_exception(exc) mimetype = "unknown" @@ -176,7 +176,7 @@ if not max_size or content_data["length"] < max_size: try: - content_raw = service.lookup_content_raw(query_string) + content_raw = archive.lookup_content_raw(query_string) except Exception as exc: if raise_if_unavailable: raise exc diff --git a/swh/web/browse/views/content.py b/swh/web/browse/views/content.py --- a/swh/web/browse/views/content.py +++ b/swh/web/browse/views/content.py @@ -23,7 +23,7 @@ prepare_content_for_display, request_content, ) -from swh.web.common import highlightjs, query, service +from swh.web.common import archive, highlightjs, query from swh.web.common.exc import NotFoundExc, handle_view_exception from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import ContentMetadata, SWHObjectInfo @@ -289,7 +289,7 @@ if path and root_dir != path: try: - dir_info = service.lookup_directory_with_path(root_dir, path) + dir_info = archive.lookup_directory_with_path(root_dir, path) directory_id = dir_info["target"] except Exception as exc: return handle_view_exception(request, exc) diff --git a/swh/web/browse/views/directory.py b/swh/web/browse/views/directory.py --- a/swh/web/browse/views/directory.py +++ b/swh/web/browse/views/directory.py @@ -15,7 +15,7 @@ from swh.web.browse.browseurls import browse_route from swh.web.browse.snapshot_context import get_snapshot_context from swh.web.browse.utils import gen_link, get_directory_entries, get_readme_to_display -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import NotFoundExc, handle_view_exception from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import DirectoryMetadata, SWHObjectInfo @@ -26,7 +26,7 @@ root_sha1_git = sha1_git try: if path: - dir_info = 
service.lookup_directory_with_path(sha1_git, path) + dir_info = archive.lookup_directory_with_path(sha1_git, path) sha1_git = dir_info["target"] dirs, files = get_directory_entries(sha1_git) @@ -260,7 +260,7 @@ try: path = os.path.normpath(request.GET.get("path")) if not path.startswith("../"): - dir_info = service.lookup_directory_with_path(sha1_git, path) + dir_info = archive.lookup_directory_with_path(sha1_git, path) if dir_info["type"] == "file": sha1 = dir_info["checksums"]["sha1"] data_url = reverse( diff --git a/swh/web/browse/views/origin.py b/swh/web/browse/views/origin.py --- a/swh/web/browse/views/origin.py +++ b/swh/web/browse/views/origin.py @@ -14,7 +14,7 @@ browse_snapshot_releases, get_snapshot_context, ) -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import BadInputExc, handle_view_exception from swh.web.common.origin_visits import get_origin_visits from swh.web.common.utils import format_utc_iso_date, parse_iso8601_date_to_utc, reverse @@ -232,7 +232,7 @@ if origin_url is None: raise BadInputExc("An origin URL must be provided as query parameter.") - origin_info = service.lookup_origin({"url": origin_url}) + origin_info = archive.lookup_origin({"url": origin_url}) origin_visits = get_origin_visits(origin_info) snapshot_context = get_snapshot_context(origin_url=origin_url) except Exception as exc: diff --git a/swh/web/browse/views/release.py b/swh/web/browse/views/release.py --- a/swh/web/browse/views/release.py +++ b/swh/web/browse/views/release.py @@ -19,7 +19,7 @@ gen_revision_link, gen_snapshot_link, ) -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import NotFoundExc, handle_view_exception from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import ReleaseMetadata, SWHObjectInfo @@ -39,7 +39,7 @@ The url that points to it is :http:get:`/browse/release/(sha1_git)/`. 
""" try: - release = service.lookup_release(sha1_git) + release = archive.lookup_release(sha1_git) snapshot_context = {} origin_info = None snapshot_id = request.GET.get("snapshot_id") @@ -128,7 +128,7 @@ link_attrs=None, ) try: - revision = service.lookup_revision(release["target"]) + revision = archive.lookup_revision(release["target"]) rev_directory = revision["directory"] vault_cooking = { "directory_context": True, @@ -147,7 +147,7 @@ ) try: # check directory exists - service.lookup_directory(release["target"]) + archive.lookup_directory(release["target"]) vault_cooking = { "directory_context": True, "directory_id": release["target"], diff --git a/swh/web/browse/views/revision.py b/swh/web/browse/views/revision.py --- a/swh/web/browse/views/revision.py +++ b/swh/web/browse/views/revision.py @@ -30,7 +30,7 @@ prepare_content_for_display, request_content, ) -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import NotFoundExc, handle_view_exception from swh.web.common.identifiers import get_swhids_info from swh.web.common.typing import RevisionMetadata, SWHObjectInfo @@ -129,7 +129,7 @@ "new file: %s" % _gen_diff_link(i, diff_link, change["to_path"]) ) elif change["type"] == "delete": - parent = service.lookup_revision(revision["parents"][0]) + parent = archive.lookup_revision(revision["parents"][0]) change["content_url"] = _gen_content_url( parent, from_query_string, change["from_path"], snapshot_context ) @@ -159,7 +159,7 @@ Browse internal endpoint to compute revision diff """ try: - revision = service.lookup_revision(sha1_git) + revision = archive.lookup_revision(sha1_git) snapshot_context = None origin_url = request.GET.get("origin_url", None) if not origin_url: @@ -173,7 +173,7 @@ except Exception as exc: return handle_view_exception(request, exc) - changes = service.diff_revision(sha1_git) + changes = archive.diff_revision(sha1_git) changes_msg = _gen_revision_changes_list(revision, changes, snapshot_context) 
diff_data = { @@ -225,7 +225,7 @@ revs_walker_state = rev_log_session["revs_walker_state"] if len(rev_log) < offset + per_page: - revs_walker = service.get_revisions_walker( + revs_walker = archive.get_revisions_walker( revs_ordering, sha1_git, max_revs=offset + per_page + 1, @@ -236,7 +236,7 @@ revs_walker_state = revs_walker.export_state() revs = rev_log[offset : offset + per_page] - revision_log = service.lookup_revision_multiple(revs) + revision_log = archive.lookup_revision_multiple(revs) request.session[session_key] = { "rev_log": rev_log, @@ -310,7 +310,7 @@ The url that points to it is :http:get:`/browse/revision/(sha1_git)/`. """ try: - revision = service.lookup_revision(sha1_git) + revision = archive.lookup_revision(sha1_git) origin_info = None snapshot_context = None origin_url = request.GET.get("origin_url") @@ -359,7 +359,7 @@ snapshot_context = get_snapshot_context(snapshot_id) if path: - file_info = service.lookup_directory_with_path(revision["directory"], path) + file_info = archive.lookup_directory_with_path(revision["directory"], path) if file_info["type"] == "dir": dir_id = file_info["target"] else: diff --git a/swh/web/common/service.py b/swh/web/common/archive.py rename from swh/web/common/service.py rename to swh/web/common/archive.py diff --git a/swh/web/common/identifiers.py b/swh/web/common/identifiers.py --- a/swh/web/common/identifiers.py +++ b/swh/web/common/identifiers.py @@ -23,7 +23,7 @@ parse_swhid, swhid, ) -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import BadInputExc from swh.web.common.typing import ( QueryParameters, @@ -128,12 +128,12 @@ if anchor_swhid_parsed.object_type == DIRECTORY: directory = anchor_swhid_parsed.object_id elif anchor_swhid_parsed.object_type == REVISION: - revision = service.lookup_revision(anchor_swhid_parsed.object_id) + revision = archive.lookup_revision(anchor_swhid_parsed.object_id) directory = revision["directory"] elif 
anchor_swhid_parsed.object_type == RELEASE: - release = service.lookup_release(anchor_swhid_parsed.object_id) + release = archive.lookup_release(anchor_swhid_parsed.object_id) if release["target_type"] == REVISION: - revision = service.lookup_revision(release["target"]) + revision = archive.lookup_revision(release["target"]) directory = revision["directory"] if object_type == CONTENT: if "origin" not in swhid_parsed.metadata: @@ -159,7 +159,7 @@ if anchor_swhid_parsed: if anchor_swhid_parsed.object_type == REVISION: # check if the anchor revision is the tip of a branch - branch_name = service.lookup_snapshot_branch_name_from_tip_revision( + branch_name = archive.lookup_snapshot_branch_name_from_tip_revision( snp_swhid_parsed.object_id, anchor_swhid_parsed.object_id ) if branch_name: @@ -168,12 +168,12 @@ query_dict["revision"] = anchor_swhid_parsed.object_id elif anchor_swhid_parsed.object_type == RELEASE: - release = service.lookup_release(anchor_swhid_parsed.object_id) + release = archive.lookup_release(anchor_swhid_parsed.object_id) if release: query_dict["release"] = release["name"] if object_type == REVISION and "release" not in query_dict: - branch_name = service.lookup_snapshot_branch_name_from_tip_revision( + branch_name = archive.lookup_snapshot_branch_name_from_tip_revision( snp_swhid_parsed.object_id, object_id ) if branch_name: diff --git a/swh/web/common/origin_save.py b/swh/web/common/origin_save.py --- a/swh/web/common/origin_save.py +++ b/swh/web/common/origin_save.py @@ -20,7 +20,7 @@ from swh.scheduler.utils import create_oneshot_task_dict from swh.web import config -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import BadInputExc, ForbiddenExc, NotFoundExc from swh.web.common.models import ( SAVE_REQUEST_ACCEPTED, @@ -154,7 +154,7 @@ if time_delta.days <= 30: try: origin = {"url": save_request.origin_url} - origin_info = service.lookup_origin(origin) + origin_info = archive.lookup_origin(origin) 
origin_visits = get_origin_visits(origin_info) visit_dates = [parse_iso8601_date_to_utc(v["date"]) for v in origin_visits] i = bisect_right(visit_dates, save_request.request_date) diff --git a/swh/web/common/origin_visits.py b/swh/web/common/origin_visits.py --- a/swh/web/common/origin_visits.py +++ b/swh/web/common/origin_visits.py @@ -8,7 +8,7 @@ from django.core.cache import cache -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import NotFoundExc from swh.web.common.typing import OriginInfo, OriginVisitInfo from swh.web.common.utils import parse_iso8601_date_to_utc @@ -32,12 +32,12 @@ swh.web.common.exc.NotFoundExc: if the origin is not found """ - from swh.web.common import service + from swh.web.common import archive if "url" in origin_info: origin_url = origin_info["url"] else: - origin_url = service.lookup_origin(origin_info)["url"] + origin_url = archive.lookup_origin(origin_info)["url"] cache_entry_id = "origin_visits_%s" % origin_url cache_entry = cache.get(cache_entry_id) @@ -45,20 +45,20 @@ if cache_entry: last_visit = cache_entry[-1]["visit"] new_visits = list( - service.lookup_origin_visits(origin_url, last_visit=last_visit) + archive.lookup_origin_visits(origin_url, last_visit=last_visit) ) if not new_visits: - last_snp = service.lookup_latest_origin_snapshot(origin_url) + last_snp = archive.lookup_latest_origin_snapshot(origin_url) if not last_snp or last_snp["id"] == cache_entry[-1]["snapshot"]: return cache_entry origin_visits = [] - per_page = service.MAX_LIMIT + per_page = archive.MAX_LIMIT last_visit = None while 1: visits = list( - service.lookup_origin_visits( + archive.lookup_origin_visits( origin_url, last_visit=last_visit, per_page=per_page ) ) @@ -112,12 +112,12 @@ swh.web.common.exc.NotFoundExc: if no visit can be found """ # returns the latest full visit with a valid snapshot - visit = service.lookup_origin_visit_latest( + visit = archive.lookup_origin_visit_latest( origin_info["url"], 
allowed_statuses=["full"], require_snapshot=True ) if not visit: # or the latest partial visit with a valid snapshot otherwise - visit = service.lookup_origin_visit_latest( + visit = archive.lookup_origin_visit_latest( origin_info["url"], allowed_statuses=["partial"], require_snapshot=True ) diff --git a/swh/web/common/utils.py b/swh/web/common/utils.py --- a/swh/web/common/utils.py +++ b/swh/web/common/utils.py @@ -291,9 +291,9 @@ if branch["target"] in snapshot["branches"]: branch = snapshot["branches"][branch["target"]] else: - from swh.web.common import service + from swh.web.common import archive - snp = service.lookup_snapshot( + snp = archive.lookup_snapshot( snapshot["id"], branches_from=branch["target"], branches_count=1 ) if snp and branch["target"] in snp["branches"]: diff --git a/swh/web/misc/badges.py b/swh/web/misc/badges.py --- a/swh/web/misc/badges.py +++ b/swh/web/misc/badges.py @@ -23,7 +23,7 @@ parse_swhid, swhid, ) -from swh.web.common import service +from swh.web.common import archive from swh.web.common.exc import BadInputExc, NotFoundExc from swh.web.common.identifiers import resolve_swhid from swh.web.common.utils import reverse @@ -90,7 +90,7 @@ try: if object_type == ORIGIN: - service.lookup_origin({"url": object_id}) + archive.lookup_origin({"url": object_id}) right_text = "repository" whole_link = reverse( "browse-origin", query_params={"origin_url": object_id} @@ -102,7 +102,7 @@ parsed_swhid = parse_swhid(object_swhid) object_type = parsed_swhid.object_type object_id = parsed_swhid.object_id - swh_object = service.lookup_object(object_type, object_id) + swh_object = archive.lookup_object(object_type, object_id) if object_swhid: right_text = object_swhid else: diff --git a/swh/web/misc/urls.py b/swh/web/misc/urls.py --- a/swh/web/misc/urls.py +++ b/swh/web/misc/urls.py @@ -13,7 +13,7 @@ from django.http import JsonResponse from django.shortcuts import render -from swh.web.common import service +from swh.web.common import archive from 
swh.web.config import get_config from swh.web.misc.metrics import prometheus_metrics @@ -28,7 +28,7 @@ def _stat_counters(request): - stat_counters = service.stat_counters() + stat_counters = archive.stat_counters() url = get_config()["history_counters_url"] stat_counters_history = "null" if url: diff --git a/swh/web/tests/api/views/test_origin.py b/swh/web/tests/api/views/test_origin.py --- a/swh/web/tests/api/views/test_origin.py +++ b/swh/web/tests/api/views/test_origin.py @@ -414,7 +414,7 @@ def test_api_origin_search(api_client, mocker, backend): if backend != "swh-search": # equivalent to not configuring search in the config - mocker.patch("swh.web.common.service.search", None) + mocker.patch("swh.web.common.archive.search", None) expected_origins = { "https://github.com/wcoder/highlightjs-line-numbers.js", @@ -454,7 +454,7 @@ def test_api_origin_search_words(api_client, mocker, backend): if backend != "swh-search": # equivalent to not configuring search in the config - mocker.patch("swh.web.common.service.search", None) + mocker.patch("swh.web.common.archive.search", None) expected_origins = { "https://github.com/wcoder/highlightjs-line-numbers.js", @@ -506,7 +506,7 @@ if backend != "swh-search": # equivalent to not configuring search in the config - mocker.patch("swh.web.common.service.search", None) + mocker.patch("swh.web.common.archive.search", None) expected_origins = { "https://github.com/wcoder/highlightjs-line-numbers.js", @@ -532,7 +532,7 @@ ) else: # equivalent to not configuring search in the config - mocker.patch("swh.web.common.service.search", None) + mocker.patch("swh.web.common.archive.search", None) archive_data.origin_add( [Origin(url="http://foobar/{}".format(i)) for i in range(2000)] @@ -549,7 +549,7 @@ @given(origin()) def test_api_origin_metadata_search(api_client, mocker, origin): - mock_idx_storage = mocker.patch("swh.web.common.service.idx_storage") + mock_idx_storage = mocker.patch("swh.web.common.archive.idx_storage") oimsft = 
mock_idx_storage.origin_intrinsic_metadata_search_fulltext oimsft.side_effect = lambda conjunction, limit: [ { @@ -597,7 +597,7 @@ @given(origin()) def test_api_origin_metadata_search_limit(api_client, mocker, origin): - mock_idx_storage = mocker.patch("swh.web.common.service.idx_storage") + mock_idx_storage = mocker.patch("swh.web.common.archive.idx_storage") oimsft = mock_idx_storage.origin_intrinsic_metadata_search_fulltext oimsft.side_effect = lambda conjunction, limit: [ @@ -643,7 +643,7 @@ @given(origin()) def test_api_origin_intrinsic_metadata(api_client, mocker, origin): - mock_idx_storage = mocker.patch("swh.web.common.service.idx_storage") + mock_idx_storage = mocker.patch("swh.web.common.archive.idx_storage") oimg = mock_idx_storage.origin_intrinsic_metadata_get oimg.side_effect = lambda origin_urls: [ { @@ -676,7 +676,7 @@ def test_api_origin_metadata_search_invalid(api_client, mocker): - mock_idx_storage = mocker.patch("swh.web.common.service.idx_storage") + mock_idx_storage = mocker.patch("swh.web.common.archive.idx_storage") url = reverse("api-1-origin-metadata-search") check_api_get_responses(api_client, url, status_code=400) mock_idx_storage.assert_not_called() diff --git a/swh/web/tests/api/views/test_stat.py b/swh/web/tests/api/views/test_stat.py --- a/swh/web/tests/api/views/test_stat.py +++ b/swh/web/tests/api/views/test_stat.py @@ -10,8 +10,8 @@ def test_api_1_stat_counters_raise_error(api_client, mocker): - mock_service = mocker.patch("swh.web.api.views.stat.service") - mock_service.stat_counters.side_effect = BadInputExc( + mock_archive = mocker.patch("swh.web.api.views.stat.archive") + mock_archive.stat_counters.side_effect = BadInputExc( "voluntary error to check the bad request middleware." 
) @@ -24,8 +24,8 @@ def test_api_1_stat_counters_raise_from_db(api_client, mocker): - mock_service = mocker.patch("swh.web.api.views.stat.service") - mock_service.stat_counters.side_effect = StorageDBError( + mock_archive = mocker.patch("swh.web.api.views.stat.archive") + mock_archive.stat_counters.side_effect = StorageDBError( "Storage exploded! Will be back online shortly!" ) @@ -39,8 +39,8 @@ def test_api_1_stat_counters_raise_from_api(api_client, mocker): - mock_service = mocker.patch("swh.web.api.views.stat.service") - mock_service.stat_counters.side_effect = StorageAPIError( + mock_archive = mocker.patch("swh.web.api.views.stat.archive") + mock_archive.stat_counters.side_effect = StorageAPIError( "Storage API dropped dead! Will resurrect from its ashes asap!" ) diff --git a/swh/web/tests/api/views/test_vault.py b/swh/web/tests/api/views/test_vault.py --- a/swh/web/tests/api/views/test_vault.py +++ b/swh/web/tests/api/views/test_vault.py @@ -19,7 +19,7 @@ @given(directory(), revision()) def test_api_vault_cook(api_client, mocker, directory, revision): - mock_service = mocker.patch("swh.web.api.views.vault.service") + mock_archive = mocker.patch("swh.web.api.views.vault.archive") for obj_type, obj_id in ( ("directory", directory), @@ -39,8 +39,8 @@ } stub_fetch = b"content" - mock_service.vault_cook.return_value = stub_cook - mock_service.vault_fetch.return_value = stub_fetch + mock_archive.vault_cook.return_value = stub_cook + mock_archive.vault_fetch.return_value = stub_fetch email = "test@test.mail" url = reverse( @@ -56,7 +56,7 @@ ) assert rv.data == stub_cook - mock_service.vault_cook.assert_called_with( + mock_archive.vault_cook.assert_called_with( obj_type, hashutil.hash_to_bytes(obj_id), email ) @@ -65,7 +65,7 @@ assert rv.status_code == 200 assert rv["Content-Type"] == "application/gzip" assert rv.content == stub_fetch - mock_service.vault_fetch.assert_called_with( + mock_archive.vault_fetch.assert_called_with( obj_type, hashutil.hash_to_bytes(obj_id) 
) @@ -112,7 +112,7 @@ def test_api_vault_cook_notfound( api_client, mocker, directory, revision, unknown_directory, unknown_revision ): - mock_vault = mocker.patch("swh.web.common.service.vault") + mock_vault = mocker.patch("swh.web.common.archive.vault") mock_vault.cook.side_effect = NotFoundExc("object not found") mock_vault.fetch.side_effect = NotFoundExc("cooked archive not found") mock_vault.progress.side_effect = NotFoundExc("cooking request not found") diff --git a/swh/web/tests/browse/views/test_content.py b/swh/web/tests/browse/views/test_content.py --- a/swh/web/tests/browse/views/test_content.py +++ b/swh/web/tests/browse/views/test_content.py @@ -325,12 +325,12 @@ @given(content()) def test_content_bytes_missing(client, archive_data, mocker, content): - mock_service = mocker.patch("swh.web.browse.utils.service") + mock_archive = mocker.patch("swh.web.browse.utils.archive") content_data = archive_data.content_get(content["sha1"]) - mock_service.lookup_content.return_value = content_data - mock_service.lookup_content_filetype.side_effect = Exception() - mock_service.lookup_content_raw.side_effect = NotFoundExc( + mock_archive.lookup_content.return_value = content_data + mock_archive.lookup_content_filetype.side_effect = Exception() + mock_archive.lookup_content_raw.side_effect = NotFoundExc( "Content bytes not available!" 
) diff --git a/swh/web/tests/browse/views/test_origin.py b/swh/web/tests/browse/views/test_origin.py --- a/swh/web/tests/browse/views/test_origin.py +++ b/swh/web/tests/browse/views/test_origin.py @@ -510,8 +510,8 @@ "swh.web.common.origin_visits.get_origin_visits" ) mock_get_origin_visits.return_value = [] - mock_service = mocker.patch("swh.web.common.origin_visits.service") - mock_service.lookup_origin_visit_latest.return_value = None + mock_archive = mocker.patch("swh.web.common.origin_visits.archive") + mock_archive.lookup_origin_visit_latest.return_value = None url = reverse("browse-origin-directory", query_params={"origin_url": origin["url"]}) resp = client.get(url) assert resp.status_code == 404 @@ -556,8 +556,8 @@ "swh.web.common.origin_visits.get_origin_visits" ) mock_get_origin_visits.return_value = [] - mock_service = mocker.patch("swh.web.common.origin_visits.service") - mock_service.lookup_origin_visit_latest.return_value = None + mock_archive = mocker.patch("swh.web.common.origin_visits.archive") + mock_archive.lookup_origin_visit_latest.return_value = None url = reverse( "browse-origin-content", query_params={"origin_url": origin["url"], "path": "foo"}, @@ -589,13 +589,13 @@ @given(origin()) def test_browse_origin_content_directory_empty_snapshot(client, mocker, origin): - mock_snapshot_service = mocker.patch("swh.web.browse.snapshot_context.service") + mock_snapshot_archive = mocker.patch("swh.web.browse.snapshot_context.archive") mock_get_origin_visit_snapshot = mocker.patch( "swh.web.browse.snapshot_context.get_origin_visit_snapshot" ) mock_get_origin_visit_snapshot.return_value = ([], []) - mock_snapshot_service.lookup_origin.return_value = origin - mock_snapshot_service.lookup_snapshot_sizes.return_value = { + mock_snapshot_archive.lookup_origin.return_value = origin + mock_snapshot_archive.lookup_snapshot_sizes.return_value = { "revision": 0, "release": 0, } @@ -611,8 +611,8 @@ assert_template_used(resp, f"browse/{browse_context}.html") assert 
re.search("snapshot.*is empty", resp.content.decode("utf-8")) assert mock_get_origin_visit_snapshot.called - assert mock_snapshot_service.lookup_origin.called - assert mock_snapshot_service.lookup_snapshot_sizes.called + assert mock_snapshot_archive.lookup_origin.called + assert mock_snapshot_archive.lookup_snapshot_sizes.called @given(origin()) @@ -643,16 +643,16 @@ @given(origin()) def test_origin_empty_snapshot(client, mocker, origin): - mock_service = mocker.patch("swh.web.browse.snapshot_context.service") + mock_archive = mocker.patch("swh.web.browse.snapshot_context.archive") mock_get_origin_visit_snapshot = mocker.patch( "swh.web.browse.snapshot_context.get_origin_visit_snapshot" ) mock_get_origin_visit_snapshot.return_value = ([], []) - mock_service.lookup_snapshot_sizes.return_value = { + mock_archive.lookup_snapshot_sizes.return_value = { "revision": 0, "release": 0, } - mock_service.lookup_origin.return_value = origin + mock_archive.lookup_origin.return_value = origin url = reverse("browse-origin-directory", query_params={"origin_url": origin["url"]}) resp = client.get(url) assert resp.status_code == 200 @@ -661,7 +661,7 @@ assert re.search("snapshot.*is empty", resp_content) assert not re.search("swh-tr-link", resp_content) assert mock_get_origin_visit_snapshot.called - assert mock_service.lookup_snapshot_sizes.called + assert mock_archive.lookup_snapshot_sizes.called @given(new_origin()) diff --git a/swh/web/tests/common/test_service.py b/swh/web/tests/common/test_archive.py rename from swh/web/tests/common/test_service.py rename to swh/web/tests/common/test_archive.py --- a/swh/web/tests/common/test_service.py +++ b/swh/web/tests/common/test_archive.py @@ -15,7 +15,7 @@ from swh.model.hashutil import hash_to_bytes, hash_to_hex from swh.model.identifiers import CONTENT, DIRECTORY, RELEASE, REVISION, SNAPSHOT from swh.model.model import Directory, DirectoryEntry, Origin, OriginVisit, Revision -from swh.web.common import service +from swh.web.common 
import archive from swh.web.common.exc import BadInputExc, NotFoundExc from swh.web.common.typing import OriginInfo from swh.web.tests.conftest import ctags_json_missing, fossology_missing @@ -57,7 +57,7 @@ input_data.append({"sha1": cnt["sha1"]}) expected_output.append({"sha1": cnt["sha1"], "found": True}) - assert service.lookup_multiple_hashes(input_data) == expected_output + assert archive.lookup_multiple_hashes(input_data) == expected_output @given(contents(), unknown_contents()) @@ -71,20 +71,20 @@ input_data.append({"sha1": cnt["sha1"]}) expected_output.append({"sha1": cnt["sha1"], "found": cnt in contents}) - assert service.lookup_multiple_hashes(input_data) == expected_output + assert archive.lookup_multiple_hashes(input_data) == expected_output def test_lookup_hash_does_not_exist(): unknown_content_ = random_content() - actual_lookup = service.lookup_hash("sha1_git:%s" % unknown_content_["sha1_git"]) + actual_lookup = archive.lookup_hash("sha1_git:%s" % unknown_content_["sha1_git"]) assert actual_lookup == {"found": None, "algo": "sha1_git"} @given(content()) def test_lookup_hash_exist(archive_data, content): - actual_lookup = service.lookup_hash("sha1:%s" % content["sha1"]) + actual_lookup = archive.lookup_hash("sha1:%s" % content["sha1"]) content_metadata = archive_data.content_get(content["sha1"]) @@ -94,14 +94,14 @@ def test_search_hash_does_not_exist(): unknown_content_ = random_content() - actual_lookup = service.search_hash("sha1_git:%s" % unknown_content_["sha1_git"]) + actual_lookup = archive.search_hash("sha1_git:%s" % unknown_content_["sha1_git"]) assert {"found": False} == actual_lookup @given(content()) def test_search_hash_exist(content): - actual_lookup = service.search_hash("sha1:%s" % content["sha1"]) + actual_lookup = archive.search_hash("sha1:%s" % content["sha1"]) assert {"found": True} == actual_lookup @@ -113,7 +113,7 @@ def test_lookup_content_ctags(indexer_data, contents_with_ctags): content_sha1 = 
random.choice(contents_with_ctags["sha1s"]) indexer_data.content_add_ctags(content_sha1) - actual_ctags = list(service.lookup_content_ctags("sha1:%s" % content_sha1)) + actual_ctags = list(archive.lookup_content_ctags("sha1:%s" % content_sha1)) expected_data = list(indexer_data.content_get_ctags(content_sha1)) for ctag in expected_data: @@ -126,7 +126,7 @@ unknown_content_ = random_content() actual_ctags = list( - service.lookup_content_ctags("sha1:%s" % unknown_content_["sha1"]) + archive.lookup_content_ctags("sha1:%s" % unknown_content_["sha1"]) ) assert actual_ctags == [] @@ -135,7 +135,7 @@ @given(content()) def test_lookup_content_filetype(indexer_data, content): indexer_data.content_add_mimetype(content["sha1"]) - actual_filetype = service.lookup_content_filetype(content["sha1"]) + actual_filetype = archive.lookup_content_filetype(content["sha1"]) expected_filetype = indexer_data.content_get_mimetype(content["sha1"]) assert actual_filetype == expected_filetype @@ -145,7 +145,7 @@ @given(content()) def test_lookup_content_language(indexer_data, content): indexer_data.content_add_language(content["sha1"]) - actual_language = service.lookup_content_language(content["sha1"]) + actual_language = archive.lookup_content_language(content["sha1"]) expected_language = indexer_data.content_get_language(content["sha1"]) assert actual_language == expected_language @@ -169,7 +169,7 @@ expected_ctags.append(ctag) actual_ctags = list( - service.lookup_expression( + archive.lookup_expression( contents_with_ctags["symbol_name"], last_sha1=None, per_page=10 ) ) @@ -181,7 +181,7 @@ expected_ctags = [] actual_ctags = list( - service.lookup_expression("barfoo", last_sha1=None, per_page=10) + archive.lookup_expression("barfoo", last_sha1=None, per_page=10) ) assert actual_ctags == expected_ctags @@ -190,14 +190,14 @@ @given(content()) def test_lookup_content_license(indexer_data, content): indexer_data.content_add_license(content["sha1"]) - actual_license = 
service.lookup_content_license(content["sha1"]) + actual_license = archive.lookup_content_license(content["sha1"]) expected_license = indexer_data.content_get_license(content["sha1"]) assert actual_license == expected_license def test_stat_counters(archive_data): - actual_stats = service.stat_counters() + actual_stats = archive.stat_counters() assert actual_stats == archive_data.stat_counters() @@ -210,7 +210,7 @@ ) actual_origin_visits = list( - service.lookup_origin_visits(new_origin.url, per_page=100) + archive.lookup_origin_visits(new_origin.url, per_page=100) ) expected_visits = archive_data.origin_visit_get(new_origin.url) @@ -228,7 +228,7 @@ ) visit = random.choice(visits).visit - actual_origin_visit = service.lookup_origin_visit(new_origin.url, visit) + actual_origin_visit = archive.lookup_origin_visit(new_origin.url, visit) expected_visit = dict(archive_data.origin_visit_get_by(new_origin.url, visit)) @@ -239,7 +239,7 @@ def test_lookup_origin(archive_data, new_origin): archive_data.origin_add([new_origin]) - actual_origin = service.lookup_origin({"url": new_origin.url}) + actual_origin = archive.lookup_origin({"url": new_origin.url}) expected_origin = archive_data.origin_get([new_origin.url])[0] assert actual_origin == expected_origin @@ -247,20 +247,20 @@ @given(invalid_sha1()) def test_lookup_release_ko_id_checksum_not_a_sha1(invalid_sha1): with pytest.raises(BadInputExc) as e: - service.lookup_release(invalid_sha1) + archive.lookup_release(invalid_sha1) assert e.match("Invalid checksum") @given(sha256()) def test_lookup_release_ko_id_checksum_too_long(sha256): with pytest.raises(BadInputExc) as e: - service.lookup_release(sha256) + archive.lookup_release(sha256) assert e.match("Only sha1_git is supported.") @given(releases()) def test_lookup_release_multiple(archive_data, releases): - actual_releases = list(service.lookup_release_multiple(releases)) + actual_releases = list(archive.lookup_release_multiple(releases)) expected_releases = [] for 
release_id in releases: @@ -273,7 +273,7 @@ def test_lookup_release_multiple_none_found(): unknown_releases_ = [random_sha1(), random_sha1(), random_sha1()] - actual_releases = list(service.lookup_release_multiple(unknown_releases_)) + actual_releases = list(archive.lookup_release_multiple(unknown_releases_)) assert actual_releases == [None] * len(unknown_releases_) @@ -282,7 +282,7 @@ def test_lookup_directory_with_path_not_found(directory): path = "some/invalid/path/here" with pytest.raises(NotFoundExc) as e: - service.lookup_directory_with_path(directory, path) + archive.lookup_directory_with_path(directory, path) assert e.match("Directory entry with path %s from %s not found" % (path, directory)) @@ -291,13 +291,13 @@ directory_content = archive_data.directory_ls(directory) directory_entry = random.choice(directory_content) path = directory_entry["name"] - actual_result = service.lookup_directory_with_path(directory, path) + actual_result = archive.lookup_directory_with_path(directory, path) assert actual_result == directory_entry @given(release()) def test_lookup_release(archive_data, release): - actual_release = service.lookup_release(release) + actual_release = archive.lookup_release(release) assert actual_release == archive_data.release_get(release) @@ -308,13 +308,13 @@ sha1_git = invalid_sha1 with pytest.raises(BadInputExc) as e: - service.lookup_revision_with_context(sha1_git_root, sha1_git) + archive.lookup_revision_with_context(sha1_git_root, sha1_git) assert e.match("Invalid checksum query string") sha1_git = sha256 with pytest.raises(BadInputExc) as e: - service.lookup_revision_with_context(sha1_git_root, sha1_git) + archive.lookup_revision_with_context(sha1_git_root, sha1_git) assert e.match("Only sha1_git is supported") @@ -326,7 +326,7 @@ sha1_git = unknown_revision with pytest.raises(NotFoundExc) as e: - service.lookup_revision_with_context(sha1_git_root, sha1_git) + archive.lookup_revision_with_context(sha1_git_root, sha1_git) assert 
e.match("Revision %s not found" % sha1_git) @@ -337,7 +337,7 @@ sha1_git_root = unknown_revision sha1_git = revision with pytest.raises(NotFoundExc) as e: - service.lookup_revision_with_context(sha1_git_root, sha1_git) + archive.lookup_revision_with_context(sha1_git_root, sha1_git) assert e.match("Revision root %s not found" % sha1_git_root) @@ -346,7 +346,7 @@ sha1_git = ancestor_revisions["sha1_git"] root_sha1_git = ancestor_revisions["sha1_git_root"] for sha1_git_root in (root_sha1_git, {"id": hash_to_bytes(root_sha1_git)}): - actual_revision = service.lookup_revision_with_context(sha1_git_root, sha1_git) + actual_revision = archive.lookup_revision_with_context(sha1_git_root, sha1_git) children = [] for rev in archive_data.revision_log(root_sha1_git): @@ -366,7 +366,7 @@ root_sha1_git = non_ancestor_revisions["sha1_git_root"] with pytest.raises(NotFoundExc) as e: - service.lookup_revision_with_context(root_sha1_git, sha1_git) + archive.lookup_revision_with_context(root_sha1_git, sha1_git) assert e.match("Revision %s is not an ancestor of %s" % (sha1_git, root_sha1_git)) @@ -374,7 +374,7 @@ unknown_revision_ = random_sha1() with pytest.raises(NotFoundExc) as e: - service.lookup_directory_with_revision(unknown_revision_) + archive.lookup_directory_with_revision(unknown_revision_) assert e.match("Revision %s not found" % unknown_revision_) @@ -408,7 +408,7 @@ archive_data.revision_add([new_revision]) new_revision_id = hash_to_hex(new_revision.id) with pytest.raises(NotFoundExc) as e: - service.lookup_directory_with_revision(new_revision_id, dir_path) + archive.lookup_directory_with_revision(new_revision_id, dir_path) assert e.match("Content not found for revision %s" % new_revision_id) @@ -416,7 +416,7 @@ def test_lookup_directory_with_revision_ko_path_to_nowhere(revision): invalid_path = "path/to/something/unknown" with pytest.raises(NotFoundExc) as e: - service.lookup_directory_with_revision(revision, invalid_path) + 
archive.lookup_directory_with_revision(revision, invalid_path) assert e.match("Directory or File") assert e.match(invalid_path) assert e.match("revision %s" % revision) @@ -430,7 +430,7 @@ rev_sha1_git = revision_with_submodules["rev_sha1_git"] rev_dir_path = revision_with_submodules["rev_dir_rev_path"] - actual_data = service.lookup_directory_with_revision(rev_sha1_git, rev_dir_path) + actual_data = archive.lookup_directory_with_revision(rev_sha1_git, rev_dir_path) revision = archive_data.revision_get(revision_with_submodules["rev_sha1_git"]) directory = archive_data.directory_ls(revision["directory"]) @@ -448,7 +448,7 @@ @given(revision()) def test_lookup_directory_with_revision_without_path(archive_data, revision): - actual_directory_entries = service.lookup_directory_with_revision(revision) + actual_directory_entries = archive.lookup_directory_with_revision(revision) revision_data = archive_data.revision_get(revision) expected_directory_entries = archive_data.directory_ls(revision_data["directory"]) @@ -467,7 +467,7 @@ ] expected_dir_entry = random.choice(dir_entries) - actual_dir_entry = service.lookup_directory_with_revision( + actual_dir_entry = archive.lookup_directory_with_revision( revision, expected_dir_entry["name"] ) @@ -498,7 +498,7 @@ expected_dir_entry["checksums"]["sha1"] ) - actual_dir_entry = service.lookup_directory_with_revision( + actual_dir_entry = archive.lookup_directory_with_revision( revision, expected_dir_entry["name"], with_data=True ) @@ -513,7 +513,7 @@ @given(revision()) def test_lookup_revision(archive_data, revision): - actual_revision = service.lookup_revision(revision) + actual_revision = archive.lookup_revision(revision) assert actual_revision == archive_data.revision_get(revision) @@ -523,7 +523,7 @@ new_revision["message"] = b"elegant fix for bug \xff" archive_data.revision_add([Revision.from_dict(new_revision)]) - revision = service.lookup_revision(hash_to_hex(new_revision["id"])) + revision = 
archive.lookup_revision(hash_to_hex(new_revision["id"])) assert revision["message"] == "elegant fix for bug \\xff" assert revision["decoding_failures"] == ["message"] @@ -532,7 +532,7 @@ def test_lookup_revision_msg_ok(archive_data, new_revision): archive_data.revision_add([new_revision]) - revision_message = service.lookup_revision_message(hash_to_hex(new_revision.id)) + revision_message = archive.lookup_revision_message(hash_to_hex(new_revision.id)) assert revision_message == {"message": new_revision.message} @@ -541,14 +541,14 @@ unknown_revision_ = random_sha1() with pytest.raises(NotFoundExc) as e: - service.lookup_revision_message(unknown_revision_) + archive.lookup_revision_message(unknown_revision_) assert e.match("Revision with sha1_git %s not found." % unknown_revision_) @given(revisions()) def test_lookup_revision_multiple(archive_data, revisions): - actual_revisions = list(service.lookup_revision_multiple(revisions)) + actual_revisions = list(archive.lookup_revision_multiple(revisions)) expected_revisions = [] for rev in revisions: @@ -560,14 +560,14 @@ def test_lookup_revision_multiple_none_found(): unknown_revisions_ = [random_sha1(), random_sha1(), random_sha1()] - actual_revisions = list(service.lookup_revision_multiple(unknown_revisions_)) + actual_revisions = list(archive.lookup_revision_multiple(unknown_revisions_)) assert actual_revisions == [None] * len(unknown_revisions_) @given(revision()) def test_lookup_revision_log(archive_data, revision): - actual_revision_log = list(service.lookup_revision_log(revision, limit=25)) + actual_revision_log = list(archive.lookup_revision_log(revision, limit=25)) expected_revision_log = archive_data.revision_log(revision, limit=25) assert actual_revision_log == expected_revision_log @@ -590,7 +590,7 @@ branch_name = random.choice(list(branches.keys())) actual_log = list( - service.lookup_revision_log_by(origin["url"], branch_name, None, limit=25) + archive.lookup_revision_log_by(origin["url"], branch_name, 
None, limit=25) ) expected_log = archive_data.revision_log(branches[branch_name]["target"], limit=25) @@ -601,7 +601,7 @@ @given(origin()) def test_lookup_revision_log_by_notfound(origin): with pytest.raises(NotFoundExc): - service.lookup_revision_log_by( + archive.lookup_revision_log_by( origin["url"], "unknown_branch_name", None, limit=100 ) @@ -610,7 +610,7 @@ unknown_content_ = random_content() with pytest.raises(NotFoundExc) as e: - service.lookup_content_raw("sha1:" + unknown_content_["sha1"]) + archive.lookup_content_raw("sha1:" + unknown_content_["sha1"]) assert e.match( "Content with %s checksum equals to %s not found!" @@ -620,7 +620,7 @@ @given(content()) def test_lookup_content_raw(archive_data, content): - actual_content = service.lookup_content_raw("sha256:%s" % content["sha256"]) + actual_content = archive.lookup_content_raw("sha256:%s" % content["sha256"]) expected_content = archive_data.content_get_data(content["sha1"]) @@ -631,7 +631,7 @@ unknown_content_ = random_content() with pytest.raises(NotFoundExc) as e: - service.lookup_content("sha1:%s" % unknown_content_["sha1"]) + archive.lookup_content("sha1:%s" % unknown_content_["sha1"]) assert e.match( "Content with %s checksum equals to %s not found!" 
@@ -641,7 +641,7 @@ @given(content()) def test_lookup_content_with_sha1(archive_data, content): - actual_content = service.lookup_content(f"sha1:{content['sha1']}") + actual_content = archive.lookup_content(f"sha1:{content['sha1']}") expected_content = archive_data.content_get(content["sha1"]) @@ -650,7 +650,7 @@ @given(content()) def test_lookup_content_with_sha256(archive_data, content): - actual_content = service.lookup_content(f"sha256:{content['sha256']}") + actual_content = archive.lookup_content(f"sha256:{content['sha256']}") expected_content = archive_data.content_get(content["sha1"]) @@ -659,21 +659,21 @@ def test_lookup_directory_bad_checksum(): with pytest.raises(BadInputExc): - service.lookup_directory("directory_id") + archive.lookup_directory("directory_id") def test_lookup_directory_not_found(): unknown_directory_ = random_sha1() with pytest.raises(NotFoundExc) as e: - service.lookup_directory(unknown_directory_) + archive.lookup_directory(unknown_directory_) assert e.match("Directory with sha1_git %s not found" % unknown_directory_) @given(directory()) def test_lookup_directory(archive_data, directory): - actual_directory_ls = list(service.lookup_directory(directory)) + actual_directory_ls = list(archive.lookup_directory(directory)) expected_directory_ls = archive_data.directory_ls(directory) @@ -682,7 +682,7 @@ @given(empty_directory()) def test_lookup_directory_empty(empty_directory): - actual_directory_ls = list(service.lookup_directory(empty_directory)) + actual_directory_ls = list(archive.lookup_directory(empty_directory)) assert actual_directory_ls == [] @@ -690,7 +690,7 @@ @given(origin()) def test_lookup_revision_by_nothing_found(origin): with pytest.raises(NotFoundExc): - service.lookup_revision_by(origin["url"], "invalid-branch-name") + archive.lookup_revision_by(origin["url"], "invalid-branch-name") @given(origin()) @@ -698,7 +698,7 @@ branches = _get_origin_branches(archive_data, origin) branch_name = random.choice(list(branches.keys())) 
- actual_revision = service.lookup_revision_by(origin["url"], branch_name) + actual_revision = archive.lookup_revision_by(origin["url"], branch_name) expected_revision = archive_data.revision_get(branches[branch_name]["target"]) @@ -708,7 +708,7 @@ @given(origin(), revision()) def test_lookup_revision_with_context_by_ko(origin, revision): with pytest.raises(NotFoundExc): - service.lookup_revision_with_context_by( + archive.lookup_revision_with_context_by( origin["url"], "invalid-branch-name", None, revision ) @@ -729,7 +729,7 @@ rev = root_rev_log[-1]["id"] - actual_root_rev, actual_rev = service.lookup_revision_with_context_by( + actual_root_rev, actual_rev = archive.lookup_revision_with_context_by( origin["url"], branch_name, None, rev ) @@ -743,7 +743,7 @@ def test_lookup_revision_through_ko_not_implemented(): with pytest.raises(NotImplementedError): - service.lookup_revision_through({"something-unknown": 10}) + archive.lookup_revision_through({"something-unknown": 10}) @given(origin()) @@ -755,14 +755,14 @@ root_rev_log = archive_data.revision_log(root_rev) rev = root_rev_log[-1]["id"] - assert service.lookup_revision_through( + assert archive.lookup_revision_through( { "origin_url": origin["url"], "branch_name": branch_name, "ts": None, "sha1_git": rev, } - ) == service.lookup_revision_with_context_by(origin["url"], branch_name, None, rev) + ) == archive.lookup_revision_with_context_by(origin["url"], branch_name, None, rev) @given(origin()) @@ -770,9 +770,9 @@ branches = _get_origin_branches(archive_data, origin) branch_name = random.choice(list(branches.keys())) - assert service.lookup_revision_through( + assert archive.lookup_revision_through( {"origin_url": origin["url"], "branch_name": branch_name, "ts": None,} - ) == service.lookup_revision_by(origin["url"], branch_name, None) + ) == archive.lookup_revision_by(origin["url"], branch_name, None) @given(ancestor_revisions()) @@ -780,22 +780,22 @@ sha1_git = ancestor_revisions["sha1_git"] sha1_git_root = 
ancestor_revisions["sha1_git_root"] - assert service.lookup_revision_through( + assert archive.lookup_revision_through( {"sha1_git_root": sha1_git_root, "sha1_git": sha1_git,} - ) == service.lookup_revision_with_context(sha1_git_root, sha1_git) + ) == archive.lookup_revision_with_context(sha1_git_root, sha1_git) @given(revision()) def test_lookup_revision_through_with_revision(revision): - assert service.lookup_revision_through( + assert archive.lookup_revision_through( {"sha1_git": revision} - ) == service.lookup_revision(revision) + ) == archive.lookup_revision(revision) @given(revision()) def test_lookup_directory_through_revision_ko_not_found(revision): with pytest.raises(NotFoundExc): - service.lookup_directory_through_revision( + archive.lookup_directory_through_revision( {"sha1_git": revision}, "some/invalid/path" ) @@ -810,9 +810,9 @@ ] dir_entry = random.choice(dir_entries) - assert service.lookup_directory_through_revision( + assert archive.lookup_directory_through_revision( {"sha1_git": revision}, dir_entry["name"] - ) == (revision, service.lookup_directory_with_revision(revision, dir_entry["name"])) + ) == (revision, archive.lookup_directory_with_revision(revision, dir_entry["name"])) @given(revision()) @@ -825,11 +825,11 @@ ] dir_entry = random.choice(dir_entries) - assert service.lookup_directory_through_revision( + assert archive.lookup_directory_through_revision( {"sha1_git": revision}, dir_entry["name"], with_data=True ) == ( revision, - service.lookup_directory_with_revision( + archive.lookup_directory_with_revision( revision, dir_entry["name"], with_data=True ), ) @@ -840,19 +840,19 @@ archive_data, content, directory, release, revision, snapshot ): expected = archive_data.content_find(content) - assert service.lookup_object(CONTENT, content["sha1_git"]) == expected + assert archive.lookup_object(CONTENT, content["sha1_git"]) == expected expected = archive_data.directory_get(directory) - assert service.lookup_object(DIRECTORY, directory) == 
expected + assert archive.lookup_object(DIRECTORY, directory) == expected expected = archive_data.release_get(release) - assert service.lookup_object(RELEASE, release) == expected + assert archive.lookup_object(RELEASE, release) == expected expected = archive_data.revision_get(revision) - assert service.lookup_object(REVISION, revision) == expected + assert archive.lookup_object(REVISION, revision) == expected expected = {**archive_data.snapshot_get(snapshot), "next_branch": None} - assert service.lookup_object(SNAPSHOT, snapshot) == expected + assert archive.lookup_object(SNAPSHOT, snapshot) == expected @given( @@ -870,23 +870,23 @@ unknown_snapshot, ): with pytest.raises(NotFoundExc) as e: - service.lookup_object(CONTENT, unknown_content["sha1_git"]) + archive.lookup_object(CONTENT, unknown_content["sha1_git"]) assert e.match(r"Content.*not found") with pytest.raises(NotFoundExc) as e: - service.lookup_object(DIRECTORY, unknown_directory) + archive.lookup_object(DIRECTORY, unknown_directory) assert e.match(r"Directory.*not found") with pytest.raises(NotFoundExc) as e: - service.lookup_object(RELEASE, unknown_release) + archive.lookup_object(RELEASE, unknown_release) assert e.match(r"Release.*not found") with pytest.raises(NotFoundExc) as e: - service.lookup_object(REVISION, unknown_revision) + archive.lookup_object(REVISION, unknown_revision) assert e.match(r"Revision.*not found") with pytest.raises(NotFoundExc) as e: - service.lookup_object(SNAPSHOT, unknown_snapshot) + archive.lookup_object(SNAPSHOT, unknown_snapshot) assert e.match(r"Snapshot.*not found") @@ -894,27 +894,27 @@ def test_lookup_invalid_objects(invalid_sha1): with pytest.raises(BadInputExc) as e: - service.lookup_object("foo", invalid_sha1) + archive.lookup_object("foo", invalid_sha1) assert e.match("Invalid swh object type") with pytest.raises(BadInputExc) as e: - service.lookup_object(CONTENT, invalid_sha1) + archive.lookup_object(CONTENT, invalid_sha1) assert e.match("Invalid hash") with 
pytest.raises(BadInputExc) as e: - service.lookup_object(DIRECTORY, invalid_sha1) + archive.lookup_object(DIRECTORY, invalid_sha1) assert e.match("Invalid checksum") with pytest.raises(BadInputExc) as e: - service.lookup_object(RELEASE, invalid_sha1) + archive.lookup_object(RELEASE, invalid_sha1) assert e.match("Invalid checksum") with pytest.raises(BadInputExc) as e: - service.lookup_object(REVISION, invalid_sha1) + archive.lookup_object(REVISION, invalid_sha1) assert e.match("Invalid checksum") with pytest.raises(BadInputExc) as e: - service.lookup_object(SNAPSHOT, invalid_sha1) + archive.lookup_object(SNAPSHOT, invalid_sha1) assert e.match("Invalid checksum") @@ -933,7 +933,7 @@ SNAPSHOT: [hash_to_bytes(missing_snp)], } - actual_result = service.lookup_missing_hashes(grouped_swhids) + actual_result = archive.lookup_missing_hashes(grouped_swhids) assert actual_result == { missing_cnt, @@ -958,21 +958,21 @@ SNAPSHOT: [hash_to_bytes(missing_snp)], } - actual_result = service.lookup_missing_hashes(grouped_swhids) + actual_result = archive.lookup_missing_hashes(grouped_swhids) assert actual_result == {missing_rev, missing_rel, missing_snp} @given(origin()) def test_lookup_origin_extra_trailing_slash(origin): - origin_info = service.lookup_origin({"url": f"{origin['url']}/"}) + origin_info = archive.lookup_origin({"url": f"{origin['url']}/"}) assert origin_info["url"] == origin["url"] def test_lookup_origin_missing_trailing_slash(archive_data): deb_origin = Origin(url="http://snapshot.debian.org/package/r-base/") archive_data.origin_add([deb_origin]) - origin_info = service.lookup_origin({"url": deb_origin.url[:-1]}) + origin_info = archive.lookup_origin({"url": deb_origin.url[:-1]}) assert origin_info["url"] == deb_origin.url @@ -990,7 +990,7 @@ ] assert ( - service.lookup_snapshot_branch_name_from_tip_revision( + archive.lookup_snapshot_branch_name_from_tip_revision( snapshot_id, branch_info["revision"] ) in possible_results @@ -1008,11 +1008,11 @@ 
 hasher.update(unknown_origin.url.encode("ascii")) unknown_origin_sha1 = hasher.hexdigest() - origins = list(service.lookup_origins_by_sha1s([origin_sha1])) + origins = list(archive.lookup_origins_by_sha1s([origin_sha1])) assert origins == [origin_info] - origins = list(service.lookup_origins_by_sha1s([origin_sha1, origin_sha1])) + origins = list(archive.lookup_origins_by_sha1s([origin_sha1, origin_sha1])) assert origins == [origin_info, origin_info] - origins = list(service.lookup_origins_by_sha1s([origin_sha1, unknown_origin_sha1])) + origins = list(archive.lookup_origins_by_sha1s([origin_sha1, unknown_origin_sha1])) assert origins == [origin_info, None] diff --git a/swh/web/tests/common/test_origin_save.py b/swh/web/tests/common/test_origin_save.py --- a/swh/web/tests/common/test_origin_save.py +++ b/swh/web/tests/common/test_origin_save.py @@ -174,10 +174,10 @@ loading_task_id=_task_id, ) - # mock scheduler and services + # mock scheduler and archive _mock_scheduler(mocker) - mock_service = mocker.patch("swh.web.common.origin_save.service") - mock_service.lookup_origin.return_value = {"url": _origin_url} + mock_archive = mocker.patch("swh.web.common.origin_save.archive") + mock_archive.lookup_origin.return_value = {"url": _origin_url} mock_get_origin_visits = mocker.patch( "swh.web.common.origin_save.get_origin_visits" ) @@ -241,10 +241,10 @@ loading_task_id=_task_id, ) - # mock scheduler and services + # mock scheduler and archive _mock_scheduler(mocker, task_status="next_run_scheduled") - mock_service = mocker.patch("swh.web.common.origin_save.service") - mock_service.lookup_origin.return_value = {"url": _origin_url} + mock_archive = mocker.patch("swh.web.common.origin_save.archive") + mock_archive.lookup_origin.return_value = {"url": _origin_url} mock_get_origin_visits = mocker.patch( "swh.web.common.origin_save.get_origin_visits" ) diff --git a/swh/web/tests/common/test_origin_visits.py b/swh/web/tests/common/test_origin_visits.py --- 
a/swh/web/tests/common/test_origin_visits.py +++ b/swh/web/tests/common/test_origin_visits.py @@ -23,8 +23,8 @@ @given(new_snapshots(3)) def test_get_origin_visits(mocker, snapshots): - mock_service = mocker.patch("swh.web.common.service") - mock_service.MAX_LIMIT = 2 + mock_archive = mocker.patch("swh.web.common.archive") + mock_archive.MAX_LIMIT = 2 def _lookup_origin_visits(*args, **kwargs): if kwargs["last_visit"] is None: @@ -55,7 +55,7 @@ } ] - mock_service.lookup_origin_visits.side_effect = _lookup_origin_visits + mock_archive.lookup_origin_visits.side_effect = _lookup_origin_visits origin_info = { "url": "https://github.com/foo/bar", diff --git a/swh/web/tests/data.py b/swh/web/tests/data.py --- a/swh/web/tests/data.py +++ b/swh/web/tests/data.py @@ -29,7 +29,7 @@ get_mimetype_and_encoding_for_content, prepare_content_for_display, ) -from swh.web.common import service +from swh.web.common import archive # Module used to initialize data that will be provided as tests input @@ -167,13 +167,13 @@ # Load git repositories from archives for origin in _TEST_ORIGINS: - for i, archive in enumerate(origin["archives"]): + for i, archive_ in enumerate(origin["archives"]): if i > 0: # ensure visit dates will be different when simulating # multiple visits of an origin time.sleep(1) origin_repo_archive = os.path.join( - os.path.dirname(__file__), "resources/repos/%s" % archive + os.path.dirname(__file__), "resources/repos/%s" % archive_ ) loader = GitLoaderFromArchive( origin["url"], @@ -362,6 +362,6 @@ {"storage": storage, "indexer_storage": idx_storage, "search": search,} ) - service.storage = storage - service.idx_storage = idx_storage - service.search = search + archive.storage = storage + archive.idx_storage = idx_storage + archive.search = search diff --git a/swh/web/tests/misc/test_badges.py b/swh/web/tests/misc/test_badges.py --- a/swh/web/tests/misc/test_badges.py +++ b/swh/web/tests/misc/test_badges.py @@ -15,7 +15,7 @@ SNAPSHOT, swhid, ) -from swh.web.common 
import service +from swh.web.common import archive from swh.web.common.identifiers import resolve_swhid from swh.web.common.utils import reverse from swh.web.misc.badges import _badge_config, _get_logo_data @@ -165,7 +165,7 @@ text = swhid(object_type, object_id) link = resolve_swhid(text)["browse_url"] if object_type == RELEASE: - release = service.lookup_release(object_id) + release = archive.lookup_release(object_id) text = release["name"] elif error == "invalid id": text = "error"