
diff --git a/swh/storage/cassandra/storage.py b/swh/storage/cassandra/storage.py
index e79ddf06..76db6540 100644
--- a/swh/storage/cassandra/storage.py
+++ b/swh/storage/cassandra/storage.py
@@ -1,1298 +1,1305 @@
# Copyright (C) 2019-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import base64
import datetime
import itertools
import json
import random
import re
from typing import Any, Callable, Dict, List, Iterable, Optional, Set, Tuple, Union
import attr
from swh.core.api.serializers import msgpack_loads, msgpack_dumps
from swh.model.identifiers import parse_swhid, SWHID
from swh.model.hashutil import DEFAULT_ALGORITHMS
from swh.model.model import (
Revision,
Release,
Directory,
DirectoryEntry,
Content,
SkippedContent,
OriginVisit,
OriginVisitStatus,
Snapshot,
SnapshotBranch,
TargetType,
Origin,
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
MetadataTargetType,
RawExtrinsicMetadata,
Sha1Git,
)
from swh.storage.interface import (
ListOrder,
PagedResult,
PartialBranches,
Sha1,
VISIT_STATUSES,
)
from swh.storage.objstorage import ObjStorage
from swh.storage.writer import JournalWriter
from swh.storage.utils import map_optional, now
from ..exc import StorageArgumentException, HashCollision
from .common import TOKEN_BEGIN, TOKEN_END, hash_url, remove_keys
from . import converters
from .cql import CqlRunner
from .schema import HASH_ALGORITHMS
from .model import (
ContentRow,
DirectoryEntryRow,
DirectoryRow,
MetadataAuthorityRow,
MetadataFetcherRow,
OriginRow,
OriginVisitRow,
OriginVisitStatusRow,
RawExtrinsicMetadataRow,
RevisionParentRow,
SkippedContentRow,
SnapshotBranchRow,
SnapshotRow,
)
# Max block size of contents to return
BULK_BLOCK_CONTENT_LEN_MAX = 10000
class CassandraStorage:
def __init__(self, hosts, keyspace, objstorage, port=9042, journal_writer=None):
self._cql_runner: CqlRunner = CqlRunner(hosts, keyspace, port)
self.journal_writer: JournalWriter = JournalWriter(journal_writer)
self.objstorage: ObjStorage = ObjStorage(objstorage)
def check_config(self, *, check_write: bool) -> bool:
self._cql_runner.check_read()
return True
def _content_get_from_hash(self, algo, hash_) -> Iterable:
"""From the name of a hash algorithm and a value of that hash,
looks up the "hash -> token" secondary table (content_by_{algo})
to get tokens.
Then, looks up the main table (content) to get all contents with
that token, and filters out contents whose hash doesn't match."""
found_tokens = self._cql_runner.content_get_tokens_from_single_hash(algo, hash_)
for token in found_tokens:
assert isinstance(token, int), found_tokens
# Query the main table ('content').
res = self._cql_runner.content_get_from_token(token)
for row in res:
# re-check the hash (in case of murmur3 collision)
if getattr(row, algo) == hash_:
yield row
def _content_add(self, contents: List[Content], with_data: bool) -> Dict:
# Filter-out content already in the database.
contents = [
c for c in contents if not self._cql_runner.content_get_from_pk(c.to_dict())
]
self.journal_writer.content_add(contents)
if with_data:
# First insert to the objstorage, if the endpoint is
# `content_add` (as opposed to `content_add_metadata`).
# TODO: this should probably be done concurrently with inserting
# into the index tables (but still before the main table, so an
# entry is only added to the main table after everything else was
# successfully inserted).
summary = self.objstorage.content_add(
c for c in contents if c.status != "absent"
)
content_add_bytes = summary["content:add:bytes"]
content_add = 0
for content in contents:
content_add += 1
# Check for sha1 or sha1_git collisions. This test is not atomic
# with the insertion, so it won't detect a collision if both
# contents are inserted at the same time, but it's good enough.
#
# The proper way to do it would probably be a BATCH, but this
# would be inefficient because of the number of partitions we
# need to affect (len(HASH_ALGORITHMS)+1, which is currently 5)
for algo in {"sha1", "sha1_git"}:
collisions = []
# Get tokens of 'content' rows with the same value for
# sha1/sha1_git
rows = self._content_get_from_hash(algo, content.get_hash(algo))
for row in rows:
if getattr(row, algo) != content.get_hash(algo):
# collision on the token (partition key); ignore this
# row
continue
for other_algo in HASH_ALGORITHMS:
if getattr(row, other_algo) != content.get_hash(other_algo):
# This hash didn't match; record the row as a collision.
collisions.append(
{k: getattr(row, k) for k in HASH_ALGORITHMS}
)
if collisions:
collisions.append(content.hashes())
raise HashCollision(algo, content.get_hash(algo), collisions)
(token, insertion_finalizer) = self._cql_runner.content_add_prepare(
ContentRow(**remove_keys(content.to_dict(), ("data",)))
)
# Then add to index tables
for algo in HASH_ALGORITHMS:
self._cql_runner.content_index_add_one(algo, content, token)
# Then to the main table
insertion_finalizer()
summary = {
"content:add": content_add,
}
if with_data:
summary["content:add:bytes"] = content_add_bytes
return summary
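# Illustrative sketch (not part of the change under review): how a caller
# can surface the non-atomic collision check above. `storage` is assumed
# to be an already-configured CassandraStorage instance.
#
#     from swh.storage.exc import HashCollision
#
#     try:
#         storage.content_add([content])
#     except HashCollision as e:
#         # HashCollision is raised with (algo, hash, colliding_hashes),
#         # mirroring the raise in _content_add above.
#         algo, hash_id, colliding_hashes = e.args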
def content_add(self, content: List[Content]) -> Dict:
contents = [attr.evolve(c, ctime=now()) for c in content]
return self._content_add(list(contents), with_data=True)
def content_update(
self, contents: List[Dict[str, Any]], keys: List[str] = []
) -> None:
raise NotImplementedError(
"content_update is not supported by the Cassandra backend"
)
def content_add_metadata(self, content: List[Content]) -> Dict:
return self._content_add(content, with_data=False)
def content_get_data(self, content: Sha1) -> Optional[bytes]:
# FIXME: Make this method support slicing the `data`
return self.objstorage.content_get(content)
def content_get_partition(
self,
partition_id: int,
nb_partitions: int,
page_token: Optional[str] = None,
limit: int = 1000,
) -> PagedResult[Content]:
if limit is None:
raise StorageArgumentException("limit should not be None")
# Compute start and end of the range of tokens covered by the
# requested partition
partition_size = (TOKEN_END - TOKEN_BEGIN) // nb_partitions
range_start = TOKEN_BEGIN + partition_id * partition_size
range_end = TOKEN_BEGIN + (partition_id + 1) * partition_size
# offset the range start according to the `page_token`.
if page_token is not None:
if not (range_start <= int(page_token) <= range_end):
raise StorageArgumentException("Invalid page_token.")
range_start = int(page_token)
next_page_token: Optional[str] = None
rows = self._cql_runner.content_get_token_range(
range_start, range_end, limit + 1
)
contents = []
for counter, (tok, row) in enumerate(rows):
if row.status == "absent":
continue
row_d = row.to_dict()
if counter >= limit:
next_page_token = str(tok)
break
contents.append(Content(**row_d))
assert len(contents) <= limit
return PagedResult(results=contents, next_page_token=next_page_token)
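# Usage sketch (illustrative only): drain a single partition by following
# page tokens until exhaustion. `storage` is assumed to be a configured
# CassandraStorage instance.
#
#     page_token = None
#     while True:
#         page = storage.content_get_partition(
#             partition_id=0, nb_partitions=16, page_token=page_token
#         )
#         for content in page.results:
#             ...  # process each Content in the partition
#         if page.next_page_token is None:
#             break
#         page_token = page.next_page_token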
def content_get(self, contents: List[Sha1]) -> List[Optional[Content]]:
contents_by_sha1: Dict[Sha1, Optional[Content]] = {}
for sha1 in contents:
# Get all (sha1, sha1_git, sha256, blake2s256) whose sha1
# matches the argument, from the index table ('content_by_sha1')
for row in self._content_get_from_hash("sha1", sha1):
row_d = row.to_dict()
row_d.pop("ctime")
content = Content(**row_d)
contents_by_sha1[content.sha1] = content
return [contents_by_sha1.get(sha1) for sha1 in contents]
def content_find(self, content: Dict[str, Any]) -> List[Content]:
# Find an algorithm that is common to all the requested contents.
# It will be used to do an initial filtering efficiently.
filter_algos = list(set(content).intersection(HASH_ALGORITHMS))
if not filter_algos:
raise StorageArgumentException(
"content keys must contain at least one "
f"of: {', '.join(sorted(HASH_ALGORITHMS))}"
)
common_algo = filter_algos[0]
results = []
rows = self._content_get_from_hash(common_algo, content[common_algo])
for row in rows:
# Re-check all the hashes, in case of collisions (either of the
# hash of the partition key, or the hashes in it)
for algo in HASH_ALGORITHMS:
if content.get(algo) and getattr(row, algo) != content[algo]:
# This hash didn't match; discard the row.
break
else:
# All hashes match, keep this row.
row_d = row.to_dict()
row_d["ctime"] = row.ctime.replace(tzinfo=datetime.timezone.utc)
results.append(Content(**row_d))
return results
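# Illustrative sketch: content_find accepts any subset of the supported
# hashes; one common algorithm drives the index lookup and the remaining
# hashes are re-checked python-side. The hash value below is hypothetical.
#
#     matches = storage.content_find(
#         {"sha1": bytes.fromhex("34973274ccef6ab4dfaaf86599792fa9c3fe4689")}
#     )
#     for match in matches:
#         assert isinstance(match, Content)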
def content_missing(
self, contents: List[Dict[str, Any]], key_hash: str = "sha1"
) -> Iterable[bytes]:
if key_hash not in DEFAULT_ALGORITHMS:
raise StorageArgumentException(
"key_hash should be one of {','.join(DEFAULT_ALGORITHMS)}"
)
for content in contents:
res = self.content_find(content)
if not res:
yield content[key_hash]
def content_missing_per_sha1(self, contents: List[bytes]) -> Iterable[bytes]:
return self.content_missing([{"sha1": c} for c in contents])
def content_missing_per_sha1_git(
self, contents: List[Sha1Git]
) -> Iterable[Sha1Git]:
return self.content_missing(
[{"sha1_git": c for c in contents}], key_hash="sha1_git"
)
def content_get_random(self) -> Sha1Git:
content = self._cql_runner.content_get_random()
assert content, "Could not find any content"
return content.sha1_git
def _skipped_content_add(self, contents: List[SkippedContent]) -> Dict:
# Filter-out content already in the database.
contents = [
c
for c in contents
if not self._cql_runner.skipped_content_get_from_pk(c.to_dict())
]
self.journal_writer.skipped_content_add(contents)
for content in contents:
# Compute token of the row in the main table
(token, insertion_finalizer) = self._cql_runner.skipped_content_add_prepare(
SkippedContentRow.from_dict({"origin": None, **content.to_dict()})
)
# Then add to index tables
for algo in HASH_ALGORITHMS:
self._cql_runner.skipped_content_index_add_one(algo, content, token)
# Then to the main table
insertion_finalizer()
return {"skipped_content:add": len(contents)}
def skipped_content_add(self, content: List[SkippedContent]) -> Dict:
contents = [attr.evolve(c, ctime=now()) for c in content]
return self._skipped_content_add(contents)
def skipped_content_missing(
self, contents: List[Dict[str, Any]]
) -> Iterable[Dict[str, Any]]:
for content in contents:
if not self._cql_runner.skipped_content_get_from_pk(content):
yield {algo: content[algo] for algo in DEFAULT_ALGORITHMS}
def directory_add(self, directories: List[Directory]) -> Dict:
# Filter out directories that are already inserted.
missing = self.directory_missing([dir_.id for dir_ in directories])
directories = [dir_ for dir_ in directories if dir_.id in missing]
self.journal_writer.directory_add(directories)
for directory in directories:
# Add directory entries to the 'directory_entry' table
for entry in directory.entries:
self._cql_runner.directory_entry_add_one(
DirectoryEntryRow(directory_id=directory.id, **entry.to_dict())
)
# Add the directory *after* adding all the entries, so someone
# calling snapshot_get_branch in the meantime won't end up
# with half the entries.
self._cql_runner.directory_add_one(DirectoryRow(id=directory.id))
return {"directory:add": len(directories)}
def directory_missing(self, directories: List[Sha1Git]) -> Iterable[Sha1Git]:
return self._cql_runner.directory_missing(directories)
def _join_dentry_to_content(self, dentry: DirectoryEntry) -> Dict[str, Any]:
keys = (
"status",
"sha1",
"sha1_git",
"sha256",
"length",
)
ret = dict.fromkeys(keys)
ret.update(dentry.to_dict())
if ret["type"] == "file":
contents = self.content_find({"sha1_git": ret["target"]})
if contents:
content = contents[0]
for key in keys:
ret[key] = getattr(content, key)
return ret
def _directory_ls(
self, directory_id: Sha1Git, recursive: bool, prefix: bytes = b""
) -> Iterable[Dict[str, Any]]:
if self.directory_missing([directory_id]):
return
rows = list(self._cql_runner.directory_entry_get([directory_id]))
for row in rows:
entry_d = row.to_dict()
# Build and yield the directory entry dict
del entry_d["directory_id"]
entry = DirectoryEntry.from_dict(entry_d)
ret = self._join_dentry_to_content(entry)
ret["name"] = prefix + ret["name"]
ret["dir_id"] = directory_id
yield ret
if recursive and ret["type"] == "dir":
yield from self._directory_ls(
ret["target"], True, prefix + ret["name"] + b"/"
)
def directory_entry_get_by_path(
self, directory: Sha1Git, paths: List[bytes]
) -> Optional[Dict[str, Any]]:
return self._directory_entry_get_by_path(directory, paths, b"")
def _directory_entry_get_by_path(
self, directory: Sha1Git, paths: List[bytes], prefix: bytes
) -> Optional[Dict[str, Any]]:
if not paths:
return None
contents = list(self.directory_ls(directory))
if not contents:
return None
def _get_entry(entries, name):
"""Finds the entry with the requested name, prepends the
prefix (to get its full path), and returns it.
If no entry has that name, returns None."""
for entry in entries:
if entry["name"] == name:
entry = entry.copy()
entry["name"] = prefix + entry["name"]
return entry
first_item = _get_entry(contents, paths[0])
if len(paths) == 1:
return first_item
if not first_item or first_item["type"] != "dir":
return None
return self._directory_entry_get_by_path(
first_item["target"], paths[1:], prefix + paths[0] + b"/"
)
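# Illustrative sketch: each recursive call peels one path component and
# accumulates the prefix, so the returned entry carries its full path.
# `dir_id` is a hypothetical directory sha1_git.
#
#     entry = storage.directory_entry_get_by_path(
#         dir_id, [b"src", b"main.py"]
#     )
#     # None unless dir_id contains a sub-directory b"src" holding an
#     # entry b"main.py"; otherwise entry["name"] == b"src/main.py"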
def directory_ls(
self, directory: Sha1Git, recursive: bool = False
) -> Iterable[Dict[str, Any]]:
yield from self._directory_ls(directory, recursive)
def directory_get_random(self) -> Sha1Git:
directory = self._cql_runner.directory_get_random()
assert directory, "Could not find any directory"
return directory.id
def revision_add(self, revisions: List[Revision]) -> Dict:
# Filter-out revisions already in the database
missing = self.revision_missing([rev.id for rev in revisions])
revisions = [rev for rev in revisions if rev.id in missing]
self.journal_writer.revision_add(revisions)
for revision in revisions:
revobject = converters.revision_to_db(revision)
if revobject:
# Add parents first
for (rank, parent) in enumerate(revision.parents):
self._cql_runner.revision_parent_add_one(
RevisionParentRow(
id=revobject.id, parent_rank=rank, parent_id=parent
)
)
# Then write the main revision row.
# Writing this after all parents were written ensures that
# read endpoints don't return a partial view while writing
# the parents
self._cql_runner.revision_add_one(revobject)
return {"revision:add": len(revisions)}
def revision_missing(self, revisions: List[Sha1Git]) -> Iterable[Sha1Git]:
return self._cql_runner.revision_missing(revisions)
def revision_get(
self, revisions: List[Sha1Git]
) -> Iterable[Optional[Dict[str, Any]]]:
rows = self._cql_runner.revision_get(revisions)
revs = {}
for row in rows:
# TODO: use a single query to get all parents?
# (it might have lower latency, but requires more code and more
# bandwidth, because revision id would be part of each returned
# row)
parents = tuple(self._cql_runner.revision_parent_get(row.id))
# parent_rank is the clustering key, so results are already
# sorted by rank.
rev = converters.revision_from_db(row, parents=parents)
revs[rev.id] = rev.to_dict()
for rev_id in revisions:
yield revs.get(rev_id)
def _get_parent_revs(
self,
rev_ids: Iterable[Sha1Git],
seen: Set[Sha1Git],
limit: Optional[int],
short: bool,
) -> Union[
Iterable[Dict[str, Any]], Iterable[Tuple[Sha1Git, Tuple[Sha1Git, ...]]],
]:
if limit and len(seen) >= limit:
return
rev_ids = [id_ for id_ in rev_ids if id_ not in seen]
if not rev_ids:
return
seen |= set(rev_ids)
# We need this query, even if short=True, to return consistent
# results (ie. not return only a subset of a revision's parents
# if it is being written)
if short:
ids = self._cql_runner.revision_get_ids(rev_ids)
for id_ in ids:
# TODO: use a single query to get all parents?
# (it might have lower latency, but requires more code and more
# bandwidth, because the revision id would be part of each
# returned row)
parents = tuple(self._cql_runner.revision_parent_get(id_))
# parent_rank is the clustering key, so results are already
# sorted by rank.
yield (id_, parents)
yield from self._get_parent_revs(parents, seen, limit, short)
else:
rows = self._cql_runner.revision_get(rev_ids)
for row in rows:
# TODO: use a single query to get all parents?
# (it might have lower latency, but requires more code and more
# bandwidth, because the revision id would be part of each
# returned row)
parents = tuple(self._cql_runner.revision_parent_get(row.id))
# parent_rank is the clustering key, so results are already
# sorted by rank.
rev = converters.revision_from_db(row, parents=parents)
yield rev.to_dict()
yield from self._get_parent_revs(parents, seen, limit, short)
def revision_log(
self, revisions: List[Sha1Git], limit: Optional[int] = None
) -> Iterable[Optional[Dict[str, Any]]]:
seen: Set[Sha1Git] = set()
yield from self._get_parent_revs(revisions, seen, limit, False)
def revision_shortlog(
self, revisions: List[Sha1Git], limit: Optional[int] = None
) -> Iterable[Optional[Tuple[Sha1Git, Tuple[Sha1Git, ...]]]]:
seen: Set[Sha1Git] = set()
yield from self._get_parent_revs(revisions, seen, limit, True)
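# Illustrative sketch: revision_log walks the ancestry from the given
# roots and yields full revision dicts, while revision_shortlog yields
# only (id, parents) tuples. `root` is a hypothetical revision id.
#
#     for rev in storage.revision_log([root], limit=100):
#         print(rev["id"].hex(), len(rev["parents"]))
#     for rev_id, parents in storage.revision_shortlog([root], limit=100):
#         print(rev_id.hex(), len(parents))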
def revision_get_random(self) -> Sha1Git:
revision = self._cql_runner.revision_get_random()
assert revision, "Could not find any revision"
return revision.id
def release_add(self, releases: List[Release]) -> Dict:
to_add = []
for rel in releases:
if rel not in to_add:
to_add.append(rel)
missing = set(self.release_missing([rel.id for rel in to_add]))
to_add = [rel for rel in to_add if rel.id in missing]
self.journal_writer.release_add(to_add)
for release in to_add:
if release:
self._cql_runner.release_add_one(converters.release_to_db(release))
return {"release:add": len(to_add)}
def release_missing(self, releases: List[Sha1Git]) -> Iterable[Sha1Git]:
return self._cql_runner.release_missing(releases)
def release_get(
self, releases: List[Sha1Git]
) -> Iterable[Optional[Dict[str, Any]]]:
rows = self._cql_runner.release_get(releases)
rels = {}
for row in rows:
release = converters.release_from_db(row)
rels[row.id] = release.to_dict()
for rel_id in releases:
yield rels.get(rel_id)
def release_get_random(self) -> Sha1Git:
release = self._cql_runner.release_get_random()
assert release, "Could not find any release"
return release.id
def snapshot_add(self, snapshots: List[Snapshot]) -> Dict:
missing = self._cql_runner.snapshot_missing([snp.id for snp in snapshots])
snapshots = [snp for snp in snapshots if snp.id in missing]
for snapshot in snapshots:
self.journal_writer.snapshot_add([snapshot])
# Add branches
for (branch_name, branch) in snapshot.branches.items():
if branch is None:
target_type: Optional[str] = None
target: Optional[bytes] = None
else:
target_type = branch.target_type.value
target = branch.target
self._cql_runner.snapshot_branch_add_one(
SnapshotBranchRow(
snapshot_id=snapshot.id,
name=branch_name,
target_type=target_type,
target=target,
)
)
# Add the snapshot *after* adding all the branches, so someone
# calling snapshot_get_branch in the meantime won't end up
# with half the branches.
self._cql_runner.snapshot_add_one(SnapshotRow(id=snapshot.id))
return {"snapshot:add": len(snapshots)}
def snapshot_missing(self, snapshots: List[Sha1Git]) -> Iterable[Sha1Git]:
return self._cql_runner.snapshot_missing(snapshots)
def snapshot_get(self, snapshot_id: Sha1Git) -> Optional[Dict[str, Any]]:
d = self.snapshot_get_branches(snapshot_id)
if d is None:
return None
return {
"id": d["id"],
"branches": {
name: branch.to_dict() if branch else None
for (name, branch) in d["branches"].items()
},
"next_branch": d["next_branch"],
}
def snapshot_count_branches(
self, snapshot_id: Sha1Git
) -> Optional[Dict[Optional[str], int]]:
if self._cql_runner.snapshot_missing([snapshot_id]):
# Makes sure we don't fetch branches for a snapshot that is
# being added.
return None
return self._cql_runner.snapshot_count_branches(snapshot_id)
def snapshot_get_branches(
self,
snapshot_id: Sha1Git,
branches_from: bytes = b"",
branches_count: int = 1000,
target_types: Optional[List[str]] = None,
) -> Optional[PartialBranches]:
if self._cql_runner.snapshot_missing([snapshot_id]):
# Makes sure we don't fetch branches for a snapshot that is
# being added.
return None
branches: List = []
while len(branches) < branches_count + 1:
new_branches = list(
self._cql_runner.snapshot_branch_get(
snapshot_id, branches_from, branches_count + 1
)
)
if not new_branches:
break
branches_from = new_branches[-1].name
new_branches_filtered = new_branches
# Filter by target_type
if target_types:
new_branches_filtered = [
branch
for branch in new_branches_filtered
if branch.target is not None and branch.target_type in target_types
]
branches.extend(new_branches_filtered)
if len(new_branches) < branches_count + 1:
break
if len(branches) > branches_count:
last_branch = branches.pop(-1).name
else:
last_branch = None
return PartialBranches(
id=snapshot_id,
branches={
branch.name: None
if branch.target is None
else SnapshotBranch(
target=branch.target, target_type=TargetType(branch.target_type)
)
for branch in branches
},
next_branch=last_branch,
)
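# Usage sketch (illustrative): page through every branch of a snapshot by
# resuming from next_branch between calls. `snp_id` is hypothetical.
#
#     name = b""
#     while True:
#         partial = storage.snapshot_get_branches(
#             snp_id, branches_from=name, branches_count=1000
#         )
#         assert partial is not None  # None means the snapshot is unknown
#         for branch_name, branch in partial["branches"].items():
#             ...  # branch is a SnapshotBranch, or None if dangling
#         if partial["next_branch"] is None:
#             break
#         name = partial["next_branch"]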
def snapshot_get_random(self) -> Sha1Git:
snapshot = self._cql_runner.snapshot_get_random()
assert snapshot, "Could not find any snapshot"
return snapshot.id
def object_find_by_sha1_git(self, ids: List[Sha1Git]) -> Dict[Sha1Git, List[Dict]]:
results: Dict[Sha1Git, List[Dict]] = {id_: [] for id_ in ids}
missing_ids = set(ids)
# Mind the order, revision is the most likely one for a given ID,
# so we check revisions first.
queries: List[Tuple[str, Callable[[List[Sha1Git]], List[Sha1Git]]]] = [
("revision", self._cql_runner.revision_missing),
("release", self._cql_runner.release_missing),
("content", self._cql_runner.content_missing_by_sha1_git),
("directory", self._cql_runner.directory_missing),
]
for (object_type, query_fn) in queries:
found_ids = missing_ids - set(query_fn(list(missing_ids)))
for sha1_git in found_ids:
results[sha1_git].append(
{"sha1_git": sha1_git, "type": object_type,}
)
missing_ids.remove(sha1_git)
if not missing_ids:
# We found everything, skipping the next queries.
break
return results
def origin_get(self, origins: List[str]) -> Iterable[Optional[Origin]]:
return [self.origin_get_one(origin) for origin in origins]
def origin_get_one(self, origin_url: str) -> Optional[Origin]:
"""Given an origin url, return the origin if it exists, None otherwise
"""
rows = list(self._cql_runner.origin_get_by_url(origin_url))
if rows:
assert len(rows) == 1
return Origin(url=rows[0].url)
else:
return None
def origin_get_by_sha1(self, sha1s: List[bytes]) -> List[Optional[Dict[str, Any]]]:
results = []
for sha1 in sha1s:
rows = list(self._cql_runner.origin_get_by_sha1(sha1))
origin = {"url": rows[0].url} if rows else None
results.append(origin)
return results
def origin_list(
self, page_token: Optional[str] = None, limit: int = 100
) -> PagedResult[Origin]:
# Compute what token to begin the listing from
start_token = TOKEN_BEGIN
if page_token:
start_token = int(page_token)
if not (TOKEN_BEGIN <= start_token <= TOKEN_END):
raise StorageArgumentException("Invalid page_token.")
next_page_token = None
origins = []
# Take one more origin so we can reuse it as the next page token if any
for (tok, row) in self._cql_runner.origin_list(start_token, limit + 1):
origins.append(Origin(url=row.url))
# keep reference of the last id for pagination purposes
last_id = tok
if len(origins) > limit:
# last origin id is the next page token
next_page_token = str(last_id)
# excluding that origin from the result to respect the limit size
origins = origins[:limit]
assert len(origins) <= limit
return PagedResult(results=origins, next_page_token=next_page_token)
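# Illustrative sketch: origin_list pages over the token range; the opaque
# page_token is simply the next start token serialized as a string.
#
#     token = None
#     while True:
#         page = storage.origin_list(page_token=token, limit=100)
#         for origin in page.results:
#             ...  # origin.url
#         if page.next_page_token is None:
#             break
#         token = page.next_page_token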
def origin_search(
self,
url_pattern: str,
page_token: Optional[str] = None,
limit: int = 50,
regexp: bool = False,
with_visit: bool = False,
) -> PagedResult[Origin]:
# TODO: remove this endpoint, swh-search should be used instead.
next_page_token = None
offset = int(page_token) if page_token else 0
origin_rows = [row for row in self._cql_runner.origin_iter_all()]
if regexp:
pat = re.compile(url_pattern)
origin_rows = [row for row in origin_rows if pat.search(row.url)]
else:
origin_rows = [row for row in origin_rows if url_pattern in row.url]
if with_visit:
origin_rows = [row for row in origin_rows if row.next_visit_id > 1]
origins = [Origin(url=row.url) for row in origin_rows]
origins = origins[offset : offset + limit + 1]
if len(origins) > limit:
# next offset
next_page_token = str(offset + limit)
# excluding that origin from the result to respect the limit size
origins = origins[:limit]
assert len(origins) <= limit
return PagedResult(results=origins, next_page_token=next_page_token)
+ def origin_count(
+ self, url_pattern: str, regexp: bool = False, with_visit: bool = False
+ ) -> int:
+ raise NotImplementedError(
+ "The Cassandra backend does not implement origin_count"
+ )
+
def origin_add(self, origins: List[Origin]) -> Dict[str, int]:
to_add = [ori for ori in origins if self.origin_get_one(ori.url) is None]
self.journal_writer.origin_add(to_add)
for origin in to_add:
self._cql_runner.origin_add_one(
OriginRow(sha1=hash_url(origin.url), url=origin.url, next_visit_id=1)
)
return {"origin:add": len(to_add)}
def origin_visit_add(self, visits: List[OriginVisit]) -> Iterable[OriginVisit]:
for visit in visits:
origin = self.origin_get_one(visit.origin)
if not origin: # Cannot add a visit without an origin
raise StorageArgumentException("Unknown origin %s", visit.origin)
all_visits = []
nb_visits = 0
for visit in visits:
nb_visits += 1
if not visit.visit:
visit_id = self._cql_runner.origin_generate_unique_visit_id(
visit.origin
)
visit = attr.evolve(visit, visit=visit_id)
self.journal_writer.origin_visit_add([visit])
self._cql_runner.origin_visit_add_one(OriginVisitRow(**visit.to_dict()))
assert visit.visit is not None
all_visits.append(visit)
self._origin_visit_status_add(
OriginVisitStatus(
origin=visit.origin,
visit=visit.visit,
date=visit.date,
status="created",
snapshot=None,
)
)
return all_visits
def _origin_visit_status_add(self, visit_status: OriginVisitStatus) -> None:
"""Add an origin visit status"""
self.journal_writer.origin_visit_status_add([visit_status])
self._cql_runner.origin_visit_status_add_one(
converters.visit_status_to_row(visit_status)
)
def origin_visit_status_add(self, visit_statuses: List[OriginVisitStatus]) -> None:
# First round to check existence (fail early if any is ko)
for visit_status in visit_statuses:
origin_url = self.origin_get_one(visit_status.origin)
if not origin_url:
raise StorageArgumentException(f"Unknown origin {visit_status.origin}")
for visit_status in visit_statuses:
self._origin_visit_status_add(visit_status)
def _origin_visit_apply_status(
self, visit: Dict[str, Any], visit_status: OriginVisitStatusRow
) -> Dict[str, Any]:
"""Retrieve the latest visit status information for the origin visit.
Then merge it with the visit and return it.
"""
return {
# default to the values in visit
**visit,
# override with the last update
**visit_status.to_dict(),
# visit['origin'] is the URL (via a join), while
# visit_status['origin'] is only an id.
"origin": visit["origin"],
# but keep the date of the creation of the origin visit
"date": visit["date"],
}
def _origin_visit_get_latest_status(self, visit: OriginVisit) -> OriginVisitStatus:
"""Retrieve the latest visit status information for the origin visit object.
"""
assert visit.visit
row = self._cql_runner.origin_visit_status_get_latest(visit.origin, visit.visit)
assert row is not None
visit_status = converters.row_to_visit_status(row)
return attr.evolve(visit_status, origin=visit.origin)
@staticmethod
def _format_origin_visit_row(visit):
return {
**visit.to_dict(),
"origin": visit.origin,
"date": visit.date.replace(tzinfo=datetime.timezone.utc),
}
def origin_visit_get(
self,
origin: str,
page_token: Optional[str] = None,
order: ListOrder = ListOrder.ASC,
limit: int = 10,
) -> PagedResult[OriginVisit]:
if not isinstance(order, ListOrder):
raise StorageArgumentException("order must be a ListOrder value")
if page_token and not isinstance(page_token, str):
raise StorageArgumentException("page_token must be a string.")
next_page_token = None
visit_from = None if page_token is None else int(page_token)
visits: List[OriginVisit] = []
extra_limit = limit + 1
rows = self._cql_runner.origin_visit_get(origin, visit_from, extra_limit, order)
for row in rows:
visits.append(converters.row_to_visit(row))
assert len(visits) <= extra_limit
if len(visits) == extra_limit:
visits = visits[:limit]
next_page_token = str(visits[-1].visit)
return PagedResult(results=visits, next_page_token=next_page_token)
def origin_visit_status_get(
self,
origin: str,
visit: int,
page_token: Optional[str] = None,
order: ListOrder = ListOrder.ASC,
limit: int = 10,
) -> PagedResult[OriginVisitStatus]:
next_page_token = None
date_from = None
if page_token is not None:
date_from = datetime.datetime.fromisoformat(page_token)
# Take one more visit status so we can reuse it as the next page token if any
rows = self._cql_runner.origin_visit_status_get_range(
origin, visit, date_from, limit + 1, order
)
visit_statuses = [converters.row_to_visit_status(row) for row in rows]
if len(visit_statuses) > limit:
# last visit status date is the next page token
next_page_token = str(visit_statuses[-1].date)
# excluding that visit status from the result to respect the limit size
visit_statuses = visit_statuses[:limit]
return PagedResult(results=visit_statuses, next_page_token=next_page_token)
def origin_visit_find_by_date(
self, origin: str, visit_date: datetime.datetime
) -> Optional[OriginVisit]:
# Iterator over all the visits of the origin
# This should be ok for now, as there aren't too many visits
# per origin.
rows = list(self._cql_runner.origin_visit_get_all(origin))
def key(visit):
dt = visit.date.replace(tzinfo=datetime.timezone.utc) - visit_date
return (abs(dt), -visit.visit)
if rows:
return converters.row_to_visit(min(rows, key=key))
return None
def origin_visit_get_by(self, origin: str, visit: int) -> Optional[OriginVisit]:
row = self._cql_runner.origin_visit_get_one(origin, visit)
if row:
return converters.row_to_visit(row)
return None
def origin_visit_get_latest(
self,
origin: str,
type: Optional[str] = None,
allowed_statuses: Optional[List[str]] = None,
require_snapshot: bool = False,
) -> Optional[OriginVisit]:
if allowed_statuses and not set(allowed_statuses).intersection(VISIT_STATUSES):
raise StorageArgumentException(
f"Unknown allowed statuses {','.join(allowed_statuses)}, only "
f"{','.join(VISIT_STATUSES)} authorized"
)
# TODO: Do not fetch all visits
rows = self._cql_runner.origin_visit_get_all(origin)
latest_visit = None
for row in rows:
visit = self._format_origin_visit_row(row)
for status_row in self._cql_runner.origin_visit_status_get(
origin, visit["visit"]
):
updated_visit = self._origin_visit_apply_status(visit, status_row)
if type is not None and updated_visit["type"] != type:
continue
if allowed_statuses and updated_visit["status"] not in allowed_statuses:
continue
if require_snapshot and updated_visit["snapshot"] is None:
continue
# updated_visit is a candidate
if latest_visit is not None:
if updated_visit["date"] < latest_visit["date"]:
continue
if updated_visit["visit"] < latest_visit["visit"]:
continue
latest_visit = updated_visit
if latest_visit is None:
return None
return OriginVisit(
origin=latest_visit["origin"],
visit=latest_visit["visit"],
date=latest_visit["date"],
type=latest_visit["type"],
)
def origin_visit_status_get_latest(
self,
origin_url: str,
visit: int,
allowed_statuses: Optional[List[str]] = None,
require_snapshot: bool = False,
) -> Optional[OriginVisitStatus]:
if allowed_statuses and not set(allowed_statuses).intersection(VISIT_STATUSES):
raise StorageArgumentException(
f"Unknown allowed statuses {','.join(allowed_statuses)}, only "
f"{','.join(VISIT_STATUSES)} authorized"
)
rows = list(self._cql_runner.origin_visit_status_get(origin_url, visit))
# filtering is done python side as we cannot do it server side
if allowed_statuses:
rows = [row for row in rows if row.status in allowed_statuses]
if require_snapshot:
rows = [row for row in rows if row.snapshot is not None]
if not rows:
return None
return converters.row_to_visit_status(rows[0])
def origin_visit_status_get_random(
self, type: str
) -> Optional[Tuple[OriginVisit, OriginVisitStatus]]:
back_in_the_day = now() - datetime.timedelta(weeks=12) # 3 months back
# Random position to start iteration at
start_token = random.randint(TOKEN_BEGIN, TOKEN_END)
# Iterator over all visits, ordered by token(origins) then visit_id
rows = self._cql_runner.origin_visit_iter(start_token)
for row in rows:
visit = converters.row_to_visit(row)
visit_status = self._origin_visit_get_latest_status(visit)
if visit.date > back_in_the_day and visit_status.status == "full":
return visit, visit_status
return None
def stat_counters(self):
rows = self._cql_runner.stat_counters()
keys = (
"content",
"directory",
"origin",
"origin_visit",
"release",
"revision",
"skipped_content",
"snapshot",
)
stats = {key: 0 for key in keys}
stats.update({row.object_type: row.count for row in rows})
return stats
def refresh_stat_counters(self):
pass
def raw_extrinsic_metadata_add(self, metadata: List[RawExtrinsicMetadata]) -> None:
self.journal_writer.raw_extrinsic_metadata_add(metadata)
for metadata_entry in metadata:
if not self._cql_runner.metadata_authority_get(
metadata_entry.authority.type.value, metadata_entry.authority.url
):
raise StorageArgumentException(
f"Unknown authority {metadata_entry.authority}"
)
if not self._cql_runner.metadata_fetcher_get(
metadata_entry.fetcher.name, metadata_entry.fetcher.version
):
raise StorageArgumentException(
f"Unknown fetcher {metadata_entry.fetcher}"
)
try:
row = RawExtrinsicMetadataRow(
type=metadata_entry.type.value,
id=str(metadata_entry.id),
authority_type=metadata_entry.authority.type.value,
authority_url=metadata_entry.authority.url,
discovery_date=metadata_entry.discovery_date,
fetcher_name=metadata_entry.fetcher.name,
fetcher_version=metadata_entry.fetcher.version,
format=metadata_entry.format,
metadata=metadata_entry.metadata,
origin=metadata_entry.origin,
visit=metadata_entry.visit,
snapshot=map_optional(str, metadata_entry.snapshot),
release=map_optional(str, metadata_entry.release),
revision=map_optional(str, metadata_entry.revision),
path=metadata_entry.path,
directory=map_optional(str, metadata_entry.directory),
)
self._cql_runner.raw_extrinsic_metadata_add(row)
except TypeError as e:
raise StorageArgumentException(*e.args)
def raw_extrinsic_metadata_get(
self,
type: MetadataTargetType,
id: Union[str, SWHID],
authority: MetadataAuthority,
after: Optional[datetime.datetime] = None,
page_token: Optional[bytes] = None,
limit: int = 1000,
) -> PagedResult[RawExtrinsicMetadata]:
if type == MetadataTargetType.ORIGIN:
if isinstance(id, SWHID):
raise StorageArgumentException(
f"raw_extrinsic_metadata_get called with type='origin', "
f"but provided id is an SWHID: {id!r}"
)
else:
if not isinstance(id, SWHID):
raise StorageArgumentException(
f"raw_extrinsic_metadata_get called with type!='origin', "
f"but provided id is not an SWHID: {id!r}"
)
if page_token is not None:
(after_date, after_fetcher_name, after_fetcher_url) = msgpack_loads(
base64.b64decode(page_token)
)
if after and after_date < after:
raise StorageArgumentException(
"page_token is inconsistent with the value of 'after'."
)
entries = self._cql_runner.raw_extrinsic_metadata_get_after_date_and_fetcher( # noqa
str(id),
authority.type.value,
authority.url,
after_date,
after_fetcher_name,
after_fetcher_url,
)
elif after is not None:
entries = self._cql_runner.raw_extrinsic_metadata_get_after_date(
str(id), authority.type.value, authority.url, after
)
else:
entries = self._cql_runner.raw_extrinsic_metadata_get(
str(id), authority.type.value, authority.url
)
if limit:
entries = itertools.islice(entries, 0, limit + 1)
results = []
for entry in entries:
discovery_date = entry.discovery_date.replace(tzinfo=datetime.timezone.utc)
assert str(id) == entry.id
result = RawExtrinsicMetadata(
type=MetadataTargetType(entry.type),
id=id,
authority=MetadataAuthority(
type=MetadataAuthorityType(entry.authority_type),
url=entry.authority_url,
),
fetcher=MetadataFetcher(
name=entry.fetcher_name, version=entry.fetcher_version,
),
discovery_date=discovery_date,
format=entry.format,
metadata=entry.metadata,
origin=entry.origin,
visit=entry.visit,
snapshot=map_optional(parse_swhid, entry.snapshot),
release=map_optional(parse_swhid, entry.release),
revision=map_optional(parse_swhid, entry.revision),
path=entry.path,
directory=map_optional(parse_swhid, entry.directory),
)
results.append(result)
if len(results) > limit:
results.pop()
assert len(results) == limit
last_result = results[-1]
next_page_token: Optional[str] = base64.b64encode(
msgpack_dumps(
(
last_result.discovery_date,
last_result.fetcher.name,
last_result.fetcher.version,
)
)
).decode()
else:
next_page_token = None
return PagedResult(next_page_token=next_page_token, results=results,)
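# Usage sketch (illustrative): list the metadata attached to an origin by
# one authority. `authority` is assumed to have been registered through
# metadata_authority_add beforehand.
#
#     result = storage.raw_extrinsic_metadata_get(
#         MetadataTargetType.ORIGIN,
#         "https://example.org/repo.git",
#         authority,
#     )
#     for entry in result.results:
#         print(entry.discovery_date, entry.format, len(entry.metadata))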
def metadata_fetcher_add(self, fetchers: List[MetadataFetcher]) -> None:
self.journal_writer.metadata_fetcher_add(fetchers)
for fetcher in fetchers:
self._cql_runner.metadata_fetcher_add(
MetadataFetcherRow(
name=fetcher.name,
version=fetcher.version,
metadata=json.dumps(map_optional(dict, fetcher.metadata)),
)
)
def metadata_fetcher_get(
self, name: str, version: str
) -> Optional[MetadataFetcher]:
fetcher = self._cql_runner.metadata_fetcher_get(name, version)
if fetcher:
return MetadataFetcher(
name=fetcher.name,
version=fetcher.version,
metadata=json.loads(fetcher.metadata),
)
else:
return None
def metadata_authority_add(self, authorities: List[MetadataAuthority]) -> None:
self.journal_writer.metadata_authority_add(authorities)
for authority in authorities:
self._cql_runner.metadata_authority_add(
MetadataAuthorityRow(
url=authority.url,
type=authority.type.value,
metadata=json.dumps(map_optional(dict, authority.metadata)),
)
)
def metadata_authority_get(
self, type: MetadataAuthorityType, url: str
) -> Optional[MetadataAuthority]:
authority = self._cql_runner.metadata_authority_get(type.value, url)
if authority:
return MetadataAuthority(
type=MetadataAuthorityType(authority.type),
url=authority.url,
metadata=json.loads(authority.metadata),
)
else:
return None
def clear_buffers(self, object_types: Optional[List[str]] = None) -> None:
"""Do nothing
"""
return None
def flush(self, object_types: Optional[List[str]] = None) -> Dict:
return {}
diff --git a/swh/storage/interface.py b/swh/storage/interface.py
index 3cc4ec76..f2f00491 100644
--- a/swh/storage/interface.py
+++ b/swh/storage/interface.py
@@ -1,1205 +1,1206 @@
# Copyright (C) 2015-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
from enum import Enum
from typing import Any, Dict, Iterable, List, Optional, Tuple, TypeVar, Union
-from typing_extensions import TypedDict
+from typing_extensions import Protocol, TypedDict, runtime_checkable
from swh.core.api import remote_api_endpoint
from swh.core.api.classes import PagedResult as CorePagedResult
from swh.model.identifiers import SWHID
from swh.model.model import (
Content,
Directory,
Origin,
OriginVisit,
OriginVisitStatus,
Revision,
Release,
Snapshot,
SkippedContent,
SnapshotBranch,
MetadataAuthority,
MetadataAuthorityType,
MetadataFetcher,
MetadataTargetType,
RawExtrinsicMetadata,
Sha1,
Sha1Git,
)
class ListOrder(Enum):
"""Specifies the order for paginated endpoints returning sorted results."""
ASC = "asc"
DESC = "desc"
class PartialBranches(TypedDict):
"""Type of the dictionary returned by snapshot_get_branches"""
id: Sha1Git
"""Identifier of the snapshot"""
branches: Dict[bytes, Optional[SnapshotBranch]]
"""A dict of branches contained in the snapshot
whose keys are the branches' names"""
next_branch: Optional[bytes]
"""The name of the first branch not returned or :const:`None` if
the snapshot has less than the request number of branches."""
TResult = TypeVar("TResult")
PagedResult = CorePagedResult[TResult, str]
# TODO: Make it an enum (too much impact)
VISIT_STATUSES = ["created", "ongoing", "full", "partial"]
def deprecated(f):
f.deprecated_endpoint = True
return f
-class StorageInterface:
+@runtime_checkable
+class StorageInterface(Protocol):
@remote_api_endpoint("check_config")
def check_config(self, *, check_write: bool) -> bool:
"""Check that the storage is configured and ready to go."""
...
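# Illustrative note on the Protocol change above: @runtime_checkable makes
# isinstance() check a backend structurally (by method presence), so
# concrete storages no longer need to subclass StorageInterface. A minimal
# sketch, with a hypothetical helper:
#
#     from swh.storage.interface import StorageInterface
#
#     def ensure_storage(obj: Any) -> "StorageInterface":
#         if not isinstance(obj, StorageInterface):
#             raise TypeError(f"{obj!r} does not implement StorageInterface")
#         return obj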
@remote_api_endpoint("content/add")
def content_add(self, content: List[Content]) -> Dict:
"""Add content blobs to the storage
Args:
contents (iterable): iterable of dictionaries representing
individual pieces of content to add. Each dictionary has the
following keys:
- data (bytes): the actual content
- length (int): content length
- one key for each checksum algorithm in
:data:`swh.model.hashutil.ALGORITHMS`, mapped to the
corresponding checksum
- status (str): one of visible, hidden
Raises:
The following exceptions can occur:
- HashCollision in case of collision
- Any other exception raised by the db
In case of errors, some of the content may have been stored in
the DB and in the objstorage.
Since additions to both are idempotent, that should not be a problem.
Returns:
Summary dict with the following keys and associated values:
content:add: New contents added
content:add:bytes: Sum of the contents' length data
"""
...
@remote_api_endpoint("content/update")
def content_update(
self, contents: List[Dict[str, Any]], keys: List[str] = []
) -> None:
"""Update content blobs to the storage. Does nothing for unknown
contents or skipped ones.
Args:
content: iterable of dictionaries representing
individual pieces of content to update. Each dictionary has the
following keys:
- data (bytes): the actual content
- length (int): content length (default: -1)
- one key for each checksum algorithm in
:data:`swh.model.hashutil.ALGORITHMS`, mapped to the
corresponding checksum
- status (str): one of visible, hidden, absent
keys (list): List of keys (str) whose values needs an update, e.g.,
new hash column
"""
...
@remote_api_endpoint("content/add_metadata")
def content_add_metadata(self, content: List[Content]) -> Dict:
"""Add content metadata to the storage (like `content_add`, but
without inserting to the objstorage).
Args:
content (iterable): iterable of dictionaries representing
individual pieces of content to add. Each dictionary has the
following keys:
- length (int): content length (default: -1)
- one key for each checksum algorithm in
:data:`swh.model.hashutil.ALGORITHMS`, mapped to the
corresponding checksum
- status (str): one of visible, hidden, absent
- reason (str): if status = absent, the reason why
- origin (int): if status = absent, the origin we saw the
content in
- ctime (datetime): time of insertion in the archive
Returns:
Summary dict with the following key and associated values:
content:add: New contents added
skipped_content:add: New skipped contents (no data) added
"""
...
@remote_api_endpoint("content/data")
def content_get_data(self, content: Sha1) -> Optional[bytes]:
"""Given a content identifier, returns its associated data if any.
Args:
content: sha1 identifier
Returns:
raw content data (bytes)
"""
...
@remote_api_endpoint("content/partition")
def content_get_partition(
self,
partition_id: int,
nb_partitions: int,
page_token: Optional[str] = None,
limit: int = 1000,
) -> PagedResult[Content]:
"""Splits contents into nb_partitions, and returns one of these based on
partition_id (which must be in [0, nb_partitions-1])
There is no guarantee on how the partitioning is done, or the
result order.
Args:
partition_id: index of the partition to fetch
nb_partitions: total number of partitions to split into
page_token: opaque token used for pagination.
limit: Limit result (default to 1000)
Returns:
PagedResult of Content model objects within the partition. If
next_page_token is None, there is no more data to retrieve.
"""
...
@remote_api_endpoint("content/metadata")
def content_get(self, contents: List[Sha1]) -> List[Optional[Content]]:
"""Retrieve content metadata in bulk
Args:
content: List of content identifiers
Returns:
List of Content model objects when they exist, None otherwise.
"""
...
@remote_api_endpoint("content/missing")
def content_missing(
self, contents: List[Dict[str, Any]], key_hash: str = "sha1"
) -> Iterable[bytes]:
"""List content missing from storage
Args:
contents: iterable of dictionaries whose keys are either 'length' or an item
of :data:`swh.model.hashutil.ALGORITHMS`; mapped to the
corresponding checksum (or length).
key_hash: name of the column to use as hash id result (default: 'sha1')
Raises:
StorageArgumentException when key_hash is unknown.
TODO: an exception when we get a hash collision.
Returns:
iterable of missing content ids (as per the `key_hash` column)
"""
...
@remote_api_endpoint("content/missing/sha1")
def content_missing_per_sha1(self, contents: List[bytes]) -> Iterable[bytes]:
"""List content missing from storage based only on sha1.
Args:
contents: List of sha1 to check for absence.
Raises:
TODO: an exception when we get a hash collision.
Returns:
Iterable of missing content ids (sha1)
"""
...
@remote_api_endpoint("content/missing/sha1_git")
def content_missing_per_sha1_git(
self, contents: List[Sha1Git]
) -> Iterable[Sha1Git]:
"""List content missing from storage based only on sha1_git.
Args:
contents (List): An iterable of content id (sha1_git)
Yields:
missing contents sha1_git
"""
...
@remote_api_endpoint("content/present")
def content_find(self, content: Dict[str, Any]) -> List[Content]:
"""Find a content hash in db.
Args:
content: a dictionary representing one content hash, mapping
checksum algorithm names (see swh.model.hashutil.ALGORITHMS) to
checksum values
Raises:
ValueError: in case the key of the dictionary is not sha1, sha1_git
nor sha256.
Returns:
a list of Content objects matching the search criteria if the
content exists; an empty list otherwise.
"""
...
@remote_api_endpoint("content/get_random")
def content_get_random(self) -> Sha1Git:
"""Finds a random content id.
Returns:
a sha1_git
"""
...
@remote_api_endpoint("content/skipped/add")
def skipped_content_add(self, content: List[SkippedContent]) -> Dict:
"""Add contents to the skipped_content list, which contains
(partial) information about content missing from the archive.
Args:
contents (iterable): iterable of dictionaries representing
individual pieces of content to add. Each dictionary has the
following keys:
- length (Optional[int]): content length (default: -1)
- one key for each checksum algorithm in
:data:`swh.model.hashutil.ALGORITHMS`, mapped to the
corresponding checksum; each is optional
- status (str): must be "absent"
- reason (str): the reason why the content is absent
- origin (int): if status = absent, the origin we saw the
content in
Raises:
The following exceptions can occur:
- HashCollision in case of collision
- Any other exception raised by the backend
In case of errors, some content may have been stored in
the DB and in the objstorage.
Since additions to both are idempotent, that should not be a problem.
Returns:
Summary dict with the following key and associated values:
skipped_content:add: New skipped contents (no data) added
"""
...
@remote_api_endpoint("content/skipped/missing")
def skipped_content_missing(
self, contents: List[Dict[str, Any]]
) -> Iterable[Dict[str, Any]]:
"""List skipped contents missing from storage.
Args:
contents: iterable of dictionaries containing the data for each
checksum algorithm.
Returns:
Iterable of missing skipped contents as dict
"""
...
@remote_api_endpoint("directory/add")
def directory_add(self, directories: List[Directory]) -> Dict:
"""Add directories to the storage
Args:
directories (iterable): iterable of dictionaries representing the
individual directories to add. Each dict has the following
keys:
- id (sha1_git): the id of the directory to add
- entries (list): list of dicts for each entry in the
directory. Each dict has the following keys:
- name (bytes)
- type (one of 'file', 'dir', 'rev'): type of the
directory entry (file, directory, revision)
- target (sha1_git): id of the object pointed at by the
directory entry
- perms (int): entry permissions
Returns:
Summary dict of keys with associated count as values:
directory:add: Number of directories actually added
"""
...
@remote_api_endpoint("directory/missing")
def directory_missing(self, directories: List[Sha1Git]) -> Iterable[Sha1Git]:
"""List directories missing from storage.
Args:
directories: list of directory ids
Yields:
missing directory ids
"""
...
@remote_api_endpoint("directory/ls")
def directory_ls(
self, directory: Sha1Git, recursive: bool = False
) -> Iterable[Dict[str, Any]]:
"""List entries for one directory.
If `recursive=True`, names in the path of a dir/file not at the
root are concatenated with a slash (`/`).
Args:
directory: the directory to list entries from.
recursive: if set, list entries recursively from this directory.
Yields:
directory entries for that directory.
"""
...
@remote_api_endpoint("directory/path")
def directory_entry_get_by_path(
self, directory: Sha1Git, paths: List[bytes]
) -> Optional[Dict[str, Any]]:
"""Get the directory entry (either file or dir) from directory with path.
Args:
directory: directory id
paths: path to lookup from the top level directory. From left
(top) to right (bottom).
Returns:
The corresponding directory entry as dict if found, None otherwise.
"""
...
@remote_api_endpoint("directory/get_random")
def directory_get_random(self) -> Sha1Git:
"""Finds a random directory id.
Returns:
a sha1_git
"""
...
@remote_api_endpoint("revision/add")
def revision_add(self, revisions: List[Revision]) -> Dict:
"""Add revisions to the storage
Args:
revisions (List[dict]): iterable of dictionaries representing
the individual revisions to add. Each dict has the following
keys:
- **id** (:class:`sha1_git`): id of the revision to add
- **date** (:class:`dict`): date the revision was written
- **committer_date** (:class:`dict`): date the revision got
added to the origin
- **type** (one of 'git', 'tar'): type of the
revision added
- **directory** (:class:`sha1_git`): the directory the
revision points at
- **message** (:class:`bytes`): the message associated with
the revision
- **author** (:class:`Dict[str, bytes]`): dictionary with
keys: name, fullname, email
- **committer** (:class:`Dict[str, bytes]`): dictionary with
keys: name, fullname, email
- **metadata** (:class:`jsonb`): extra information as
dictionary
- **synthetic** (:class:`bool`): whether the revision is synthetic
(e.g. created from a tarball or a plain directory)
- **parents** (:class:`list[sha1_git]`): the parents of
this revision
date dictionaries have the form defined in :mod:`swh.model`.
Returns:
Summary dict of keys with associated count as values
revision:add: New objects actually stored in db
"""
...
@remote_api_endpoint("revision/missing")
def revision_missing(self, revisions: List[Sha1Git]) -> Iterable[Sha1Git]:
"""List revisions missing from storage
Args:
revisions: revision ids
Yields:
missing revision ids
"""
...
@remote_api_endpoint("revision")
def revision_get(
self, revisions: List[Sha1Git]
) -> Iterable[Optional[Dict[str, Any]]]:
"""Get revisions from storage
Args:
revisions: revision ids
Yields:
revisions as dictionaries (or None if the revision doesn't exist)
"""
...
@remote_api_endpoint("revision/log")
def revision_log(
self, revisions: List[Sha1Git], limit: Optional[int] = None
) -> Iterable[Optional[Dict[str, Any]]]:
"""Fetch revision entry from the given root revisions.
Args:
revisions: array of root revisions to lookup
limit: limitation on the output result. Default to None.
Yields:
revision log entries from the given root revisions
"""
...
@remote_api_endpoint("revision/shortlog")
def revision_shortlog(
self, revisions: List[Sha1Git], limit: Optional[int] = None
) -> Iterable[Optional[Tuple[Sha1Git, Tuple[Sha1Git, ...]]]]:
"""Fetch the shortlog for the given revisions
Args:
revisions: list of root revisions to lookup
limit: depth limitation for the output
Yields:
a list of (id, parents) tuples
"""
...
@remote_api_endpoint("revision/get_random")
def revision_get_random(self) -> Sha1Git:
"""Finds a random revision id.
Returns:
a sha1_git
"""
...
@remote_api_endpoint("release/add")
def release_add(self, releases: List[Release]) -> Dict:
"""Add releases to the storage
Args:
releases (List[dict]): iterable of dictionaries representing
the individual releases to add. Each dict has the following
keys:
- **id** (:class:`sha1_git`): id of the release to add
- **revision** (:class:`sha1_git`): id of the revision the
release points to
- **date** (:class:`dict`): the date the release was made
- **name** (:class:`bytes`): the name of the release
- **comment** (:class:`bytes`): the comment associated with
the release
- **author** (:class:`Dict[str, bytes]`): dictionary with
keys: name, fullname, email
the date dictionary has the form defined in :mod:`swh.model`.
Returns:
Summary dict of keys with associated count as values
release:add: New objects actually stored in db
"""
...
@remote_api_endpoint("release/missing")
def release_missing(self, releases: List[Sha1Git]) -> Iterable[Sha1Git]:
"""List missing release ids from storage
Args:
releases: release ids
Yields:
a list of missing release ids
"""
...
@remote_api_endpoint("release")
def release_get(
self, releases: List[Sha1Git]
) -> Iterable[Optional[Dict[str, Any]]]:
"""Given a list of sha1, return the releases's information
Args:
releases: list of sha1s
Yields:
dicts with the same keys as those given to `release_add`
(or ``None`` if a release does not exist)
"""
...
@remote_api_endpoint("release/get_random")
def release_get_random(self) -> Sha1Git:
"""Finds a random release id.
Returns:
a sha1_git
"""
...
@remote_api_endpoint("snapshot/add")
def snapshot_add(self, snapshots: List[Snapshot]) -> Dict:
"""Add snapshots to the storage.
Args:
snapshot ([dict]): the snapshots to add, containing the
following keys:
- **id** (:class:`bytes`): id of the snapshot
- **branches** (:class:`dict`): branches the snapshot contains,
mapping the branch name (:class:`bytes`) to the branch target,
itself a :class:`dict` (or ``None`` if the branch points to an
unknown object)
- **target_type** (:class:`str`): one of ``content``,
``directory``, ``revision``, ``release``,
``snapshot``, ``alias``
- **target** (:class:`bytes`): identifier of the target
(currently a ``sha1_git`` for all object kinds, or the name
of the target branch for aliases)
Raises:
ValueError: if the origin or visit id does not exist.
Returns:
Summary dict of keys with associated count as values
snapshot:add: Count of objects actually stored in db
"""
...
@remote_api_endpoint("snapshot/missing")
def snapshot_missing(self, snapshots: List[Sha1Git]) -> Iterable[Sha1Git]:
"""List snapshots missing from storage
Args:
snapshots: snapshot ids
Yields:
missing snapshot ids
"""
...
@remote_api_endpoint("snapshot")
def snapshot_get(self, snapshot_id: Sha1Git) -> Optional[Dict[str, Any]]:
"""Get the content, possibly partial, of a snapshot with the given id
The branches of the snapshot are iterated in the lexicographical
order of their names.
.. warning:: At most 1000 branches contained in the snapshot will be
returned for performance reasons. In order to browse the whole
set of branches, the method :meth:`snapshot_get_branches`
should be used instead.
Args:
snapshot_id: snapshot identifier
Returns:
dict: a dict with three keys:
* **id**: identifier of the snapshot
* **branches**: a dict of branches contained in the snapshot
whose keys are the branches' names.
* **next_branch**: the name of the first branch not returned
or :const:`None` if the snapshot has less than 1000
branches.
"""
...
@remote_api_endpoint("snapshot/count_branches")
def snapshot_count_branches(
self, snapshot_id: Sha1Git
) -> Optional[Dict[Optional[str], int]]:
"""Count the number of branches in the snapshot with the given id
Args:
snapshot_id: snapshot identifier
Returns:
A dict whose keys are the target types of branches and whose values
are their corresponding counts
"""
...
@remote_api_endpoint("snapshot/get_branches")
def snapshot_get_branches(
self,
snapshot_id: Sha1Git,
branches_from: bytes = b"",
branches_count: int = 1000,
target_types: Optional[List[str]] = None,
) -> Optional[PartialBranches]:
"""Get the content, possibly partial, of a snapshot with the given id
The branches of the snapshot are iterated in the lexicographical
order of their names.
Args:
snapshot_id: identifier of the snapshot
branches_from: optional parameter used to skip branches
whose name is lexicographically smaller than this value
branches_count: optional parameter used to limit the
number of returned branches
target_types: optional parameter used to filter the
target types of branches to return (possible values that can be
contained in that list are `'content', 'directory',
'revision', 'release', 'snapshot', 'alias'`)
Returns:
dict: None if the snapshot does not exist;
a dict with three keys otherwise:
* **id**: identifier of the snapshot
* **branches**: a dict of branches contained in the snapshot
whose keys are the branches' names.
* **next_branch**: the name of the first branch not returned
or :const:`None` if the snapshot has fewer than
`branches_count` branches after `branches_from`, included.
"""
...
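# Usage sketch (assuming `storage` is a StorageInterface implementation and
# `snapshot_id` an existing snapshot id): walk all branches page by page,
# following `next_branch` until it is None, as the warning on `snapshot_get`
# above recommends.
#
#   branches = {}
#   branches_from = b""
#   while True:
#       partial = storage.snapshot_get_branches(
#           snapshot_id, branches_from=branches_from, branches_count=1000
#       )
#       if partial is None:  # unknown snapshot
#           break
#       branches.update(partial["branches"])
#       if partial["next_branch"] is None:
#           break
#       branches_from = partial["next_branch"]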
@remote_api_endpoint("snapshot/get_random")
def snapshot_get_random(self) -> Sha1Git:
"""Finds a random snapshot id.
Returns:
a sha1_git
"""
...
@remote_api_endpoint("origin/visit/add")
def origin_visit_add(self, visits: List[OriginVisit]) -> Iterable[OriginVisit]:
"""Add visits to storage. If the visits have no id, they will be created and assigned
one. The resulted visits are visits with their visit id set.
Args:
visits: List of OriginVisit objects to add
Raises:
StorageArgumentException if some origin visit references an unknown origin
Returns:
List[OriginVisit] stored
"""
...
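# Usage sketch (assuming `storage` and an already-added `origin_url`): visits
# may be created without an id; the returned objects carry the id assigned by
# the storage.
#
#   visit = OriginVisit(origin=origin_url, date=now(), type="git")
#   [stored_visit] = storage.origin_visit_add([visit])  # now() from swh.storage.utils
#   assert stored_visit.visit is not None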
@remote_api_endpoint("origin/visit_status/add")
def origin_visit_status_add(self, visit_statuses: List[OriginVisitStatus],) -> None:
"""Add origin visit statuses.
If there is already a status for the same origin and visit id at the same
date, the new one will be either dropped or will replace the existing one
(it is unspecified which one of these two behaviors happens).
Args:
visit_statuses: origin visit statuses to add
Raises: StorageArgumentException if the origin of the visit status is unknown
"""
...
@remote_api_endpoint("origin/visit/get")
def origin_visit_get(
self,
origin: str,
page_token: Optional[str] = None,
order: ListOrder = ListOrder.ASC,
limit: int = 10,
) -> PagedResult[OriginVisit]:
"""Retrieve page of OriginVisit information.
Args:
origin: The visited origin
page_token: opaque string used to get the next results of a search
order: Order on visit id fields to list origin visits (default to asc)
limit: Number of visits to return
Raises:
StorageArgumentException if the order is wrong or the page_token type is
mistyped.
Returns: Page of OriginVisit data model objects. If next_page_token is None,
there is no more data to retrieve.
"""
...
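# Usage sketch (assuming `storage` and `origin_url`): drain every visit of an
# origin by following next_page_token across pages.
#
#   all_visits = []
#   page_token = None
#   while True:
#       page = storage.origin_visit_get(origin_url, page_token=page_token)
#       all_visits.extend(page.results)
#       page_token = page.next_page_token
#       if page_token is None:
#           break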
@remote_api_endpoint("origin/visit/find_by_date")
def origin_visit_find_by_date(
self, origin: str, visit_date: datetime.datetime
) -> Optional[OriginVisit]:
"""Retrieves the origin visit whose date is closest to the provided
timestamp.
In case of a tie, the visit with the largest id is selected.
Args:
origin: origin (URL)
visit_date: expected visit date
Returns:
A visit if found, None otherwise
"""
...
@remote_api_endpoint("origin/visit/getby")
def origin_visit_get_by(self, origin: str, visit: int) -> Optional[OriginVisit]:
"""Retrieve origin visit's information.
Args:
origin: origin (URL)
visit: visit id
Returns:
The information on that particular OriginVisit or None if
it does not exist
"""
...
@remote_api_endpoint("origin/visit/get_latest")
def origin_visit_get_latest(
self,
origin: str,
type: Optional[str] = None,
allowed_statuses: Optional[List[str]] = None,
require_snapshot: bool = False,
) -> Optional[OriginVisit]:
"""Get the latest origin visit for the given origin, optionally
looking only for those with one of the given allowed_statuses
or for those with a snapshot.
Args:
origin: origin URL
type: Optional visit type to filter on (e.g git, tar, dsc, svn,
hg, npm, pypi, ...)
allowed_statuses: list of visit statuses considered
to find the latest visit. For instance,
``allowed_statuses=['full']`` will only consider visits that
have successfully run to completion.
require_snapshot: If True, only a visit with a snapshot
will be returned.
Raises:
StorageArgumentException if values for the allowed_statuses parameters
are unknown
Returns:
OriginVisit matching the criteria if found, None otherwise. Note that,
as OriginVisit no longer holds a reference to the visit status or
snapshot, you may want to use origin_visit_status_get_latest for that
information.
"""
...
@remote_api_endpoint("origin/visit_status/get")
def origin_visit_status_get(
self,
origin: str,
visit: int,
page_token: Optional[str] = None,
order: ListOrder = ListOrder.ASC,
limit: int = 10,
) -> PagedResult[OriginVisitStatus]:
"""Retrieve page of OriginVisitStatus information.
Args:
origin: The visited origin
visit: The visit identifier
page_token: opaque string used to get the next results of a search
order: Order on visit status objects to list (default to asc)
limit: Number of visit statuses to return
Returns: Page of OriginVisitStatus data model objects. If next_page_token is
None, there is no more data to retrieve.
"""
...
@remote_api_endpoint("origin/visit_status/get_latest")
def origin_visit_status_get_latest(
self,
origin_url: str,
visit: int,
allowed_statuses: Optional[List[str]] = None,
require_snapshot: bool = False,
) -> Optional[OriginVisitStatus]:
"""Get the latest origin visit status for the given origin visit, optionally
looking only for those with one of the given allowed_statuses or with a
snapshot.
Args:
origin_url: origin URL
visit: visit id
allowed_statuses: list of visit statuses considered to find the latest
visit. Possible values are {created, ongoing, partial, full}. For
instance, ``allowed_statuses=['full']`` will only consider visits that
have successfully run to completion.
require_snapshot: If True, only a visit with a snapshot
will be returned.
Raises:
StorageArgumentException if values for the allowed_statuses parameters
are unknown
Returns:
The OriginVisitStatus matching the criteria
"""
...
@remote_api_endpoint("origin/visit_status/get_random")
def origin_visit_status_get_random(
self, type: str
) -> Optional[Tuple[OriginVisit, OriginVisitStatus]]:
"""Randomly select one successful origin visit with <type>
made in the last 3 months.
Returns:
One random tuple of (OriginVisit, OriginVisitStatus) matching the
selection criteria
"""
...
@remote_api_endpoint("object/find_by_sha1_git")
def object_find_by_sha1_git(self, ids: List[Sha1Git]) -> Dict[Sha1Git, List[Dict]]:
"""Return the objects found with the given ids.
Args:
ids: a list of sha1_gits
Returns:
A dict from id to the list of objects found for that id. Each object
found is itself a dict with keys:
- sha1_git: the input id
- type: the type of object found
"""
...
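# Usage sketch (assuming `storage` plus one known and one unknown sha1_git):
# every input id is a key of the result, mapped to a possibly-empty list.
#
#   result = storage.object_find_by_sha1_git([known_id, unknown_id])
#   assert result[unknown_id] == []
#   assert all(found["sha1_git"] == known_id for found in result[known_id])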
@remote_api_endpoint("origin/get")
def origin_get(self, origins: List[str]) -> Iterable[Optional[Origin]]:
"""Return origins.
Args:
origins: a list of urls to find
Returns:
the list of matching origin model objects; unknown origins are
returned as None at the same index as in the input.
"""
...
@remote_api_endpoint("origin/get_sha1")
def origin_get_by_sha1(self, sha1s: List[bytes]) -> List[Optional[Dict[str, Any]]]:
"""Return origins, identified by the sha1 of their URLs.
Args:
sha1s: a list of sha1s
Returns:
List of origin dicts whose url sha1 matches one of the inputs; None at
the indexes with no match.
"""
...
@remote_api_endpoint("origin/list")
def origin_list(
self, page_token: Optional[str] = None, limit: int = 100
) -> PagedResult[Origin]:
"""Returns the list of origins
Args:
page_token: opaque token used for pagination.
limit: the maximum number of results to return
Returns:
Page of Origin data model objects. If next_page_token is None, there is
no more data to retrieve.
"""
...
@remote_api_endpoint("origin/search")
def origin_search(
self,
url_pattern: str,
page_token: Optional[str] = None,
limit: int = 50,
regexp: bool = False,
with_visit: bool = False,
) -> PagedResult[Origin]:
"""Search for origins whose urls contain a provided string pattern
or match a provided regular expression.
The search is performed in a case-insensitive way.
Args:
url_pattern: the string pattern to search for in origin urls
page_token: opaque token used for pagination
limit: the maximum number of found origins to return
regexp: if True, consider the provided pattern as a regular
expression and return origins whose urls match it
with_visit: if True, filter out origins with no visit
Returns:
PagedResult of Origin
"""
...
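# Usage sketch (assuming `storage`; the pattern is illustrative): substring
# search restricted to origins that have at least one visit.
#
#   page = storage.origin_search("github.com/", limit=50, with_visit=True)
#   urls = [origin.url for origin in page.results]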
@deprecated
@remote_api_endpoint("origin/count")
def origin_count(
self, url_pattern: str, regexp: bool = False, with_visit: bool = False
) -> int:
"""Count origins whose urls contain a provided string pattern
or match a provided regular expression.
The pattern search in origin urls is performed in a case-insensitive
way.
Args:
url_pattern (str): the string pattern to search for in origin urls
regexp (bool): if True, consider the provided pattern as a regular
expression and return origins whose urls match it
with_visit (bool): if True, filter out origins with no visit
Returns:
int: The number of origins matching the search criterion.
"""
...
@remote_api_endpoint("origin/add_multi")
def origin_add(self, origins: List[Origin]) -> Dict[str, int]:
"""Add origins to the storage
Args:
origins: list of Origin model objects to add, each identified by
its url
Returns:
Summary dict of keys with associated count as values
origin:add: Count of objects actually stored in the db
"""
...
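# Usage sketch (assuming `storage`; the url is illustrative): origins are
# identified by their url alone, so re-adding an existing one is a no-op.
#
#   summary = storage.origin_add([Origin(url="https://example.org/repo.git")])
#   assert summary["origin:add"] in (0, 1)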
def stat_counters(self):
"""compute statistics about the number of tuples in various tables
Returns:
dict: a dictionary mapping textual labels (e.g., content) to
integer values (e.g., the number of tuples in table content)
"""
...
def refresh_stat_counters(self):
"""Recomputes the statistics for `stat_counters`."""
...
@remote_api_endpoint("raw_extrinsic_metadata/add")
def raw_extrinsic_metadata_add(self, metadata: List[RawExtrinsicMetadata],) -> None:
"""Add extrinsic metadata on objects (contents, directories, ...).
The authority and fetcher must be known to the storage before
using this endpoint.
If there is already metadata for the same object, authority,
fetcher, and at the same date; the new one will be either dropped or
will replace the existing one
(it is unspecified which one of these two behaviors happens).
Args:
metadata: iterable of RawExtrinsicMetadata objects to be inserted.
"""
...
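# Usage sketch (assuming `storage`; the authority, fetcher and `metadata_obj`
# values are illustrative): register the authority and fetcher first, then
# add the RawExtrinsicMetadata objects that reference them.
#
#   storage.metadata_authority_add([
#       MetadataAuthority(
#           type=MetadataAuthorityType.FORGE,
#           url="https://forge.example.org/",
#           metadata={},
#       )
#   ])
#   storage.metadata_fetcher_add([
#       MetadataFetcher(name="example-loader", version="1.0.0", metadata={})
#   ])
#   storage.raw_extrinsic_metadata_add([metadata_obj])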
@remote_api_endpoint("raw_extrinsic_metadata/get")
def raw_extrinsic_metadata_get(
self,
type: MetadataTargetType,
id: Union[str, SWHID],
authority: MetadataAuthority,
after: Optional[datetime.datetime] = None,
page_token: Optional[bytes] = None,
limit: int = 1000,
) -> PagedResult[RawExtrinsicMetadata]:
"""Retrieve list of all raw_extrinsic_metadata entries for the id
Args:
type: one of the values of swh.model.model.MetadataTargetType
id: a URL if type is 'origin', else a core SWHID
authority: a MetadataAuthority object, identified by its `type` and `url`
after: minimum discovery_date for a result to be returned
page_token: opaque token, used to get the next page of results
limit: maximum number of results to be returned
Returns:
PagedResult of RawExtrinsicMetadata
"""
...
@remote_api_endpoint("metadata_fetcher/add")
def metadata_fetcher_add(self, fetchers: List[MetadataFetcher],) -> None:
"""Add new metadata fetchers to the storage.
Their `name` and `version` together uniquely identify a
fetcher; and `metadata` is an arbitrary dict of JSONable data
with information about this fetcher, which must not be `None`
(but may be empty).
Args:
fetchers: iterable of MetadataFetcher to be inserted
"""
...
@remote_api_endpoint("metadata_fetcher/get")
def metadata_fetcher_get(
self, name: str, version: str
) -> Optional[MetadataFetcher]:
"""Retrieve information about a fetcher
Args:
name: the name of the fetcher
version: version of the fetcher
Returns:
a MetadataFetcher object (with a non-None metadata field) if it is known,
else None.
"""
...
@remote_api_endpoint("metadata_authority/add")
def metadata_authority_add(self, authorities: List[MetadataAuthority]) -> None:
"""Add new metadata authorities to the storage.
Their `type` and `url` together uniquely identify an
authority; and `metadata` is an arbitrary dict of JSONable data
with information about this authority, which must not be `None`
(but may be empty).
Args:
authorities: iterable of MetadataAuthority to be inserted
"""
...
@remote_api_endpoint("metadata_authority/get")
def metadata_authority_get(
self, type: MetadataAuthorityType, url: str
) -> Optional[MetadataAuthority]:
"""Retrieve information about an authority
Args:
type: one of "deposit_client", "forge", or "registry"
url: unique URI identifying the authority
Returns:
a MetadataAuthority object (with a non-None metadata field) if it is known,
else None.
"""
...
@remote_api_endpoint("clear/buffer")
def clear_buffers(self, object_types: Optional[List[str]] = None) -> None:
"""For backend storages (pg, storage, in-memory), this is a noop operation. For proxy
storages (especially filter, buffer), this is an operation which cleans internal
state.
"""
@remote_api_endpoint("flush")
def flush(self, object_types: Optional[List[str]] = None) -> Dict:
"""For backend storages (pg, storage, in-memory), this is expected to be a noop
operation. For proxy storages (especially buffer), this is expected to trigger
actual writes to the backend.
"""
...
diff --git a/swh/storage/tests/storage_tests.py b/swh/storage/tests/storage_tests.py
index 4ba04380..36e02c0f 100644
--- a/swh/storage/tests/storage_tests.py
+++ b/swh/storage/tests/storage_tests.py
@@ -1,3861 +1,3867 @@
# Copyright (C) 2015-2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from collections import defaultdict
import datetime
from datetime import timedelta
import inspect
import itertools
import math
import random
import attr
import pytest
from hypothesis import given, strategies, settings, HealthCheck
from typing import Any, ClassVar, Dict, Iterator, Optional
from swh.model import from_disk
from swh.model.hashutil import hash_to_bytes
from swh.model.identifiers import SWHID
from swh.model.model import (
Content,
Directory,
MetadataTargetType,
Origin,
OriginVisit,
OriginVisitStatus,
Person,
Release,
Revision,
Snapshot,
TargetType,
)
from swh.model.hypothesis_strategies import objects
from swh.storage import get_storage
from swh.storage.common import origin_url_to_sha1 as sha1
from swh.storage.exc import HashCollision, StorageArgumentException
from swh.storage.interface import ListOrder, PagedResult, StorageInterface
from swh.storage.utils import content_hex_hashes, now, round_to_milliseconds
def transform_entries(
storage: StorageInterface, dir_: Directory, *, prefix: bytes = b""
) -> Iterator[Dict[str, Any]]:
"""Iterate through a directory's entries, and yields the items 'directory_ls' is
expected to return; including content metadata for file entries."""
for ent in dir_.entries:
if ent.type == "dir":
yield {
"dir_id": dir_.id,
"type": ent.type,
"target": ent.target,
"name": prefix + ent.name,
"perms": ent.perms,
"status": None,
"sha1": None,
"sha1_git": None,
"sha256": None,
"length": None,
}
elif ent.type == "file":
contents = storage.content_find({"sha1_git": ent.target})
assert contents
ent_dict = contents[0].to_dict()
for key in ["ctime", "blake2s256"]:
ent_dict.pop(key, None)
ent_dict.update(
{
"dir_id": dir_.id,
"type": ent.type,
"target": ent.target,
"name": prefix + ent.name,
"perms": ent.perms,
}
)
yield ent_dict
def assert_contents_ok(
expected_contents, actual_contents, keys_to_check={"sha1", "data"}
):
"""Assert that a given list of contents matches on a given set of keys.
"""
for k in keys_to_check:
expected_list = set([c.get(k) for c in expected_contents])
actual_list = set([c.get(k) for c in actual_contents])
assert actual_list == expected_list, k
class LazyContent(Content):
def with_data(self):
return Content.from_dict({**self.to_dict(), "data": b"42\n"})
class TestStorage:
"""Main class for Storage testing.
This class is used as-is to test local storage (see TestLocalStorage
below) and remote storage (see TestRemoteStorage in
test_remote_storage.py).
We need to have the two classes inherit from this base class
separately to avoid nosetests running the tests from the base
class twice.
"""
maxDiff = None # type: ClassVar[Optional[int]]
def test_types(self, swh_storage_backend_config):
"""Checks all methods of StorageInterface are implemented by this
backend, and that they have the same signature."""
# Create an instance of the protocol (which cannot be instantiated
# directly, so this creates a subclass, then instantiates it)
interface = type("_", (StorageInterface,), {})()
storage = get_storage(**swh_storage_backend_config)
assert "content_add" in dir(interface)
missing_methods = []
for meth_name in dir(interface):
if meth_name.startswith("_"):
continue
interface_meth = getattr(interface, meth_name)
try:
concrete_meth = getattr(storage, meth_name)
except AttributeError:
if not getattr(interface_meth, "deprecated_endpoint", False):
# The backend is missing a (non-deprecated) endpoint
missing_methods.append(meth_name)
continue
expected_signature = inspect.signature(interface_meth)
actual_signature = inspect.signature(concrete_meth)
assert expected_signature == actual_signature, meth_name
assert missing_methods == []
+ # If all the assertions above succeed, then this one should too.
+ # But there's no harm in double-checking.
+ # And we could replace the assertions above by this one, but unlike
+ # the assertions above, it doesn't explain what is missing.
+ assert isinstance(storage, StorageInterface)
+
def test_check_config(self, swh_storage):
assert swh_storage.check_config(check_write=True)
assert swh_storage.check_config(check_write=False)
def test_content_add(self, swh_storage, sample_data):
cont = sample_data.content
insertion_start_time = now()
actual_result = swh_storage.content_add([cont])
insertion_end_time = now()
assert actual_result == {
"content:add": 1,
"content:add:bytes": cont.length,
}
assert swh_storage.content_get_data(cont.sha1) == cont.data
expected_cont = attr.evolve(cont, data=None)
contents = [
obj
for (obj_type, obj) in swh_storage.journal_writer.journal.objects
if obj_type == "content"
]
assert len(contents) == 1
for obj in contents:
assert insertion_start_time <= obj.ctime
assert obj.ctime <= insertion_end_time
assert obj == expected_cont
swh_storage.refresh_stat_counters()
assert swh_storage.stat_counters()["content"] == 1
def test_content_add_from_lazy_content(self, swh_storage, sample_data):
cont = sample_data.content
lazy_content = LazyContent.from_dict(cont.to_dict())
insertion_start_time = now()
actual_result = swh_storage.content_add([lazy_content])
insertion_end_time = now()
assert actual_result == {
"content:add": 1,
"content:add:bytes": cont.length,
}
# the fact that we retrieve the content object from the storage with
# the correct 'data' field ensures it has been 'called'
assert swh_storage.content_get_data(cont.sha1) == cont.data
expected_cont = attr.evolve(lazy_content, data=None, ctime=None)
contents = [
obj
for (obj_type, obj) in swh_storage.journal_writer.journal.objects
if obj_type == "content"
]
assert len(contents) == 1
for obj in contents:
assert insertion_start_time <= obj.ctime
assert obj.ctime <= insertion_end_time
assert attr.evolve(obj, ctime=None).to_dict() == expected_cont.to_dict()
swh_storage.refresh_stat_counters()
assert swh_storage.stat_counters()["content"] == 1
def test_content_get_data_missing(self, swh_storage, sample_data):
cont, cont2 = sample_data.contents[:2]
swh_storage.content_add([cont])
# Query a single missing content
actual_content_data = swh_storage.content_get_data(cont2.sha1)
assert actual_content_data is None
# Check content_get does not abort after finding a missing content
actual_content_data = swh_storage.content_get_data(cont.sha1)
assert actual_content_data == cont.data
actual_content_data = swh_storage.content_get_data(cont2.sha1)
assert actual_content_data is None
def test_content_add_different_input(self, swh_storage, sample_data):
cont, cont2 = sample_data.contents[:2]
actual_result = swh_storage.content_add([cont, cont2])
assert actual_result == {
"content:add": 2,
"content:add:bytes": cont.length + cont2.length,
}
def test_content_add_twice(self, swh_storage, sample_data):
cont, cont2 = sample_data.contents[:2]
actual_result = swh_storage.content_add([cont])
assert actual_result == {
"content:add": 1,
"content:add:bytes": cont.length,
}
assert len(swh_storage.journal_writer.journal.objects) == 1
actual_result = swh_storage.content_add([cont, cont2])
assert actual_result == {
"content:add": 1,
"content:add:bytes": cont2.length,
}
assert 2 <= len(swh_storage.journal_writer.journal.objects) <= 3
assert len(swh_storage.content_find(cont.to_dict())) == 1
assert len(swh_storage.content_find(cont2.to_dict())) == 1
def test_content_add_collision(self, swh_storage, sample_data):
cont1 = sample_data.content
# create (corrupted) content with same sha1{,_git} but != sha256
sha256_array = bytearray(cont1.sha256)
sha256_array[0] += 1
cont1b = attr.evolve(cont1, sha256=bytes(sha256_array))
with pytest.raises(HashCollision) as cm:
swh_storage.content_add([cont1, cont1b])
exc = cm.value
actual_algo = exc.algo
assert actual_algo in ["sha1", "sha1_git"]
actual_id = exc.hash_id
assert actual_id == getattr(cont1, actual_algo).hex()
collisions = exc.args[2]
assert len(collisions) == 2
assert collisions == [
content_hex_hashes(cont1.hashes()),
content_hex_hashes(cont1b.hashes()),
]
assert exc.colliding_content_hashes() == [
cont1.hashes(),
cont1b.hashes(),
]
def test_content_add_duplicate(self, swh_storage, sample_data):
cont = sample_data.content
swh_storage.content_add([cont, cont])
assert swh_storage.content_get_data(cont.sha1) == cont.data
def test_content_update(self, swh_storage, sample_data):
cont1 = sample_data.content
if hasattr(swh_storage, "journal_writer"):
swh_storage.journal_writer.journal = None # TODO, not supported
swh_storage.content_add([cont1])
# alter the sha1_git for example
cont1b = attr.evolve(
cont1, sha1_git=hash_to_bytes("3a60a5275d0333bf13468e8b3dcab90f4046e654")
)
swh_storage.content_update([cont1b.to_dict()], keys=["sha1_git"])
actual_contents = swh_storage.content_get([cont1.sha1])
expected_content = attr.evolve(cont1b, data=None)
assert actual_contents == [expected_content]
def test_content_add_metadata(self, swh_storage, sample_data):
cont = attr.evolve(sample_data.content, data=None, ctime=now())
actual_result = swh_storage.content_add_metadata([cont])
assert actual_result == {
"content:add": 1,
}
expected_cont = cont
assert swh_storage.content_get([cont.sha1]) == [expected_cont]
contents = [
obj
for (obj_type, obj) in swh_storage.journal_writer.journal.objects
if obj_type == "content"
]
assert len(contents) == 1
for obj in contents:
obj = attr.evolve(obj, ctime=None)
assert obj == cont
def test_content_add_metadata_different_input(self, swh_storage, sample_data):
contents = sample_data.contents[:2]
cont = attr.evolve(contents[0], data=None, ctime=now())
cont2 = attr.evolve(contents[1], data=None, ctime=now())
actual_result = swh_storage.content_add_metadata([cont, cont2])
assert actual_result == {
"content:add": 2,
}
def test_content_add_metadata_collision(self, swh_storage, sample_data):
cont1 = attr.evolve(sample_data.content, data=None, ctime=now())
# create (corrupted) content with same sha1{,_git} but != sha256
sha1_git_array = bytearray(cont1.sha256)
sha1_git_array[0] += 1
cont1b = attr.evolve(cont1, sha256=bytes(sha1_git_array))
with pytest.raises(HashCollision) as cm:
swh_storage.content_add_metadata([cont1, cont1b])
exc = cm.value
actual_algo = exc.algo
assert actual_algo in ["sha1", "sha1_git", "blake2s256"]
actual_id = exc.hash_id
assert actual_id == getattr(cont1, actual_algo).hex()
collisions = exc.args[2]
assert len(collisions) == 2
assert collisions == [
content_hex_hashes(cont1.hashes()),
content_hex_hashes(cont1b.hashes()),
]
assert exc.colliding_content_hashes() == [
cont1.hashes(),
cont1b.hashes(),
]
def test_skipped_content_add(self, swh_storage, sample_data):
contents = sample_data.skipped_contents[:2]
cont = contents[0]
cont2 = attr.evolve(contents[1], blake2s256=None)
contents_dict = [c.to_dict() for c in [cont, cont2]]
missing = list(swh_storage.skipped_content_missing(contents_dict))
assert missing == [cont.hashes(), cont2.hashes()]
actual_result = swh_storage.skipped_content_add([cont, cont, cont2])
assert 2 <= actual_result.pop("skipped_content:add") <= 3
assert actual_result == {}
missing = list(swh_storage.skipped_content_missing(contents_dict))
assert missing == []
def test_skipped_content_add_missing_hashes(self, swh_storage, sample_data):
cont, cont2 = [
attr.evolve(c, sha1_git=None) for c in sample_data.skipped_contents[:2]
]
contents_dict = [c.to_dict() for c in [cont, cont2]]
missing = list(swh_storage.skipped_content_missing(contents_dict))
assert len(missing) == 2
actual_result = swh_storage.skipped_content_add([cont, cont, cont2])
assert 2 <= actual_result.pop("skipped_content:add") <= 3
assert actual_result == {}
missing = list(swh_storage.skipped_content_missing(contents_dict))
assert missing == []
def test_skipped_content_missing_partial_hash(self, swh_storage, sample_data):
cont = sample_data.skipped_content
cont2 = attr.evolve(cont, sha1_git=None)
contents_dict = [c.to_dict() for c in [cont, cont2]]
missing = list(swh_storage.skipped_content_missing(contents_dict))
assert len(missing) == 2
actual_result = swh_storage.skipped_content_add([cont])
assert actual_result.pop("skipped_content:add") == 1
assert actual_result == {}
missing = list(swh_storage.skipped_content_missing(contents_dict))
assert missing == [cont2.hashes()]
@pytest.mark.property_based
@settings(deadline=None) # this test is very slow
@given(
strategies.sets(
elements=strategies.sampled_from(["sha256", "sha1_git", "blake2s256"]),
min_size=0,
)
)
def test_content_missing(self, swh_storage, sample_data, algos):
algos |= {"sha1"}
content, missing_content = [sample_data.content2, sample_data.skipped_content]
swh_storage.content_add([content])
test_contents = [content.to_dict()]
missing_per_hash = defaultdict(list)
for i in range(256):
test_content = missing_content.to_dict()
for hash in algos:
test_content[hash] = bytes([i]) + test_content[hash][1:]
missing_per_hash[hash].append(test_content[hash])
test_contents.append(test_content)
assert set(swh_storage.content_missing(test_contents)) == set(
missing_per_hash["sha1"]
)
for hash in algos:
assert set(
swh_storage.content_missing(test_contents, key_hash=hash)
) == set(missing_per_hash[hash])
@pytest.mark.property_based
@given(
strategies.sets(
elements=strategies.sampled_from(["sha256", "sha1_git", "blake2s256"]),
min_size=0,
)
)
def test_content_missing_unknown_algo(self, swh_storage, sample_data, algos):
algos |= {"sha1"}
content, missing_content = [sample_data.content2, sample_data.skipped_content]
swh_storage.content_add([content])
test_contents = [content.to_dict()]
missing_per_hash = defaultdict(list)
for i in range(16):
test_content = missing_content.to_dict()
for hash in algos:
test_content[hash] = bytes([i]) + test_content[hash][1:]
missing_per_hash[hash].append(test_content[hash])
test_content["nonexisting_algo"] = b"\x00"
test_contents.append(test_content)
assert set(swh_storage.content_missing(test_contents)) == set(
missing_per_hash["sha1"]
)
for hash in algos:
assert set(
swh_storage.content_missing(test_contents, key_hash=hash)
) == set(missing_per_hash[hash])
def test_content_missing_per_sha1(self, swh_storage, sample_data):
# given
cont = sample_data.content
cont2 = sample_data.content2
missing_cont = sample_data.skipped_content
missing_cont2 = sample_data.skipped_content2
swh_storage.content_add([cont, cont2])
# when
gen = swh_storage.content_missing_per_sha1(
[cont.sha1, missing_cont.sha1, cont2.sha1, missing_cont2.sha1]
)
# then
assert list(gen) == [missing_cont.sha1, missing_cont2.sha1]
def test_content_missing_per_sha1_git(self, swh_storage, sample_data):
cont, cont2 = sample_data.contents[:2]
missing_cont = sample_data.skipped_content
swh_storage.content_add([cont, cont2])
contents = [cont.sha1_git, cont2.sha1_git, missing_cont.sha1_git]
missing_contents = swh_storage.content_missing_per_sha1_git(contents)
assert list(missing_contents) == [missing_cont.sha1_git]
def test_content_get_partition(self, swh_storage, swh_contents):
"""content_get_partition paginates results if limit exceeded"""
expected_contents = [
attr.evolve(c, data=None) for c in swh_contents if c.status != "absent"
]
actual_contents = []
for i in range(16):
actual_result = swh_storage.content_get_partition(i, 16)
assert actual_result.next_page_token is None
actual_contents.extend(actual_result.results)
assert len(actual_contents) == len(expected_contents)
for content in actual_contents:
assert content in expected_contents
def test_content_get_partition_full(self, swh_storage, swh_contents):
"""content_get_partition for a single partition returns all available contents
"""
expected_contents = [
attr.evolve(c, data=None) for c in swh_contents if c.status != "absent"
]
actual_result = swh_storage.content_get_partition(0, 1)
assert actual_result.next_page_token is None
actual_contents = actual_result.results
assert len(actual_contents) == len(expected_contents)
for content in actual_contents:
assert content in expected_contents
def test_content_get_partition_empty(self, swh_storage, swh_contents):
"""content_get_partition when at least one of the partitions is empty"""
expected_contents = {
cont.sha1 for cont in swh_contents if cont.status != "absent"
}
# nb_partitions = smallest power of 2 such that at least one of
# the partitions is empty
nb_partitions = 1 << math.floor(math.log2(len(swh_contents)) + 1)
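# e.g. if len(swh_contents) == 10: floor(log2(10)) + 1 == 4, so
# nb_partitions == 16 > 10 and at least one partition must be empty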
seen_sha1s = []
for i in range(nb_partitions):
actual_result = swh_storage.content_get_partition(
i, nb_partitions, limit=len(swh_contents) + 1
)
for content in actual_result.results:
seen_sha1s.append(content.sha1)
# Limit is higher than the max number of results
assert actual_result.next_page_token is None
assert set(seen_sha1s) == expected_contents
def test_content_get_partition_limit_none(self, swh_storage):
"""content_get_partition call with wrong limit input should fail"""
with pytest.raises(StorageArgumentException, match="limit should not be None"):
swh_storage.content_get_partition(1, 16, limit=None)
def test_content_get_partition_pagination_generate(self, swh_storage, swh_contents):
"""content_get_partition returns contents within range provided"""
expected_contents = [
attr.evolve(c, data=None) for c in swh_contents if c.status != "absent"
]
# retrieve contents
actual_contents = []
for i in range(4):
page_token = None
while True:
actual_result = swh_storage.content_get_partition(
i, 4, limit=3, page_token=page_token
)
actual_contents.extend(actual_result.results)
page_token = actual_result.next_page_token
if page_token is None:
break
assert len(actual_contents) == len(expected_contents)
for content in actual_contents:
assert content in expected_contents
def test_content_get(self, swh_storage, sample_data):
cont1, cont2 = sample_data.contents[:2]
swh_storage.content_add([cont1, cont2])
actual_contents = swh_storage.content_get([cont1.sha1, cont2.sha1])
# we only retrieve the metadata so no data nor ctime within
expected_contents = [attr.evolve(c, data=None) for c in [cont1, cont2]]
assert actual_contents == expected_contents
def test_content_get_missing_sha1(self, swh_storage, sample_data):
cont1, cont2 = sample_data.contents[:2]
assert cont1.sha1 != cont2.sha1
missing_cont = sample_data.skipped_content
swh_storage.content_add([cont1, cont2])
actual_contents = swh_storage.content_get(
[cont1.sha1, cont2.sha1, missing_cont.sha1]
)
expected_contents = [
attr.evolve(c, data=None) if c else None for c in [cont1, cont2, None]
]
assert actual_contents == expected_contents
def test_content_get_random(self, swh_storage, sample_data):
cont, cont2, cont3 = sample_data.contents[:3]
swh_storage.content_add([cont, cont2, cont3])
assert swh_storage.content_get_random() in {
cont.sha1_git,
cont2.sha1_git,
cont3.sha1_git,
}
def test_directory_add(self, swh_storage, sample_data):
content = sample_data.content
directory = sample_data.directories[1]
assert directory.entries[0].target == content.sha1_git
swh_storage.content_add([content])
init_missing = list(swh_storage.directory_missing([directory.id]))
assert [directory.id] == init_missing
actual_result = swh_storage.directory_add([directory])
assert actual_result == {"directory:add": 1}
assert ("directory", directory) in list(
swh_storage.journal_writer.journal.objects
)
actual_data = list(swh_storage.directory_ls(directory.id))
expected_data = list(transform_entries(swh_storage, directory))
for data in actual_data:
assert data in expected_data
after_missing = list(swh_storage.directory_missing([directory.id]))
assert after_missing == []
swh_storage.refresh_stat_counters()
assert swh_storage.stat_counters()["directory"] == 1
def test_directory_add_twice(self, swh_storage, sample_data):
directory = sample_data.directories[1]
actual_result = swh_storage.directory_add([directory])
assert actual_result == {"directory:add": 1}
assert list(swh_storage.journal_writer.journal.objects) == [
("directory", directory)
]
actual_result = swh_storage.directory_add([directory])
assert actual_result == {"directory:add": 0}
assert list(swh_storage.journal_writer.journal.objects) == [
("directory", directory)
]
def test_directory_ls_recursive(self, swh_storage, sample_data):
# create consistent dataset regarding the directories we want to list
content, content2 = sample_data.contents[:2]
swh_storage.content_add([content, content2])
dir1, dir2, dir3 = sample_data.directories[:3]
dir_ids = [d.id for d in [dir1, dir2, dir3]]
init_missing = list(swh_storage.directory_missing(dir_ids))
assert init_missing == dir_ids
actual_result = swh_storage.directory_add([dir1, dir2, dir3])
assert actual_result == {"directory:add": 3}
# List directory containing one file
actual_data = list(swh_storage.directory_ls(dir1.id, recursive=True))
expected_data = list(transform_entries(swh_storage, dir1))
for data in actual_data:
assert data in expected_data
# List directory containing a file and an unknown subdirectory
actual_data = list(swh_storage.directory_ls(dir2.id, recursive=True))
expected_data = list(transform_entries(swh_storage, dir2))
for data in actual_data:
assert data in expected_data
# List directory containing both a known and unknown subdirectory, entries
# should be both those of the directory and of the known subdir (up to contents)
actual_data = list(swh_storage.directory_ls(dir3.id, recursive=True))
expected_data = list(
itertools.chain(
transform_entries(swh_storage, dir3),
transform_entries(swh_storage, dir2, prefix=b"subdir/"),
)
)
for data in actual_data:
assert data in expected_data
def test_directory_ls_non_recursive(self, swh_storage, sample_data):
# create consistent dataset regarding the directories we want to list
content, content2 = sample_data.contents[:2]
swh_storage.content_add([content, content2])
dir1, dir2, dir3, _, dir5 = sample_data.directories[:5]
dir_ids = [d.id for d in [dir1, dir2, dir3, dir5]]
init_missing = list(swh_storage.directory_missing(dir_ids))
assert init_missing == dir_ids
actual_result = swh_storage.directory_add([dir1, dir2, dir3, dir5])
assert actual_result == {"directory:add": 4}
# List directory containing a file and an unknown subdirectory
actual_data = list(swh_storage.directory_ls(dir1.id))
expected_data = list(transform_entries(swh_storage, dir1))
for data in actual_data:
assert data in expected_data
# List directory containing a single file
actual_data = list(swh_storage.directory_ls(dir2.id))
expected_data = list(transform_entries(swh_storage, dir2))
for data in actual_data:
assert data in expected_data
# List directory containing a known subdirectory, entries should
# only be those of the parent directory, not of the subdir
actual_data = list(swh_storage.directory_ls(dir3.id))
expected_data = list(transform_entries(swh_storage, dir3))
for data in actual_data:
assert data in expected_data
def test_directory_entry_get_by_path(self, swh_storage, sample_data):
cont, content2 = sample_data.contents[:2]
dir1, dir2, dir3, dir4, dir5 = sample_data.directories[:5]
# given
dir_ids = [d.id for d in [dir1, dir2, dir3, dir4, dir5]]
init_missing = list(swh_storage.directory_missing(dir_ids))
assert init_missing == dir_ids
actual_result = swh_storage.directory_add([dir3, dir4])
assert actual_result == {"directory:add": 2}
expected_entries = [
{
"dir_id": dir3.id,
"name": b"foo",
"type": "file",
"target": cont.sha1_git,
"sha1": None,
"sha1_git": None,
"sha256": None,
"status": None,
"perms": from_disk.DentryPerms.content,
"length": None,
},
{
"dir_id": dir3.id,
"name": b"subdir",
"type": "dir",
"target": dir2.id,
"sha1": None,
"sha1_git": None,
"sha256": None,
"status": None,
"perms": from_disk.DentryPerms.directory,
"length": None,
},
{
"dir_id": dir3.id,
"name": b"hello",
"type": "file",
"target": content2.sha1_git,
"sha1": None,
"sha1_git": None,
"sha256": None,
"status": None,
"perms": from_disk.DentryPerms.content,
"length": None,
},
]
# when (all must be found here)
for entry, expected_entry in zip(dir3.entries, expected_entries):
actual_entry = swh_storage.directory_entry_get_by_path(
dir3.id, [entry.name]
)
assert actual_entry == expected_entry
# same, but deeper
for entry, expected_entry in zip(dir3.entries, expected_entries):
actual_entry = swh_storage.directory_entry_get_by_path(
dir4.id, [b"subdir1", entry.name]
)
expected_entry = expected_entry.copy()
expected_entry["name"] = b"subdir1/" + expected_entry["name"]
assert actual_entry == expected_entry
# when (nothing should be found here since `dir2` is not persisted.)
for entry in dir2.entries:
actual_entry = swh_storage.directory_entry_get_by_path(
dir2.id, [entry.name]
)
assert actual_entry is None
def test_directory_get_random(self, swh_storage, sample_data):
dir1, dir2, dir3 = sample_data.directories[:3]
swh_storage.directory_add([dir1, dir2, dir3])
assert swh_storage.directory_get_random() in {
dir1.id,
dir2.id,
dir3.id,
}
def test_revision_add(self, swh_storage, sample_data):
revision = sample_data.revision
init_missing = swh_storage.revision_missing([revision.id])
assert list(init_missing) == [revision.id]
actual_result = swh_storage.revision_add([revision])
assert actual_result == {"revision:add": 1}
end_missing = swh_storage.revision_missing([revision.id])
assert list(end_missing) == []
assert list(swh_storage.journal_writer.journal.objects) == [
("revision", revision)
]
# already there so nothing added
actual_result = swh_storage.revision_add([revision])
assert actual_result == {"revision:add": 0}
swh_storage.refresh_stat_counters()
assert swh_storage.stat_counters()["revision"] == 1
def test_revision_add_twice(self, swh_storage, sample_data):
revision, revision2 = sample_data.revisions[:2]
actual_result = swh_storage.revision_add([revision])
assert actual_result == {"revision:add": 1}
assert list(swh_storage.journal_writer.journal.objects) == [
("revision", revision)
]
actual_result = swh_storage.revision_add([revision, revision2])
assert actual_result == {"revision:add": 1}
assert list(swh_storage.journal_writer.journal.objects) == [
("revision", revision),
("revision", revision2),
]
def test_revision_add_name_clash(self, swh_storage, sample_data):
revision, revision2 = sample_data.revisions[:2]
revision1 = attr.evolve(
revision,
author=Person(
fullname=b"John Doe <john.doe@example.com>",
name=b"John Doe",
email=b"john.doe@example.com",
),
)
revision2 = attr.evolve(
revision2,
author=Person(
fullname=b"John Doe <john.doe@example.com>",
name=b"John Doe ",
email=b"john.doe@example.com ",
),
)
actual_result = swh_storage.revision_add([revision1, revision2])
assert actual_result == {"revision:add": 2}
def test_revision_get_order(self, swh_storage, sample_data):
revision, revision2 = sample_data.revisions[:2]
add_result = swh_storage.revision_add([revision, revision2])
assert add_result == {"revision:add": 2}
# order 1
res1 = swh_storage.revision_get([revision.id, revision2.id])
assert [Revision.from_dict(r) for r in res1] == [revision, revision2]
# order 2
res2 = swh_storage.revision_get([revision2.id, revision.id])
assert [Revision.from_dict(r) for r in res2] == [revision2, revision]
def test_revision_log(self, swh_storage, sample_data):
revision1, revision2, revision3, revision4 = sample_data.revisions[:4]
# rev4 -is-child-of-> rev3 -> rev1, (rev2 -> rev1)
swh_storage.revision_add([revision1, revision2, revision3, revision4])
# when
results = list(swh_storage.revision_log([revision4.id]))
# for comparison purposes
actual_results = [Revision.from_dict(r) for r in results]
assert len(actual_results) == 4 # rev4 -child-> rev3 -> rev1, (rev2 -> rev1)
assert actual_results == [revision4, revision3, revision1, revision2]
def test_revision_log_with_limit(self, swh_storage, sample_data):
revision1, revision2, revision3, revision4 = sample_data.revisions[:4]
# revision4 -is-child-of-> revision3
swh_storage.revision_add([revision3, revision4])
results = list(swh_storage.revision_log([revision4.id], 1))
actual_results = [Revision.from_dict(r) for r in results]
assert len(actual_results) == 1
assert actual_results[0] == revision4
def test_revision_log_unknown_revision(self, swh_storage, sample_data):
revision = sample_data.revision
rev_log = list(swh_storage.revision_log([revision.id]))
assert rev_log == []
def test_revision_shortlog(self, swh_storage, sample_data):
revision1, revision2, revision3, revision4 = sample_data.revisions[:4]
# rev4 -is-child-of-> rev3 -> (rev1, rev2); rev2 -> rev1
swh_storage.revision_add([revision1, revision2, revision3, revision4])
results = list(swh_storage.revision_shortlog([revision4.id]))
actual_results = [[id, tuple(parents)] for (id, parents) in results]
assert len(actual_results) == 4
assert actual_results == [
[revision4.id, revision4.parents],
[revision3.id, revision3.parents],
[revision1.id, revision1.parents],
[revision2.id, revision2.parents],
]
def test_revision_shortlog_with_limit(self, swh_storage, sample_data):
revision1, revision2, revision3, revision4 = sample_data.revisions[:4]
# revision4 -is-child-of-> revision3
swh_storage.revision_add([revision1, revision2, revision3, revision4])
results = list(swh_storage.revision_shortlog([revision4.id], 1))
actual_results = [[id, tuple(parents)] for (id, parents) in results]
assert len(actual_results) == 1
assert list(actual_results[0]) == [revision4.id, revision4.parents]
def test_revision_get(self, swh_storage, sample_data):
revision, revision2 = sample_data.revisions[:2]
swh_storage.revision_add([revision])
actual_revisions = list(swh_storage.revision_get([revision.id, revision2.id]))
assert len(actual_revisions) == 2
assert Revision.from_dict(actual_revisions[0]) == revision
assert actual_revisions[1] is None
def test_revision_get_no_parents(self, swh_storage, sample_data):
revision = sample_data.revision
swh_storage.revision_add([revision])
get = list(swh_storage.revision_get([revision.id]))
assert len(get) == 1
assert revision.parents == ()
assert tuple(get[0]["parents"]) == () # no parents on this one
def test_revision_get_random(self, swh_storage, sample_data):
revision1, revision2, revision3 = sample_data.revisions[:3]
swh_storage.revision_add([revision1, revision2, revision3])
assert swh_storage.revision_get_random() in {
revision1.id,
revision2.id,
revision3.id,
}
def test_release_add(self, swh_storage, sample_data):
release, release2 = sample_data.releases[:2]
init_missing = swh_storage.release_missing([release.id, release2.id])
assert list(init_missing) == [release.id, release2.id]
actual_result = swh_storage.release_add([release, release2])
assert actual_result == {"release:add": 2}
end_missing = swh_storage.release_missing([release.id, release2.id])
assert list(end_missing) == []
assert list(swh_storage.journal_writer.journal.objects) == [
("release", release),
("release", release2),
]
# already present so nothing added
actual_result = swh_storage.release_add([release, release2])
assert actual_result == {"release:add": 0}
swh_storage.refresh_stat_counters()
assert swh_storage.stat_counters()["release"] == 2
def test_release_add_no_author_date(self, swh_storage, sample_data):
full_release = sample_data.release
release = attr.evolve(full_release, author=None, date=None)
actual_result = swh_storage.release_add([release])
assert actual_result == {"release:add": 1}
end_missing = swh_storage.release_missing([release.id])
assert list(end_missing) == []
assert list(swh_storage.journal_writer.journal.objects) == [
("release", release)
]
def test_release_add_twice(self, swh_storage, sample_data):
release, release2 = sample_data.releases[:2]
actual_result = swh_storage.release_add([release])
assert actual_result == {"release:add": 1}
assert list(swh_storage.journal_writer.journal.objects) == [
("release", release)
]
actual_result = swh_storage.release_add([release, release2, release, release2])
assert actual_result == {"release:add": 1}
assert set(swh_storage.journal_writer.journal.objects) == set(
[("release", release), ("release", release2),]
)
def test_release_add_name_clash(self, swh_storage, sample_data):
release, release2 = [
attr.evolve(
c,
author=Person(
fullname=b"John Doe <john.doe@example.com>",
name=b"John Doe",
email=b"john.doe@example.com",
),
)
for c in sample_data.releases[:2]
]
actual_result = swh_storage.release_add([release, release2])
assert actual_result == {"release:add": 2}
def test_release_get(self, swh_storage, sample_data):
release, release2, release3 = sample_data.releases[:3]
# given
swh_storage.release_add([release, release2])
# when
releases = list(swh_storage.release_get([release.id, release2.id]))
actual_releases = [Release.from_dict(r) for r in releases]
# then
assert actual_releases == [release, release2]
unknown_releases = list(swh_storage.release_get([release3.id]))
assert unknown_releases[0] is None
def test_release_get_order(self, swh_storage, sample_data):
release, release2 = sample_data.releases[:2]
add_result = swh_storage.release_add([release, release2])
assert add_result == {"release:add": 2}
# order 1
res1 = swh_storage.release_get([release.id, release2.id])
assert list(res1) == [release.to_dict(), release2.to_dict()]
# order 2
res2 = swh_storage.release_get([release2.id, release.id])
assert list(res2) == [release2.to_dict(), release.to_dict()]
def test_release_get_random(self, swh_storage, sample_data):
release, release2, release3 = sample_data.releases[:3]
swh_storage.release_add([release, release2, release3])
assert swh_storage.release_get_random() in {
release.id,
release2.id,
release3.id,
}
def test_origin_add(self, swh_storage, sample_data):
origins = list(sample_data.origins[:2])
origin_urls = [o.url for o in origins]
assert swh_storage.origin_get(origin_urls) == [None, None]
stats = swh_storage.origin_add(origins)
assert stats == {"origin:add": 2}
actual_origins = swh_storage.origin_get(origin_urls)
assert actual_origins == origins
assert set(swh_storage.journal_writer.journal.objects) == set(
[("origin", origins[0]), ("origin", origins[1]),]
)
swh_storage.refresh_stat_counters()
assert swh_storage.stat_counters()["origin"] == 2
def test_origin_add_twice(self, swh_storage, sample_data):
origin, origin2 = sample_data.origins[:2]
add1 = swh_storage.origin_add([origin, origin2])
assert set(swh_storage.journal_writer.journal.objects) == set(
[("origin", origin), ("origin", origin2),]
)
assert add1 == {"origin:add": 2}
add2 = swh_storage.origin_add([origin, origin2])
assert set(swh_storage.journal_writer.journal.objects) == set(
[("origin", origin), ("origin", origin2),]
)
assert add2 == {"origin:add": 0}
def test_origin_get(self, swh_storage, sample_data):
origin, origin2 = sample_data.origins[:2]
assert swh_storage.origin_get([origin.url]) == [None]
swh_storage.origin_add([origin])
actual_origins = swh_storage.origin_get([origin.url])
assert actual_origins == [origin]
actual_origins = swh_storage.origin_get([origin.url, "not://exists"])
assert actual_origins == [origin, None]
def _generate_random_visits(self, nb_visits=100, start=0, end=7):
"""Generate random visits within the last 2 months (to avoid
computations)
"""
visits = []
today = now()
for _ in range(nb_visits):
hours = random.randint(0, 24)
minutes = random.randint(0, 60)
seconds = random.randint(0, 60)
days = random.randint(0, 28)
weeks = random.randint(start, end)
date_visit = today - timedelta(
weeks=weeks, hours=hours, minutes=minutes, seconds=seconds, days=days
)
visits.append(date_visit)
return visits
def test_origin_visit_get__unknown_origin(self, swh_storage):
actual_page = swh_storage.origin_visit_get("foo")
assert actual_page.next_page_token is None
assert actual_page.results == []
assert actual_page == PagedResult()
def test_origin_visit_get__validation_failure(self, swh_storage, sample_data):
origin = sample_data.origin
swh_storage.origin_add([origin])
with pytest.raises(
StorageArgumentException, match="page_token must be a string"
):
swh_storage.origin_visit_get(origin.url, page_token=10) # not bytes
with pytest.raises(
StorageArgumentException, match="order must be a ListOrder value"
):
swh_storage.origin_visit_get(origin.url, order="foobar") # wrong order
def test_origin_visit_get_all(self, swh_storage, sample_data):
origin = sample_data.origin
swh_storage.origin_add([origin])
ov1, ov2, ov3 = swh_storage.origin_visit_add(
[
OriginVisit(
origin=origin.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
),
OriginVisit(
origin=origin.url,
date=sample_data.date_visit2,
type=sample_data.type_visit2,
),
OriginVisit(
origin=origin.url,
date=sample_data.date_visit2,
type=sample_data.type_visit2,
),
]
)
# order asc, no token, no limit
actual_page = swh_storage.origin_visit_get(origin.url)
assert actual_page.next_page_token is None
assert actual_page.results == [ov1, ov2, ov3]
# order asc, no token, limit
actual_page = swh_storage.origin_visit_get(origin.url, limit=2)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ov1, ov2]
# order asc, token, no limit
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token
)
assert actual_page.next_page_token is None
assert actual_page.results == [ov3]
# order asc, no token, limit
actual_page = swh_storage.origin_visit_get(origin.url, limit=1)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ov1]
# order asc, token, no limit
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token
)
assert actual_page.next_page_token is None
assert actual_page.results == [ov2, ov3]
# order asc, token, limit
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token, limit=2
)
assert actual_page.next_page_token is None
assert actual_page.results == [ov2, ov3]
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token, limit=1
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ov2]
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token, limit=1
)
assert actual_page.next_page_token is None
assert actual_page.results == [ov3]
# order desc, no token, no limit
actual_page = swh_storage.origin_visit_get(origin.url, order=ListOrder.DESC)
assert actual_page.next_page_token is None
assert actual_page.results == [ov3, ov2, ov1]
# order desc, no token, limit
actual_page = swh_storage.origin_visit_get(
origin.url, limit=2, order=ListOrder.DESC
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ov3, ov2]
# order desc, token, no limit
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token, order=ListOrder.DESC
)
assert actual_page.next_page_token is None
assert actual_page.results == [ov1]
# order desc, no token, limit
actual_page = swh_storage.origin_visit_get(
origin.url, limit=1, order=ListOrder.DESC
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ov3]
# order desc, token, no limit
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token, order=ListOrder.DESC
)
assert actual_page.next_page_token is None
assert actual_page.results == [ov2, ov1]
# order desc, token, limit
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token, order=ListOrder.DESC, limit=1
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ov2]
actual_page = swh_storage.origin_visit_get(
origin.url, page_token=next_page_token, order=ListOrder.DESC
)
assert actual_page.next_page_token is None
assert actual_page.results == [ov1]
def test_origin_visit_status_get__unknown_cases(self, swh_storage, sample_data):
origin = sample_data.origin
actual_page = swh_storage.origin_visit_status_get("foobar", 1)
assert actual_page.next_page_token is None
assert actual_page.results == []
actual_page = swh_storage.origin_visit_status_get(origin.url, 1)
assert actual_page.next_page_token is None
assert actual_page.results == []
origin = sample_data.origin
swh_storage.origin_add([origin])
ov1 = swh_storage.origin_visit_add(
[
OriginVisit(
origin=origin.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
),
]
)[0]
actual_page = swh_storage.origin_visit_status_get(origin.url, ov1.visit + 10)
assert actual_page.next_page_token is None
assert actual_page.results == []
def test_origin_visit_status_get_all(self, swh_storage, sample_data):
origin = sample_data.origin
swh_storage.origin_add([origin])
date_visit3 = round_to_milliseconds(now())
date_visit1 = date_visit3 - datetime.timedelta(hours=2)
date_visit2 = date_visit3 - datetime.timedelta(hours=1)
assert date_visit1 < date_visit2 < date_visit3
ov1 = swh_storage.origin_visit_add(
[
OriginVisit(
origin=origin.url, date=date_visit1, type=sample_data.type_visit1,
),
]
)[0]
ovs1 = OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=date_visit1,
status="created",
snapshot=None,
)
ovs2 = OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=date_visit2,
status="partial",
snapshot=None,
)
ovs3 = OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=date_visit3,
status="full",
snapshot=sample_data.snapshot.id,
metadata={},
)
swh_storage.origin_visit_status_add([ovs2, ovs3])
# order asc, no token, no limit
actual_page = swh_storage.origin_visit_status_get(origin.url, ov1.visit)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs1, ovs2, ovs3]
# order asc, no token, limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, limit=2
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ovs1, ovs2]
# order asc, token, no limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, page_token=next_page_token
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs3]
# order asc, no token, limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, limit=1
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ovs1]
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, page_token=next_page_token
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs2, ovs3]
# order asc, token, limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, page_token=next_page_token, limit=2
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs2, ovs3]
# order asc, no token, limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, limit=2
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ovs1, ovs2]
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, page_token=next_page_token, limit=1
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs3]
# order desc, no token, no limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, order=ListOrder.DESC
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs3, ovs2, ovs1]
# order desc, no token, limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, limit=2, order=ListOrder.DESC
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ovs3, ovs2]
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, page_token=next_page_token, order=ListOrder.DESC
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs1]
# order desc, no token, limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, order=ListOrder.DESC, limit=1
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ovs3]
# order desc, token, no limit
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, page_token=next_page_token, order=ListOrder.DESC
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs2, ovs1]
# order desc, token, limit
actual_page = swh_storage.origin_visit_status_get(
origin.url,
ov1.visit,
page_token=next_page_token,
order=ListOrder.DESC,
limit=1,
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [ovs2]
actual_page = swh_storage.origin_visit_status_get(
origin.url, ov1.visit, page_token=next_page_token, order=ListOrder.DESC
)
assert actual_page.next_page_token is None
assert actual_page.results == [ovs1]
def test_origin_visit_status_get_random(self, swh_storage, sample_data):
origins = sample_data.origins[:2]
swh_storage.origin_add(origins)
# Add some random visits within the selection range
visits = self._generate_random_visits()
visit_type = "git"
# Add visits to those origins
for origin in origins:
for date_visit in visits:
visit = swh_storage.origin_visit_add(
[OriginVisit(origin=origin.url, date=date_visit, type=visit_type,)]
)[0]
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=visit.visit,
date=now(),
status="full",
snapshot=None,
)
]
)
swh_storage.refresh_stat_counters()
stats = swh_storage.stat_counters()
assert stats["origin"] == len(origins)
assert stats["origin_visit"] == len(origins) * len(visits)
random_ov, random_ovs = swh_storage.origin_visit_status_get_random(visit_type)
assert random_ov and random_ovs
assert random_ov.origin is not None
assert random_ov.origin == random_ovs.origin
assert random_ov.origin in [o.url for o in origins]
def test_origin_visit_status_get_random_nothing_found(
self, swh_storage, sample_data
):
origins = sample_data.origins
swh_storage.origin_add(origins)
visit_type = "hg"
# Add visits outside of the random selection range, so the random lookup finds nothing
visits = self._generate_random_visits(nb_visits=3, start=13, end=24)
for origin in origins:
for date_visit in visits:
visit = swh_storage.origin_visit_add(
[OriginVisit(origin=origin.url, date=date_visit, type=visit_type,)]
)[0]
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=visit.visit,
date=now(),
status="full",
snapshot=None,
)
]
)
random_origin_visit = swh_storage.origin_visit_status_get_random(visit_type)
assert random_origin_visit is None
def test_origin_get_by_sha1(self, swh_storage, sample_data):
origin = sample_data.origin
assert swh_storage.origin_get([origin.url])[0] is None
swh_storage.origin_add([origin])
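# origins can be looked up by the sha1 hash of their URL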
origins = list(swh_storage.origin_get_by_sha1([sha1(origin.url)]))
assert len(origins) == 1
assert origins[0]["url"] == origin.url
def test_origin_get_by_sha1_not_found(self, swh_storage, sample_data):
unknown_origin = sample_data.origin
assert swh_storage.origin_get([unknown_origin.url])[0] is None
origins = list(swh_storage.origin_get_by_sha1([sha1(unknown_origin.url)]))
assert len(origins) == 1
assert origins[0] is None
def test_origin_search_single_result(self, swh_storage, sample_data):
origin, origin2 = sample_data.origins[:2]
actual_page = swh_storage.origin_search(origin.url)
assert actual_page.next_page_token is None
assert actual_page.results == []
actual_page = swh_storage.origin_search(origin.url, regexp=True)
assert actual_page.next_page_token is None
assert actual_page.results == []
swh_storage.origin_add([origin])
actual_page = swh_storage.origin_search(origin.url)
assert actual_page.next_page_token is None
assert actual_page.results == [origin]
actual_page = swh_storage.origin_search(f".{origin.url[1:-1]}.", regexp=True)
assert actual_page.next_page_token is None
assert actual_page.results == [origin]
swh_storage.origin_add([origin2])
actual_page = swh_storage.origin_search(origin2.url)
assert actual_page.next_page_token is None
assert actual_page.results == [origin2]
actual_page = swh_storage.origin_search(f".{origin2.url[1:-1]}.", regexp=True)
assert actual_page.next_page_token is None
assert actual_page.results == [origin2]
def test_origin_search_no_regexp(self, swh_storage, sample_data):
origin, origin2 = sample_data.origins[:2]
swh_storage.origin_add([origin, origin2])
# no pagination
actual_page = swh_storage.origin_search("/")
assert actual_page.next_page_token is None
assert actual_page.results == [origin, origin2]
# offset=0
actual_page = swh_storage.origin_search("/", page_token=None, limit=1)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [origin]
# offset=1
actual_page = swh_storage.origin_search(
"/", page_token=next_page_token, limit=1
)
assert actual_page.next_page_token is None
assert actual_page.results == [origin2]
def test_origin_search_regexp_substring(self, swh_storage, sample_data):
origin, origin2 = sample_data.origins[:2]
swh_storage.origin_add([origin, origin2])
# no pagination
actual_page = swh_storage.origin_search("/", regexp=True)
assert actual_page.next_page_token is None
assert actual_page.results == [origin, origin2]
# offset=0
actual_page = swh_storage.origin_search(
"/", page_token=None, limit=1, regexp=True
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [origin]
# offset=1
actual_page = swh_storage.origin_search(
"/", page_token=next_page_token, limit=1, regexp=True
)
assert actual_page.next_page_token is None
assert actual_page.results == [origin2]
def test_origin_search_regexp_fullstring(self, swh_storage, sample_data):
origin, origin2 = sample_data.origins[:2]
swh_storage.origin_add([origin, origin2])
# no pagination
actual_page = swh_storage.origin_search(".*/.*", regexp=True)
assert actual_page.next_page_token is None
assert actual_page.results == [origin, origin2]
# offset=0
actual_page = swh_storage.origin_search(
".*/.*", page_token=None, limit=1, regexp=True
)
next_page_token = actual_page.next_page_token
assert next_page_token is not None
assert actual_page.results == [origin]
# offset=1
actual_page = swh_storage.origin_search(
".*/.*", page_token=next_page_token, limit=1, regexp=True
)
assert actual_page.next_page_token is None
assert actual_page.results == [origin2]
def test_origin_visit_add(self, swh_storage, sample_data):
origin1 = sample_data.origins[1]
swh_storage.origin_add([origin1])
date_visit = now()
date_visit2 = date_visit + datetime.timedelta(minutes=1)
date_visit = round_to_milliseconds(date_visit)
date_visit2 = round_to_milliseconds(date_visit2)
visit1 = OriginVisit(
origin=origin1.url, date=date_visit, type=sample_data.type_visit1,
)
visit2 = OriginVisit(
origin=origin1.url, date=date_visit2, type=sample_data.type_visit2,
)
# add once
ov1, ov2 = swh_storage.origin_visit_add([visit1, visit2])
# then again (will be ignored as they already exist)
origin_visit1, origin_visit2 = swh_storage.origin_visit_add([ov1, ov2])
assert ov1 == origin_visit1
assert ov2 == origin_visit2
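# origin_visit_add also records an initial "created" visit status for each visit; ovs1 and ovs2 below mirror them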
ovs1 = OriginVisitStatus(
origin=origin1.url,
visit=ov1.visit,
date=date_visit,
status="created",
snapshot=None,
)
ovs2 = OriginVisitStatus(
origin=origin1.url,
visit=ov2.visit,
date=date_visit2,
status="created",
snapshot=None,
)
actual_visits = swh_storage.origin_visit_get(origin1.url).results
expected_visits = [ov1, ov2]
assert len(expected_visits) == len(actual_visits)
for visit in expected_visits:
assert visit in actual_visits
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_objects = list(
[("origin", origin1)]
+ [("origin_visit", visit) for visit in expected_visits] * 2
+ [("origin_visit_status", ovs) for ovs in [ovs1, ovs2]]
)
for obj in expected_objects:
assert obj in actual_objects
def test_origin_visit_add_validation(self, swh_storage, sample_data):
"""Unknown origin when adding visits should raise"""
visit = attr.evolve(sample_data.origin_visit, origin="something-unknown")
with pytest.raises(StorageArgumentException, match="Unknown origin"):
swh_storage.origin_visit_add([visit])
objects = list(swh_storage.journal_writer.journal.objects)
assert not objects
def test_origin_visit_status_add_validation(self, swh_storage):
"""Wrong origin_visit_status input should raise storage argument error"""
date_visit = now()
visit_status1 = OriginVisitStatus(
origin="unknown-origin-url",
visit=10,
date=date_visit,
status="full",
snapshot=None,
)
with pytest.raises(StorageArgumentException, match="Unknown origin"):
swh_storage.origin_visit_status_add([visit_status1])
objects = list(swh_storage.journal_writer.journal.objects)
assert not objects
def test_origin_visit_status_add(self, swh_storage, sample_data):
"""Correct origin visit statuses should add a new visit status
"""
snapshot = sample_data.snapshot
origin1 = sample_data.origins[1]
origin2 = Origin(url="new-origin")
swh_storage.origin_add([origin1, origin2])
ov1, ov2 = swh_storage.origin_visit_add(
[
OriginVisit(
origin=origin1.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
),
OriginVisit(
origin=origin2.url,
date=sample_data.date_visit2,
type=sample_data.type_visit2,
),
]
)
ovs1 = OriginVisitStatus(
origin=origin1.url,
visit=ov1.visit,
date=sample_data.date_visit1,
status="created",
snapshot=None,
)
ovs2 = OriginVisitStatus(
origin=origin2.url,
visit=ov2.visit,
date=sample_data.date_visit2,
status="created",
snapshot=None,
)
date_visit_now = round_to_milliseconds(now())
visit_status1 = OriginVisitStatus(
origin=ov1.origin,
visit=ov1.visit,
date=date_visit_now,
status="full",
snapshot=snapshot.id,
)
date_visit_now = round_to_milliseconds(now())
visit_status2 = OriginVisitStatus(
origin=ov2.origin,
visit=ov2.visit,
date=date_visit_now,
status="ongoing",
snapshot=None,
metadata={"intrinsic": "something"},
)
swh_storage.origin_visit_status_add([visit_status1, visit_status2])
visit = swh_storage.origin_visit_get_latest(origin1.url, require_snapshot=True)
visit_status = swh_storage.origin_visit_status_get_latest(
origin1.url, visit.visit, require_snapshot=True
)
assert visit_status == visit_status1
visit = swh_storage.origin_visit_get_latest(origin2.url, require_snapshot=False)
visit_status = swh_storage.origin_visit_status_get_latest(
origin2.url, visit.visit, require_snapshot=False
)
assert origin2.url != origin1.url
assert visit_status == visit_status2
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_origins = [origin1, origin2]
expected_visits = [ov1, ov2]
expected_visit_statuses = [ovs1, ovs2, visit_status1, visit_status2]
expected_objects = (
[("origin", o) for o in expected_origins]
+ [("origin_visit", v) for v in expected_visits]
+ [("origin_visit_status", ovs) for ovs in expected_visit_statuses]
)
for obj in expected_objects:
assert obj in actual_objects
def test_origin_visit_status_add_twice(self, swh_storage, sample_data):
"""Correct origin visit statuses should add a new visit status
"""
snapshot = sample_data.snapshot
origin1 = sample_data.origins[1]
swh_storage.origin_add([origin1])
ov1 = swh_storage.origin_visit_add(
[
OriginVisit(
origin=origin1.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
),
]
)[0]
ovs1 = OriginVisitStatus(
origin=origin1.url,
visit=ov1.visit,
date=sample_data.date_visit1,
status="created",
snapshot=None,
)
date_visit_now = round_to_milliseconds(now())
visit_status1 = OriginVisitStatus(
origin=ov1.origin,
visit=ov1.visit,
date=date_visit_now,
status="full",
snapshot=snapshot.id,
)
swh_storage.origin_visit_status_add([visit_status1])
# the second call ignores the already-present entry (it is still written to the journal though)
swh_storage.origin_visit_status_add([visit_status1])
visit_status = swh_storage.origin_visit_status_get_latest(ov1.origin, ov1.visit)
assert visit_status == visit_status1
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_origins = [origin1]
expected_visits = [ov1]
expected_visit_statuses = [ovs1, visit_status1, visit_status1]
# visit_status1 is written twice to the journal
expected_objects = (
[("origin", o) for o in expected_origins]
+ [("origin_visit", v) for v in expected_visits]
+ [("origin_visit_status", ovs) for ovs in expected_visit_statuses]
)
for obj in expected_objects:
assert obj in actual_objects
def test_origin_visit_find_by_date(self, swh_storage, sample_data):
origin = sample_data.origin
swh_storage.origin_add([origin])
visit1 = OriginVisit(
origin=origin.url,
date=sample_data.date_visit2,
type=sample_data.type_visit1,
)
visit2 = OriginVisit(
origin=origin.url,
date=sample_data.date_visit3,
type=sample_data.type_visit2,
)
visit3 = OriginVisit(
origin=origin.url,
date=sample_data.date_visit2,
type=sample_data.type_visit3,
)
ov1, ov2, ov3 = swh_storage.origin_visit_add([visit1, visit2, visit3])
ovs1 = OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=sample_data.date_visit2,
status="ongoing",
snapshot=None,
)
ovs2 = OriginVisitStatus(
origin=origin.url,
visit=ov2.visit,
date=sample_data.date_visit3,
status="ongoing",
snapshot=None,
)
ovs3 = OriginVisitStatus(
origin=origin.url,
visit=ov3.visit,
date=sample_data.date_visit2,
status="ongoing",
snapshot=None,
)
swh_storage.origin_visit_status_add([ovs1, ovs2, ovs3])
# Simple case
actual_visit = swh_storage.origin_visit_find_by_date(
origin.url, sample_data.date_visit3
)
assert actual_visit == ov2
# There are two visits at the same date, the latest must be returned
actual_visit = swh_storage.origin_visit_find_by_date(
origin.url, sample_data.date_visit2
)
assert actual_visit == ov3
def test_origin_visit_find_by_date__unknown_origin(self, swh_storage, sample_data):
actual_visit = swh_storage.origin_visit_find_by_date(
"foo", sample_data.date_visit2
)
assert actual_visit is None
def test_origin_visit_get_by(self, swh_storage, sample_data):
snapshot = sample_data.snapshot
origins = sample_data.origins[:2]
swh_storage.origin_add(origins)
origin_url, origin_url2 = [o.url for o in origins]
visit = OriginVisit(
origin=origin_url,
date=sample_data.date_visit2,
type=sample_data.type_visit2,
)
origin_visit1 = swh_storage.origin_visit_add([visit])[0]
swh_storage.snapshot_add([snapshot])
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin_url,
visit=origin_visit1.visit,
date=now(),
status="ongoing",
snapshot=snapshot.id,
)
]
)
# Add some other {origin, visit} entries
visit2 = OriginVisit(
origin=origin_url,
date=sample_data.date_visit3,
type=sample_data.type_visit3,
)
visit3 = OriginVisit(
origin=origin_url2,
date=sample_data.date_visit3,
type=sample_data.type_visit3,
)
swh_storage.origin_visit_add([visit2, visit3])
# when
visit1_metadata = {
"contents": 42,
"directories": 22,
}
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin_url,
visit=origin_visit1.visit,
date=now(),
status="full",
snapshot=snapshot.id,
metadata=visit1_metadata,
)
]
)
actual_visit = swh_storage.origin_visit_get_by(origin_url, origin_visit1.visit)
assert actual_visit == origin_visit1
def test_origin_visit_get_by__no_result(self, swh_storage, sample_data):
actual_visit = swh_storage.origin_visit_get_by("unknown", 10) # unknown origin
assert actual_visit is None
origin = sample_data.origin
swh_storage.origin_add([origin])
actual_visit = swh_storage.origin_visit_get_by(origin.url, 999) # unknown visit
assert actual_visit is None
def test_origin_visit_get_latest_edge_cases(self, swh_storage, sample_data):
# unknown origin so no result
assert swh_storage.origin_visit_get_latest("unknown-origin") is None
# unknown type so no result
origin = sample_data.origin
swh_storage.origin_add([origin])
assert swh_storage.origin_visit_get_latest(origin.url, type="unknown") is None
# unknown allowed statuses should raise
with pytest.raises(StorageArgumentException, match="Unknown allowed statuses"):
swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["unknown"]
)
def test_origin_visit_get_latest_filter_type(self, swh_storage, sample_data):
"""Filtering origin visit get latest with filter type should be ok
"""
origin = sample_data.origin
swh_storage.origin_add([origin])
visit1 = OriginVisit(
origin=origin.url, date=sample_data.date_visit1, type="git",
)
visit2 = OriginVisit(
origin=origin.url, date=sample_data.date_visit2, type="hg",
)
date_now = round_to_milliseconds(now())
visit3 = OriginVisit(origin=origin.url, date=date_now, type="hg",)
assert sample_data.date_visit1 < sample_data.date_visit2
assert sample_data.date_visit2 < date_now
ov1, ov2, ov3 = swh_storage.origin_visit_add([visit1, visit2, visit3])
# Check type filter is ok
actual_visit = swh_storage.origin_visit_get_latest(origin.url, type="git")
assert actual_visit == ov1
actual_visit = swh_storage.origin_visit_get_latest(origin.url, type="hg")
assert actual_visit == ov3
actual_visit_unknown_type = swh_storage.origin_visit_get_latest(
origin.url, type="npm", # no visit matching that type
)
assert actual_visit_unknown_type is None
def test_origin_visit_get_latest(self, swh_storage, sample_data):
empty_snapshot, complete_snapshot = sample_data.snapshots[1:3]
origin = sample_data.origin
swh_storage.origin_add([origin])
visit1 = OriginVisit(
origin=origin.url, date=sample_data.date_visit1, type="git",
)
visit2 = OriginVisit(
origin=origin.url, date=sample_data.date_visit2, type="hg",
)
date_now = round_to_milliseconds(now())
visit3 = OriginVisit(origin=origin.url, date=date_now, type="hg",)
assert visit1.date < visit2.date
assert visit2.date < visit3.date
ov1, ov2, ov3 = swh_storage.origin_visit_add([visit1, visit2, visit3])
# no filters, latest visit is the last one (whose date is most recent)
actual_visit = swh_storage.origin_visit_get_latest(origin.url)
assert actual_visit == ov3
# 3 visits, none has snapshot so nothing is returned
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, require_snapshot=True
)
assert actual_visit is None
# visits are created with the "created" status, so nothing is returned
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["partial"]
)
assert actual_visit is None
# visits are created with the "created" status, so the most recent one is returned again
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["created"]
)
assert actual_visit == ov3
# Add snapshot to visit1; require_snapshot=True makes it return first visit
swh_storage.snapshot_add([complete_snapshot])
visit_status_with_snapshot = OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=round_to_milliseconds(now()),
status="ongoing",
snapshot=complete_snapshot.id,
)
swh_storage.origin_visit_status_add([visit_status_with_snapshot])
# only the first visit has a snapshot now
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, require_snapshot=True
)
assert actual_visit == ov1
# only the first visit has a status ongoing now
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["ongoing"]
)
assert actual_visit == ov1
actual_visit_status = swh_storage.origin_visit_status_get_latest(
origin.url, ov1.visit, require_snapshot=True
)
assert actual_visit_status == visit_status_with_snapshot
# ... and require_snapshot=False (the default) still returns the latest visit (3rd)
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, require_snapshot=False
)
assert actual_visit == ov3
# no specific filter, this returns as before the latest visit
actual_visit = swh_storage.origin_visit_get_latest(origin.url)
assert actual_visit == ov3
# Status filter: no visit has status "full" yet, so nothing is returned
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["full"]
)
assert actual_visit is None
visit_status1_full = OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=round_to_milliseconds(now()),
status="full",
snapshot=complete_snapshot.id,
)
# Mark the first visit as completed and check status filter again
swh_storage.origin_visit_status_add([visit_status1_full])
# only the first visit has the full status
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["full"]
)
assert actual_visit == ov1
actual_visit_status = swh_storage.origin_visit_status_get_latest(
origin.url, ov1.visit, allowed_statuses=["full"]
)
assert actual_visit_status == visit_status1_full
# no specific filter, this returns as before the latest visit
actual_visit = swh_storage.origin_visit_get_latest(origin.url)
assert actual_visit == ov3
# Add snapshot to visit2 and check that the new snapshot is returned
swh_storage.snapshot_add([empty_snapshot])
visit_status2_with_snapshot = OriginVisitStatus(
origin=origin.url,
visit=ov2.visit,
date=round_to_milliseconds(now()),
status="ongoing",
snapshot=empty_snapshot.id,
)
swh_storage.origin_visit_status_add([visit_status2_with_snapshot])
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, require_snapshot=True
)
# 2nd visit is most recent with a snapshot
assert actual_visit == ov2
actual_visit_status = swh_storage.origin_visit_status_get_latest(
origin.url, ov2.visit, require_snapshot=True
)
assert actual_visit_status == visit_status2_with_snapshot
# no specific filter: as before, the latest visit (the 3rd) is returned
actual_visit = swh_storage.origin_visit_get_latest(origin.url)
assert actual_visit == ov3
# full status is still the first visit
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["full"]
)
assert actual_visit == ov1
# Add snapshot to visit3 (same date as visit2)
visit_status3_with_snapshot = OriginVisitStatus(
origin=origin.url,
visit=ov3.visit,
date=round_to_milliseconds(now()),
status="ongoing",
snapshot=complete_snapshot.id,
)
swh_storage.origin_visit_status_add([visit_status3_with_snapshot])
# full status is still the first visit
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["full"], require_snapshot=True,
)
assert actual_visit == ov1
actual_visit_status = swh_storage.origin_visit_status_get_latest(
origin.url,
visit=actual_visit.visit,
allowed_statuses=["full"],
require_snapshot=True,
)
assert actual_visit_status == visit_status1_full
# most recent is still the 3rd visit
actual_visit = swh_storage.origin_visit_get_latest(origin.url)
assert actual_visit == ov3
# 3rd visit has a snapshot now, so it's elected
actual_visit = swh_storage.origin_visit_get_latest(
origin.url, require_snapshot=True
)
assert actual_visit == ov3
actual_visit_status = swh_storage.origin_visit_status_get_latest(
origin.url, ov3.visit, require_snapshot=True
)
assert actual_visit_status == visit_status3_with_snapshot
def test_origin_visit_get_latest__same_date(self, swh_storage, sample_data):
empty_snapshot, complete_snapshot = sample_data.snapshots[1:3]
origin = sample_data.origin
swh_storage.origin_add([origin])
visit1 = OriginVisit(
origin=origin.url, date=sample_data.date_visit1, type="git",
)
visit2 = OriginVisit(
origin=origin.url, date=sample_data.date_visit1, type="hg",
)
ov1, ov2 = swh_storage.origin_visit_add([visit1, visit2])
# ties should be broken by using the visit id
actual_visit = swh_storage.origin_visit_get_latest(origin.url)
assert actual_visit == ov2
def test_origin_visit_get_latest__not_last(self, swh_storage, sample_data):
origin = sample_data.origin
swh_storage.origin_add([origin])
visit1, visit2 = sample_data.origin_visits[:2]
assert visit1.origin == origin.url
swh_storage.origin_visit_add([visit1])
ov1 = swh_storage.origin_visit_get_latest(origin.url)
# Add a snapshot to storage and a "partial" status (without snapshot) to visit1
complete_snapshot = sample_data.snapshots[2]
swh_storage.snapshot_add([complete_snapshot])
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=visit2.date,
status="partial",
snapshot=None,
)
]
)
assert visit1.date < visit2.date
# the "partial" status has no snapshot associated with it, so None
visit = swh_storage.origin_visit_get_latest(
origin.url, allowed_statuses=["partial"], require_snapshot=True,
)
assert visit is None
date_now = now()
assert visit2.date < date_now
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=date_now,
status="full",
snapshot=complete_snapshot.id,
)
]
)
swh_storage.origin_visit_add(
[OriginVisit(origin=origin.url, date=now(), type=visit1.type,)]
)
visit = swh_storage.origin_visit_get_latest(origin.url, require_snapshot=True)
assert visit is not None
def test_origin_visit_status_get_latest__validation(self, swh_storage, sample_data):
origin = sample_data.origin
swh_storage.origin_add([origin])
visit1 = OriginVisit(
origin=origin.url, date=sample_data.date_visit1, type="git",
)
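# visit1 is deliberately never added: the allowed_statuses validation should raise first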
# unknown allowed statuses should raise
with pytest.raises(StorageArgumentException, match="Unknown allowed statuses"):
swh_storage.origin_visit_status_get_latest(
origin.url, visit1.visit, allowed_statuses=["unknown"]
)
def test_origin_visit_status_get_latest(self, swh_storage, sample_data):
snapshot = sample_data.snapshots[2]
origin1 = sample_data.origin
swh_storage.origin_add([origin1])
# to have some reference visits
ov1, ov2 = swh_storage.origin_visit_add(
[
OriginVisit(
origin=origin1.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
),
OriginVisit(
origin=origin1.url,
date=sample_data.date_visit2,
type=sample_data.type_visit2,
),
]
)
swh_storage.snapshot_add([snapshot])
date_now = round_to_milliseconds(now())
assert sample_data.date_visit1 < sample_data.date_visit2
assert sample_data.date_visit2 < date_now
ovs1 = OriginVisitStatus(
origin=origin1.url,
visit=ov1.visit,
date=sample_data.date_visit1,
status="partial",
snapshot=None,
)
ovs2 = OriginVisitStatus(
origin=origin1.url,
visit=ov1.visit,
date=sample_data.date_visit2,
status="ongoing",
snapshot=None,
)
ovs3 = OriginVisitStatus(
origin=origin1.url,
visit=ov2.visit,
date=sample_data.date_visit2
+ datetime.timedelta(minutes=1),  # offset so it is not ignored as a duplicate
status="ongoing",
snapshot=None,
)
ovs4 = OriginVisitStatus(
origin=origin1.url,
visit=ov2.visit,
date=date_now,
status="full",
snapshot=snapshot.id,
metadata={"something": "wicked"},
)
swh_storage.origin_visit_status_add([ovs1, ovs2, ovs3, ovs4])
# unknown origin so no result
actual_origin_visit = swh_storage.origin_visit_status_get_latest(
"unknown-origin", ov1.visit
)
assert actual_origin_visit is None
# unknown visit so no result
actual_origin_visit = swh_storage.origin_visit_status_get_latest(
ov1.origin, ov1.visit + 10
)
assert actual_origin_visit is None
# Two visit statuses, neither with a snapshot: the most recent one is returned
actual_origin_visit2 = swh_storage.origin_visit_status_get_latest(
origin1.url, ov1.visit
)
assert isinstance(actual_origin_visit2, OriginVisitStatus)
assert actual_origin_visit2 == ovs2
assert ovs2.origin == origin1.url
assert ovs2.visit == ov1.visit
actual_origin_visit = swh_storage.origin_visit_status_get_latest(
origin1.url, ov1.visit, require_snapshot=True
)
# there is no visit status with a snapshot yet for that visit
assert actual_origin_visit is None
actual_origin_visit2 = swh_storage.origin_visit_status_get_latest(
origin1.url, ov1.visit, allowed_statuses=["partial", "ongoing"]
)
# the most recent status among the allowed ones is elected (ovs2, "ongoing")
assert actual_origin_visit2 == ovs2
assert actual_origin_visit2.status == "ongoing"
actual_origin_visit4 = swh_storage.origin_visit_status_get_latest(
origin1.url, ov2.visit, require_snapshot=True
)
assert actual_origin_visit4 == ovs4
assert actual_origin_visit4.snapshot == snapshot.id
actual_origin_visit = swh_storage.origin_visit_status_get_latest(
origin1.url, ov2.visit, require_snapshot=True, allowed_statuses=["ongoing"]
)
# nothing matches both filters, so nothing is returned
assert actual_origin_visit is None  # no "ongoing" status has a snapshot
actual_origin_visit3 = swh_storage.origin_visit_status_get_latest(
origin1.url, ov2.visit, allowed_statuses=["ongoing"]
)
assert actual_origin_visit3 == ovs3
def test_person_fullname_unicity(self, swh_storage, sample_data):
revision, rev2 = sample_data.revisions[0:2]
# create a revision with the same committer fullname but without name and email
revision2 = attr.evolve(
rev2,
committer=Person(
fullname=revision.committer.fullname, name=None, email=None
),
)
swh_storage.revision_add([revision, revision2])
# when getting added revisions
revisions = list(swh_storage.revision_get([revision.id, revision2.id]))
# then check committers are the same
assert revisions[0]["committer"] == revisions[1]["committer"]
def test_snapshot_add_get_empty(self, swh_storage, sample_data):
empty_snapshot = sample_data.snapshots[1]
empty_snapshot_dict = empty_snapshot.to_dict()
origin = sample_data.origin
swh_storage.origin_add([origin])
ov1 = swh_storage.origin_visit_add(
[
OriginVisit(
origin=origin.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
)
]
)[0]
actual_result = swh_storage.snapshot_add([empty_snapshot])
assert actual_result == {"snapshot:add": 1}
date_now = now()
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=date_now,
status="full",
snapshot=empty_snapshot.id,
)
]
)
by_id = swh_storage.snapshot_get(empty_snapshot.id)
assert by_id == {**empty_snapshot_dict, "next_branch": None}
ovs1 = OriginVisitStatus.from_dict(
{
"origin": origin.url,
"date": sample_data.date_visit1,
"visit": ov1.visit,
"status": "created",
"snapshot": None,
"metadata": None,
}
)
ovs2 = OriginVisitStatus.from_dict(
{
"origin": origin.url,
"date": date_now,
"visit": ov1.visit,
"status": "full",
"metadata": None,
"snapshot": empty_snapshot.id,
}
)
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_objects = [
("origin", origin),
("origin_visit", ov1),
("origin_visit_status", ovs1,),
("snapshot", empty_snapshot),
("origin_visit_status", ovs2,),
]
for obj in expected_objects:
assert obj in actual_objects
def test_snapshot_add_get_complete(self, swh_storage, sample_data):
complete_snapshot = sample_data.snapshots[2]
complete_snapshot_dict = complete_snapshot.to_dict()
origin = sample_data.origin
swh_storage.origin_add([origin])
visit = OriginVisit(
origin=origin.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
)
origin_visit1 = swh_storage.origin_visit_add([visit])[0]
actual_result = swh_storage.snapshot_add([complete_snapshot])
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=origin_visit1.visit,
date=now(),
status="ongoing",
snapshot=complete_snapshot.id,
)
]
)
assert actual_result == {"snapshot:add": 1}
by_id = swh_storage.snapshot_get(complete_snapshot.id)
assert by_id == {**complete_snapshot_dict, "next_branch": None}
def test_snapshot_add_many(self, swh_storage, sample_data):
snapshot, _, complete_snapshot = sample_data.snapshots[:3]
actual_result = swh_storage.snapshot_add([snapshot, complete_snapshot])
assert actual_result == {"snapshot:add": 2}
assert swh_storage.snapshot_get(complete_snapshot.id) == {
**complete_snapshot.to_dict(),
"next_branch": None,
}
assert swh_storage.snapshot_get(snapshot.id) == {
**snapshot.to_dict(),
"next_branch": None,
}
swh_storage.refresh_stat_counters()
assert swh_storage.stat_counters()["snapshot"] == 2
def test_snapshot_add_many_incremental(self, swh_storage, sample_data):
snapshot, _, complete_snapshot = sample_data.snapshots[:3]
actual_result = swh_storage.snapshot_add([complete_snapshot])
assert actual_result == {"snapshot:add": 1}
actual_result2 = swh_storage.snapshot_add([snapshot, complete_snapshot])
assert actual_result2 == {"snapshot:add": 1}
assert swh_storage.snapshot_get(complete_snapshot.id) == {
**complete_snapshot.to_dict(),
"next_branch": None,
}
assert swh_storage.snapshot_get(snapshot.id) == {
**snapshot.to_dict(),
"next_branch": None,
}
def test_snapshot_add_twice(self, swh_storage, sample_data):
snapshot, empty_snapshot = sample_data.snapshots[:2]
actual_result = swh_storage.snapshot_add([empty_snapshot])
assert actual_result == {"snapshot:add": 1}
assert list(swh_storage.journal_writer.journal.objects) == [
("snapshot", empty_snapshot)
]
actual_result = swh_storage.snapshot_add([snapshot])
assert actual_result == {"snapshot:add": 1}
assert list(swh_storage.journal_writer.journal.objects) == [
("snapshot", empty_snapshot),
("snapshot", snapshot),
]
def test_snapshot_add_count_branches(self, swh_storage, sample_data):
complete_snapshot = sample_data.snapshots[2]
actual_result = swh_storage.snapshot_add([complete_snapshot])
assert actual_result == {"snapshot:add": 1}
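# snapshot_count_branches returns a branch count per target type (the None key counts branches without a target)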
snp_size = swh_storage.snapshot_count_branches(complete_snapshot.id)
expected_snp_size = {
"alias": 1,
"content": 1,
"directory": 2,
"release": 1,
"revision": 1,
"snapshot": 1,
None: 1,
}
assert snp_size == expected_snp_size
def test_snapshot_add_get_paginated(self, swh_storage, sample_data):
complete_snapshot = sample_data.snapshots[2]
swh_storage.snapshot_add([complete_snapshot])
snp_id = complete_snapshot.id
branches = complete_snapshot.branches
branch_names = list(sorted(branches))
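# branch names sorted: snapshot_get_branches paginates in lexicographic order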
# Test branches_from
snapshot = swh_storage.snapshot_get_branches(snp_id, branches_from=b"release")
rel_idx = branch_names.index(b"release")
expected_snapshot = {
"id": snp_id,
"branches": {name: branches[name] for name in branch_names[rel_idx:]},
"next_branch": None,
}
assert snapshot == expected_snapshot
# Test branches_count
snapshot = swh_storage.snapshot_get_branches(snp_id, branches_count=1)
expected_snapshot = {
"id": snp_id,
"branches": {branch_names[0]: branches[branch_names[0]],},
"next_branch": b"content",
}
assert snapshot == expected_snapshot
# test branches_from + branches_count
snapshot = swh_storage.snapshot_get_branches(
snp_id, branches_from=b"directory", branches_count=3
)
dir_idx = branch_names.index(b"directory")
expected_snapshot = {
"id": snp_id,
"branches": {
name: branches[name] for name in branch_names[dir_idx : dir_idx + 3]
},
"next_branch": branch_names[dir_idx + 3],
}
assert snapshot == expected_snapshot
def test_snapshot_add_get_filtered(self, swh_storage, sample_data):
origin = sample_data.origin
complete_snapshot = sample_data.snapshots[2]
swh_storage.origin_add([origin])
visit = OriginVisit(
origin=origin.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
)
origin_visit1 = swh_storage.origin_visit_add([visit])[0]
swh_storage.snapshot_add([complete_snapshot])
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=origin_visit1.visit,
date=now(),
status="ongoing",
snapshot=complete_snapshot.id,
)
]
)
snp_id = complete_snapshot.id
branches = complete_snapshot.branches
snapshot = swh_storage.snapshot_get_branches(
snp_id, target_types=["release", "revision"]
)
expected_snapshot = {
"id": snp_id,
"branches": {
name: tgt
for name, tgt in branches.items()
if tgt and tgt.target_type in [TargetType.RELEASE, TargetType.REVISION]
},
"next_branch": None,
}
assert snapshot == expected_snapshot
snapshot = swh_storage.snapshot_get_branches(snp_id, target_types=["alias"])
expected_snapshot = {
"id": snp_id,
"branches": {
name: tgt
for name, tgt in branches.items()
if tgt and tgt.target_type == TargetType.ALIAS
},
"next_branch": None,
}
assert snapshot == expected_snapshot
def test_snapshot_add_get_filtered_and_paginated(self, swh_storage, sample_data):
complete_snapshot = sample_data.snapshots[2]
swh_storage.snapshot_add([complete_snapshot])
snp_id = complete_snapshot.id
branches = complete_snapshot.branches
branch_names = list(sorted(branches))
# Test branches_from
snapshot = swh_storage.snapshot_get_branches(
snp_id, target_types=["directory", "release"], branches_from=b"directory2"
)
expected_snapshot = {
"id": snp_id,
"branches": {name: branches[name] for name in (b"directory2", b"release")},
"next_branch": None,
}
assert snapshot == expected_snapshot
# Test branches_count
snapshot = swh_storage.snapshot_get_branches(
snp_id, target_types=["directory", "release"], branches_count=1
)
expected_snapshot = {
"id": snp_id,
"branches": {b"directory": branches[b"directory"]},
"next_branch": b"directory2",
}
assert snapshot == expected_snapshot
# Test branches_count
snapshot = swh_storage.snapshot_get_branches(
snp_id, target_types=["directory", "release"], branches_count=2
)
expected_snapshot = {
"id": snp_id,
"branches": {
name: branches[name] for name in (b"directory", b"directory2")
},
"next_branch": b"release",
}
assert snapshot == expected_snapshot
# test branches_from + branches_count
snapshot = swh_storage.snapshot_get_branches(
snp_id,
target_types=["directory", "release"],
branches_from=b"directory2",
branches_count=1,
)
dir_idx = branch_names.index(b"directory2")
expected_snapshot = {
"id": snp_id,
"branches": {branch_names[dir_idx]: branches[branch_names[dir_idx]],},
"next_branch": b"release",
}
assert snapshot == expected_snapshot
def test_snapshot_add_get_branch_by_type(self, swh_storage, sample_data):
complete_snapshot = sample_data.snapshots[2]
snapshot = complete_snapshot.to_dict()
alias1 = b"alias1"
alias2 = b"alias2"
target1 = random.choice(list(snapshot["branches"].keys()))
target2 = random.choice(list(snapshot["branches"].keys()))
snapshot["branches"][alias2] = {
"target": target2,
"target_type": "alias",
}
snapshot["branches"][alias1] = {
"target": target1,
"target_type": "alias",
}
new_snapshot = Snapshot.from_dict(snapshot)
swh_storage.snapshot_add([new_snapshot])
branches = swh_storage.snapshot_get_branches(
new_snapshot.id,
target_types=["alias"],
branches_from=alias1,
branches_count=1,
)["branches"]
assert len(branches) == 1
assert alias1 in branches
def test_snapshot_add_get(self, swh_storage, sample_data):
snapshot = sample_data.snapshot
origin = sample_data.origin
swh_storage.origin_add([origin])
visit = OriginVisit(
origin=origin.url,
date=sample_data.date_visit1,
type=sample_data.type_visit1,
)
ov1 = swh_storage.origin_visit_add([visit])[0]
swh_storage.snapshot_add([snapshot])
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=ov1.visit,
date=now(),
status="ongoing",
snapshot=snapshot.id,
)
]
)
expected_snapshot = {**snapshot.to_dict(), "next_branch": None}
by_id = swh_storage.snapshot_get(snapshot.id)
assert by_id == expected_snapshot
actual_visit = swh_storage.origin_visit_get_by(origin.url, ov1.visit)
assert actual_visit == ov1
visit_status = swh_storage.origin_visit_status_get_latest(
origin.url, ov1.visit, require_snapshot=True
)
assert visit_status.snapshot == snapshot.id
def test_snapshot_get_random(self, swh_storage, sample_data):
snapshot, empty_snapshot, complete_snapshot = sample_data.snapshots[:3]
swh_storage.snapshot_add([snapshot, empty_snapshot, complete_snapshot])
assert swh_storage.snapshot_get_random() in {
snapshot.id,
empty_snapshot.id,
complete_snapshot.id,
}
def test_snapshot_missing(self, swh_storage, sample_data):
snapshot, missing_snapshot = sample_data.snapshots[:2]
snapshots = [snapshot.id, missing_snapshot.id]
swh_storage.snapshot_add([snapshot])
missing_snapshots = swh_storage.snapshot_missing(snapshots)
assert list(missing_snapshots) == [missing_snapshot.id]
def test_stat_counters(self, swh_storage, sample_data):
origin = sample_data.origin
snapshot = sample_data.snapshot
revision = sample_data.revision
release = sample_data.release
directory = sample_data.directory
content = sample_data.content
expected_keys = ["content", "directory", "origin", "revision"]
# Initially, all counters are 0
swh_storage.refresh_stat_counters()
counters = swh_storage.stat_counters()
assert set(expected_keys) <= set(counters)
for key in expected_keys:
assert counters[key] == 0
# Add a content. Only the content counter should increase.
swh_storage.content_add([content])
swh_storage.refresh_stat_counters()
counters = swh_storage.stat_counters()
assert set(expected_keys) <= set(counters)
for key in expected_keys:
if key != "content":
assert counters[key] == 0
assert counters["content"] == 1
# Add other objects. Check their counter increased as well.
swh_storage.origin_add([origin])
visit = OriginVisit(
origin=origin.url,
date=sample_data.date_visit2,
type=sample_data.type_visit2,
)
origin_visit1 = swh_storage.origin_visit_add([visit])[0]
swh_storage.snapshot_add([snapshot])
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin.url,
visit=origin_visit1.visit,
date=now(),
status="ongoing",
snapshot=snapshot.id,
)
]
)
swh_storage.directory_add([directory])
swh_storage.revision_add([revision])
swh_storage.release_add([release])
swh_storage.refresh_stat_counters()
counters = swh_storage.stat_counters()
assert counters["content"] == 1
assert counters["directory"] == 1
assert counters["snapshot"] == 1
assert counters["origin"] == 1
assert counters["origin_visit"] == 1
assert counters["revision"] == 1
assert counters["release"] == 1
assert counters["snapshot"] == 1
if "person" in counters:
assert counters["person"] == 3
def test_content_find_ctime(self, swh_storage, sample_data):
origin_content = sample_data.content
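# rounded (presumably) so the ctime survives the storage round-trip unchanged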
ctime = round_to_milliseconds(now())
content = attr.evolve(origin_content, data=None, ctime=ctime)
swh_storage.content_add_metadata([content])
actually_present = swh_storage.content_find({"sha1": content.sha1})
assert actually_present[0] == content
def test_content_find_with_present_content(self, swh_storage, sample_data):
content = sample_data.content
expected_content = attr.evolve(content, data=None)
# 1. with something to find
swh_storage.content_add([content])
actually_present = swh_storage.content_find({"sha1": content.sha1})
assert 1 == len(actually_present)
assert actually_present[0] == expected_content
# 2. with something to find
actually_present = swh_storage.content_find({"sha1_git": content.sha1_git})
assert 1 == len(actually_present)
assert actually_present[0] == expected_content
# 3. with something to find
actually_present = swh_storage.content_find({"sha256": content.sha256})
assert 1 == len(actually_present)
assert actually_present[0] == expected_content
# 4. with something to find
actually_present = swh_storage.content_find(content.hashes())
assert 1 == len(actually_present)
assert actually_present[0] == expected_content
def test_content_find_with_non_present_content(self, swh_storage, sample_data):
missing_content = sample_data.skipped_content
# 1. with something that does not exist
actually_present = swh_storage.content_find({"sha1": missing_content.sha1})
assert actually_present == []
# 2. with something that does not exist
actually_present = swh_storage.content_find(
{"sha1_git": missing_content.sha1_git}
)
assert actually_present == []
# 3. with something that does not exist
actually_present = swh_storage.content_find({"sha256": missing_content.sha256})
assert actually_present == []
def test_content_find_with_duplicate_input(self, swh_storage, sample_data):
content = sample_data.content
# Create fake data with colliding sha256 and blake2s256
sha1_array = bytearray(content.sha1)
sha1_array[0] += 1
sha1git_array = bytearray(content.sha1_git)
sha1git_array[0] += 1
duplicated_content = attr.evolve(
content, sha1=bytes(sha1_array), sha1_git=bytes(sha1git_array)
)
# Inject the data
swh_storage.content_add([content, duplicated_content])
actual_result = swh_storage.content_find(
{
"blake2s256": duplicated_content.blake2s256,
"sha256": duplicated_content.sha256,
}
)
expected_content = attr.evolve(content, data=None)
expected_duplicated_content = attr.evolve(duplicated_content, data=None)
for result in actual_result:
assert result in [expected_content, expected_duplicated_content]
def test_content_find_with_duplicate_sha256(self, swh_storage, sample_data):
content = sample_data.content
hashes = {}
# Create fake data with colliding sha256
for hashalgo in ("sha1", "sha1_git", "blake2s256"):
value = bytearray(getattr(content, hashalgo))
value[0] += 1
hashes[hashalgo] = bytes(value)
duplicated_content = attr.evolve(
content,
sha1=hashes["sha1"],
sha1_git=hashes["sha1_git"],
blake2s256=hashes["blake2s256"],
)
swh_storage.content_add([content, duplicated_content])
actual_result = swh_storage.content_find({"sha256": duplicated_content.sha256})
assert len(actual_result) == 2
expected_content = attr.evolve(content, data=None)
expected_duplicated_content = attr.evolve(duplicated_content, data=None)
for result in actual_result:
assert result in [expected_content, expected_duplicated_content]
# Find with both sha256 and blake2s256
actual_result = swh_storage.content_find(
{
"sha256": duplicated_content.sha256,
"blake2s256": duplicated_content.blake2s256,
}
)
assert len(actual_result) == 1
assert actual_result == [expected_duplicated_content]
def test_content_find_with_duplicate_blake2s256(self, swh_storage, sample_data):
content = sample_data.content
# Create fake data where only blake2s256 collides
sha1_array = bytearray(content.sha1)
sha1_array[0] += 1
sha1git_array = bytearray(content.sha1_git)
sha1git_array[0] += 1
sha256_array = bytearray(content.sha256)
sha256_array[0] += 1
duplicated_content = attr.evolve(
content,
sha1=bytes(sha1_array),
sha1_git=bytes(sha1git_array),
sha256=bytes(sha256_array),
)
swh_storage.content_add([content, duplicated_content])
actual_result = swh_storage.content_find(
{"blake2s256": duplicated_content.blake2s256}
)
expected_content = attr.evolve(content, data=None)
expected_duplicated_content = attr.evolve(duplicated_content, data=None)
for result in actual_result:
assert result in [expected_content, expected_duplicated_content]
# Find with both sha256 and blake2s256
actual_result = swh_storage.content_find(
{
"sha256": duplicated_content.sha256,
"blake2s256": duplicated_content.blake2s256,
}
)
assert actual_result == [expected_duplicated_content]
def test_content_find_bad_input(self, swh_storage):
# 1. with no hash to lookup
with pytest.raises(StorageArgumentException):
swh_storage.content_find({}) # need at least one hash
# 2. with bad hash
with pytest.raises(StorageArgumentException):
swh_storage.content_find({"unknown-sha1": "something"}) # not the right key
def test_object_find_by_sha1_git(self, swh_storage, sample_data):
content = sample_data.content
directory = sample_data.directory
revision = sample_data.revision
release = sample_data.release
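# start with a dummy 20-byte identifier that matches no stored object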
sha1_gits = [b"00000000000000000000"]
expected = {
b"00000000000000000000": [],
}
swh_storage.content_add([content])
sha1_gits.append(content.sha1_git)
expected[content.sha1_git] = [
{"sha1_git": content.sha1_git, "type": "content",}
]
swh_storage.directory_add([directory])
sha1_gits.append(directory.id)
expected[directory.id] = [{"sha1_git": directory.id, "type": "directory",}]
swh_storage.revision_add([revision])
sha1_gits.append(revision.id)
expected[revision.id] = [{"sha1_git": revision.id, "type": "revision",}]
swh_storage.release_add([release])
sha1_gits.append(release.id)
expected[release.id] = [{"sha1_git": release.id, "type": "release",}]
ret = swh_storage.object_find_by_sha1_git(sha1_gits)
assert expected == ret
def test_metadata_fetcher_add_get(self, swh_storage, sample_data):
fetcher = sample_data.metadata_fetcher
actual_fetcher = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version)
assert actual_fetcher is None # does not exist
swh_storage.metadata_fetcher_add([fetcher])
res = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version)
assert res == fetcher
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_objects = [
("metadata_fetcher", fetcher),
]
for obj in expected_objects:
assert obj in actual_objects
def test_metadata_fetcher_add_zero(self, swh_storage, sample_data):
fetcher = sample_data.metadata_fetcher
actual_fetcher = swh_storage.metadata_fetcher_get(fetcher.name, fetcher.version)
assert actual_fetcher is None # does not exist
swh_storage.metadata_fetcher_add([])
def test_metadata_authority_add_get(self, swh_storage, sample_data):
authority = sample_data.metadata_authority
actual_authority = swh_storage.metadata_authority_get(
authority.type, authority.url
)
assert actual_authority is None # does not exist
swh_storage.metadata_authority_add([authority])
res = swh_storage.metadata_authority_get(authority.type, authority.url)
assert res == authority
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_objects = [
("metadata_authority", authority),
]
for obj in expected_objects:
assert obj in actual_objects
def test_metadata_authority_add_zero(self, swh_storage, sample_data):
authority = sample_data.metadata_authority
actual_authority = swh_storage.metadata_authority_get(
authority.type, authority.url
)
assert actual_authority is None # does not exist
swh_storage.metadata_authority_add([])
def test_content_metadata_add(self, swh_storage, sample_data):
content = sample_data.content
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
content_metadata = sample_data.content_metadata[:2]
content_swhid = SWHID(
object_type="content", object_id=hash_to_bytes(content.sha1_git)
)
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add(content_metadata)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content_swhid, authority
)
assert result.next_page_token is None
assert list(sorted(result.results, key=lambda x: x.discovery_date,)) == list(
content_metadata
)
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_objects = [
("metadata_authority", authority),
("metadata_fetcher", fetcher),
] + [("raw_extrinsic_metadata", item) for item in content_metadata]
for obj in expected_objects:
assert obj in actual_objects
def test_content_metadata_add_duplicate(self, swh_storage, sample_data):
"""Duplicates should be silently updated."""
content = sample_data.content
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
content_metadata, content_metadata2 = sample_data.content_metadata[:2]
content_swhid = SWHID(
object_type="content", object_id=hash_to_bytes(content.sha1_git)
)
new_content_metadata2 = attr.evolve(
content_metadata2, format="new-format", metadata=b"new-metadata",
)
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([content_metadata, content_metadata2])
swh_storage.raw_extrinsic_metadata_add([new_content_metadata2])
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content_swhid, authority
)
assert result.next_page_token is None
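# which of the two behaviors happens is backend-specific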
expected_results1 = (content_metadata, new_content_metadata2)
expected_results2 = (content_metadata, content_metadata2)
assert tuple(sorted(result.results, key=lambda x: x.discovery_date,)) in (
expected_results1, # cassandra
expected_results2, # postgresql
)
def test_content_metadata_get(self, swh_storage, sample_data):
content, content2 = sample_data.contents[:2]
fetcher, fetcher2 = sample_data.fetchers[:2]
authority, authority2 = sample_data.authorities[:2]
(
content1_metadata1,
content1_metadata2,
content1_metadata3,
) = sample_data.content_metadata[:3]
content1_swhid = SWHID(object_type="content", object_id=content.sha1_git)
content2_swhid = SWHID(object_type="content", object_id=content2.sha1_git)
content2_metadata = attr.evolve(content1_metadata2, id=content2_swhid)
swh_storage.metadata_authority_add([authority, authority2])
swh_storage.metadata_fetcher_add([fetcher, fetcher2])
swh_storage.raw_extrinsic_metadata_add(
[
content1_metadata1,
content1_metadata2,
content1_metadata3,
content2_metadata,
]
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content1_swhid, authority
)
assert result.next_page_token is None
assert [content1_metadata1, content1_metadata2] == list(
sorted(result.results, key=lambda x: x.discovery_date,)
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content1_swhid, authority2
)
assert result.next_page_token is None
assert [content1_metadata3] == list(
sorted(result.results, key=lambda x: x.discovery_date,)
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content2_swhid, authority
)
assert result.next_page_token is None
assert [content2_metadata] == list(result.results,)
def test_content_metadata_get_after(self, swh_storage, sample_data):
content = sample_data.content
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
content_metadata, content_metadata2 = sample_data.content_metadata[:2]
content_swhid = SWHID(object_type="content", object_id=content.sha1_git)
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([content_metadata, content_metadata2])
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT,
content_swhid,
authority,
after=content_metadata.discovery_date - timedelta(seconds=1),
)
assert result.next_page_token is None
assert [content_metadata, content_metadata2] == list(
sorted(result.results, key=lambda x: x.discovery_date,)
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT,
content_swhid,
authority,
after=content_metadata.discovery_date,
)
assert result.next_page_token is None
assert result.results == [content_metadata2]
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT,
content_swhid,
authority,
after=content_metadata2.discovery_date,
)
assert result.next_page_token is None
assert result.results == []
def test_content_metadata_get_paginate(self, swh_storage, sample_data):
content = sample_data.content
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
content_metadata, content_metadata2 = sample_data.content_metadata[:2]
content_swhid = SWHID(object_type="content", object_id=content.sha1_git)
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([content_metadata, content_metadata2])
swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content_swhid, authority
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content_swhid, authority, limit=1
)
assert result.next_page_token is not None
assert result.results == [content_metadata]
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT,
content_swhid,
authority,
limit=1,
page_token=result.next_page_token,
)
assert result.next_page_token is None
assert result.results == [content_metadata2]
def test_content_metadata_get_paginate_same_date(self, swh_storage, sample_data):
content = sample_data.content
fetcher1, fetcher2 = sample_data.fetchers[:2]
authority = sample_data.metadata_authority
content_metadata, content_metadata2 = sample_data.content_metadata[:2]
content_swhid = SWHID(object_type="content", object_id=content.sha1_git)
swh_storage.metadata_fetcher_add([fetcher1, fetcher2])
swh_storage.metadata_authority_add([authority])
new_content_metadata2 = attr.evolve(
content_metadata2,
discovery_date=content_metadata2.discovery_date,  # same date, different fetcher
fetcher=attr.evolve(fetcher2, metadata=None),
)
swh_storage.raw_extrinsic_metadata_add(
[content_metadata, new_content_metadata2]
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, content_swhid, authority, limit=1
)
assert result.next_page_token is not None
assert result.results == [content_metadata]
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT,
content_swhid,
authority,
limit=1,
page_token=result.next_page_token,
)
assert result.next_page_token is None
assert result.results == [new_content_metadata2]
def test_content_metadata_get__invalid_id(self, swh_storage, sample_data):
origin = sample_data.origin
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
content_metadata, content_metadata2 = sample_data.content_metadata[:2]
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([content_metadata, content_metadata2])
with pytest.raises(StorageArgumentException, match="SWHID"):
swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.CONTENT, origin.url, authority
)
def test_origin_metadata_add(self, swh_storage, sample_data):
origin = sample_data.origin
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([origin_metadata, origin_metadata2])
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin.url, authority
)
assert result.next_page_token is None
assert list(sorted(result.results, key=lambda x: x.discovery_date)) == [
origin_metadata,
origin_metadata2,
]
actual_objects = list(swh_storage.journal_writer.journal.objects)
expected_objects = [
("metadata_authority", authority),
("metadata_fetcher", fetcher),
("raw_extrinsic_metadata", origin_metadata),
("raw_extrinsic_metadata", origin_metadata2),
]
for obj in expected_objects:
assert obj in actual_objects
def test_origin_metadata_add_duplicate(self, swh_storage, sample_data):
"""Duplicates should be silently updated."""
origin = sample_data.origin
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
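# same target, authority, fetcher and date as origin_metadata2; only the format and payload change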
new_origin_metadata2 = attr.evolve(
origin_metadata2, format="new-format", metadata=b"new-metadata",
)
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([origin_metadata, origin_metadata2])
swh_storage.raw_extrinsic_metadata_add([new_origin_metadata2])
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin.url, authority
)
assert result.next_page_token is None
# Which of the two behaviors happens is backend-specific.
expected_results1 = (origin_metadata, new_origin_metadata2)
expected_results2 = (origin_metadata, origin_metadata2)
assert tuple(sorted(result.results, key=lambda x: x.discovery_date)) in (
expected_results1, # cassandra
expected_results2, # postgresql
)
def test_origin_metadata_get(self, swh_storage, sample_data):
origin, origin2 = sample_data.origins[:2]
fetcher, fetcher2 = sample_data.fetchers[:2]
authority, authority2 = sample_data.authorities[:2]
(
origin1_metadata1,
origin1_metadata2,
origin1_metadata3,
) = sample_data.origin_metadata[:3]
assert swh_storage.origin_add([origin, origin2]) == {"origin:add": 2}
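# duplicate of origin1_metadata2, retargeted at the second origin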
origin2_metadata = attr.evolve(origin1_metadata2, id=origin2.url)
swh_storage.metadata_authority_add([authority, authority2])
swh_storage.metadata_fetcher_add([fetcher, fetcher2])
swh_storage.raw_extrinsic_metadata_add(
[origin1_metadata1, origin1_metadata2, origin1_metadata3, origin2_metadata]
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin.url, authority
)
assert result.next_page_token is None
assert [origin1_metadata1, origin1_metadata2] == sorted(
result.results, key=lambda x: x.discovery_date
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin.url, authority2
)
assert result.next_page_token is None
assert [origin1_metadata3] == sorted(
result.results, key=lambda x: x.discovery_date
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin2.url, authority
)
assert result.next_page_token is None
assert [origin2_metadata] == list(result.results)
def test_origin_metadata_get_after(self, swh_storage, sample_data):
origin = sample_data.origin
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([origin_metadata, origin_metadata2])
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN,
origin.url,
authority,
after=origin_metadata.discovery_date - timedelta(seconds=1),
)
assert result.next_page_token is None
assert sorted(result.results, key=lambda x: x.discovery_date) == [
origin_metadata,
origin_metadata2,
]
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN,
origin.url,
authority,
after=origin_metadata.discovery_date,
)
assert result.next_page_token is None
assert result.results == [origin_metadata2]
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN,
origin.url,
authority,
after=origin_metadata2.discovery_date,
)
assert result.next_page_token is None
assert result.results == []
def test_origin_metadata_get_paginate(self, swh_storage, sample_data):
origin = sample_data.origin
fetcher = sample_data.metadata_fetcher
authority = sample_data.metadata_authority
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([origin_metadata, origin_metadata2])
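# non-paginated lookup, result unused; checks the call succeeds before paginating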
swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin.url, authority
)
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin.url, authority, limit=1
)
assert result.next_page_token is not None
assert result.results == [origin_metadata]
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN,
origin.url,
authority,
limit=1,
page_token=result.next_page_token,
)
assert result.next_page_token is None
assert result.results == [origin_metadata2]
def test_origin_metadata_get_paginate_same_date(self, swh_storage, sample_data):
origin = sample_data.origin
fetcher1, fetcher2 = sample_data.fetchers[:2]
authority = sample_data.metadata_authority
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
swh_storage.metadata_fetcher_add([fetcher1, fetcher2])
swh_storage.metadata_authority_add([authority])
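# evolve origin_metadata2 to share origin_metadata's discovery date, so pagination must break the tie on the fetcher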
new_origin_metadata2 = attr.evolve(
origin_metadata2,
discovery_date=origin_metadata.discovery_date,
fetcher=attr.evolve(fetcher2, metadata=None),
)
swh_storage.raw_extrinsic_metadata_add([origin_metadata, new_origin_metadata2])
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, origin.url, authority, limit=1
)
assert result.next_page_token is not None
assert result.results == [origin_metadata]
result = swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN,
origin.url,
authority,
limit=1,
page_token=result.next_page_token,
)
assert result.next_page_token is None
assert result.results == [new_origin_metadata2]
def test_origin_metadata_add_missing_authority(self, swh_storage, sample_data):
origin = sample_data.origin
fetcher = sample_data.metadata_fetcher
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
swh_storage.metadata_fetcher_add([fetcher])
with pytest.raises(StorageArgumentException, match="authority"):
swh_storage.raw_extrinsic_metadata_add([origin_metadata, origin_metadata2])
def test_origin_metadata_add_missing_fetcher(self, swh_storage, sample_data):
origin = sample_data.origin
authority = sample_data.metadata_authority
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
swh_storage.metadata_authority_add([authority])
with pytest.raises(StorageArgumentException, match="fetcher"):
swh_storage.raw_extrinsic_metadata_add([origin_metadata, origin_metadata2])
def test_origin_metadata_get__invalid_id_type(self, swh_storage, sample_data):
origin = sample_data.origin
authority = sample_data.metadata_authority
fetcher = sample_data.metadata_fetcher
origin_metadata, origin_metadata2 = sample_data.origin_metadata[:2]
content_metadata = sample_data.content_metadata[0]
assert swh_storage.origin_add([origin]) == {"origin:add": 1}
swh_storage.metadata_fetcher_add([fetcher])
swh_storage.metadata_authority_add([authority])
swh_storage.raw_extrinsic_metadata_add([origin_metadata, origin_metadata2])
with pytest.raises(StorageArgumentException, match="SWHID"):
swh_storage.raw_extrinsic_metadata_get(
MetadataTargetType.ORIGIN, content_metadata.id, authority,
)
class TestStorageGeneratedData:
def test_generate_content_get_data(self, swh_storage, swh_contents):
contents_with_data = [c for c in swh_contents if c.status != "absent"]
# retrieve contents
for content in contents_with_data:
actual_content_data = swh_storage.content_get_data(content.sha1)
assert actual_content_data is not None
assert actual_content_data == content.data
def test_generate_content_get(self, swh_storage, swh_contents):
expected_contents = [
attr.evolve(c, data=None) for c in swh_contents if c.status != "absent"
]
actual_contents = swh_storage.content_get([c.sha1 for c in expected_contents])
assert len(actual_contents) == len(expected_contents)
assert actual_contents == expected_contents
@pytest.mark.parametrize("limit", [1, 7, 10, 100, 1000])
def test_origin_list(self, swh_storage, swh_origins, limit):
returned_origins = []
page_token = None
i = 0
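# walk through every page until the server stops returning a page token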
while True:
actual_page = swh_storage.origin_list(page_token=page_token, limit=limit)
assert len(actual_page.results) <= limit
returned_origins.extend(actual_page.results)
i += 1
page_token = actual_page.next_page_token
if page_token is None:
assert i * limit >= len(swh_origins)
break
else:
assert len(actual_page.results) == limit
assert sorted(returned_origins) == sorted(swh_origins)
def test_origin_count(self, swh_storage, sample_data):
swh_storage.origin_add(sample_data.origins)
assert swh_storage.origin_count("github") == 3
assert swh_storage.origin_count("gitlab") == 2
assert swh_storage.origin_count(".*user.*", regexp=True) == 5
assert swh_storage.origin_count(".*user.*", regexp=False) == 0
assert swh_storage.origin_count(".*user1.*", regexp=True) == 2
assert swh_storage.origin_count(".*user1.*", regexp=False) == 0
def test_origin_count_with_visit_no_visits(self, swh_storage, sample_data):
swh_storage.origin_add(sample_data.origins)
# none of them have visits, so with_visit=True => 0
assert swh_storage.origin_count("github", with_visit=True) == 0
assert swh_storage.origin_count("gitlab", with_visit=True) == 0
assert swh_storage.origin_count(".*user.*", regexp=True, with_visit=True) == 0
assert swh_storage.origin_count(".*user.*", regexp=False, with_visit=True) == 0
assert swh_storage.origin_count(".*user1.*", regexp=True, with_visit=True) == 0
assert swh_storage.origin_count(".*user1.*", regexp=False, with_visit=True) == 0
def test_origin_count_with_visit_with_visits_no_snapshot(
self, swh_storage, sample_data
):
swh_storage.origin_add(sample_data.origins)
origin_url = "https://github.com/user1/repo1"
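# add a visit (without a snapshot) to a single github origin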
visit = OriginVisit(origin=origin_url, date=now(), type="git",)
swh_storage.origin_visit_add([visit])
assert swh_storage.origin_count("github", with_visit=False) == 3
# it has a visit, but no snapshot, so with_visit=True => 0
assert swh_storage.origin_count("github", with_visit=True) == 0
assert swh_storage.origin_count("gitlab", with_visit=False) == 2
# these gitlab origins have no visit
assert swh_storage.origin_count("gitlab", with_visit=True) == 0
assert (
swh_storage.origin_count("github.*user1", regexp=True, with_visit=False)
== 1
)
assert (
swh_storage.origin_count("github.*user1", regexp=True, with_visit=True) == 0
)
assert swh_storage.origin_count("github", regexp=True, with_visit=True) == 0
def test_origin_count_with_visit_with_visits_and_snapshot(
self, swh_storage, sample_data
):
snapshot = sample_data.snapshot
swh_storage.origin_add(sample_data.origins)
swh_storage.snapshot_add([snapshot])
origin_url = "https://github.com/user1/repo1"
visit = OriginVisit(origin=origin_url, date=now(), type="git",)
visit = swh_storage.origin_visit_add([visit])[0]
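# mark the visit as ongoing and attach the snapshot to it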
swh_storage.origin_visit_status_add(
[
OriginVisitStatus(
origin=origin_url,
visit=visit.visit,
date=now(),
status="ongoing",
snapshot=snapshot.id,
)
]
)
assert swh_storage.origin_count("github", with_visit=False) == 3
# github/user1 has a visit and a snapshot, so with_visit=True => 1
assert swh_storage.origin_count("github", with_visit=True) == 1
assert (
swh_storage.origin_count("github.*user1", regexp=True, with_visit=False)
== 1
)
assert (
swh_storage.origin_count("github.*user1", regexp=True, with_visit=True) == 1
)
assert swh_storage.origin_count("github", regexp=True, with_visit=True) == 1
@settings(suppress_health_check=[HealthCheck.too_slow])
@given(strategies.lists(objects(split_content=True), max_size=2))
def test_add_arbitrary(self, swh_storage, objects):
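# property-based test: hypothesis generates arbitrary (object_type, object) pairs to insert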
for (obj_type, obj) in objects:
if obj.object_type == "origin_visit":
swh_storage.origin_add([Origin(url=obj.origin)])
visit = OriginVisit(origin=obj.origin, date=obj.date, type=obj.type,)
swh_storage.origin_visit_add([visit])
else:
method = getattr(swh_storage, obj_type + "_add")
try:
method([obj])
except HashCollision:
pass
diff --git a/swh/storage/tests/test_pytest_plugin.py b/swh/storage/tests/test_pytest_plugin.py
index 5a59c5e9..63646011 100644
--- a/swh/storage/tests/test_pytest_plugin.py
+++ b/swh/storage/tests/test_pytest_plugin.py
@@ -1,19 +1,19 @@
# Copyright (C) 2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.storage.interface import StorageInterface
from swh.storage.tests.storage_data import StorageData
def test_sample_data(sample_data):
assert isinstance(sample_data, StorageData)
def test_swh_storage(swh_storage: StorageInterface):
- assert isinstance(swh_storage, StorageInterface) is not None
+ assert isinstance(swh_storage, StorageInterface)
def test_swh_storage_backend_config(swh_storage_backend_config):
assert isinstance(swh_storage_backend_config, dict)
