diff --git a/swh/storage/api/server.py b/swh/storage/api/server.py
--- a/swh/storage/api/server.py
+++ b/swh/storage/api/server.py
@@ -15,7 +15,7 @@

 from ..exc import StorageArgumentException
 from ..interface import StorageInterface
-from ..metrics import timed
+from ..metrics import send_metric, timed
 from .serializers import DECODERS, ENCODERS

@@ -31,6 +31,61 @@
     extra_type_decoders = DECODERS
     extra_type_encoders = ENCODERS

+    method_decorators = [timed]
+
+    def _process_metrics(self, metrics, endpoint):
+        for metric, count in metrics.items():
+            send_metric(metric=metric, count=count, method_name=endpoint)
+
+    def post_content_add(self, ret, kw):
+        self._process_metrics(ret, "content_add")
+
+    def post_content_add_metadata(self, ret, kw):
+        self._process_metrics(ret, "content_add_metadata")
+
+    def post_skipped_content_add(self, ret, kw):
+        self._process_metrics(ret, "skipped_content_add")
+
+    def post_directory_add(self, ret, kw):
+        self._process_metrics(ret, "directory_add")
+
+    def post_revision_add(self, ret, kw):
+        self._process_metrics(ret, "revision_add")
+
+    def post_release_add(self, ret, kw):
+        self._process_metrics(ret, "release_add")
+
+    def post_snapshot_add(self, ret, kw):
+        self._process_metrics(ret, "snapshot_add")
+
+    def post_origin_visit_status_add(self, ret, kw):
+        self._process_metrics(ret, "origin_visit_status_add")
+
+    def post_origin_add(self, ret, kw):
+        self._process_metrics(ret, "origin_add")
+
+    def post_raw_extrinsic_metadata_add(self, ret, kw):
+        self._process_metrics(ret, "raw_extrinsic_metadata_add")
+
+    def post_metadata_fetcher_add(self, ret, kw):
+        self._process_metrics(ret, "metadata_fetcher_add")
+
+    def post_metadata_authority_add(self, ret, kw):
+        self._process_metrics(ret, "metadata_authority_add")
+
+    def post_extid_add(self, ret, kw):
+        self._process_metrics(ret, "extid_add")
+
+    def post_origin_visit_add(self, ret, kw):
+        nb_visits = len(ret)
+        send_metric(
+            "origin_visit:add",
+            count=nb_visits,
+            # method_name should be "origin_visit_add", but changing it now would break
+            # existing metrics
+            method_name="origin_visit",
+        )
+

 app = StorageServerApp(
     __name__, backend_class=StorageInterface, backend_factory=get_storage
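The hooks above rely on the RPC app invoking a `post_<endpoint>` method with the endpoint's return value after each successful call, so each `*_add` summary dict can be turned into statsd counters in one place. A minimal sketch of that flow, assuming such a dispatch exists; `ToyApp` and `handle` are hypothetical stand-ins, not the real `RPCServerApp` API:

```python
# Toy illustration of the post-hook dispatch: after an endpoint returns, a
# `post_<endpoint>` method (if defined) receives the return value and the
# call's keyword arguments. Names here are assumptions for illustration only.
class ToyApp:
    def handle(self, endpoint: str, ret, kw):
        hook = getattr(self, f"post_{endpoint}", None)
        if hook is not None:
            hook(ret, kw)
        return ret

    def post_content_add(self, ret, kw):
        # mirrors _process_metrics above: one counter per summary key
        for metric, count in ret.items():
            print(f"send_metric(metric={metric!r}, count={count}, "
                  f"method_name='content_add')")

ToyApp().handle("content_add", {"content:add": 3, "content:add:bytes": 1024}, {})
```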
diff --git a/swh/storage/cassandra/storage.py b/swh/storage/cassandra/storage.py
--- a/swh/storage/cassandra/storage.py
+++ b/swh/storage/cassandra/storage.py
@@ -58,7 +58,6 @@
     PartialBranches,
     Sha1,
 )
-from swh.storage.metrics import process_metrics, send_metric, timed
 from swh.storage.objstorage import ObjStorage
 from swh.storage.utils import map_optional, now
 from swh.storage.writer import JournalWriter
@@ -152,7 +151,6 @@
             self._hosts, self._keyspace, self._port, self._consistency_level
         )

-    @timed
    def check_config(self, *, check_write: bool) -> bool:
         self._cql_runner.check_read()
@@ -256,8 +254,6 @@

         return summary

-    @timed
-    @process_metrics
     def content_add(self, content: List[Content]) -> Dict[str, int]:
         to_add = {
             (c.sha1, c.sha1_git, c.sha256, c.blake2s256): c for c in content
@@ -265,7 +261,6 @@
         contents = [attr.evolve(c, ctime=now()) for c in to_add]
         return self._content_add(list(contents), with_data=True)

-    @timed
     def content_update(
         self, contents: List[Dict[str, Any]], keys: List[str] = []
     ) -> None:
@@ -273,17 +268,13 @@
             "content_update is not supported by the Cassandra backend"
         )

-    @timed
-    @process_metrics
     def content_add_metadata(self, content: List[Content]) -> Dict[str, int]:
         return self._content_add(content, with_data=False)

-    @timed
     def content_get_data(self, content: Sha1) -> Optional[bytes]:
         # FIXME: Make this method support slicing the `data`
         return self.objstorage.content_get(content)

-    @timed
     def content_get_partition(
         self,
         partition_id: int,
@@ -325,7 +316,6 @@
         assert len(contents) <= limit
         return PagedResult(results=contents, next_page_token=next_page_token)

-    @timed
     def content_get(
         self, contents: List[bytes], algo: str = "sha1"
     ) -> List[Optional[Content]]:
@@ -345,7 +335,6 @@
             contents_by_hash[key(content)] = content
         return [contents_by_hash.get(hash_) for hash_ in contents]

-    @timed
     def content_find(self, content: Dict[str, Any]) -> List[Content]:
         return self._content_find_many([content])
@@ -386,7 +375,6 @@
                 pass
         return results

-    @timed
     def content_missing(
         self, contents: List[Dict[str, Any]], key_hash: str = "sha1"
     ) -> Iterable[bytes]:
@@ -457,11 +445,9 @@
                 # Not found
                 yield missing_content[key_hash]

-    @timed
     def content_missing_per_sha1(self, contents: List[bytes]) -> Iterable[bytes]:
         return self.content_missing([{"sha1": c} for c in contents])

-    @timed
     def content_missing_per_sha1_git(
         self, contents: List[Sha1Git]
     ) -> Iterable[Sha1Git]:
@@ -469,7 +455,6 @@
             [{"sha1_git": c} for c in contents], key_hash="sha1_git"
         )

-    @timed
     def content_get_random(self) -> Sha1Git:
         content = self._cql_runner.content_get_random()
         assert content, "Could not find any content"
@@ -501,13 +486,10 @@

         return {"skipped_content:add": len(contents)}

-    @timed
-    @process_metrics
     def skipped_content_add(self, content: List[SkippedContent]) -> Dict[str, int]:
         contents = [attr.evolve(c, ctime=now()) for c in content]
         return self._skipped_content_add(contents)

-    @timed
     def skipped_content_missing(
         self, contents: List[Dict[str, Any]]
     ) -> Iterable[Dict[str, Any]]:
@@ -515,8 +497,6 @@
             if not self._cql_runner.skipped_content_get_from_pk(content):
                 yield {algo: content[algo] for algo in DEFAULT_ALGORITHMS}

-    @timed
-    @process_metrics
     def directory_add(self, directories: List[Directory]) -> Dict[str, int]:
         to_add = {d.id: d for d in directories}.values()
         if not self._allow_overwrite:
@@ -554,7 +534,6 @@

         return {"directory:add": len(directories)}

-    @timed
     def directory_missing(self, directories: List[Sha1Git]) -> Iterable[Sha1Git]:
         return self._cql_runner.directory_missing(directories)
@@ -620,7 +599,6 @@
                     ret["target"], True, prefix + ret["name"] + b"/"
                 )

-    @timed
     def directory_entry_get_by_path(
         self, directory: Sha1Git, paths: List[bytes]
     ) -> Optional[Dict[str, Any]]:
@@ -660,13 +638,11 @@
             first_item["target"], paths[1:], prefix + paths[0] + b"/"
         )

-    @timed
     def directory_ls(
         self, directory: Sha1Git, recursive: bool = False
     ) -> Iterable[Dict[str, Any]]:
         yield from self._directory_ls(directory, recursive)

-    @timed
     def directory_get_entries(
         self,
         directory_id: Sha1Git,
@@ -691,7 +667,6 @@
             next_page_token = None
         return PagedResult(results=entries, next_page_token=next_page_token)

-    @timed
     def directory_get_raw_manifest(
         self, directory_ids: List[Sha1Git]
     ) -> Dict[Sha1Git, Optional[bytes]]:
@@ -700,14 +675,11 @@
             for dir_ in self._cql_runner.directory_get(directory_ids)
         }

-    @timed
     def directory_get_random(self) -> Sha1Git:
         directory = self._cql_runner.directory_get_random()
         assert directory, "Could not find any directory"
         return directory.id

-    @timed
-    @process_metrics
     def revision_add(self, revisions: List[Revision]) -> Dict[str, int]:
         # Filter-out revisions already in the database
         if not self._allow_overwrite:
@@ -735,11 +707,9 @@

         return {"revision:add": len(revisions)}

-    @timed
     def revision_missing(self, revisions: List[Sha1Git]) -> Iterable[Sha1Git]:
         return self._cql_runner.revision_missing(revisions)

-    @timed
     def revision_get(
         self, revision_ids: List[Sha1Git], ignore_displayname: bool = False
     ) -> List[Optional[Revision]]:
@@ -808,7 +778,6 @@
             yield rev.to_dict()
             yield from self._get_parent_revs(parents, seen, limit, short)

-    @timed
     def revision_log(
         self,
         revisions: List[Sha1Git],
@@ -818,21 +787,17 @@
         seen: Set[Sha1Git] = set()
         yield from self._get_parent_revs(revisions, seen, limit, False)

-    @timed
     def revision_shortlog(
         self, revisions: List[Sha1Git], limit: Optional[int] = None
     ) -> Iterable[Optional[Tuple[Sha1Git, Tuple[Sha1Git, ...]]]]:
         seen: Set[Sha1Git] = set()
         yield from self._get_parent_revs(revisions, seen, limit, True)

-    @timed
     def revision_get_random(self) -> Sha1Git:
         revision = self._cql_runner.revision_get_random()
         assert revision, "Could not find any revision"
         return revision.id

-    @timed
-    @process_metrics
     def release_add(self, releases: List[Release]) -> Dict[str, int]:
         if not self._allow_overwrite:
             to_add = {r.id: r for r in releases}.values()
@@ -846,11 +811,9 @@

         return {"release:add": len(releases)}

-    @timed
     def release_missing(self, releases: List[Sha1Git]) -> Iterable[Sha1Git]:
         return self._cql_runner.release_missing(releases)

-    @timed
     def release_get(
         self, releases: List[Sha1Git], ignore_displayname: bool = False
     ) -> List[Optional[Release]]:
@@ -862,14 +825,11 @@

         return [rels.get(rel_id) for rel_id in releases]

-    @timed
     def release_get_random(self) -> Sha1Git:
         release = self._cql_runner.release_get_random()
         assert release, "Could not find any release"
         return release.id

-    @timed
-    @process_metrics
     def snapshot_add(self, snapshots: List[Snapshot]) -> Dict[str, int]:
         if not self._allow_overwrite:
             to_add = {s.id: s for s in snapshots}.values()
@@ -903,11 +863,9 @@

         return {"snapshot:add": len(snapshots)}

-    @timed
     def snapshot_missing(self, snapshots: List[Sha1Git]) -> Iterable[Sha1Git]:
         return self._cql_runner.snapshot_missing(snapshots)

-    @timed
     def snapshot_get(self, snapshot_id: Sha1Git) -> Optional[Dict[str, Any]]:
         d = self.snapshot_get_branches(snapshot_id)
         if d is None:
@@ -921,7 +879,6 @@
             "next_branch": d["next_branch"],
         }

-    @timed
     def snapshot_count_branches(
         self, snapshot_id: Sha1Git, branch_name_exclude_prefix: Optional[bytes] = None,
     ) -> Optional[Dict[Optional[str], int]]:
@@ -934,7 +891,6 @@
             snapshot_id, branch_name_exclude_prefix
         )

-    @timed
     def snapshot_get_branches(
         self,
         snapshot_id: Sha1Git,
@@ -1010,20 +966,18 @@
             next_branch=last_branch,
         )

-    @timed
     def snapshot_get_random(self) -> Sha1Git:
         snapshot = self._cql_runner.snapshot_get_random()
         assert snapshot, "Could not find any snapshot"
         return snapshot.id

-    @timed
     def object_find_by_sha1_git(self, ids: List[Sha1Git]) -> Dict[Sha1Git, List[Dict]]:
         results: Dict[Sha1Git, List[Dict]] = {id_: [] for id_ in ids}
         missing_ids = set(ids)

         # Mind the order, revision is the most likely one for a given ID,
         # so we check revisions first.
-        queries: List[Tuple[str, Callable[[List[Sha1Git]], List[Sha1Git]]]] = [
+        queries: List[Tuple[str, Callable[[List[Sha1Git]], Iterable[Sha1Git]]]] = [
             ("revision", self._cql_runner.revision_missing),
             ("release", self._cql_runner.release_missing),
             ("content", self.content_missing_per_sha1_git),
@@ -1044,11 +998,9 @@

         return results

-    @timed
     def origin_get(self, origins: List[str]) -> Iterable[Optional[Origin]]:
         return [self.origin_get_one(origin) for origin in origins]

-    @timed
     def origin_get_one(self, origin_url: str) -> Optional[Origin]:
         """Given an origin url, return the origin if it exists, None otherwise

@@ -1060,7 +1012,6 @@
         else:
             return None

-    @timed
     def origin_get_by_sha1(self, sha1s: List[bytes]) -> List[Optional[Dict[str, Any]]]:
         results = []
         for sha1 in sha1s:
@@ -1069,7 +1020,6 @@
             results.append(origin)
         return results

-    @timed
     def origin_list(
         self, page_token: Optional[str] = None, limit: int = 100
     ) -> PagedResult[Origin]:
@@ -1098,7 +1048,6 @@

         return PagedResult(results=origins, next_page_token=next_page_token)

-    @timed
     def origin_search(
         self,
         url_pattern: str,
@@ -1146,7 +1095,6 @@
         assert len(origins) <= limit
         return PagedResult(results=origins, next_page_token=next_page_token)

-    @timed
     def origin_count(
         self, url_pattern: str, regexp: bool = False, with_visit: bool = False
     ) -> int:
@@ -1154,12 +1102,9 @@
             "The Cassandra backend does not implement origin_count"
         )

-    @timed
     def origin_snapshot_get_all(self, origin_url: str) -> List[Sha1Git]:
         return list(self._cql_runner.origin_snapshot_get_all(origin_url))

-    @timed
-    @process_metrics
     def origin_add(self, origins: List[Origin]) -> Dict[str, int]:
         if not self._allow_overwrite:
             to_add = {o.url: o for o in origins}.values()
@@ -1172,7 +1117,6 @@
         )
         return {"origin:add": len(origins)}

-    @timed
     def origin_visit_add(self, visits: List[OriginVisit]) -> Iterable[OriginVisit]:
         for visit in visits:
             origin = self.origin_get_one(visit.origin)
@@ -1180,9 +1124,7 @@
                 raise StorageArgumentException("Unknown origin %s", visit.origin)

         all_visits = []
-        nb_visits = 0
         for visit in visits:
-            nb_visits += 1
             if visit.visit:
                 # Set origin.next_visit_id = max(origin.next_visit_id, visit.visit+1)
                 # so the next loader run does not reuse the id.
@@ -1206,7 +1148,6 @@
                     snapshot=None,
                 )
             )
-        send_metric("origin_visit:add", count=nb_visits, method_name="origin_visit")
         return all_visits

     def _origin_visit_status_add(self, visit_status: OriginVisitStatus) -> None:
@@ -1227,8 +1168,6 @@
             converters.visit_status_to_row(visit_status)
         )

-    @timed
-    @process_metrics
     def origin_visit_status_add(
         self, visit_statuses: List[OriginVisitStatus]
     ) -> Dict[str, int]:
@@ -1282,7 +1221,6 @@
             "date": visit.date.replace(tzinfo=datetime.timezone.utc),
         }

-    @timed
     def origin_visit_get(
         self,
         origin: str,
@@ -1311,7 +1249,6 @@

         return PagedResult(results=visits, next_page_token=next_page_token)

-    @timed
     def origin_visit_status_get(
         self,
         origin: str,
@@ -1338,7 +1275,6 @@

         return PagedResult(results=visit_statuses, next_page_token=next_page_token)

-    @timed
     def origin_visit_find_by_date(
         self, origin: str, visit_date: datetime.datetime
     ) -> Optional[OriginVisit]:
@@ -1355,14 +1291,12 @@
             return converters.row_to_visit(min(rows, key=key))
         return None

-    @timed
     def origin_visit_get_by(self, origin: str, visit: int) -> Optional[OriginVisit]:
         row = self._cql_runner.origin_visit_get_one(origin, visit)
         if row:
             return converters.row_to_visit(row)
         return None

-    @timed
     def origin_visit_get_latest(
         self,
         origin: str,
@@ -1410,7 +1344,6 @@
             type=latest_visit["type"],
         )

-    @timed
     def origin_visit_status_get_latest(
         self,
         origin_url: str,
@@ -1433,7 +1366,6 @@
             return None
         return converters.row_to_visit_status(rows[0])

-    @timed
     def origin_visit_status_get_random(self, type: str) -> Optional[OriginVisitStatus]:

         back_in_the_day = now() - datetime.timedelta(weeks=12)  # 3 months back
@@ -1449,7 +1381,6 @@
                 return visit_status
         return None

-    @timed
     def stat_counters(self):
         rows = self._cql_runner.stat_counters()
         keys = (
@@ -1466,12 +1397,9 @@
         stats.update({row.object_type: row.count for row in rows})
         return stats

-    @timed
     def refresh_stat_counters(self):
         pass

-    @timed
-    @process_metrics
     def raw_extrinsic_metadata_add(
         self, metadata: List[RawExtrinsicMetadata]
     ) -> Dict[str, int]:
@@ -1531,7 +1459,6 @@
             f"{type.value}_metadata:add": count for (type, count) in counter.items()
         }

-    @timed
     def raw_extrinsic_metadata_get(
         self,
         target: ExtendedSWHID,
@@ -1579,7 +1506,6 @@

         return PagedResult(next_page_token=next_page_token, results=results,)

-    @timed
     def raw_extrinsic_metadata_get_by_ids(
         self, ids: List[Sha1Git]
     ) -> List[RawExtrinsicMetadata]:
@@ -1602,7 +1528,6 @@

         return list(results)

-    @timed
     def raw_extrinsic_metadata_get_authorities(
         self, target: ExtendedSWHID
     ) -> List[MetadataAuthority]:
@@ -1615,8 +1540,6 @@
             )
         ]

-    @timed
-    @process_metrics
     def metadata_fetcher_add(self, fetchers: List[MetadataFetcher]) -> Dict[str, int]:
         self.journal_writer.metadata_fetcher_add(fetchers)
         for fetcher in fetchers:
@@ -1625,7 +1548,6 @@
             )
         return {"metadata_fetcher:add": len(fetchers)}

-    @timed
     def metadata_fetcher_get(
         self, name: str, version: str
     ) -> Optional[MetadataFetcher]:
@@ -1635,8 +1557,6 @@
         else:
             return None

-    @timed
-    @process_metrics
     def metadata_authority_add(
         self, authorities: List[MetadataAuthority]
     ) -> Dict[str, int]:
@@ -1647,7 +1567,6 @@
         )
         return {"metadata_authority:add": len(authorities)}

-    @timed
     def metadata_authority_get(
         self, type: MetadataAuthorityType, url: str
     ) -> Optional[MetadataAuthority]:
@@ -1660,8 +1579,7 @@
         return None

     # ExtID tables
-    @timed
-    @process_metrics
+
     def extid_add(self, ids: List[ExtID]) -> Dict[str, int]:
         if not self._allow_overwrite:
             extids = [
@@ -1701,7 +1619,6 @@
                 inserted += 1
         return {"extid:add": inserted}

-    @timed
     def extid_get_from_extid(
         self, id_type: str, ids: List[bytes], version: Optional[int] = None
     ) -> List[ExtID]:
@@ -1726,7 +1643,6 @@
         )
         return result

-    @timed
     def extid_get_from_target(
         self,
         target_type: SwhidObjectType,
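With the decorators stripped from the Cassandra backend, each `*_add` method now simply returns its summary dict and the server emits the counters. For context, a plausible sketch of what the removed `@process_metrics` decorator did, inferred from its usage here (the real implementation lives in `swh.storage.metrics`; `send_metric` is stubbed below):

```python
# Hypothetical reconstruction of the removed decorator: wrap an *_add method
# and forward each key of its returned summary dict to send_metric.
import functools


def send_metric(metric, count, method_name):
    # stand-in for swh.storage.metrics.send_metric (statsd counter)
    print(f"{metric} += {count} ({method_name})")


def process_metrics(f):
    @functools.wraps(f)
    def newf(*args, **kwargs):
        ret = f(*args, **kwargs)
        for metric, count in ret.items():
            send_metric(metric=metric, count=count, method_name=f.__name__)
        return ret

    return newf


@process_metrics
def origin_add(origins):
    return {"origin:add": len(origins)}


origin_add(["https://example.org/repo.git"])  # emits: origin:add += 1 (origin_add)
```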
diff --git a/swh/storage/postgresql/storage.py b/swh/storage/postgresql/storage.py
--- a/swh/storage/postgresql/storage.py
+++ b/swh/storage/postgresql/storage.py
@@ -50,7 +50,6 @@
     PagedResult,
     PartialBranches,
 )
-from swh.storage.metrics import process_metrics, send_metric, timed
 from swh.storage.objstorage import ObjStorage
 from swh.storage.utils import (
     extract_collision_hash,
@@ -160,7 +159,6 @@
             if db:
                 self.put_db(db)

-    @timed
     @db_transaction()
     def check_config(self, *, check_write: bool, db: Db, cur=None) -> bool:

@@ -231,8 +229,6 @@
             else:
                 raise

-    @timed
-    @process_metrics
     def content_add(self, content: List[Content]) -> Dict[str, int]:
         ctime = now()

@@ -265,7 +261,6 @@
             "content:add:bytes": objstorage_summary["content:add:bytes"],
         }

-    @timed
     @db_transaction()
     def content_update(
         self, contents: List[Dict[str, Any]], keys: List[str] = [], *, db: Db, cur=None
@@ -280,8 +275,6 @@
         db.copy_to(contents, "tmp_content", select_keys, cur)
         db.content_update_from_temp(keys_to_update=keys, cur=cur)

-    @timed
-    @process_metrics
     @db_transaction()
     def content_add_metadata(
         self, content: List[Content], *, db: Db, cur=None
@@ -298,12 +291,10 @@
             "content:add": len(contents),
         }

-    @timed
     def content_get_data(self, content: Sha1) -> Optional[bytes]:
         # FIXME: Make this method support slicing the `data`
         return self.objstorage.content_get(content)

-    @timed
     @db_transaction()
     def content_get_partition(
         self,
@@ -339,7 +330,6 @@
         assert len(contents) <= limit
         return PagedResult(results=contents, next_page_token=next_page_token)

-    @timed
     @db_transaction(statement_timeout=500)
     def content_get(
         self, contents: List[bytes], algo: str = "sha1", *, db: Db, cur=None
@@ -360,7 +350,6 @@

         return [contents_by_hash.get(sha1) for sha1 in contents]

-    @timed
     @db_transaction_generator()
     def content_missing(
         self,
@@ -381,7 +370,6 @@
         for obj in db.content_missing_from_list(contents, cur):
             yield obj[key_hash_idx]

-    @timed
     @db_transaction_generator()
     def content_missing_per_sha1(
         self, contents: List[bytes], *, db: Db, cur=None
@@ -389,7 +377,6 @@
         for obj in db.content_missing_per_sha1(contents, cur):
             yield obj[0]

-    @timed
     @db_transaction_generator()
     def content_missing_per_sha1_git(
         self, contents: List[bytes], *, db: Db, cur=None
@@ -397,7 +384,6 @@
         for obj in db.content_missing_per_sha1_git(contents, cur):
             yield obj[0]

-    @timed
     @db_transaction()
     def content_find(
         self, content: Dict[str, Any], *, db: Db, cur=None
@@ -421,7 +407,6 @@
             contents.append(Content(**row_d))
         return contents

-    @timed
     @db_transaction()
     def content_get_random(self, *, db: Db, cur=None) -> Sha1Git:
         return db.content_get_random(cur)
@@ -455,8 +440,6 @@
         # move metadata in place
         db.skipped_content_add_from_temp(cur)

-    @timed
-    @process_metrics
     @db_transaction()
     def skipped_content_add(
         self, content: List[SkippedContent], *, db: Db, cur=None
@@ -486,7 +469,6 @@
             "skipped_content:add": len(content),
         }

-    @timed
     @db_transaction_generator()
     def skipped_content_missing(
         self, contents: List[Dict[str, Any]], *, db: Db, cur=None
@@ -495,8 +477,6 @@
         for content in db.skipped_content_missing(contents, cur):
             yield dict(zip(db.content_hash_keys, content))

-    @timed
-    @process_metrics
     @db_transaction()
     def directory_add(
         self, directories: List[Directory], *, db: Db, cur=None
@@ -558,7 +538,6 @@

         return summary

-    @timed
     @db_transaction_generator()
     def directory_missing(
         self, directories: List[Sha1Git], *, db: Db, cur=None
@@ -566,7 +545,6 @@
         for obj in db.directory_missing_from_list(directories, cur):
             yield obj[0]

-    @timed
     @db_transaction_generator(statement_timeout=20000)
     def directory_ls(
         self, directory: Sha1Git, recursive: bool = False, *, db: Db, cur=None
@@ -579,7 +557,6 @@
         for line in res_gen:
             yield dict(zip(db.directory_ls_cols, line))

-    @timed
     @db_transaction(statement_timeout=2000)
     def directory_entry_get_by_path(
         self, directory: Sha1Git, paths: List[bytes], *, db: Db, cur=None
@@ -587,7 +564,6 @@
         res = db.directory_entry_get_by_path(directory, paths, cur)
         return dict(zip(db.directory_ls_cols, res)) if res else None

-    @timed
     @db_transaction()
     def directory_get_random(self, *, db: Db, cur=None) -> Sha1Git:
         return db.directory_get_random(cur)
@@ -618,15 +594,12 @@
             next_page_token=None,
         )

-    @timed
     @db_transaction()
     def directory_get_raw_manifest(
         self, directory_ids: List[Sha1Git], *, db: Db, cur=None
     ) -> Dict[Sha1Git, Optional[bytes]]:
         return dict(db.directory_get_raw_manifest(directory_ids, cur=cur))

-    @timed
-    @process_metrics
     @db_transaction()
     def revision_add(
         self, revisions: List[Revision], *, db: Db, cur=None
@@ -674,7 +647,6 @@

         return {"revision:add": len(revisions_missing)}

-    @timed
     @db_transaction_generator()
     def revision_missing(
         self, revisions: List[Sha1Git], *, db: Db, cur=None
@@ -685,7 +657,6 @@
         for obj in db.revision_missing_from_list(revisions, cur):
             yield obj[0]

-    @timed
     @db_transaction(statement_timeout=1000)
     def revision_get(
         self,
@@ -702,7 +673,6 @@

         return revisions

-    @timed
     @db_transaction_generator(statement_timeout=2000)
     def revision_log(
         self,
@@ -722,19 +692,16 @@
                 continue
             yield data.to_dict()

-    @timed
     @db_transaction_generator(statement_timeout=2000)
     def revision_shortlog(
         self, revisions: List[Sha1Git], limit: Optional[int] = None, *, db: Db, cur=None
     ) -> Iterable[Optional[Tuple[Sha1Git, Tuple[Sha1Git, ...]]]]:
         yield from db.revision_shortlog(revisions, limit, cur)

-    @timed
     @db_transaction()
     def revision_get_random(self, *, db: Db, cur=None) -> Sha1Git:
         return db.revision_get_random(cur)

-    @timed
     @db_transaction()
     def extid_get_from_extid(
         self,
@@ -751,7 +718,6 @@
             extids.append(converters.db_to_extid(dict(zip(db.extid_cols, row))))
         return extids

-    @timed
     @db_transaction()
     def extid_get_from_target(
         self,
@@ -780,7 +746,6 @@
             extids.append(converters.db_to_extid(dict(zip(db.extid_cols, row))))
         return extids

-    @timed
     @db_transaction()
     def extid_add(self, ids: List[ExtID], *, db: Db, cur=None) -> Dict[str, int]:
         extid = [
@@ -804,8 +769,6 @@

         return {"extid:add": len(extid)}

-    @timed
-    @process_metrics
     @db_transaction()
     def release_add(
         self, releases: List[Release], *, db: Db, cur=None
@@ -835,7 +798,6 @@

         return {"release:add": len(releases_missing)}

-    @timed
     @db_transaction_generator()
     def release_missing(
         self, releases: List[Sha1Git], *, db: Db, cur=None
@@ -846,7 +808,6 @@
         for obj in db.release_missing_from_list(releases, cur):
             yield obj[0]

-    @timed
     @db_transaction(statement_timeout=500)
     def release_get(
         self,
@@ -862,13 +823,10 @@
             rels.append(data if data else None)
         return rels

-    @timed
     @db_transaction()
     def release_get_random(self, *, db: Db, cur=None) -> Sha1Git:
         return db.release_get_random(cur)

-    @timed
-    @process_metrics
     @db_transaction()
     def snapshot_add(
         self, snapshots: List[Snapshot], *, db: Db, cur=None
@@ -906,7 +864,6 @@

         return {"snapshot:add": count}

-    @timed
     @db_transaction_generator()
     def snapshot_missing(
         self, snapshots: List[Sha1Git], *, db: Db, cur=None
@@ -914,7 +871,6 @@
         for obj in db.snapshot_missing_from_list(snapshots, cur):
             yield obj[0]

-    @timed
     @db_transaction(statement_timeout=2000)
     def snapshot_get(
         self, snapshot_id: Sha1Git, *, db: Db, cur=None
@@ -931,7 +887,6 @@
             "next_branch": d["next_branch"],
         }

-    @timed
     @db_transaction(statement_timeout=2000)
     def snapshot_count_branches(
         self,
@@ -950,7 +905,6 @@
             ]
         )

-    @timed
     @db_transaction(statement_timeout=2000)
     def snapshot_get_branches(
         self,
@@ -1011,12 +965,10 @@
             id=snapshot_id, branches=branches, next_branch=next_branch,
         )

-    @timed
     @db_transaction()
     def snapshot_get_random(self, *, db: Db, cur=None) -> Sha1Git:
         return db.snapshot_get_random(cur)

-    @timed
     @db_transaction()
     def origin_visit_add(
         self, visits: List[OriginVisit], *, db: Db, cur=None
@@ -1027,9 +979,7 @@
                 raise StorageArgumentException("Unknown origin %s", visit.origin)

         all_visits = []
-        nb_visits = 0
         for visit in visits:
-            nb_visits += 1
             if not visit.visit:
                 with convert_validation_exceptions():
                     visit_id = db.origin_visit_add(
@@ -1052,7 +1002,6 @@
                 )
             self._origin_visit_status_add(visit_status, db=db, cur=cur)
-        send_metric("origin_visit:add", count=nb_visits, method_name="origin_visit")
         return all_visits

     def _origin_visit_status_add(
@@ -1062,8 +1011,6 @@
         self.journal_writer.origin_visit_status_add([visit_status])
         db.origin_visit_status_add(visit_status, cur=cur)

-    @timed
-    @process_metrics
     @db_transaction()
     def origin_visit_status_add(
         self, visit_statuses: List[OriginVisitStatus], *, db: Db, cur=None,
@@ -1096,7 +1043,6 @@
             self._origin_visit_status_add(visit_status, db, cur)
         return {"origin_visit_status:add": len(visit_statuses_)}

-    @timed
     @db_transaction()
     def origin_visit_status_get_latest(
         self,
@@ -1121,7 +1067,6 @@
             return None
         return OriginVisitStatus(**row_d)

-    @timed
     @db_transaction(statement_timeout=500)
     def origin_visit_get(
         self,
@@ -1164,7 +1109,6 @@

         return PagedResult(results=visits, next_page_token=next_page_token)

-    @timed
     @db_transaction(statement_timeout=500)
     def origin_visit_find_by_date(
         self, origin: str, visit_date: datetime.datetime, *, db: Db, cur=None
@@ -1179,7 +1123,6 @@
             type=row_d["type"],
         )

-    @timed
     @db_transaction(statement_timeout=500)
     def origin_visit_get_by(
         self, origin: str, visit: int, *, db: Db, cur=None
@@ -1195,7 +1138,6 @@
             )
         return None

-    @timed
     @db_transaction(statement_timeout=4000)
     def origin_visit_get_latest(
         self,
@@ -1231,7 +1173,6 @@
                 return visit
         return None

-    @timed
     @db_transaction(statement_timeout=500)
     def origin_visit_status_get(
         self,
@@ -1265,7 +1206,6 @@

         return PagedResult(results=visit_statuses, next_page_token=next_page_token)

-    @timed
     @db_transaction()
     def origin_visit_status_get_random(
         self, type: str, *, db: Db, cur=None
@@ -1276,7 +1216,6 @@
             return OriginVisitStatus(**row_d)
         return None

-    @timed
     @db_transaction(statement_timeout=2000)
     def object_find_by_sha1_git(
         self, ids: List[Sha1Git], *, db: Db, cur=None
@@ -1291,7 +1230,6 @@

         return ret

-    @timed
     @db_transaction(statement_timeout=500)
     def origin_get(
         self, origins: List[str], *, db: Db, cur=None
@@ -1304,7 +1242,6 @@
             result.append(None if url is None else Origin(url=url))
         return result

-    @timed
     @db_transaction(statement_timeout=500)
     def origin_get_by_sha1(
         self, sha1s: List[bytes], *, db: Db, cur=None
@@ -1314,13 +1251,11 @@
             for row in db.origin_get_by_sha1(sha1s, cur)
         ]

-    @timed
     @db_transaction_generator()
     def origin_get_range(self, origin_from=1, origin_count=100, *, db: Db, cur=None):
         for origin in db.origin_get_range(origin_from, origin_count, cur):
             yield dict(zip(db.origin_get_range_cols, origin))

-    @timed
     @db_transaction()
     def origin_list(
         self, page_token: Optional[str] = None, limit: int = 100, *, db: Db, cur=None
@@ -1347,7 +1282,6 @@
         assert len(origins) <= limit
         return PagedResult(results=origins, next_page_token=next_page_token)

-    @timed
     @db_transaction()
     def origin_search(
         self,
@@ -1382,7 +1316,6 @@

         return PagedResult(results=origins, next_page_token=next_page_token)

-    @timed
     @db_transaction()
     def origin_count(
         self,
@@ -1395,15 +1328,12 @@
     ) -> int:
         return db.origin_count(url_pattern, regexp, with_visit, cur)

-    @timed
     @db_transaction()
     def origin_snapshot_get_all(
         self, origin_url: str, *, db: Db, cur=None
     ) -> List[Sha1Git]:
         return list(db.origin_snapshot_get_all(origin_url, cur))

-    @timed
-    @process_metrics
     @db_transaction()
     def origin_add(self, origins: List[Origin], *, db: Db, cur=None) -> Dict[str, int]:
         urls = [o.url for o in origins]
@@ -1444,8 +1374,6 @@
         for key in keys:
             cur.execute("select * from swh_update_counter(%s)", (key,))

-    @timed
-    @process_metrics
     @db_transaction()
     def raw_extrinsic_metadata_add(
         self, metadata: List[RawExtrinsicMetadata], db, cur,
@@ -1558,8 +1486,6 @@
             )
             in db.raw_extrinsic_metadata_get_authorities(str(target), cur)
         ]

-    @timed
-    @process_metrics
     @db_transaction()
     def metadata_fetcher_add(
         self, fetchers: List[MetadataFetcher], *, db: Db, cur=None
@@ -1572,7 +1498,6 @@
             count += 1
         return {"metadata_fetcher:add": count}

-    @timed
     @db_transaction(statement_timeout=500)
     def metadata_fetcher_get(
         self, name: str, version: str, *, db: Db, cur=None
@@ -1582,8 +1507,6 @@
             return None
         return MetadataFetcher.from_dict(dict(zip(db.metadata_fetcher_cols, row)))

-    @timed
-    @process_metrics
     @db_transaction()
     def metadata_authority_add(
         self, authorities: List[MetadataAuthority], *, db: Db, cur=None
@@ -1596,7 +1519,6 @@
             count += 1
         return {"metadata_authority:add": count}

-    @timed
     @db_transaction()
     def metadata_authority_get(
         self, type: MetadataAuthorityType, url: str, *, db: Db, cur=None
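One endpoint needs special handling: `origin_visit_add` returns a list of visits rather than a summary dict, so both backends previously counted visits inline. After this change the server derives the count from the returned list and keeps the legacy `method_name="origin_visit"` label so existing dashboards do not break. A minimal sketch of the resulting metric call, with `send_metric` stubbed (the real one is `swh.storage.metrics.send_metric`):

```python
# Sketch of the preserved origin_visit metric (see post_origin_visit_add in
# the server diff): the count comes from the visits the backend returns, and
# method_name stays "origin_visit" for backward compatibility.
def send_metric(metric, count, method_name):
    print(f"{metric}: +{count} (method_name={method_name})")


# stand-in for the list of OriginVisit objects returned by origin_visit_add
returned_visits = [{"origin": "https://example.org/repo.git", "visit": 1}]

send_metric("origin_visit:add", count=len(returned_visits), method_name="origin_visit")
```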