D6256.diff
diff --git a/swh/provenance/postgresql/archive.py b/swh/provenance/postgresql/archive.py
--- a/swh/provenance/postgresql/archive.py
+++ b/swh/provenance/postgresql/archive.py
@@ -8,6 +8,7 @@
 from methodtools import lru_cache
 import psycopg2.extensions
 
+from swh.core.statsd import statsd
 from swh.model.model import Sha1Git
 from swh.storage import get_storage
 
@@ -24,6 +25,10 @@
         yield from entries
 
     @lru_cache(maxsize=100000)
+    @statsd.timed(
+        metric="swh_provenance_archive_direct_accesstime_seconds",
+        tags={"method": "directory_ls"},
+    )
     def _directory_ls(self, id: Sha1Git) -> List[Dict[str, Any]]:
         # TODO: add file size filtering
         with self.conn.cursor() as cursor:
@@ -66,6 +71,10 @@
                 {"type": row[0], "target": row[1], "name": row[2]} for row in cursor
             ]
 
+    @statsd.timed(
+        metric="swh_provenance_archive_direct_accesstime_seconds",
+        tags={"method": "revision_get_parents"},
+    )
     def revision_get_parents(self, id: Sha1Git) -> Iterable[Sha1Git]:
         with self.conn.cursor() as cursor:
             cursor.execute(
@@ -80,6 +89,10 @@
             # There should be at most one row anyway
             yield from (row[0] for row in cursor)
 
+    @statsd.timed(
+        metric="swh_provenance_archive_direct_accesstime_seconds",
+        tags={"method": "snapshot_get_heads"},
+    )
     def snapshot_get_heads(self, id: Sha1Git) -> Iterable[Sha1Git]:
         with self.conn.cursor() as cursor:
             cursor.execute(
diff --git a/swh/provenance/storage/archive.py b/swh/provenance/storage/archive.py
--- a/swh/provenance/storage/archive.py
+++ b/swh/provenance/storage/archive.py
@@ -6,6 +6,7 @@
 from datetime import datetime
 from typing import Any, Dict, Iterable, Set, Tuple
 
+from swh.core.statsd import statsd
 from swh.model.model import ObjectType, Sha1Git, TargetType
 from swh.storage.interface import StorageInterface
 
@@ -14,6 +15,10 @@
     def __init__(self, storage: StorageInterface) -> None:
         self.storage = storage
 
+    @statsd.timed(
+        metric="swh_provenance_archive_api_accesstime_seconds",
+        tags={"method": "directory_ls"},
+    )
     def directory_ls(self, id: Sha1Git) -> Iterable[Dict[str, Any]]:
         # TODO: add file size filtering
         for entry in self.storage.directory_ls(id):
@@ -23,11 +28,19 @@
                 "type": entry["type"],
             }
 
+    @statsd.timed(
+        metric="swh_provenance_archive_api_accesstime_seconds",
+        tags={"method": "revision_get_parents"},
+    )
     def revision_get_parents(self, id: Sha1Git) -> Iterable[Sha1Git]:
         rev = self.storage.revision_get([id])[0]
         if rev is not None:
             yield from rev.parents
 
+    @statsd.timed(
+        metric="swh_provenance_archive_api_accesstime_seconds",
+        tags={"method": "snapshot_get_heads"},
+    )
     def snapshot_get_heads(self, id: Sha1Git) -> Iterable[Sha1Git]:
         from swh.core.utils import grouper
         from swh.storage.algos.snapshot import snapshot_get_all_branches
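
Note (not part of the diff): in both files above the archive accessors are wrapped with the `statsd.timed` decorator from `swh.core.statsd`, so each call reports its wall-clock duration as a timing metric, with one metric name per backend (`..._direct_...` for the direct PostgreSQL implementation, `..._api_...` for the storage-API one) and a `method` tag naming the accessor. A minimal sketch of that pattern, assuming an installed `swh.core`; only the decorator call mirrors the diff, the class and its body are hypothetical:

from swh.core.statsd import statsd


class ExampleArchive:
    # Hypothetical class: only the decorator call below mirrors the diff above.
    @statsd.timed(
        metric="swh_provenance_archive_direct_accesstime_seconds",
        tags={"method": "directory_ls"},
    )
    def directory_ls(self, id: bytes):
        # The decorator measures the time spent in this call and emits it as
        # a statsd timing sample tagged with the method name (fire-and-forget
        # over UDP, so it is harmless if no statsd server is listening).
        return []


ExampleArchive().directory_ls(b"\x00" * 20)  # emits one timing sample

Because both backends share the same `method` tag values, the per-method access times of the direct and API implementations can be compared side by side.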
diff --git a/swh/provenance/tests/test_archive_interface.py b/swh/provenance/tests/test_archive_interface.py
--- a/swh/provenance/tests/test_archive_interface.py
+++ b/swh/provenance/tests/test_archive_interface.py
@@ -5,10 +5,12 @@
 
 from collections import Counter
 from operator import itemgetter
+from typing import Counter as TCounter
 
 import pytest
 
 from swh.core.db import BaseDb
+from swh.model.model import Sha1Git
 from swh.provenance.postgresql.archive import ArchivePostgreSQL
 from swh.provenance.storage.archive import ArchiveStorage
 from swh.provenance.tests.conftest import fill_storage, load_repo_data
@@ -41,13 +43,19 @@
             assert entries_api == entries_direct
 
         for revision in data["revision"]:
-            parents_api = Counter(archive_api.revision_get_parents(revision["id"]))
-            parents_direct = Counter(
+            parents_api: TCounter[Sha1Git] = Counter(
+                archive_api.revision_get_parents(revision["id"])
+            )
+            parents_direct: TCounter[Sha1Git] = Counter(
                 archive_direct.revision_get_parents(revision["id"])
             )
             assert parents_api == parents_direct
 
         for snapshot in data["snapshot"]:
-            heads_api = Counter(archive_api.snapshot_get_heads(snapshot["id"]))
-            heads_direct = Counter(archive_direct.snapshot_get_heads(snapshot["id"]))
+            heads_api: TCounter[Sha1Git] = Counter(
+                archive_api.snapshot_get_heads(snapshot["id"])
+            )
+            heads_direct: TCounter[Sha1Git] = Counter(
+                archive_direct.snapshot_get_heads(snapshot["id"])
+            )
             assert heads_api == heads_direct
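
The test changes above only tighten typing: `typing.Counter` is imported as `TCounter` so the generic annotation does not clash with the runtime `collections.Counter` that actually builds the objects, and the counted elements are annotated as `Sha1Git`. A self-contained sketch of that idiom with made-up values (not taken from the test data):

from collections import Counter
from typing import Counter as TCounter

Sha1Git = bytes  # stand-in for swh.model.model.Sha1Git, an alias of bytes

# collections.Counter constructs the object at runtime; the TCounter alias
# from typing only carries the element type for the type checker.
parents: TCounter[Sha1Git] = Counter([b"\x01" * 20, b"\x01" * 20, b"\x02" * 20])
assert parents[b"\x01" * 20] == 2
assert parents == Counter({b"\x01" * 20: 2, b"\x02" * 20: 1})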
Attached To: D6256: Add StatsD support to `ArchiveInterface` implementations