Changeset View
Changeset View
Standalone View
Standalone View
swh/provenance/postgresql/provenance.py
Show All 11 Lines | |||||
from types import TracebackType | from types import TracebackType | ||||
from typing import Dict, Generator, Iterable, List, Optional, Set, Type, Union | from typing import Dict, Generator, Iterable, List, Optional, Set, Type, Union | ||||
import psycopg2.extensions | import psycopg2.extensions | ||||
import psycopg2.extras | import psycopg2.extras | ||||
from typing_extensions import Literal | from typing_extensions import Literal | ||||
from swh.core.db import BaseDb | from swh.core.db import BaseDb | ||||
from swh.core.statsd import statsd | |||||
from swh.model.model import Sha1Git | from swh.model.model import Sha1Git | ||||
from ..interface import ( | from ..interface import ( | ||||
EntityType, | EntityType, | ||||
ProvenanceResult, | ProvenanceResult, | ||||
ProvenanceStorageInterface, | ProvenanceStorageInterface, | ||||
RelationData, | RelationData, | ||||
RelationType, | RelationType, | ||||
RevisionData, | RevisionData, | ||||
) | ) | ||||
LOGGER = logging.getLogger(__name__) | LOGGER = logging.getLogger(__name__) | ||||
STORAGE_DURATION_METRIC = "swh_provenance_storage_postgresql_duration_seconds" | |||||
class ProvenanceStoragePostgreSql: | class ProvenanceStoragePostgreSql: | ||||
def __init__(self, raise_on_commit: bool = False, **kwargs) -> None:
    """Record connection parameters; the actual DB connection is opened by open().

    Args:
        raise_on_commit: when True, unexpected errors in write operations are
            re-raised to the caller instead of only being logged and reported
            as a ``False`` return value.
        **kwargs: connection arguments forwarded verbatim to BaseDb.connect().
    """
    self.raise_on_commit = raise_on_commit
    self.conn_args = kwargs
    # Resolved lazily from the database the first time `flavor` is read.
    self._flavor: Optional[str] = None
def __enter__(self) -> ProvenanceStorageInterface: | def __enter__(self) -> ProvenanceStorageInterface: | ||||
Show All 25 Lines | def flavor(self) -> str: | ||||
self._flavor = cursor.fetchone()["flavor"] | self._flavor = cursor.fetchone()["flavor"] | ||||
assert self._flavor is not None | assert self._flavor is not None | ||||
return self._flavor | return self._flavor | ||||
@property
def denormalized(self) -> bool:
    """Whether the database schema flavor is one of the denormalized variants."""
    flavor_name = self.flavor
    return "denormalized" in flavor_name
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "close"})
def close(self) -> None:
    """Close the underlying database connection."""
    self.conn.close()
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_add"})
def content_add(
    self, cnts: Union[Iterable[Sha1Git], Dict[Sha1Git, Optional[datetime]]]
) -> bool:
    """Add content objects, optionally with their earliest known dates.

    Delegates to the generic per-entity date setter, targeting the
    ``content`` table. Returns the setter's success flag.
    """
    return self._entity_set_date("content", cnts)
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_first"})
def content_find_first(self, id: Sha1Git) -> Optional[ProvenanceResult]:
    """Return the first occurrence of the given content, or None if unknown.

    The lookup is performed entirely by the stored procedure
    ``swh_provenance_content_find_first``.
    """
    with self.transaction(readonly=True) as cursor:
        cursor.execute(
            query="SELECT * FROM swh_provenance_content_find_first(%s)",
            vars=(id,),
        )
        row = cursor.fetchone()
    if row is None:
        return None
    return ProvenanceResult(**row)
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_all"})
def content_find_all(
    self, id: Sha1Git, limit: Optional[int] = None
) -> Generator[ProvenanceResult, None, None]:
    """Yield every known occurrence of the given content.

    Args:
        id: sha1_git of the content to look up.
        limit: maximum number of results; None means no limit (the value is
            forwarded as-is to the stored procedure).
    """
    sql = "SELECT * FROM swh_provenance_content_find_all(%s, %s)"
    with self.transaction(readonly=True) as cursor:
        cursor.execute(query=sql, vars=(id, limit))
        for row in cursor:
            yield ProvenanceResult(**row)
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_get"})
def content_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, datetime]:
    """Return the known dates for the requested content ids.

    Delegates to the generic per-entity date getter on the ``content`` table.
    """
    return self._entity_get_date("content", ids)
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_add"})
def directory_add(
    self, dirs: Union[Iterable[Sha1Git], Dict[Sha1Git, Optional[datetime]]]
) -> bool:
    """Add directory objects, optionally with their earliest known dates.

    Delegates to the generic per-entity date setter on the ``directory``
    table. Returns the setter's success flag.
    """
    return self._entity_set_date("directory", dirs)
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_get"})
def directory_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, datetime]:
    """Return the known dates for the requested directory ids.

    Delegates to the generic per-entity date getter on the ``directory`` table.
    """
    return self._entity_get_date("directory", ids)
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "entity_get_all"})
def entity_get_all(self, entity: EntityType) -> Set[Sha1Git]:
    """Return the sha1 of every row in the table backing the given entity."""
    # Table name interpolation is safe: EntityType is a closed enum, not
    # user-controlled input.
    sql = f"SELECT sha1 FROM {entity.value}"
    with self.transaction(readonly=True) as cursor:
        cursor.execute(sql)
        return {record["sha1"] for record in cursor}
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "location_add"})
def location_add(self, paths: Iterable[bytes]) -> bool:
    """Insert the given filesystem paths into the ``location`` table.

    No-op (and immediate success) on flavors that do not store paths.
    Duplicate paths are ignored (``ON CONFLICT DO NOTHING``).

    Returns:
        True on success; False on unexpected errors, unless
        ``raise_on_commit`` is set, in which case the error is re-raised.
    """
    if not self.with_path():
        return True
    try:
        values = [(path,) for path in paths]
        if values:
            sql = """
                INSERT INTO location(path) VALUES %s
                ON CONFLICT DO NOTHING
                """
            with self.transaction() as cursor:
                psycopg2.extras.execute_values(cursor, sql, argslist=values)
        return True
    except Exception:
        # Fix: this was a bare `except:` (noqa E722), which also caught
        # KeyboardInterrupt/SystemExit and — when raise_on_commit is False —
        # silently converted them into `return False`. Narrowing to Exception
        # lets interpreter-exit signals always propagate.
        # NOTE(review): rollback is presumably handled by self.transaction()
        # on exception — confirm, the comment in the original claimed it.
        LOGGER.exception("Unexpected error")
        if self.raise_on_commit:
            raise
        return False
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "location_get_all"})
def location_get_all(self) -> Set[bytes]:
    """Return every path stored in the ``location`` table."""
    query = "SELECT location.path AS path FROM location"
    with self.transaction(readonly=True) as cursor:
        cursor.execute(query)
        paths = {entry["path"] for entry in cursor}
    return paths
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_add"})
def origin_add(self, orgs: Dict[Sha1Git, str]) -> bool:
    """Insert origins given as a sha1 -> url mapping.

    Already-present rows are left untouched (``ON CONFLICT DO NOTHING``).

    Returns:
        True on success; False on unexpected errors, unless
        ``raise_on_commit`` is set, in which case the error is re-raised.
    """
    try:
        if orgs:
            sql = """
                INSERT INTO origin(sha1, url) VALUES %s
                ON CONFLICT DO NOTHING
                """
            with self.transaction() as cursor:
                psycopg2.extras.execute_values(
                    cur=cursor, sql=sql, argslist=orgs.items()
                )
        return True
    except Exception:
        # Fix: this was a bare `except:` (noqa E722), which also caught
        # KeyboardInterrupt/SystemExit and — when raise_on_commit is False —
        # silently converted them into `return False`. Narrowing to Exception
        # lets interpreter-exit signals always propagate.
        LOGGER.exception("Unexpected error")
        if self.raise_on_commit:
            raise
        return False
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "open"})
def open(self) -> None:
    """Open the database connection and force the session timezone to UTC."""
    database = BaseDb.connect(**self.conn_args)
    self.conn = database.conn
    BaseDb.adapt_conn(self.conn)
    with self.transaction() as cursor:
        # Keep date handling consistent regardless of the server's locale.
        cursor.execute("SET timezone TO 'UTC'")
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_get"})
def origin_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, str]:
    """Map each known origin sha1 among `ids` to its url.

    Unknown ids are simply absent from the result.
    """
    sha1s = tuple(ids)
    if not sha1s:
        return {}
    # TODO: consider splitting this query in several ones if sha1s is too big!
    placeholders = ", ".join(["%s"] * len(sha1s))
    sql = f"""
        SELECT sha1, url
        FROM origin
        WHERE sha1 IN ({placeholders})
        """
    with self.transaction(readonly=True) as cursor:
        cursor.execute(query=sql, vars=sha1s)
        return {row["sha1"]: row["url"] for row in cursor}
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "revision_add"}) | |||||
def revision_add( | def revision_add( | ||||
self, revs: Union[Iterable[Sha1Git], Dict[Sha1Git, RevisionData]] | self, revs: Union[Iterable[Sha1Git], Dict[Sha1Git, RevisionData]] | ||||
) -> bool: | ) -> bool: | ||||
if isinstance(revs, dict): | if isinstance(revs, dict): | ||||
data = [(sha1, rev.date, rev.origin) for sha1, rev in revs.items()] | data = [(sha1, rev.date, rev.origin) for sha1, rev in revs.items()] | ||||
else: | else: | ||||
data = [(sha1, None, None) for sha1 in revs] | data = [(sha1, None, None) for sha1 in revs] | ||||
try: | try: | ||||
Show All 13 Lines | ) -> bool: | ||||
return True | return True | ||||
except: # noqa: E722 | except: # noqa: E722 | ||||
# Unexpected error occurred, rollback all changes and log message | # Unexpected error occurred, rollback all changes and log message | ||||
LOGGER.exception("Unexpected error") | LOGGER.exception("Unexpected error") | ||||
if self.raise_on_commit: | if self.raise_on_commit: | ||||
raise | raise | ||||
return False | return False | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "revision_get"})
def revision_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, RevisionData]:
    """Map each requested revision sha1 to its (date, origin) data.

    Revisions with neither a date nor an associated origin carry no
    provenance information and are excluded by the query; unknown ids are
    simply absent from the result.
    """
    sha1s = tuple(ids)
    if not sha1s:
        return {}
    # TODO: consider splitting this query in several ones if sha1s is too big!
    placeholders = ", ".join(["%s"] * len(sha1s))
    sql = f"""
        SELECT R.sha1, R.date, O.sha1 AS origin
        FROM revision AS R
        LEFT JOIN origin AS O ON (O.id=R.origin)
        WHERE R.sha1 IN ({placeholders})
        AND (R.date is not NULL OR O.sha1 is not NULL)
        """
    result: Dict[Sha1Git, RevisionData] = {}
    with self.transaction(readonly=True) as cursor:
        cursor.execute(query=sql, vars=sha1s)
        for row in cursor:
            result[row["sha1"]] = RevisionData(
                date=row["date"], origin=row["origin"]
            )
    return result
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_add"}) | |||||
def relation_add( | def relation_add( | ||||
self, relation: RelationType, data: Dict[Sha1Git, Set[RelationData]] | self, relation: RelationType, data: Dict[Sha1Git, Set[RelationData]] | ||||
) -> bool: | ) -> bool: | ||||
rows = [(src, rel.dst, rel.path) for src, dsts in data.items() for rel in dsts] | rows = [(src, rel.dst, rel.path) for src, dsts in data.items() for rel in dsts] | ||||
try: | try: | ||||
if rows: | if rows: | ||||
rel_table = relation.value | rel_table = relation.value | ||||
src_table, *_, dst_table = rel_table.split("_") | src_table, *_, dst_table = rel_table.split("_") | ||||
Show All 12 Lines | ) -> bool: | ||||
return True | return True | ||||
except: # noqa: E722 | except: # noqa: E722 | ||||
# Unexpected error occurred, rollback all changes and log message | # Unexpected error occurred, rollback all changes and log message | ||||
LOGGER.exception("Unexpected error") | LOGGER.exception("Unexpected error") | ||||
if self.raise_on_commit: | if self.raise_on_commit: | ||||
raise | raise | ||||
return False | return False | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_get"})
def relation_get(
    self, relation: RelationType, ids: Iterable[Sha1Git], reverse: bool = False
) -> Dict[Sha1Git, Set[RelationData]]:
    """Fetch rows of the given relation restricted to `ids`.

    Args:
        relation: which relation table to query.
        ids: sha1s filtering one side of the relation.
        reverse: when True, `ids` filter the destination side instead of
            the source side.
    """
    return self._relation_get(relation, ids, reverse)
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_get_all"})
def relation_get_all(
    self, relation: RelationType
) -> Dict[Sha1Git, Set[RelationData]]:
    """Fetch every row of the given relation (no id filtering)."""
    return self._relation_get(relation, None)
def _entity_get_date( | def _entity_get_date( | ||||
self, | self, | ||||
entity: Literal["content", "directory", "revision"], | entity: Literal["content", "directory", "revision"], | ||||
▲ Show 20 Lines • Show All 63 Lines • ▼ Show 20 Lines | ) -> Dict[Sha1Git, Set[RelationData]]: | ||||
cursor.execute( | cursor.execute( | ||||
query=sql, vars=(rel_table, src_table, dst_table, filter, sha1s) | query=sql, vars=(rel_table, src_table, dst_table, filter, sha1s) | ||||
) | ) | ||||
for row in cursor: | for row in cursor: | ||||
src = row.pop("src") | src = row.pop("src") | ||||
result.setdefault(src, set()).add(RelationData(**row)) | result.setdefault(src, set()).add(RelationData(**row)) | ||||
return result | return result | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "with_path"})
def with_path(self) -> bool:
    """Whether the schema flavor stores filesystem paths ("with-path" flavors)."""
    flavor_name = self.flavor
    return "with-path" in flavor_name