Changeset View
Changeset View
Standalone View
Standalone View
swh/provenance/mongo/backend.py
# Copyright (C) 2021 The Software Heritage developers | # Copyright (C) 2021 The Software Heritage developers | ||||
# See the AUTHORS file at the top-level directory of this distribution | # See the AUTHORS file at the top-level directory of this distribution | ||||
# License: GNU General Public License version 3, or any later version | # License: GNU General Public License version 3, or any later version | ||||
# See top-level LICENSE file for more information | # See top-level LICENSE file for more information | ||||
from __future__ import annotations | from __future__ import annotations | ||||
from datetime import datetime, timezone | from datetime import datetime, timezone | ||||
import os | import os | ||||
from types import TracebackType | from types import TracebackType | ||||
from typing import Any, Dict, Generator, Iterable, List, Optional, Set, Type, Union | from typing import Any, Dict, Generator, Iterable, List, Optional, Set, Type, Union | ||||
from bson import ObjectId | from bson import ObjectId | ||||
import mongomock | import mongomock | ||||
import pymongo | import pymongo | ||||
from pymongo import InsertOne, UpdateOne | |||||
from swh.core.statsd import statsd | from swh.core.statsd import statsd | ||||
from swh.model.model import Sha1Git | from swh.model.model import Sha1Git | ||||
from ..interface import ( | from ..interface import ( | ||||
EntityType, | EntityType, | ||||
ProvenanceResult, | ProvenanceResult, | ||||
ProvenanceStorageInterface, | ProvenanceStorageInterface, | ||||
RelationData, | RelationData, | ||||
RelationType, | RelationType, | ||||
RevisionData, | RevisionData, | ||||
) | ) | ||||
from .entity import Entity | |||||
STORAGE_DURATION_METRIC = "swh_provenance_storage_mongodb_duration_seconds" | STORAGE_DURATION_METRIC = "swh_provenance_storage_mongodb_duration_seconds" | ||||
class ProvenanceStorageMongoDb: | class ProvenanceStorageMongoDb: | ||||
def __init__(self, engine: str, **kwargs): | def __init__(self, engine: str, **kwargs): | ||||
self.engine = engine | self.engine = engine | ||||
self.dbname = kwargs.pop("dbname") | self.dbname = kwargs.pop("dbname") | ||||
Show All 10 Lines | def __exit__( | ||||
exc_tb: Optional[TracebackType], | exc_tb: Optional[TracebackType], | ||||
) -> None: | ) -> None: | ||||
self.close() | self.close() | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "close"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "close"}) | ||||
def close(self) -> None: | def close(self) -> None: | ||||
self.db.client.close() | self.db.client.close() | ||||
def _format_data(self, data: Union[Iterable[Sha1Git], Dict[Sha1Git, datetime]]):
    """Normalize *data* into a mapping of sha1 to an optional date.

    A dict is returned unchanged; any other iterable of sha1s becomes a
    dict whose values are all ``None`` (no date known).
    """
    if isinstance(data, dict):
        return data
    return dict.fromkeys(data)
def _generate_date_upserts(self, sha1, date, inserts=None, upsert=True):
    """Build an ``UpdateOne`` that keeps the earliest timestamp for *sha1*.

    The filter only matches documents whose stored ``ts`` is ``None`` or
    not earlier than *date*, so an already-known earlier date is never
    overwritten.  *inserts* supplies extra fields applied only when the
    upsert actually inserts a new document.
    """
    # A mutable default ({}) would be shared across calls; use None sentinel.
    if inserts is None:
        inserts = {}
    ts = datetime.timestamp(date) if date is not None else None
    # update only those with date either as None or later than the given one
    return UpdateOne(
        {"sha1": sha1, "$or": [{"ts": {"$gte": ts}}, {"ts": None}]},
        {
            "$set": {"ts": ts},
            "$setOnInsert": inserts,
        },
        upsert=upsert,
    )
def _generate_date_upserts_alternate(self, sha1, date, inserts=None, upsert=True):
    """Pipeline-update variant of ``_generate_date_upserts``.

    Uses an aggregation-pipeline update so the "keep the earliest ts"
    comparison happens server-side in ``$cond``.  Note: pipeline updates
    have no ``$setOnInsert``, so *inserts* is accepted for signature
    parity but currently unused here.
    """
    # FIXME, Compare the performance and decide which one to keep
    # A mutable default ({}) would be shared across calls; use None sentinel.
    if inserts is None:
        inserts = {}
    ts = datetime.timestamp(date) if date is not None else None
    # Mirror _generate_date_upserts: overwrite only when the stored ts is
    # None or not earlier than the candidate; otherwise KEEP the stored
    # value ("$ts").  The previous "else": None wiped known earlier dates.
    if ts is None:
        # A None candidate must never clobber a real timestamp.
        condition = {"$eq": ["$ts", None]}
    else:
        condition = {"$or": [{"$eq": ["$ts", None]}, {"$gte": ["$ts", ts]}]}
    return UpdateOne(
        {"sha1": sha1},
        [
            {
                "$set": {
                    "ts": {
                        "$cond": {
                            "if": condition,
                            "then": ts,
                            "else": "$ts",
                        }
                    }
                },
            },
        ],
        upsert=upsert,
    )
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_add"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_add"}) | ||||
def content_add( | def content_add( | ||||
self, cnts: Union[Iterable[Sha1Git], Dict[Sha1Git, Optional[datetime]]] | self, cnts: Union[Iterable[Sha1Git], Dict[Sha1Git, Optional[datetime]]] | ||||
) -> bool: | ) -> bool: | ||||
data = cnts if isinstance(cnts, dict) else dict.fromkeys(cnts) | default_inserts = {"revision": {}, "directory": {}} | ||||
existing = { | writes = [ | ||||
x["sha1"]: x | self._generate_date_upserts(sha1, date, default_inserts) | ||||
for x in self.db.content.find( | for sha1, date in self._format_data(cnts).items() | ||||
{"sha1": {"$in": list(data)}}, {"sha1": 1, "ts": 1, "_id": 1} | ] | ||||
) | Entity.factory(self.db, "content").bulk_write(writes) | ||||
} | return True | ||||
for sha1, date in data.items(): | |||||
ts = datetime.timestamp(date) if date is not None else None | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_add"}) | ||||
if sha1 in existing: | def directory_add( | ||||
cnt = existing[sha1] | self, dirs: Union[Iterable[Sha1Git], Dict[Sha1Git, Optional[datetime]]] | ||||
if ts is not None and (cnt["ts"] is None or ts < cnt["ts"]): | ) -> bool: | ||||
self.db.content.update_one( | default_inserts = {"revision": {}} | ||||
{"_id": cnt["_id"]}, {"$set": {"ts": ts}} | writes = [ | ||||
) | self._generate_date_upserts(sha1, date, default_inserts) | ||||
else: | for sha1, date in self._format_data(dirs).items() | ||||
self.db.content.insert_one( | ] | ||||
Entity.factory(self.db, "directory").bulk_write(writes) | |||||
return True | |||||
def _get_oldest_revision_from_content(self, content): | |||||
# FIXME, returning with the assumption that the content document holds
# all the revisions in its array
# Change to a separate collection content_in_revision if it gets too big
return Entity.factory(self.db, "revision").find_one( | |||||
{ | { | ||||
"sha1": sha1, | "_id": {"$in": [ObjectId(obj_id) for obj_id in content["revision"]]}, | ||||
"ts": ts, | "ts": content["ts"], | ||||
"revision": {}, | |||||
"directory": {}, | |||||
} | } | ||||
) | ) | ||||
return True | |||||
def _get_preferred_origin(self, revision):
    """Return the origin document preferred by *revision*, or None.

    A revision with no (or a falsy) ``preferred`` sha1 yields None.
    """
    preferred = revision.get("preferred")
    if not preferred:
        return None
    return Entity.factory(self.db, "origin").find_one({"sha1": preferred})
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_first"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_first"}) | ||||
def content_find_first(self, id: Sha1Git) -> Optional[ProvenanceResult]: | def content_find_first(self, id: Sha1Git) -> Optional[ProvenanceResult]: | ||||
# get all the revisions | content = Entity.factory(self.db, "content").find_one({"sha1": id}) | ||||
# iterate and find the earliest | |||||
content = self.db.content.find_one({"sha1": id}) | |||||
if not content: | if not content: | ||||
return None | return None | ||||
occurs = [] | oldest_revision = self._get_oldest_revision_from_content(content) | ||||
for revision in self.db.revision.find( | origin = self._get_preferred_origin(oldest_revision) | ||||
{"_id": {"$in": [ObjectId(obj_id) for obj_id in content["revision"]]}} | return ProvenanceResult( | ||||
): | |||||
if revision["preferred"] is not None: | |||||
origin = self.db.origin.find_one({"sha1": revision["preferred"]}) | |||||
else: | |||||
origin = {"url": None} | |||||
for path in content["revision"][str(revision["_id"])]: | |||||
occurs.append( | |||||
ProvenanceResult( | |||||
content=id, | content=id, | ||||
revision=revision["sha1"], | revision=oldest_revision["sha1"], | ||||
date=datetime.fromtimestamp(revision["ts"], timezone.utc), | date=datetime.fromtimestamp(oldest_revision["ts"], timezone.utc), | ||||
origin=origin["url"], | origin=origin["url"] if origin else None, | ||||
path=path, | path="", # FIXME, find the right path | ||||
) | |||||
) | ) | ||||
return sorted(occurs, key=lambda x: (x.date, x.revision, x.origin, x.path))[0] | |||||
# FIXME, refactor this | |||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_all"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "content_find_all"}) | ||||
def content_find_all( | def content_find_all( | ||||
self, id: Sha1Git, limit: Optional[int] = None | self, id: Sha1Git, limit: Optional[int] = None | ||||
) -> Generator[ProvenanceResult, None, None]: | ) -> Generator[ProvenanceResult, None, None]: | ||||
content = self.db.content.find_one({"sha1": id}) | content = self.db.content.find_one({"sha1": id}) | ||||
if not content: | if not content: | ||||
return None | return None | ||||
Show All 12 Lines | ) -> Generator[ProvenanceResult, None, None]: | ||||
content=id, | content=id, | ||||
revision=revision["sha1"], | revision=revision["sha1"], | ||||
date=datetime.fromtimestamp(revision["ts"], timezone.utc), | date=datetime.fromtimestamp(revision["ts"], timezone.utc), | ||||
origin=origin["url"], | origin=origin["url"], | ||||
path=path, | path=path, | ||||
) | ) | ||||
) | ) | ||||
for directory in self.db.directory.find( | for directory in self.db.directory.find( | ||||
{"_id": {"$in": [ObjectId(obj_id) for obj_id in content["directory"]]}} | { | ||||
"_id": { | |||||
"$in": [ObjectId(obj_id) for obj_id in content.get("directory", {})] | |||||
} | |||||
} | |||||
): | ): | ||||
for revision in self.db.revision.find( | for revision in self.db.revision.find( | ||||
{"_id": {"$in": [ObjectId(obj_id) for obj_id in directory["revision"]]}} | {"_id": {"$in": [ObjectId(obj_id) for obj_id in directory["revision"]]}} | ||||
): | ): | ||||
if revision["preferred"] is not None: | if revision["preferred"] is not None: | ||||
origin = self.db.origin.find_one({"sha1": revision["preferred"]}) | origin = self.db.origin.find_one({"sha1": revision["preferred"]}) | ||||
else: | else: | ||||
origin = {"url": None} | origin = {"url": None} | ||||
Show All 23 Lines | def content_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, datetime]: | ||||
return { | return { | ||||
x["sha1"]: datetime.fromtimestamp(x["ts"], timezone.utc) | x["sha1"]: datetime.fromtimestamp(x["ts"], timezone.utc) | ||||
for x in self.db.content.find( | for x in self.db.content.find( | ||||
{"sha1": {"$in": list(ids)}, "ts": {"$ne": None}}, | {"sha1": {"$in": list(ids)}, "ts": {"$ne": None}}, | ||||
{"sha1": 1, "ts": 1, "_id": 0}, | {"sha1": 1, "ts": 1, "_id": 0}, | ||||
) | ) | ||||
} | } | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_add"})
def directory_add(
    self, dirs: Union[Iterable[Sha1Git], Dict[Sha1Git, Optional[datetime]]]
) -> bool:
    """Add directories, keeping the earliest known timestamp per sha1.

    *dirs* is either an iterable of sha1s (dates default to None) or a
    mapping sha1 -> optional datetime.  An existing entry is only updated
    when the new date is earlier than the stored one, or the stored one
    is None.  Always returns True.
    """
    data = dirs if isinstance(dirs, dict) else dict.fromkeys(dirs)
    # Fetch current ts values in one query so each sha1 needs at most one
    # additional write below.
    existing = {
        x["sha1"]: x
        for x in self.db.directory.find(
            {"sha1": {"$in": list(data)}}, {"sha1": 1, "ts": 1, "_id": 1}
        )
    }
    for sha1, date in data.items():
        ts = datetime.timestamp(date) if date is not None else None
        if sha1 in existing:
            entry = existing[sha1]  # renamed from `dir` (shadowed builtin)
            if ts is not None and (entry["ts"] is None or ts < entry["ts"]):
                self.db.directory.update_one(
                    {"_id": entry["_id"]}, {"$set": {"ts": ts}}
                )
        else:
            self.db.directory.insert_one({"sha1": sha1, "ts": ts, "revision": {}})
    return True
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_get"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "directory_get"}) | ||||
def directory_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, datetime]: | def directory_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, datetime]: | ||||
return { | return { | ||||
x["sha1"]: datetime.fromtimestamp(x["ts"], timezone.utc) | x["sha1"]: datetime.fromtimestamp(x["ts"], timezone.utc) | ||||
for x in self.db.directory.find( | for x in self.db.directory.find( | ||||
{"sha1": {"$in": list(ids)}, "ts": {"$ne": None}}, | {"sha1": {"$in": list(ids)}, "ts": {"$ne": None}}, | ||||
{"sha1": 1, "ts": 1, "_id": 0}, | {"sha1": 1, "ts": 1, "_id": 0}, | ||||
) | ) | ||||
} | } | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "entity_get_all"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "entity_get_all"}) | ||||
def entity_get_all(self, entity: EntityType) -> Set[Sha1Git]: | def entity_get_all(self, entity: EntityType) -> Set[Sha1Git]: | ||||
return { | return { | ||||
x["sha1"] | x["sha1"] | ||||
for x in self.db.get_collection(entity.value).find( | # for x in self.db.get_collection(entity.value).find( | ||||
for x in self.db.get_collection(entity).find( # Temp fix for test | |||||
{}, {"sha1": 1, "_id": 0} | {}, {"sha1": 1, "_id": 0} | ||||
) | ) | ||||
} | } | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "location_add"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "location_add"}) | ||||
def location_add(self, paths: Iterable[bytes]) -> bool: | def location_add(self, paths: Iterable[bytes]) -> bool: | ||||
# TODO: implement this methods if path are to be stored in a separate collection | # TODO: implement this methods if path are to be stored in a separate collection | ||||
return True | return True | ||||
Show All 16 Lines | def open(self) -> None: | ||||
if self.engine == "mongomock": | if self.engine == "mongomock": | ||||
self.db = mongomock.MongoClient(**self.conn_args).get_database(self.dbname) | self.db = mongomock.MongoClient(**self.conn_args).get_database(self.dbname) | ||||
else: | else: | ||||
# assume real MongoDB server by default | # assume real MongoDB server by default | ||||
self.db = pymongo.MongoClient(**self.conn_args).get_database(self.dbname) | self.db = pymongo.MongoClient(**self.conn_args).get_database(self.dbname) | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_add"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_add"}) | ||||
def origin_add(self, orgs: Dict[Sha1Git, str]) -> bool: | def origin_add(self, orgs: Dict[Sha1Git, str]) -> bool: | ||||
existing = { | writes = [InsertOne({"sha1": sha1, "url": url}) for (sha1, url) in orgs.items()] | ||||
x["sha1"]: x | Entity.factory(self.db, "origin").bulk_write(writes) | ||||
for x in self.db.origin.find( | |||||
{"sha1": {"$in": list(orgs)}}, {"sha1": 1, "url": 1, "_id": 1} | |||||
) | |||||
} | |||||
for sha1, url in orgs.items(): | |||||
if sha1 not in existing: | |||||
# add new origin | |||||
self.db.origin.insert_one({"sha1": sha1, "url": url}) | |||||
return True | return True | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_get"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "origin_get"}) | ||||
def origin_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, str]: | def origin_get(self, ids: Iterable[Sha1Git]) -> Dict[Sha1Git, str]: | ||||
# FIXME, change to entity factory | |||||
return { | return { | ||||
x["sha1"]: x["url"] | x["sha1"]: x["url"] | ||||
for x in self.db.origin.find( | for x in self.db.origin.find( | ||||
{"sha1": {"$in": list(ids)}}, {"sha1": 1, "url": 1, "_id": 0} | {"sha1": {"$in": list(ids)}}, {"sha1": 1, "url": 1, "_id": 0} | ||||
) | ) | ||||
} | } | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "revision_add"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "revision_add"}) | ||||
def revision_add( | def revision_add( | ||||
self, revs: Union[Iterable[Sha1Git], Dict[Sha1Git, RevisionData]] | self, revs: Union[Iterable[Sha1Git], Dict[Sha1Git, RevisionData]] | ||||
) -> bool: | ) -> bool: | ||||
# FIXME, change to bulk_write | |||||
data = ( | data = ( | ||||
revs | revs | ||||
if isinstance(revs, dict) | if isinstance(revs, dict) | ||||
else dict.fromkeys(revs, RevisionData(date=None, origin=None)) | else dict.fromkeys(revs, RevisionData(date=None, origin=None)) | ||||
) | ) | ||||
existing = { | existing = { | ||||
x["sha1"]: x | x["sha1"]: x | ||||
for x in self.db.revision.find( | for x in self.db.revision.find( | ||||
▲ Show 20 Lines • Show All 78 Lines • ▼ Show 20 Lines | ) -> bool: | ||||
) | ) | ||||
} | } | ||||
for sha1, dsts in denorm.items(): | for sha1, dsts in denorm.items(): | ||||
# update | # update | ||||
if src_relation != "revision": | if src_relation != "revision": | ||||
k = { | k = { | ||||
obj_id: list(set(paths + dsts.get(obj_id, []))) | obj_id: list(set(paths + dsts.get(obj_id, []))) | ||||
for obj_id, paths in src_objs[sha1][dst_relation].items() | for obj_id, paths in src_objs[sha1].get(dst_relation, {}).items() | ||||
} | } | ||||
self.db.get_collection(src_relation).update_one( | self.db.get_collection(src_relation).update_one( | ||||
{"_id": src_objs[sha1]["_id"]}, | {"_id": src_objs[sha1]["_id"]}, | ||||
{"$set": {dst_relation: dict(dsts, **k)}}, | {"$set": {dst_relation: dict(dsts, **k)}}, | ||||
) | ) | ||||
else: | else: | ||||
self.db.get_collection(src_relation).update_one( | self.db.get_collection(src_relation).update_one( | ||||
{"_id": src_objs[sha1]["_id"]}, | {"_id": src_objs[sha1]["_id"]}, | ||||
▲ Show 20 Lines • Show All 53 Lines • ▼ Show 20 Lines | ) -> Dict[Sha1Git, Set[RelationData]]: | ||||
else: | else: | ||||
dst_objs = { | dst_objs = { | ||||
x["sha1"]: x["_id"] | x["sha1"]: x["_id"] | ||||
for x in self.db.get_collection(dst).find( | for x in self.db.get_collection(dst).find( | ||||
{"sha1": {"$in": list(sha1s)}}, {"_id": 1, "sha1": 1} | {"sha1": {"$in": list(sha1s)}}, {"_id": 1, "sha1": 1} | ||||
) | ) | ||||
} | } | ||||
src_objs = { | src_objs = { | ||||
x["sha1"]: x[dst] | x["sha1"]: x.get(dst, {}) | ||||
for x in self.db.get_collection(src).find( | for x in self.db.get_collection(src).find( | ||||
{}, {"_id": 0, "sha1": 1, dst: 1} | {}, {"_id": 0, "sha1": 1, dst: 1} | ||||
) | ) | ||||
} | } | ||||
result: Dict[Sha1Git, Set[RelationData]] = {} | result: Dict[Sha1Git, Set[RelationData]] = {} | ||||
if src != "revision": | if src != "revision": | ||||
for dst_sha1, dst_obj_id in dst_objs.items(): | for dst_sha1, dst_obj_id in dst_objs.items(): | ||||
for src_sha1, denorm in src_objs.items(): | for src_sha1, denorm in src_objs.items(): | ||||
Show All 16 Lines | class ProvenanceStorageMongoDb: | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_get_all"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "relation_get_all"}) | ||||
def relation_get_all( | def relation_get_all( | ||||
self, relation: RelationType | self, relation: RelationType | ||||
) -> Dict[Sha1Git, Set[RelationData]]: | ) -> Dict[Sha1Git, Set[RelationData]]: | ||||
src, *_, dst = relation.value.split("_") | src, *_, dst = relation.value.split("_") | ||||
empty: Union[Dict[str, bytes], List[str]] = {} if src != "revision" else [] | empty: Union[Dict[str, bytes], List[str]] = {} if src != "revision" else [] | ||||
src_objs = { | src_objs = { | ||||
x["sha1"]: x[dst] | x["sha1"]: x.get(dst, {}) | ||||
for x in self.db.get_collection(src).find( | for x in self.db.get_collection(src).find( | ||||
{dst: {"$ne": empty}}, {"_id": 0, "sha1": 1, dst: 1} | {dst: {"$ne": empty}}, {"_id": 0, "sha1": 1, dst: 1} | ||||
) | ) | ||||
} | } | ||||
dst_ids = list( | dst_ids = list( | ||||
{ObjectId(obj_id) for _, value in src_objs.items() for obj_id in value} | {ObjectId(obj_id) for _, value in src_objs.items() for obj_id in value} | ||||
) | ) | ||||
dst_objs = { | dst_objs = { | ||||
Show All 22 Lines | ) -> Dict[Sha1Git, Set[RelationData]]: | ||||
if dst_obj_id == dst_obj_ref | if dst_obj_id == dst_obj_ref | ||||
} | } | ||||
for src_sha1, denorm in src_objs.items() | for src_sha1, denorm in src_objs.items() | ||||
} | } | ||||
@statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "with_path"}) | @statsd.timed(metric=STORAGE_DURATION_METRIC, tags={"method": "with_path"}) | ||||
def with_path(self) -> bool: | def with_path(self) -> bool: | ||||
return True | return True | ||||
def entity_get(self, entity, filters=None):
    """Return the list of *entity* documents matching *filters*.

    Used only by tests.
    """
    # FIXME, this should be added to the interface to make
    # tests backend agnostic. Adding this in SQL could be tricky
    cursor = Entity.factory(self.db, entity).find(filters)
    return list(cursor)
def entity_add(self, entity, data):
    """Insert *data* documents into the *entity* collection.

    Used only by tests.
    """
    # FIXME, this should be added to the interface to make
    # tests backend agnostic. Adding this in SQL could be tricky
    collection = Entity.factory(self.db, entity)
    return collection.insert(data)
def create_indexes(self, entity, fields, unique=False):
    """Create an index on *fields* for the *entity* collection."""
    # NOTE(review): `unique` is forwarded positionally — confirm
    # Entity.create_index takes it as its second parameter.
    target = Entity.factory(self.db, entity)
    target.create_index(fields, unique)