diff --git a/swh/graphql/backends/archive.py b/swh/graphql/backends/archive.py
index 79485a8..a9e7289 100644
--- a/swh/graphql/backends/archive.py
+++ b/swh/graphql/backends/archive.py
@@ -1,67 +1,75 @@
 from swh.storage import get_storage


 class Archive:
     def __init__(self):
         # FIXME, setup config
         self.storage = get_storage(
             cls="remote", url="http://moma.internal.softwareheritage.org:5002"
         )

     def get_origin(self, url):
         return self.storage.origin_get([url])[0]

     def get_origins(self, after=None, first=50, url_pattern=None):
         # STORAGE-TODO
         # Make them a single function in the backend
         if url_pattern is None:
             return self.storage.origin_list(page_token=after, limit=first)
         return self.storage.origin_search(
             url_pattern=url_pattern, page_token=after, limit=first
         )

     def get_origin_visits(self, origin_url, after=None, first=50):
         return self.storage.origin_visit_get(origin_url, page_token=after, limit=first)

     def get_origin_visit(self, origin_url, visit_id):
         return self.storage.origin_visit_get_by(origin_url, visit_id)

     def get_origin_latest_visit(self, origin_url):
         return self.storage.origin_visit_get_latest(origin_url)

     def get_visit_status(self, origin_url, visit_id, after=None, first=50):
         return self.storage.origin_visit_status_get(
             origin_url, visit_id, page_token=after, limit=first
         )

     def get_latest_visit_status(self, origin_url, visit_id):
         return self.storage.origin_visit_status_get_latest(origin_url, visit_id)

     def get_origin_snapshots(self, origin_url):
         return self.storage.origin_snapshot_get_all(origin_url)

+    def is_snapshot_available(self, snapshot_ids):
+        return not self.storage.snapshot_missing(snapshot_ids)
+
     def get_snapshot_branches(self, snapshot, after, first, target_types, name_include):
         return self.storage.snapshot_get_branches(
             snapshot,
             branches_from=after,
             branches_count=first,
             target_types=target_types,
             branch_name_include_substring=name_include,
         )

     def get_revisions(self, revision_ids):
         return self.storage.revision_get(revision_ids=revision_ids)

     def get_revision_log(self, revision_ids, after=None, first=50):
         return self.storage.revision_log(revisions=revision_ids, limit=first)

     def get_releases(self, release_ids):
         return self.storage.release_get(releases=release_ids)

-    def get_directory_entries(self, directory_id):
-        return self.storage.directory_ls(directory_id)
+    def is_directory_available(self, directory_ids):
+        return not self.storage.directory_missing(directory_ids)
+
+    def get_directory_entries(self, directory_id, after=None, first=50):
+        return self.storage.directory_get_entries(
+            directory_id, limit=first, page_token=after
+        )

     def get_content(self, content_id):
         # FIXME, only for tests
         return self.storage.content_find({"sha1_git": content_id})
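For context, a minimal usage sketch of the new backend helpers. The id below is a placeholder, the `.results` / `.next_page_token` attributes are assumed from the PagedResult-style object that directory_entry.py consumes further down, and a storage service reachable at the hard-coded URL is also assumed.

# Sketch only: placeholder id, assumed PagedResult interface.
from swh.graphql.backends import archive

backend = archive.Archive()
directory_id = bytes.fromhex("00" * 20)  # placeholder sha1_git, not a real object

# directory_missing() reports the ids absent from the archive, so an empty
# result means every requested directory is present.
if backend.is_directory_available([directory_id]):
    page = backend.get_directory_entries(directory_id, after=None, first=10)
    for entry in page.results:  # .results is assumed, mirroring directory_entry.py
        print(entry)
    # page.next_page_token, when set, would feed the `after` argument of the
    # next call once the resolvers hand pagination over to the backend.
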
diff --git a/swh/graphql/resolvers/directory.py b/swh/graphql/resolvers/directory.py
index 1e00257..15393ff 100644
--- a/swh/graphql/resolvers/directory.py
+++ b/swh/graphql/resolvers/directory.py
@@ -1,48 +1,50 @@
+from swh.graphql.backends import archive
+from swh.model.model import Directory
+
 from .base_node import BaseNode


 class BaseDirectoryNode(BaseNode):
     def _get_directory_by_id(self, directory_id):
-        # Now not fetching any data (schema is exposing just id)
-        # same pattern is used in snapshot resolver
-        # FIXME, use the right API to fetch metadata like name, path
-        return {
-            "id": directory_id,
-        }
+        # Return a Directory model object
+        # entries is initialized as empty
+        # Same pattern is used in snapshot
+        return Directory(id=directory_id, entries=())

     def is_type_of(self):
         return "Directory"


 class DirectoryNode(BaseDirectoryNode):
     def _get_node_data(self):
         """
         When a directory is requested directly with an id
         """
-        # FXIME, query to make sure directory exists
         directory_id = self.kwargs.get("SWHID").object_id
         # path = ""
-        return self._get_directory_by_id(directory_id)
+        if archive.Archive().is_directory_available([directory_id]):
+            return self._get_directory_by_id(directory_id)
+        return None


 class RevisionDirectoryNode(BaseDirectoryNode):
     def _get_node_data(self):
         """
         When a directory is requested from a revision
         self.obj is revision here
         self.obj.directoryId is the required dir id
         (set from resolvers.revision.py:BaseRevisionNode)
         """
         directory_id = self.obj.directoryId
         return self._get_directory_by_id(directory_id)


 class TargetDirectoryNode(BaseDirectoryNode):
     def _get_node_data(self):
         """
         When a directory is requested as a target

         self.obj can be a Release or a DirectoryEntry
         obj.target is the requested directory id here
         """
         return self._get_directory_by_id(self.obj.target)
diff --git a/swh/graphql/resolvers/directory_entry.py b/swh/graphql/resolvers/directory_entry.py
index f91031f..f1cd249 100644
--- a/swh/graphql/resolvers/directory_entry.py
+++ b/swh/graphql/resolvers/directory_entry.py
@@ -1,33 +1,33 @@
 from swh.graphql.backends import archive
 from swh.graphql.utils import utils

 from .base_connection import BaseConnection
 from .base_node import BaseNode


 class DirectoryEntryNode(BaseNode):
     """
     """

     @property
     def targetId(self):  # To support the schema naming convention
         return self._node.target


 class DirectoryEntryConnection(BaseConnection):
     _node_class = DirectoryEntryNode

     def _get_paged_result(self):
         """
         When entries requested from a directory
         self.obj.id is the directory_id here
         (as returned from resolvers/directory.py)

         This is not paginated from swh-storgae
         using dummy pagination
         """
         # FIXME, using dummy(local) pagination, move pagination to backend
         # To remove localpagination, just drop the paginated call
         # STORAGE-TODO
-        entries = archive.Archive().get_directory_entries(self.obj.id)
+        entries = archive.Archive().get_directory_entries(self.obj.id).results
         return utils.paginated(entries, self._get_first_arg(), self._get_after_arg())
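The entries connection still paginates locally: the backend call now returns one page of entries (consumed via `.results`), and `utils.paginated` slices that list in memory. The stand-in below only illustrates what such dummy pagination amounts to; `paginate_locally` is hypothetical and is not the actual `swh.graphql.utils.utils.paginated`.

# Hypothetical stand-in for local ("dummy") pagination, as referenced by the
# FIXME/STORAGE-TODO comments; not the real swh.graphql.utils implementation.
from typing import Optional, Sequence, Tuple

def paginate_locally(
    items: Sequence, first: int, after: Optional[str] = None
) -> Tuple[Sequence, Optional[str]]:
    # Treat `after` as an opaque cursor encoding how many items were already served.
    offset = int(after) if after else 0
    page = items[offset : offset + first]
    next_cursor = str(offset + first) if offset + first < len(items) else None
    return page, next_cursor

# Example: slice a list of directory entries fetched in one backend call.
entries = [{"name": b"README.md"}, {"name": b"src"}, {"name": b"setup.py"}]
page, cursor = paginate_locally(entries, first=2)
assert [e["name"] for e in page] == [b"README.md", b"src"] and cursor == "2"
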
diff --git a/swh/graphql/resolvers/snapshot.py b/swh/graphql/resolvers/snapshot.py
index 4258786..8ba9aeb 100644
--- a/swh/graphql/resolvers/snapshot.py
+++ b/swh/graphql/resolvers/snapshot.py
@@ -1,51 +1,53 @@
 from swh.graphql.backends import archive
 from swh.graphql.utils import utils
+from swh.model.model import Snapshot

 from .base_connection import BaseConnection
 from .base_node import BaseNode


 class BaseSnapshotNode(BaseNode):
     def _get_snapshot_by_id(self, snapshot_id):
-        # Now not fetching any data (schema is exposing just id)
-        # same pattern is used in directory resolver
-        return {
-            "id": snapshot_id,
-        }
+        # Return a Snapshot model object
+        # branches is initialized as empty
+        # Same pattern is used in directory
+        return Snapshot(id=snapshot_id, branches={})


 class SnapshotNode(BaseSnapshotNode):
     """
     For directly accessing a snapshot with an Id
     """

     def _get_node_data(self):
         """
         """
-        # FXIME, query to make sure snapshot exists
-        return self._get_snapshot_by_id(self.kwargs.get("SWHID").object_id)
+        snapshot_id = self.kwargs.get("SWHID").object_id
+        if archive.Archive().is_snapshot_available([snapshot_id]):
+            return self._get_snapshot_by_id(snapshot_id)
+        return None


 class VisitSnapshotNode(BaseSnapshotNode):
     """
     For accessing a snapshot from a visitstatus type
     """

     def _get_node_data(self):
         """
         self.obj is visitstatus here
         self.obj.snapshot is the requested snapshot id
         """
         return self._get_snapshot_by_id(self.obj.snapshot)


 class OriginSnapshotConnection(BaseConnection):
     _node_class = BaseSnapshotNode

     def _get_paged_result(self):
         """
         """
         results = archive.Archive().get_origin_snapshots(self.obj.url)
         snapshots = [{"id": snapshot} for snapshot in results]
         # FIXME, using dummy(local) pagination, move pagination to backend
         # To remove localpagination, just drop the paginated call
         # STORAGE-TODO
         return utils.paginated(snapshots, self._get_first_arg(), self._get_after_arg())
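The snapshot resolver mirrors the directory one: check availability first, then return a bare `Snapshot` model whose branches are loaded lazily by the branches connection. A rough sketch, under the assumption that the `SWHID` argument is parsed with `swh.model.swhids.CoreSWHID` (the diff itself only shows `.object_id` being read):

# Sketch only: placeholder SWHID; CoreSWHID parsing is an assumption about
# how the resolver's `SWHID` argument is materialized.
from swh.graphql.backends import archive
from swh.model.model import Snapshot
from swh.model.swhids import CoreSWHID

swhid = CoreSWHID.from_string("swh:1:snp:" + "00" * 20)  # placeholder id

backend = archive.Archive()
node = None
if backend.is_snapshot_available([swhid.object_id]):
    # Branches stay empty here; the branches connection loads them on demand
    # through get_snapshot_branches().
    node = Snapshot(id=swhid.object_id, branches={})
print(node)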