Changeset View
Standalone View
swh/fuse/fs/artifact.py
# Copyright (C) 2020 The Software Heritage developers | # Copyright (C) 2020 The Software Heritage developers | ||||
# See the AUTHORS file at the top-level directory of this distribution | # See the AUTHORS file at the top-level directory of this distribution | ||||
# License: GNU General Public License version 3, or any later version | # License: GNU General Public License version 3, or any later version | ||||
# See top-level LICENSE file for more information | # See top-level LICENSE file for more information | ||||
from typing import Any, AsyncIterator | from dataclasses import dataclass | ||||
from pathlib import Path | |||||
from typing import Any, AsyncIterator, List | |||||
from swh.fuse.fs.entry import EntryMode, FuseEntry | from swh.fuse.fs.entry import EntryMode, FuseEntry | ||||
from swh.model.identifiers import CONTENT, DIRECTORY, SWHID | from swh.fuse.fs.symlink import SymlinkEntry | ||||
from swh.model.from_disk import DentryPerms | |||||
# Avoid cycling import | from swh.model.identifiers import CONTENT, DIRECTORY, REVISION, SWHID | ||||
Fuse = "Fuse" | |||||
@dataclass
class ArtifactEntry(FuseEntry):
    """ FUSE virtual entry for a Software Heritage Artifact

    Attributes:
        swhid: Software Heritage persistent identifier
        prefetch: optional prefetched metadata used to set entry attributes
    """

    # Persistent identifier of the archived object this entry exposes.
    swhid: SWHID
    # Optional metadata (e.g., from a parent directory listing) used to set
    # entry attributes without an extra Software Heritage API call.
    prefetch: Any = None
class Content(ArtifactEntry):
    """ Software Heritage content artifact.

    Content leaves (AKA blobs) are represented on disks as regular files,
    containing the corresponding bytes, as archived.

    Note that permissions are associated to blobs only in the context of
    directories. Hence, when accessing blobs from the top-level `archive/`
    directory, the permissions of the `archive/SWHID` file will be arbitrary
    and not meaningful (e.g., `0x644`). """

    async def get_content(self) -> bytes:
        """ Return the raw blob bytes, memoizing their length for size() """
        data = await self.fuse.get_blob(self.swhid)
        # Memoize the length so a later size() call does not fetch the blob
        # again.  prefetch defaults to None (no directory-listing metadata),
        # so it must be initialized before storing into it — subscripting
        # None would raise TypeError.
        if self.prefetch is None:
            self.prefetch = {}
        self.prefetch["length"] = len(data)
        return data

    async def size(self) -> int:
        """ Return the blob length, using prefetched metadata when available """
        # When listing entries from a directory, the API already gave us the
        # length; guard against a prefetch dict that lacks the "length" key.
        if self.prefetch and "length" in self.prefetch:
            return self.prefetch["length"]
        return len(await self.get_content())

    async def __aiter__(self):
        # Blobs are leaves: there is nothing to iterate over.
        raise ValueError("Cannot iterate over a content type artifact")
class Directory(ArtifactEntry):
    """ Software Heritage directory artifact.

    Directory nodes are represented as directories on the file-system,
    containing one entry for each entry of the archived directory. Entry names
    and other metadata, including permissions, will correspond to the archived
    entry metadata.

    Note that the FUSE mount is read-only, no matter what the permissions say.
    So it is possible that, in the context of a directory, a file is presented
    as writable, whereas actually writing to it will fail with `EPERM`. """

    async def __aiter__(self) -> AsyncIterator[FuseEntry]:
        entries = await self.fuse.get_metadata(self.swhid)
        for dentry in entries:
            entry_name = dentry["name"]
            target_swhid = dentry["target"]
            # Archived permissions for directories are always set to 0o040000,
            # so substitute a read-only directory mode; other entries keep
            # their archived permissions.
            if target_swhid.object_type == DIRECTORY:
                entry_mode = int(EntryMode.RDONLY_DIR)
            else:
                entry_mode = dentry["perms"]

            # 1. Symlinks: the link target string is stored as blob content
            if entry_mode == DentryPerms.symlink:
                yield self.create_child(
                    SymlinkEntry,
                    name=entry_name,
                    target=await self.fuse.get_blob(target_swhid),
                )
            # 2. Submodules: fetch the revision metadata and expose the entry
            # as a symlink into archive/ to distinguish it from regular
            # directories
            elif target_swhid.object_type == REVISION:
                await self.fuse.get_metadata(target_swhid)
                yield self.create_child(
                    SymlinkEntry,
                    name=entry_name,
                    target=Path(
                        self.get_relative_root_path(), f"archive/{target_swhid}"
                    ),
                )
            # 3. Regular entries (directories, contents): forward the listing
            # metadata so attributes need no extra Software Heritage API call
            else:
                yield self.create_child(
                    OBJTYPE_GETTERS[target_swhid.object_type],
                    name=entry_name,
                    mode=entry_mode,
                    swhid=target_swhid,
                    prefetch=dentry,
                )
class Revision(ArtifactEntry):
    """ Software Heritage revision artifact.

    Revision (AKA commit) nodes are represented on the file-system as
    directories with the following entries:

    - `root`: source tree at the time of the commit, as a symlink pointing into
      `archive/`, to a SWHID of type `dir`
    - `parents/` (note the plural): a virtual directory containing entries named
      `1`, `2`, `3`, etc., one for each parent commit. Each of these entry is a
      symlink pointing into `archive/`, to the SWHID file for the given parent
      commit
    - `parent` (note the singular): present if and only if the current commit
      has at least one parent commit (which is the most common case). When
      present it is a symlink pointing into `parents/1/`
    - `meta.json`: metadata for the current node, as a symlink pointing to the
      relevant `meta/<SWHID>.json` file """

    async def __aiter__(self) -> AsyncIterator[FuseEntry]:
        metadata = await self.fuse.get_metadata(self.swhid)
        directory_swhid = metadata["directory"]
        parent_entries = metadata["parents"]

        # Make sure the root tree and every parent commit are fetched before
        # exposing symlinks pointing at them
        await self.fuse.get_metadata(directory_swhid)
        for parent_entry in parent_entries:
            await self.fuse.get_metadata(parent_entry["id"])

        relative_root = self.get_relative_root_path()
        yield self.create_child(
            SymlinkEntry,
            name="root",
            target=Path(relative_root, f"archive/{directory_swhid}"),
        )
        yield self.create_child(
            SymlinkEntry,
            name="meta.json",
            target=Path(relative_root, f"meta/{self.swhid}.json"),
        )
        yield self.create_child(
            RevisionParents,
            name="parents",
            mode=int(EntryMode.RDONLY_DIR),
            parents=[parent_entry["id"] for parent_entry in parent_entries],
        )
        # `parent` (singular) only exists when there is at least one parent
        if parent_entries:
            yield self.create_child(
                SymlinkEntry, name="parent", target="parents/1/",
            )
@dataclass
class RevisionParents(FuseEntry):
    """ Revision virtual `parents/` directory """

    # SWHIDs of the parent commits; entry `1` maps to parents[0], and so on.
    parents: List[SWHID]

    async def __aiter__(self) -> AsyncIterator[FuseEntry]:
        relative_root = self.get_relative_root_path()
        # Entries are 1-based: `parents/1` is the first parent commit
        for position, parent_swhid in enumerate(self.parents, start=1):
            yield self.create_child(
                SymlinkEntry,
                name=str(position),
                target=Path(relative_root, f"archive/{parent_swhid}"),
            )
# Dispatch table mapping a SWHID object type to the ArtifactEntry subclass
# used to expose it on the file-system.
OBJTYPE_GETTERS = {
    CONTENT: Content,
    DIRECTORY: Directory,
    REVISION: Revision,
}
Done Inline Actions — seirl: Why do you create a list first and then yield it? Couldn't you just yield the elements one by one?
This should probably be cached so that if you call get_content() and size() you only fetch the content once?