diff --git a/swh/fuse/cli.py b/swh/fuse/cli.py
index b36f255..532d5e3 100644
--- a/swh/fuse/cli.py
+++ b/swh/fuse/cli.py
@@ -1,203 +1,204 @@
# Copyright (C) 2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

# WARNING: do not import unnecessary things here to keep cli startup time under
# control
import os
from pathlib import Path
from typing import Any, Dict

import click

from swh.core.cli import CONTEXT_SETTINGS
from swh.core.cli import swh as swh_cli_group
from swh.model.cli import SWHIDParamType

# All generic config code should reside in swh.core.config
DEFAULT_CONFIG_PATH = os.environ.get(
    "SWH_CONFIG_FILE", os.path.join(click.get_app_dir("swh"), "global.yml")
)

CACHE_HOME_DIR: Path = (
    Path(os.environ["XDG_CACHE_HOME"])
    if "XDG_CACHE_HOME" in os.environ
    else Path.home() / ".cache"
)

DEFAULT_CONFIG: Dict[str, Any] = {
    "cache": {
        "metadata": {"path": str(CACHE_HOME_DIR / "swh/fuse/metadata.sqlite")},
        "blob": {"path": str(CACHE_HOME_DIR / "swh/fuse/blob.sqlite")},
        "history": {"path": str(CACHE_HOME_DIR / "swh/fuse/history.sqlite")},
        "direntry": {"maxram": "10%"},
    },
    "web-api": {
        "url": "https://archive.softwareheritage.org/api/1",
        "auth-token": None,
    },
+    "json-indent": 2,
}


@swh_cli_group.group(name="fs", context_settings=CONTEXT_SETTINGS)
@click.option(
    "-C",
    "--config-file",
    default=None,
    type=click.Path(exists=True, dir_okay=False, path_type=str),
    help=f"Configuration file (default: {DEFAULT_CONFIG_PATH})",
)
@click.pass_context
def fuse(ctx, config_file):
    """Software Heritage virtual file system"""

    import logging

    import yaml

    from swh.core import config

    if not config_file:
        config_file = DEFAULT_CONFIG_PATH

    try:
        conf = config.read_raw_config(config.config_basepath(config_file))
        if not conf:
            raise ValueError(f"Cannot parse configuration file: {config_file}")

        if config_file == DEFAULT_CONFIG_PATH:
            try:
                conf = conf["swh"]["fuse"]
            except KeyError:
                pass

        # recursive merge not done by config.read
        conf = config.merge_configs(DEFAULT_CONFIG, conf)
    except Exception:
        logging.warning(
            "Using default configuration (cannot load custom one)", exc_info=True
        )
        conf = DEFAULT_CONFIG

    logging.debug("Read configuration: \n%s", yaml.dump(conf))
    ctx.ensure_object(dict)
    ctx.obj["config"] = conf
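Note: `merge_configs` performs a recursive merge, so a user configuration only needs to list the keys it overrides. A minimal sketch of how a partial user config interacts with the new `json-indent` default (the `user_conf` value is hypothetical):

    from swh.core.config import merge_configs

    user_conf = {"json-indent": 4}  # e.g., parsed from the user's YAML file
    conf = merge_configs(DEFAULT_CONFIG, user_conf)
    assert conf["json-indent"] == 4     # explicit override wins
    assert "metadata" in conf["cache"]  # unrelated defaults are preserved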
@fuse.command(name="mount")
@click.argument(
    "path",
    required=True,
    metavar="PATH",
    type=click.Path(exists=True, dir_okay=True, file_okay=False),
)
@click.argument("swhids", nargs=-1, metavar="[SWHID]...", type=SWHIDParamType())
@click.option(
    "-f/-d",
    "--foreground/--daemon",
    default=False,
    help="whether to run FUSE attached to the console (foreground) "
    "or daemonized in the background (default: daemon)",
)
@click.pass_context
def mount(ctx, swhids, path, foreground):
    """Mount the Software Heritage virtual file system at PATH.

    If specified, objects referenced by the given SWHIDs will be prefetched and
    used to populate the virtual file system (VFS). Otherwise the VFS will be
    populated on demand, when accessing its content.

    \b
    Example:

    \b
    $ mkdir swhfs
    $ swh fs mount swhfs/
    $ grep printf swhfs/archive/swh:1:cnt:c839dea9e8e6f0528b468214348fee8669b305b2
    printf("Hello, World!");
    $

    """

    import asyncio
    from contextlib import ExitStack
    import logging
    import logging.config

    from daemon import DaemonContext

    from swh.fuse import fuse

    # TODO: set default logging settings when --log-config is not passed
    # DEFAULT_LOG_PATH = Path(".local/swh/fuse/mount.log")
    with ExitStack() as stack:
        if not foreground:
            # TODO: temporary fix until swh.core has the proper logging utilities
            # Disable logging config before daemonizing, and reset it once
            # daemonized, to be sure not to close file handlers
            logging.shutdown()
            # Stay in the current working directory when spawning the daemon
            cwd = os.getcwd()
            stack.enter_context(DaemonContext(working_directory=cwd))
            logging.config.dictConfig(
                {
                    "version": 1,
                    "handlers": {
                        "syslog": {
                            "class": "logging.handlers.SysLogHandler",
                            "address": "/dev/log",
                        },
                    },
                    "root": {"level": ctx.obj["log_level"], "handlers": ["syslog"]},
                }
            )

        conf = ctx.obj["config"]
        asyncio.run(fuse.main(swhids, path, conf))


@fuse.command()
@click.argument(
    "path",
    required=True,
    metavar="PATH",
    type=click.Path(exists=True, dir_okay=True, file_okay=False),
)
@click.pass_context
def umount(ctx, path):
    """Unmount a mounted virtual file system.

    Note: this is equivalent to ``fusermount -u PATH``, which can be used to
    unmount any FUSE-based virtual file system. See ``man fusermount3``.

    """

    import logging
    import subprocess

    try:
        subprocess.run(["fusermount", "-u", path], check=True)
    except subprocess.CalledProcessError as err:
        logging.error(
            "cannot unmount virtual file system: '%s' returned exit status %d",
            " ".join(err.cmd),
            err.returncode,
        )
        ctx.exit(1)


@fuse.command()
@click.pass_context
def clean(ctx):
    """Clean on-disk cache(s)."""

    def rm_cache(conf, cache_name):
        try:
            # cache paths are stored as strings in the configuration
            Path(conf["cache"][cache_name]["path"]).unlink(missing_ok=True)
        except KeyError:
            pass

    conf = ctx.obj["config"]
    for cache_name in ["blob", "metadata", "history"]:
        rm_cache(conf, cache_name)
""" swhid: SWHID prefetch: Any = None async def get_content(self) -> bytes: data = await self.fuse.get_blob(self.swhid) if not self.prefetch: self.prefetch = {"length": len(data)} return data async def size(self) -> int: if self.prefetch: return self.prefetch["length"] else: return await super().size() @dataclass class Directory(FuseDirEntry): """ Software Heritage directory artifact. Attributes: swhid: Software Heritage persistent identifier Directory nodes are represented as directories on the file-system, containing one entry for each entry of the archived directory. Entry names and other metadata, including permissions, will correspond to the archived entry metadata. Note that the FUSE mount is read-only, no matter what the permissions say. So it is possible that, in the context of a directory, a file is presented as writable, whereas actually writing to it will fail with `EPERM`. """ swhid: SWHID async def compute_entries(self) -> AsyncIterator[FuseEntry]: metadata = await self.fuse.get_metadata(self.swhid) for entry in metadata: name = entry["name"] swhid = entry["target"] mode = ( # Archived permissions for directories are always set to # 0o040000 so use a read-only permission instead int(EntryMode.RDONLY_DIR) if swhid.object_type == DIRECTORY else entry["perms"] ) # 1. Regular file if swhid.object_type == CONTENT: yield self.create_child( Content, name=name, mode=mode, swhid=swhid, # The directory API has extra info we can use to set # attributes without additional Software Heritage API call prefetch=entry, ) # 2. Regular directory elif swhid.object_type == DIRECTORY: yield self.create_child( Directory, name=name, mode=mode, swhid=swhid, ) # 3. Symlink elif mode == DentryPerms.symlink: yield self.create_child( FuseSymlinkEntry, name=name, # Symlink target is stored in the blob content target=await self.fuse.get_blob(swhid), ) # 4. Submodule elif swhid.object_type == REVISION: # Make sure the revision metadata is fetched and create a # symlink to distinguish it with regular directories await self.fuse.get_metadata(swhid) yield self.create_child( FuseSymlinkEntry, name=name, target=Path(self.get_relative_root_path(), f"archive/{swhid}"), ) else: raise ValueError("Unknown directory entry type: {swhid.object_type}") @dataclass class Revision(FuseDirEntry): """ Software Heritage revision artifact. Attributes: swhid: Software Heritage persistent identifier Revision (AKA commit) nodes are represented on the file-system as directories with the following entries: - `root`: source tree at the time of the commit, as a symlink pointing into `archive/`, to a SWHID of type `dir` - `parents/` (note the plural): a virtual directory containing entries named `1`, `2`, `3`, etc., one for each parent commit. Each of these entry is a symlink pointing into `archive/`, to the SWHID file for the given parent commit - `parent` (note the singular): present if and only if the current commit has at least one parent commit (which is the most common case). When present it is a symlink pointing into `parents/1/` - `history`: a virtual directory listing all its revision ancestors, sorted in reverse topological order. Each entry is a symlink pointing into `archive/SWHID`. 
@dataclass
class Revision(FuseDirEntry):
    """ Software Heritage revision artifact.

    Attributes:
        swhid: Software Heritage persistent identifier

    Revision (AKA commit) nodes are represented on the file-system as
    directories with the following entries:

    - `root`: source tree at the time of the commit, as a symlink pointing into
      `archive/`, to a SWHID of type `dir`
    - `parents/` (note the plural): a virtual directory containing entries
      named `1`, `2`, `3`, etc., one for each parent commit. Each of these
      entries is a symlink pointing into `archive/`, to the SWHID file for the
      given parent commit
    - `parent` (note the singular): present if and only if the current commit
      has at least one parent commit (which is the most common case). When
      present it is a symlink pointing into `parents/1/`
    - `history`: a virtual directory listing all its revision ancestors, sorted
      in reverse topological order. Each entry is a symlink pointing into
      `archive/SWHID`
    - `meta.json`: metadata for the current node, as a symlink pointing to the
      relevant `meta/<SWHID>.json` file """

    swhid: SWHID

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        metadata = await self.fuse.get_metadata(self.swhid)
        directory = metadata["directory"]
        parents = metadata["parents"]

        root_path = self.get_relative_root_path()

        yield self.create_child(
            FuseSymlinkEntry,
            name="root",
            target=Path(root_path, f"archive/{directory}"),
        )
        yield self.create_child(
            FuseSymlinkEntry,
            name="meta.json",
            target=Path(root_path, f"meta/{self.swhid}.json"),
        )
        yield self.create_child(
            RevisionParents,
            name="parents",
            mode=int(EntryMode.RDONLY_DIR),
            parents=[x["id"] for x in parents],
        )

        if len(parents) >= 1:
            yield self.create_child(
                FuseSymlinkEntry, name="parent", target="parents/1/",
            )

        yield self.create_child(
            RevisionHistory,
            name="history",
            mode=int(EntryMode.RDONLY_DIR),
            swhid=self.swhid,
        )


@dataclass
class RevisionParents(FuseDirEntry):
    """ Revision virtual `parents/` directory """

    parents: List[SWHID]

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        root_path = self.get_relative_root_path()
        for i, parent in enumerate(self.parents):
            yield self.create_child(
                FuseSymlinkEntry,
                name=str(i + 1),
                target=Path(root_path, f"archive/{parent}"),
            )


@dataclass
class RevisionHistory(FuseDirEntry):
    """ Revision virtual `history/` directory """

    swhid: SWHID

    async def prefill_caches(self) -> None:
        history = await self.fuse.get_history(self.swhid)
        for swhid in history:
            await self.fuse.get_metadata(swhid)

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        # Run it concurrently because of the many API calls necessary
        asyncio.create_task(self.prefill_caches())

        yield self.create_child(
            RevisionHistoryShardByDate,
            name="by-date",
            mode=int(EntryMode.RDONLY_DIR),
            history_swhid=self.swhid,
        )
        yield self.create_child(
            RevisionHistoryShardByHash,
            name="by-hash",
            mode=int(EntryMode.RDONLY_DIR),
            history_swhid=self.swhid,
        )
        yield self.create_child(
            RevisionHistoryShardByPage,
            name="by-page",
            mode=int(EntryMode.RDONLY_DIR),
            history_swhid=self.swhid,
        )


@dataclass
class RevisionHistoryShardByDate(FuseDirEntry):
    """ Revision virtual `history/by-date` sharded directory """

    history_swhid: SWHID
    prefix: str = field(default="")
    is_status_done: bool = field(default=False)

    DATE_FMT = "{year:04d}/{month:02d}/{day:02d}/"

    @dataclass
    class StatusFile(FuseFileEntry):
        """ Temporary file used to indicate loading progress in by-date/ """

        name: str = field(init=False, default=".status")
        mode: int = field(init=False, default=int(EntryMode.RDONLY_FILE))
        done: int
        todo: int

        async def get_content(self) -> bytes:
            fmt = f"Done: {self.done}/{self.todo}\n"
            return fmt.encode()

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        history = await self.fuse.get_history(self.history_swhid)
        # Only check for cached revisions, since fetching all of them with the
        # Web API would take too long
        swhids = await self.fuse.cache.metadata.get_cached_subset(history)

        depth = self.prefix.count("/")
        root_path = self.get_relative_root_path()
        sharded_dirs = set()

        for swhid in swhids:
            meta = await self.fuse.cache.metadata.get(swhid)
            date = meta["date"]
            sharded_name = self.DATE_FMT.format(
                year=date.year, month=date.month, day=date.day
            )
            if not sharded_name.startswith(self.prefix):
                continue

            if depth == 3:
                yield self.create_child(
                    FuseSymlinkEntry,
                    name=str(swhid),
                    target=Path(root_path, f"archive/{swhid}"),
                )
            # Create sharded directories
            else:
                next_prefix = sharded_name.split("/")[depth]
                if next_prefix not in sharded_dirs:
                    sharded_dirs.add(next_prefix)
                    yield self.create_child(
                        RevisionHistoryShardByDate,
                        name=next_prefix,
                        mode=int(EntryMode.RDONLY_DIR),
                        prefix=f"{self.prefix}{next_prefix}/",
                        history_swhid=self.history_swhid,
                    )

        self.is_status_done = len(swhids) == len(history)
        if not self.is_status_done and depth == 0:
            yield self.create_child(
                RevisionHistoryShardByDate.StatusFile,
                done=len(swhids),
                todo=len(history),
            )
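Note: to make the three-level sharding concrete, a revision dated 2020-10-27 ends up under `by-date/2020/10/27/<SWHID>`. A quick sketch of how DATE_FMT and the prefix/depth bookkeeping interact:

    DATE_FMT = "{year:04d}/{month:02d}/{day:02d}/"

    sharded_name = DATE_FMT.format(year=2020, month=10, day=27)
    assert sharded_name == "2020/10/27/"
    # At the root (prefix="", depth 0) the next shard component is the year...
    assert sharded_name.split("/")[0] == "2020"
    # ...and below by-date/2020/ (prefix="2020/", depth 1) it is the month.
    assert sharded_name.split("/")[1] == "10"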
@dataclass
class RevisionHistoryShardByHash(FuseDirEntry):
    """ Revision virtual `history/by-hash` sharded directory """

    history_swhid: SWHID
    prefix: str = field(default="")

    SHARDING_LENGTH = 2

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        history = await self.fuse.get_history(self.history_swhid)

        if self.prefix:
            root_path = self.get_relative_root_path()
            for swhid in history:
                if swhid.object_id.startswith(self.prefix):
                    yield self.create_child(
                        FuseSymlinkEntry,
                        name=str(swhid),
                        target=Path(root_path, f"archive/{swhid}"),
                    )
        # Create sharded directories
        else:
            sharded_dirs = set()
            for swhid in history:
                next_prefix = swhid.object_id[: self.SHARDING_LENGTH]
                if next_prefix not in sharded_dirs:
                    sharded_dirs.add(next_prefix)
                    yield self.create_child(
                        RevisionHistoryShardByHash,
                        name=next_prefix,
                        mode=int(EntryMode.RDONLY_DIR),
                        prefix=next_prefix,
                        history_swhid=self.history_swhid,
                    )


@dataclass
class RevisionHistoryShardByPage(FuseDirEntry):
    """ Revision virtual `history/by-page` sharded directory """

    history_swhid: SWHID
    prefix: Optional[int] = field(default=None)

    PAGE_SIZE = 10_000
    PAGE_FMT = "{page_number:03d}"

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        history = await self.fuse.get_history(self.history_swhid)

        if self.prefix is not None:
            current_page = self.prefix
            root_path = self.get_relative_root_path()
            max_idx = min(len(history), (current_page + 1) * self.PAGE_SIZE)
            for i in range(current_page * self.PAGE_SIZE, max_idx):
                swhid = history[i]
                yield self.create_child(
                    FuseSymlinkEntry,
                    name=str(swhid),
                    target=Path(root_path, f"archive/{swhid}"),
                )
        # Create sharded directories
        else:
            for i in range(0, len(history), self.PAGE_SIZE):
                page_number = i // self.PAGE_SIZE
                yield self.create_child(
                    RevisionHistoryShardByPage,
                    name=self.PAGE_FMT.format(page_number=page_number),
                    mode=int(EntryMode.RDONLY_DIR),
                    history_swhid=self.history_swhid,
                    prefix=page_number,
                )
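Note: by-page naming is plain integer bucketing with zero-padded directory names; e.g., with the constants above, ancestor number 12,345 lands in page `001`:

    PAGE_SIZE = 10_000
    PAGE_FMT = "{page_number:03d}"

    assert PAGE_FMT.format(page_number=0) == "000"  # the first 10,000 ancestors
    assert PAGE_FMT.format(page_number=12_345 // PAGE_SIZE) == "001"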
@dataclass
class Release(FuseDirEntry):
    """ Software Heritage release artifact.

    Attributes:
        swhid: Software Heritage persistent identifier

    Release nodes are represented on the file-system as directories with the
    following entries:

    - `target`: target node, as a symlink to `archive/`
    - `target_type`: regular file containing the type of the target SWHID
    - `root`: present if and only if the release points to something that
      (transitively) resolves to a directory. When present it is a symlink
      pointing into `archive/` to the SWHID of the given directory
    - `meta.json`: metadata for the current node, as a symlink pointing to the
      relevant `meta/<SWHID>.json` file """

    swhid: SWHID

    async def find_root_directory(self, swhid: SWHID) -> Optional[SWHID]:
        if swhid.object_type == RELEASE:
            metadata = await self.fuse.get_metadata(swhid)
            return await self.find_root_directory(metadata["target"])
        elif swhid.object_type == REVISION:
            metadata = await self.fuse.get_metadata(swhid)
            return metadata["directory"]
        elif swhid.object_type == DIRECTORY:
            return swhid
        else:
            return None

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        metadata = await self.fuse.get_metadata(self.swhid)
        root_path = self.get_relative_root_path()

        yield self.create_child(
            FuseSymlinkEntry,
            name="meta.json",
            target=Path(root_path, f"meta/{self.swhid}.json"),
        )

        target = metadata["target"]
        yield self.create_child(
            FuseSymlinkEntry, name="target", target=Path(root_path, f"archive/{target}")
        )

        yield self.create_child(
            ReleaseType,
            name="target_type",
            mode=int(EntryMode.RDONLY_FILE),
            target_type=target.object_type,
        )

        target_dir = await self.find_root_directory(target)
        if target_dir is not None:
            yield self.create_child(
                FuseSymlinkEntry,
                name="root",
                target=Path(root_path, f"archive/{target_dir}"),
            )


@dataclass
class ReleaseType(FuseFileEntry):
    """ Release type virtual file """

    target_type: str

    async def get_content(self) -> bytes:
        return str.encode(self.target_type + "\n")


@dataclass
class Snapshot(FuseDirEntry):
    """ Software Heritage snapshot artifact.

    Attributes:
        swhid: Software Heritage persistent identifier

    Snapshot nodes are represented on the file-system as directories with one
    entry for each branch in the snapshot. Each entry is a symlink pointing
    into `archive/` to the branch target SWHID. Branch names are URL encoded
    (hence '/' is replaced with '%2F'). """

    swhid: SWHID

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        metadata = await self.fuse.get_metadata(self.swhid)
        root_path = self.get_relative_root_path()

        for branch_name, branch_meta in metadata.items():
            # Mangle branch name to create a valid UNIX filename
            name = urllib.parse.quote_plus(branch_name)
            yield self.create_child(
                FuseSymlinkEntry,
                name=name,
                target=Path(root_path, f"archive/{branch_meta['target']}"),
            )
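Note: a quick check of the branch-name mangling, using the same stdlib call as the code above:

    import urllib.parse

    # Slashes are percent-encoded, so each branch maps to one path component.
    assert urllib.parse.quote_plus("refs/heads/master") == "refs%2Fheads%2Fmaster"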
""" DATE_FMT = "{year:04d}-{month:02d}-{day:02d}" async def compute_entries(self) -> AsyncIterator[FuseEntry]: # The origin's name is always its URL (encoded to create a valid UNIX filename) visits = await self.fuse.get_visits(self.name) seen_date = set() for visit in visits: date = visit["date"] name = self.DATE_FMT.format(year=date.year, month=date.month, day=date.day) if name in seen_date: logging.debug( "Conflict date on origin: %s, %s", visit["origin"], str(name) ) else: seen_date.add(name) yield self.create_child( OriginVisit, name=name, mode=int(EntryMode.RDONLY_DIR), meta=visit, ) @dataclass class OriginVisit(FuseDirEntry): """ Origin visit virtual directory """ meta: Dict[str, Any] @dataclass class MetaFile(FuseFileEntry): content: str async def get_content(self) -> bytes: return str.encode(self.content + "\n") async def compute_entries(self) -> AsyncIterator[FuseEntry]: snapshot_swhid = self.meta["snapshot"] if snapshot_swhid: root_path = self.get_relative_root_path() yield self.create_child( FuseSymlinkEntry, name="snapshot", target=Path(root_path, f"archive/{snapshot_swhid}"), ) yield self.create_child( OriginVisit.MetaFile, name="meta.json", mode=int(EntryMode.RDONLY_FILE), - content=json.dumps(self.meta, default=lambda x: str(x)), + content=json.dumps( + self.meta, + indent=self.fuse.conf["json-indent"], + default=lambda x: str(x), + ), ) OBJTYPE_GETTERS = { CONTENT: Content, DIRECTORY: Directory, REVISION: Revision, RELEASE: Release, SNAPSHOT: Snapshot, } diff --git a/swh/fuse/fs/mountpoint.py b/swh/fuse/fs/mountpoint.py index 9447c5d..73d22c6 100644 --- a/swh/fuse/fs/mountpoint.py +++ b/swh/fuse/fs/mountpoint.py @@ -1,136 +1,137 @@ # Copyright (C) 2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from dataclasses import dataclass, field import json from typing import AsyncIterator from swh.fuse.fs.artifact import OBJTYPE_GETTERS, Origin from swh.fuse.fs.entry import EntryMode, FuseDirEntry, FuseEntry, FuseFileEntry from swh.model.exceptions import ValidationError from swh.model.identifiers import CONTENT, SWHID, parse_swhid @dataclass class Root(FuseDirEntry): """ The FUSE mountpoint, consisting of the archive/ and meta/ directories """ name: str = field(init=False, default=None) mode: int = field(init=False, default=int(EntryMode.RDONLY_DIR)) depth: int = field(init=False, default=1) async def compute_entries(self) -> AsyncIterator[FuseEntry]: yield self.create_child(ArchiveDir) yield self.create_child(MetaDir) yield self.create_child(OriginDir) @dataclass class ArchiveDir(FuseDirEntry): """ The archive/ directory is lazily populated with one entry per accessed SWHID, having actual SWHIDs as names """ name: str = field(init=False, default="archive") mode: int = field(init=False, default=int(EntryMode.RDONLY_DIR)) def create_child(self, swhid: SWHID) -> FuseEntry: if swhid.object_type == CONTENT: mode = EntryMode.RDONLY_FILE else: mode = EntryMode.RDONLY_DIR return super().create_child( OBJTYPE_GETTERS[swhid.object_type], name=str(swhid), mode=int(mode), swhid=swhid, ) async def compute_entries(self) -> AsyncIterator[FuseEntry]: async for swhid in self.fuse.cache.get_cached_swhids(): yield self.create_child(swhid) async def lookup(self, name: str) -> FuseEntry: entry = await super().lookup(name) if entry: return entry # On the fly mounting of a new artifact try: swhid = parse_swhid(name) await 
diff --git a/swh/fuse/fs/mountpoint.py b/swh/fuse/fs/mountpoint.py
index 9447c5d..73d22c6 100644
--- a/swh/fuse/fs/mountpoint.py
+++ b/swh/fuse/fs/mountpoint.py
@@ -1,136 +1,137 @@
# Copyright (C) 2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

from dataclasses import dataclass, field
import json
from typing import AsyncIterator

from swh.fuse.fs.artifact import OBJTYPE_GETTERS, Origin
from swh.fuse.fs.entry import EntryMode, FuseDirEntry, FuseEntry, FuseFileEntry
from swh.model.exceptions import ValidationError
from swh.model.identifiers import CONTENT, SWHID, parse_swhid


@dataclass
class Root(FuseDirEntry):
    """ The FUSE mountpoint, consisting of the archive/, meta/, and origin/
    directories """

    name: str = field(init=False, default=None)
    mode: int = field(init=False, default=int(EntryMode.RDONLY_DIR))
    depth: int = field(init=False, default=1)

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        yield self.create_child(ArchiveDir)
        yield self.create_child(MetaDir)
        yield self.create_child(OriginDir)


@dataclass
class ArchiveDir(FuseDirEntry):
    """ The archive/ directory is lazily populated with one entry per accessed
    SWHID, having actual SWHIDs as names """

    name: str = field(init=False, default="archive")
    mode: int = field(init=False, default=int(EntryMode.RDONLY_DIR))

    def create_child(self, swhid: SWHID) -> FuseEntry:
        if swhid.object_type == CONTENT:
            mode = EntryMode.RDONLY_FILE
        else:
            mode = EntryMode.RDONLY_DIR
        return super().create_child(
            OBJTYPE_GETTERS[swhid.object_type],
            name=str(swhid),
            mode=int(mode),
            swhid=swhid,
        )

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        async for swhid in self.fuse.cache.get_cached_swhids():
            yield self.create_child(swhid)

    async def lookup(self, name: str) -> FuseEntry:
        entry = await super().lookup(name)
        if entry:
            return entry

        # On-the-fly mounting of a new artifact
        try:
            swhid = parse_swhid(name)
            await self.fuse.get_metadata(swhid)
            return self.create_child(swhid)
        except ValidationError:
            return None


@dataclass
class MetaDir(FuseDirEntry):
    """ The meta/ directory contains one SWHID.json file for each SWHID entry
    under archive/. The JSON file contains all the available meta information
    about the given SWHID, as returned by the Software Heritage Web API for
    that object. Note that, in case of pagination (e.g., snapshot objects with
    many branches), the JSON file will contain a complete version with all
    pages merged together. """

    name: str = field(init=False, default="meta")
    mode: int = field(init=False, default=int(EntryMode.RDONLY_DIR))

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        async for swhid in self.fuse.cache.get_cached_swhids():
            yield self.create_child(
                MetaEntry,
                name=f"{swhid}.json",
                mode=int(EntryMode.RDONLY_FILE),
                swhid=swhid,
            )


@dataclass
class MetaEntry(FuseFileEntry):
    """ An entry from the meta/ directory, containing for each accessed SWHID a
    corresponding SWHID.json file with all the metadata from the Software
    Heritage archive. """

    swhid: SWHID

    async def get_content(self) -> bytes:
        # Get raw JSON metadata from the API (un-typified)
        metadata = await self.fuse.cache.metadata.get(self.swhid, typify=False)
-        return json.dumps(metadata).encode()
+        json_str = json.dumps(metadata, indent=self.fuse.conf["json-indent"])
+        return (json_str + "\n").encode()

    async def size(self) -> int:
        return len(await self.get_content())


@dataclass
class OriginDir(FuseDirEntry):
    """ The origin/ directory is lazily populated with one entry per accessed
    origin URL (mangled to create a valid UNIX filename). The URL encoding is
    done using the percent-encoding mechanism described in RFC 3986. """

    name: str = field(init=False, default="origin")
    mode: int = field(init=False, default=int(EntryMode.RDONLY_DIR))

    def create_child(self, url_encoded: str) -> FuseEntry:
        return super().create_child(
            Origin, name=url_encoded, mode=int(EntryMode.RDONLY_DIR),
        )

    async def compute_entries(self) -> AsyncIterator[FuseEntry]:
        async for url in self.fuse.cache.get_cached_visits():
            yield self.create_child(url)

    async def lookup(self, name: str) -> FuseEntry:
        entry = await super().lookup(name)
        if entry:
            return entry

        # On-the-fly mounting of a new origin URL
        try:
            url_encoded = name
            await self.fuse.get_visits(url_encoded)
            return self.create_child(url_encoded)
        except ValidationError:
            return None
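Note: since `size()` is derived from `get_content()`, the appended newline is accounted for automatically. A sketch with hypothetical metadata:

    import json

    metadata = {"id": "c839dea9e8e6f0528b468214348fee8669b305b2"}
    json_str = json.dumps(metadata, indent=2)
    content = (json_str + "\n").encode()
    assert content.endswith(b"}\n")           # files now end at a line boundary
    assert len(content) == len(json_str) + 1  # what size() will report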
diff --git a/swh/fuse/tests/conftest.py b/swh/fuse/tests/conftest.py
index f6c8ac3..a915d15 100644
--- a/swh/fuse/tests/conftest.py
+++ b/swh/fuse/tests/conftest.py
@@ -1,80 +1,81 @@
# Copyright (C) 2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import json
from multiprocessing import Process
import os
from pathlib import Path
import subprocess
from tempfile import NamedTemporaryFile, TemporaryDirectory
import time

from click.testing import CliRunner
import pytest
import yaml

import swh.fuse.cli as cli
from swh.fuse.tests.data.api_data import API_URL, MOCK_ARCHIVE


@pytest.fixture
def web_api_mock(requests_mock):
    for api_call, data in MOCK_ARCHIVE.items():
        # Convert the Python dict to a JSON string (only for non-raw API calls)
        if not api_call.endswith("raw/") and not api_call.startswith("graph/"):
            data = json.dumps(data)
        requests_mock.get(f"{API_URL}/{api_call}", text=data)
    return requests_mock


@pytest.fixture
def fuse_mntdir(web_api_mock):
    tmpdir = TemporaryDirectory(suffix=".swh-fuse-test")
    tmpfile = NamedTemporaryFile(suffix=".swh-fuse-test.yml")

    config = {
        "cache": {
            "metadata": {"in-memory": True},
            "blob": {"in-memory": True},
            "history": {"in-memory": True},
        },
        "web-api": {"url": API_URL, "auth-token": None},
+        "json-indent": None,
    }

    # Run FUSE in foreground mode, but in a separate process, so it does not
    # block execution and remains easy to kill during teardown
    def fuse_process(tmpdir, tmpfile):
        with tmpdir as mntdir, tmpfile as config_path:
            config_path = Path(config_path.name)
            config_path.write_text(yaml.dump(config))
            CliRunner().invoke(
                cli.fuse,
                args=[
                    "--config-file",
                    str(config_path),
                    "mount",
                    mntdir,
                    "--foreground",
                ],
            )

    fuse = Process(target=fuse_process, args=[tmpdir, tmpfile])
    fuse.start()
    # Wait at most 3 seconds for the FUSE mount to come up
    for i in range(30):
        try:
            root = os.listdir(tmpdir.name)
            if root:
                break
        except FileNotFoundError:
            pass
        time.sleep(0.1)
    else:
        raise FileNotFoundError(f"Could not mount FUSE in {tmpdir.name}")

    yield Path(tmpdir.name)

    subprocess.run(["fusermount", "-u", tmpdir.name], check=True)
    fuse.join()
diff --git a/swh/fuse/tests/test_meta.py b/swh/fuse/tests/test_meta.py
index 10600f0..1bfba6d 100644
--- a/swh/fuse/tests/test_meta.py
+++ b/swh/fuse/tests/test_meta.py
@@ -1,16 +1,16 @@
import json

from swh.fuse.tests.common import get_data_from_web_archive
from swh.fuse.tests.data.config import ALL_ENTRIES


def test_access_meta_file(fuse_mntdir):
    for swhid in ALL_ENTRIES:
        # Access the archive path first, to trigger on-the-fly mounting
        file_path_archive = fuse_mntdir / "archive" / swhid
        file_path_archive.exists()

        file_path_meta = fuse_mntdir / f"meta/{swhid}.json"
        assert file_path_meta.exists()
        expected = json.dumps(get_data_from_web_archive(swhid))
-        assert file_path_meta.read_text() == expected
+        assert file_path_meta.read_text().strip() == expected.strip()
diff --git a/swh/fuse/tests/test_release.py b/swh/fuse/tests/test_release.py
index de2c3f2..56c39cc 100644
--- a/swh/fuse/tests/test_release.py
+++ b/swh/fuse/tests/test_release.py
@@ -1,46 +1,46 @@
import json
import os

from swh.fuse.tests.common import check_dir_name_entries, get_data_from_web_archive
from swh.fuse.tests.data.config import (
    REL_TARGET_CNT,
    REL_TARGET_DIR,
    ROOT_DIR,
    ROOT_REL,
    TARGET_CNT,
    TARGET_DIR,
)


def test_access_meta(fuse_mntdir):
    file_path = fuse_mntdir / "archive" / ROOT_REL / "meta.json"
    expected = json.dumps(get_data_from_web_archive(ROOT_REL))
-    assert file_path.read_text() == expected
+    assert file_path.read_text().strip() == expected.strip()


def test_access_rev_target(fuse_mntdir):
    target_path = fuse_mntdir / "archive" / ROOT_REL / "target"
    expected = set(["meta.json", "root", "parent", "parents", "history"])
    actual = set(os.listdir(target_path))
    assert expected.issubset(actual)


def test_access_dir_target(fuse_mntdir):
    target_path = fuse_mntdir / "archive" / REL_TARGET_DIR / "target"
    check_dir_name_entries(target_path, TARGET_DIR)


def test_access_cnt_target(fuse_mntdir):
    target_path = fuse_mntdir / "archive" / REL_TARGET_CNT / "target"
    expected = get_data_from_web_archive(TARGET_CNT, raw=True)
    assert target_path.read_text() == expected


def test_target_type(fuse_mntdir):
    file_path = fuse_mntdir / "archive" / ROOT_REL / "target_type"
    assert file_path.read_text() == "revision\n"


def test_access_root(fuse_mntdir):
    dir_path = fuse_mntdir / "archive" / ROOT_REL / "root"
    check_dir_name_entries(dir_path, ROOT_DIR)
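Note: `.strip()` is sufficient in these tests because the fixture sets `json-indent: None`, and `json.dumps(..., indent=None)` is identical to the default compact form, so the mounted file differs from the expected string only by the trailing newline. A quick check:

    import json

    data = {"a": 1, "b": [2, 3]}
    assert json.dumps(data, indent=None) == json.dumps(data)  # indent=None is compact
    assert (json.dumps(data) + "\n").strip() == json.dumps(data)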
diff --git a/swh/fuse/tests/test_revision.py b/swh/fuse/tests/test_revision.py
index 8011f14..860cd0e 100644
--- a/swh/fuse/tests/test_revision.py
+++ b/swh/fuse/tests/test_revision.py
@@ -1,88 +1,88 @@
import json
import os
import time

import dateutil.parser

from swh.fuse.fs.artifact import RevisionHistoryShardByDate, RevisionHistoryShardByPage
from swh.fuse.tests.api_url import GRAPH_API_REQUEST
from swh.fuse.tests.common import (
    check_dir_name_entries,
    get_data_from_graph_archive,
    get_data_from_web_archive,
)
from swh.fuse.tests.data.config import REV_SMALL_HISTORY, ROOT_DIR, ROOT_REV
from swh.model.identifiers import parse_swhid


def test_access_meta(fuse_mntdir):
    file_path = fuse_mntdir / "archive" / ROOT_REV / "meta.json"
    expected = json.dumps(get_data_from_web_archive(ROOT_REV))
-    assert file_path.read_text() == expected
+    assert file_path.read_text().strip() == expected.strip()


def test_list_root(fuse_mntdir):
    dir_path = fuse_mntdir / "archive" / ROOT_REV / "root"
    check_dir_name_entries(dir_path, ROOT_DIR)


def test_list_parents(fuse_mntdir):
    rev_meta = get_data_from_web_archive(ROOT_REV)
    dir_path = fuse_mntdir / "archive" / ROOT_REV / "parents"
    for i, parent in enumerate(rev_meta["parents"]):
        parent_path = dir_path / str(i + 1)
        parent_swhid = f"swh:1:rev:{parent['id']}"
        assert parent_path.is_symlink()
        assert os.readlink(parent_path) == f"../../../archive/{parent_swhid}"


def test_list_parent(fuse_mntdir):
    file_path = fuse_mntdir / "archive" / ROOT_REV / "parent"
    assert file_path.is_symlink()
    assert os.readlink(file_path) == "parents/1/"


def test_list_history(fuse_mntdir):
    dir_path = fuse_mntdir / "archive" / REV_SMALL_HISTORY / "history"
    assert os.listdir(dir_path) == ["by-date", "by-hash", "by-page"]

    history_meta = get_data_from_graph_archive(
        REV_SMALL_HISTORY, GRAPH_API_REQUEST.HISTORY
    )
    history = history_meta.strip()
    # Only keep the second node of each edge: the first one is either redundant
    # information or the root node itself (hence not an ancestor)
    expected = set(
        map(parse_swhid, [edge.split(" ")[1] for edge in history.split("\n")])
    )

    dir_by_hash = dir_path / "by-hash"
    for swhid in expected:
        depth1 = swhid.object_id[:2]
        depth2 = str(swhid)
        assert (dir_by_hash / depth1).exists()
        assert depth2 in (os.listdir(dir_by_hash / depth1))

    dir_by_page = dir_path / "by-page"
    for idx, swhid in enumerate(expected):
        page_number = idx // RevisionHistoryShardByPage.PAGE_SIZE
        depth1 = RevisionHistoryShardByPage.PAGE_FMT.format(page_number=page_number)
        depth2 = str(swhid)
        assert (dir_by_page / depth1).exists()
        assert depth2 in (os.listdir(dir_by_page / depth1))

    dir_by_date = dir_path / "by-date"
    # Wait at most 1 second for the by-date/ dir to be fully populated
    for i in range(100):
        if ".status" not in os.listdir(dir_by_date):
            break
        time.sleep(0.01)

    for swhid in expected:
        meta = get_data_from_web_archive(str(swhid))
        date = dateutil.parser.parse(meta["date"])
        depth1 = RevisionHistoryShardByDate.DATE_FMT.format(
            year=date.year, month=date.month, day=date.day
        )
        depth2 = str(swhid)
        assert (dir_by_date / depth1).exists()
        assert depth2 in (os.listdir(dir_by_date / depth1))