#!/usr/bin/env python3

# Copyright (C) 2020 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

"""One-shot migration script for extrinsic metadata.

Walks the ``revision`` table in primary-key order, dispatches each row on
its ``type`` column and on loader-specific heuristics (key shape, provider
URL prefixes), strips every metadata key that is being migrated to the
``metadata_fetcher`` / ``metadata_authority`` tables, and asserts that no
unknown key remains afterwards.
"""

import sys
from typing import Any, Dict

from swh.core.db import BaseDb

# Columns fetched from the revision table, in SELECT order; rows are
# zipped against this list to build dicts.
REVISION_COLS = ["id", "date", "date_offset", "type", "metadata"]

# Registries to pre-insert before migrating. Expected value shapes (from
# create_fetchers/create_authorities below): fetchers carry
# .name/.version/.metadata, authorities carry .type/.url/.metadata.
FETCHERS = {}
AUTHORITIES = {}


def handle_row(row: Dict[str, Any], write_cur, write: bool) -> None:
    """Migrate the extrinsic metadata of a single revision row.

    Args:
        row: dict of the revision columns listed in REVISION_COLS.
        write_cur: cursor used for writes (currently unused; the actual
            loading is still TODO).
        write: when False (dry run), nothing must be written to the
            database.

    Raises:
        AssertionError: if the metadata does not match any known loader
            format, or if metadata keys remain after stripping.
    """
    type_ = row["type"]

    metadata = row["metadata"]

    if metadata is None:
        return

    if type_ == "dsc":
        # Debian source package loader.
        if "extrinsic" in metadata:
            extrinsic_files = metadata["extrinsic"]["raw"]["files"]
            for artifact_entry in metadata["original_artifact"]:
                extrinsic_file = extrinsic_files[artifact_entry["filename"]]
                # Sanity-check that both records describe the same file
                # before copying the URL over.
                for key in ("sha256",):
                    assert artifact_entry["checksums"][key] == extrinsic_file[key]
                artifact_entry["url"] = extrinsic_file["uri"]
            del metadata["extrinsic"]

    elif type_ == "tar":
        provider = metadata.get("extrinsic", {}).get("provider")
        if provider is not None:
            # New versions of the loaders write the provider; use it.
            if provider.startswith("https://replicate.npmjs.com/"):
                # npm loader format 1
                del metadata["extrinsic"]  # TODO: load

            elif provider.startswith("https://pypi.org/"):
                # pypi loader format 1
                del metadata["extrinsic"]  # TODO: load

            elif provider.startswith("https://cran.r-project.org/"):
                # cran loader
                del metadata["extrinsic"]  # TODO: load

            elif provider.startswith("https://nix-community.github.io/nixpkgs-swh/"):
                # nixguix loader
                del metadata["extrinsic"]  # TODO: load

            elif provider.startswith("https://ftp.gnu.org/"):
                # archive loader
                del metadata["extrinsic"]  # TODO: load

            elif provider.startswith("https://deposit.softwareheritage.org/"):
                if "@xmlns" in metadata:
                    assert (
                        metadata["@xmlns:codemeta"]
                        == "https://doi.org/10.5063/SCHEMA/CODEMETA-2.0"
                    )
                    assert "intrinsic" not in metadata
                    assert "extra_headers" not in metadata

                    # deposit loader format 1
                    # (pretty rare? In id order, the first revision with this format
                    # is 022310df16fd9e4d4f81fe36a142e82db977c01d)
                    # in the case, the metadata seems to be both directly in metadata
                    # and in metadata["extrinsic"]["raw"]["metadata"]
                    metadata = {}  # TODO: load
                else:
                    # deposit loader format 2
                    del metadata["extrinsic"]  # TODO: load

            else:
                assert False, f"unknown provider {provider}"

        # Older versions don't write the provider; use heuristics instead.
        elif (
            metadata.get("package_source", {})
            .get("url", "")
            .startswith("https://registry.npmjs.org/")
        ):
            # npm loader format 2
            del metadata["package"]
            del metadata["package_source"]  # TODO: load

        elif "project" in metadata:
            # NOTE(review): elsewhere (dsc branch) "original_artifact" is
            # iterated as a list; here it is indexed with "url" as if it
            # were a dict — confirm the pypi format 2 shape before running.
            assert metadata["original_artifact"]["url"].startswith(
                "https://files.pythonhosted.org/"
            )

            # pypi loader format 2
            del metadata["project"]  # TODO: load

        elif "@xmlns" in metadata:
            assert (
                metadata["@xmlns:codemeta"]
                == "https://doi.org/10.5063/SCHEMA/CODEMETA-2.0"
            )
            assert "intrinsic" not in metadata
            assert "extra_headers" not in metadata

            # deposit loader format 3
            metadata = {}  # TODO: load

        elif "{http://www.w3.org/2005/Atom}id" in metadata:
            assert "{https://doi.org/10.5063/SCHEMA/CODEMETA-2.0}author" in metadata
            assert "intrinsic" not in metadata
            assert "extra_headers" not in metadata

            # deposit loader format 4
            metadata = {}  # TODO: load

    # Remove common intrinsic metadata keys
    for key in ("intrinsic", "extra_headers"):
        if key in metadata:
            del metadata[key]

    # Remove loader-specific intrinsic metadata keys
    if type_ == "hg":
        del metadata["node"]
    elif type_ == "dsc":
        if "package_info" in metadata:
            del metadata["package_info"]

    for key in ("original_artifact",):
        # TODO: send them
        if key in metadata:
            del metadata[key]

    # Everything should have been accounted for; anything left means an
    # unknown loader format that must be handled before migrating.
    assert metadata == {}, (
        f"remaining metadata keys for {row['id'].hex()} (type: {row['type']}): "
        f"{metadata}"
    )


def create_fetchers(db) -> None:
    """Insert every registered fetcher into metadata_fetcher (idempotent)."""
    with db.cursor() as cur:
        for fetcher in FETCHERS.values():
            cur.execute(
                """
                INSERT INTO metadata_fetcher (name, version, metadata)
                VALUES (%s, %s, %s)
                ON CONFLICT DO NOTHING
                """,
                (fetcher.name, fetcher.version, fetcher.metadata),
            )


def create_authorities(db) -> None:
    """Insert every registered authority into metadata_authority (idempotent)."""
    with db.cursor() as cur:
        for authority in AUTHORITIES.values():
            cur.execute(
                """
                INSERT INTO metadata_authority (type, name, metadata)
                VALUES (%s, %s, %s)
                ON CONFLICT DO NOTHING
                """,
                (authority.type.value, authority.url, authority.metadata),
            )


def main(dbconn: str, first_id: bytes, dry_run: bool) -> None:
    """Iterate over the revision table and migrate each row's metadata.

    Args:
        dbconn: libpq connection string for the archive database.
        first_id: resume point; only revisions with id > first_id are
            processed.
        dry_run: when True, nothing is written to the database.
    """
    db = BaseDb.connect(dbconn)

    if not dry_run:
        create_fetchers(db)
        create_authorities(db)

    total_rows = 0
    with db.cursor() as read_cur, db.cursor() as write_cur:
        after_id = first_id
        while True:
            # Paginate by primary key to keep each query cheap.
            read_cur.execute(
                f"SELECT {', '.join(REVISION_COLS)} FROM revision "
                f"WHERE id > %s ORDER BY id LIMIT 10000",
                (after_id,),
            )
            new_rows = 0
            for row in read_cur:
                row_d = dict(zip(REVISION_COLS, row))
                # BUGFIX: the third parameter of handle_row is `write`,
                # which is the negation of dry_run (the original passed
                # dry_run directly, inverting the flag).
                handle_row(row_d, write_cur, not dry_run)
                new_rows += 1

            if new_rows == 0:
                break

            after_id = row_d["id"]

            total_rows += new_rows
            # Ids are uniformly distributed hashes, so the first 4 bytes
            # of the last-seen id give a rough progress percentage.
            percents = int.from_bytes(after_id[0:4], byteorder="big") * 100 / (1 << 32)
            print(
                f"Migrated {total_rows/1000000.:.2f}M rows "
                f"(~{percents:.1f}%, last revision: {after_id.hex()})"
            )


if __name__ == "__main__":
    if len(sys.argv) == 2:
        (_, dbconn) = sys.argv
        first_id = "00" * 20
    elif len(sys.argv) == 3:
        (_, dbconn, first_id) = sys.argv
    else:
        # BUGFIX: the placeholders had been stripped from the usage string.
        print(f"Syntax: {sys.argv[0]} <dbconn> [<first_id>]")
        sys.exit(1)
    main(dbconn, bytes.fromhex(first_id), True)