diff --git a/PKG-INFO b/PKG-INFO index 4912824..3fa533b 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,71 +1,64 @@ Metadata-Version: 2.1 Name: swh.indexer -Version: 2.9.0 +Version: 2.9.1 Summary: Software Heritage Content Indexer Home-page: https://forge.softwareheritage.org/diffusion/78/ Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-indexer Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-indexer/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-indexer ============ Tools to compute multiple indexes on SWH's raw contents: - content: - mimetype - - ctags - - language - fossology-license - metadata -- revision: - - metadata +- origin: + - metadata (intrinsic, using the content indexer; and extrinsic) An indexer is in charge of: - looking up objects - extracting information from those objects - store those information in the swh-indexer db There are multiple indexers working on different object types: - content indexer: works with content sha1 hashes - revision indexer: works with revision sha1 hashes - origin indexer: works with origin identifiers Indexation procedure: - receive batch of ids - retrieve the associated data depending on object type - compute for that object some index - store the result to swh's storage Current content indexers: - mimetype (queue swh_indexer_content_mimetype): detect the encoding and mimetype -- language (queue swh_indexer_content_language): 
detect the - programming language - -- ctags (queue swh_indexer_content_ctags): compute tags information - - fossology-license (queue swh_indexer_fossology_license): compute the license -- metadata: translate file into translated_metadata dict +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta vocabulary) -Current revision indexers: +Current origin indexers: -- metadata: detects files containing metadata and retrieves translated_metadata - in content_metadata table in storage or run content indexer to translate - files. +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta and ForgeFed vocabularies) diff --git a/README.md b/README.md index f4f2481..56e255b 100644 --- a/README.md +++ b/README.md @@ -1,49 +1,42 @@ swh-indexer ============ Tools to compute multiple indexes on SWH's raw contents: - content: - mimetype - - ctags - - language - fossology-license - metadata -- revision: - - metadata +- origin: + - metadata (intrinsic, using the content indexer; and extrinsic) An indexer is in charge of: - looking up objects - extracting information from those objects - store those information in the swh-indexer db There are multiple indexers working on different object types: - content indexer: works with content sha1 hashes - revision indexer: works with revision sha1 hashes - origin indexer: works with origin identifiers Indexation procedure: - receive batch of ids - retrieve the associated data depending on object type - compute for that object some index - store the result to swh's storage Current content indexers: - mimetype (queue swh_indexer_content_mimetype): detect the encoding and mimetype -- language (queue swh_indexer_content_language): detect the - programming language - -- ctags (queue swh_indexer_content_ctags): compute tags information - - fossology-license (queue swh_indexer_fossology_license): compute the license -- metadata: translate file into 
translated_metadata dict +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta vocabulary) -Current revision indexers: +Current origin indexers: -- metadata: detects files containing metadata and retrieves translated_metadata - in content_metadata table in storage or run content indexer to translate - files. +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta and ForgeFed vocabularies) diff --git a/debian/changelog b/debian/changelog index 0ffc0f9..eedb406 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,1620 +1,1626 @@ -swh-indexer (2.9.0-1~swh1~bpo10+1) buster-swh; urgency=medium +swh-indexer (2.9.1-1~swh1) unstable-swh; urgency=medium - * Rebuild for buster-swh + * New upstream release 2.9.1 - (tagged by Valentin Lorentz + on 2022-11-30 11:46:40 +0100) + * Upstream changes: - v2.9.1 - * Fix ordering and idempotence + in the 136 -> 137 upgrade script - * docs: Remove remaining + references to ctags and content_language - * Fix 'Invalid IPv6 + URL' crash - * Fix crash when indexing two REMD objects from the + same deposit - -- Software Heritage autobuilder (on jenkins-debian1) Tue, 29 Nov 2022 14:41:27 +0000 + -- Software Heritage autobuilder (on jenkins-debian1) Wed, 30 Nov 2022 10:52:39 +0000 swh-indexer (2.9.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.9.0 - (tagged by Valentin Lorentz on 2022-11-29 15:28:28 +0100) * Upstream changes: - v2.9.0 - * storage: Insert from temporary tables in consistent order - * Drop content_language and content_ctags tables and related SQL functions -- Software Heritage autobuilder (on jenkins-debian1) Tue, 29 Nov 2022 14:35:17 +0000 swh-indexer (2.8.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.8.0 - (tagged by Valentin Lorentz on 2022-11-23 08:57:20 +0100) * Upstream changes: - v2.8.0 - * journal writer: only flush kafka once per batch - * origin_head: Do not fetch complete snapshots 
for non-FTP visits - * ExtrinsicMetadataIndexer: Add support for metadata with origin in context -- Software Heritage autobuilder (on jenkins-debian1) Wed, 23 Nov 2022 08:04:03 +0000 swh-indexer (2.7.3-2~swh1) unstable-swh; urgency=medium * Fix debian package build by adding debian/pybuild.testfiles -- Antoine Lambert Wed, 02 Nov 2022 19:34:51 +0100 swh-indexer (2.7.3-1~swh1) unstable-swh; urgency=medium * New upstream release 2.7.3 - (tagged by Valentin Lorentz on 2022-11-02 17:42:38 +0100) * Upstream changes: - v2.7.3 - * codemeta: Fix crash on SWORD documents that specify an id -- Software Heritage autobuilder (on jenkins-debian1) Wed, 02 Nov 2022 16:48:00 +0000 swh-indexer (2.7.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.7.2 - (tagged by Valentin Lorentz on 2022-10-27 14:24:43 +0200) * Upstream changes: - v2.7.2 - * Reset Sentry tags when leaving an object's context - * metadata: Make default tool configuration follow swh.indexer versions - * Fix crashes in translation (mostly from NPM and Maven) - * Fix incorrect outputs when translating from SWORD -- Software Heritage autobuilder (on jenkins-debian1) Thu, 27 Oct 2022 12:35:51 +0000 swh-indexer (2.7.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.7.1 - (tagged by Valentin Lorentz on 2022-10-07 12:01:09 +0200) * Upstream changes: - v2.7.1 - * npm: Fix crash on invalid URLs in 'bugs' field. 
-- Software Heritage autobuilder (on jenkins-debian1) Fri, 07 Oct 2022 10:10:37 +0000 swh-indexer (2.6.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.0 - (tagged by Valentin Lorentz on 2022-09-12 10:55:11 +0200) * Upstream changes: - v2.6.0 - * Convert SWHID to str before passing to sentry_sdk.set_tag - * Fix various crashes - * github: Add support for 'topics' - * npm, maven: ignore blatantly invalid licenses and URLs - * cli: Pass all journal_client config keys to the JournalClient -- Software Heritage autobuilder (on jenkins-debian1) Mon, 12 Sep 2022 09:07:01 +0000 swh-indexer (2.5.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.5.0 - (tagged by Antoine R. Dumont (@ardumont) on 2022-08-31 18:10:38 +0200) * Upstream changes: - v2.5.0 - indexer.cli: Allow batch_size configuration on journal client -- Software Heritage autobuilder (on jenkins-debian1) Wed, 31 Aug 2022 16:20:18 +0000 swh-indexer (2.4.4-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.4 - (tagged by Valentin Lorentz on 2022-08-31 11:26:51 +0200) * Upstream changes: - v2.4.4 - * Revert "metadata: Drop unsupported key 'type'" - * rehash: Call objstorage.content_get() with a HashDict instead of single hash -- Software Heritage autobuilder (on jenkins-debian1) Wed, 31 Aug 2022 09:36:21 +0000 swh-indexer (2.4.3-1~swh2) unstable-swh; urgency=medium * Drop blocking dependency constraint and bump new version. -- Antoine R. Dumont (@ardumont) Tue, 30 Aug 2022 15:37:01 +0200 swh-indexer (2.4.3-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.3 - (tagged by Antoine R. Dumont (@ardumont) on 2022-08-30 11:09:04 +0200) * Upstream changes: - v2.4.3 - metadata: Drop unsupported key 'type' -- Software Heritage autobuilder (on jenkins-debian1) Tue, 30 Aug 2022 09:25:28 +0000 swh-indexer (2.4.2-1~swh2) unstable-swh; urgency=medium * Bump new release -- Antoine R. 
Dumont (@ardumont) Thu, 25 Aug 2022 14:30:54 +0200 swh-indexer (2.4.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.2 - (tagged by Valentin Lorentz on 2022-08-25 13:24:10 +0200) * Upstream changes: - v2.4.2 - * Re-trigger Debian build -- Software Heritage autobuilder (on jenkins-debian1) Thu, 25 Aug 2022 11:33:19 +0000 swh-indexer (2.4.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.1 - (tagged by Valentin Lorentz on 2022-08-25 12:22:48 +0200) * Upstream changes: - v2.4.1 - * metadata_dictionary: Fix crash on null list item in an uri_field. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 25 Aug 2022 10:32:04 +0000 swh-indexer (2.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.0 - (tagged by Valentin Lorentz on 2022-08-25 11:58:05 +0200) * Upstream changes: - v2.4.0 - * metadata_dictionary: Add mappings for "*.nuspec" files - * Refactor metadata mappings using rdflib.Graph instead of JSON-LD internally - * Other internal refactorings - * metadata_dictionary: Add mapping for SWORD/Atom with Codemeta -- Software Heritage autobuilder (on jenkins-debian1) Thu, 25 Aug 2022 10:09:34 +0000 swh-indexer (2.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.3.0 - (tagged by Valentin Lorentz on 2022-08-10 12:16:48 +0200) * Upstream changes: - v2.3.0 - * Tag Sentry events with object ids - * Fix crashes on incorrect URLs in `@id` - * Fix crash on null characters in JSON - * Fix support of old RawExtrinsicMetadata objects with no id -- Software Heritage autobuilder (on jenkins-debian1) Wed, 10 Aug 2022 10:26:27 +0000 swh-indexer (2.2.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.2.2 - (tagged by Antoine R. 
Dumont (@ardumont) on 2022-07-29 13:41:43 +0200) * Upstream changes: - v2.2.2 - indexer.metadata: Warn and skip incomplete entries from the journal -- Software Heritage autobuilder (on jenkins-debian1) Fri, 29 Jul 2022 11:52:13 +0000 swh-indexer (2.2.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.2.1 - (tagged by Antoine R. Dumont (@ardumont) on 2022-07-29 10:56:57 +0200) * Upstream changes: - v2.2.1 - Normalize journal client indexer type names -- Software Heritage autobuilder (on jenkins-debian1) Fri, 29 Jul 2022 09:07:15 +0000 swh-indexer (2.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.2.0 - (tagged by Antoine R. Dumont (@ardumont) on 2022-07-25 16:23:12 +0200) * Upstream changes: - v2.2.0 - cli: Add content mimetype indexer journal client support - cli: Add fossology license indexer journal client support - cli: Add extrinsic-metadata indexer journal client support - docs: Fix incorrect terminology (term -> property) - mapping: Fix inconsistent name - Drop decommissioned content indexer: ctags, language -- Software Heritage autobuilder (on jenkins-debian1) Mon, 25 Jul 2022 14:33:50 +0000 swh-indexer (2.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.1.0 - (tagged by Valentin Lorentz on 2022-07-21 10:23:44 +0200) * Upstream changes: - v2.1.0 - * DirectoryIndexer: Remove incorrect assumption on object types - * docs: Explain the indexation workflow for extrinsic metadata - * docs: Update description of the metadata workflow - * metadata_dictionary: Add mappings for pubspec.yaml - * Add extrinsic metadata indexer - * Add GitHub metadata mapping - * Refactor Mapping hierarchy - * cff: Add checks for value types -- Software Heritage autobuilder (on jenkins-debian1) Thu, 21 Jul 2022 08:32:23 +0000 swh-indexer (2.0.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.0.2 - (tagged by Valentin Lorentz on 2022-06-22 12:32:41 +0200) * Upstream changes: - v2.0.2 - * Fix mypy issue with swh- journal>=1.1.0 
- * cff: Ignore invalid yaml files - * npm: Add workaround for mangled package descriptions - * npm: Fix crash when npm description is not a string -- Software Heritage autobuilder (on jenkins-debian1) Wed, 22 Jun 2022 10:40:25 +0000 swh-indexer (2.0.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.0.1 - (tagged by Antoine R. Dumont (@ardumont) on 2022-06-10 10:35:15 +0200) * Upstream changes: - v2.0.1 - upgrades/134: Add missing index creation -- Software Heritage autobuilder (on jenkins-debian1) Fri, 10 Jun 2022 09:17:44 +0000 swh-indexer (2.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.0.0 - (tagged by Antoine R. Dumont (@ardumont) on 2022-06-03 15:40:32 +0200) * Upstream changes: - v2.0.0 - Set current_version attribute to postgresql datastore - Add support for indexing from head releases - Replace RevisionMetadataIndexer with DirectoryMetadataIndexer - Add support for running the server with 'postgresql' storage cls - tests: Shorten definition of REVISION - tests: Simplify definition of ORIGINS list - tests: use stock pytest_postgresql factory function - Rewrite origin_head.py as a normal function instead of an indexer - Convert test_origin_head from unittest to pytest -- Software Heritage autobuilder (on jenkins-debian1) Fri, 03 Jun 2022 13:59:59 +0000 swh-indexer (1.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 1.2.0 - (tagged by Valentin Lorentz on 2022-06-01 16:44:30 +0200) * Upstream changes: - v1.2.0 - * cli: Add support for running "all" indexers in the journal client -- Software Heritage autobuilder (on jenkins-debian1) Wed, 01 Jun 2022 15:08:39 +0000 swh-indexer (1.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 1.1.0 - (tagged by Valentin Lorentz on 2022-05-30 15:56:19 +0200) * Upstream changes: - v1.1.0 - * Add support for indexing directly from the journal client - * cff: Do not change yaml.SafeLoader globally - * add missing sentry captures - * Change misleading documentation in 
swh-indexer/cli.py - * test and typing maintenance -- Software Heritage autobuilder (on jenkins-debian1) Mon, 30 May 2022 14:03:54 +0000 swh-indexer (1.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 1.0.0 - (tagged by David Douard on 2022-02-24 17:35:56 +0100) * Upstream changes: - v1.0.0 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 24 Feb 2022 16:42:39 +0000 swh-indexer (0.8.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.8.2 - (tagged by Valentin Lorentz on 2022-01-12 13:53:22 +0100) * Upstream changes: - v0.8.2 - * tests: Use TimestampWithTimezone.from_datetime() instead of the constructor - * docs: Use reference instead of absolute link -- Software Heritage autobuilder (on jenkins-debian1) Wed, 12 Jan 2022 12:56:56 +0000 swh-indexer (0.8.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.8.1 - (tagged by Vincent SELLIER on 2021-12-21 16:23:37 +0100) * Upstream changes: - v0.8.1 - Changelog: - tag frozendict version to avoid segfaults on the ci -- Software Heritage autobuilder (on jenkins-debian1) Tue, 21 Dec 2021 15:28:27 +0000 swh-indexer (0.8.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.8.0 - (tagged by Antoine R. Dumont (@ardumont) on 2021-05-28 16:57:47 +0200) * Upstream changes: - v0.8.0 - metadata_dictionary: Add mapping for CITATION.cff - metadata/maven: Ignore ill-formed xml instead of failing - metadata: Fix UnboundLocalError in edge case - data/codemeta: sync with official codemeta repo - Fix SingleFileMapping case sensitivity - Use swh.core 0.14 - tox: Add sphinx environments to check sane doc build -- Software Heritage autobuilder (on jenkins-debian1) Fri, 28 May 2021 15:05:39 +0000 swh-indexer (0.7.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.0 - (tagged by Antoine R. 
Dumont (@ardumont) on 2021-02-03 14:10:16 +0100) * Upstream changes: - v0.7.0 - Adapt origin_get_latest_visit_status according to latest api change -- Software Heritage autobuilder (on jenkins-debian1) Wed, 03 Feb 2021 13:15:37 +0000 swh-indexer (0.6.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.4 - (tagged by Antoine R. Dumont (@ardumont) on 2021-02-01 15:06:04 +0100) * Upstream changes: - v0.6.4 - indexer: Remove pagination logic using stream_results() instead. - ContentPartitionIndexer: Do not index the same content multiple times at once. - Add a cli section in the doc - test_journal_client_cli: Send production objects to journal - test_journal_client: Migrate away from mocks - tests: Use production backends within the indexer tests -- Software Heritage autobuilder (on jenkins-debian1) Mon, 01 Feb 2021 14:10:18 +0000 swh-indexer (0.6.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.3 - (tagged by Antoine R. Dumont (@ardumont) on 2020-11-27 14:42:30 +0100) * Upstream changes: - v0.6.3 - storage.writer: Fix journal writer sanitizer function -- Software Heritage autobuilder (on jenkins-debian1) Fri, 27 Nov 2020 13:46:03 +0000 swh-indexer (0.6.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.2 - (tagged by Antoine R. Dumont (@ardumont) on 2020-11-27 13:55:53 +0100) * Upstream changes: - v0.6.2 - BaseRow.unique_key: Don't crash when indexer_configuration_id is None. - idx.storage.JournalWriter: pass value_sanitizer to get_journal_writer. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 27 Nov 2020 13:00:28 +0000 swh-indexer (0.6.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.1 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-11-27 10:43:14 +0100) * Upstream changes: - v0.6.1 - Fix test within the debian package builds - refactor tests to pytest -- Software Heritage autobuilder (on jenkins-debian1) Fri, 27 Nov 2020 09:49:35 +0000 swh-indexer (0.6.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.0 - (tagged by Antoine R. Dumont (@ardumont) on 2020-11-26 17:08:03 +0100) * Upstream changes: - v0.6.0 - indexer.journal_client: Subscribe to OriginVisitStatus topic - swh.indexer.cli.journal_client: ensure the minimal configuration exists - Drop all deprecated uses of `args` in component factories - Drop vcversioner from requirements - Make the indexer storage write to the journal. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 26 Nov 2020 16:39:45 +0000 swh-indexer (0.5.0-2~swh1) unstable-swh; urgency=medium * Move distutils package from python3-swh.indexer to python3-swh.indexer.storage. -- Nicolas Dandrimont Wed, 18 Nov 2020 20:04:23 +0100 swh-indexer (0.5.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.5.0 - (tagged by Valentin Lorentz on 2020-11-06 15:25:04 +0100) * Upstream changes: - v0.5.0 - * Remove metadata deletion endpoints and algorithms - * Remove conflict_update/policy_update option from BaseIndexer.run() - * Remove conflict_update option from _add() endpoints. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 06 Nov 2020 14:28:05 +0000 swh-indexer (0.4.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.2 - (tagged by Antoine R. Dumont (@ardumont) on 2020-10-30 17:22:22 +0100) * Upstream changes: - v0.4.2 - tests.conftest: Fix the indexer scheduler initialization - indexer.cli: Fix missing retries_left parameter - Rename sql files according to new conventions -- Software Heritage autobuilder (on jenkins-debian1) Fri, 30 Oct 2020 16:24:14 +0000 swh-indexer (0.4.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.1 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-10-16 10:48:51 +0200) * Upstream changes: - v0.4.1 - test_cli: Remove unneeded config args parameter - api.server: Align configuration structure with clients configuration - storage.api.server: Add types to module and refactor tests -- Software Heritage autobuilder (on jenkins-debian1) Fri, 16 Oct 2020 08:59:09 +0000 swh-indexer (0.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.0 - (tagged by Antoine R. Dumont (@ardumont) on 2020-10-15 18:17:59 +0200) * Upstream changes: - v0.4.0 - swh.indexer.storage: Unify get_indexer_storage function with others -- Software Heritage autobuilder (on jenkins-debian1) Thu, 15 Oct 2020 16:19:01 +0000 swh-indexer (0.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.0 - (tagged by Valentin Lorentz on 2020-10-08 13:33:02 +0200) * Upstream changes: - v0.3.0 - * Make indexer-storage endpoints use attr-based classes instead of dicts - * Add more typing to indexers and their tests -- Software Heritage autobuilder (on jenkins-debian1) Thu, 08 Oct 2020 11:35:50 +0000 swh-indexer (0.2.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.4 - (tagged by David Douard on 2020-09-25 12:49:04 +0200) * Upstream changes: - v0.2.4 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Sep 2020 10:51:28 +0000 swh-indexer (0.2.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.3 - (tagged by David Douard on 2020-09-11 15:12:01 +0200) * Upstream changes: - v0.2.3 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 11 Sep 2020 13:15:41 +0000 swh-indexer (0.2.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.2 - (tagged by Antoine R. Dumont (@ardumont) on 2020-09-04 13:21:19 +0200) * Upstream changes: - v0.2.2 - metadata: Adapt to latest storage revision_get change - Tell pytest not to recurse in dotdirs. 
-- Software Heritage autobuilder (on jenkins-debian1) Fri, 04 Sep 2020 11:33:41 +0000 swh-indexer (0.2.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.1 - (tagged by Valentin Lorentz on 2020-08-20 12:59:53 +0200) * Upstream changes: - v0.2.1 - * indexer.rehash: Adapt content_get_metadata call to content_get - * origin_head: Use snapshot_get_all_branches instead of snapshot_get. - * Import SortedList, db_transaction_generator, and db_transaction from swh- core instead of swh-storage. - * tests: remove invalid assertion -- Software Heritage autobuilder (on jenkins-debian1) Thu, 20 Aug 2020 11:03:58 +0000 swh-indexer (0.2.0-1~swh2) unstable-swh; urgency=medium * Bump dependencies -- Antoine R. Dumont (@ardumont) Wed, 06 Aug 2020 13:28:00 +0200 swh-indexer (0.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.0 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-06 15:12:44 +0200) * Upstream changes: - v0.2.0 - Make content indexer work on partition of ids -- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 Aug 2020 13:14:35 +0000 swh-indexer (0.1.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.1.1 - (tagged by Antoine R. Dumont (@ardumont) on 2020-07-28 12:42:19 +0200) * Upstream changes: - v0.1.1 - setup.py: Migrate from vcversioner to setuptools-scm - MANIFEST: Include missing conftest.py requirement - metadata: Update swh.storage.origin_get call to latest api change - Drop unsupported "validate" proxy - tests: Drop deprecated storage.origin_add_one use - Drop useless use of pifpaf - Clean up the swh.scheduler and swh.storage pytest plugin imports - tests: Drop obsolete origin visit fields -- Software Heritage autobuilder (on jenkins-debian1) Tue, 28 Jul 2020 10:44:54 +0000 swh-indexer (0.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.1.0 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-06-23 15:44:15 +0200) * Upstream changes: - v0.1.0 - origin_head: Retrieve snapshot out of the last visit status - Fix tests according to latest internal api changes -- Software Heritage autobuilder (on jenkins-debian1) Tue, 23 Jun 2020 13:46:23 +0000 swh-indexer (0.0.171-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.171 - (tagged by Antoine R. Dumont (@ardumont) on 2020-04-23 16:46:52 +0200) * Upstream changes: - v0.0.171 - cli: Adapt journal client instantiation according to latest change - codemeta: Add compatibility with PyLD >= 2.0.0. - setup: Update the minimum required runtime python3 version - Add a pyproject.toml file to target py37 for black - Enable black - test: make test data properly typed - indexer.cli.journal_client: Simplify the journal client call - Remove type from origin_add calls - Rename --max-messages to --stop-after-objects. - tests: Migrate to latest swh-storage api change -- Software Heritage autobuilder (on jenkins-debian1) Thu, 23 Apr 2020 14:49:17 +0000 swh-indexer (0.0.170-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.170 - (tagged by Antoine R. Dumont (@ardumont) on 2020-03-08 09:57:39 +0100) * Upstream changes: - v0.0.170 - indexer.metadata: Make compatible old task format -- Software Heritage autobuilder (on jenkins-debian1) Sun, 08 Mar 2020 09:03:59 +0000 swh-indexer (0.0.169-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.169 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-03-06 15:19:21 +0100) * Upstream changes: - v0.0.169 - storage: Add @timed metrics on remaining indexer storage endpoints - indexer.storage: Use the correct metrics module - idx.storage: Add time and counter metric to idx_configuration_add - indexer.storage: Remove redundant calls to send_metric - indexer: Fix mypy issues -- Software Heritage autobuilder (on jenkins-debian1) Fri, 06 Mar 2020 14:24:50 +0000 swh-indexer (0.0.168-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.168 - (tagged by Antoine R. Dumont (@ardumont) on 2020-03-05 15:48:32 +0100) * Upstream changes: - v0.0.168 - mimetype: Make the parsing more resilient - storage.fossology_license_add: Fix one insert query too many - tests: Migrate some tests to pytest -- Software Heritage autobuilder (on jenkins-debian1) Thu, 05 Mar 2020 14:52:27 +0000 swh-indexer (0.0.167-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.167 - (tagged by Antoine R. Dumont (@ardumont) on 2020-03-04 16:33:20 +0100) * Upstream changes: - v0.0.167 - indexer (revision, origin): Fix indexer summary to output a status -- Software Heritage autobuilder (on jenkins-debian1) Wed, 04 Mar 2020 15:37:59 +0000 swh-indexer (0.0.166-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.166 - (tagged by Valentin Lorentz on 2020-03-04 15:46:37 +0100) * Upstream changes: - v0.0.166 - * Fix merging documents with @list elements. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 04 Mar 2020 14:50:54 +0000 swh-indexer (0.0.165-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.165 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-03-04 15:29:52 +0100) * Upstream changes: - v0.0.165 - indexers: Fix summary computation for range indexers - tests: Use assertEqual instead of deprecated assertEquals -- Software Heritage autobuilder (on jenkins-debian1) Wed, 04 Mar 2020 14:33:09 +0000 swh-indexer (0.0.164-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.164 - (tagged by Antoine R. Dumont (@ardumont) on 2020-03-04 13:52:15 +0100) * Upstream changes: - v0.0.164 - range-indexers: Fix hard- coded summary key value - indexers: Improve persist_index_computations type - indexer.metadata: Fix wrong update -- Software Heritage autobuilder (on jenkins-debian1) Wed, 04 Mar 2020 13:00:18 +0000 swh-indexer (0.0.163-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.163 - (tagged by Antoine R. Dumont (@ardumont) on 2020-03-04 11:26:56 +0100) * Upstream changes: - v0.0.163 - Make indexers return a summary of their actions - swh.indexer.storage: Add metrics to add/del endpoints - indexer.storage: Make add/del endpoints sum up added objects count - indexer: Remove unused next_step pattern -- Software Heritage autobuilder (on jenkins-debian1) Wed, 04 Mar 2020 10:31:03 +0000 swh-indexer (0.0.162-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.162 - (tagged by Antoine R. Dumont (@ardumont) on 2020-02-27 11:01:29 +0100) * Upstream changes: - v0.0.162 - fossology_license: Improve add query endpoint - pgstorage: Empty temp tables instead of dropping them - indexer.metadata: Fix edge case on unknown origin -- Software Heritage autobuilder (on jenkins-debian1) Thu, 27 Feb 2020 10:09:36 +0000 swh-indexer (0.0.161-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.161 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-02-25 12:07:39 +0100) * Upstream changes: - v0.0.161 - sql/128: Add content_mimetype index - storage.db: Improve content range queries to actually finish - Add a new IndexerStorageArgumentException class, for exceptions caused by the client. - Use swh-storage validation proxy. - Fix type errors with hypothesis 5.5 - Add type annotations to indexer classes -- Software Heritage autobuilder (on jenkins-debian1) Tue, 25 Feb 2020 11:20:51 +0000 swh-indexer (0.0.160-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.160 - (tagged by Antoine R. Dumont (@ardumont) on 2020-02-05 18:13:16 +0100) * Upstream changes: - v0.0.160 - Fix missing import -- Software Heritage autobuilder (on jenkins-debian1) Wed, 05 Feb 2020 17:28:18 +0000 swh-indexer (0.0.159-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.159 - (tagged by Antoine R. Dumont (@ardumont) on 2020-02-05 16:01:03 +0100) * Upstream changes: - v0.0.159 - Monkey-patch backend classes instead of 'get_storage' functions. - Fix DeprecationWarning about get_storage args. - Move IndexerStorage documentation and endpoint paths to a new IndexerStorageInterface class. - conftest: Use module's `get_` to instantiate backend - docs: Fix sphinx warnings - Fix merge_documents to work with input document with an @id. - Fix support of VCSs whose HEAD branch is an alias. - Fix type of 'author' in gemspec mapping output. - Fix test_origin_metadata mistakenly broken by e50660efca - Fix several typos reported by pre-commit hooks - Add a pre-commit config file - Remove unused property-based test environment - Migrate tox.ini to extras = xxx instead of deps = .[testing] - Merge tox test environments - Drop version constraint on pytest - Include all requirements in MANIFEST.in -- Software Heritage autobuilder (on jenkins-debian1) Wed, 05 Feb 2020 15:09:42 +0000 swh-indexer (0.0.158-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.158 - (tagged by Antoine R. 
Dumont (@ardumont) on 2019-11-20 10:26:59 +0100) * Upstream changes: - v0.0.158 - Re-enable tests for the in- memory storage. - Truncate result list instead of doing a copy. - journal client: add support for new origin_visit schema. - Fix alter table rename column syntax on 126->127 upgrade script -- Software Heritage autobuilder (on jenkins-debian1) Wed, 20 Nov 2019 09:30:37 +0000 swh-indexer (0.0.157-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.157 - (tagged by Valentin Lorentz on 2019-11-08 16:33:36 +0100) * Upstream changes: - v0.0.157 - * migrate storage tests to pytest - * proper pagination for IndexerStorage.origin_intrinsic_metadata_search_by_producer -- Software Heritage autobuilder (on jenkins-debian1) Fri, 08 Nov 2019 15:36:48 +0000 swh-indexer (0.0.156-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.156 - (tagged by Stefano Zacchiroli on 2019-11-05 17:36:11 +0100) * Upstream changes: - v0.0.156 - * update indexer for storage 0.0.156 - * cli: fix max-message handling in the journal-client command - * tests: fix test_metadata.py for frozen entities in swh.model.model - * tests: update tests for storage>=0.0.155 - * test_metadata typing: use type-specific mappings instead of cast - * storage/db.py: drop unused format arg regconfig from query - * typing: minimal changes to make a no-op mypy run pass -- Software Heritage autobuilder (on jenkins-debian1) Tue, 05 Nov 2019 16:45:10 +0000 swh-indexer (0.0.155-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.155 - (tagged by Valentin Lorentz on 2019-10-15 14:51:28 +0200) * Upstream changes: - v0.0.155 - * Avoid spamming logs with processed %d messages every message - * tox.ini: Fix py3 environment to use packaged tests - * Remove indirection swh.indexer.storage.api.wsgi to start server - * Add a command- line tool to run metadata translation. 
-- Software Heritage autobuilder (on jenkins-debian1) Tue, 15 Oct 2019 12:55:33 +0000 swh-indexer (0.0.154-1~swh2) unstable-swh; urgency=medium * Force pg_ctl path -- Nicolas Dandrimont Mon, 07 Oct 2019 16:42:08 +0200 swh-indexer (0.0.154-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.154 - (tagged by Nicolas Dandrimont on 2019-10-07 16:34:20 +0200) * Upstream changes: - Release swh.indexer v0.0.154 - Remove old scheduler compat code - Clean up CLI aliases - Port to python-magic instead of file_magic -- Software Heritage autobuilder (on jenkins-debian1) Mon, 07 Oct 2019 14:38:47 +0000 swh-indexer (0.0.153-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.153 - (tagged by Antoine R. Dumont (@ardumont) on 2019-09-11 11:46:41 +0200) * Upstream changes: - v0.0.153 - indexer-storage: Send smaller batches to origin_get - Update origin_url/from_revision/metadata_tsvector when conflict_update=True - Remove concept of 'minimal set' of metadata - npm: Fix crash on invalid 'author' field - api/client: use RPCClient instead of deprecated SWHRemoteAPI - api/server: use RPCServerApp instead of deprecated SWHServerAPIApp - tests/utils: Fix various test data model issues failing validation -- Software Heritage autobuilder (on jenkins-debian1) Wed, 11 Sep 2019 09:50:58 +0000 swh-indexer (0.0.152-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.152 - (tagged by Valentin Lorentz on 2019-07-19 11:15:41 +0200) * Upstream changes: - Send smaller batches to revision_get -- Software Heritage autobuilder (on jenkins-debian1) Fri, 19 Jul 2019 09:20:34 +0000 swh-indexer (0.0.151-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.151 - (tagged by Valentin Lorentz on 2019-07-03 17:58:32 +0200) * Upstream changes: - v0.0.151 - Fix key names in the journal client; it crashed in prod. 
-- Software Heritage autobuilder (on jenkins-debian1) Wed, 03 Jul 2019 16:03:07 +0000 swh-indexer (0.0.150-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.150 - (tagged by Antoine R. Dumont (@ardumont) on 2019-07-03 12:09:43 +0200) * Upstream changes: - v0.0.150 - indexer.cli: Drop unused extra alias `--consumer-id` flag -- Software Heritage autobuilder (on jenkins-debian1) Wed, 03 Jul 2019 10:20:46 +0000 swh-indexer (0.0.149-1~swh2) unstable-swh; urgency=medium * No-change: Bump dependency version -- Antoine R. Dumont (@ardumont) Wed, 03 Jul 2019 10:44:12 +0200 swh-indexer (0.0.149-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.149 - (tagged by Antoine R. Dumont (@ardumont) on 2019-07-02 18:11:12 +0200) * Upstream changes: - v0.0.149 - swh.indexer.cli: Fix get_journal_client api call - sql/upgrades/125: Fix migration script -- Software Heritage autobuilder (on jenkins-debian1) Tue, 02 Jul 2019 16:26:50 +0000 swh-indexer (0.0.148-1~swh3) unstable-swh; urgency=medium * Upstream release 0.0.148: Update version dependency -- Antoine Romain Dumont (@ardumont) Mon, 01 Jul 2019 01:50:29 +0100 swh-indexer (0.0.148-1~swh2) unstable-swh; urgency=medium * Upstream release 0.0.148 -- Antoine Romain Dumont (@ardumont) Mon, 01 Jul 2019 01:50:29 +0100 swh-indexer (0.0.148-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.148 - (tagged by Antoine R. 
Dumont (@ardumont) on 2019-07-01 12:21:32 +0200) * Upstream changes: - v0.0.148 - Manipulate origin URLs instead of origin ids - journal: create tasks for multiple origins - Tests: Improvements -- Software Heritage autobuilder (on jenkins-debian1) Mon, 01 Jul 2019 10:34:26 +0000 swh-indexer (0.0.147-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.147 - (tagged by Antoine Lambert on 2019-05-23 11:03:02 +0200) * Upstream changes: - version 0.0.147 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 23 May 2019 09:11:05 +0000 swh-indexer (0.0.146-1~swh2) unstable-swh; urgency=medium * Remove hypothesis directory -- Nicolas Dandrimont Thu, 18 Apr 2019 18:29:09 +0200 swh-indexer (0.0.146-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.146 - (tagged by Valentin Lorentz on 2019-04-11 11:08:29 +0200) * Upstream changes: - Better explain what the 'string fields' are. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 11 Apr 2019 09:47:24 +0000 swh-indexer (0.0.145-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.145 - (tagged by Valentin Lorentz on 2019-03-15 11:18:25 +0100) * Upstream changes: - Add support for keywords in PKG-INFO. 
-- Software Heritage autobuilder (on jenkins-debian1) Fri, 15 Mar 2019 11:34:53 +0000 swh-indexer (0.0.144-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.144 - (tagged by Thibault Allançon on 2019-03-07 08:16:49 +0100) * Upstream changes: - Fix heterogeneity of names in metadata tables -- Software Heritage autobuilder (on jenkins-debian1) Thu, 14 Mar 2019 13:30:44 +0000 swh-indexer (0.0.143-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.143 - (tagged by Thibault Allançon on 2019-03-12 10:18:37 +0100) * Upstream changes: - Use hashutil.MultiHash in swh.indexer.tests.test_utils.fill_storage - Summary: Closes T1448 - Reviewers: #reviewers - Subscribers: swh-public-ci - Maniphest Tasks: T1448 - Differential Revision: https://forge.softwareheritage.org/D1235 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 13 Mar 2019 10:24:37 +0000 swh-indexer (0.0.142-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.142 - (tagged by Valentin Lorentz on 2019-03-01 14:19:05 +0100) * Upstream changes: - Skip useless requests. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 01 Mar 2019 13:26:06 +0000 swh-indexer (0.0.141-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.141 - (tagged by Valentin Lorentz on 2019-03-01 10:59:54 +0100) * Upstream changes: - Prevent origin metadata indexer from writing empty records -- Software Heritage autobuilder (on jenkins-debian1) Fri, 01 Mar 2019 10:10:56 +0000 swh-indexer (0.0.140-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.140 - (tagged by Valentin Lorentz on 2019-02-25 10:38:52 +0100) * Upstream changes: - Drop the 'context' and 'type' config of metadata indexers. - They are both ignored already. 
-- Software Heritage autobuilder (on jenkins-debian1) Mon, 25 Feb 2019 10:40:10 +0000 swh-indexer (0.0.139-1~swh2) unstable-swh; urgency=low * New release fixing debian build -- Antoine Romain Dumont (@ardumont) Fri, 22 Feb 2019 16:27:47 +0100 swh-indexer (0.0.139-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.139 - (tagged by Antoine R. Dumont (@ardumont) on 2019-02-22 15:53:22 +0100) * Upstream changes: - v0.0.139 - Clean up no longer used tasks -- Software Heritage autobuilder (on jenkins-debian1) Fri, 22 Feb 2019 14:59:40 +0000 swh-indexer (0.0.138-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.138 - (tagged by Valentin Lorentz on 2019-02-22 15:30:30 +0100) * Upstream changes: - Make the 'config' argument of OriginMetadaIndexer optional again. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 22 Feb 2019 14:37:35 +0000 swh-indexer (0.0.137-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.137 - (tagged by Antoine R. Dumont (@ardumont) on 2019-02-22 10:59:53 +0100) * Upstream changes: - v0.0.137 - swh.indexer.storage.api.wsgi: Open production wsgi entrypoint - swh.indexer.cli: Move dev app entrypoint in dedicated cli - indexer.storage: Make server load explicit configuration and check - config: use already loaded swh config, if any, when instantiating an Indexer - api: Add support for filtering by tool_id to origin_intrinsic_metadata_search_by_producer. - api: Add storage endpoint to search metadata by mapping. - runtime: Remove implicit configuration from the metadata indexers. - debian: Remove debian packaging from master branch - docs: Update missing documentation -- Software Heritage autobuilder (on jenkins-debian1) Fri, 22 Feb 2019 10:11:29 +0000 swh-indexer (0.0.136-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.136 - (tagged by Valentin Lorentz on 2019-02-14 17:09:00 +0100) * Upstream changes: - Don't send 'None' as a revision id to storage.revision_get. 
- This error wasn't caught before because the in-mem storage - accepts None values, but the pg storage doesn't. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 14 Feb 2019 16:22:41 +0000 swh-indexer (0.0.135-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.135 - (tagged by Valentin Lorentz on 2019-02-14 14:45:24 +0100) * Upstream changes: - Fix deduplication of origins when persisting origin intrinsic metadata. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 14 Feb 2019 14:32:55 +0000 swh-indexer (0.0.134-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.134 - (tagged by Antoine R. Dumont (@ardumont) on 2019-02-13 23:46:44 +0100) * Upstream changes: - v0.0.134 - package: Break dependency of swh.indexer.storage on swh.indexer. - api/server: Do not read configuration at each request - metadata: Fix gemspec test - metadata: Prevent OriginMetadataIndexer from sending duplicate - revisions to revision_metadata_add. - test: Fix bugs found by hypothesis. - test: Use hypothesis to generate adversarial inputs. - Add more type checks in metadata dictionary. - Add checks in the idx_storage that the same content/rev/orig is not - present twice in the new data. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 14 Feb 2019 09:16:15 +0000 swh-indexer (0.0.133-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.133 - (tagged by Antoine R. 
Dumont (@ardumont) on 2019-02-12 10:28:01 +0100) * Upstream changes: - v0.0.133 - Migrate BaseDB api calls from core to storage - Improve storage api calls using latest storage api - OriginIndexer: Refactoring - tests: Refactoring - metadata search: Use index - indexer metadata: Provide stats per origin - indexer metadata: Update mapping column - indexer metadata: Improve and fix issues -- Software Heritage autobuilder (on jenkins-debian1) Tue, 12 Feb 2019 09:34:43 +0000 swh-indexer (0.0.132-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.132 - (tagged by Antoine R. Dumont (@ardumont) on 2019-01-30 15:03:14 +0100) * Upstream changes: - v0.0.132 - swh/indexer/tasks: Fix range indexer tasks - Maven: Add support for empty XML nodes. - Add support for alternative call format for Gem::Specification.new. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 30 Jan 2019 14:09:48 +0000 swh-indexer (0.0.131-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.131 - (tagged by Antoine R. Dumont (@ardumont) on 2019-01-30 10:56:43 +0100) * Upstream changes: - v0.0.131 - fix pep8 violations - fix misspellings -- Software Heritage autobuilder (on jenkins-debian1) Wed, 30 Jan 2019 10:01:47 +0000 swh-indexer (0.0.129-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.129 - (tagged by Valentin Lorentz on 2019-01-29 14:11:22 +0100) * Upstream changes: - Fix missing config file name change. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 29 Jan 2019 13:34:17 +0000 swh-indexer (0.0.128-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.128 - (tagged by Valentin Lorentz on 2019-01-25 15:22:52 +0100) * Upstream changes: - Make metadata indexers store the mappings used to translate metadata. 
-- Software Heritage autobuilder (on jenkins-debian1) Tue, 29 Jan 2019 12:18:16 +0000 swh-indexer (0.0.127-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.127 - (tagged by Valentin Lorentz on 2019-01-15 15:56:49 +0100) * Upstream changes: - Prevent repository normalization from crashing on malformed input. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 15 Jan 2019 16:20:32 +0000 swh-indexer (0.0.126-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.126 - (tagged by Valentin Lorentz on 2019-01-14 11:42:52 +0100) * Upstream changes: - Don't call OriginHeadIndexer.next_step when there is no revision. -- Software Heritage autobuilder (on jenkins-debian1) Mon, 14 Jan 2019 10:57:34 +0000 swh-indexer (0.0.125-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.125 - (tagged by Antoine R. Dumont (@ardumont) on 2019-01-11 12:01:42 +0100) * Upstream changes: - v0.0.125 - Add journal client that listens for origin visits and schedules - OriginHead - Fix tests to work with the new version of swh.storage -- Software Heritage autobuilder (on jenkins-debian1) Fri, 11 Jan 2019 11:08:51 +0000 swh-indexer (0.0.124-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.124 - (tagged by Antoine R. Dumont (@ardumont) on 2019-01-08 14:09:32 +0100) * Upstream changes: - v0.0.124 - indexer: Fix type check on indexing result -- Software Heritage autobuilder (on jenkins-debian1) Thu, 10 Jan 2019 17:12:07 +0000 swh-indexer (0.0.118-1~swh1) unstable-swh; urgency=medium * v0.0.118 * metadata-indexer: Fix setup initialization * tests: Refactoring -- Antoine R. Dumont (@ardumont) Fri, 30 Nov 2018 14:50:52 +0100 swh-indexer (0.0.67-1~swh1) unstable-swh; urgency=medium * v0.0.67 * mimetype: Migrate to indexed data as text -- Antoine R. Dumont (@ardumont) Wed, 28 Nov 2018 11:35:37 +0100 swh-indexer (0.0.66-1~swh1) unstable-swh; urgency=medium * v0.0.66 * range-indexer: Stream indexing range computations -- Antoine R. 
Dumont (@ardumont) Tue, 27 Nov 2018 11:48:24 +0100 swh-indexer (0.0.65-1~swh1) unstable-swh; urgency=medium * v0.0.65 * Fix revision metadata indexer -- Antoine R. Dumont (@ardumont) Mon, 26 Nov 2018 19:30:48 +0100 swh-indexer (0.0.64-1~swh1) unstable-swh; urgency=medium * v0.0.64 * indexer: Fix mixed identifier encodings issues * Add missing config filename for origin intrinsic metadata indexer. -- Antoine R. Dumont (@ardumont) Mon, 26 Nov 2018 12:20:01 +0100 swh-indexer (0.0.63-1~swh1) unstable-swh; urgency=medium * v0.0.63 * Make the OriginMetadataIndexer fetch rev metadata from the storage * instead of getting them via the scheduler. * Make the 'result_name' key of 'next_step' optional. * Add missing return. * doc: update index to match new swh-doc format -- Antoine R. Dumont (@ardumont) Fri, 23 Nov 2018 17:56:10 +0100 swh-indexer (0.0.62-1~swh1) unstable-swh; urgency=medium * v0.0.62 * metadata indexer: Add empty tool configuration * Add fulltext search on origin intrinsic metadata -- Antoine R. Dumont (@ardumont) Fri, 23 Nov 2018 14:25:55 +0100 swh-indexer (0.0.61-1~swh1) unstable-swh; urgency=medium * v0.0.61 * indexer: Fix origin indexer's default arguments -- Antoine R. Dumont (@ardumont) Wed, 21 Nov 2018 16:01:50 +0100 swh-indexer (0.0.60-1~swh1) unstable-swh; urgency=medium * v0.0.60 * origin_head: Make next step optional * tests: Increase coverage -- Antoine R. Dumont (@ardumont) Wed, 21 Nov 2018 12:33:13 +0100 swh-indexer (0.0.59-1~swh1) unstable-swh; urgency=medium * v0.0.59 * fossology license: Fix issue on license computation * Improve docstrings * Fix pep8 violations * Increase coverage on content indexers -- Antoine R. Dumont (@ardumont) Tue, 20 Nov 2018 14:27:20 +0100 swh-indexer (0.0.58-1~swh1) unstable-swh; urgency=medium * v0.0.58 * Add missing default configuration for fossology license indexer * tests: Remove dead code -- Antoine R. 
Dumont (@ardumont) Tue, 20 Nov 2018 12:06:56 +0100 swh-indexer (0.0.57-1~swh1) unstable-swh; urgency=medium * v0.0.57 * storage: Open new endpoint on fossology license range retrieval * indexer: Open new fossology license range indexer -- Antoine R. Dumont (@ardumont) Tue, 20 Nov 2018 11:44:57 +0100 swh-indexer (0.0.56-1~swh1) unstable-swh; urgency=medium * v0.0.56 * storage.api: Open new endpoints (mimetype range, fossology range) * content indexers: Open mimetype and fossology range indexers * Remove orchestrator modules * tests: Improve coverage -- Antoine R. Dumont (@ardumont) Mon, 19 Nov 2018 11:56:06 +0100 swh-indexer (0.0.55-1~swh1) unstable-swh; urgency=medium * v0.0.55 * swh.indexer: Let task reschedule itself through the scheduler * Use swh.scheduler instead of celery leaking all around * swh.indexer.orchestrator: Fix orchestrator initialization step * swh.indexer.tasks: Fix type error when no result or list result -- Antoine R. Dumont (@ardumont) Mon, 29 Oct 2018 10:41:54 +0100 swh-indexer (0.0.54-1~swh1) unstable-swh; urgency=medium * v0.0.54 * swh.indexer.tasks: Fix task to use the scheduler's -- Antoine R. Dumont (@ardumont) Thu, 25 Oct 2018 20:13:51 +0200 swh-indexer (0.0.53-1~swh1) unstable-swh; urgency=medium * v0.0.53 * swh.indexer.rehash: Migrate to latest swh.model.hashutil.MultiHash * indexer: Add the origin intrinsic metadata indexer * indexer: Add OriginIndexer and OriginHeadIndexer. * indexer.storage: Add the origin intrinsic metadata storage database * indexer.storage: Autogenerate the Indexer Storage HTTP API. * setup: prepare for pypi upload * tests: Add a tox file * tests: migrate to pytest * tests: Add tests around celery stack * docs: Improve documentation and reuse README in generated documentation -- Antoine R. Dumont (@ardumont) Thu, 25 Oct 2018 19:03:56 +0200 swh-indexer (0.0.52-1~swh1) unstable-swh; urgency=medium * v0.0.52 * swh.indexer.storage: Refactor fossology license get (first external * contribution, cf. 
/CONTRIBUTORS) * swh.indexer.storage: Fix typo in invariable name metadata * swh.indexer.storage: No longer use temp table when reading data * swh.indexer.storage: Clean up unused import * swh.indexer.storage: Remove dead entry points origin_metadata* * swh.indexer.storage: Update docstrings information and format -- Antoine R. Dumont (@ardumont) Wed, 13 Jun 2018 11:20:40 +0200 swh-indexer (0.0.51-1~swh1) unstable-swh; urgency=medium * Release swh.indexer v0.0.51 * Update for new db_transaction{,_generator} -- Nicolas Dandrimont Tue, 05 Jun 2018 14:10:39 +0200 swh-indexer (0.0.50-1~swh1) unstable-swh; urgency=medium * v0.0.50 * swh.indexer.api.client: Permit to specify the query timeout option -- Antoine R. Dumont (@ardumont) Thu, 24 May 2018 12:19:06 +0200 swh-indexer (0.0.49-1~swh1) unstable-swh; urgency=medium * v0.0.49 * test_storage: Instantiate the tools during tests' setUp phase * test_storage: Deallocate storage during teardown step * test_storage: Make storage test fixture connect to postgres itself * storage.api.server: Only instantiate storage backend once per import * Use thread-aware psycopg2 connection pooling for database access -- Antoine R. Dumont (@ardumont) Mon, 14 May 2018 11:09:30 +0200 swh-indexer (0.0.48-1~swh1) unstable-swh; urgency=medium * Release swh.indexer v0.0.48 * Update for new swh.storage -- Nicolas Dandrimont Sat, 12 May 2018 18:30:10 +0200 swh-indexer (0.0.47-1~swh1) unstable-swh; urgency=medium * v0.0.47 * d/control: Fix runtime typo in packaging dependency -- Antoine R. Dumont (@ardumont) Thu, 07 Dec 2017 16:54:49 +0100 swh-indexer (0.0.46-1~swh1) unstable-swh; urgency=medium * v0.0.46 * Split swh-indexer packages in 2 python3-swh.indexer.storage and * python3-swh.indexer -- Antoine R. Dumont (@ardumont) Thu, 07 Dec 2017 16:18:04 +0100 swh-indexer (0.0.45-1~swh1) unstable-swh; urgency=medium * v0.0.45 * Fix usual error raised when deploying -- Antoine R. 
Dumont (@ardumont) Thu, 07 Dec 2017 15:01:01 +0100 swh-indexer (0.0.44-1~swh1) unstable-swh; urgency=medium * v0.0.44 * swh.indexer: Make indexer use their own storage -- Antoine R. Dumont (@ardumont) Thu, 07 Dec 2017 13:20:44 +0100 swh-indexer (0.0.43-1~swh1) unstable-swh; urgency=medium * v0.0.43 * swh.indexer.mimetype: Work around problem in detection -- Antoine R. Dumont (@ardumont) Wed, 29 Nov 2017 10:26:11 +0100 swh-indexer (0.0.42-1~swh1) unstable-swh; urgency=medium * v0.0.42 * swh.indexer: Make indexers register tools in prepare method -- Antoine R. Dumont (@ardumont) Fri, 24 Nov 2017 11:26:03 +0100 swh-indexer (0.0.41-1~swh1) unstable-swh; urgency=medium * v0.0.41 * mimetype: Use magic library api instead of parsing `file` cli output -- Antoine R. Dumont (@ardumont) Mon, 20 Nov 2017 13:05:29 +0100 swh-indexer (0.0.39-1~swh1) unstable-swh; urgency=medium * v0.0.39 * swh.indexer.producer: Fix argument to match the abstract definition -- Antoine R. Dumont (@ardumont) Thu, 19 Oct 2017 10:03:44 +0200 swh-indexer (0.0.38-1~swh1) unstable-swh; urgency=medium * v0.0.38 * swh.indexer.indexer: Fix argument to match the abstract definition -- Antoine R. Dumont (@ardumont) Wed, 18 Oct 2017 19:57:47 +0200 swh-indexer (0.0.37-1~swh1) unstable-swh; urgency=medium * v0.0.37 * swh.indexer.indexer: Fix argument to match the abstract definition -- Antoine R. 
Dumont (@ardumont) Wed, 18 Oct 2017 18:59:42 +0200 swh-indexer (0.0.36-1~swh1) unstable-swh; urgency=medium * v0.0.36 * packaging: Cleanup * codemeta: Adding codemeta.json file to document metadata * swh.indexer.mimetype: Fix edge case regarding empty raw content * docs: sanitize docstrings for sphinx documentation generation * swh.indexer.metadata: Add RevisionMetadataIndexer * swh.indexer.metadata: Add ContentMetadataIndexer * swh.indexer: Refactor base class to improve inheritance * swh.indexer.metadata: First draft of the metadata content indexer * for npm (package.json) * swh.indexer.tests: Added tests for language indexer -- Antoine R. Dumont (@ardumont) Wed, 18 Oct 2017 16:24:24 +0200 swh-indexer (0.0.35-1~swh1) unstable-swh; urgency=medium * Release swh.indexer 0.0.35 * Update tasks to new swh.scheduler API -- Nicolas Dandrimont Mon, 12 Jun 2017 18:02:04 +0200 swh-indexer (0.0.34-1~swh1) unstable-swh; urgency=medium * v0.0.34 * Fix unbound local error on edge case -- Antoine R. Dumont (@ardumont) Wed, 07 Jun 2017 11:23:29 +0200 swh-indexer (0.0.33-1~swh1) unstable-swh; urgency=medium * v0.0.33 * language indexer: Improve edge case policy -- Antoine R. Dumont (@ardumont) Wed, 07 Jun 2017 11:02:47 +0200 swh-indexer (0.0.32-1~swh1) unstable-swh; urgency=medium * v0.0.32 * Update fossology license to use the latest swh-storage * Improve language indexer to deal with potential error on bad * chunking -- Antoine R. Dumont (@ardumont) Tue, 06 Jun 2017 18:13:40 +0200 swh-indexer (0.0.31-1~swh1) unstable-swh; urgency=medium * v0.0.31 * Reduce log verbosity on language indexer -- Antoine R. Dumont (@ardumont) Fri, 02 Jun 2017 19:08:52 +0200 swh-indexer (0.0.30-1~swh1) unstable-swh; urgency=medium * v0.0.30 * Fix wrong default configuration -- Antoine R. 
Dumont (@ardumont) Fri, 02 Jun 2017 18:01:27 +0200 swh-indexer (0.0.29-1~swh1) unstable-swh; urgency=medium * v0.0.29 * Update indexer to resolve indexer configuration identifier * Adapt language indexer to use partial raw content -- Antoine R. Dumont (@ardumont) Fri, 02 Jun 2017 16:21:27 +0200 swh-indexer (0.0.28-1~swh1) unstable-swh; urgency=medium * v0.0.28 * Add error resilience to fossology indexer -- Antoine R. Dumont (@ardumont) Mon, 22 May 2017 12:57:55 +0200 swh-indexer (0.0.27-1~swh1) unstable-swh; urgency=medium * v0.0.27 * swh.indexer.language: Incremental encoding detection -- Antoine R. Dumont (@ardumont) Wed, 17 May 2017 18:04:27 +0200 swh-indexer (0.0.26-1~swh1) unstable-swh; urgency=medium * v0.0.26 * swh.indexer.orchestrator: Add batch size option per indexer * Log caught exception in a unified manner * Add rescheduling option (not by default) on rehash + indexers -- Antoine R. Dumont (@ardumont) Wed, 17 May 2017 14:08:07 +0200 swh-indexer (0.0.25-1~swh1) unstable-swh; urgency=medium * v0.0.25 * Add reschedule on error parameter for indexers -- Antoine R. Dumont (@ardumont) Fri, 12 May 2017 12:13:15 +0200 swh-indexer (0.0.24-1~swh1) unstable-swh; urgency=medium * v0.0.24 * Make rehash indexer more resilient to errors by rescheduling contents * in error (be it reading or updating problems) -- Antoine R. Dumont (@ardumont) Thu, 04 May 2017 14:22:43 +0200 swh-indexer (0.0.23-1~swh1) unstable-swh; urgency=medium * v0.0.23 * Improve producer to optionally make it synchronous -- Antoine R. Dumont (@ardumont) Wed, 03 May 2017 15:29:44 +0200 swh-indexer (0.0.22-1~swh1) unstable-swh; urgency=medium * v0.0.22 * Improve mimetype indexer implementation * Make the chaining option in the mimetype indexer -- Antoine R. Dumont (@ardumont) Tue, 02 May 2017 16:31:14 +0200 swh-indexer (0.0.21-1~swh1) unstable-swh; urgency=medium * v0.0.21 * swh.indexer.rehash: Actually make the worker log -- Antoine R. 
Dumont (@ardumont) Tue, 02 May 2017 14:28:55 +0200 swh-indexer (0.0.20-1~swh1) unstable-swh; urgency=medium * v0.0.20 * swh.indexer.rehash: * Improve reading from objstorage only when needed * Fix empty file use case (which was skipped) * Add logging -- Antoine R. Dumont (@ardumont) Fri, 28 Apr 2017 09:39:09 +0200 swh-indexer (0.0.19-1~swh1) unstable-swh; urgency=medium * v0.0.19 * Fix rehash indexer's default configuration file -- Antoine R. Dumont (@ardumont) Thu, 27 Apr 2017 19:17:20 +0200 swh-indexer (0.0.18-1~swh1) unstable-swh; urgency=medium * v0.0.18 * Add new rehash indexer -- Antoine R. Dumont (@ardumont) Wed, 26 Apr 2017 15:23:02 +0200 swh-indexer (0.0.17-1~swh1) unstable-swh; urgency=medium * v0.0.17 * Add information on indexer tools (T610) -- Antoine R. Dumont (@ardumont) Fri, 02 Dec 2016 18:32:54 +0100 swh-indexer (0.0.16-1~swh1) unstable-swh; urgency=medium * v0.0.16 * bug fixes -- Antoine R. Dumont (@ardumont) Tue, 15 Nov 2016 19:31:52 +0100 swh-indexer (0.0.15-1~swh1) unstable-swh; urgency=medium * v0.0.15 * Improve message producer -- Antoine R. Dumont (@ardumont) Tue, 15 Nov 2016 18:16:42 +0100 swh-indexer (0.0.14-1~swh1) unstable-swh; urgency=medium * v0.0.14 * Update package dependency on fossology-nomossa -- Antoine R. Dumont (@ardumont) Tue, 15 Nov 2016 14:13:41 +0100 swh-indexer (0.0.13-1~swh1) unstable-swh; urgency=medium * v0.0.13 * Add new license indexer * ctags indexer: align behavior with other indexers regarding the * conflict update policy -- Antoine R. Dumont (@ardumont) Mon, 14 Nov 2016 14:13:34 +0100 swh-indexer (0.0.12-1~swh1) unstable-swh; urgency=medium * v0.0.12 * Add runtime dependency on universal-ctags -- Antoine R. Dumont (@ardumont) Fri, 04 Nov 2016 13:59:59 +0100 swh-indexer (0.0.11-1~swh1) unstable-swh; urgency=medium * v0.0.11 * Remove dependency on exuberant-ctags -- Antoine R. 
Dumont (@ardumont) Thu, 03 Nov 2016 16:13:26 +0100 swh-indexer (0.0.10-1~swh1) unstable-swh; urgency=medium * v0.0.10 * Add ctags indexer -- Antoine R. Dumont (@ardumont) Thu, 20 Oct 2016 16:12:42 +0200 swh-indexer (0.0.9-1~swh1) unstable-swh; urgency=medium * v0.0.9 * d/control: Bump dependency to latest python3-swh.storage api * mimetype: Use the charset to filter out data * orchestrator: Separate 2 distincts orchestrators (one for all * contents, one for text contents) * mimetype: once index computed, send text contents to text orchestrator -- Antoine R. Dumont (@ardumont) Thu, 13 Oct 2016 15:28:17 +0200 swh-indexer (0.0.8-1~swh1) unstable-swh; urgency=medium * v0.0.8 * Separate configuration file per indexer (no need for language) * Rename module file_properties to mimetype consistently with other * layers -- Antoine R. Dumont (@ardumont) Sat, 08 Oct 2016 11:46:29 +0200 swh-indexer (0.0.7-1~swh1) unstable-swh; urgency=medium * v0.0.7 * Adapt indexer language and mimetype to store result in storage. * Clean up obsolete code -- Antoine R. Dumont (@ardumont) Sat, 08 Oct 2016 10:26:08 +0200 swh-indexer (0.0.6-1~swh1) unstable-swh; urgency=medium * v0.0.6 * Fix multiple issues on production -- Antoine R. Dumont (@ardumont) Fri, 30 Sep 2016 17:00:11 +0200 swh-indexer (0.0.5-1~swh1) unstable-swh; urgency=medium * v0.0.5 * Fix debian/control dependency issue -- Antoine R. Dumont (@ardumont) Fri, 30 Sep 2016 16:06:20 +0200 swh-indexer (0.0.4-1~swh1) unstable-swh; urgency=medium * v0.0.4 * Upgrade dependencies issues -- Antoine R. Dumont (@ardumont) Fri, 30 Sep 2016 16:01:52 +0200 swh-indexer (0.0.3-1~swh1) unstable-swh; urgency=medium * v0.0.3 * Add encoding detection * Use encoding to improve language detection * bypass language detection for binary files * bypass ctags for binary files or decoding failure file -- Antoine R. 
Dumont (@ardumont) Fri, 30 Sep 2016 12:30:11 +0200 swh-indexer (0.0.2-1~swh1) unstable-swh; urgency=medium * v0.0.2 * Provide one possible sha1's name for the multiple tools to ease * information extrapolation * Fix debian package dependency issue -- Antoine R. Dumont (@ardumont) Thu, 29 Sep 2016 21:45:44 +0200 swh-indexer (0.0.1-1~swh1) unstable-swh; urgency=medium * Initial release * v0.0.1 * First implementation on poc -- Antoine R. Dumont (@ardumont) Wed, 28 Sep 2016 23:40:13 +0200 diff --git a/docs/README.md b/docs/README.md index f4f2481..56e255b 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,49 +1,42 @@ swh-indexer ============ Tools to compute multiple indexes on SWH's raw contents: - content: - mimetype - - ctags - - language - fossology-license - metadata -- revision: - - metadata +- origin: + - metadata (intrinsic, using the content indexer; and extrinsic) An indexer is in charge of: - looking up objects - extracting information from those objects - store those information in the swh-indexer db There are multiple indexers working on different object types: - content indexer: works with content sha1 hashes - revision indexer: works with revision sha1 hashes - origin indexer: works with origin identifiers Indexation procedure: - receive batch of ids - retrieve the associated data depending on object type - compute for that object some index - store the result to swh's storage Current content indexers: - mimetype (queue swh_indexer_content_mimetype): detect the encoding and mimetype -- language (queue swh_indexer_content_language): detect the - programming language - -- ctags (queue swh_indexer_content_ctags): compute tags information - - fossology-license (queue swh_indexer_fossology_license): compute the license -- metadata: translate file into translated_metadata dict +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta vocabulary) -Current revision indexers: +Current origin indexers: -- metadata: 
detects files containing metadata and retrieves translated_metadata - in content_metadata table in storage or run content indexer to translate - files. +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta and ForgeFed vocabularies) diff --git a/docs/dev-info.rst b/docs/dev-info.rst index 9ef8497..4720098 100644 --- a/docs/dev-info.rst +++ b/docs/dev-info.rst @@ -1,198 +1,190 @@ Hacking on swh-indexer ====================== This tutorial will guide you through the hacking on the swh-indexer. If you do not have a local copy of the Software Heritage archive, go to the :ref:`getting started tutorial `. Configuration files ------------------- You will need the following YAML configuration files to run the swh-indexer commands: - Orchestrator at ``~/.config/swh/indexer/orchestrator.yml`` .. code-block:: yaml indexers: mimetype: check_presence: false batch_size: 100 - Orchestrator-text at ``~/.config/swh/indexer/orchestrator-text.yml`` .. code-block:: yaml indexers: - # language: - # batch_size: 10 - # check_presence: false fossology_license: batch_size: 10 check_presence: false - # ctags: - # batch_size: 2 - # check_presence: false - Mimetype indexer at ``~/.config/swh/indexer/mimetype.yml`` .. code-block:: yaml # storage to read sha1's metadata (path) # storage: # cls: local # db: "service=swh-dev" # objstorage: # cls: pathslicing # root: /home/storage/swh-storage/ # slicing: 0:1/1:5 storage: cls: remote url: http://localhost:5002/ indexer_storage: cls: remote args: url: http://localhost:5007/ # storage to read sha1's content # adapt this to your need # locally: this needs to match your storage's setup objstorage: cls: pathslicing slicing: 0:1/1:5 root: /home/storage/swh-storage/ destination_task: swh.indexer.tasks.SWHOrchestratorTextContentsTask rescheduling_task: swh.indexer.tasks.SWHContentMimetypeTask - Fossology indexer at ``~/.config/swh/indexer/fossology_license.yml`` .. 
code-block:: yaml # storage to read sha1's metadata (path) # storage: # cls: local # db: "service=swh-dev" # objstorage: # cls: pathslicing # root: /home/storage/swh-storage/ # slicing: 0:1/1:5 storage: cls: remote url: http://localhost:5002/ indexer_storage: cls: remote args: url: http://localhost:5007/ # storage to read sha1's content # adapt this to your need # locally: this needs to match your storage's setup objstorage: cls: pathslicing slicing: 0:1/1:5 root: /home/storage/swh-storage/ workdir: /tmp/swh/worker.indexer/license/ tools: name: 'nomos' version: '3.1.0rc2-31-ga2cbb8c' configuration: command_line: 'nomossa ' - Worker at ``~/.config/swh/worker.yml`` .. code-block:: yaml task_broker: amqp://guest@localhost// task_modules: - swh.loader.svn.tasks - swh.loader.tar.tasks - swh.loader.git.tasks - swh.storage.archiver.tasks - swh.indexer.tasks - swh.indexer.orchestrator task_queues: - swh_loader_svn - swh_loader_tar - swh_reader_git_to_azure_archive - swh_storage_archive_worker_to_backend - swh_indexer_orchestrator_content_all - swh_indexer_orchestrator_content_text - swh_indexer_content_mimetype - - swh_indexer_content_language - - swh_indexer_content_ctags - swh_indexer_content_fossology_license - swh_loader_svn_mount_and_load - swh_loader_git_express - swh_loader_git_archive - swh_loader_svn_archive task_soft_time_limit: 0 Database -------- swh-indexer uses a database to store the indexed content. The default db is expected to be called swh-indexer-dev. Create or add ``swh-dev`` and ``swh-indexer-dev`` to the ``~/.pg_service.conf`` and ``~/.pgpass`` files, which are postgresql's configuration files. 
Add data to local DB -------------------- from within the ``swh-environment``, run the following command:: make rebuild-testdata and fetch some real data to work with, using:: python3 -m swh.loader.git.updater --origin-url Then you can list all content files using this script:: #!/usr/bin/env bash psql service=swh-dev -c "copy (select sha1 from content) to stdin" | sed -e 's/^\\\\x//g' Run the indexers ----------------- Use the list off contents to feed the indexers with with the following command:: ./list-sha1.sh | python3 -m swh.indexer.producer --batch 100 --task-name orchestrator_all Activate the workers -------------------- To send messages to different queues using rabbitmq (which should already be installed through dependencies installation), run the following command in a dedicated terminal:: python3 -m celery worker --app=swh.scheduler.celery_backend.config.app \ --pool=prefork \ --concurrency=1 \ -Ofair \ --loglevel=info \ --without-gossip \ --without-mingle \ --without-heartbeat 2>&1 With this command rabbitmq will consume message using the worker configuration file. Note: for the fossology_license indexer, you need a package fossology-nomossa which is in our `public debian repository `_. 
diff --git a/swh.indexer.egg-info/PKG-INFO b/swh.indexer.egg-info/PKG-INFO index 4912824..3fa533b 100644 --- a/swh.indexer.egg-info/PKG-INFO +++ b/swh.indexer.egg-info/PKG-INFO @@ -1,71 +1,64 @@ Metadata-Version: 2.1 Name: swh.indexer -Version: 2.9.0 +Version: 2.9.1 Summary: Software Heritage Content Indexer Home-page: https://forge.softwareheritage.org/diffusion/78/ Author: Software Heritage developers Author-email: swh-devel@inria.fr Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-indexer Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-indexer/ Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-indexer ============ Tools to compute multiple indexes on SWH's raw contents: - content: - mimetype - - ctags - - language - fossology-license - metadata -- revision: - - metadata +- origin: + - metadata (intrinsic, using the content indexer; and extrinsic) An indexer is in charge of: - looking up objects - extracting information from those objects - store those information in the swh-indexer db There are multiple indexers working on different object types: - content indexer: works with content sha1 hashes - revision indexer: works with revision sha1 hashes - origin indexer: works with origin identifiers Indexation procedure: - receive batch of ids - retrieve the associated data depending on object type - compute for that object some index - store the result to swh's storage Current content indexers: - mimetype (queue swh_indexer_content_mimetype): 
detect the encoding and mimetype -- language (queue swh_indexer_content_language): detect the - programming language - -- ctags (queue swh_indexer_content_ctags): compute tags information - - fossology-license (queue swh_indexer_fossology_license): compute the license -- metadata: translate file into translated_metadata dict +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta vocabulary) -Current revision indexers: +Current origin indexers: -- metadata: detects files containing metadata and retrieves translated_metadata - in content_metadata table in storage or run content indexer to translate - files. +- metadata: translate file from an ecosystem-specific formats to JSON-LD + (using schema.org/CodeMeta and ForgeFed vocabularies) diff --git a/swh/indexer/metadata.py b/swh/indexer/metadata.py index 14212a3..5a7a25c 100644 --- a/swh/indexer/metadata.py +++ b/swh/indexer/metadata.py @@ -1,567 +1,567 @@ # Copyright (C) 2017-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from copy import deepcopy import hashlib -import itertools import logging import time from typing import ( Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple, TypeVar, cast, ) from urllib.parse import urlparse import pkg_resources import sentry_sdk from swh.core.config import merge_configs from swh.core.utils import grouper from swh.indexer.codemeta import merge_documents from swh.indexer.indexer import ( BaseIndexer, ContentIndexer, DirectoryIndexer, ObjectsDict, OriginIndexer, ) from swh.indexer.metadata_detector import detect_metadata from swh.indexer.metadata_dictionary import EXTRINSIC_MAPPINGS, INTRINSIC_MAPPINGS from swh.indexer.metadata_dictionary.base import DirectoryLsEntry from swh.indexer.origin_head import get_head_swhid from swh.indexer.storage 
import INDEXER_CFG_KEY, Sha1 from swh.indexer.storage.model import ( ContentMetadataRow, DirectoryIntrinsicMetadataRow, OriginExtrinsicMetadataRow, OriginIntrinsicMetadataRow, ) from swh.model import hashutil from swh.model.model import Directory, MetadataAuthorityType from swh.model.model import ObjectType as ModelObjectType from swh.model.model import Origin, RawExtrinsicMetadata, Sha1Git from swh.model.swhids import CoreSWHID, ExtendedObjectType, ObjectType REVISION_GET_BATCH_SIZE = 10 RELEASE_GET_BATCH_SIZE = 10 ORIGIN_GET_BATCH_SIZE = 10 T1 = TypeVar("T1") T2 = TypeVar("T2") logger = logging.getLogger(__name__) def call_with_batches( f: Callable[[List[T1]], Iterable[T2]], args: List[T1], batch_size: int, ) -> Iterator[T2]: """Calls a function with batches of args, and concatenates the results.""" groups = grouper(args, batch_size) for group in groups: yield from f(list(group)) class ExtrinsicMetadataIndexer( BaseIndexer[Sha1Git, RawExtrinsicMetadata, OriginExtrinsicMetadataRow] ): def process_journal_objects(self, objects: ObjectsDict) -> Dict: summary: Dict[str, Any] = {"status": "uneventful"} try: results = {} for item in objects.get("raw_extrinsic_metadata", []): remd = RawExtrinsicMetadata.from_dict(item) sentry_sdk.set_tag("swh-indexer-remd-swhid", str(remd.swhid())) - results[remd.target] = self.index(remd.id, data=remd) + for result in self.index(remd.id, data=remd): + results[result.id] = result except Exception: if not self.catch_exceptions: raise summary["status"] = "failed" return summary - self.results = list(itertools.chain.from_iterable(results.values())) + self.results = list(results.values()) summary_persist = self.persist_index_computations(self.results) if summary_persist: for value in summary_persist.values(): if value > 0: summary["status"] = "eventful" summary.update(summary_persist) return summary def index( self, id: Sha1Git, data: Optional[RawExtrinsicMetadata], **kwargs, ) -> List[OriginExtrinsicMetadataRow]: if data is None: raise 
NotImplementedError( "ExtrinsicMetadataIndexer.index() without RawExtrinsicMetadata data" ) if data.target.object_type == ExtendedObjectType.ORIGIN: origin_sha1 = data.target.object_id elif data.origin is not None: # HACK: As swh-search does (yet?) not support searching on directories # and traversing back to origins, we index metadata on non-origins with # an origin context as if they were on the origin itself. origin_sha1 = hashlib.sha1(data.origin.encode()).digest() else: # other types are not supported yet return [] if data.authority.type == MetadataAuthorityType.REGISTRY: # metadata provided by a third-party; don't trust it # (technically this could be handled below, but we check it here # to return early; sparing a translation and origin lookup) # TODO: add ways to define trusted authorities return [] metadata_items = [] mappings: List[str] = [] for mapping_cls in EXTRINSIC_MAPPINGS.values(): if data.format in mapping_cls.extrinsic_metadata_formats(): mapping = mapping_cls() metadata_item = mapping.translate(data.metadata) if metadata_item is not None: metadata_items.append(metadata_item) mappings.append(mapping.name) if not metadata_items: # Don't have any mapping to parse it, ignore return [] # TODO: batch requests to origin_get_by_sha1() for _ in range(6): origins = self.storage.origin_get_by_sha1([origin_sha1]) try: (origin,) = origins if origin is not None: break except ValueError: pass # The origin does not exist. This may be due to some replication lag # between the loader's DB/journal and the DB we are consuming from. # Wait a bit and try again logger.debug("Origin %s not found, sleeping for 10s.", data.target) time.sleep(10) else: # Does not exist, or replication lag > 60s. 
raise ValueError(f"Unknown origin {data.target}") from None if urlparse(data.authority.url).netloc != urlparse(origin["url"]).netloc: # metadata provided by a third-party; don't trust it # TODO: add ways to define trusted authorities return [] metadata = merge_documents(metadata_items) return [ OriginExtrinsicMetadataRow( id=origin["url"], indexer_configuration_id=self.tool["id"], from_remd_id=data.id, mappings=mappings, metadata=metadata, ) ] def persist_index_computations( self, results: List[OriginExtrinsicMetadataRow] ) -> Dict[str, int]: """Persist the results in storage.""" return self.idx_storage.origin_extrinsic_metadata_add(results) class ContentMetadataIndexer(ContentIndexer[ContentMetadataRow]): """Content-level indexer This indexer is in charge of: - filtering out content already indexed in content_metadata - reading content from objstorage with the content's id sha1 - computing metadata by given context - using the metadata_dictionary as the 'swh-metadata-translator' tool - store result in content_metadata table """ def filter(self, ids): """Filter out known sha1s and return only missing ones.""" yield from self.idx_storage.content_metadata_missing( ( { "id": sha1, "indexer_configuration_id": self.tool["id"], } for sha1 in ids ) ) def index( self, id: Sha1, data: Optional[bytes] = None, log_suffix="unknown directory", **kwargs, ) -> List[ContentMetadataRow]: """Index sha1s' content and store result. Args: id: content's identifier data: raw content in bytes Returns: dict: dictionary representing a content_metadata. 
If the translation wasn't successful the metadata keys will be returned as None """ assert isinstance(id, bytes) assert data is not None metadata = None try: mapping_name = self.tool["tool_configuration"]["context"] log_suffix += ", content_id=%s" % hashutil.hash_to_hex(id) metadata = INTRINSIC_MAPPINGS[mapping_name](log_suffix).translate(data) except Exception: self.log.exception( "Problem during metadata translation " "for content %s" % hashutil.hash_to_hex(id) ) sentry_sdk.capture_exception() if metadata is None: return [] return [ ContentMetadataRow( id=id, indexer_configuration_id=self.tool["id"], metadata=metadata, ) ] def persist_index_computations( self, results: List[ContentMetadataRow] ) -> Dict[str, int]: """Persist the results in storage.""" return self.idx_storage.content_metadata_add(results) DEFAULT_CONFIG: Dict[str, Any] = { "tools": { "name": "swh.indexer.metadata", "version": pkg_resources.get_distribution("swh.indexer").version, "configuration": {}, }, } class DirectoryMetadataIndexer(DirectoryIndexer[DirectoryIntrinsicMetadataRow]): """Directory-level indexer This indexer is in charge of: - filtering directories already indexed in directory_intrinsic_metadata table with defined computation tool - retrieve all entry_files in directory - use metadata_detector for file_names containing metadata - compute metadata translation if necessary and possible (depends on tool) - send sha1s to content indexing if possible - store the results for directory """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.config = merge_configs(DEFAULT_CONFIG, self.config) def filter(self, sha1_gits): """Filter out known sha1s and return only missing ones.""" yield from self.idx_storage.directory_intrinsic_metadata_missing( ( { "id": sha1_git, "indexer_configuration_id": self.tool["id"], } for sha1_git in sha1_gits ) ) def index( self, id: Sha1Git, data: Optional[Directory] = None, **kwargs ) -> List[DirectoryIntrinsicMetadataRow]: """Index 
directory by processing it and organizing result. use metadata_detector to iterate on filenames, passes them to the content indexers, then merges (if more than one) Args: id: sha1_git of the directory data: should always be None Returns: dict: dictionary representing a directory_intrinsic_metadata, with keys: - id: directory's identifier (sha1_git) - indexer_configuration_id (bytes): tool used - metadata: dict of retrieved metadata """ dir_: List[DirectoryLsEntry] assert data is None, "Unexpected directory object" dir_ = cast( List[DirectoryLsEntry], list(self.storage.directory_ls(id, recursive=False)), ) try: if [entry["type"] for entry in dir_] == ["dir"]: # If the root is just a single directory, recurse into it # eg. PyPI packages, GNU tarballs subdir = dir_[0]["target"] dir_ = cast( List[DirectoryLsEntry], list(self.storage.directory_ls(subdir, recursive=False)), ) files = [entry for entry in dir_ if entry["type"] == "file"] (mappings, metadata) = self.translate_directory_intrinsic_metadata( files, log_suffix="directory=%s" % hashutil.hash_to_hex(id), ) except Exception as e: self.log.exception("Problem when indexing dir: %r", e) sentry_sdk.capture_exception() return [] return [ DirectoryIntrinsicMetadataRow( id=id, indexer_configuration_id=self.tool["id"], mappings=mappings, metadata=metadata, ) ] def persist_index_computations( self, results: List[DirectoryIntrinsicMetadataRow] ) -> Dict[str, int]: """Persist the results in storage.""" # TODO: add functions in storage to keep data in # directory_intrinsic_metadata return self.idx_storage.directory_intrinsic_metadata_add(results) def translate_directory_intrinsic_metadata( self, files: List[DirectoryLsEntry], log_suffix: str ) -> Tuple[List[Any], Any]: """ Determine plan of action to translate metadata in the given root directory Args: files: list of file entries, as returned by :meth:`swh.storage.interface.StorageInterface.directory_ls` Returns: (List[str], dict): list of mappings used and dict with 
translated metadata according to the CodeMeta vocabulary """ metadata = [] # TODO: iterate on each context, on each file # -> get raw_contents # -> translate each content config = { k: self.config[k] for k in [INDEXER_CFG_KEY, "objstorage", "storage", "tools"] } all_detected_files = detect_metadata(files) used_mappings = [ INTRINSIC_MAPPINGS[context].name for context in all_detected_files ] for (mapping_name, detected_files) in all_detected_files.items(): cfg = deepcopy(config) cfg["tools"]["configuration"]["context"] = mapping_name c_metadata_indexer = ContentMetadataIndexer(config=cfg) # sha1s that are in content_metadata table sha1s_in_storage = [] metadata_generator = self.idx_storage.content_metadata_get(detected_files) for c in metadata_generator: # extracting metadata sha1 = c.id sha1s_in_storage.append(sha1) local_metadata = c.metadata # local metadata is aggregated if local_metadata: metadata.append(local_metadata) sha1s_filtered = [ item for item in detected_files if item not in sha1s_in_storage ] if sha1s_filtered: # content indexing try: c_metadata_indexer.run( sha1s_filtered, log_suffix=log_suffix, ) # on the fly possibility: for result in c_metadata_indexer.results: local_metadata = result.metadata metadata.append(local_metadata) except Exception: self.log.exception("Exception while indexing metadata on contents") sentry_sdk.capture_exception() metadata = merge_documents(metadata) return (used_mappings, metadata) class OriginMetadataIndexer( OriginIndexer[Tuple[OriginIntrinsicMetadataRow, DirectoryIntrinsicMetadataRow]] ): USE_TOOLS = False def __init__(self, config=None, **kwargs) -> None: super().__init__(config=config, **kwargs) self.directory_metadata_indexer = DirectoryMetadataIndexer(config=config) def index_list( self, origins: List[Origin], *, check_origin_known: bool = True, **kwargs, ) -> List[Tuple[OriginIntrinsicMetadataRow, DirectoryIntrinsicMetadataRow]]: head_rev_ids = [] head_rel_ids = [] origin_heads: Dict[Origin, CoreSWHID] = {} # 
Filter out origins not in the storage if check_origin_known: known_origins = list( call_with_batches( self.storage.origin_get, [origin.url for origin in origins], ORIGIN_GET_BATCH_SIZE, ) ) else: known_origins = list(origins) for origin in known_origins: if origin is None: continue head_swhid = get_head_swhid(self.storage, origin.url) if head_swhid: origin_heads[origin] = head_swhid if head_swhid.object_type == ObjectType.REVISION: head_rev_ids.append(head_swhid.object_id) elif head_swhid.object_type == ObjectType.RELEASE: head_rel_ids.append(head_swhid.object_id) else: assert False, head_swhid head_revs = dict( zip( head_rev_ids, call_with_batches( self.storage.revision_get, head_rev_ids, REVISION_GET_BATCH_SIZE ), ) ) head_rels = dict( zip( head_rel_ids, call_with_batches( self.storage.release_get, head_rel_ids, RELEASE_GET_BATCH_SIZE ), ) ) results = [] for (origin, head_swhid) in origin_heads.items(): sentry_sdk.set_tag("swh-indexer-origin-url", origin.url) sentry_sdk.set_tag("swh-indexer-origin-head-swhid", str(head_swhid)) if head_swhid.object_type == ObjectType.REVISION: rev = head_revs[head_swhid.object_id] if not rev: self.log.warning( "Missing head object %s of origin %r", head_swhid, origin.url ) continue directory_id = rev.directory elif head_swhid.object_type == ObjectType.RELEASE: rel = head_rels[head_swhid.object_id] if not rel: self.log.warning( "Missing head object %s of origin %r", head_swhid, origin.url ) continue if rel.target_type != ModelObjectType.DIRECTORY: # TODO self.log.warning( "Head release %s of %r has unexpected target type %s", head_swhid, origin.url, rel.target_type, ) continue assert rel.target, rel directory_id = rel.target else: assert False, head_swhid for dir_metadata in self.directory_metadata_indexer.index(directory_id): # There is at most one dir_metadata orig_metadata = OriginIntrinsicMetadataRow( from_directory=dir_metadata.id, id=origin.url, metadata=dir_metadata.metadata, mappings=dir_metadata.mappings, 
indexer_configuration_id=dir_metadata.indexer_configuration_id, ) results.append((orig_metadata, dir_metadata)) return results def persist_index_computations( self, results: List[Tuple[OriginIntrinsicMetadataRow, DirectoryIntrinsicMetadataRow]], ) -> Dict[str, int]: # Deduplicate directories dir_metadata: Dict[bytes, DirectoryIntrinsicMetadataRow] = {} orig_metadata: Dict[str, OriginIntrinsicMetadataRow] = {} summary: Dict = {} for (orig_item, dir_item) in results: assert dir_item.metadata == orig_item.metadata if dir_item.metadata and not (dir_item.metadata.keys() <= {"@context"}): # Only store non-empty metadata sets if dir_item.id not in dir_metadata: dir_metadata[dir_item.id] = dir_item if orig_item.id not in orig_metadata: orig_metadata[orig_item.id] = orig_item if dir_metadata: summary_dir = self.idx_storage.directory_intrinsic_metadata_add( list(dir_metadata.values()) ) summary.update(summary_dir) if orig_metadata: summary_ori = self.idx_storage.origin_intrinsic_metadata_add( list(orig_metadata.values()) ) summary.update(summary_ori) return summary diff --git a/swh/indexer/metadata_dictionary/utils.py b/swh/indexer/metadata_dictionary/utils.py index 8a5fdb9..6aaf4fd 100644 --- a/swh/indexer/metadata_dictionary/utils.py +++ b/swh/indexer/metadata_dictionary/utils.py @@ -1,112 +1,116 @@ # Copyright (C) 2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json from typing import Any, Callable, Iterable, Optional, Sequence, TypeVar import urllib.parse from pyld import jsonld from rdflib import RDF, Graph, URIRef import rdflib.term from swh.indexer.codemeta import _document_loader def prettyprint_graph(graph: Graph, root: URIRef): s = graph.serialize(format="application/ld+json") jsonld_graph = json.loads(s) translated_metadata = jsonld.frame( jsonld_graph, {"@id": str(root)}, 
options={ "documentLoader": _document_loader, "processingMode": "json-ld-1.1", }, ) print(json.dumps(translated_metadata, indent=4)) def add_list( graph: Graph, subject: rdflib.term.Node, predicate: rdflib.term.Identifier, objects: Sequence[rdflib.term.Node], ) -> None: """Adds triples to the ``graph`` so that they are equivalent to this JSON-LD object:: { "@id": subject, predicate: {"@list": objects} } This is a naive implementation of https://json-ld.org/spec/latest/json-ld-api/#list-to-rdf-conversion """ # JSON-LD's @list is syntactic sugar for a linked list / chain in the RDF graph, # which is what we are going to construct, starting from the end: last_link: rdflib.term.Node last_link = RDF.nil for item in reversed(objects): link = rdflib.BNode() graph.add((link, RDF.first, item)) graph.add((link, RDF.rest, last_link)) last_link = link graph.add((subject, predicate, last_link)) TValue = TypeVar("TValue") def add_map( graph: Graph, subject: rdflib.term.Node, predicate: rdflib.term.Identifier, f: Callable[[Graph, TValue], Optional[rdflib.term.Node]], values: Iterable[TValue], ) -> None: """Helper for :func:`add_list` that takes a mapper function ``f``.""" nodes = [f(graph, value) for value in values] add_list(graph, subject, predicate, [node for node in nodes if node]) def add_url_if_valid( graph: Graph, subject: rdflib.term.Node, predicate: rdflib.term.Identifier, url: Any, ) -> None: """Adds ``(subject, predicate, url)`` to the graph if ``url`` is well-formed. This is meant as a workaround for https://github.com/digitalbazaar/pyld/issues/91 to drop URLs that are blatantly invalid early, so PyLD does not crash. >>> from pprint import pprint >>> graph = Graph() >>> subject = rdflib.term.URIRef("http://example.org/test-software") >>> predicate = rdflib.term.URIRef("http://schema.org/license") >>> add_url_if_valid( ... graph, subject, predicate, "https//www.apache.org/licenses/LICENSE-2.0.txt" ... ) >>> add_url_if_valid( ... 
graph, subject, predicate, "http:s//www.apache.org/licenses/LICENSE-2.0.txt" ... ) >>> add_url_if_valid( ... graph, subject, predicate, "https://www.apache.org/licenses/LICENSE-2.0.txt" ... ) >>> add_url_if_valid( ... graph, subject, predicate, 42 ... ) >>> pprint(set(graph.triples((subject, predicate, None)))) {(rdflib.term.URIRef('http://example.org/test-software'), rdflib.term.URIRef('http://schema.org/license'), rdflib.term.URIRef('https://www.apache.org/licenses/LICENSE-2.0.txt'))} """ if not isinstance(url, str): return - if " " in url or not urllib.parse.urlparse(url).netloc: + try: + parsed_url = urllib.parse.urlparse(url) + except Exception: + return + if " " in url or not parsed_url.netloc: return graph.add((subject, predicate, rdflib.term.URIRef(url))) diff --git a/swh/indexer/sql/upgrades/137.sql b/swh/indexer/sql/upgrades/137.sql index a7d69f1..152ae0e 100644 --- a/swh/indexer/sql/upgrades/137.sql +++ b/swh/indexer/sql/upgrades/137.sql @@ -1,23 +1,19 @@ -- SWH Indexer DB schema upgrade -- from_version: 136 -- to_version: 137 -- description: Drop content_language and content_ctags tables and related functions -insert into dbversion(version, release, description) - values(137, now(), 'Work In Progress'); +drop function if exists swh_content_language_add; +drop function if exists swh_mktemp_content_language(); +drop function if exists swh_mktemp_content_ctags(); +drop function if exists swh_content_ctags_add(); +drop function if exists swh_content_ctags_search; -drop function swh_content_language_add; -drop function swh_mktemp_content_language(); -drop function swh_mktemp_content_ctags(); -drop function swh_content_ctags_add(); -drop function swh_content_ctags_search; +drop type if exists content_ctags_signature; -drop index content_language_pkey; +drop table if exists content_language; +drop table if exists content_ctags; -drop table content_language; -drop table content_ctags; - -drop type languages; -drop type ctags_languages; -drop type 
content_ctags_signature; +drop type if exists languages; +drop type if exists ctags_languages; diff --git a/swh/indexer/tests/metadata_dictionary/test_npm.py b/swh/indexer/tests/metadata_dictionary/test_npm.py index 9b52bfd..08f8ea6 100644 --- a/swh/indexer/tests/metadata_dictionary/test_npm.py +++ b/swh/indexer/tests/metadata_dictionary/test_npm.py @@ -1,449 +1,460 @@ # Copyright (C) 2017-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json from hypothesis import HealthCheck, given, settings import pytest from swh.indexer.metadata_detector import detect_metadata from swh.indexer.metadata_dictionary import MAPPINGS from swh.indexer.storage.model import ContentMetadataRow from ..test_metadata import TRANSLATOR_TOOL, ContentMetadataTestIndexer from ..utils import ( BASE_TEST_CONFIG, MAPPING_DESCRIPTION_CONTENT_SHA1, json_document_strategy, ) def test_compute_metadata_none(): """ testing content empty content is empty should return None """ content = b"" # None if no metadata was found or an error occurred declared_metadata = None result = MAPPINGS["NpmMapping"]().translate(content) assert declared_metadata == result def test_compute_metadata_npm(): """ testing only computation of metadata with hard_mapping_npm """ content = b""" { "name": "test_metadata", "version": "0.0.2", "description": "Simple package.json test for indexer", "repository": { "type": "git", "url": "https://github.com/moranegg/metadata_test" }, "author": { "email": "moranegg@example.com", "name": "Morane G" } } """ declared_metadata = { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "name": "test_metadata", "version": "0.0.2", "description": "Simple package.json test for indexer", "codeRepository": "git+https://github.com/moranegg/metadata_test", "author": [ { "type": 
"Person", "name": "Morane G", "email": "moranegg@example.com", } ], } result = MAPPINGS["NpmMapping"]().translate(content) assert declared_metadata == result def test_compute_metadata_invalid_description_npm(): """ testing only computation of metadata with hard_mapping_npm """ content = b""" { "name": "test_metadata", "version": "0.0.2", "description": 1234 } """ declared_metadata = { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "name": "test_metadata", "version": "0.0.2", } result = MAPPINGS["NpmMapping"]().translate(content) assert declared_metadata == result def test_index_content_metadata_npm(storage, obj_storage): """ testing NPM with package.json - one sha1 uses a file that can't be translated to metadata and should return None in the translated metadata """ sha1s = [ MAPPING_DESCRIPTION_CONTENT_SHA1["json:test-metadata-package.json"], MAPPING_DESCRIPTION_CONTENT_SHA1["json:npm-package.json"], MAPPING_DESCRIPTION_CONTENT_SHA1["python:code"], ] # this metadata indexer computes only metadata for package.json # in npm context with a hard mapping config = BASE_TEST_CONFIG.copy() config["tools"] = [TRANSLATOR_TOOL] metadata_indexer = ContentMetadataTestIndexer(config=config) metadata_indexer.run(sha1s, log_suffix="unknown content") results = list(metadata_indexer.idx_storage.content_metadata_get(sha1s)) expected_results = [ ContentMetadataRow( id=sha1s[0], tool=TRANSLATOR_TOOL, metadata={ "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "codeRepository": "git+https://github.com/moranegg/metadata_test", "description": "Simple package.json test for indexer", "name": "test_metadata", "version": "0.0.1", }, ), ContentMetadataRow( id=sha1s[1], tool=TRANSLATOR_TOOL, metadata={ "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "issueTracker": "https://github.com/npm/npm/issues", "author": [ { "type": "Person", "name": "Isaac Z. 
Schlueter", "email": "i@izs.me", "url": "http://blog.izs.me", } ], "codeRepository": "git+https://github.com/npm/npm", "description": "a package manager for JavaScript", "license": "https://spdx.org/licenses/Artistic-2.0", "version": "5.0.3", "name": "npm", "url": "https://docs.npmjs.com/", }, ), ] for result in results: del result.tool["id"] result.metadata.pop("keywords", None) # The assertion below returns False sometimes because of nested lists assert expected_results == results def test_npm_null_list_item_normalization(): package_json = b"""{ "name": "foo", "keywords": [ "foo", null ], "homepage": [ "http://example.org/", null ] }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "type": "SoftwareSourceCode", "url": "http://example.org/", "keywords": "foo", } def test_npm_bugs_normalization(): # valid dictionary package_json = b"""{ "name": "foo", "bugs": { "url": "https://github.com/owner/project/issues", "email": "foo@example.com" } }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "issueTracker": "https://github.com/owner/project/issues", "type": "SoftwareSourceCode", } # "invalid" dictionary package_json = b"""{ "name": "foo", "bugs": { "email": "foo@example.com" } }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "type": "SoftwareSourceCode", } # string package_json = b"""{ "name": "foo", "bugs": "https://github.com/owner/project/issues" }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "issueTracker": "https://github.com/owner/project/issues", "type": "SoftwareSourceCode", } def test_npm_repository_normalization(): # normal package_json = 
b"""{ "name": "foo", "repository": { "type" : "git", "url" : "https://github.com/npm/cli.git" } }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "codeRepository": "git+https://github.com/npm/cli.git", "type": "SoftwareSourceCode", } # missing url package_json = b"""{ "name": "foo", "repository": { "type" : "git" } }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "type": "SoftwareSourceCode", } # github shortcut package_json = b"""{ "name": "foo", "repository": "github:npm/cli" }""" result = MAPPINGS["NpmMapping"]().translate(package_json) expected_result = { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "codeRepository": "git+https://github.com/npm/cli.git", "type": "SoftwareSourceCode", } assert result == expected_result # github shortshortcut package_json = b"""{ "name": "foo", "repository": "npm/cli" }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == expected_result # gitlab shortcut package_json = b"""{ "name": "foo", "repository": "gitlab:user/repo" }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "name": "foo", "codeRepository": "git+https://gitlab.com/user/repo.git", "type": "SoftwareSourceCode", } def test_npm_author(): package_json = rb"""{ "version": "1.0.0", "author": "Foo Bar (@example)" }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "author": [{"name": "Foo Bar", "type": "Person"}], "version": "1.0.0", } def test_npm_invalid_uris(): package_json = rb"""{ "version": "1.0.0", "homepage": "", "author": { "name": "foo", "url": "http://example.org" } }""" result = 
MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "author": [{"name": "foo", "type": "Person", "url": "http://example.org"}], "version": "1.0.0", } package_json = rb"""{ "version": "1.0.0", "homepage": "http://example.org", "author": { "name": "foo", "url": "" } }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "author": [{"name": "foo", "type": "Person"}], "url": "http://example.org", "version": "1.0.0", } package_json = rb"""{ "version": "1.0.0", "homepage": "", "author": { "name": "foo", "url": "" }, "bugs": "" }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "author": [{"name": "foo", "type": "Person"}], "version": "1.0.0", } package_json = rb"""{ "version": "1.0.0", "homepage": "http:example.org", "author": { "name": "foo", "url": "http:example.com" }, "bugs": { "url": "http:example.com" } }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "author": [{"name": "foo", "type": "Person"}], "version": "1.0.0", } package_json = rb"""{ "version": "1.0.0", "repository": "git+https://g ithub.com/foo/bar.git" }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "version": "1.0.0", } + package_json = rb"""{ + "version": "1.0.0", + "repository": "git+http://\\u001b[D\\u001b[D\\u001b[Ds\\u001b[C\\u001b[C\\u001b[D\\u001b://github.com/dearzoe/array-combination" +}""" # noqa + result = MAPPINGS["NpmMapping"]().translate(package_json) + assert result == { + "@context": 
"https://doi.org/10.5063/schema/codemeta-2.0", + "type": "SoftwareSourceCode", + "version": "1.0.0", + } + def test_npm_invalid_licenses(): package_json = rb"""{ "version": "1.0.0", "license": "SEE LICENSE IN LICENSE.md", "author": { "name": "foo", "url": "http://example.org" } }""" result = MAPPINGS["NpmMapping"]().translate(package_json) assert result == { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "type": "SoftwareSourceCode", "author": [{"name": "foo", "type": "Person", "url": "http://example.org"}], "version": "1.0.0", } @settings(suppress_health_check=[HealthCheck.too_slow]) @given(json_document_strategy(keys=list(MAPPINGS["NpmMapping"].mapping))) # type: ignore def test_npm_adversarial(doc): raw = json.dumps(doc).encode() MAPPINGS["NpmMapping"]().translate(raw) @pytest.mark.parametrize( "filename", [b"package.json", b"Package.json", b"PACKAGE.json", b"PACKAGE.JSON"] ) def test_detect_metadata_package_json(filename): df = [ { "sha1_git": b"abc", "name": b"index.js", "target": b"abc", "length": 897, "status": "visible", "type": "file", "perms": 33188, "dir_id": b"dir_a", "sha1": b"bcd", }, { "sha1_git": b"aab", "name": filename, "target": b"aab", "length": 712, "status": "visible", "type": "file", "perms": 33188, "dir_id": b"dir_a", "sha1": b"cde", }, ] results = detect_metadata(df) expected_results = {"NpmMapping": [b"cde"]} assert expected_results == results diff --git a/swh/indexer/tests/test_metadata.py b/swh/indexer/tests/test_metadata.py index bb6b883..61c71cd 100644 --- a/swh/indexer/tests/test_metadata.py +++ b/swh/indexer/tests/test_metadata.py @@ -1,419 +1,452 @@ # Copyright (C) 2017-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime from unittest.mock import call import attr from swh.indexer.metadata import ( ContentMetadataIndexer, 
DirectoryMetadataIndexer, ExtrinsicMetadataIndexer, ) from swh.indexer.storage.model import ( ContentMetadataRow, DirectoryIntrinsicMetadataRow, OriginExtrinsicMetadataRow, ) from swh.indexer.tests.utils import DIRECTORY2 from swh.model.model import ( Directory, DirectoryEntry, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, RawExtrinsicMetadata, ) from swh.model.swhids import ExtendedObjectType, ExtendedSWHID from .utils import ( BASE_TEST_CONFIG, MAPPING_DESCRIPTION_CONTENT_SHA1, MAPPING_DESCRIPTION_CONTENT_SHA1GIT, YARN_PARSER_METADATA, fill_obj_storage, fill_storage, ) TRANSLATOR_TOOL = { "name": "swh-metadata-translator", "version": "0.0.2", "configuration": {"type": "local", "context": "NpmMapping"}, } class ContentMetadataTestIndexer(ContentMetadataIndexer): """Specific Metadata whose configuration is enough to satisfy the indexing tests. """ def parse_config_file(self, *args, **kwargs): assert False, "should not be called; the dir indexer configures it." DIRECTORY_METADATA_CONFIG = { **BASE_TEST_CONFIG, "tools": TRANSLATOR_TOOL, } DEPOSIT_REMD = RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.DIRECTORY, object_id=b"\x02" * 20, ), discovery_date=datetime.datetime.now(tz=datetime.timezone.utc), authority=MetadataAuthority( type=MetadataAuthorityType.DEPOSIT_CLIENT, url="https://example.org/", ), fetcher=MetadataFetcher( name="example-fetcher", version="1.0.0", ), format="sword-v2-atom-codemeta-v2", metadata=""" My Software Author 1 foo@example.org Author 2 """.encode(), origin="https://example.org/jdoe/myrepo", ) GITHUB_REMD = RawExtrinsicMetadata( target=ExtendedSWHID( object_type=ExtendedObjectType.ORIGIN, object_id=b"\x01" * 20, ), discovery_date=datetime.datetime.now(tz=datetime.timezone.utc), authority=MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://example.org/", ), fetcher=MetadataFetcher( name="example-fetcher", version="1.0.0", ), format="application/vnd.github.v3+json", metadata=b'{"full_name": 
"test software", "html_url": "http://example.org/"}', ) class TestMetadata: """ Tests metadata_mock_tool tool for Metadata detection """ def test_directory_metadata_indexer(self): metadata_indexer = DirectoryMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) fill_obj_storage(metadata_indexer.objstorage) fill_storage(metadata_indexer.storage) tool = metadata_indexer.idx_storage.indexer_configuration_get( {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} ) assert tool is not None dir_ = DIRECTORY2 assert ( dir_.entries[0].target == MAPPING_DESCRIPTION_CONTENT_SHA1GIT["json:yarn-parser-package.json"] ) metadata_indexer.idx_storage.content_metadata_add( [ ContentMetadataRow( id=MAPPING_DESCRIPTION_CONTENT_SHA1[ "json:yarn-parser-package.json" ], indexer_configuration_id=tool["id"], metadata=YARN_PARSER_METADATA, ) ] ) metadata_indexer.run([dir_.id]) results = list( metadata_indexer.idx_storage.directory_intrinsic_metadata_get([dir_.id]) ) expected_results = [ DirectoryIntrinsicMetadataRow( id=dir_.id, tool=TRANSLATOR_TOOL, metadata=YARN_PARSER_METADATA, mappings=["npm"], ) ] for result in results: del result.tool["id"] assert results == expected_results def test_directory_metadata_indexer_single_root_dir(self): metadata_indexer = DirectoryMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) fill_obj_storage(metadata_indexer.objstorage) fill_storage(metadata_indexer.storage) # Add a parent directory, that is the only directory at the root # of the directory dir_ = DIRECTORY2 assert ( dir_.entries[0].target == MAPPING_DESCRIPTION_CONTENT_SHA1GIT["json:yarn-parser-package.json"] ) new_dir = Directory( entries=( DirectoryEntry( name=b"foobar-1.0.0", type="dir", target=dir_.id, perms=16384, ), ), ) assert new_dir.id is not None metadata_indexer.storage.directory_add([new_dir]) tool = metadata_indexer.idx_storage.indexer_configuration_get( {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} ) assert tool is not None metadata_indexer.idx_storage.content_metadata_add( [ 
ContentMetadataRow( id=MAPPING_DESCRIPTION_CONTENT_SHA1[ "json:yarn-parser-package.json" ], indexer_configuration_id=tool["id"], metadata=YARN_PARSER_METADATA, ) ] ) metadata_indexer.run([new_dir.id]) results = list( metadata_indexer.idx_storage.directory_intrinsic_metadata_get([new_dir.id]) ) expected_results = [ DirectoryIntrinsicMetadataRow( id=new_dir.id, tool=TRANSLATOR_TOOL, metadata=YARN_PARSER_METADATA, mappings=["npm"], ) ] for result in results: del result.tool["id"] assert results == expected_results def test_extrinsic_metadata_indexer_unknown_format(self, mocker): """Should be ignored when unknown format""" metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") remd = attr.evolve(GITHUB_REMD, format="unknown format") results = metadata_indexer.index(remd.id, data=remd) assert metadata_indexer.storage.method_calls == [] assert results == [] def test_extrinsic_metadata_indexer_github(self, mocker): """Nominal case, calling the mapping and storing the result""" origin = "https://example.org/jdoe/myrepo" metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) metadata_indexer.catch_exceptions = False metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") metadata_indexer.storage.origin_get_by_sha1.return_value = [{"url": origin}] tool = metadata_indexer.idx_storage.indexer_configuration_get( {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} ) assert tool is not None assert metadata_indexer.process_journal_objects( {"raw_extrinsic_metadata": [GITHUB_REMD.to_dict()]} ) == {"status": "eventful", "origin_extrinsic_metadata:add": 1} assert metadata_indexer.storage.method_calls == [ call.origin_get_by_sha1([b"\x01" * 20]) ] results = list( metadata_indexer.idx_storage.origin_extrinsic_metadata_get([origin]) ) assert results == [ OriginExtrinsicMetadataRow( id="https://example.org/jdoe/myrepo", tool={"id": tool["id"], 
**TRANSLATOR_TOOL}, metadata={ "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "id": "http://example.org/", "type": "https://forgefed.org/ns#Repository", "name": "test software", }, from_remd_id=GITHUB_REMD.id, mappings=["github"], ) ] def test_extrinsic_metadata_indexer_firstparty_deposit(self, mocker): """Also nominal case, calling the mapping and storing the result""" origin = "https://example.org/jdoe/myrepo" metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) metadata_indexer.catch_exceptions = False metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") metadata_indexer.storage.origin_get_by_sha1.return_value = [{"url": origin}] tool = metadata_indexer.idx_storage.indexer_configuration_get( {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} ) assert tool is not None assert metadata_indexer.process_journal_objects( {"raw_extrinsic_metadata": [DEPOSIT_REMD.to_dict()]} ) == {"status": "eventful", "origin_extrinsic_metadata:add": 1} assert metadata_indexer.storage.method_calls == [ call.origin_get_by_sha1( [b"\xb1\x0c\\\xd2w\x1b\xdd\xac\x07\xdb\xdf>\x93O1\xd0\xc9L\x0c\xcf"] ) ] results = list( metadata_indexer.idx_storage.origin_extrinsic_metadata_get([origin]) ) assert results == [ OriginExtrinsicMetadataRow( id="https://example.org/jdoe/myrepo", tool={"id": tool["id"], **TRANSLATOR_TOOL}, metadata={ "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "author": [ {"email": "foo@example.org", "name": "Author 1"}, {"name": "Author 2"}, ], "name": "My Software", }, from_remd_id=DEPOSIT_REMD.id, mappings=["sword-codemeta"], ) ] def test_extrinsic_metadata_indexer_thirdparty_deposit(self, mocker): """Metadata-only deposit: currently ignored""" origin = "https://not-from-example.org/jdoe/myrepo" metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) metadata_indexer.catch_exceptions = False metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") 
metadata_indexer.storage.origin_get_by_sha1.return_value = [{"url": origin}] tool = metadata_indexer.idx_storage.indexer_configuration_get( {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} ) assert tool is not None assert metadata_indexer.process_journal_objects( {"raw_extrinsic_metadata": [DEPOSIT_REMD.to_dict()]} ) == {"status": "uneventful", "origin_extrinsic_metadata:add": 0} assert metadata_indexer.storage.method_calls == [ call.origin_get_by_sha1( [b"\xb1\x0c\\\xd2w\x1b\xdd\xac\x07\xdb\xdf>\x93O1\xd0\xc9L\x0c\xcf"] ) ] results = list( metadata_indexer.idx_storage.origin_extrinsic_metadata_get([origin]) ) assert results == [] def test_extrinsic_metadata_indexer_nonforge_authority(self, mocker): """Early abort on non-forge authorities""" metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") remd = attr.evolve( GITHUB_REMD, authority=attr.evolve( GITHUB_REMD.authority, type=MetadataAuthorityType.REGISTRY ), ) results = metadata_indexer.index(remd.id, data=remd) assert metadata_indexer.storage.method_calls == [] assert results == [] def test_extrinsic_metadata_indexer_thirdparty_authority(self, mocker): """Should be ignored when authority URL does not match the origin""" origin = "https://different-domain.example.org/jdoe/myrepo" metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) metadata_indexer.catch_exceptions = False metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") metadata_indexer.storage.origin_get_by_sha1.return_value = [{"url": origin}] tool = metadata_indexer.idx_storage.indexer_configuration_get( {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} ) assert tool is not None results = metadata_indexer.index(GITHUB_REMD.id, data=GITHUB_REMD) assert metadata_indexer.storage.method_calls == [ call.origin_get_by_sha1([b"\x01" * 20]) ] assert results == [] def 
test_extrinsic_metadata_indexer_duplicate_origin(self, mocker): - """Nominal case, calling the mapping and storing the result""" + """Two metadata objects with the same origin target""" origin = "https://example.org/jdoe/myrepo" metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) metadata_indexer.catch_exceptions = False metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") metadata_indexer.storage.origin_get_by_sha1.return_value = [{"url": origin}] tool = metadata_indexer.idx_storage.indexer_configuration_get( {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} ) assert tool is not None assert metadata_indexer.process_journal_objects( { "raw_extrinsic_metadata": [ GITHUB_REMD.to_dict(), {**GITHUB_REMD.to_dict(), "id": b"\x00" * 20}, ] } ) == {"status": "eventful", "origin_extrinsic_metadata:add": 1} results = list( metadata_indexer.idx_storage.origin_extrinsic_metadata_get([origin]) ) assert len(results) == 1, results assert results[0].from_remd_id == b"\x00" * 20 + + def test_extrinsic_directory_metadata_indexer_duplicate_origin(self, mocker): + """Two metadata objects on directories, but with an origin context""" + origin = DEPOSIT_REMD.origin + + metadata_indexer = ExtrinsicMetadataIndexer(config=DIRECTORY_METADATA_CONFIG) + metadata_indexer.catch_exceptions = False + metadata_indexer.storage = mocker.patch.object(metadata_indexer, "storage") + metadata_indexer.storage.origin_get_by_sha1.return_value = [{"url": origin}] + + tool = metadata_indexer.idx_storage.indexer_configuration_get( + {f"tool_{k}": v for (k, v) in TRANSLATOR_TOOL.items()} + ) + assert tool is not None + + assert metadata_indexer.process_journal_objects( + { + "raw_extrinsic_metadata": [ + DEPOSIT_REMD.to_dict(), + { + **DEPOSIT_REMD.to_dict(), + "id": b"\x00" * 20, + "target": "swh:1:dir:" + "01" * 20, + }, + ] + } + ) == {"status": "eventful", "origin_extrinsic_metadata:add": 1} + + results = list( + 
metadata_indexer.idx_storage.origin_extrinsic_metadata_get([origin]) + ) + assert len(results) == 1, results + assert results[0].from_remd_id == b"\x00" * 20