diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3cc45b3..05398bb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,49 +1,42 @@ repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.4.0 - hooks: - - id: trailing-whitespace - - id: check-json - - id: check-yaml + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: trailing-whitespace + - id: check-json + - id: check-yaml -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.3 - hooks: - - id: flake8 + - repo: https://gitlab.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 -- repo: https://github.com/codespell-project/codespell - rev: v1.16.0 - hooks: - - id: codespell + - repo: https://github.com/codespell-project/codespell + rev: v2.1.0 + hooks: + - id: codespell + name: Check source code spelling + stages: [commit] + - id: codespell + name: Check commit message spelling + stages: [commit-msg] -- repo: local - hooks: - - id: mypy - name: mypy - entry: mypy - args: [swh] - pass_filenames: false - language: system - types: [python] + - repo: local + hooks: + - id: mypy + name: mypy + entry: mypy + args: [swh] + pass_filenames: false + language: system + types: [python] -- repo: https://github.com/PyCQA/isort - rev: 5.5.2 - hooks: - - id: isort - -- repo: https://github.com/python/black - rev: 19.10b0 - hooks: - - id: black - -# unfortunately, we are far from being able to enable this... -# - repo: https://github.com/PyCQA/pydocstyle.git -# rev: 4.0.0 -# hooks: -# - id: pydocstyle -# name: pydocstyle -# description: pydocstyle is a static analysis tool for checking compliance with Python docstring conventions. -# entry: pydocstyle --convention=google -# language: python -# types: [python] + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + - repo: https://github.com/python/black + rev: 19.10b0 + hooks: + - id: black diff --git a/PKG-INFO b/PKG-INFO index ae50202..e55a226 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,46 +1,46 @@ Metadata-Version: 2.1 Name: swh.model -Version: 4.4.0 +Version: 5.0.0 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-model Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-model/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: cli Provides-Extra: testing-minimal Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-model ========= Implementation of the Data model of the Software Heritage project, used to archive source code artifacts. 
This module defines the notion of SoftWare Heritage persistent IDentifiers (SWHIDs) and provides tools to compute them: ```sh $ swh-identify fork.c kmod.c sched/deadline.c swh:1:cnt:2e391c754ae730bd2d8520c2ab497c403220c6e3 fork.c swh:1:cnt:0277d1216f80ae1adeed84a686ed34c9b2931fc2 kmod.c swh:1:cnt:57b939c81bce5d06fa587df8915f05affbe22b82 sched/deadline.c $ swh-identify --no-filename /usr/src/linux/kernel/ swh:1:dir:f9f858a48d663b3809c9e2f336412717496202ab ``` diff --git a/debian/changelog b/debian/changelog index e68969c..df8d79f 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,1261 +1,1268 @@ -swh-model (4.4.0-1~swh1~bpo10+1) buster-swh; urgency=medium - - * Rebuild for buster-swh - - -- Software Heritage autobuilder (on jenkins-debian1) Fri, 21 Jan 2022 13:14:30 +0000 +swh-model (5.0.0-1~swh1) unstable-swh; urgency=medium + + * New upstream release 5.0.0 - (tagged by Valentin Lorentz + on 2022-03-16 10:33:49 +0100) + * Upstream changes: - v5.0.0 - * Fix f-string - * Fix + crash in check_entries. - * Add missing __slots__ to + HashableObjectWithManifest - * docs: Explain we prefer dir + SWHIDs over rev/rel. - * Remove 'offset' and 'negative_utc' + arguments and make them optional - * Remove deprecated property + 'TimestampWithTimezone.offset' + + -- Software Heritage autobuilder (on jenkins-debian1) Wed, 16 Mar 2022 09:38:26 +0000 swh-model (4.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.4.0 - (tagged by Valentin Lorentz on 2022-01-21 14:08:57 +0100) * Upstream changes: - v4.4.0 - * model: Add support for more edge cases in _parse_offset_bytes - * model: Add method 'TimestampWithTimezone.offset_minutes' -- Software Heritage autobuilder (on jenkins-debian1) Fri, 21 Jan 2022 13:12:25 +0000 swh-model (4.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.3.0 - (tagged by Valentin Lorentz on 2022-01-14 15:10:34 +0100) * Upstream changes: - v4.3.0 - * docs: Add anchors to important sections of persistent-identifiers.rst - * Fix TimestampWithTimezone.from_dict() on datetimes before 1970 with non- integer seconds - * TimestampWithTimezone: Make 'offset' and 'negative_utc' optional -- Software Heritage autobuilder (on jenkins-debian1) Fri, 14 Jan 2022 14:13:48 +0000 swh-model (4.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.2.0 - (tagged by Valentin Lorentz on 2022-01-10 15:56:36 +0100) * Upstream changes: - v4.2.0 - * git_objects: Use raw offset_bytes to format dates, and remove format_offset() -- Software Heritage autobuilder (on jenkins-debian1) Mon, 10 Jan 2022 14:59:18 +0000 swh-model (4.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.1.0 - (tagged by Nicolas Dandrimont on 2021-12-22 15:58:36 +0100) * Upstream changes: - Release swh.model v4.1.0 - Drop pre-3.6 blake2 compatibility, which hasn't been in use since - we've mandated python3.7 anyway. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 22 Dec 2021 15:01:40 +0000 swh-model (4.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.0.0 - (tagged by Valentin Lorentz on 2021-12-22 13:24:58 +0100) * Upstream changes: - v4.0.0 - * Add attribute TimestampWithTimezone.offset_bytes, to store raw Git offsets - * model: Add a check() method to model objects - * test_model: Fix compatibility with pytest-xdist - * docs: Update the data model description - * hypothesis_strategies: Generate only consistent directory entry permissions. 
- * model: Add a raw_manifest attribute -- Software Heritage autobuilder (on jenkins-debian1) Wed, 22 Dec 2021 12:28:54 +0000 swh-model (3.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 3.2.0 - (tagged by Valentin Lorentz on 2021-12-15 13:36:48 +0100) * Upstream changes: - v3.2.0 - * hypothesis_strategies: Ensure to generate valid directory entry name - * from_disk: Implement Directory.__contains__ -- Software Heritage autobuilder (on jenkins-debian1) Wed, 15 Dec 2021 12:39:37 +0000 swh-model (3.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 3.1.0 - (tagged by Antoine Lambert on 2021-12-06 19:35:40 +0100) * Upstream changes: - version 0.3.1 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 06 Dec 2021 18:51:48 +0000 swh-model (3.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 3.0.0 - (tagged by Valentin Lorentz on 2021-09-28 15:59:18 +0200) * Upstream changes: - v3.0.0 - * Add bazaar as supported revision type - * Move SWHID classes and functions from identifiers.py to swhids.py - * Refactor identifiers & model to make *_git_object() functions work on model classes instead of dicts - * Move manifest computation functions from identifiers.py to git_objects.py - * Remove identifier_to_bytes and identifier_to_hex - * Deprecate identifiers.py -- Software Heritage autobuilder (on jenkins-debian1) Tue, 28 Sep 2021 14:05:19 +0000 swh-model (2.9.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.9.0 - (tagged by Valentin Lorentz on 2021-09-16 14:21:44 +0200) * Upstream changes: - v2.9.0 - * HashableObject: Add type annotation for 'id' attribute -- Software Heritage autobuilder (on jenkins-debian1) Thu, 16 Sep 2021 12:24:48 +0000 swh-model (2.8.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.8.0 - (tagged by Antoine R. 
Dumont (@ardumont) on 2021-07-27 16:20:14 +0200) * Upstream changes: - v2.8.0 - Add a CVS revision type for use with the CVS loader -- Software Heritage autobuilder (on jenkins-debian1) Tue, 27 Jul 2021 14:26:10 +0000 swh-model (2.7.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.7.0 - (tagged by Nicolas Dandrimont on 2021-07-23 16:50:59 +0200) * Upstream changes: - Release swh.model 2.7.0 - Add versioning of ExtID objects -- Software Heritage autobuilder (on jenkins-debian1) Fri, 23 Jul 2021 14:53:44 +0000 swh-model (2.6.4-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.4 - (tagged by Daniele Serafini on 2021-06-29 13:42:54 +0100) * Upstream changes: - make deduplication optional when iterating over the merkle tree -- Software Heritage autobuilder (on jenkins-debian1) Fri, 02 Jul 2021 16:11:31 +0000 swh-model (2.6.3-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.3 - (tagged by Valentin Lorentz on 2021-06-25 16:13:53 +0200) * Upstream changes: - v2.6.3 - * hypothesis_strategies: Generate None metadata instead of {} -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Jun 2021 14:17:34 +0000 swh-model (2.6.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.2 - (tagged by Valentin Lorentz on 2021-06-25 12:40:45 +0200) * Upstream changes: - v2.6.2 - * from_disk: get swhid from Content/Directory objects - * hypothesis_strategies: Add raw_extrinsic_metadata() strategy -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Jun 2021 10:44:34 +0000 swh-model (2.6.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.1 - (tagged by Antoine Lambert on 2021-06-16 11:58:53 +0200) * Upstream changes: - version 2.6.1 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 16 Jun 2021 10:03:28 +0000 swh-model (2.6.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.0 - (tagged by David Douard on 2021-06-15 16:51:49 +0200) * Upstream changes: - v2.6.0 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 15 Jun 2021 14:56:10 +0000 swh-model (2.5.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.5.1 - (tagged by David Douard on 2021-05-20 15:22:50 +0200) * Upstream changes: - v2.5.1 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 20 May 2021 13:40:27 +0000 swh-model (2.5.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.5.0 - (tagged by Valentin Lorentz on 2021-05-11 12:02:49 +0200) * Upstream changes: - v2.5.0 - * identifiers: Expose manifest/git_object computation -- Software Heritage autobuilder (on jenkins-debian1) Tue, 11 May 2021 10:07:47 +0000 swh-model (2.4.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.2 - (tagged by Valentin Lorentz on 2021-05-06 14:31:04 +0200) * Upstream changes: - v2.4.2 - * docs/persistent-identifiers: Add guidelines for fixing invalid SWHIDs. 
- * Blacklist attr 21.1.0 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 May 2021 12:35:43 +0000 swh-model (2.4.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.1 - (tagged by Antoine Lambert on 2021-04-29 14:19:28 +0200) * Upstream changes: - version 2.4.1 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 29 Apr 2021 12:23:21 +0000 swh-model (2.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.0 - (tagged by Antoine Lambert on 2021-04-13 15:26:51 +0200) * Upstream changes: - version 2.4.0 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 13 Apr 2021 13:31:21 +0000 swh-model (2.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.3.0 - (tagged by Nicolas Dandrimont on 2021-03-19 17:15:00 +0100) * Upstream changes: - Release swh.model 2.3.0 - Properly truncate RawExtrinsicMetadata objects to a precision of one - second, as does their unique id. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 19 Mar 2021 16:17:48 +0000 swh-model (2.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.2.0 - (tagged by Valentin Lorentz on 2021-03-15 10:32:36 +0100) * Upstream changes: - v2.2.0 - * Add a swhid() method to RawExtrinsicMetadata. -- Software Heritage autobuilder (on jenkins-debian1) Mon, 15 Mar 2021 09:35:25 +0000 swh-model (2.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.1.0 - (tagged by David Douard on 2021-03-11 14:19:00 +0100) * Upstream changes: - v2.1.0 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 11 Mar 2021 13:21:40 +0000 swh-model (2.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.0.0 - (tagged by Valentin Lorentz on 2021-03-05 10:11:47 +0100) * Upstream changes: - v2.0.0 - Breaking change: - * model: Remove override of RawExtrinsicMetadata.unique_key(), so it now returns the hash. - Other changes: - * identifiers: Add raw_extrinsic_metadata_identifier - * model: Add 'id' field to RawExtrinsicMetadata -- Software Heritage autobuilder (on jenkins-debian1) Fri, 05 Mar 2021 09:14:35 +0000 swh-model (1.0.1-1~swh1) unstable-swh; urgency=medium * New upstream release 1.0.1 - (tagged by Valentin Lorentz on 2021-03-04 15:08:55 +0100) * Upstream changes: - v1.0.1 - * cli: stop using the deprecated SWHID class - * identifiers: Remove the deprecated SWHID class -- Software Heritage autobuilder (on jenkins-debian1) Thu, 04 Mar 2021 14:11:09 +0000 swh-model (1.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 1.0.0 - (tagged by Valentin Lorentz on 2021-03-01 18:01:29 +0100) * Upstream changes: - v1.0.0 - Two breaking changes: - * RawExtrinsicMetadata: Use ExtendedSWHID as target and remove type - * RawExtrinsicMetadata: Use CoreSWHID instead of SWHID for contexts - And two minor changes: - * Add CoreSWHID.to_extended() - * Add a swhid() method to all hashable objects. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 02 Mar 2021 08:18:42 +0000 swh-model (0.13.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.13.0 - (tagged by Valentin Lorentz on 2021-02-25 17:56:31 +0100) * Upstream changes: - v0.13.0 - * Update persistent identifiers doc with pip install info - * Make explicit Python 3 dependency - * tests: Clean hashutil._blake2_hash_cache after mocking blake2 functions. 
- * Introduce new classes CoreSWHID/QualifiedSWHID/ExtendedSWHID - * Deprecate SWHID class - * Disallow 'ori' type in SWHID class -- Software Heritage autobuilder (on jenkins-debian1) Thu, 25 Feb 2021 16:59:26 +0000 swh-model (0.12.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.12.0 - (tagged by David Douard on 2021-01-26 17:22:28 +0100) * Upstream changes: - v0.12.0 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 26 Jan 2021 16:27:16 +0000 swh-model (0.11.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.0 - (tagged by Antoine R. Dumont (@ardumont) on 2021-01-20 15:31:54 +0100) * Upstream changes: - v0.11.0 - model: Allow new status values not_found and failed to OriginVisitStatus -- Software Heritage autobuilder (on jenkins-debian1) Wed, 20 Jan 2021 14:34:53 +0000 swh-model (0.10.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.10.0 - (tagged by Vincent SELLIER on 2021-01-14 14:13:22 +0100) * Upstream changes: - v0.10.0 - * 2021-01-12 Add an optional type field on OriginVisitStatus object - * 2021-01-12 test_identifiers: Reorder SWHID tests. - * 2021-01-12 test_identifiers: Make sure that {directory,revision,release,snapshot}_identifier() doesn't just return a value from the dict. - * 2021-01-04 Add missing slots=True for Directory. - * 2020-12-19 SWHID parsing: simplify and deduplicate validation logic - * 2020-12-14 model: Make all classes slotted. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 14 Jan 2021 13:16:10 +0000 swh-model (0.9.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.9.0 - (tagged by Nicolas Dandrimont on 2020-11-17 10:12:47 +0100) * Upstream changes: - Release swh.model v0.9.0 - Drop backwards compatibility for RawExtrinsicMetadata.id -- Software Heritage autobuilder (on jenkins-debian1) Tue, 17 Nov 2020 09:15:43 +0000 swh-model (0.8.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.8.0 - (tagged by Antoine R. Dumont (@ardumont) on 2020-11-12 13:09:48 +0100) * Upstream changes: - v0.8.0 - identifiers.parse_swhid: Make SWHIDs with whitespaces invalid - identifiers.parse_swhid: Check the swhid qualifiers and fail if invalid - model.identifiers: Improve error messages in case of invalid SWHIDs -- Software Heritage autobuilder (on jenkins-debian1) Thu, 12 Nov 2020 12:10:46 +0000 swh-model (0.7.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.3 - (tagged by Nicolas Dandrimont on 2020-10-27 16:03:58 +0100) * Upstream changes: - Release swh.model v0.7.3 - Reduce the amount of DeprecationWarnings for RawExtrinsicMetadata -- Software Heritage autobuilder (on jenkins-debian1) Tue, 27 Oct 2020 15:06:50 +0000 swh-model (0.7.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.2 - (tagged by Nicolas Dandrimont on 2020-10-27 10:37:19 +0100) * Upstream changes: - Release swh.model v0.7.2 - Add a new -- exclude flag to swh identify - Migrate RawExtrinsicMetadata `id` attribute to `target` - Future-proof the swh.model.model.HashableObject interface -- Software Heritage autobuilder (on jenkins-debian1) Tue, 27 Oct 2020 09:41:19 +0000 swh-model (0.7.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.1 - (tagged by Valentin Lorentz on 2020-10-12 12:16:47 +0200) * Upstream changes: - v0.7.1 - Add a 'unique_key' method on model objects -- Software Heritage autobuilder (on jenkins-debian1) Mon, 12 Oct 2020 10:19:10 +0000 swh-model (0.7.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.0 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-10-07 12:47:02 +0200) * Upstream changes: - v0.7.0 - cli: make SWHIDParamType return SWHID type instead of string - tox.ini: pin black to the pre- commit version (19.10b0) to avoid flip-flops - Merge the two test_identifiers.py files. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 07 Oct 2020 10:47:55 +0000 swh-model (0.6.7-3~swh1) unstable-swh; urgency=medium * Fix a typo in d/control. -- David Douard Fri, 25 Sep 2020 17:36:14 +0200 swh-model (0.6.7-2~swh1) unstable-swh; urgency=medium * Fix dependencies on d/control -- David Douard Fri, 25 Sep 2020 17:03:31 +0200 swh-model (0.6.7-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.7 - (tagged by David Douard on 2020-09-25 15:28:58 +0200) * Upstream changes: - v0.6.7 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Sep 2020 13:32:18 +0000 swh-model (0.6.6-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.6 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-07 10:18:50 +0200) * Upstream changes: - v0.6.6 - model.Content.to_dict: Remove ctime entry when it's None - model: Add Sha1 alias -- Software Heritage autobuilder (on jenkins-debian1) Fri, 07 Aug 2020 08:22:35 +0000 swh-model (0.6.5-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.5 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-06 19:59:26 +0200) * Upstream changes: - v0.6.5 - model: Add final object_type field on metadata related model objects -- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 Aug 2020 18:01:05 +0000 swh-model (0.6.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.4 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-06 18:44:48 +0200) * Upstream changes: - v0.6.4 - Use correct setuptools-scm keyword this time -- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 Aug 2020 16:47:14 +0000 swh-model (0.6.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.1 - (tagged by Valentin Lorentz on 2020-07-31 10:47:56 +0200) * Upstream changes: - v0.6.1 - * Declare pytest markers - * Import Mapping from collections.abc instead of collections - * Fix incorrectly typed null constants in extra_headers byte strings - * add ImmutableDict.__repr__ - * Add missing object_type class attributes on MetadataAuthority, MetadataFetcher, and RawExtrinsicMetadata. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 31 Jul 2020 08:51:42 +0000 swh-model (0.6.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.0 - (tagged by Valentin Lorentz on 2020-07-20 12:45:59 +0200) * Upstream changes: - v0.6.0 - * Rework dia -> pdf pipeline for inkscape 1.0 - * Rename MetadataAuthorityType.DEPOSIT to MetadataAuthorityType.DEPOSIT_CLIENT. 
-- Software Heritage autobuilder (on jenkins-debian1) Mon, 20 Jul 2020 10:49:27 +0000 swh-model (0.5.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.5.0 - (tagged by Antoine Lambert on 2020-07-08 17:12:44 +0200) * Upstream changes: - version 0.5.0 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 08 Jul 2020 15:23:51 +0000 swh-model (0.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.0 - (tagged by David Douard on 2020-07-06 14:13:31 +0200) * Upstream changes: - v0.4.0 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 06 Jul 2020 12:16:51 +0000 swh-model (0.3.8-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.8 - (tagged by Antoine Lambert on 2020-07-03 16:06:44 +0200) * Upstream changes: - version 0.3.8 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 03 Jul 2020 14:10:51 +0000 swh-model (0.3.7-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.7 - (tagged by Antoine R. Dumont (@ardumont) on 2020-07-02 15:15:46 +0200) * Upstream changes: - v0.3.7 - Refactor common loader behavior within from_disk.iter_directory - Unify object_type some more within the merkle and from_disk modules -- Software Heritage autobuilder (on jenkins-debian1) Thu, 02 Jul 2020 13:17:32 +0000 swh-model (0.3.6-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.6 - (tagged by Antoine R. Dumont (@ardumont) on 2020-07-01 15:46:23 +0200) * Upstream changes: - v0.3.6 - model.OriginVisit: Drop obsolete fields -- Software Heritage autobuilder (on jenkins-debian1) Wed, 01 Jul 2020 13:48:43 +0000 swh-model (0.3.5-2~swh1) unstable-swh; urgency=medium * Update dependency + Bump -- Antoine R. Dumont (@ardumont) Tue, 30 Jun 2020 12:40:52 +0200 swh-model (0.3.5-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.5 - (tagged by Antoine R. Dumont (@ardumont) on 2020-06-30 11:21:07 +0200) * Upstream changes: - v0.3.5 - Tag model entities with their "object_type" -- Software Heritage autobuilder (on jenkins-debian1) Tue, 30 Jun 2020 09:31:43 +0000 swh-model (0.3.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.4 - (tagged by Antoine R. Dumont (@ardumont) on 2020-06-24 10:43:48 +0200) * Upstream changes: - v0.3.4 - OriginVisitStatus: Allow "created" status - model.OriginVisit: Make obsolete fields optional - swh.model.model.OriginVisit: Drop the dateutil.parser.parse use -- Software Heritage autobuilder (on jenkins-debian1) Wed, 24 Jun 2020 08:47:12 +0000 swh-model (0.3.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.3 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-06-17 09:38:34 +0200) * Upstream changes: - v0.3.3 - model.hypothesis_strategies: Make metadata always none on origin_visit -- Software Heritage autobuilder (on jenkins-debian1) Wed, 17 Jun 2020 07:40:50 +0000 swh-model (0.3.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.2 - (tagged by David Douard on 2020-06-16 10:41:05 +0200) * Upstream changes: - v0.3.2 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 16 Jun 2020 08:45:55 +0000 swh-model (0.3.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.1 - (tagged by David Douard on 2020-06-15 09:43:30 +0200) * Upstream changes: - v0.3.1 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 15 Jun 2020 07:52:09 +0000 swh-model (0.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.0 - (tagged by David Douard on 2020-06-03 11:59:02 +0200) * Upstream changes: - v0.3.0 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 03 Jun 2020 10:04:35 +0000 swh-model (0.2.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.2 - (tagged by David Douard on 2020-06-03 11:28:38 +0200) * Upstream changes: - v0.2.2 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 03 Jun 2020 09:33:46 +0000 swh-model (0.2.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.1 - (tagged by David Douard on 2020-05-29 17:39:37 +0200) * Upstream changes: - v0.2.1 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 29 May 2020 15:43:44 +0000 swh-model (0.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.0 - (tagged by David Douard on 2020-05-25 10:06:12 +0200) * Upstream changes: - v0.2.0 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 25 May 2020 08:11:07 +0000 swh-model (0.1.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.1.1 - (tagged by Antoine R. Dumont (@ardumont) on 2020-05-05 14:43:40 +0200) * Upstream changes: - v0.1.1 - Make aware_datetimes() generate only ISO8601-encodable datetimes -- Software Heritage autobuilder (on jenkins-debian1) Tue, 05 May 2020 12:45:37 +0000 swh-model (0.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.1.0 - (tagged by Stefano Zacchiroli on 2020-04-30 19:23:13 +0200) * Upstream changes: - v0.1.0 / 2020-04-30 - * SWHID spec: full reread - * setup.py: add documentation link - * hypothesis_strategies: Generate aware datetimes instead of naive ones. - * doc: check-in IANA registration template for the "swh" URI scheme - * Restructure SWHID documentation in preparation for T2385 - merge grammars into a single one - explain better that SWHIDs are made up of core identifier + qualifiers - separate qualifier into context and fragment onex - add reference to swh-identify -- Software Heritage autobuilder (on jenkins-debian1) Thu, 30 Apr 2020 20:31:00 +0000 swh-model (0.0.69-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.69 - (tagged by Stefano Zacchiroli on 2020-04-28 16:05:58 +0200) * Upstream changes: - v0.0.69 / 2020-04-28 - * SWHID spec: bump version to 1.3 and add last modified date - * SWHID spec: make SWHIDs plural where needed - * SWHID spec: simplify and generalize escaping requirements - * SWHID spec: add support for IRI - * SWHID: deal with escaping in origin qualifiers - * SWHID doc: improve wording of intrinsic parts v. 
the rest -- Software Heritage autobuilder (on jenkins-debian1) Tue, 28 Apr 2020 14:10:35 +0000 swh-model (0.0.68-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.68 - (tagged by David Douard on 2020-04-21 16:20:58 +0200) * Upstream changes: - v0.0.68 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 21 Apr 2020 14:28:38 +0000 swh-model (0.0.67-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.67 - (tagged by Stefano Zacchiroli on 2020-04-17 17:49:42 +0200) * Upstream changes: - v0.0.67 / 2020-04-17 - * CLI: add test for swh identify w/o args - * CLI: require explicit "-" to identify via stdin - * SWHID doc: fix minor grammar issue - * SWHID doc: fix link in CISE paper reference - * identifiers.py: reference to SWHIDs using explicit anchors - * swh identify: embrace SWHID naming in user-facing doc/messages - * PID doc: embrace the SWHID naming - * PID doc: add reference to CISE paper - * doc: document identify CLI -- Software Heritage autobuilder (on jenkins-debian1) Fri, 17 Apr 2020 15:54:03 +0000 swh-model (0.0.66-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.66 - (tagged by Antoine R. Dumont (@ardumont) on 2020-04-10 16:46:31 +0200) * Upstream changes: - v0.0.66 - rename-visit-status model: Rename OriginVisitUpdate to OriginVisitStatus -- Software Heritage autobuilder (on jenkins-debian1) Fri, 10 Apr 2020 14:48:17 +0000 swh-model (0.0.65-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.65 - (tagged by Antoine R. Dumont (@ardumont) on 2020-04-09 16:25:24 +0200) * Upstream changes: - v0.0.65 - from_disk: path parameter to dir_filter functions - Enable black -- Software Heritage autobuilder (on jenkins-debian1) Thu, 09 Apr 2020 14:27:21 +0000 swh-model (0.0.64-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.64 - (tagged by Antoine Lambert on 2020-04-03 15:00:36 +0200) * Upstream changes: - version 0.0.64 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 03 Apr 2020 13:03:34 +0000 swh-model (0.0.63-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.63 - (tagged by Antoine R. Dumont (@ardumont) on 2020-04-01 10:07:07 +0200) * Upstream changes: - v0.0.63 - origin/master model: Add new OriginVisitUpdate model object + test strategy - docs: Extend SWH PID definition with additional context qualifiers. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 01 Apr 2020 08:08:58 +0000 swh-model (0.0.62-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.62 - (tagged by Valentin Lorentz on 2020-03-26 14:19:40 +0100) * Upstream changes: - v0.0.62 - * identifiers: encode origin URLs in utf-8 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 26 Mar 2020 13:22:20 +0000 swh-model (0.0.60-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.60 - (tagged by Valentin Lorentz on 2020-03-05 12:05:18 +0100) * Upstream changes: - v0.0.60 - * Add a method to generate Content/SkippedContent from binary data - * Draw contents from a byte string instead of generating arbitrary hashes - * Add classmethod Person.from_address, to parse from 'name ' strings. 
-- Software Heritage autobuilder (on jenkins-debian1) Thu, 05 Mar 2020 11:07:50 +0000 swh-model (0.0.59-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.59 - (tagged by Nicolas Dandrimont on 2020-02-27 18:03:53 +0100) * Upstream changes: - Release swh.model v0.0.59 - Use proper hypothesis strategy to generate Person objects -- Software Heritage autobuilder (on jenkins-debian1) Thu, 27 Feb 2020 17:07:16 +0000 swh-model (0.0.57-1~swh2) unstable-swh; urgency=medium * Bump dependency release -- Antoine R. Dumont (@ardumont) Thu, 27 Feb 2020 16:24:21 +0200 swh-model (0.0.57-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.57 - (tagged by Valentin Lorentz on 2020-02-27 15:17:04 +0100) * Upstream changes: - v0.0.57 - * Add method BaseModel.hashes(). - * Re-introduce the swh.core dependency in swh.model[cli] - * Add support for skipping large contents in from_disk. - * Add to_model() method to from_disk.{Content,Directory}, to convert to canonical model objects. - * Take the value of MerkleNode.data into account to compute equality. - * Add method MerkleNode.iter_tree, to visit all nodes in the subtree of a node. - * Add from_datetime and from_iso8601 constructors for TimestampWithTimezone. - * Make attributes name and email of Person optional. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 27 Feb 2020 14:20:21 +0000 swh-model (0.0.56-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.56 - (tagged by Valentin Lorentz on 2020-02-10 11:46:35 +0100) * Upstream changes: - v0.0.56 - Make OriginVisit.snapshot optional. -- Software Heritage autobuilder (on jenkins-debian1) Mon, 10 Feb 2020 10:48:55 +0000 swh-model (0.0.55-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.55 - (tagged by Valentin Lorentz on 2020-02-07 16:13:23 +0100) * Upstream changes: - v0.0.55 - * Make content length mandatory. - * Make 'visible' the default status for present Contents. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 07 Feb 2020 15:16:58 +0000 swh-model (0.0.54-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.54 - (tagged by Valentin Lorentz on 2020-02-06 13:15:45 +0100) * Upstream changes: - v0.0.54 - * Split Content class into two classes, for missing and non-missing contents. 
-- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 Feb 2020 12:18:04 +0000 swh-model (0.0.53-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.53 - (tagged by Valentin Lorentz on 2020-02-03 15:58:31 +0100) * Upstream changes: - v0.0.53 - * hypothesis_strategies/snapshots: Explain last post-processing step - * cli: add support for reading a file content from stdin in 'swh identify' command - * model: Update revision date types to be optional -- Software Heritage autobuilder (on jenkins-debian1) Mon, 03 Feb 2020 15:01:26 +0000 swh-model (0.0.52-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.52 - (tagged by Antoine Lambert on 2019-11-29 16:27:24 +0100) * Upstream changes: - version 0.0.52 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 29 Nov 2019 15:30:57 +0000 swh-model (0.0.51-1~swh3) unstable-swh; urgency=medium * Add manual pytz dependency -- Nicolas Dandrimont Wed, 30 Oct 2019 17:52:33 +0100 swh-model (0.0.51-1~swh2) unstable-swh; urgency=medium * Add missing build-dependency on pytz -- Nicolas Dandrimont Wed, 30 Oct 2019 17:25:55 +0100 swh-model (0.0.51-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.51 - (tagged by Valentin Lorentz on 2019-10-30 15:03:19 +0100) * Upstream changes: - v0.0.51 - Make OriginVisit.origin a string instead of a dict. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 30 Oct 2019 14:05:55 +0000 swh-model (0.0.50-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.50 - (tagged by David Douard on 2019-10-30 09:30:17 +0100) * Upstream changes: - v0.0.50 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 30 Oct 2019 08:32:50 +0000 swh-model (0.0.49-1~swh2) unstable-swh; urgency=medium * Add missing dependency on dulwich for tests -- Nicolas Dandrimont Wed, 23 Oct 2019 14:37:45 +0200 swh-model (0.0.49-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.49 - (tagged by Nicolas Dandrimont on 2019-10-23 14:28:01 +0200) * Upstream changes: - Release swh.model v0.0.49 - Add symbolic refs to swh identify -t snapshot - Cleanup model.BaseModel.to_dict() recursion -- Software Heritage autobuilder (on jenkins-debian1) Wed, 23 Oct 2019 12:30:41 +0000 swh-model (0.0.48-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.48 - (tagged by Nicolas Dandrimont on 2019-10-18 17:06:59 +0200) * Upstream changes: - Release swh.model 0.0.48 - Split CLI dependencies to another subpackage - Stop exporting origin.type in models - Document origin PIDs -- Software Heritage autobuilder (on jenkins-debian1) Fri, 18 Oct 2019 15:11:01 +0000 swh-model (0.0.47-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.47 - (tagged by Stefano Zacchiroli on 2019-09-27 10:20:40 +0200) * Upstream changes: - v0.0.47 - init.py: switch to documented way of extending path -- Software Heritage autobuilder (on jenkins-debian1) Fri, 27 Sep 2019 08:22:54 +0000 swh-model (0.0.46-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.46 - (tagged by Stefano Zacchiroli on 2019-09-20 15:51:17 +0200) * Upstream changes: - v0.0.46 - MANIFEST.in: ship py.typed -- Software Heritage autobuilder (on jenkins-debian1) Fri, 20 Sep 2019 13:53:45 +0000 swh-model (0.0.45-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.45 - (tagged by Stefano Zacchiroli on 2019-09-20 15:09:47 +0200) * Upstream changes: - v0.0.45 - * identifiers.py: do not inherit from on-the-fly namedtuple - * mypy: ignore django- stubs, needed only by hypothesis - * mypy.ini: remove left-over sample section - 
* typing: minimal changes to make a no-op mypy run pass - * fix indentation and spelling: make "make check" happy -- Software Heritage autobuilder (on jenkins-debian1) Fri, 20 Sep 2019 13:12:10 +0000 swh-model (0.0.44-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.44 - (tagged by Valentin Lorentz on 2019-09-04 14:36:01 +0200) * Upstream changes: - Fix Revision.from_dict to allow optional fields. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 04 Sep 2019 13:07:59 +0000 swh-model (0.0.43-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.43 - (tagged by Antoine R. Dumont (@ardumont) on 2019-09-03 14:04:44 +0200) * Upstream changes: - v0.0.43 - swh identify: add support for origin PIDs - identifiers.py: add constants for 'swh:1' and sanitize namespace -- Software Heritage autobuilder (on jenkins-debian1) Tue, 03 Sep 2019 12:09:04 +0000 swh-model (0.0.42-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.42 - (tagged by Valentin Lorentz on 2019-08-22 14:04:03 +0200) * Upstream changes: - v0.0.42 - Tweak swh.model.model to be closer to what swh-storage - accepts for releases and origin visits. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 22 Aug 2019 12:12:22 +0000 swh-model (0.0.41-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.41 - (tagged by Valentin Lorentz on 2019-08-20 11:46:13 +0200) * Upstream changes: - tweaks to swh.model.model to support more valid inputs - * Allow -1 as Content length. - * Add optional 'ctime' field to Content. - * Generated content with status=hidden should have a data field. - * Add a get_hash helper method to Content. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 20 Aug 2019 09:50:09 +0000 swh-model (0.0.40-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.40 - (tagged by Valentin Lorentz on 2019-08-06 14:36:37 +0200) * Upstream changes: - Add SHA1_SIZE constant. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 06 Aug 2019 12:38:36 +0000 swh-model (0.0.39-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.39 - (tagged by Valentin Lorentz on 2019-07-18 12:28:42 +0200) * Upstream changes: - * fix pyblake2 dependency * origin persistent identifiers * release metadata -- Software Heritage autobuilder (on jenkins-debian1) Thu, 18 Jul 2019 10:31:00 +0000 swh-model (0.0.38-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.38 - (tagged by Valentin Lorentz on 2019-06-18 13:40:20 +0200) * Upstream changes: - Remove dependency on swh-core. - This is a fix to workaround pip's inability to correctly solve - extra requirements (swh-model depends on swh-core[], but if other - packages depend on swh-model and swh-core[http], the 'http' extra - does not always get installed). -- Software Heritage autobuilder (on jenkins-debian1) Tue, 18 Jun 2019 11:50:14 +0000 swh-model (0.0.37-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.37 - (tagged by David Douard on 2019-05-15 15:44:21 +0200) * Upstream changes: - cli: add support for --help on the 'identify' cli tool -- Software Heritage autobuilder (on jenkins-debian1) Thu, 13 Jun 2019 14:40:16 +0000 swh-model (0.0.36-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.36 - (tagged by Valentin Lorentz on 2019-04-26 13:33:29 +0200) * Upstream changes: - Prevent from_dict() from changing its input dict. 
-- Software Heritage autobuilder (on jenkins-debian1) Fri, 26 Apr 2019 11:57:45 +0000 swh-model (0.0.35-1~swh2) unstable-swh; urgency=medium * Remove hypothesis directory -- Nicolas Dandrimont Thu, 18 Apr 2019 18:27:33 +0200 swh-model (0.0.35-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.35 - (tagged by Nicolas Dandrimont on 2019-04-11 12:05:11 +0200) * Upstream changes: - Release swh.model v0.0.35 - Fix hypothesis strategies to work in non-UTC timezones -- Software Heritage autobuilder (on jenkins-debian1) Thu, 11 Apr 2019 10:08:14 +0000 swh-model (0.0.34-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.34 - (tagged by Valentin Lorentz on 2019-04-09 18:30:50 +0200) * Upstream changes: - Limit Content.length to what the pgsql storage supports. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 10 Apr 2019 07:45:31 +0000 swh-model (0.0.33-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.33 - (tagged by Valentin Lorentz on 2019-04-08 21:46:28 +0200) * Upstream changes: - Tune the model generation to work with the pgsql storage. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 09 Apr 2019 15:11:51 +0000 swh-model (0.0.32-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.32 - (tagged by Valentin Lorentz on 2019-04-05 19:15:16 +0200) * Upstream changes: - Add a model based using 'attrs' and Hypothesis strategies to generate it. -- Software Heritage autobuilder (on jenkins-debian1) Mon, 08 Apr 2019 12:57:45 +0000 swh-model (0.0.31-1~swh2) unstable-swh; urgency=medium * Add new dependencies on python3-attr and python3-hypothesis -- Nicolas Dandrimont Mon, 08 Apr 2019 14:55:50 +0200 swh-model (0.0.31-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.31 - (tagged by Valentin Lorentz on 2019-04-04 20:46:15 +0200) * Upstream changes: - Make snapshot_identifier add the cycle to the exception's arguments when it detects one. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 05 Apr 2019 09:07:35 +0000 swh-model (0.0.30-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.30 - (tagged by David Douard on 2019-01-08 12:28:35 +0100) * Upstream changes: - v0.0.30 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 09 Jan 2019 17:31:53 +0000 swh-model (0.0.29-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.29 * Reference iPRES paper in PID documentation * Remove deprecated swh.model.hashutil.hash_* functions * Split debian packaging to separate branch -- Nicolas Dandrimont Wed, 31 Oct 2018 18:26:32 +0100 swh-model (0.0.28-1~swh1) unstable-swh; urgency=medium * v0.0.28 * setup: prepare for pypi upload * tests: Initialize tox use * tests: Migrate to pytest * docs: Improve basic repository information * docs: document PID resolution possibilities other than Web UI / * hashutil: Migrate towards MultiHash api -- Antoine R. Dumont (@ardumont) Tue, 23 Oct 2018 16:24:21 +0200 swh-model (0.0.27-1~swh1) unstable-swh; urgency=medium * v0.0.27 * Refactor: Add MultiHash class to improve hash computations * swh.model.hashutil: Improve and clarify docstrings * swh.model.hashutil: Mark hash_* function as deprecated -- Antoine R. 
Dumont (@ardumont) Mon, 17 Sep 2018 12:07:59 +0200 swh-model (0.0.26-1~swh1) unstable-swh; urgency=medium * v0.0.26 * swh.model.identifiers: Open metadata in persistent_identifier method * refactor CLI tests to avoid duplicate assertion pairs * swh-identify: follow symlinks for CLI arguments (by default) * cli.py: prefer os.fsdecode() over manual fiddling with locale.getpref... * swh-identify: add support for passing multiple CLI arguments -- Antoine R. Dumont (@ardumont) Mon, 23 Jul 2018 14:29:54 +0200 swh-model (0.0.25-1~swh1) unstable-swh; urgency=medium * version 0.0.25 -- Antoine Lambert Fri, 29 Jun 2018 11:49:25 +0200 swh-model (0.0.24-1~swh1) unstable-swh; urgency=medium * v0.0.24 * swh.model.cli: Catch specific exception during identifiers check * identifiers: Validate input * identifiers: Raise when error during parsing persistent identifiers * Update blake2 support to be less Debian-specific * add swh-identify CLI tool to compute persistent identifiers * docs: Update high-level documentation (Merkle DAG description, * contextual information for persistent IDs, etc...) -- Antoine R. Dumont (@ardumont) Fri, 22 Jun 2018 15:38:32 +0200 swh-model (0.0.23-1~swh1) unstable-swh; urgency=medium * version 0.0.23 -- Antoine Lambert Tue, 29 May 2018 14:08:45 +0200 swh-model (0.0.22-1~swh1) unstable-swh; urgency=medium * version 0.0.22 -- Antoine Pietri Tue, 30 Jan 2018 18:22:42 +0100 swh-model (0.0.21-1~swh1) unstable-swh; urgency=medium * v0.0.21 * swh.model.identifiers: Add persistent identifier function * docs: document the naming scheme for persistent identifiers * bin/swh-hash-file: new binary to compute SWH-style content identifiers -- Antoine R. Dumont (@ardumont) Wed, 17 Jan 2018 11:06:33 +0100 swh-model (0.0.20-1~swh1) unstable-swh; urgency=medium * v0.0.20 * swh.model.hashutil.hash_data: Optionally integrate length in result * hashutil: add `snapshot` object type for git hashes * docs: add absolute anchor to documentation index -- Antoine R. Dumont (@ardumont) Wed, 20 Dec 2017 10:47:10 +0100 swh-model (0.0.19-1~swh1) unstable-swh; urgency=medium * Release swh.model version 0.0.19 * Update packaging runes -- Nicolas Dandrimont Thu, 12 Oct 2017 18:07:59 +0200 swh-model (0.0.18-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.18 * Replace swh.model.git with swh.model.from_disk (T709). * Clean up documentation -- Nicolas Dandrimont Thu, 05 Oct 2017 20:48:29 +0200 swh-model (0.0.17-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.17 * Clean up pyblake2 requirement for Python 3.5+ -- Nicolas Dandrimont Mon, 26 Jun 2017 14:41:49 +0200 swh-model (0.0.16-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.16 * Make sure we generate proper permissions in directories -- Nicolas Dandrimont Fri, 07 Apr 2017 14:32:34 +0200 swh-model (0.0.15-1~swh1) unstable-swh; urgency=medium * v0.0.15 * Add possibility to compute new blake2 hashes * Add blake2s256 hash as default new hash computation algorithm -- Antoine R. Dumont (@ardumont) Fri, 24 Mar 2017 16:32:35 +0100 swh-model (0.0.14-1~swh1) unstable-swh; urgency=medium * v0.0.14 * Migrate functions from swh.core.hashutil to swh.model.hashutil -- Antoine R. 
Dumont (@ardumont) Wed, 15 Mar 2017 16:00:56 +0100 swh-model (0.0.13-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.13 * Timestamps are now fully integer values -- Nicolas Dandrimont Tue, 14 Feb 2017 19:32:24 +0100 swh-model (0.0.12-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.12 * Add more tests to git tree hash computations -- Nicolas Dandrimont Tue, 14 Jun 2016 17:08:20 +0200 swh-model (0.0.11-1~swh1) unstable-swh; urgency=medium * v0.0.11 * Open git.children_hashes api * Rename git.walk_and_compute_sha1_from_directory_2 to git.compute_hashes_from_directory * Remove dead code -- Antoine R. Dumont (@ardumont) Sat, 11 Jun 2016 02:23:19 +0200 swh-model (0.0.10-1~swh1) unstable-swh; urgency=medium * v0.0.10 * Add objects_per_type api * Open a new walk_and_compute_sha1_from_directory_2 api * Improve internal api regarding directory and tree hash computations -- Antoine R. Dumont (@ardumont) Wed, 08 Jun 2016 15:54:59 +0200 swh-model (0.0.9-1~swh1) unstable-swh; urgency=medium * v0.0.9 * Add coverage on edge case * Optimize git hash walk -- Antoine R. Dumont (@ardumont) Thu, 26 May 2016 12:56:17 +0200 swh-model (0.0.8-1~swh1) unstable-swh; urgency=medium * v0.0.8 * Add coverage on edge case * Optimize git hash walk -- Antoine R. Dumont (@ardumont) Thu, 26 May 2016 12:33:59 +0200 swh-model (0.0.7-1~swh1) unstable-swh; urgency=medium * v0.0.7 * Improve corner case policy about walking and computing hash tree (+ update) -- Antoine R. Dumont (@ardumont) Wed, 25 May 2016 23:47:19 +0200 swh-model (0.0.6-1~swh1) unstable-swh; urgency=medium * v0.0.6 * Improve corner case on git hash memory update function * debian packaging: Ignore fs tests for packaging -- Antoine R. Dumont (@ardumont) Tue, 24 May 2016 17:01:06 +0200 swh-model (0.0.5-1~swh1) unstable-swh; urgency=medium * v0.0.5 * Add update git hash computation from existing data * Add revision identifier data for hash identifier computation (extra- headers) -- Antoine R. Dumont (@ardumont) Fri, 15 Apr 2016 12:51:21 +0200 swh-model (0.0.4-1~swh1) unstable-swh; urgency=medium * v0.0.4 * Migrate swh.loader.dir.git module to swh.model.git -- Antoine R. Dumont (@ardumont) Mon, 21 Mar 2016 15:20:28 +0100 swh-model (0.0.3-1~swh1) unstable-swh; urgency=medium * v0.0.3 * Release name is now in bytes -- Antoine R. Dumont (@ardumont) Wed, 27 Jan 2016 15:50:08 +0100 swh-model (0.0.2-1~swh1) unstable-swh; urgency=medium * Prepare release of v0.0.2 * Import the rest of swh.core.hashutil -- Nicolas Dandrimont Wed, 16 Dec 2015 18:30:12 +0100 swh-model (0.0.1-1~swh1) unstable-swh; urgency=medium * Initial release * Prepare swh.model release v0.0.1 -- Nicolas Dandrimont Mon, 07 Dec 2015 18:26:58 +0100 diff --git a/docs/persistent-identifiers.rst b/docs/persistent-identifiers.rst index 588e5e0..105f58c 100644 --- a/docs/persistent-identifiers.rst +++ b/docs/persistent-identifiers.rst @@ -1,408 +1,427 @@ .. _persistent-identifiers: .. _swhids: ================================================= SoftWare Heritage persistent IDentifiers (SWHIDs) ================================================= **version 1.6, last modified 2021-04-30** .. contents:: :local: :depth: 2 Overview ======== You can point to objects present in the `Software Heritage `_ `archive `_ by the means of **SoftWare Heritage persistent IDentifiers**, or **SWHIDs** for short, that are guaranteed to remain stable (persistent) over time. Their syntax, meaning, and usage is described below. 
Note that they are identifiers and not URLs, even though URL-based `resolvers`_ for SWHIDs are also available.

A SWHID consists of two separate parts, a mandatory *core identifier* that can point to any software artifact (or "object") available in the Software Heritage archive, and an optional list of *qualifiers* that allow specifying the context in which the object is meant to be seen and pointing to a subpart of the object itself.

Objects come in different types:

* contents
* directories
* revisions
* releases
* snapshots

Each object is identified by an intrinsic, type-specific object identifier that is embedded in its SWHID as described below. The intrinsic identifiers embedded in SWHIDs are strong cryptographic hashes computed on the entire set of object properties. Together, these identifiers form a `Merkle structure <https://en.wikipedia.org/wiki/Merkle_tree>`_, specifically a Merkle `DAG <https://en.wikipedia.org/wiki/Directed_acyclic_graph>`_.

See the :ref:`Software Heritage data model <data-model>` for an overview of object types and how they are linked together. See :py:mod:`swh.model.git_objects` for details on how the intrinsic identifiers embedded in SWHIDs are computed.

The optional qualifiers are of two kinds:

* **context qualifiers:** carry information about the context in which a given object is meant to be seen. This is particularly important, as the same object can be reached in the Merkle graph following different *paths* starting from different nodes (or *anchors*), and it may have been retrieved from different *origins*, which may evolve between different *visits*
* **fragment qualifiers:** pinpoint specific subparts of an object

.. _swhids-syntax:

Syntax
======

Syntactically, SWHIDs are generated by the ``<identifier>`` entry point in the following grammar:

.. code-block:: bnf

   <identifier> ::= <identifier_core> [ <qualifiers> ] ;
   <identifier_core> ::= "swh" ":" <scheme_version> ":" <object_type> ":" <object_id> ;
   <scheme_version> ::= "1" ;
   <object_type> ::=
       "snp" (* snapshot *)
     | "rel" (* release *)
     | "rev" (* revision *)
     | "dir" (* directory *)
     | "cnt" (* content *)
     ;
   <object_id> ::= 40 * <hex_digit> ;  (* intrinsic object id, as hex-encoded SHA1 *)
   <dec_digit> ::= "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ;
   <hex_digit> ::= <dec_digit> | "a" | "b" | "c" | "d" | "e" | "f" ;
   <qualifiers> := ";" <qualifier> [ <qualifiers> ] ;
   <qualifier> ::= <context_qualifier> | <fragment_qualifier> ;
   <context_qualifier> ::= <origin_ctxt> | <visit_ctxt> | <anchor_ctxt> | <path_ctxt> ;
   <origin_ctxt> ::= "origin" "=" <url_escaped> ;
   <visit_ctxt> ::= "visit" "=" <identifier_core> ;
   <anchor_ctxt> ::= "anchor" "=" <identifier_core> ;
   <path_ctxt> ::= "path" "=" <path_absolute_escaped> ;
   <fragment_qualifier> ::= "lines" "=" <line_number> ["-" <line_number>] ;
   <line_number> ::= <dec_digit> + ;
   <url_escaped> ::= (* RFC 3987 IRI *)
   <path_absolute_escaped> ::= (* RFC 3987 absolute path *)

Where:

- ``<path_absolute_escaped>`` is an ``<ipath-absolute>`` from `RFC 3987`_, and
- ``<url_escaped>`` is a `RFC 3987`_ IRI

in either case all occurrences of ``;`` (and ``%``, as required by the RFC) have been percent-encoded (as ``%3B`` and ``%25`` respectively). Other characters *can* be percent-encoded, e.g., to improve readability and/or embeddability of SWHIDs in other contexts.

.. _RFC 3987: https://tools.ietf.org/html/rfc3987

.. _swhids-semantics:

Semantics
=========

.. _swhids-core:

Core identifiers
----------------

``:`` is used as separator between the logical parts of core identifiers. The ``swh`` prefix makes explicit that these identifiers are related to *SoftWare Heritage*. ``1`` (``<scheme_version>``) is the current version of this identifier *scheme*. Future editions will use higher version numbers, possibly breaking backward compatibility, but without breaking the resolvability of SWHIDs that conform to previous versions of the scheme.

A SWHID points to a single object, whose type is explicitly captured by ``<object_type>``:

* ``snp`` to **snapshots**,
* ``rel`` to **releases**,
* ``rev`` to **revisions**,
* ``dir`` to **directories**,
* ``cnt`` to **contents**.
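As a concrete illustration of this structure, a core identifier can be parsed into the parts above programmatically. Here is a minimal sketch, assuming the ``swh.model.swhids`` module of swh.model >= 3.0 (per the changelog above, these classes moved there from ``identifiers.py`` in v3.0.0):

.. code-block:: python

   from swh.model.swhids import CoreSWHID

   # Parse a core identifier into its grammar components.
   swhid = CoreSWHID.from_string(
       "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2"
   )
   print(swhid.namespace)        # "swh"
   print(swhid.scheme_version)   # 1
   print(swhid.object_type)      # ObjectType.CONTENT
   print(swhid.object_id.hex())  # "94a9ed024d3859793618152ea559a168bbcbb5e2"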
The actual object pointed to is identified by the intrinsic identifier ``<object_id>``, which is a hex-encoded (using lowercase ASCII characters) SHA1 computed on the content and metadata of the object itself, as follows:

* for **snapshots**, intrinsic identifiers are SHA1 hashes of manifests computed as per :py:func:`swh.model.git_objects.snapshot_git_object`
* for **releases**, as per :py:func:`swh.model.git_objects.release_git_object` that produces the same result as a git release hash
* for **revisions**, as per :py:func:`swh.model.git_objects.revision_git_object` that produces the same result as a git commit hash
* for **directories**, as per :py:func:`swh.model.git_objects.directory_git_object` that produces the same result as a git tree hash
* for **contents**, the intrinsic identifier is the ``sha1_git`` hash returned by :py:meth:`swh.model.hashutil.MultiHash.digest`, i.e., the SHA1 of a byte sequence obtained by juxtaposing the ASCII string ``"blob"`` (without quotes), a space, the length of the content as decimal digits, a NULL byte, and the actual content of the file.

.. _swhids-qualifiers:

Qualifiers
----------

``;`` is used as separator between the core identifier and the optional qualifiers, as well as between qualifiers. Each qualifier is specified as a key/value pair, using ``=`` as a separator.

The following *context qualifiers* are available:

* **origin:** the *software origin* where an object has been found or observed in the wild, as a URI;
* **visit:** the core identifier of a *snapshot* corresponding to a specific *visit* of a repository containing the designated object;
* **anchor:** a *designated node* in the Merkle DAG relative to which a *path to the object* is specified, as the core identifier of a directory, a revision, a release, or a snapshot;
* **path:** the *absolute file path*, from the *root directory* associated with the *anchor node*, to the object; when the anchor denotes a directory or a revision, and almost always when it's a release, the root directory is uniquely determined; when the anchor denotes a snapshot, the root directory is the one pointed to by ``HEAD`` (possibly indirectly), and undefined if such a reference is missing;

The following *fragment qualifier* is available:

* **lines:** *line number(s)* of interest, usually within a content object

We recommend equipping identifiers meant to be shared with as many qualifiers as possible. While qualifiers may be listed in any order, it is good practice to present them in the order given above, i.e., ``origin``, ``visit``, ``anchor``, ``path``, ``lines``. Redundant information should be omitted: for example, if the *visit* is present, and the *path* is relative to the snapshot indicated there, then the *anchor* qualifier is superfluous; similarly, if the *path* is empty, it may be omitted.

Interoperability
================

URI scheme
----------

The ``swh`` URI scheme is registered at IANA for SWHIDs. The present document constitutes the scheme specification for this URI scheme.

Git compatibility
-----------------

SWHIDs for contents, directories, revisions, and releases are, at present, compatible with the `Git <https://git-scm.com/>`_ way of `computing identifiers <https://git-scm.com/book/en/v2/Git-Internals-Git-Objects>`_ for its objects. The ``<object_id>`` part of a SWHID for a content object is the Git blob identifier of any file with the same content; for a revision it is the Git commit identifier for the same revision, etc. This is not the case for snapshot identifiers, as Git does not have a corresponding object type.
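As an illustration of this compatibility, the core identifier of a content object can be recomputed by hand with nothing but the Python standard library, following the ``"blob"`` manifest convention described above. A minimal sketch (``content_swhid`` is our own illustrative helper, not part of swh.model):

.. code-block:: python

   import hashlib

   def content_swhid(data: bytes) -> str:
       # SHA1 of: "blob", a space, the decimal length, a NULL byte, then the
       # raw content -- i.e., the Git blob identifier of the same bytes.
       header = b"blob %d\x00" % len(data)
       return "swh:1:cnt:" + hashlib.sha1(header + data).hexdigest()

   # The empty content hashes to Git's well-known empty-blob identifier:
   assert content_swhid(b"") == "swh:1:cnt:e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"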
Note that Git compatibility is incidental and is not guaranteed to be maintained in future versions of this scheme (or Git).

Automatically fixing invalid SWHIDs
-----------------------------------

User interfaces may fix invalid SWHIDs by lower-casing the ``<object_id>`` part of a SWHID if it contains upper-case letters because of user errors or limitations in software displaying SWHIDs. However, implementations displaying or generating SWHIDs should not rely on this behavior, and must display or generate only valid SWHIDs when technically possible.

User interfaces should show an error when such an automatic fix occurs, so users have a chance to fix their SWHID before pasting it into another interface that does not perform the same corrections. This also makes it easier to understand issues when a case-sensitive qualifier has its casing altered.

Examples
========

Core identifiers
----------------

* ``swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2`` points to the content of a file containing the full text of the GPL3 license
* ``swh:1:dir:d198bc9d7a6bcf6db04f476d29314f157507d505`` points to a directory containing the source code of the Darktable photography application as it was at some point on 4 May 2017
* ``swh:1:rev:309cf2674ee7a0749978cf8265ab91a60aea0f7d`` points to a commit in the development history of Darktable, dated 16 January 2017, that added undo/redo support for masks
* ``swh:1:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f`` points to Darktable release 2.3.0, dated 24 December 2016
* ``swh:1:snp:c7c108084bc0bf3d81436bf980b46e98bd338453`` points to a snapshot of the entire Darktable Git repository taken on 4 May 2017 from GitHub

Identifiers with qualifiers
---------------------------

* The following :swh_web:`SWHID ` denotes lines 9 to 15 of a file content that can be found at absolute path ``/Examples/SimpleFarm/simplefarm.ml`` from the root directory of the revision ``swh:1:rev:2db189928c94d62a3b4757b3eec68f0a4d4113f0`` that is contained in the snapshot ``swh:1:snp:d7f1b9eb7ccb596c2622c4780febaa02549830f9`` taken from the origin ``https://gitorious.org/ocamlp3l/ocamlp3l_cvs.git``::

    swh:1:cnt:4d99d2d18326621ccdd70f5ea66c2e2ac236ad8b;
    origin=https://gitorious.org/ocamlp3l/ocamlp3l_cvs.git;
    visit=swh:1:snp:d7f1b9eb7ccb596c2622c4780febaa02549830f9;
    anchor=swh:1:rev:2db189928c94d62a3b4757b3eec68f0a4d4113f0;
    path=/Examples/SimpleFarm/simplefarm.ml;
    lines=9-15

* Here is an example of a :swh_web:`SWHID ` with a file path that requires percent-escaping::

    swh:1:cnt:f10371aa7b8ccabca8479196d6cd640676fd4a04;
    origin=https://github.com/web-platform-tests/wpt;
    visit=swh:1:snp:b37d435721bbd450624165f334724e3585346499;
    anchor=swh:1:rev:259d0612af038d14f2cd889a14a3adb6c9e96d96;
    path=/html/semantics/document-metadata/the-meta-element/pragma-directives/attr-meta-http-equiv-refresh/support/x%3Burl=foo/

Implementation
==============

Computing
---------

An important property of any SWHID is that its core identifier is *intrinsic*: it can be *computed from the object itself*, without having to rely on any third party. An implementation of SWHID that allows doing so locally is the `swh identify `_ tool, available from the `swh.model <https://pypi.org/project/swh.model/>`_ Python package under the GPL license.

This package can be installed via the ``pip`` package manager with the one-liner ``pip3 install swh.model[cli]`` on any machine with Python (at least version 3.7) and ``pip`` installed (on a Debian or Ubuntu system a simple ``apt install python3 python3-pip`` will suffice, see `the general instructions `_ for other platforms).
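The same computation can also be done from Python without going through the CLI. A minimal sketch, assuming the ``swh.model.from_disk`` API of recent releases (the ``swhid()`` method on from-disk objects appeared in v2.6.2 per the changelog above):

.. code-block:: python

   from swh.model.from_disk import Directory

   # Scan a directory tree and compute its intrinsic identifier,
   # equivalent to `swh identify --no-filename <path>`.
   directory = Directory.from_disk(path=b"/usr/src/linux/kernel")
   print(directory.swhid())
   # e.g. swh:1:dir:f9f858a48d663b3809c9e2f336412717496202ab
   # (cf. the swh-identify example in the README above)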
SWHIDs are also automatically computed by Software Heritage for all archived
objects as part of its archival activity, and can be looked up via the project
:swh_web:`Web interface <>`.

This has various practical implications:

* when a software artifact is obtained from Software Heritage by resolving a
  SWHID, it is straightforward to verify that it is exactly the intended one:
  just compute the core identifier from the artifact itself, and check that it
  is the same as the core identifier part of the SWHID

* the core identifier of a software artifact can be computed *before* its
  archival on Software Heritage

+Choosing what type of SWHID to use
+----------------------------------
+
+``swh:1:dir:`` SWHIDs are the most robust SWHIDs, as they can be recomputed from
+the simplest objects (a directory structure on a filesystem), even when all
+metadata is lost, without relying on the Software Heritage archive.
+
+Therefore, we advise implementers and users to prefer this type of SWHID
+over ``swh:1:rev:`` and ``swh:1:rel:`` to reference source code artifacts.
+
+However, since keeping the metadata is also important, you should add an anchor
+qualifier to ``swh:1:dir:`` SWHIDs whenever possible, so the metadata stored
+in the Software Heritage archive can be retrieved when needed.
+
+This means, for example, that you should prefer
+``swh:1:dir:a8eded6a2d062c998ba2dcc3dcb0ce68a4e15a58;anchor=swh:1:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f``
+over ``swh:1:rel:22ece559cc7cc2364edc5e5593d63ae8bd229f9f``.
+
+

Resolvers
---------

Software Heritage resolver
~~~~~~~~~~~~~~~~~~~~~~~~~~

SWHIDs can be resolved using the Software Heritage :swh_web:`Web interface <>`.
In particular, the **root endpoint** ``/`` can be given a SWHID and will lead
to the browsing page of the corresponding object, like this:
``https://archive.softwareheritage.org/<SWHID>``.

A **dedicated** ``/resolve`` **endpoint** of the Software Heritage
:swh_web:`Web API ` is also available to programmatically resolve SWHIDs; see:
:http:get:`/api/1/resolve/(swhid)/`.

Examples:

* :swh_web:``
* :swh_web:``
* :swh_web:``
* :swh_web:``
* :swh_web:``
* :swh_web:``
* :swh_web:``

Third-party resolvers
~~~~~~~~~~~~~~~~~~~~~

The following **third party resolvers** support SWHID resolution:

* `Identifiers.org `_; see: ``_ (registry identifier
  `MIR:00000655 `_).

* `Name-to-Thing (N2T) `_

Note that resolution via Identifiers.org currently only supports *core
identifiers* due to `syntactic incompatibilities with qualifiers `_.

Examples:

* ``_
* ``_
* ``_
* ``_
* ``_
* ``_
* ``_

References
==========

* Roberto Di Cosmo, Morane Gruenpeter, Stefano Zacchiroli. `Identifiers for
  Digital Objects: the Case of Software Source Code Preservation `_. In
  Proceedings of `iPRES 2018 `_: 15th International Conference on Digital
  Preservation, Boston, MA, USA, September 2018, 9 pages.

* Roberto Di Cosmo, Morane Gruenpeter, Stefano Zacchiroli. `Referencing Source
  Code Artifacts: a Separate Concern in Software Citation `_. In Computing in
  Science and Engineering, volume 22, issue 2, pages 33-43. ISSN 1521-9615,
  IEEE. March 2020.

diff --git a/requirements.txt b/requirements.txt index 1e48ffb..2980e48 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,11 @@ # Add here external Python modules dependencies, one per line. Module names # should match https://pypi.python.org/pypi names.
For the full spec or # dependency lines, see https://pip.readthedocs.org/en/1.1/requirements.html -attrs >= 21.1.1 +attrs != 21.1.0 # https://github.com/python-attrs/attrs/issues/804 attrs_strict >= 0.0.7 deprecated hypothesis iso8601 python-dateutil typing_extensions diff --git a/swh.model.egg-info/PKG-INFO b/swh.model.egg-info/PKG-INFO index ae50202..e55a226 100644 --- a/swh.model.egg-info/PKG-INFO +++ b/swh.model.egg-info/PKG-INFO @@ -1,46 +1,46 @@ Metadata-Version: 2.1 Name: swh.model -Version: 4.4.0 +Version: 5.0.0 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-model Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-model/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: cli Provides-Extra: testing-minimal Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-model ========= Implementation of the Data model of the Software Heritage project, used to archive source code artifacts. This module defines the notion of SoftWare Heritage persistent IDentifiers (SWHIDs) and provides tools to compute them: ```sh $ swh-identify fork.c kmod.c sched/deadline.c swh:1:cnt:2e391c754ae730bd2d8520c2ab497c403220c6e3 fork.c swh:1:cnt:0277d1216f80ae1adeed84a686ed34c9b2931fc2 kmod.c swh:1:cnt:57b939c81bce5d06fa587df8915f05affbe22b82 sched/deadline.c $ swh-identify --no-filename /usr/src/linux/kernel/ swh:1:dir:f9f858a48d663b3809c9e2f336412717496202ab ``` diff --git a/swh.model.egg-info/entry_points.txt b/swh.model.egg-info/entry_points.txt index f754c9a..4e29a6a 100644 --- a/swh.model.egg-info/entry_points.txt +++ b/swh.model.egg-info/entry_points.txt @@ -1,6 +1,5 @@ +[console_scripts] +swh-identify = swh.model.cli:identify - [console_scripts] - swh-identify=swh.model.cli:identify - [swh.cli.subcommands] - identify=swh.model.cli - \ No newline at end of file +[swh.cli.subcommands] +identify = swh.model.cli diff --git a/swh.model.egg-info/requires.txt b/swh.model.egg-info/requires.txt index ba7eaef..6ec0311 100644 --- a/swh.model.egg-info/requires.txt +++ b/swh.model.egg-info/requires.txt @@ -1,29 +1,29 @@ -attrs>=21.1.1 +attrs!=21.1.0 attrs_strict>=0.0.7 deprecated hypothesis iso8601 python-dateutil typing_extensions [cli] swh.core>=0.3 Click dulwich [testing] click pytest pytz types-python-dateutil types-pytz swh.core>=0.3 Click dulwich [testing-minimal] click pytest pytz types-python-dateutil types-pytz diff --git a/swh/model/model.py b/swh/model/model.py index b981b48..7bed1c3 100644 --- a/swh/model/model.py +++ b/swh/model/model.py @@ -1,1562 +1,1491 @@ # Copyright (C) 2018-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """ Implementation of Software Heritage's data model See :ref:`data-model` for an overview of the data model. 
The classes defined in this module are immutable `attrs objects `__ and enums. All classes define a ``from_dict`` class method and a ``to_dict`` method to convert between them and msgpack-serializable objects. """ from abc import ABCMeta, abstractmethod import datetime from enum import Enum import hashlib from typing import Any, Dict, Iterable, Optional, Tuple, TypeVar, Union import attr from attrs_strict import AttributeTypeError import dateutil.parser import iso8601 from typing_extensions import Final from . import git_objects from .collections import ImmutableDict from .hashutil import DEFAULT_ALGORITHMS, MultiHash, hash_to_hex from .swhids import CoreSWHID from .swhids import ExtendedObjectType as SwhidExtendedObjectType from .swhids import ExtendedSWHID from .swhids import ObjectType as SwhidObjectType class MissingData(Exception): """Raised by `Content.with_data` when it has no way of fetching the data (but not when fetching the data fails).""" pass KeyType = Union[Dict[str, str], Dict[str, bytes], bytes] """The type returned by BaseModel.unique_key().""" SHA1_SIZE = 20 _OFFSET_CHARS = frozenset(b"+-0123456789") # TODO: Limit this to 20 bytes Sha1Git = bytes Sha1 = bytes KT = TypeVar("KT") VT = TypeVar("VT") def hash_repr(h: bytes) -> str: if h is None: return "None" else: return f"hash_to_bytes('{hash_to_hex(h)}')" def freeze_optional_dict( d: Union[None, Dict[KT, VT], ImmutableDict[KT, VT]] # type: ignore ) -> Optional[ImmutableDict[KT, VT]]: if isinstance(d, dict): return ImmutableDict(d) else: return d def dictify(value): "Helper function used by BaseModel.to_dict()" if isinstance(value, BaseModel): return value.to_dict() elif isinstance(value, (CoreSWHID, ExtendedSWHID)): return str(value) elif isinstance(value, Enum): return value.value elif isinstance(value, (dict, ImmutableDict)): return {k: dictify(v) for k, v in value.items()} elif isinstance(value, tuple): return tuple(dictify(v) for v in value) else: return value def _check_type(type_, value): if type_ is object or type_ is Any: return True if type_ is None: return value is None origin = getattr(type_, "__origin__", None) # Non-generic type, check it directly if origin is None: # This is functionally equivalent to using just this: # return isinstance(value, type) # but using type equality before isinstance allows very quick checks # when the exact class is used (which is the overwhelming majority of cases) # while still allowing subclasses to be used. return type(value) == type_ or isinstance(value, type_) # Check the type of the value itself # # For the same reason as above, this condition is functionally equivalent to: # if origin is not Union and not isinstance(value, origin): if origin is not Union and type(value) != origin and not isinstance(value, origin): return False # Then, if it's a container, check its items. if origin is tuple: args = type_.__args__ if len(args) == 2 and args[1] is Ellipsis: # Infinite tuple return all(_check_type(args[0], item) for item in value) else: # Finite tuple if len(args) != len(value): return False return all( _check_type(item_type, item) for (item_type, item) in zip(args, value) ) elif origin is Union: args = type_.__args__ return any(_check_type(variant, value) for variant in args) elif origin is ImmutableDict: (key_type, value_type) = type_.__args__ return all( _check_type(key_type, key) and _check_type(value_type, value) for (key, value) in value.items() ) else: # No need to check dict or list, because they are converted to ImmutableDict # and tuple respectively.
raise NotImplementedError(f"Type-checking {type_}") def type_validator(): """Like attrs_strict.type_validator(), but stricter. It is an attrs validator, which checks attributes have the specified type, using type equality instead of ``isinstance()``, for improved performance. """ def validator(instance, attribute, value): if not _check_type(attribute.type, value): raise AttributeTypeError(value, attribute) return validator ModelType = TypeVar("ModelType", bound="BaseModel") class BaseModel: """Base class for SWH model classes. Provides serialization/deserialization to/from Python dictionaries that are suitable for JSON/msgpack-like formats.""" __slots__ = () def to_dict(self): """Wrapper of `attr.asdict` that can be overridden by subclasses that have special handling of some of the fields.""" return dictify(attr.asdict(self, recurse=False)) @classmethod def from_dict(cls, d): """Takes a dictionary representing a tree of SWH objects, and recursively builds the corresponding objects.""" return cls(**d) def anonymize(self: ModelType) -> Optional[ModelType]: """Returns an anonymized version of the object, if needed. If the object model does not need/support anonymization, returns None. """ return None def unique_key(self) -> KeyType: """Returns a unique key for this object that can be used for deduplication.""" raise NotImplementedError(f"unique_key for {self}") def check(self) -> None: """Performs internal consistency checks, and raises an error if one fails.""" attr.validate(self) def _compute_hash_from_manifest(manifest: bytes) -> Sha1Git: return hashlib.new("sha1", manifest).digest() class HashableObject(metaclass=ABCMeta): """Mixin to automatically compute object identifier hash when the associated model is instantiated.""" __slots__ = () id: Sha1Git def compute_hash(self) -> bytes: """Derived model classes must implement this to compute the object hash. This method is called by the object initialization if the `id` attribute is set to an empty value. """ return self._compute_hash_from_attributes() @abstractmethod def _compute_hash_from_attributes(self) -> Sha1Git: raise NotImplementedError(f"_compute_hash_from_attributes for {self}") def __attrs_post_init__(self): if not self.id: obj_id = self.compute_hash() object.__setattr__(self, "id", obj_id) def unique_key(self) -> KeyType: return self.id def check(self) -> None: super().check() # type: ignore if self.id != self.compute_hash(): raise ValueError("'id' does not match recomputed hash.") class HashableObjectWithManifest(HashableObject): """Derived class of HashableObject, for objects that may need to store verbatim git objects as ``raw_manifest`` to preserve original hashes.""" + __slots__ = () + raw_manifest: Optional[bytes] = None """Stores the original content of git objects when they cannot be faithfully represented using only the other attributes. This should only be used as a last resort, and only set in the Git loader, for objects too corrupt to fit the data model.""" def to_dict(self): d = super().to_dict() if d["raw_manifest"] is None: del d["raw_manifest"] return d def compute_hash(self) -> bytes: """Derived model classes must implement this to compute the object hash. This method is called by the object initialization if the `id` attribute is set to an empty value.
""" if self.raw_manifest is None: return super().compute_hash() else: return _compute_hash_from_manifest(self.raw_manifest) def check(self) -> None: super().check() if ( self.raw_manifest is not None and self.id == self._compute_hash_from_attributes() ): raise ValueError( f"{self} has a non-none raw_manifest attribute, but does not need it." ) @attr.s(frozen=True, slots=True) class Person(BaseModel): """Represents the author/committer of a revision or release.""" object_type: Final = "person" fullname = attr.ib(type=bytes, validator=type_validator()) name = attr.ib(type=Optional[bytes], validator=type_validator()) email = attr.ib(type=Optional[bytes], validator=type_validator()) @classmethod def from_fullname(cls, fullname: bytes): """Returns a Person object, by guessing the name and email from the fullname, in the `name ` format. The fullname is left unchanged.""" if fullname is None: raise TypeError("fullname is None.") name: Optional[bytes] email: Optional[bytes] try: open_bracket = fullname.index(b"<") except ValueError: name = fullname email = None else: raw_name = fullname[:open_bracket] raw_email = fullname[open_bracket + 1 :] if not raw_name: name = None else: name = raw_name.strip() try: close_bracket = raw_email.rindex(b">") except ValueError: email = raw_email else: email = raw_email[:close_bracket] return Person(name=name or None, email=email or None, fullname=fullname,) def anonymize(self) -> "Person": """Returns an anonymized version of the Person object. Anonymization is simply a Person which fullname is the hashed, with unset name or email. """ return Person( fullname=hashlib.sha256(self.fullname).digest(), name=None, email=None, ) @classmethod def from_dict(cls, d): """ If the fullname is missing, construct a fullname using the following heuristics: if the name value is None, we return the email in angle brackets, else, we return the name, a space, and the email in angle brackets. """ if "fullname" not in d: parts = [] if d["name"] is not None: parts.append(d["name"]) if d["email"] is not None: parts.append(b"".join([b"<", d["email"], b">"])) fullname = b" ".join(parts) d = {**d, "fullname": fullname} d = {"name": None, "email": None, **d} return super().from_dict(d) @attr.s(frozen=True, slots=True) class Timestamp(BaseModel): """Represents a naive timestamp from a VCS.""" object_type: Final = "timestamp" seconds = attr.ib(type=int, validator=type_validator()) microseconds = attr.ib(type=int, validator=type_validator()) @seconds.validator def check_seconds(self, attribute, value): """Check that seconds fit in a 64-bits signed integer.""" if not (-(2 ** 63) <= value < 2 ** 63): raise ValueError("Seconds must be a signed 64-bits integer.") @microseconds.validator def check_microseconds(self, attribute, value): """Checks that microseconds are positive and < 1000000.""" if not (0 <= value < 10 ** 6): raise ValueError("Microseconds must be in [0, 1000000[.") -@attr.s(frozen=True, slots=True, init=False) +@attr.s(frozen=True, slots=True) class TimestampWithTimezone(BaseModel): """Represents a TZ-aware timestamp from a VCS.""" object_type: Final = "timestamp_with_timezone" timestamp = attr.ib(type=Timestamp, validator=type_validator()) - offset = attr.ib(type=int, validator=type_validator()) - negative_utc = attr.ib(type=bool, validator=type_validator()) - offset_bytes = attr.ib(type=bytes, validator=type_validator()) """Raw git representation of the timezone, as an offset from UTC. It should follow this format: ``+HHMM`` or ``-HHMM`` (including ``+0000`` and ``-0000``). 
However, when created from git objects, it must be the exact bytes used in the original objects, so it may differ from this format when they do. """ - def __init__( - self, - timestamp: Timestamp, - offset: int = None, - negative_utc: bool = None, - offset_bytes: bytes = None, - ): - if offset_bytes is None: - if offset is None: - raise AttributeError("Neither 'offset' nor 'offset_bytes' was passed.") - if negative_utc is None: - raise AttributeError( - "Neither 'negative_utc' nor 'offset_bytes' was passed." - ) - negative = offset < 0 or negative_utc - (hours, minutes) = divmod(abs(offset), 60) - offset_bytes = f"{'-' if negative else '+'}{hours:02}{minutes:02}".encode() - else: - offset = self._parse_offset_bytes(offset_bytes) - negative_utc = offset == 0 and offset_bytes.startswith(b"-") - - self.__attrs_init__( # type: ignore - timestamp=timestamp, - offset=offset, - negative_utc=negative_utc, - offset_bytes=offset_bytes, - ) - - @offset.validator - def check_offset(self, attribute, value): - """Checks the offset is a 16-bits signed integer (in theory, it - should always be between -14 and +14 hours).""" - if not (-(2 ** 15) <= value < 2 ** 15): - # max 14 hours offset in theory, but you never know what - # you'll find in the wild... - raise ValueError("offset too large: %d minutes" % value) - - self._check_offsets_match() - - @negative_utc.validator - def check_negative_utc(self, attribute, value): - if self.offset and value: - raise ValueError("negative_utc can only be True is offset=0") - - self._check_offsets_match() - - @offset_bytes.validator - def check_offset_bytes(self, attribute, value): - if not set(value) <= _OFFSET_CHARS: - raise ValueError(f"invalid characters in offset_bytes: {value!r}") - - self._check_offsets_match() - - @staticmethod - def _parse_offset_bytes(offset_bytes: bytes) -> int: - """Parses an ``offset_bytes`` value (in Git's ``[+-]HHMM`` format), - and returns the corresponding numeric values (in number of minutes). - - Tries to account for some mistakes in the format, to support incorrect - Git implementations. - - >>> TimestampWithTimezone._parse_offset_bytes(b"+0000") - 0 - >>> TimestampWithTimezone._parse_offset_bytes(b"-0000") - 0 - >>> TimestampWithTimezone._parse_offset_bytes(b"+0200") - 120 - >>> TimestampWithTimezone._parse_offset_bytes(b"-0200") - -120 - >>> TimestampWithTimezone._parse_offset_bytes(b"+200") - 120 - >>> TimestampWithTimezone._parse_offset_bytes(b"-200") - -120 - >>> TimestampWithTimezone._parse_offset_bytes(b"+02") - 120 - >>> TimestampWithTimezone._parse_offset_bytes(b"-02") - -120 - >>> TimestampWithTimezone._parse_offset_bytes(b"+0010") - 10 - >>> TimestampWithTimezone._parse_offset_bytes(b"-0010") - -10 - >>> TimestampWithTimezone._parse_offset_bytes(b"+200000000000000000") - 0 - >>> TimestampWithTimezone._parse_offset_bytes(b"+0160") # 60 minutes... - 0 - """ - offset_str = offset_bytes.decode() - assert offset_str[0] in "+-" - sign = int(offset_str[0] + "1") - if len(offset_str) <= 3: - hours = int(offset_str[1:]) - minutes = 0 - else: - hours = int(offset_str[1:-2]) - minutes = int(offset_str[-2:]) - - offset = sign * (hours * 60 + minutes) - if (0 <= minutes <= 59) and (-(2 ** 15) <= offset < 2 ** 15): - return offset - else: - # can't parse it to a reasonable value; give up and pretend it's UTC. 
- return 0 - - def _check_offsets_match(self): - offset = self._parse_offset_bytes(self.offset_bytes) - if offset != self.offset: - raise ValueError( - f"offset_bytes ({self.offset_bytes!r}) does not match offset " - f"{divmod(self.offset, 60)}" - ) - - if offset == 0 and self.negative_utc != self.offset_bytes.startswith(b"-"): - raise ValueError( - f"offset_bytes ({self.offset_bytes!r}) does not match negative_utc " - f"({self.negative_utc})" - ) - @classmethod def from_numeric_offset( cls, timestamp: Timestamp, offset: int, negative_utc: bool ) -> "TimestampWithTimezone": """Returns a :class:`TimestampWithTimezone` instance from the old dictionary format (with ``offset`` and ``negative_utc`` instead of ``offset_bytes``). """ negative = offset < 0 or negative_utc (hours, minutes) = divmod(abs(offset), 60) offset_bytes = f"{'-' if negative else '+'}{hours:02}{minutes:02}".encode() - tstz = TimestampWithTimezone( - timestamp=timestamp, - offset_bytes=offset_bytes, - offset=offset, - negative_utc=negative_utc, - ) - assert tstz.offset == offset, (tstz.offset, offset) + tstz = TimestampWithTimezone(timestamp=timestamp, offset_bytes=offset_bytes) + assert tstz.offset_minutes() == offset, (tstz.offset_minutes(), offset) return tstz @classmethod def from_dict( cls, time_representation: Union[Dict, datetime.datetime, int] ) -> "TimestampWithTimezone": """Builds a TimestampWithTimezone from any of the formats accepted by :func:`swh.model.normalize_timestamp`.""" # TODO: this accept way more types than just dicts; find a better # name if isinstance(time_representation, dict): ts = time_representation["timestamp"] if isinstance(ts, dict): seconds = ts.get("seconds", 0) microseconds = ts.get("microseconds", 0) elif isinstance(ts, int): seconds = ts microseconds = 0 else: raise ValueError( f"TimestampWithTimezone.from_dict received non-integer timestamp " f"member {ts!r}" ) timestamp = Timestamp(seconds=seconds, microseconds=microseconds) if "offset_bytes" in time_representation: - return TimestampWithTimezone( + return cls( timestamp=timestamp, offset_bytes=time_representation["offset_bytes"], ) else: # old format offset = time_representation["offset"] negative_utc = time_representation.get("negative_utc") or False return cls.from_numeric_offset(timestamp, offset, negative_utc) elif isinstance(time_representation, datetime.datetime): # TODO: warn when using from_dict() on a datetime utcoffset = time_representation.utcoffset() time_representation = time_representation.astimezone(datetime.timezone.utc) microseconds = time_representation.microsecond if microseconds: time_representation = time_representation.replace(microsecond=0) seconds = int(time_representation.timestamp()) if utcoffset is None: raise ValueError( f"TimestampWithTimezone.from_dict received datetime without " f"timezone: {time_representation}" ) # utcoffset is an integer number of minutes seconds_offset = utcoffset.total_seconds() offset = int(seconds_offset) // 60 # TODO: warn if remainder is not zero return cls.from_numeric_offset( Timestamp(seconds=seconds, microseconds=microseconds), offset, False ) elif isinstance(time_representation, int): # TODO: warn when using from_dict() on an int seconds = time_representation timestamp = Timestamp(seconds=time_representation, microseconds=0) - return TimestampWithTimezone(timestamp=timestamp, offset_bytes=b"+0000") + return cls(timestamp=timestamp, offset_bytes=b"+0000") else: raise ValueError( f"TimestampWithTimezone.from_dict received non-integer timestamp: " f"{time_representation!r}" ) 
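# A brief usage sketch for the conversions above (doctest-style; a sketch, not
# part of the module, using only names defined in it; the expected values
# follow the behavior of from_numeric_offset()):
#
#   >>> tstz = TimestampWithTimezone.from_dict(
#   ...     {"timestamp": {"seconds": 1234567890}, "offset": 120}
#   ... )
#   >>> tstz.offset_bytes
#   b'+0200'
#   >>> TimestampWithTimezone.from_dict(1234567890).offset_bytes
#   b'+0000'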
@classmethod def from_datetime(cls, dt: datetime.datetime) -> "TimestampWithTimezone": return cls.from_dict(dt) def to_datetime(self) -> datetime.datetime: """Convert to a datetime (with a timezone set to the recorded fixed UTC offset) Beware that this conversion can be lossy: ``-0000`` and 'weird' offsets cannot be represented. Also note that it may fail due to type overflow. """ timestamp = datetime.datetime.fromtimestamp( self.timestamp.seconds, - datetime.timezone(datetime.timedelta(minutes=self.offset)), + datetime.timezone(datetime.timedelta(minutes=self.offset_minutes())), ) timestamp = timestamp.replace(microsecond=self.timestamp.microseconds) return timestamp @classmethod def from_iso8601(cls, s): """Builds a TimestampWithTimezone from an ISO8601-formatted string. """ dt = iso8601.parse_date(s) tstz = cls.from_datetime(dt) if dt.tzname() == "-00:00": assert tstz.offset_bytes == b"+0000" - tstz = attr.evolve(tstz, offset_bytes=b"-0000", negative_utc=True) + tstz = attr.evolve(tstz, offset_bytes=b"-0000") return tstz + @staticmethod + def _parse_offset_bytes(offset_bytes: bytes) -> int: + """Parses an ``offset_bytes`` value (in Git's ``[+-]HHMM`` format), + and returns the corresponding numeric values (in number of minutes). + + Tries to account for some mistakes in the format, to support incorrect + Git implementations. + + >>> TimestampWithTimezone._parse_offset_bytes(b"+0000") + 0 + >>> TimestampWithTimezone._parse_offset_bytes(b"-0000") + 0 + >>> TimestampWithTimezone._parse_offset_bytes(b"+0200") + 120 + >>> TimestampWithTimezone._parse_offset_bytes(b"-0200") + -120 + >>> TimestampWithTimezone._parse_offset_bytes(b"+200") + 120 + >>> TimestampWithTimezone._parse_offset_bytes(b"-200") + -120 + >>> TimestampWithTimezone._parse_offset_bytes(b"+02") + 120 + >>> TimestampWithTimezone._parse_offset_bytes(b"-02") + -120 + >>> TimestampWithTimezone._parse_offset_bytes(b"+0010") + 10 + >>> TimestampWithTimezone._parse_offset_bytes(b"-0010") + -10 + >>> TimestampWithTimezone._parse_offset_bytes(b"+200000000000000000") + 0 + >>> TimestampWithTimezone._parse_offset_bytes(b"+0160") # 60 minutes... + 0 + """ + offset_str = offset_bytes.decode() + assert offset_str[0] in "+-" + sign = int(offset_str[0] + "1") + if len(offset_str) <= 3: + hours = int(offset_str[1:]) + minutes = 0 + else: + hours = int(offset_str[1:-2]) + minutes = int(offset_str[-2:]) + + offset = sign * (hours * 60 + minutes) + if (0 <= minutes <= 59) and (-(2 ** 15) <= offset < 2 ** 15): + return offset + else: + # can't parse it to a reasonable value; give up and pretend it's UTC. + return 0 + def offset_minutes(self): """Returns the offset, as a number of minutes since UTC. >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"+0000" ... ).offset_minutes() 0 >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"+0200" ... ).offset_minutes() 120 >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"-0200" ... ).offset_minutes() -120 >>> TimestampWithTimezone( ... Timestamp(seconds=1642765364, microseconds=0), offset_bytes=b"+0530" ... 
).offset_minutes() 330 """ - return self.offset + return self._parse_offset_bytes(self.offset_bytes) @attr.s(frozen=True, slots=True) class Origin(HashableObject, BaseModel): """Represents a software source: a VCS and a URL.""" object_type: Final = "origin" url = attr.ib(type=str, validator=type_validator()) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"") def unique_key(self) -> KeyType: return {"url": self.url} def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(self.url.encode("utf-8")) def swhid(self) -> ExtendedSWHID: """Returns a SWHID representing this origin.""" return ExtendedSWHID( object_type=SwhidExtendedObjectType.ORIGIN, object_id=self.id, ) @attr.s(frozen=True, slots=True) class OriginVisit(BaseModel): """Represents an origin visit with a given type at a given point in time, by a SWH loader.""" object_type: Final = "origin_visit" origin = attr.ib(type=str, validator=type_validator()) date = attr.ib(type=datetime.datetime, validator=type_validator()) type = attr.ib(type=str, validator=type_validator()) """Should not be set before calling 'origin_visit_add()'.""" visit = attr.ib(type=Optional[int], validator=type_validator(), default=None) @date.validator def check_date(self, attribute, value): """Checks the date has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("date must be a timezone-aware datetime.") def to_dict(self): """Serializes the date as a string and omits the visit id if it is `None`.""" ov = super().to_dict() if ov["visit"] is None: del ov["visit"] return ov def unique_key(self) -> KeyType: return {"origin": self.origin, "date": str(self.date)} @attr.s(frozen=True, slots=True) class OriginVisitStatus(BaseModel): """Represents a visit update of an origin at a given point in time. """ object_type: Final = "origin_visit_status" origin = attr.ib(type=str, validator=type_validator()) visit = attr.ib(type=int, validator=type_validator()) date = attr.ib(type=datetime.datetime, validator=type_validator()) status = attr.ib( type=str, validator=attr.validators.in_( ["created", "ongoing", "full", "partial", "not_found", "failed"] ), ) snapshot = attr.ib( type=Optional[Sha1Git], validator=type_validator(), repr=hash_repr ) # Type is optional to be able to use it before adding it to the database model type = attr.ib(type=Optional[str], validator=type_validator(), default=None) metadata = attr.ib( type=Optional[ImmutableDict[str, object]], validator=type_validator(), converter=freeze_optional_dict, default=None, ) @date.validator def check_date(self, attribute, value): """Checks the date has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("date must be a timezone-aware datetime.") def unique_key(self) -> KeyType: return {"origin": self.origin, "visit": str(self.visit), "date": str(self.date)} class TargetType(Enum): """The type of content pointed to by a snapshot branch. Usually a revision or an alias.""" CONTENT = "content" DIRECTORY = "directory" REVISION = "revision" RELEASE = "release" SNAPSHOT = "snapshot" ALIAS = "alias" def __repr__(self): return f"TargetType.{self.name}" class ObjectType(Enum): """The type of content pointed to by a release.
Usually a revision""" CONTENT = "content" DIRECTORY = "directory" REVISION = "revision" RELEASE = "release" SNAPSHOT = "snapshot" def __repr__(self): return f"ObjectType.{self.name}" @attr.s(frozen=True, slots=True) class SnapshotBranch(BaseModel): """Represents one of the branches of a snapshot.""" object_type: Final = "snapshot_branch" target = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) target_type = attr.ib(type=TargetType, validator=type_validator()) @target.validator def check_target(self, attribute, value): """Checks the target type is not an alias, checks the target is a valid sha1_git.""" if self.target_type != TargetType.ALIAS and self.target is not None: if len(value) != 20: raise ValueError("Wrong length for bytes identifier: %d" % len(value)) @classmethod def from_dict(cls, d): return cls(target=d["target"], target_type=TargetType(d["target_type"])) @attr.s(frozen=True, slots=True) class Snapshot(HashableObject, BaseModel): """Represents the full state of an origin at a given point in time.""" object_type: Final = "snapshot" branches = attr.ib( type=ImmutableDict[bytes, Optional[SnapshotBranch]], validator=type_validator(), converter=freeze_optional_dict, ) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.snapshot_git_object(self)) @classmethod def from_dict(cls, d): d = d.copy() return cls( branches=ImmutableDict( (name, SnapshotBranch.from_dict(branch) if branch else None) for (name, branch) in d.pop("branches").items() ), **d, ) def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.SNAPSHOT, object_id=self.id) @attr.s(frozen=True, slots=True) class Release(HashableObjectWithManifest, BaseModel): object_type: Final = "release" name = attr.ib(type=bytes, validator=type_validator()) message = attr.ib(type=Optional[bytes], validator=type_validator()) target = attr.ib(type=Optional[Sha1Git], validator=type_validator(), repr=hash_repr) target_type = attr.ib(type=ObjectType, validator=type_validator()) synthetic = attr.ib(type=bool, validator=type_validator()) author = attr.ib(type=Optional[Person], validator=type_validator(), default=None) date = attr.ib( type=Optional[TimestampWithTimezone], validator=type_validator(), default=None ) metadata = attr.ib( type=Optional[ImmutableDict[str, object]], validator=type_validator(), converter=freeze_optional_dict, default=None, ) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) raw_manifest = attr.ib(type=Optional[bytes], default=None) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.release_git_object(self)) @author.validator def check_author(self, attribute, value): """If the author is `None`, checks the date is `None` too.""" if self.author is None and self.date is not None: raise ValueError("release date must be None if author is None.") def to_dict(self): rel = super().to_dict() if rel["metadata"] is None: del rel["metadata"] return rel @classmethod def from_dict(cls, d): d = d.copy() if d.get("author"): d["author"] = Person.from_dict(d["author"]) if d.get("date"): d["date"] = TimestampWithTimezone.from_dict(d["date"]) return cls(target_type=ObjectType(d.pop("target_type")), **d) def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.RELEASE, object_id=self.id) def 
anonymize(self) -> "Release": """Returns an anonymized version of the Release object. Anonymization consists in replacing the author with an anonymized Person object. """ author = self.author and self.author.anonymize() return attr.evolve(self, author=author) class RevisionType(Enum): GIT = "git" TAR = "tar" DSC = "dsc" SUBVERSION = "svn" MERCURIAL = "hg" CVS = "cvs" BAZAAR = "bzr" def __repr__(self): return f"RevisionType.{self.name}" def tuplify_extra_headers(value: Iterable): return tuple((k, v) for k, v in value) @attr.s(frozen=True, slots=True) class Revision(HashableObjectWithManifest, BaseModel): object_type: Final = "revision" message = attr.ib(type=Optional[bytes], validator=type_validator()) author = attr.ib(type=Person, validator=type_validator()) committer = attr.ib(type=Person, validator=type_validator()) date = attr.ib(type=Optional[TimestampWithTimezone], validator=type_validator()) committer_date = attr.ib( type=Optional[TimestampWithTimezone], validator=type_validator() ) type = attr.ib(type=RevisionType, validator=type_validator()) directory = attr.ib(type=Sha1Git, validator=type_validator(), repr=hash_repr) synthetic = attr.ib(type=bool, validator=type_validator()) metadata = attr.ib( type=Optional[ImmutableDict[str, object]], validator=type_validator(), converter=freeze_optional_dict, default=None, ) parents = attr.ib(type=Tuple[Sha1Git, ...], validator=type_validator(), default=()) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) extra_headers = attr.ib( type=Tuple[Tuple[bytes, bytes], ...], validator=type_validator(), converter=tuplify_extra_headers, default=(), ) raw_manifest = attr.ib(type=Optional[bytes], default=None) def __attrs_post_init__(self): super().__attrs_post_init__() # ensure metadata is a deep copy of whatever was given, and if needed # extract extra_headers from there if self.metadata: metadata = self.metadata if not self.extra_headers and "extra_headers" in metadata: (extra_headers, metadata) = metadata.copy_pop("extra_headers") object.__setattr__( self, "extra_headers", tuplify_extra_headers(extra_headers), ) attr.validate(self) object.__setattr__(self, "metadata", metadata) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.revision_git_object(self)) @classmethod def from_dict(cls, d): d = d.copy() date = d.pop("date") if date: date = TimestampWithTimezone.from_dict(date) committer_date = d.pop("committer_date") if committer_date: committer_date = TimestampWithTimezone.from_dict(committer_date) return cls( author=Person.from_dict(d.pop("author")), committer=Person.from_dict(d.pop("committer")), date=date, committer_date=committer_date, type=RevisionType(d.pop("type")), parents=tuple(d.pop("parents")), # for BW compat **d, ) def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.REVISION, object_id=self.id) def anonymize(self) -> "Revision": """Returns an anonymized version of the Revision object. Anonymization consists in replacing the author and committer with an anonymized Person object. 
""" return attr.evolve( self, author=self.author.anonymize(), committer=self.committer.anonymize() ) @attr.s(frozen=True, slots=True) class DirectoryEntry(BaseModel): object_type: Final = "directory_entry" name = attr.ib(type=bytes, validator=type_validator()) type = attr.ib(type=str, validator=attr.validators.in_(["file", "dir", "rev"])) target = attr.ib(type=Sha1Git, validator=type_validator(), repr=hash_repr) perms = attr.ib(type=int, validator=type_validator(), converter=int, repr=oct) """Usually one of the values of `swh.model.from_disk.DentryPerms`.""" @name.validator def check_name(self, attribute, value): if b"/" in value: raise ValueError(f"{value!r} is not a valid directory entry name.") @attr.s(frozen=True, slots=True) class Directory(HashableObjectWithManifest, BaseModel): object_type: Final = "directory" entries = attr.ib(type=Tuple[DirectoryEntry, ...], validator=type_validator()) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) raw_manifest = attr.ib(type=Optional[bytes], default=None) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.directory_git_object(self)) @entries.validator def check_entries(self, attribute, value): seen = set() for entry in value: if entry.name in seen: + # Cannot use self.swhid() here, self.id may be None raise ValueError( - "{self.swhid()} has duplicated entry name: {entry.name!r}" + f"swh:1:dir:{hash_to_hex(self.id)} has duplicated entry name: " + f"{entry.name!r}" ) seen.add(entry.name) @classmethod def from_dict(cls, d): d = d.copy() return cls( entries=tuple( DirectoryEntry.from_dict(entry) for entry in d.pop("entries") ), **d, ) def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.DIRECTORY, object_id=self.id) @attr.s(frozen=True, slots=True) class BaseContent(BaseModel): status = attr.ib( type=str, validator=attr.validators.in_(["visible", "hidden", "absent"]) ) @staticmethod def _hash_data(data: bytes): """Hash some data, returning most of the fields of a content object""" d = MultiHash.from_data(data).digest() d["data"] = data d["length"] = len(data) return d @classmethod def from_dict(cls, d, use_subclass=True): if use_subclass: # Chooses a subclass to instantiate instead. 
if d["status"] == "absent": return SkippedContent.from_dict(d) else: return Content.from_dict(d) else: return super().from_dict(d) def get_hash(self, hash_name): if hash_name not in DEFAULT_ALGORITHMS: raise ValueError("{} is not a valid hash name.".format(hash_name)) return getattr(self, hash_name) def hashes(self) -> Dict[str, bytes]: """Returns a dictionary {hash_name: hash_value}""" return {algo: getattr(self, algo) for algo in DEFAULT_ALGORITHMS} @attr.s(frozen=True, slots=True) class Content(BaseContent): object_type: Final = "content" sha1 = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) sha1_git = attr.ib(type=Sha1Git, validator=type_validator(), repr=hash_repr) sha256 = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) blake2s256 = attr.ib(type=bytes, validator=type_validator(), repr=hash_repr) length = attr.ib(type=int, validator=type_validator()) status = attr.ib( type=str, validator=attr.validators.in_(["visible", "hidden"]), default="visible", ) data = attr.ib(type=Optional[bytes], validator=type_validator(), default=None) ctime = attr.ib( type=Optional[datetime.datetime], validator=type_validator(), default=None, eq=False, ) @length.validator def check_length(self, attribute, value): """Checks the length is positive.""" if value < 0: raise ValueError("Length must be positive.") @ctime.validator def check_ctime(self, attribute, value): """Checks the ctime has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("ctime must be a timezone-aware datetime.") def to_dict(self): content = super().to_dict() if content["data"] is None: del content["data"] if content["ctime"] is None: del content["ctime"] return content @classmethod def from_data(cls, data, status="visible", ctime=None) -> "Content": """Generate a Content from a given `data` byte string. This populates the Content with the hashes and length for the data passed as argument, as well as the data itself. """ d = cls._hash_data(data) d["status"] = status d["ctime"] = ctime return cls(**d) @classmethod def from_dict(cls, d): if isinstance(d.get("ctime"), str): d = d.copy() d["ctime"] = dateutil.parser.parse(d["ctime"]) return super().from_dict(d, use_subclass=False) def with_data(self) -> "Content": """Loads the `data` attribute; meaning that it is guaranteed not to be None after this call. This call is almost a no-op, but subclasses may overload this method to lazy-load data (eg. 
from disk or objstorage).""" if self.data is None: raise MissingData("Content data is None.") return self def unique_key(self) -> KeyType: return self.sha1 # TODO: use a dict of hashes def swhid(self) -> CoreSWHID: """Returns a SWHID representing this object.""" return CoreSWHID(object_type=SwhidObjectType.CONTENT, object_id=self.sha1_git) @attr.s(frozen=True, slots=True) class SkippedContent(BaseContent): object_type: Final = "skipped_content" sha1 = attr.ib(type=Optional[bytes], validator=type_validator(), repr=hash_repr) sha1_git = attr.ib( type=Optional[Sha1Git], validator=type_validator(), repr=hash_repr ) sha256 = attr.ib(type=Optional[bytes], validator=type_validator(), repr=hash_repr) blake2s256 = attr.ib( type=Optional[bytes], validator=type_validator(), repr=hash_repr ) length = attr.ib(type=Optional[int], validator=type_validator()) status = attr.ib(type=str, validator=attr.validators.in_(["absent"])) reason = attr.ib(type=Optional[str], validator=type_validator(), default=None) origin = attr.ib(type=Optional[str], validator=type_validator(), default=None) ctime = attr.ib( type=Optional[datetime.datetime], validator=type_validator(), default=None, eq=False, ) @reason.validator def check_reason(self, attribute, value): """Checks a reason is provided, since the status is always 'absent'.""" assert self.reason == value if value is None: raise ValueError("Must provide a reason if content is absent.") @length.validator def check_length(self, attribute, value): """Checks the length is non-negative or -1.""" if value < -1: raise ValueError("Length must be non-negative or -1.") @ctime.validator def check_ctime(self, attribute, value): """Checks the ctime has a timezone.""" if value is not None and value.tzinfo is None: raise ValueError("ctime must be a timezone-aware datetime.") def to_dict(self): content = super().to_dict() if content["origin"] is None: del content["origin"] if content["ctime"] is None: del content["ctime"] return content @classmethod def from_data( cls, data: bytes, reason: str, ctime: Optional[datetime.datetime] = None ) -> "SkippedContent": """Generate a SkippedContent from a given `data` byte string. This populates the SkippedContent with the hashes and length for the data passed as argument. You can use `attr.evolve` on such a generated content to nullify some of its attributes, e.g. for tests.
""" d = cls._hash_data(data) del d["data"] d["status"] = "absent" d["reason"] = reason d["ctime"] = ctime return cls(**d) @classmethod def from_dict(cls, d): d2 = d.copy() if d2.pop("data", None) is not None: raise ValueError('SkippedContent has no "data" attribute %r' % d) return super().from_dict(d2, use_subclass=False) def unique_key(self) -> KeyType: return self.hashes() class MetadataAuthorityType(Enum): DEPOSIT_CLIENT = "deposit_client" FORGE = "forge" REGISTRY = "registry" def __repr__(self): return f"MetadataAuthorityType.{self.name}" @attr.s(frozen=True, slots=True) class MetadataAuthority(BaseModel): """Represents an entity that provides metadata about an origin or software artifact.""" object_type: Final = "metadata_authority" type = attr.ib(type=MetadataAuthorityType, validator=type_validator()) url = attr.ib(type=str, validator=type_validator()) metadata = attr.ib( type=Optional[ImmutableDict[str, Any]], default=None, validator=type_validator(), converter=freeze_optional_dict, ) def to_dict(self): d = super().to_dict() if d["metadata"] is None: del d["metadata"] return d @classmethod def from_dict(cls, d): d = { **d, "type": MetadataAuthorityType(d["type"]), } return super().from_dict(d) def unique_key(self) -> KeyType: return {"type": self.type.value, "url": self.url} @attr.s(frozen=True, slots=True) class MetadataFetcher(BaseModel): """Represents a software component used to fetch metadata from a metadata authority, and ingest them into the Software Heritage archive.""" object_type: Final = "metadata_fetcher" name = attr.ib(type=str, validator=type_validator()) version = attr.ib(type=str, validator=type_validator()) metadata = attr.ib( type=Optional[ImmutableDict[str, Any]], default=None, validator=type_validator(), converter=freeze_optional_dict, ) def to_dict(self): d = super().to_dict() if d["metadata"] is None: del d["metadata"] return d def unique_key(self) -> KeyType: return {"name": self.name, "version": self.version} def normalize_discovery_date(value: Any) -> datetime.datetime: if not isinstance(value, datetime.datetime): raise TypeError("discovery_date must be a timezone-aware datetime.") if value.tzinfo is None: raise ValueError("discovery_date must be a timezone-aware datetime.") # Normalize timezone to utc, and truncate microseconds to 0 return value.astimezone(datetime.timezone.utc).replace(microsecond=0) @attr.s(frozen=True, slots=True) class RawExtrinsicMetadata(HashableObject, BaseModel): object_type: Final = "raw_extrinsic_metadata" # target object target = attr.ib(type=ExtendedSWHID, validator=type_validator()) # source discovery_date = attr.ib(type=datetime.datetime, converter=normalize_discovery_date) authority = attr.ib(type=MetadataAuthority, validator=type_validator()) fetcher = attr.ib(type=MetadataFetcher, validator=type_validator()) # the metadata itself format = attr.ib(type=str, validator=type_validator()) metadata = attr.ib(type=bytes, validator=type_validator()) # context origin = attr.ib(type=Optional[str], default=None, validator=type_validator()) visit = attr.ib(type=Optional[int], default=None, validator=type_validator()) snapshot = attr.ib( type=Optional[CoreSWHID], default=None, validator=type_validator() ) release = attr.ib( type=Optional[CoreSWHID], default=None, validator=type_validator() ) revision = attr.ib( type=Optional[CoreSWHID], default=None, validator=type_validator() ) path = attr.ib(type=Optional[bytes], default=None, validator=type_validator()) directory = attr.ib( type=Optional[CoreSWHID], default=None, 
validator=type_validator() ) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest( git_objects.raw_extrinsic_metadata_git_object(self) ) @origin.validator def check_origin(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.SNAPSHOT, SwhidExtendedObjectType.RELEASE, SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'origin' context for " f"{self.target.object_type.name.lower()} object: {value}" ) if value.startswith("swh:"): # Technically this is valid; but: # 1. SWHIDs are URIs, not URLs # 2. if a SWHID gets here, it's very likely to be a mistake # (and we can remove this check if it turns out there is a # legitimate use for it). raise ValueError(f"SWHID used as context origin URL: {value}") @visit.validator def check_visit(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.SNAPSHOT, SwhidExtendedObjectType.RELEASE, SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'visit' context for " f"{self.target.object_type.name.lower()} object: {value}" ) if self.origin is None: raise ValueError("'origin' context must be set if 'visit' is.") if value <= 0: raise ValueError("Nonpositive visit id") @snapshot.validator def check_snapshot(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.RELEASE, SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'snapshot' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.SNAPSHOT, value) @release.validator def check_release(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.REVISION, SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'release' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.RELEASE, value) @revision.validator def check_revision(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'revision' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.REVISION, value) @path.validator def check_path(self, attribute, value): if value is None: return if self.target.object_type not in ( SwhidExtendedObjectType.DIRECTORY, SwhidExtendedObjectType.CONTENT, ): raise ValueError( f"Unexpected 'path' context for " f"{self.target.object_type.name.lower()} object: {value}" ) @directory.validator def check_directory(self, attribute, value): if value is None: return if self.target.object_type not in (SwhidExtendedObjectType.CONTENT,): raise ValueError( f"Unexpected 'directory' context for " f"{self.target.object_type.name.lower()} object: {value}" ) self._check_swhid(SwhidObjectType.DIRECTORY, value) def _check_swhid(self, expected_object_type, swhid): if isinstance(swhid, str): raise ValueError(f"Expected SWHID, got a string: {swhid}") if swhid.object_type != expected_object_type: raise ValueError( f"Expected SWHID type 
'{expected_object_type.name.lower()}', " f"got '{swhid.object_type.name.lower()}' in {swhid}" ) def to_dict(self): d = super().to_dict() context_keys = ( "origin", "visit", "snapshot", "release", "revision", "directory", "path", ) for context_key in context_keys: if d[context_key] is None: del d[context_key] return d @classmethod def from_dict(cls, d): d = { **d, "target": ExtendedSWHID.from_string(d["target"]), "authority": MetadataAuthority.from_dict(d["authority"]), "fetcher": MetadataFetcher.from_dict(d["fetcher"]), } swhid_keys = ("snapshot", "release", "revision", "directory") for swhid_key in swhid_keys: if d.get(swhid_key): d[swhid_key] = CoreSWHID.from_string(d[swhid_key]) return super().from_dict(d) def swhid(self) -> ExtendedSWHID: """Returns a SWHID representing this RawExtrinsicMetadata object.""" return ExtendedSWHID( object_type=SwhidExtendedObjectType.RAW_EXTRINSIC_METADATA, object_id=self.id, ) @attr.s(frozen=True, slots=True) class ExtID(HashableObject, BaseModel): object_type: Final = "extid" extid_type = attr.ib(type=str, validator=type_validator()) extid = attr.ib(type=bytes, validator=type_validator()) target = attr.ib(type=CoreSWHID, validator=type_validator()) extid_version = attr.ib(type=int, validator=type_validator(), default=0) id = attr.ib(type=Sha1Git, validator=type_validator(), default=b"", repr=hash_repr) @classmethod def from_dict(cls, d): return cls( extid=d["extid"], extid_type=d["extid_type"], target=CoreSWHID.from_string(d["target"]), extid_version=d.get("extid_version", 0), ) def _compute_hash_from_attributes(self) -> bytes: return _compute_hash_from_manifest(git_objects.extid_git_object(self)) diff --git a/swh/model/tests/test_identifiers.py b/swh/model/tests/test_identifiers.py index 5ad08d6..6214584 100644 --- a/swh/model/tests/test_identifiers.py +++ b/swh/model/tests/test_identifiers.py @@ -1,1289 +1,1187 @@ # Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import hashlib from typing import Dict import unittest import pytest from swh.model import git_objects, hashutil from swh.model.hashutil import hash_to_bytes as _x from swh.model.model import ( Content, Directory, ExtID, Origin, RawExtrinsicMetadata, Release, Revision, Snapshot, - Timestamp, TimestampWithTimezone, ) def remove_id(d: Dict) -> Dict: """Returns a (shallow) copy of a dict with the 'id' key removed.""" d = d.copy() if "id" in d: del d["id"] return d class UtilityFunctionsDateOffset(unittest.TestCase): def setUp(self): self.dates = { b"1448210036": {"seconds": 1448210036, "microseconds": 0,}, b"1448210036.002342": {"seconds": 1448210036, "microseconds": 2342,}, b"1448210036.12": {"seconds": 1448210036, "microseconds": 120000,}, } def test_format_date(self): for date_repr, date in self.dates.items(): self.assertEqual(git_objects.format_date(date), date_repr) content_example = { "status": "visible", "length": 5, "data": b"1984\n", "ctime": datetime.datetime(2015, 11, 22, 16, 33, 56, tzinfo=datetime.timezone.utc), } class ContentIdentifier(unittest.TestCase): def setUp(self): self.content_id = hashutil.MultiHash.from_data(content_example["data"]).digest() def test_content_identifier(self): self.assertEqual( Content.from_data(content_example["data"]).hashes(), self.content_id ) directory_example = { "id": _x("d7ed3d2c31d608823be58b1cbe57605310615231"), "entries": [ { "type": 
"file", "perms": 33188, "name": b"README", "target": _x("37ec8ea2110c0b7a32fbb0e872f6e7debbf95e21"), }, { "type": "file", "perms": 33188, "name": b"Rakefile", "target": _x("3bb0e8592a41ae3185ee32266c860714980dbed7"), }, { "type": "dir", "perms": 16384, "name": b"app", "target": _x("61e6e867f5d7ba3b40540869bc050b0c4fed9e95"), }, { "type": "file", "perms": 33188, "name": b"1.megabyte", "target": _x("7c2b2fbdd57d6765cdc9d84c2d7d333f11be7fb3"), }, { "type": "dir", "perms": 16384, "name": b"config", "target": _x("591dfe784a2e9ccc63aaba1cb68a765734310d98"), }, { "type": "dir", "perms": 16384, "name": b"public", "target": _x("9588bf4522c2b4648bfd1c61d175d1f88c1ad4a5"), }, { "type": "file", "perms": 33188, "name": b"development.sqlite3", "target": _x("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"), }, { "type": "dir", "perms": 16384, "name": b"doc", "target": _x("154705c6aa1c8ead8c99c7915373e3c44012057f"), }, { "type": "dir", "perms": 16384, "name": b"db", "target": _x("85f157bdc39356b7bc7de9d0099b4ced8b3b382c"), }, { "type": "dir", "perms": 16384, "name": b"log", "target": _x("5e3d3941c51cce73352dff89c805a304ba96fffe"), }, { "type": "dir", "perms": 16384, "name": b"script", "target": _x("1b278423caf176da3f3533592012502aa10f566c"), }, { "type": "dir", "perms": 16384, "name": b"test", "target": _x("035f0437c080bfd8711670b3e8677e686c69c763"), }, { "type": "dir", "perms": 16384, "name": b"vendor", "target": _x("7c0dc9ad978c1af3f9a4ce061e50f5918bd27138"), }, { "type": "rev", "perms": 57344, "name": b"will_paginate", "target": _x("3d531e169db92a16a9a8974f0ae6edf52e52659e"), }, # in git order, the dir named "order" should be between the files # named "order." and "order0" { "type": "dir", "perms": 16384, "name": b"order", "target": _x("62cdb7020ff920e5aa642c3d4066950dd1f01f4d"), }, { "type": "file", "perms": 16384, "name": b"order.", "target": _x("0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33"), }, { "type": "file", "perms": 16384, "name": b"order0", "target": _x("bbe960a25ea311d21d40669e93df2003ba9b90a2"), }, ], } class DirectoryIdentifier(unittest.TestCase): def setUp(self): self.directory = directory_example self.empty_directory = { "id": "4b825dc642cb6eb9a060e54bf8d69288fbee4904", "entries": [], } def test_dir_identifier(self): self.assertEqual(Directory.from_dict(self.directory).id, self.directory["id"]) self.assertEqual( Directory.from_dict(remove_id(self.directory)).id, self.directory["id"], ) def test_dir_identifier_entry_order(self): # Reverse order of entries, check the id is still the same. 
directory = {"entries": reversed(self.directory["entries"])} self.assertEqual( Directory.from_dict(remove_id(directory)).id, self.directory["id"], ) def test_dir_identifier_empty_directory(self): self.assertEqual( Directory.from_dict(remove_id(self.empty_directory)).id, _x(self.empty_directory["id"]), ) linus_tz = datetime.timezone(datetime.timedelta(minutes=-420)) revision_example = { "id": _x("bc0195aad0daa2ad5b0d76cce22b167bc3435590"), "directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "committer_date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, } class RevisionIdentifier(unittest.TestCase): def setUp(self): gpgsig = b"""\ -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.13 (Darwin) iQIcBAABAgAGBQJVJcYsAAoJEBiY3kIkQRNJVAUQAJ8/XQIfMqqC5oYeEFfHOPYZ L7qy46bXHVBa9Qd8zAJ2Dou3IbI2ZoF6/Et89K/UggOycMlt5FKV/9toWyuZv4Po L682wonoxX99qvVTHo6+wtnmYO7+G0f82h+qHMErxjP+I6gzRNBvRr+SfY7VlGdK wikMKOMWC5smrScSHITnOq1Ews5pe3N7qDYMzK0XVZmgDoaem4RSWMJs4My/qVLN e0CqYWq2A22GX7sXl6pjneJYQvcAXUX+CAzp24QnPSb+Q22Guj91TcxLFcHCTDdn qgqMsEyMiisoglwrCbO+D+1xq9mjN9tNFWP66SQ48mrrHYTBV5sz9eJyDfroJaLP CWgbDTgq6GzRMehHT3hXfYS5NNatjnhkNISXR7pnVP/obIi/vpWh5ll6Gd8q26z+ a/O41UzOaLTeNI365MWT4/cnXohVLRG7iVJbAbCxoQmEgsYMRc/pBAzWJtLfcB2G jdTswYL6+MUdL8sB9pZ82D+BP/YAdHe69CyTu1lk9RT2pYtI/kkfjHubXBCYEJSG +VGllBbYG6idQJpyrOYNRJyrDi9yvDJ2W+S0iQrlZrxzGBVGTB/y65S8C+2WTBcE lf1Qb5GDsQrZWgD+jtWTywOYHtCBwyCKSAXxSARMbNPeak9WPlcW/Jmu+fUcMe2x dg1KdHOa34shrKDaOVzW =od6m -----END PGP SIGNATURE-----""" self.revision = revision_example self.revision_none_metadata = { "id": _x("bc0195aad0daa2ad5b0d76cce22b167bc3435590"), "directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", }, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", }, "committer_date": datetime.datetime( 2015, 7, 12, 15, 10, 30, tzinfo=linus_tz ), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, "metadata": None, } self.synthetic_revision = { "id": _x("b2a7e1260492e344fab3cbf91bc13c91e05426fd"), "author": { "name": b"Software Heritage", "email": b"robot@softwareheritage.org", }, "date": {"timestamp": {"seconds": 1437047495}, "offset_bytes": b"+0000",}, "type": "tar", "committer": { "name": b"Software Heritage", "email": b"robot@softwareheritage.org", }, "committer_date": 1437047495, "synthetic": True, "parents": [], "message": b"synthetic revision message\n", "directory": _x("d11f00a6a0fea6055341d25584b5a96516c0d2b8"), "metadata": { "original_artifact": [ { "archive_type": "tar", "name": "gcc-5.2.0.tar.bz2", "sha1_git": "39d281aff934d44b439730057e55b055e206a586", "sha1": "fe3f5390949d47054b613edc36c557eb1d51c18e", "sha256": "5f835b04b5f7dd4f4d2dc96190ec1621b8d89f" "2dc6f638f9f8bc1b1014ba8cad", } ] }, } # cat commit.txt | git hash-object -t commit --stdin self.revision_with_extra_headers = { "id": _x("010d34f384fa99d047cdd5e2f41e56e5c2feee45"), "directory": 
_x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "name": b"Linus Torvalds", "email": b"torvalds@linux-foundation.org", "fullname": b"Linus Torvalds ", }, "committer_date": datetime.datetime( 2015, 7, 12, 15, 10, 30, tzinfo=linus_tz ), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, "extra_headers": ( (b"svn-repo-uuid", b"046f1af7-66c2-d61b-5410-ce57b7db7bff"), (b"svn-revision", b"10"), ), } self.revision_with_gpgsig = { "id": _x("44cc742a8ca17b9c279be4cc195a93a6ef7a320e"), "directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"), "parents": [ _x("689664ae944b4692724f13b709a4e4de28b54e57"), _x("c888305e1efbaa252d01b4e5e6b778f865a97514"), ], "author": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", "fullname": b"Jiang Xin ", }, "date": {"timestamp": 1428538899, "offset": 480,}, "committer": {"name": b"Jiang Xin", "email": b"worldhello.net@gmail.com",}, "committer_date": {"timestamp": 1428538899, "offset": 480,}, "extra_headers": ((b"gpgsig", gpgsig),), "message": b"""Merge branch 'master' of git://github.com/alexhenrie/git-po * 'master' of git://github.com/alexhenrie/git-po: l10n: ca.po: update translation """, "type": "git", "synthetic": False, } self.revision_no_message = { "id": _x("4cfc623c9238fa92c832beed000ce2d003fd8333"), "directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"), "parents": [ _x("689664ae944b4692724f13b709a4e4de28b54e57"), _x("c888305e1efbaa252d01b4e5e6b778f865a97514"), ], "author": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", "fullname": b"Jiang Xin ", }, "date": {"timestamp": 1428538899, "offset": 480,}, "committer": {"name": b"Jiang Xin", "email": b"worldhello.net@gmail.com",}, "committer_date": {"timestamp": 1428538899, "offset": 480,}, "message": None, "type": "git", "synthetic": False, } self.revision_empty_message = { "id": _x("7442cd78bd3b4966921d6a7f7447417b7acb15eb"), "directory": _x("b134f9b7dc434f593c0bab696345548b37de0558"), "parents": [ _x("689664ae944b4692724f13b709a4e4de28b54e57"), _x("c888305e1efbaa252d01b4e5e6b778f865a97514"), ], "author": { "name": b"Jiang Xin", "email": b"worldhello.net@gmail.com", "fullname": b"Jiang Xin ", }, "date": {"timestamp": 1428538899, "offset": 480,}, "committer": {"name": b"Jiang Xin", "email": b"worldhello.net@gmail.com",}, "committer_date": {"timestamp": 1428538899, "offset": 480,}, "message": b"", "type": "git", "synthetic": False, } self.revision_only_fullname = { "id": _x("010d34f384fa99d047cdd5e2f41e56e5c2feee45"), "directory": _x("85a74718d377195e1efd0843ba4f3260bad4fe07"), "parents": [_x("01e2d0627a9a6edb24c37db45db5ecb31e9de808")], "author": {"fullname": b"Linus Torvalds ",}, "date": datetime.datetime(2015, 7, 12, 15, 10, 30, tzinfo=linus_tz), "committer": { "fullname": b"Linus Torvalds ", }, "committer_date": datetime.datetime( 2015, 7, 12, 15, 10, 30, tzinfo=linus_tz ), "message": b"Linux 4.2-rc2\n", "type": "git", "synthetic": False, "extra_headers": ( (b"svn-repo-uuid", b"046f1af7-66c2-d61b-5410-ce57b7db7bff"), (b"svn-revision", b"10"), ), } def test_revision_identifier(self): self.assertEqual( Revision.from_dict(self.revision).id, self.revision["id"], ) self.assertEqual( Revision.from_dict(remove_id(self.revision)).id, self.revision["id"], ) def test_revision_identifier_none_metadata(self): 
self.assertEqual( Revision.from_dict(remove_id(self.revision_none_metadata)).id, self.revision_none_metadata["id"], ) def test_revision_identifier_synthetic(self): self.assertEqual( Revision.from_dict(remove_id(self.synthetic_revision)).id, self.synthetic_revision["id"], ) def test_revision_identifier_with_extra_headers(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_with_extra_headers)).id, self.revision_with_extra_headers["id"], ) def test_revision_identifier_with_gpgsig(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_with_gpgsig)).id, self.revision_with_gpgsig["id"], ) def test_revision_identifier_no_message(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_no_message)).id, self.revision_no_message["id"], ) def test_revision_identifier_empty_message(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_empty_message)).id, self.revision_empty_message["id"], ) def test_revision_identifier_only_fullname(self): self.assertEqual( Revision.from_dict(remove_id(self.revision_only_fullname)).id, self.revision_only_fullname["id"], ) release_example = { "id": _x("2b10839e32c4c476e9d94492756bb1a3e1ec4aa8"), "target": _x("741b2252a5e14d6c60a913c77a6099abe73a854a"), "target_type": "revision", "name": b"v2.6.14", "author": { "name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org", "fullname": b"Linus Torvalds ", }, "date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), "message": b"""\ Linux 2.6.14 release -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.1 (GNU/Linux) iD8DBQBDYWq6F3YsRnbiHLsRAmaeAJ9RCez0y8rOBbhSv344h86l/VVcugCeIhO1 wdLOnvj91G4wxYqrvThthbE= =7VeT -----END PGP SIGNATURE----- """, "synthetic": False, } class ReleaseIdentifier(unittest.TestCase): def setUp(self): linus_tz = datetime.timezone(datetime.timedelta(minutes=-420)) self.release = release_example self.release_no_author = { "id": _x("26791a8bcf0e6d33f43aef7682bdb555236d56de"), "target": _x("9ee1c939d1cb936b1f98e8d81aeffab57bae46ab"), "target_type": "revision", "name": b"v2.6.12", "message": b"""\ This is the final 2.6.12 release -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.2.4 (GNU/Linux) iD8DBQBCsykyF3YsRnbiHLsRAvPNAJ482tCZwuxp/bJRz7Q98MHlN83TpACdHr37 o6X/3T+vm8K3bf3driRr34c= =sBHn -----END PGP SIGNATURE----- """, "synthetic": False, } self.release_no_message = { "id": _x("b6f4f446715f7d9543ef54e41b62982f0db40045"), "target": _x("9ee1c939d1cb936b1f98e8d81aeffab57bae46ab"), "target_type": "revision", "name": b"v2.6.12", "author": {"name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org",}, "date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), "message": None, "synthetic": False, } self.release_empty_message = { "id": _x("71a0aea72444d396575dc25ac37fec87ee3c6492"), "target": _x("9ee1c939d1cb936b1f98e8d81aeffab57bae46ab"), "target_type": "revision", "name": b"v2.6.12", "author": {"name": b"Linus Torvalds", "email": b"torvalds@g5.osdl.org",}, "date": datetime.datetime(2005, 10, 27, 17, 2, 33, tzinfo=linus_tz), "message": b"", "synthetic": False, } self.release_negative_utc = { "id": _x("97c8d2573a001f88e72d75f596cf86b12b82fd01"), "name": b"20081029", "target": _x("54e9abca4c77421e2921f5f156c9fe4a9f7441c7"), "target_type": "revision", "date": {"timestamp": {"seconds": 1225281976}, "offset_bytes": b"-0000",}, "author": {"name": b"Otavio Salvador", "email": b"otavio@debian.org",}, "synthetic": False, "message": b"tagging version 20081029\n\nr56558\n", } self.release_newline_in_author = { "author": { "email": 
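# Sketch of why release_negative_utc above pins offset_bytes=b"-0000":
# "-0000" and "+0000" denote the same instant and the same zero offset, but
# they serialize to different bytes in the git object, so they must
# round-trip exactly and yield different ids.
from swh.model.model import TimestampWithTimezone

neg = TimestampWithTimezone.from_dict(
    {"timestamp": 1225281976, "offset_bytes": b"-0000"}
)
pos = TimestampWithTimezone.from_dict(
    {"timestamp": 1225281976, "offset_bytes": b"+0000"}
)
assert neg.offset_minutes() == pos.offset_minutes() == 0
assert neg != pos  # the raw offset bytes are part of the value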
b"esycat@gmail.com", "fullname": b"Eugene Janusov\n", "name": b"Eugene Janusov\n", }, "date": { "offset_bytes": b"+1000", "timestamp": {"microseconds": 0, "seconds": 1377480558,}, }, "id": _x("5c98f559d034162de22d3ebeb95433e6f8885231"), "message": b"Release of v0.3.2.", "name": b"0.3.2", "synthetic": False, "target": _x("c06aa3d93b78a2865c4935170030f8c2d7396fd3"), "target_type": "revision", } self.release_snapshot_target = dict(self.release) self.release_snapshot_target["target_type"] = "snapshot" self.release_snapshot_target["id"] = _x( "c29c3ddcc6769a04e54dd69d63a6fdcbc566f850" ) def test_release_identifier(self): self.assertEqual( Release.from_dict(self.release).id, self.release["id"], ) self.assertEqual( Release.from_dict(remove_id(self.release)).id, self.release["id"], ) def test_release_identifier_no_author(self): self.assertEqual( Release.from_dict(remove_id(self.release_no_author)).id, self.release_no_author["id"], ) def test_release_identifier_no_message(self): self.assertEqual( Release.from_dict(remove_id(self.release_no_message)).id, self.release_no_message["id"], ) def test_release_identifier_empty_message(self): self.assertEqual( Release.from_dict(remove_id(self.release_empty_message)).id, self.release_empty_message["id"], ) def test_release_identifier_negative_utc(self): self.assertEqual( Release.from_dict(remove_id(self.release_negative_utc)).id, self.release_negative_utc["id"], ) def test_release_identifier_newline_in_author(self): self.assertEqual( Release.from_dict(remove_id(self.release_newline_in_author)).id, self.release_newline_in_author["id"], ) def test_release_identifier_snapshot_target(self): self.assertEqual( Release.from_dict(self.release_snapshot_target).id, self.release_snapshot_target["id"], ) snapshot_example = { "id": _x("6e65b86363953b780d92b0a928f3e8fcdd10db36"), "branches": { b"directory": { "target": _x("1bd0e65f7d2ff14ae994de17a1e7fe65111dcad8"), "target_type": "directory", }, b"content": { "target": _x("fe95a46679d128ff167b7c55df5d02356c5a1ae1"), "target_type": "content", }, b"alias": {"target": b"revision", "target_type": "alias",}, b"revision": { "target": _x("aafb16d69fd30ff58afdd69036a26047f3aebdc6"), "target_type": "revision", }, b"release": { "target": _x("7045404f3d1c54e6473c71bbb716529fbad4be24"), "target_type": "release", }, b"snapshot": { "target": _x("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), "target_type": "snapshot", }, b"dangling": None, }, } class SnapshotIdentifier(unittest.TestCase): def setUp(self): super().setUp() self.empty = { "id": _x("1a8893e6a86f444e8be8e7bda6cb34fb1735a00e"), "branches": {}, } self.dangling_branch = { "id": _x("c84502e821eb21ed84e9fd3ec40973abc8b32353"), "branches": {b"HEAD": None,}, } self.unresolved = { "id": _x("84b4548ea486e4b0a7933fa541ff1503a0afe1e0"), "branches": {b"foo": {"target": b"bar", "target_type": "alias",},}, } self.all_types = snapshot_example def test_empty_snapshot(self): self.assertEqual( Snapshot.from_dict(remove_id(self.empty)).id, self.empty["id"], ) def test_dangling_branch(self): self.assertEqual( Snapshot.from_dict(remove_id(self.dangling_branch)).id, self.dangling_branch["id"], ) def test_unresolved(self): with self.assertRaisesRegex(ValueError, "b'foo' -> b'bar'"): Snapshot.from_dict(remove_id(self.unresolved)) def test_all_types(self): self.assertEqual( Snapshot.from_dict(remove_id(self.all_types)).id, self.all_types["id"], ) authority_example = { "type": "forge", "url": "https://forge.softwareheritage.org/", } fetcher_example = { "name": "swh-phabricator-metadata-fetcher", 
"version": "0.0.1", } metadata_example = { "target": "swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d", "discovery_date": datetime.datetime( 2021, 1, 25, 11, 27, 51, tzinfo=datetime.timezone.utc ), "authority": authority_example, "fetcher": fetcher_example, "format": "json", "metadata": b'{"foo": "bar"}', } class RawExtrinsicMetadataIdentifier(unittest.TestCase): def setUp(self): super().setUp() self.minimal = metadata_example self.maximal = { **self.minimal, "origin": "https://forge.softwareheritage.org/source/swh-model/", "visit": 42, "snapshot": "swh:1:snp:" + "00" * 20, "release": "swh:1:rel:" + "01" * 20, "revision": "swh:1:rev:" + "02" * 20, "path": b"/abc/def", "directory": "swh:1:dir:" + "03" * 20, } def test_minimal(self): git_object = ( b"raw_extrinsic_metadata 210\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date 1611574071\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(self.minimal) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.minimal).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.minimal).id, _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"), ) def test_maximal(self): git_object = ( b"raw_extrinsic_metadata 533\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date 1611574071\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"origin https://forge.softwareheritage.org/source/swh-model/\n" b"visit 42\n" b"snapshot swh:1:snp:0000000000000000000000000000000000000000\n" b"release swh:1:rel:0101010101010101010101010101010101010101\n" b"revision swh:1:rev:0202020202020202020202020202020202020202\n" b"path /abc/def\n" b"directory swh:1:dir:0303030303030303030303030303030303030303\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(self.maximal) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.maximal).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(self.maximal).id, _x("f96966e1093d15236a31fde07e47d5b1c9428049"), ) def test_nonascii_path(self): metadata = { **self.minimal, "path": b"/ab\nc/d\xf0\x9f\xa4\xb7e\x00f", } git_object = ( b"raw_extrinsic_metadata 231\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date 1611574071\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"path /ab\n" b" c/d\xf0\x9f\xa4\xb7e\x00f\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("7cc83fd1912176510c083f5df43f01b09af4b333"), ) def test_timezone_insensitive(self): """Checks the timezone of the datetime.datetime does not affect the hashed git_object.""" utc_plus_one = datetime.timezone(datetime.timedelta(hours=1)) metadata = { **self.minimal, "discovery_date": datetime.datetime( 2021, 1, 25, 12, 27, 51, tzinfo=utc_plus_one, ), } self.assertEqual( 
git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(self.minimal)
            ),
            git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(metadata)
            ),
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(self.minimal).id,
            RawExtrinsicMetadata.from_dict(metadata).id,
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(metadata).id,
            _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"),
        )

    def test_microsecond_insensitive(self):
        """Checks that the microseconds of the datetime.datetime do not
        affect the hashed manifest."""
        metadata = {
            **self.minimal,
            "discovery_date": datetime.datetime(
                2021, 1, 25, 11, 27, 51, 123456, tzinfo=datetime.timezone.utc,
            ),
        }

        self.assertEqual(
            git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(self.minimal)
            ),
            git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(metadata)
            ),
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(self.minimal).id,
            RawExtrinsicMetadata.from_dict(metadata).id,
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(metadata).id,
            _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"),
        )

    def test_noninteger_timezone(self):
        """Checks that the discovery_date is translated to UTC before
        truncating microseconds"""
        tz = datetime.timezone(datetime.timedelta(microseconds=-42))
        metadata = {
            **self.minimal,
            "discovery_date": datetime.datetime(
                2021, 1, 25, 11, 27, 50, 1_000_000 - 42, tzinfo=tz,
            ),
        }

        self.assertEqual(
            git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(self.minimal)
            ),
            git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(metadata)
            ),
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(self.minimal).id,
            RawExtrinsicMetadata.from_dict(metadata).id,
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(metadata).id,
            _x("5c13f20ba336e44549baf3d7b9305b027ec9f43d"),
        )

    def test_negative_timestamp(self):
        metadata = {
            **self.minimal,
            "discovery_date": datetime.datetime(
                1960, 1, 25, 11, 27, 51, tzinfo=datetime.timezone.utc,
            ),
        }

        git_object = (
            b"raw_extrinsic_metadata 210\0"
            b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
            b"discovery_date -313504329\n"
            b"authority forge https://forge.softwareheritage.org/\n"
            b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
            b"format json\n"
            b"\n"
            b'{"foo": "bar"}'
        )

        self.assertEqual(
            git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(metadata)
            ),
            git_object,
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(metadata).id,
            hashlib.sha1(git_object).digest(),
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(metadata).id,
            _x("895d0821a2991dd376ddc303424aceb7c68280f9"),
        )

    def test_epoch(self):
        metadata = {
            **self.minimal,
            "discovery_date": datetime.datetime(
                1970, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc,
            ),
        }

        git_object = (
            b"raw_extrinsic_metadata 201\0"
            b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
            b"discovery_date 0\n"
            b"authority forge https://forge.softwareheritage.org/\n"
            b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
            b"format json\n"
            b"\n"
            b'{"foo": "bar"}'
        )

        self.assertEqual(
            git_objects.raw_extrinsic_metadata_git_object(
                RawExtrinsicMetadata.from_dict(metadata)
            ),
            git_object,
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(metadata).id,
            hashlib.sha1(git_object).digest(),
        )
        self.assertEqual(
            RawExtrinsicMetadata.from_dict(metadata).id,
            _x("27a53df54ace35ebd910493cdc70b334d6b7cb88"),
        )

    def test_negative_epoch(self):
        metadata = {
            **self.minimal,
            "discovery_date": datetime.datetime(
                1969, 12, 31, 23, 59, 59, 1,
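# Sketch of the framing behind the git_object constants asserted in these
# tests: the manifest is b"raw_extrinsic_metadata <length>\0" followed by
# <length> bytes of body, i.e. the same "<type> <length>\0" convention git
# uses, hashed with SHA1. Recomputing test_minimal's constant:
import hashlib

body = (
    b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n"
    b"discovery_date 1611574071\n"
    b"authority forge https://forge.softwareheritage.org/\n"
    b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n"
    b"format json\n"
    b"\n"
    b'{"foo": "bar"}'
)
assert len(body) == 210  # the length embedded in the expected header
git_object = b"raw_extrinsic_metadata %d\x00%s" % (len(body), body)
assert hashlib.sha1(git_object).hexdigest() == (
    "5c13f20ba336e44549baf3d7b9305b027ec9f43d"
)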
tzinfo=datetime.timezone.utc, ), } git_object = ( b"raw_extrinsic_metadata 202\0" b"target swh:1:cnt:568aaf43d83b2c3df8067f3bedbb97d83260be6d\n" b"discovery_date -1\n" b"authority forge https://forge.softwareheritage.org/\n" b"fetcher swh-phabricator-metadata-fetcher 0.0.1\n" b"format json\n" b"\n" b'{"foo": "bar"}' ) self.assertEqual( git_objects.raw_extrinsic_metadata_git_object( RawExtrinsicMetadata.from_dict(metadata) ), git_object, ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, hashlib.sha1(git_object).digest(), ) self.assertEqual( RawExtrinsicMetadata.from_dict(metadata).id, _x("be7154a8fd49d87f81547ea634d1e2152907d089"), ) origin_example = { "url": "https://github.com/torvalds/linux", } class OriginIdentifier(unittest.TestCase): def test_content_identifier(self): self.assertEqual( Origin.from_dict(origin_example).id, _x("b63a575fe3faab7692c9f38fb09d4bb45651bb0f"), ) # Format: [ # ( # input1, # expected_output1, # ), # ( # input2, # expected_output2, # ), # ... # ] TS_DICTS = [ # with current input dict format (offset_bytes) ( {"timestamp": 12345, "offset_bytes": b"+0000"}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ( {"timestamp": 12345, "offset_bytes": b"-0000"}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"-0000", - "offset": 0, - "negative_utc": True, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"-0000",}, ), ( {"timestamp": 12345, "offset_bytes": b"+0200"}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0200", - "offset": 120, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0200",}, ), ( {"timestamp": 12345, "offset_bytes": b"-0200"}, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"-0200",}, + ), + ( + {"timestamp": 12345, "offset_bytes": b"--700"}, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"--700",}, + ), + ( + {"timestamp": 12345, "offset_bytes": b"1234567"}, { "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"-0200", - "offset": -120, - "negative_utc": False, + "offset_bytes": b"1234567", }, ), - # not working yet: - # ( - # {"timestamp": 12345, "offset_bytes": b"--700"}, - # { - # "timestamp": {"seconds": 12345, "microseconds": 0}, - # "offset_bytes": b"--700", - # "offset": 0, - # "negative_utc": False, - # }, - # ), - # ( - # {"timestamp": 12345, "offset_bytes": b"1234567"}, - # { - # "timestamp": {"seconds": 12345, "microseconds": 0}, - # "offset_bytes": b"1234567", - # "offset": 0, - # "negative_utc": False, - # }, - # ), # with old-style input dicts (numeric offset + optional negative_utc): ( {"timestamp": 12345, "offset": 0}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": False}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": False}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - 
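# Sketch of the discovery_date encoding that test_timezone_insensitive,
# test_microsecond_insensitive, test_negative_timestamp, test_epoch and
# test_negative_epoch above agree on: the datetime is reduced to whole POSIX
# seconds, rounding down (math.floor, not int(), so -0.999999 becomes -1).
# The helper name is ours, not swh.model's.
import datetime
import math

def _manifest_seconds(dt: datetime.datetime) -> int:
    return math.floor(dt.timestamp())

utc = datetime.timezone.utc
assert _manifest_seconds(datetime.datetime(2021, 1, 25, 11, 27, 51, tzinfo=utc)) == 1611574071
assert _manifest_seconds(datetime.datetime(1970, 1, 1, tzinfo=utc)) == 0
assert _manifest_seconds(datetime.datetime(1969, 12, 31, 23, 59, 59, 1, tzinfo=utc)) == -1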
"offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": None}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ( {"timestamp": {"seconds": 12345}, "offset": 0, "negative_utc": None}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ( { "timestamp": {"seconds": 12345, "microseconds": 0}, "offset": 0, "negative_utc": None, }, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ( { "timestamp": {"seconds": 12345, "microseconds": 100}, "offset": 0, "negative_utc": None, }, { "timestamp": {"seconds": 12345, "microseconds": 100}, "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, }, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": True}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"-0000", - "offset": 0, - "negative_utc": True, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"-0000",}, ), ( {"timestamp": 12345, "offset": 0, "negative_utc": None}, - { - "timestamp": {"seconds": 12345, "microseconds": 0}, - "offset_bytes": b"+0000", - "offset": 0, - "negative_utc": False, - }, + {"timestamp": {"seconds": 12345, "microseconds": 0}, "offset_bytes": b"+0000",}, ), ] @pytest.mark.parametrize("dict_input,expected", TS_DICTS) def test_normalize_timestamp_dict(dict_input, expected): assert TimestampWithTimezone.from_dict(dict_input).to_dict() == expected -def test_timestampwithtimezone_init(): - ts = Timestamp(seconds=1234567, microseconds=0) - tstz = TimestampWithTimezone( - timestamp=ts, offset=120, negative_utc=False, offset_bytes=b"+0200" - ) - assert tstz.timestamp == ts - assert tstz.offset == 120 - assert tstz.negative_utc is False - assert tstz.offset_bytes == b"+0200" - - assert tstz == TimestampWithTimezone(timestamp=ts, offset=120, negative_utc=False) - assert tstz == TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0200") - - assert tstz != TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0100") - - tstz = TimestampWithTimezone( - timestamp=ts, offset=0, negative_utc=True, offset_bytes=b"-0000" - ) - assert tstz.timestamp == ts - assert tstz.offset == 0 - assert tstz.negative_utc is True - assert tstz.offset_bytes == b"-0000" - - assert tstz == TimestampWithTimezone(timestamp=ts, offset=0, negative_utc=True) - assert tstz == TimestampWithTimezone(timestamp=ts, offset_bytes=b"-0000") - - assert tstz != TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0000") - - TS_DICTS_INVALID_TIMESTAMP = [ {"timestamp": 1.2, "offset": 0}, {"timestamp": "1", "offset": 0}, # these below should really also trigger a ValueError... 
# {"timestamp": {"seconds": "1"}, "offset": 0}, # {"timestamp": {"seconds": 1.2}, "offset": 0}, # {"timestamp": {"seconds": 1.2}, "offset": 0}, ] @pytest.mark.parametrize("dict_input", TS_DICTS_INVALID_TIMESTAMP) def test_normalize_timestamp_dict_invalid_timestamp(dict_input): with pytest.raises(ValueError, match="non-integer timestamp"): TimestampWithTimezone.from_dict(dict_input) UTC = datetime.timezone.utc TS_TIMEZONES = [ datetime.timezone.min, datetime.timezone(datetime.timedelta(hours=-1)), UTC, datetime.timezone(datetime.timedelta(minutes=+60)), datetime.timezone.max, ] TS_TZ_EXPECTED = [-1439, -60, 0, 60, 1439] TS_TZ_BYTES_EXPECTED = [b"-2359", b"-0100", b"+0000", b"+0100", b"+2359"] TS_DATETIMES = [ datetime.datetime(2020, 2, 27, 14, 39, 19, tzinfo=UTC), datetime.datetime(2120, 12, 31, 23, 59, 59, tzinfo=UTC), datetime.datetime(1610, 5, 14, 15, 43, 0, tzinfo=UTC), ] TS_DT_EXPECTED = [1582814359, 4765132799, -11348929020] @pytest.mark.parametrize("date, seconds", zip(TS_DATETIMES, TS_DT_EXPECTED)) @pytest.mark.parametrize( "tz, offset, offset_bytes", zip(TS_TIMEZONES, TS_TZ_EXPECTED, TS_TZ_BYTES_EXPECTED) ) @pytest.mark.parametrize("microsecond", [0, 1, 10, 100, 1000, 999999]) def test_normalize_timestamp_datetime( date, seconds, tz, offset, offset_bytes, microsecond ): date = date.astimezone(tz).replace(microsecond=microsecond) assert TimestampWithTimezone.from_dict(date).to_dict() == { "timestamp": {"seconds": seconds, "microseconds": microsecond}, "offset_bytes": offset_bytes, - "offset": offset, - "negative_utc": False, } def test_extid_identifier_bwcompat(): extid_dict = { "extid_type": "test-type", "extid": b"extid", "target": "swh:1:dir:" + "00" * 20, } assert ExtID.from_dict(extid_dict).id == _x( "b9295e1931c31e40a7e3e1e967decd1c89426455" ) assert ( ExtID.from_dict({**extid_dict, "extid_version": 0}).id == ExtID.from_dict(extid_dict).id ) assert ( ExtID.from_dict({**extid_dict, "extid_version": 1}).id != ExtID.from_dict(extid_dict).id ) diff --git a/swh/model/tests/test_model.py b/swh/model/tests/test_model.py index 8e19124..6d97e59 100644 --- a/swh/model/tests/test_model.py +++ b/swh/model/tests/test_model.py @@ -1,1479 +1,1479 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import collections import copy import datetime import hashlib from typing import Any, List, Optional, Tuple, Union import attr from attrs_strict import AttributeTypeError import dateutil from hypothesis import given from hypothesis.strategies import binary import pytest from swh.model.collections import ImmutableDict from swh.model.from_disk import DentryPerms import swh.model.git_objects from swh.model.hashutil import MultiHash, hash_to_bytes import swh.model.hypothesis_strategies as strategies import swh.model.model from swh.model.model import ( BaseModel, Content, Directory, DirectoryEntry, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MissingData, Origin, OriginVisit, OriginVisitStatus, Person, RawExtrinsicMetadata, Release, Revision, SkippedContent, Snapshot, TargetType, Timestamp, TimestampWithTimezone, type_validator, ) import swh.model.swhids from swh.model.swhids import CoreSWHID, ExtendedSWHID, ObjectType from swh.model.tests.swh_model_data import TEST_OBJECTS from swh.model.tests.test_identifiers import ( TS_DATETIMES, TS_TIMEZONES, directory_example, metadata_example, 
release_example, revision_example, snapshot_example, ) EXAMPLE_HASH = hash_to_bytes("94a9ed024d3859793618152ea559a168bbcbb5e2") @given(strategies.objects()) def test_todict_inverse_fromdict(objtype_and_obj): (obj_type, obj) = objtype_and_obj if obj_type in ("origin", "origin_visit"): return obj_as_dict = obj.to_dict() obj_as_dict_copy = copy.deepcopy(obj_as_dict) # Check the composition of to_dict and from_dict is the identity assert obj == type(obj).from_dict(obj_as_dict) # Check from_dict() does not change the input dict assert obj_as_dict == obj_as_dict_copy # Check the composition of from_dict and to_dict is the identity assert obj_as_dict == type(obj).from_dict(obj_as_dict).to_dict() @given(strategies.objects()) def test_repr(objtype_and_obj): """Checks every model object has a working repr(), and that it can be eval()uated (so that printed objects can be copy-pasted to write test cases.)""" (obj_type, obj) = objtype_and_obj r = repr(obj) env = { "tzutc": lambda: datetime.timezone.utc, "tzfile": dateutil.tz.tzfile, "hash_to_bytes": hash_to_bytes, **swh.model.swhids.__dict__, **swh.model.model.__dict__, } assert eval(r, env) == obj @attr.s class Cls1: pass @attr.s class Cls2(Cls1): pass _custom_namedtuple = collections.namedtuple("_custom_namedtuple", "a b") class _custom_tuple(tuple): pass # List of (type, valid_values, invalid_values) _TYPE_VALIDATOR_PARAMETERS: List[Tuple[Any, List[Any], List[Any]]] = [ # base types: ( bool, [True, False], [-1, 0, 1, 42, 1000, None, "123", 0.0, (), ("foo",), ImmutableDict()], ), ( int, [-1, 0, 1, 42, 1000, DentryPerms.directory, True, False], [None, "123", 0.0, (), ImmutableDict()], ), ( float, [-1.0, 0.0, 1.0, float("infinity"), float("NaN")], [True, False, None, 1, "1.2", (), ImmutableDict()], ), ( bytes, [b"", b"123"], [None, bytearray(b"\x12\x34"), "123", 0, 123, (), (1, 2, 3), ImmutableDict()], ), (str, ["", "123"], [None, b"123", b"", 0, (), (1, 2, 3), ImmutableDict()]), (None, [None], [b"", b"123", "", "foo", 0, 123, ImmutableDict(), float("NaN")]), # unions: ( Optional[int], [None, -1, 0, 1, 42, 1000, DentryPerms.directory], ["123", 0.0, (), ImmutableDict()], ), ( Optional[bytes], [None, b"", b"123"], ["123", "", 0, (), (1, 2, 3), ImmutableDict()], ), ( Union[str, bytes], ["", "123", b"123", b""], [None, 0, (), (1, 2, 3), ImmutableDict()], ), ( Union[str, bytes, None], ["", "123", b"123", b"", None], [0, (), (1, 2, 3), ImmutableDict()], ), # tuples ( Tuple[str, str], [("foo", "bar"), ("", ""), _custom_namedtuple("", ""), _custom_tuple(("", ""))], [("foo",), ("foo", "bar", "baz"), ("foo", 42), (42, "foo")], ), ( Tuple[str, ...], [ ("foo",), ("foo", "bar"), ("", ""), ("foo", "bar", "baz"), _custom_namedtuple("", ""), _custom_tuple(("", "")), ], [("foo", 42), (42, "foo")], ), # composite generic: ( Tuple[Union[str, int], Union[str, int]], [("foo", "foo"), ("foo", 42), (42, "foo"), (42, 42)], [("foo", b"bar"), (b"bar", "foo")], ), ( Union[Tuple[str, str], Tuple[int, int]], [("foo", "foo"), (42, 42)], [("foo", b"bar"), (b"bar", "foo"), ("foo", 42), (42, "foo")], ), ( Tuple[Tuple[bytes, bytes], ...], [(), ((b"foo", b"bar"),), ((b"foo", b"bar"), (b"baz", b"qux"))], [((b"foo", "bar"),), ((b"foo", b"bar"), ("baz", b"qux"))], ), # standard types: ( datetime.datetime, [ datetime.datetime(2021, 12, 15, 12, 59, 27), datetime.datetime(2021, 12, 15, 12, 59, 27, tzinfo=datetime.timezone.utc), ], [None, 123], ), # ImmutableDict ( ImmutableDict[str, int], [ ImmutableDict(), ImmutableDict({"foo": 42}), ImmutableDict({"foo": 42, "bar": 123}), ], 
[ImmutableDict({"foo": "bar"}), ImmutableDict({42: 123})], ), # Any: (object, [-1, 0, 1, 42, 1000, None, "123", 0.0, (), ImmutableDict()], [],), (Any, [-1, 0, 1, 42, 1000, None, "123", 0.0, (), ImmutableDict()], [],), ( ImmutableDict[Any, int], [ ImmutableDict(), ImmutableDict({"foo": 42}), ImmutableDict({"foo": 42, "bar": 123}), ImmutableDict({42: 123}), ], [ImmutableDict({"foo": "bar"})], ), ( ImmutableDict[str, Any], [ ImmutableDict(), ImmutableDict({"foo": 42}), ImmutableDict({"foo": "bar"}), ImmutableDict({"foo": 42, "bar": 123}), ], [ImmutableDict({42: 123})], ), # attr objects: ( Timestamp, [Timestamp(seconds=123, microseconds=0),], [None, "2021-09-28T11:27:59", 123], ), (Cls1, [Cls1(), Cls2()], [None, b"abcd"],), # enums: ( TargetType, [TargetType.CONTENT, TargetType.ALIAS], ["content", "alias", 123, None], ), ] @pytest.mark.parametrize( "type_,value", [ pytest.param(type_, value, id=f"type={type_}, value={value}") for (type_, values, _) in _TYPE_VALIDATOR_PARAMETERS for value in values ], ) def test_type_validator_valid(type_, value): type_validator()(None, attr.ib(type=type_), value) @pytest.mark.parametrize( "type_,value", [ pytest.param(type_, value, id=f"type={type_}, value={value}") for (type_, _, values) in _TYPE_VALIDATOR_PARAMETERS for value in values ], ) def test_type_validator_invalid(type_, value): with pytest.raises(AttributeTypeError): type_validator()(None, attr.ib(type=type_), value) @pytest.mark.parametrize("object_type, objects", TEST_OBJECTS.items()) def test_swh_model_todict_fromdict(object_type, objects): """checks model objects in swh_model_data are in correct shape""" assert objects for obj in objects: # Check the composition of from_dict and to_dict is the identity obj_as_dict = obj.to_dict() assert obj == type(obj).from_dict(obj_as_dict) assert obj_as_dict == type(obj).from_dict(obj_as_dict).to_dict() def test_unique_key(): url = "http://example.org/" date = datetime.datetime.now(tz=datetime.timezone.utc) id_ = b"42" * 10 assert Origin(url=url).unique_key() == {"url": url} assert OriginVisit(origin=url, date=date, type="git").unique_key() == { "origin": url, "date": str(date), } assert OriginVisitStatus( origin=url, visit=42, date=date, status="created", snapshot=None ).unique_key() == {"origin": url, "visit": "42", "date": str(date),} assert Snapshot.from_dict({**snapshot_example, "id": id_}).unique_key() == id_ assert Release.from_dict({**release_example, "id": id_}).unique_key() == id_ assert Revision.from_dict({**revision_example, "id": id_}).unique_key() == id_ assert Directory.from_dict({**directory_example, "id": id_}).unique_key() == id_ assert ( RawExtrinsicMetadata.from_dict({**metadata_example, "id": id_}).unique_key() == id_ ) cont = Content.from_data(b"foo") assert cont.unique_key().hex() == "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33" kwargs = { **cont.to_dict(), "reason": "foo", "status": "absent", } del kwargs["data"] assert SkippedContent(**kwargs).unique_key() == cont.hashes() # Anonymization @given(strategies.objects()) def test_anonymization(objtype_and_obj): (obj_type, obj) = objtype_and_obj def check_person(p): if p is not None: assert p.name is None assert p.email is None assert len(p.fullname) == 32 anon_obj = obj.anonymize() if obj_type == "person": assert anon_obj is not None check_person(anon_obj) elif obj_type == "release": assert anon_obj is not None check_person(anon_obj.author) elif obj_type == "revision": assert anon_obj is not None check_person(anon_obj.author) check_person(anon_obj.committer) else: assert anon_obj is None # 
Origin, OriginVisit, OriginVisitStatus @given(strategies.origins()) def test_todict_origins(origin): obj = origin.to_dict() assert "type" not in obj assert type(origin)(url=origin.url) == type(origin).from_dict(obj) @given(strategies.origin_visits()) def test_todict_origin_visits(origin_visit): obj = origin_visit.to_dict() assert origin_visit == type(origin_visit).from_dict(obj) def test_origin_visit_naive_datetime(): with pytest.raises(ValueError, match="must be a timezone-aware datetime"): OriginVisit( origin="http://foo/", date=datetime.datetime.now(), type="git", ) @given(strategies.origin_visit_statuses()) def test_todict_origin_visit_statuses(origin_visit_status): obj = origin_visit_status.to_dict() assert origin_visit_status == type(origin_visit_status).from_dict(obj) def test_origin_visit_status_naive_datetime(): with pytest.raises(ValueError, match="must be a timezone-aware datetime"): OriginVisitStatus( origin="http://foo/", visit=42, date=datetime.datetime.now(), status="ongoing", snapshot=None, ) # Timestamp @given(strategies.timestamps()) def test_timestamps_strategy(timestamp): attr.validate(timestamp) def test_timestamp_seconds(): attr.validate(Timestamp(seconds=0, microseconds=0)) with pytest.raises(AttributeTypeError): Timestamp(seconds="0", microseconds=0) attr.validate(Timestamp(seconds=2 ** 63 - 1, microseconds=0)) with pytest.raises(ValueError): Timestamp(seconds=2 ** 63, microseconds=0) attr.validate(Timestamp(seconds=-(2 ** 63), microseconds=0)) with pytest.raises(ValueError): Timestamp(seconds=-(2 ** 63) - 1, microseconds=0) def test_timestamp_microseconds(): attr.validate(Timestamp(seconds=0, microseconds=0)) with pytest.raises(AttributeTypeError): Timestamp(seconds=0, microseconds="0") attr.validate(Timestamp(seconds=0, microseconds=10 ** 6 - 1)) with pytest.raises(ValueError): Timestamp(seconds=0, microseconds=10 ** 6) with pytest.raises(ValueError): Timestamp(seconds=0, microseconds=-1) def test_timestamp_from_dict(): assert Timestamp.from_dict({"seconds": 10, "microseconds": 5}) with pytest.raises(AttributeTypeError): Timestamp.from_dict({"seconds": "10", "microseconds": 5}) with pytest.raises(AttributeTypeError): Timestamp.from_dict({"seconds": 10, "microseconds": "5"}) with pytest.raises(ValueError): Timestamp.from_dict({"seconds": 0, "microseconds": -1}) Timestamp.from_dict({"seconds": 0, "microseconds": 10 ** 6 - 1}) with pytest.raises(ValueError): Timestamp.from_dict({"seconds": 0, "microseconds": 10 ** 6}) # TimestampWithTimezone def test_timestampwithtimezone(): ts = Timestamp(seconds=0, microseconds=0) tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0000") attr.validate(tstz) - assert tstz.offset == 0 + assert tstz.offset_minutes() == 0 assert tstz.offset_bytes == b"+0000" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0010") attr.validate(tstz) - assert tstz.offset == 10 + assert tstz.offset_minutes() == 10 assert tstz.offset_bytes == b"+0010" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"-0010") attr.validate(tstz) - assert tstz.offset == -10 + assert tstz.offset_minutes() == -10 assert tstz.offset_bytes == b"-0010" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"-0000") attr.validate(tstz) - assert tstz.offset == 0 + assert tstz.offset_minutes() == 0 assert tstz.offset_bytes == b"-0000" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"-1030") attr.validate(tstz) - assert tstz.offset == -630 + assert tstz.offset_minutes() == -630 assert tstz.offset_bytes == b"-1030" tstz = 
TimestampWithTimezone(timestamp=ts, offset_bytes=b"+1320")
    attr.validate(tstz)
-    assert tstz.offset == 800
+    assert tstz.offset_minutes() == 800
    assert tstz.offset_bytes == b"+1320"

    tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+200")
    attr.validate(tstz)
-    assert tstz.offset == 120
+    assert tstz.offset_minutes() == 120
    assert tstz.offset_bytes == b"+200"

    tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+02")
    attr.validate(tstz)
-    assert tstz.offset == 120
+    assert tstz.offset_minutes() == 120
    assert tstz.offset_bytes == b"+02"

    tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+2000000000")
    attr.validate(tstz)
-    assert tstz.offset == 0
+    assert tstz.offset_minutes() == 0
    assert tstz.offset_bytes == b"+2000000000"

    with pytest.raises(AttributeTypeError):
        TimestampWithTimezone(timestamp=datetime.datetime.now(), offset_bytes=b"+0000")

-    with pytest.raises((AttributeTypeError, AttributeError, TypeError)):
+    with pytest.raises((AttributeTypeError, TypeError)):
        TimestampWithTimezone(timestamp=ts, offset_bytes=0)


def test_timestampwithtimezone_from_datetime():
    # Typical case
    tz = datetime.timezone(datetime.timedelta(minutes=+60))
    date = datetime.datetime(2020, 2, 27, 14, 39, 19, tzinfo=tz)
    tstz = TimestampWithTimezone.from_datetime(date)
    assert tstz == TimestampWithTimezone(
        timestamp=Timestamp(seconds=1582810759, microseconds=0,), offset_bytes=b"+0100"
    )

    # Typical case (close to epoch)
    tz = datetime.timezone(datetime.timedelta(minutes=+60))
    date = datetime.datetime(1970, 1, 1, 1, 0, 5, tzinfo=tz)
    tstz = TimestampWithTimezone.from_datetime(date)
    assert tstz == TimestampWithTimezone(
        timestamp=Timestamp(seconds=5, microseconds=0,), offset_bytes=b"+0100"
    )

    # non-integer number of seconds before UNIX epoch
    date = datetime.datetime(
        1969, 12, 31, 23, 59, 59, 100000, tzinfo=datetime.timezone.utc
    )
    tstz = TimestampWithTimezone.from_datetime(date)
    assert tstz == TimestampWithTimezone(
        timestamp=Timestamp(seconds=-1, microseconds=100000,), offset_bytes=b"+0000"
    )

    # non-integer number of seconds in both the timestamp and the offset
    tz = datetime.timezone(datetime.timedelta(microseconds=-600000))
    date = datetime.datetime(1969, 12, 31, 23, 59, 59, 600000, tzinfo=tz)
    tstz = TimestampWithTimezone.from_datetime(date)
    assert tstz == TimestampWithTimezone(
        timestamp=Timestamp(seconds=0, microseconds=200000,), offset_bytes=b"+0000"
    )

    # timezone offset with non-integer number of seconds, for dates before epoch
    # we round down to the previous second, so it should be the same as
    # 1969-12-31T23:59:59.100000Z
    tz = datetime.timezone(datetime.timedelta(microseconds=900000))
    date = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tz)
    tstz = TimestampWithTimezone.from_datetime(date)
    assert tstz == TimestampWithTimezone(
        timestamp=Timestamp(seconds=-1, microseconds=100000,), offset_bytes=b"+0000"
    )


def test_timestampwithtimezone_from_naive_datetime():
    date = datetime.datetime(2020, 2, 27, 14, 39, 19)

    with pytest.raises(ValueError, match="datetime without timezone"):
        TimestampWithTimezone.from_datetime(date)


def test_timestampwithtimezone_from_iso8601():
    date = "2020-02-27 14:39:19.123456+0100"

    tstz = TimestampWithTimezone.from_iso8601(date)

    assert tstz == TimestampWithTimezone(
        timestamp=Timestamp(seconds=1582810759, microseconds=123456,),
        offset_bytes=b"+0100",
    )


def test_timestampwithtimezone_from_iso8601_negative_utc():
    date = "2020-02-27 13:39:19-0000"

    tstz = TimestampWithTimezone.from_iso8601(date)

    assert tstz == TimestampWithTimezone(
        timestamp=Timestamp(seconds=1582810759, microseconds=0,),
offset_bytes=b"-0000" ) @pytest.mark.parametrize("date", TS_DATETIMES) @pytest.mark.parametrize("tz", TS_TIMEZONES) @pytest.mark.parametrize("microsecond", [0, 1, 10, 100, 1000, 999999]) def test_timestampwithtimezone_to_datetime(date, tz, microsecond): date = date.replace(tzinfo=tz, microsecond=microsecond) tstz = TimestampWithTimezone.from_datetime(date) assert tstz.to_datetime() == date assert tstz.to_datetime().utcoffset() == date.utcoffset() def test_person_from_fullname(): """The author should have name, email and fullname filled. """ actual_person = Person.from_fullname(b"tony ") assert actual_person == Person( fullname=b"tony ", name=b"tony", email=b"ynot@dagobah", ) def test_person_from_fullname_no_email(): """The author and fullname should be the same as the input (author). """ actual_person = Person.from_fullname(b"tony") assert actual_person == Person(fullname=b"tony", name=b"tony", email=None,) def test_person_from_fullname_empty_person(): """Empty person has only its fullname filled with the empty byte-string. """ actual_person = Person.from_fullname(b"") assert actual_person == Person(fullname=b"", name=None, email=None,) def test_git_author_line_to_author(): # edge case out of the way with pytest.raises(TypeError): Person.from_fullname(None) tests = { b"a ": Person(name=b"a", email=b"b@c.com", fullname=b"a ",), b"": Person( name=None, email=b"foo@bar.com", fullname=b"", ), b"malformed ': Person( name=b"malformed", email=b'"', ), b"trailing ": Person( name=b"trailing", email=b"sp@c.e", fullname=b"trailing ", ), b"no": Person(name=b"no", email=b"sp@c.e", fullname=b"no",), b" more ": Person( name=b"more", email=b"sp@c.es", fullname=b" more ", ), b" <>": Person(name=None, email=None, fullname=b" <>",), } for person in sorted(tests): expected_person = tests[person] assert expected_person == Person.from_fullname(person) # Content def test_content_get_hash(): hashes = dict(sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux") c = Content(length=42, status="visible", **hashes) for (hash_name, hash_) in hashes.items(): assert c.get_hash(hash_name) == hash_ def test_content_hashes(): hashes = dict(sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux") c = Content(length=42, status="visible", **hashes) assert c.hashes() == hashes def test_content_data(): c = Content( length=42, status="visible", data=b"foo", sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux", ) assert c.with_data() == c def test_content_data_missing(): c = Content( length=42, status="visible", sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux", ) with pytest.raises(MissingData): c.with_data() @given(strategies.present_contents_d()) def test_content_from_dict(content_d): c = Content.from_data(**content_d) assert c assert c.ctime == content_d["ctime"] content_d2 = c.to_dict() c2 = Content.from_dict(content_d2) assert c2.ctime == c.ctime def test_content_from_dict_str_ctime(): # test with ctime as a string n = datetime.datetime(2020, 5, 6, 12, 34, tzinfo=datetime.timezone.utc) content_d = { "ctime": n.isoformat(), "data": b"", "length": 0, "sha1": b"\x00", "sha256": b"\x00", "sha1_git": b"\x00", "blake2s256": b"\x00", } c = Content.from_dict(content_d) assert c.ctime == n def test_content_from_dict_str_naive_ctime(): # test with ctime as a string n = datetime.datetime(2020, 5, 6, 12, 34) content_d = { "ctime": n.isoformat(), "data": b"", "length": 0, "sha1": b"\x00", "sha256": b"\x00", "sha1_git": b"\x00", "blake2s256": b"\x00", } with pytest.raises(ValueError, match="must be a 
timezone-aware datetime."): Content.from_dict(content_d) @given(binary(max_size=4096)) def test_content_from_data(data): c = Content.from_data(data) assert c.data == data assert c.length == len(data) assert c.status == "visible" for key, value in MultiHash.from_data(data).digest().items(): assert getattr(c, key) == value @given(binary(max_size=4096)) def test_hidden_content_from_data(data): c = Content.from_data(data, status="hidden") assert c.data == data assert c.length == len(data) assert c.status == "hidden" for key, value in MultiHash.from_data(data).digest().items(): assert getattr(c, key) == value def test_content_naive_datetime(): c = Content.from_data(b"foo") with pytest.raises(ValueError, match="must be a timezone-aware datetime"): Content( **c.to_dict(), ctime=datetime.datetime.now(), ) # SkippedContent @given(binary(max_size=4096)) def test_skipped_content_from_data(data): c = SkippedContent.from_data(data, reason="reason") assert c.reason == "reason" assert c.length == len(data) assert c.status == "absent" for key, value in MultiHash.from_data(data).digest().items(): assert getattr(c, key) == value @given(strategies.skipped_contents_d()) def test_skipped_content_origin_is_str(skipped_content_d): assert SkippedContent.from_dict(skipped_content_d) skipped_content_d["origin"] = "http://path/to/origin" assert SkippedContent.from_dict(skipped_content_d) skipped_content_d["origin"] = Origin(url="http://path/to/origin") with pytest.raises(ValueError, match="origin"): SkippedContent.from_dict(skipped_content_d) def test_skipped_content_naive_datetime(): c = SkippedContent.from_data(b"foo", reason="reason") with pytest.raises(ValueError, match="must be a timezone-aware datetime"): SkippedContent( **c.to_dict(), ctime=datetime.datetime.now(), ) # Directory @given(strategies.directories().filter(lambda d: d.raw_manifest is None)) def test_directory_check(directory): directory.check() directory2 = attr.evolve(directory, id=b"\x00" * 20) with pytest.raises(ValueError, match="does not match recomputed hash"): directory2.check() directory2 = attr.evolve( directory, raw_manifest=swh.model.git_objects.directory_git_object(directory) ) with pytest.raises( ValueError, match="non-none raw_manifest attribute, but does not need it." 
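# Sketch of the check()/raw_manifest contract the Directory tests here pin
# down: id must be the SHA1 of the recomputed git manifest, unless a
# raw_manifest is carried along, in which case id must hash those raw bytes
# instead. Entry values are arbitrary placeholders.
import hashlib

import attr

from swh.model import git_objects
from swh.model.model import Directory, DirectoryEntry

d = Directory(
    entries=(
        DirectoryEntry(
            name=b"README", type="file", target=b"\x01" * 20, perms=0o100644
        ),
    )
)
assert d.id == hashlib.sha1(git_objects.directory_git_object(d)).digest()
d.check()

raw = b"corrupt original manifest"
d2 = attr.evolve(d, raw_manifest=raw, id=hashlib.sha1(raw).digest())
d2.check()  # passes: the id matches the raw manifest bytes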
): directory2.check() @given(strategies.directories().filter(lambda d: d.raw_manifest is None)) def test_directory_raw_manifest(directory): assert "raw_manifest" not in directory.to_dict() raw_manifest = b"foo" id_ = hashlib.new("sha1", raw_manifest).digest() directory2 = attr.evolve(directory, raw_manifest=raw_manifest) assert directory2.to_dict()["raw_manifest"] == raw_manifest with pytest.raises(ValueError, match="does not match recomputed hash"): directory2.check() directory2 = attr.evolve(directory, raw_manifest=raw_manifest, id=id_) assert directory2.id is not None assert directory2.id == id_ != directory.id assert directory2.to_dict()["raw_manifest"] == raw_manifest directory2.check() def test_directory_entry_name_validation(): with pytest.raises(ValueError, match="valid directory entry name."): DirectoryEntry(name=b"foo/", type="dir", target=b"\x00" * 20, perms=0), def test_directory_duplicate_entry_name(): entries = ( DirectoryEntry(name=b"foo", type="file", target=b"\x00" * 20, perms=0), DirectoryEntry(name=b"foo", type="dir", target=b"\x01" * 20, perms=1), ) with pytest.raises(ValueError, match="duplicated entry name"): Directory(entries=entries) entries = ( DirectoryEntry(name=b"foo", type="file", target=b"\x00" * 20, perms=0), DirectoryEntry(name=b"foo", type="file", target=b"\x00" * 20, perms=0), ) with pytest.raises(ValueError, match="duplicated entry name"): Directory(entries=entries) # Release @given(strategies.releases().filter(lambda rel: rel.raw_manifest is None)) def test_release_check(release): release.check() release2 = attr.evolve(release, id=b"\x00" * 20) with pytest.raises(ValueError, match="does not match recomputed hash"): release2.check() release2 = attr.evolve( release, raw_manifest=swh.model.git_objects.release_git_object(release) ) with pytest.raises( ValueError, match="non-none raw_manifest attribute, but does not need it." ): release2.check() @given(strategies.releases().filter(lambda rev: rev.raw_manifest is None)) def test_release_raw_manifest(release): raw_manifest = b"foo" id_ = hashlib.new("sha1", raw_manifest).digest() release2 = attr.evolve(release, raw_manifest=raw_manifest) assert release2.to_dict()["raw_manifest"] == raw_manifest with pytest.raises(ValueError, match="does not match recomputed hash"): release2.check() release2 = attr.evolve(release, raw_manifest=raw_manifest, id=id_) assert release2.id is not None assert release2.id == id_ != release.id assert release2.to_dict()["raw_manifest"] == raw_manifest release2.check() # Revision @given(strategies.revisions().filter(lambda rev: rev.raw_manifest is None)) def test_revision_check(revision): revision.check() revision2 = attr.evolve(revision, id=b"\x00" * 20) with pytest.raises(ValueError, match="does not match recomputed hash"): revision2.check() revision2 = attr.evolve( revision, raw_manifest=swh.model.git_objects.revision_git_object(revision) ) with pytest.raises( ValueError, match="non-none raw_manifest attribute, but does not need it." 
): revision2.check() @given(strategies.revisions().filter(lambda rev: rev.raw_manifest is None)) def test_revision_raw_manifest(revision): raw_manifest = b"foo" id_ = hashlib.new("sha1", raw_manifest).digest() revision2 = attr.evolve(revision, raw_manifest=raw_manifest) assert revision2.to_dict()["raw_manifest"] == raw_manifest with pytest.raises(ValueError, match="does not match recomputed hash"): revision2.check() revision2 = attr.evolve(revision, raw_manifest=raw_manifest, id=id_) assert revision2.id is not None assert revision2.id == id_ != revision.id assert revision2.to_dict()["raw_manifest"] == raw_manifest revision2.check() def test_revision_extra_headers_no_headers(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_model = Revision(**rev_dict) assert rev_model.metadata is None assert rev_model.extra_headers == () rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } rev_model = Revision(**rev_dict) assert rev_model.metadata == rev_dict["metadata"] assert rev_model.extra_headers == () def test_revision_extra_headers_with_headers(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\x00"), (b"header1", b"again"), ) rev_dict["extra_headers"] = extra_headers rev_model = Revision(**rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_in_metadata(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\x00"), (b"header1", b"again"), ) # check the bw-compat init hook does the job # ie. 
extra_headers are given in the metadata field rev_dict["metadata"]["extra_headers"] = extra_headers rev_model = Revision(**rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_as_lists(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_dict["metadata"] = {} extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\x00"), (b"header1", b"again"), ) # check Revision.extra_headers tuplify does the job rev_dict["extra_headers"] = [list(x) for x in extra_headers] rev_model = Revision(**rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_type_error(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) orig_rev_dict = attr.asdict(rev, recurse=False) orig_rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( ("header1", b"value1"), (b"header2", 42), ("header1", "again"), ) # check headers one at a time # if given as extra_header for extra_header in extra_headers: rev_dict = copy.deepcopy(orig_rev_dict) rev_dict["extra_headers"] = (extra_header,) with pytest.raises(AttributeTypeError): Revision(**rev_dict) # if given as metadata for extra_header in extra_headers: rev_dict = copy.deepcopy(orig_rev_dict) rev_dict["metadata"]["extra_headers"] = (extra_header,) with pytest.raises(AttributeTypeError): Revision(**rev_dict) def test_revision_extra_headers_from_dict(): rev_dict = revision_example.copy() rev_dict.pop("id") rev_model = Revision.from_dict(rev_dict) assert rev_model.metadata is None assert rev_model.extra_headers == () rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } rev_model = Revision.from_dict(rev_dict) assert rev_model.metadata == rev_dict["metadata"] assert rev_model.extra_headers == () extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\nmaybe\x00\xff"), (b"header1", b"again"), ) rev_dict["extra_headers"] = extra_headers rev_model = Revision.from_dict(rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_in_metadata_from_dict(): rev_dict = revision_example.copy() rev_dict.pop("id") rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\nmaybe\x00\xff"), (b"header1", b"again"), ) # check the bw-compat init hook does the job rev_dict["metadata"]["extra_headers"] = extra_headers rev_model = Revision.from_dict(rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_as_lists_from_dict(): rev_dict = revision_example.copy() rev_dict.pop("id") rev_model = Revision.from_dict(rev_dict) rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\nmaybe\x00\xff"), (b"header1", b"again"), ) # check Revision.extra_headers converter does the job rev_dict["extra_headers"] = [list(x) for x in extra_headers] rev_model = Revision.from_dict(rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers 
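# Sketch of the backward-compatibility behaviour exercised above: headers
# smuggled inside the legacy metadata dict are hoisted into
# Revision.extra_headers by from_dict(), and the remaining metadata keys are
# kept as-is.
from swh.model.model import Revision

rev_dict = dict(revision_example)
rev_dict.pop("id")
rev_dict["metadata"] = {
    "extra_headers": ((b"svn-revision", b"10"),),
    "kept": "as-is",
}
rev = Revision.from_dict(rev_dict)
assert rev.extra_headers == ((b"svn-revision", b"10"),)
assert "extra_headers" not in rev.metadata
assert rev.metadata["kept"] == "as-is"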

@given(strategies.objects(split_content=True))
def test_object_type(objtype_and_obj):
    obj_type, obj = objtype_and_obj
    assert obj_type == obj.object_type


def test_object_type_is_final():
    object_types = set()

    def check_final(cls):
        if hasattr(cls, "object_type"):
            assert cls.object_type not in object_types
            object_types.add(cls.object_type)
        if cls.__subclasses__():
            assert not hasattr(cls, "object_type")
        for subcls in cls.__subclasses__():
            check_final(subcls)

    check_final(BaseModel)


_metadata_authority = MetadataAuthority(
    type=MetadataAuthorityType.FORGE,
    url="https://forge.softwareheritage.org",
)
_metadata_fetcher = MetadataFetcher(name="test-fetcher", version="0.0.1")
_content_swhid = ExtendedSWHID.from_string(
    "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2"
)
_origin_url = "https://forge.softwareheritage.org/source/swh-model.git"
_origin_swhid = ExtendedSWHID.from_string(
    "swh:1:ori:94a9ed024d3859793618152ea559a168bbcbb5e2"
)
_dummy_qualifiers = {"origin": "https://example.com", "lines": "42"}
_common_metadata_fields = dict(
    discovery_date=datetime.datetime(
        2021, 1, 29, 13, 57, 9, tzinfo=datetime.timezone.utc
    ),
    authority=_metadata_authority,
    fetcher=_metadata_fetcher,
    format="json",
    metadata=b'{"origin": "https://example.com", "lines": "42"}',
)


def test_metadata_valid():
    """Checks valid RawExtrinsicMetadata objects don't raise an error."""

    # Simplest case
    RawExtrinsicMetadata(target=_origin_swhid, **_common_metadata_fields)

    # Object with an SWHID
    RawExtrinsicMetadata(target=_content_swhid, **_common_metadata_fields)


def test_metadata_to_dict():
    """Checks valid RawExtrinsicMetadata objects round-trip through
    to_dict()/from_dict()."""
    common_fields = {
        "authority": {"type": "forge", "url": "https://forge.softwareheritage.org"},
        "fetcher": {"name": "test-fetcher", "version": "0.0.1"},
        "discovery_date": _common_metadata_fields["discovery_date"],
        "format": "json",
        "metadata": b'{"origin": "https://example.com", "lines": "42"}',
    }

    m = RawExtrinsicMetadata(target=_origin_swhid, **_common_metadata_fields)
    assert m.to_dict() == {
        "target": str(_origin_swhid),
        "id": b"@j\xc9\x01\xbc\x1e#p*\xf3q9\xa7u\x97\x00\x14\x02xa",
        **common_fields,
    }
    assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m

    m = RawExtrinsicMetadata(target=_content_swhid, **_common_metadata_fields)
    assert m.to_dict() == {
        "target": "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
        "id": b"\xbc\xa3U\xddf\x19U\xc5\xd2\xd7\xdfK\xd7c\x1f\xa8\xfeh\x992",
        **common_fields,
    }
    assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m

    hash_hex = "6162" * 10
    hash_bin = b"ab" * 10
    m = RawExtrinsicMetadata(
        target=_content_swhid,
        **_common_metadata_fields,
        origin="https://example.org/",
        snapshot=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=hash_bin),
        release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=hash_bin),
        revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=hash_bin),
        path=b"/foo/bar",
        directory=CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=hash_bin),
    )
    assert m.to_dict() == {
        "target": "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
        "id": b"\x14l\xb0\x1f\xb9\xc0{)\xc7\x0f\xbd\xc0*,YZ\xf5C\xab\xfc",
        **common_fields,
        "origin": "https://example.org/",
        "snapshot": f"swh:1:snp:{hash_hex}",
        "release": f"swh:1:rel:{hash_hex}",
        "revision": f"swh:1:rev:{hash_hex}",
        "path": b"/foo/bar",
        "directory": f"swh:1:dir:{hash_hex}",
    }
    assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m
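
# Illustrative sketch, not part of the original suite: the "id" entry of
# to_dict() is computed intrinsically from the other fields, so the dict
# form round-trips to an equal object. The helper name is hypothetical.
def _example_metadata_roundtrip():
    m = RawExtrinsicMetadata(target=_origin_swhid, **_common_metadata_fields)
    assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m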

def test_metadata_invalid_target():
    """Checks various invalid values for the 'target' field."""

    # SWHID passed as string instead of SWHID
    with pytest.raises(ValueError, match="target must be.*ExtendedSWHID"):
        RawExtrinsicMetadata(
            target="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
            **_common_metadata_fields,
        )


def test_metadata_naive_datetime():
    with pytest.raises(ValueError, match="must be a timezone-aware datetime"):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            **{**_common_metadata_fields, "discovery_date": datetime.datetime.now()},
        )


def test_metadata_validate_context_origin():
    """Checks validation of RawExtrinsicMetadata.origin."""

    # Origins can't have an 'origin' context
    with pytest.raises(
        ValueError, match="Unexpected 'origin' context for origin object"
    ):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            origin=_origin_url,
            **_common_metadata_fields,
        )

    # but all other types can
    RawExtrinsicMetadata(
        target=_content_swhid,
        origin=_origin_url,
        **_common_metadata_fields,
    )

    # SWHIDs aren't valid origin URLs
    with pytest.raises(ValueError, match="SWHID used as context origin URL"):
        RawExtrinsicMetadata(
            target=_content_swhid,
            origin="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2",
            **_common_metadata_fields,
        )


def test_metadata_validate_context_visit():
    """Checks validation of RawExtrinsicMetadata.visit."""

    # Origins can't have a 'visit' context
    with pytest.raises(
        ValueError, match="Unexpected 'visit' context for origin object"
    ):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            visit=42,
            **_common_metadata_fields,
        )

    # but all other types can
    RawExtrinsicMetadata(
        target=_content_swhid,
        origin=_origin_url,
        visit=42,
        **_common_metadata_fields,
    )

    # Missing 'origin'
    with pytest.raises(ValueError, match="'origin' context must be set if 'visit' is"):
        RawExtrinsicMetadata(
            target=_content_swhid,
            visit=42,
            **_common_metadata_fields,
        )

    # visit id must be positive
    with pytest.raises(ValueError, match="Nonpositive visit id"):
        RawExtrinsicMetadata(
            target=_content_swhid,
            origin=_origin_url,
            visit=-42,
            **_common_metadata_fields,
        )
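
# The five context tests below follow one pattern per context key
# (snapshot, release, revision, path, directory): origin-targeted metadata
# rejects the key, content-targeted metadata accepts it, and SWHID-valued
# keys additionally require a CoreSWHID of the matching object type.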

def test_metadata_validate_context_snapshot():
    """Checks validation of RawExtrinsicMetadata.snapshot."""

    # Origins can't have a 'snapshot' context
    with pytest.raises(
        ValueError, match="Unexpected 'snapshot' context for origin object"
    ):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            snapshot=CoreSWHID(
                object_type=ObjectType.SNAPSHOT,
                object_id=EXAMPLE_HASH,
            ),
            **_common_metadata_fields,
        )

    # but content can
    RawExtrinsicMetadata(
        target=_content_swhid,
        snapshot=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=EXAMPLE_HASH),
        **_common_metadata_fields,
    )

    # SWHID type doesn't match the expected type of this context key
    with pytest.raises(
        ValueError, match="Expected SWHID type 'snapshot', got 'content'"
    ):
        RawExtrinsicMetadata(
            target=_content_swhid,
            snapshot=CoreSWHID(object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH),
            **_common_metadata_fields,
        )


def test_metadata_validate_context_release():
    """Checks validation of RawExtrinsicMetadata.release."""

    # Origins can't have a 'release' context
    with pytest.raises(
        ValueError, match="Unexpected 'release' context for origin object"
    ):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=EXAMPLE_HASH),
            **_common_metadata_fields,
        )

    # but content can
    RawExtrinsicMetadata(
        target=_content_swhid,
        release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=EXAMPLE_HASH),
        **_common_metadata_fields,
    )

    # SWHID type doesn't match the expected type of this context key
    with pytest.raises(
        ValueError, match="Expected SWHID type 'release', got 'content'"
    ):
        RawExtrinsicMetadata(
            target=_content_swhid,
            release=CoreSWHID(object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH),
            **_common_metadata_fields,
        )


def test_metadata_validate_context_revision():
    """Checks validation of RawExtrinsicMetadata.revision."""

    # Origins can't have a 'revision' context
    with pytest.raises(
        ValueError, match="Unexpected 'revision' context for origin object"
    ):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            revision=CoreSWHID(
                object_type=ObjectType.REVISION,
                object_id=EXAMPLE_HASH,
            ),
            **_common_metadata_fields,
        )

    # but content can
    RawExtrinsicMetadata(
        target=_content_swhid,
        revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=EXAMPLE_HASH),
        **_common_metadata_fields,
    )

    # SWHID type doesn't match the expected type of this context key
    with pytest.raises(
        ValueError, match="Expected SWHID type 'revision', got 'content'"
    ):
        RawExtrinsicMetadata(
            target=_content_swhid,
            revision=CoreSWHID(object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH),
            **_common_metadata_fields,
        )


def test_metadata_validate_context_path():
    """Checks validation of RawExtrinsicMetadata.path."""

    # Origins can't have a 'path' context
    with pytest.raises(
        ValueError, match="Unexpected 'path' context for origin object"
    ):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            path=b"/foo/bar",
            **_common_metadata_fields,
        )

    # but content can
    RawExtrinsicMetadata(
        target=_content_swhid,
        path=b"/foo/bar",
        **_common_metadata_fields,
    )


def test_metadata_validate_context_directory():
    """Checks validation of RawExtrinsicMetadata.directory."""

    # Origins can't have a 'directory' context
    with pytest.raises(
        ValueError, match="Unexpected 'directory' context for origin object"
    ):
        RawExtrinsicMetadata(
            target=_origin_swhid,
            directory=CoreSWHID(
                object_type=ObjectType.DIRECTORY,
                object_id=EXAMPLE_HASH,
            ),
            **_common_metadata_fields,
        )

    # but content can
    RawExtrinsicMetadata(
        target=_content_swhid,
        directory=CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=EXAMPLE_HASH),
        **_common_metadata_fields,
    )

    # SWHID type doesn't match the expected type of this context key
    with pytest.raises(
        ValueError, match="Expected SWHID type 'directory', got 'content'"
    ):
        RawExtrinsicMetadata(
            target=_content_swhid,
            directory=CoreSWHID(
                object_type=ObjectType.CONTENT,
                object_id=EXAMPLE_HASH,
            ),
            **_common_metadata_fields,
        )


def test_metadata_normalize_discovery_date():
    fields_copy = {**_common_metadata_fields}
    truncated_date = fields_copy.pop("discovery_date")
    assert truncated_date.microsecond == 0

    # Check for TypeError on disabled object type: we removed attrs_strict's
    # type_validator
    with pytest.raises(TypeError):
        RawExtrinsicMetadata(
            target=_content_swhid, discovery_date="not a datetime", **fields_copy
        )

    # Check for truncation to integral second
    date_with_us = truncated_date.replace(microsecond=42)
    md = RawExtrinsicMetadata(
        target=_content_swhid,
        discovery_date=date_with_us,
        **fields_copy,
    )

    assert md.discovery_date == truncated_date
    assert md.discovery_date.tzinfo == datetime.timezone.utc

    # Check that the timezone gets normalized. Timezones can be offset by a
    # non-integral number of seconds, so we need to handle that.
    timezone = datetime.timezone(offset=datetime.timedelta(hours=2))
    date_with_tz = truncated_date.astimezone(timezone)

    assert date_with_tz.tzinfo != datetime.timezone.utc

    md = RawExtrinsicMetadata(
        target=_content_swhid,
        discovery_date=date_with_tz,
        **fields_copy,
    )

    assert md.discovery_date == truncated_date
    assert md.discovery_date.tzinfo == datetime.timezone.utc
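
# Illustrative sketch, not part of the original suite: the normalization
# exercised by test_metadata_normalize_discovery_date above, namely that
# microseconds are truncated and the timezone is converted to UTC. The
# helper name is hypothetical.
def _example_discovery_date_normalization():
    aware = datetime.datetime(
        2021, 1, 29, 15, 57, 9, 999999,
        tzinfo=datetime.timezone(datetime.timedelta(hours=2)),
    )
    md = RawExtrinsicMetadata(
        target=_content_swhid,
        **{**_common_metadata_fields, "discovery_date": aware},
    )
    # 15:57:09.999999+02:00 -> 13:57:09 UTC
    assert md.discovery_date == datetime.datetime(
        2021, 1, 29, 13, 57, 9, tzinfo=datetime.timezone.utc
    )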