diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d0b93d3..1c95e3d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,43 +1,40 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.1.0 hooks: - id: trailing-whitespace - id: check-json - id: check-yaml - repo: https://gitlab.com/pycqa/flake8 rev: 4.0.1 hooks: - id: flake8 additional_dependencies: [flake8-bugbear==22.3.23] - repo: https://github.com/codespell-project/codespell rev: v2.1.0 hooks: - id: codespell name: Check source code spelling stages: [commit] - - id: codespell - name: Check commit message spelling - stages: [commit-msg] - repo: local hooks: - id: mypy name: mypy entry: mypy args: [swh] pass_filenames: false language: system types: [python] - repo: https://github.com/PyCQA/isort rev: 5.10.1 hooks: - id: isort - repo: https://github.com/python/black rev: 22.3.0 hooks: - id: black diff --git a/PKG-INFO b/PKG-INFO index e632705..8b07a86 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,46 +1,46 @@ Metadata-Version: 2.1 Name: swh.model -Version: 6.1.0 +Version: 6.2.0 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-model Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-model/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: cli Provides-Extra: testing-minimal Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-model ========= Implementation of the Data model of the Software Heritage project, used to archive source code artifacts. 
This module defines the notion of SoftWare Heritage persistent IDentifiers (SWHIDs) and provides tools to compute them: ```sh $ swh-identify fork.c kmod.c sched/deadline.c swh:1:cnt:2e391c754ae730bd2d8520c2ab497c403220c6e3 fork.c swh:1:cnt:0277d1216f80ae1adeed84a686ed34c9b2931fc2 kmod.c swh:1:cnt:57b939c81bce5d06fa587df8915f05affbe22b82 sched/deadline.c $ swh-identify --no-filename /usr/src/linux/kernel/ swh:1:dir:f9f858a48d663b3809c9e2f336412717496202ab ``` diff --git a/debian/changelog b/debian/changelog index 16bfc2b..d0b679d 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,1302 +1,1305 @@ -swh-model (6.1.0-1~swh1~bpo10+1) buster-swh; urgency=medium +swh-model (6.2.0-1~swh1) unstable-swh; urgency=medium - * Rebuild for buster-swh + * New upstream release 6.2.0 - (tagged by Valentin Lorentz + on 2022-04-27 18:36:27 +0200) + * Upstream changes: - v6.2.0 - * Add missing + `content_git_object` - * test/pre-commit maintenance - -- Software Heritage autobuilder (on jenkins-debian1) Mon, 11 Apr 2022 10:50:00 +0000 + -- Software Heritage autobuilder (on jenkins-debian1) Wed, 27 Apr 2022 16:40:00 +0000 swh-model (6.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 6.1.0 - (tagged by David Douard on 2022-04-11 12:43:26 +0200) * Upstream changes: - v6.1.0 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 11 Apr 2022 10:47:57 +0000 swh-model (6.0.1-1~swh1) unstable-swh; urgency=medium * New upstream release 6.0.1 - (tagged by Antoine R. Dumont (@ardumont) on 2022-04-01 10:21:34 +0200) * Upstream changes: - v6.0.1 - Fix documentation papercuts -- Software Heritage autobuilder (on jenkins-debian1) Fri, 01 Apr 2022 08:25:24 +0000 swh-model (6.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 6.0.0 - (tagged by Valentin Lorentz on 2022-03-23 11:02:28 +0100) * Upstream changes: - v6.0.0 - * Add objects with non-None raw_manifest to TEST_OBJECTS - * Exclude name and email attributes from People comparison - * Add support for None as author or committer of a Revision -- Software Heritage autobuilder (on jenkins-debian1) Wed, 23 Mar 2022 10:06:05 +0000 swh-model (5.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 5.0.0 - (tagged by Valentin Lorentz on 2022-03-16 10:33:49 +0100) * Upstream changes: - v5.0.0 - * Fix f-string - * Fix crash in check_entries. - * Add missing __slots__ to HashableObjectWithManifest - * docs: Explain we prefer dir SWHIDs over rev/rel. 
- * Remove 'offset' and 'negative_utc' arguments and make them optional - * Remove deprecated property 'TimestampWithTimezone.offset' -- Software Heritage autobuilder (on jenkins-debian1) Wed, 16 Mar 2022 09:38:26 +0000 swh-model (4.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.4.0 - (tagged by Valentin Lorentz on 2022-01-21 14:08:57 +0100) * Upstream changes: - v4.4.0 - * model: Add support for more edge cases in _parse_offset_bytes - * model: Add method 'TimestampWithTimezone.offset_minutes' -- Software Heritage autobuilder (on jenkins-debian1) Fri, 21 Jan 2022 13:12:25 +0000 swh-model (4.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.3.0 - (tagged by Valentin Lorentz on 2022-01-14 15:10:34 +0100) * Upstream changes: - v4.3.0 - * docs: Add anchors to important sections of persistent-identifiers.rst - * Fix TimestampWithTimezone.from_dict() on datetimes before 1970 with non- integer seconds - * TimestampWithTimezone: Make 'offset' and 'negative_utc' optional -- Software Heritage autobuilder (on jenkins-debian1) Fri, 14 Jan 2022 14:13:48 +0000 swh-model (4.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.2.0 - (tagged by Valentin Lorentz on 2022-01-10 15:56:36 +0100) * Upstream changes: - v4.2.0 - * git_objects: Use raw offset_bytes to format dates, and remove format_offset() -- Software Heritage autobuilder (on jenkins-debian1) Mon, 10 Jan 2022 14:59:18 +0000 swh-model (4.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.1.0 - (tagged by Nicolas Dandrimont on 2021-12-22 15:58:36 +0100) * Upstream changes: - Release swh.model v4.1.0 - Drop pre-3.6 blake2 compatibility, which hasn't been in use since - we've mandated python3.7 anyway. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 22 Dec 2021 15:01:40 +0000 swh-model (4.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 4.0.0 - (tagged by Valentin Lorentz on 2021-12-22 13:24:58 +0100) * Upstream changes: - v4.0.0 - * Add attribute TimestampWithTimezone.offset_bytes, to store raw Git offsets - * model: Add a check() method to model objects - * test_model: Fix compatibility with pytest-xdist - * docs: Update the data model description - * hypothesis_strategies: Generate only consistent directory entry permissions. 
- * model: Add a raw_manifest attribute -- Software Heritage autobuilder (on jenkins-debian1) Wed, 22 Dec 2021 12:28:54 +0000 swh-model (3.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 3.2.0 - (tagged by Valentin Lorentz on 2021-12-15 13:36:48 +0100) * Upstream changes: - v3.2.0 - * hypothesis_strategies: Ensure to generate valid directory entry name - * from_disk: Implement Directory.__contains__ -- Software Heritage autobuilder (on jenkins-debian1) Wed, 15 Dec 2021 12:39:37 +0000 swh-model (3.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 3.1.0 - (tagged by Antoine Lambert on 2021-12-06 19:35:40 +0100) * Upstream changes: - version 0.3.1 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 06 Dec 2021 18:51:48 +0000 swh-model (3.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 3.0.0 - (tagged by Valentin Lorentz on 2021-09-28 15:59:18 +0200) * Upstream changes: - v3.0.0 - * Add bazaar as supported revision type - * Move SWHID classes and functions from identifiers.py to swhids.py - * Refactor identifiers & model to make *_git_object() functions work on model classes instead of dicts - * Move manifest computation functions from identifiers.py to git_objects.py - * Remove identifier_to_bytes and identifier_to_hex - * Deprecate identifiers.py -- Software Heritage autobuilder (on jenkins-debian1) Tue, 28 Sep 2021 14:05:19 +0000 swh-model (2.9.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.9.0 - (tagged by Valentin Lorentz on 2021-09-16 14:21:44 +0200) * Upstream changes: - v2.9.0 - * HashableObject: Add type annotation for 'id' attribute -- Software Heritage autobuilder (on jenkins-debian1) Thu, 16 Sep 2021 12:24:48 +0000 swh-model (2.8.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.8.0 - (tagged by Antoine R. 
Dumont (@ardumont) on 2021-07-27 16:20:14 +0200) * Upstream changes: - v2.8.0 - Add a CVS revision type for use with the CVS loader -- Software Heritage autobuilder (on jenkins-debian1) Tue, 27 Jul 2021 14:26:10 +0000 swh-model (2.7.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.7.0 - (tagged by Nicolas Dandrimont on 2021-07-23 16:50:59 +0200) * Upstream changes: - Release swh.model 2.7.0 - Add versioning of ExtID objects -- Software Heritage autobuilder (on jenkins-debian1) Fri, 23 Jul 2021 14:53:44 +0000 swh-model (2.6.4-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.4 - (tagged by Daniele Serafini on 2021-06-29 13:42:54 +0100) * Upstream changes: - make deduplication optional when iterating over the merkle tree -- Software Heritage autobuilder (on jenkins-debian1) Fri, 02 Jul 2021 16:11:31 +0000 swh-model (2.6.3-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.3 - (tagged by Valentin Lorentz on 2021-06-25 16:13:53 +0200) * Upstream changes: - v2.6.3 - * hypothesis_strategies: Generate None metadata instead of {} -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Jun 2021 14:17:34 +0000 swh-model (2.6.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.2 - (tagged by Valentin Lorentz on 2021-06-25 12:40:45 +0200) * Upstream changes: - v2.6.2 - * from_disk: get swhid from Content/Directory objects - * hypothesis_strategies: Add raw_extrinsic_metadata() strategy -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Jun 2021 10:44:34 +0000 swh-model (2.6.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.1 - (tagged by Antoine Lambert on 2021-06-16 11:58:53 +0200) * Upstream changes: - version 2.6.1 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 16 Jun 2021 10:03:28 +0000 swh-model (2.6.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.6.0 - (tagged by David Douard on 2021-06-15 16:51:49 +0200) * Upstream changes: - v2.6.0 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 15 Jun 2021 14:56:10 +0000 swh-model (2.5.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.5.1 - (tagged by David Douard on 2021-05-20 15:22:50 +0200) * Upstream changes: - v2.5.1 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 20 May 2021 13:40:27 +0000 swh-model (2.5.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.5.0 - (tagged by Valentin Lorentz on 2021-05-11 12:02:49 +0200) * Upstream changes: - v2.5.0 - * identifiers: Expose manifest/git_object computation -- Software Heritage autobuilder (on jenkins-debian1) Tue, 11 May 2021 10:07:47 +0000 swh-model (2.4.2-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.2 - (tagged by Valentin Lorentz on 2021-05-06 14:31:04 +0200) * Upstream changes: - v2.4.2 - * docs/persistent-identifiers: Add guidelines for fixing invalid SWHIDs. 
- * Blacklist attr 21.1.0 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 May 2021 12:35:43 +0000 swh-model (2.4.1-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.1 - (tagged by Antoine Lambert on 2021-04-29 14:19:28 +0200) * Upstream changes: - version 2.4.1 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 29 Apr 2021 12:23:21 +0000 swh-model (2.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.4.0 - (tagged by Antoine Lambert on 2021-04-13 15:26:51 +0200) * Upstream changes: - version 2.4.0 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 13 Apr 2021 13:31:21 +0000 swh-model (2.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.3.0 - (tagged by Nicolas Dandrimont on 2021-03-19 17:15:00 +0100) * Upstream changes: - Release swh.model 2.3.0 - Properly truncate RawExtrinsicMetadata objects to a precision of one - second, as does their unique id. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 19 Mar 2021 16:17:48 +0000 swh-model (2.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.2.0 - (tagged by Valentin Lorentz on 2021-03-15 10:32:36 +0100) * Upstream changes: - v2.2.0 - * Add a swhid() method to RawExtrinsicMetadata. -- Software Heritage autobuilder (on jenkins-debian1) Mon, 15 Mar 2021 09:35:25 +0000 swh-model (2.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.1.0 - (tagged by David Douard on 2021-03-11 14:19:00 +0100) * Upstream changes: - v2.1.0 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 11 Mar 2021 13:21:40 +0000 swh-model (2.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 2.0.0 - (tagged by Valentin Lorentz on 2021-03-05 10:11:47 +0100) * Upstream changes: - v2.0.0 - Breaking change: - * model: Remove override of RawExtrinsicMetadata.unique_key(), so it now returns the hash. - Other changes: - * identifiers: Add raw_extrinsic_metadata_identifier - * model: Add 'id' field to RawExtrinsicMetadata -- Software Heritage autobuilder (on jenkins-debian1) Fri, 05 Mar 2021 09:14:35 +0000 swh-model (1.0.1-1~swh1) unstable-swh; urgency=medium * New upstream release 1.0.1 - (tagged by Valentin Lorentz on 2021-03-04 15:08:55 +0100) * Upstream changes: - v1.0.1 - * cli: stop using the deprecated SWHID class - * identifiers: Remove the deprecated SWHID class -- Software Heritage autobuilder (on jenkins-debian1) Thu, 04 Mar 2021 14:11:09 +0000 swh-model (1.0.0-1~swh1) unstable-swh; urgency=medium * New upstream release 1.0.0 - (tagged by Valentin Lorentz on 2021-03-01 18:01:29 +0100) * Upstream changes: - v1.0.0 - Two breaking changes: - * RawExtrinsicMetadata: Use ExtendedSWHID as target and remove type - * RawExtrinsicMetadata: Use CoreSWHID instead of SWHID for contexts - And two minor changes: - * Add CoreSWHID.to_extended() - * Add a swhid() method to all hashable objects. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 02 Mar 2021 08:18:42 +0000 swh-model (0.13.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.13.0 - (tagged by Valentin Lorentz on 2021-02-25 17:56:31 +0100) * Upstream changes: - v0.13.0 - * Update persistent identifiers doc with pip install info - * Make explicit Python 3 dependency - * tests: Clean hashutil._blake2_hash_cache after mocking blake2 functions. 
- * Introduce new classes CoreSWHID/QualifiedSWHID/ExtendedSWHID - * Deprecate SWHID class - * Disallow 'ori' type in SWHID class -- Software Heritage autobuilder (on jenkins-debian1) Thu, 25 Feb 2021 16:59:26 +0000 swh-model (0.12.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.12.0 - (tagged by David Douard on 2021-01-26 17:22:28 +0100) * Upstream changes: - v0.12.0 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 26 Jan 2021 16:27:16 +0000 swh-model (0.11.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.0 - (tagged by Antoine R. Dumont (@ardumont) on 2021-01-20 15:31:54 +0100) * Upstream changes: - v0.11.0 - model: Allow new status values not_found and failed to OriginVisitStatus -- Software Heritage autobuilder (on jenkins-debian1) Wed, 20 Jan 2021 14:34:53 +0000 swh-model (0.10.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.10.0 - (tagged by Vincent SELLIER on 2021-01-14 14:13:22 +0100) * Upstream changes: - v0.10.0 - * 2021-01-12 Add an optional type field on OriginVisitStatus object - * 2021-01-12 test_identifiers: Reorder SWHID tests. - * 2021-01-12 test_identifiers: Make sure that {directory,revision,release,snapshot}_identifier() doesn't just return a value from the dict. - * 2021-01-04 Add missing slots=True for Directory. - * 2020-12-19 SWHID parsing: simplify and deduplicate validation logic - * 2020-12-14 model: Make all classes slotted. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 14 Jan 2021 13:16:10 +0000 swh-model (0.9.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.9.0 - (tagged by Nicolas Dandrimont on 2020-11-17 10:12:47 +0100) * Upstream changes: - Release swh.model v0.9.0 - Drop backwards compatibility for RawExtrinsicMetadata.id -- Software Heritage autobuilder (on jenkins-debian1) Tue, 17 Nov 2020 09:15:43 +0000 swh-model (0.8.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.8.0 - (tagged by Antoine R. Dumont (@ardumont) on 2020-11-12 13:09:48 +0100) * Upstream changes: - v0.8.0 - identifiers.parse_swhid: Make SWHIDs with whitespaces invalid - identifiers.parse_swhid: Check the swhid qualifiers and fail if invalid - model.identifiers: Improve error messages in case of invalid SWHIDs -- Software Heritage autobuilder (on jenkins-debian1) Thu, 12 Nov 2020 12:10:46 +0000 swh-model (0.7.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.3 - (tagged by Nicolas Dandrimont on 2020-10-27 16:03:58 +0100) * Upstream changes: - Release swh.model v0.7.3 - Reduce the amount of DeprecationWarnings for RawExtrinsicMetadata -- Software Heritage autobuilder (on jenkins-debian1) Tue, 27 Oct 2020 15:06:50 +0000 swh-model (0.7.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.2 - (tagged by Nicolas Dandrimont on 2020-10-27 10:37:19 +0100) * Upstream changes: - Release swh.model v0.7.2 - Add a new -- exclude flag to swh identify - Migrate RawExtrinsicMetadata `id` attribute to `target` - Future-proof the swh.model.model.HashableObject interface -- Software Heritage autobuilder (on jenkins-debian1) Tue, 27 Oct 2020 09:41:19 +0000 swh-model (0.7.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.1 - (tagged by Valentin Lorentz on 2020-10-12 12:16:47 +0200) * Upstream changes: - v0.7.1 - Add a 'unique_key' method on model objects -- Software Heritage autobuilder (on jenkins-debian1) Mon, 12 Oct 2020 10:19:10 +0000 swh-model (0.7.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.0 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-10-07 12:47:02 +0200) * Upstream changes: - v0.7.0 - cli: make SWHIDParamType return SWHID type instead of string - tox.ini: pin black to the pre- commit version (19.10b0) to avoid flip-flops - Merge the two test_identifiers.py files. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 07 Oct 2020 10:47:55 +0000 swh-model (0.6.7-3~swh1) unstable-swh; urgency=medium * Fix a typo in d/control. -- David Douard Fri, 25 Sep 2020 17:36:14 +0200 swh-model (0.6.7-2~swh1) unstable-swh; urgency=medium * Fix dependencies on d/control -- David Douard Fri, 25 Sep 2020 17:03:31 +0200 swh-model (0.6.7-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.7 - (tagged by David Douard on 2020-09-25 15:28:58 +0200) * Upstream changes: - v0.6.7 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Sep 2020 13:32:18 +0000 swh-model (0.6.6-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.6 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-07 10:18:50 +0200) * Upstream changes: - v0.6.6 - model.Content.to_dict: Remove ctime entry when it's None - model: Add Sha1 alias -- Software Heritage autobuilder (on jenkins-debian1) Fri, 07 Aug 2020 08:22:35 +0000 swh-model (0.6.5-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.5 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-06 19:59:26 +0200) * Upstream changes: - v0.6.5 - model: Add final object_type field on metadata related model objects -- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 Aug 2020 18:01:05 +0000 swh-model (0.6.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.4 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-06 18:44:48 +0200) * Upstream changes: - v0.6.4 - Use correct setuptools-scm keyword this time -- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 Aug 2020 16:47:14 +0000 swh-model (0.6.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.1 - (tagged by Valentin Lorentz on 2020-07-31 10:47:56 +0200) * Upstream changes: - v0.6.1 - * Declare pytest markers - * Import Mapping from collections.abc instead of collections - * Fix incorrectly typed null constants in extra_headers byte strings - * add ImmutableDict.__repr__ - * Add missing object_type class attributes on MetadataAuthority, MetadataFetcher, and RawExtrinsicMetadata. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 31 Jul 2020 08:51:42 +0000 swh-model (0.6.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.0 - (tagged by Valentin Lorentz on 2020-07-20 12:45:59 +0200) * Upstream changes: - v0.6.0 - * Rework dia -> pdf pipeline for inkscape 1.0 - * Rename MetadataAuthorityType.DEPOSIT to MetadataAuthorityType.DEPOSIT_CLIENT. 
-- Software Heritage autobuilder (on jenkins-debian1) Mon, 20 Jul 2020 10:49:27 +0000 swh-model (0.5.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.5.0 - (tagged by Antoine Lambert on 2020-07-08 17:12:44 +0200) * Upstream changes: - version 0.5.0 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 08 Jul 2020 15:23:51 +0000 swh-model (0.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.0 - (tagged by David Douard on 2020-07-06 14:13:31 +0200) * Upstream changes: - v0.4.0 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 06 Jul 2020 12:16:51 +0000 swh-model (0.3.8-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.8 - (tagged by Antoine Lambert on 2020-07-03 16:06:44 +0200) * Upstream changes: - version 0.3.8 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 03 Jul 2020 14:10:51 +0000 swh-model (0.3.7-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.7 - (tagged by Antoine R. Dumont (@ardumont) on 2020-07-02 15:15:46 +0200) * Upstream changes: - v0.3.7 - Refactor common loader behavior within from_disk.iter_directory - Unify object_type some more within the merkle and from_disk modules -- Software Heritage autobuilder (on jenkins-debian1) Thu, 02 Jul 2020 13:17:32 +0000 swh-model (0.3.6-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.6 - (tagged by Antoine R. Dumont (@ardumont) on 2020-07-01 15:46:23 +0200) * Upstream changes: - v0.3.6 - model.OriginVisit: Drop obsolete fields -- Software Heritage autobuilder (on jenkins-debian1) Wed, 01 Jul 2020 13:48:43 +0000 swh-model (0.3.5-2~swh1) unstable-swh; urgency=medium * Update dependency + Bump -- Antoine R. Dumont (@ardumont) Tue, 30 Jun 2020 12:40:52 +0200 swh-model (0.3.5-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.5 - (tagged by Antoine R. Dumont (@ardumont) on 2020-06-30 11:21:07 +0200) * Upstream changes: - v0.3.5 - Tag model entities with their "object_type" -- Software Heritage autobuilder (on jenkins-debian1) Tue, 30 Jun 2020 09:31:43 +0000 swh-model (0.3.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.4 - (tagged by Antoine R. Dumont (@ardumont) on 2020-06-24 10:43:48 +0200) * Upstream changes: - v0.3.4 - OriginVisitStatus: Allow "created" status - model.OriginVisit: Make obsolete fields optional - swh.model.model.OriginVisit: Drop the dateutil.parser.parse use -- Software Heritage autobuilder (on jenkins-debian1) Wed, 24 Jun 2020 08:47:12 +0000 swh-model (0.3.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.3 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-06-17 09:38:34 +0200) * Upstream changes: - v0.3.3 - model.hypothesis_strategies: Make metadata always none on origin_visit -- Software Heritage autobuilder (on jenkins-debian1) Wed, 17 Jun 2020 07:40:50 +0000 swh-model (0.3.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.2 - (tagged by David Douard on 2020-06-16 10:41:05 +0200) * Upstream changes: - v0.3.2 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 16 Jun 2020 08:45:55 +0000 swh-model (0.3.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.1 - (tagged by David Douard on 2020-06-15 09:43:30 +0200) * Upstream changes: - v0.3.1 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 15 Jun 2020 07:52:09 +0000 swh-model (0.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.0 - (tagged by David Douard on 2020-06-03 11:59:02 +0200) * Upstream changes: - v0.3.0 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 03 Jun 2020 10:04:35 +0000 swh-model (0.2.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.2 - (tagged by David Douard on 2020-06-03 11:28:38 +0200) * Upstream changes: - v0.2.2 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 03 Jun 2020 09:33:46 +0000 swh-model (0.2.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.1 - (tagged by David Douard on 2020-05-29 17:39:37 +0200) * Upstream changes: - v0.2.1 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 29 May 2020 15:43:44 +0000 swh-model (0.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.0 - (tagged by David Douard on 2020-05-25 10:06:12 +0200) * Upstream changes: - v0.2.0 -- Software Heritage autobuilder (on jenkins-debian1) Mon, 25 May 2020 08:11:07 +0000 swh-model (0.1.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.1.1 - (tagged by Antoine R. Dumont (@ardumont) on 2020-05-05 14:43:40 +0200) * Upstream changes: - v0.1.1 - Make aware_datetimes() generate only ISO8601-encodable datetimes -- Software Heritage autobuilder (on jenkins-debian1) Tue, 05 May 2020 12:45:37 +0000 swh-model (0.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.1.0 - (tagged by Stefano Zacchiroli on 2020-04-30 19:23:13 +0200) * Upstream changes: - v0.1.0 / 2020-04-30 - * SWHID spec: full reread - * setup.py: add documentation link - * hypothesis_strategies: Generate aware datetimes instead of naive ones. - * doc: check-in IANA registration template for the "swh" URI scheme - * Restructure SWHID documentation in preparation for T2385 - merge grammars into a single one - explain better that SWHIDs are made up of core identifier + qualifiers - separate qualifier into context and fragment onex - add reference to swh-identify -- Software Heritage autobuilder (on jenkins-debian1) Thu, 30 Apr 2020 20:31:00 +0000 swh-model (0.0.69-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.69 - (tagged by Stefano Zacchiroli on 2020-04-28 16:05:58 +0200) * Upstream changes: - v0.0.69 / 2020-04-28 - * SWHID spec: bump version to 1.3 and add last modified date - * SWHID spec: make SWHIDs plural where needed - * SWHID spec: simplify and generalize escaping requirements - * SWHID spec: add support for IRI - * SWHID: deal with escaping in origin qualifiers - * SWHID doc: improve wording of intrinsic parts v. 
the rest -- Software Heritage autobuilder (on jenkins-debian1) Tue, 28 Apr 2020 14:10:35 +0000 swh-model (0.0.68-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.68 - (tagged by David Douard on 2020-04-21 16:20:58 +0200) * Upstream changes: - v0.0.68 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 21 Apr 2020 14:28:38 +0000 swh-model (0.0.67-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.67 - (tagged by Stefano Zacchiroli on 2020-04-17 17:49:42 +0200) * Upstream changes: - v0.0.67 / 2020-04-17 - * CLI: add test for swh identify w/o args - * CLI: require explicit "-" to identify via stdin - * SWHID doc: fix minor grammar issue - * SWHID doc: fix link in CISE paper reference - * identifiers.py: reference to SWHIDs using explicit anchors - * swh identify: embrace SWHID naming in user-facing doc/messages - * PID doc: embrace the SWHID naming - * PID doc: add reference to CISE paper - * doc: document identify CLI -- Software Heritage autobuilder (on jenkins-debian1) Fri, 17 Apr 2020 15:54:03 +0000 swh-model (0.0.66-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.66 - (tagged by Antoine R. Dumont (@ardumont) on 2020-04-10 16:46:31 +0200) * Upstream changes: - v0.0.66 - rename-visit-status model: Rename OriginVisitUpdate to OriginVisitStatus -- Software Heritage autobuilder (on jenkins-debian1) Fri, 10 Apr 2020 14:48:17 +0000 swh-model (0.0.65-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.65 - (tagged by Antoine R. Dumont (@ardumont) on 2020-04-09 16:25:24 +0200) * Upstream changes: - v0.0.65 - from_disk: path parameter to dir_filter functions - Enable black -- Software Heritage autobuilder (on jenkins-debian1) Thu, 09 Apr 2020 14:27:21 +0000 swh-model (0.0.64-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.64 - (tagged by Antoine Lambert on 2020-04-03 15:00:36 +0200) * Upstream changes: - version 0.0.64 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 03 Apr 2020 13:03:34 +0000 swh-model (0.0.63-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.63 - (tagged by Antoine R. Dumont (@ardumont) on 2020-04-01 10:07:07 +0200) * Upstream changes: - v0.0.63 - origin/master model: Add new OriginVisitUpdate model object + test strategy - docs: Extend SWH PID definition with additional context qualifiers. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 01 Apr 2020 08:08:58 +0000 swh-model (0.0.62-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.62 - (tagged by Valentin Lorentz on 2020-03-26 14:19:40 +0100) * Upstream changes: - v0.0.62 - * identifiers: encode origin URLs in utf-8 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 26 Mar 2020 13:22:20 +0000 swh-model (0.0.60-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.60 - (tagged by Valentin Lorentz on 2020-03-05 12:05:18 +0100) * Upstream changes: - v0.0.60 - * Add a method to generate Content/SkippedContent from binary data - * Draw contents from a byte string instead of generating arbitrary hashes - * Add classmethod Person.from_address, to parse from 'name ' strings. 
-- Software Heritage autobuilder (on jenkins-debian1) Thu, 05 Mar 2020 11:07:50 +0000 swh-model (0.0.59-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.59 - (tagged by Nicolas Dandrimont on 2020-02-27 18:03:53 +0100) * Upstream changes: - Release swh.model v0.0.59 - Use proper hypothesis strategy to generate Person objects -- Software Heritage autobuilder (on jenkins-debian1) Thu, 27 Feb 2020 17:07:16 +0000 swh-model (0.0.57-1~swh2) unstable-swh; urgency=medium * Bump dependency release -- Antoine R. Dumont (@ardumont) Thu, 27 Feb 2020 16:24:21 +0200 swh-model (0.0.57-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.57 - (tagged by Valentin Lorentz on 2020-02-27 15:17:04 +0100) * Upstream changes: - v0.0.57 - * Add method BaseModel.hashes(). - * Re-introduce the swh.core dependency in swh.model[cli] - * Add support for skipping large contents in from_disk. - * Add to_model() method to from_disk.{Content,Directory}, to convert to canonical model objects. - * Take the value of MerkleNode.data into account to compute equality. - * Add method MerkleNode.iter_tree, to visit all nodes in the subtree of a node. - * Add from_datetime and from_iso8601 constructors for TimestampWithTimezone. - * Make attributes name and email of Person optional. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 27 Feb 2020 14:20:21 +0000 swh-model (0.0.56-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.56 - (tagged by Valentin Lorentz on 2020-02-10 11:46:35 +0100) * Upstream changes: - v0.0.56 - Make OriginVisit.snapshot optional. -- Software Heritage autobuilder (on jenkins-debian1) Mon, 10 Feb 2020 10:48:55 +0000 swh-model (0.0.55-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.55 - (tagged by Valentin Lorentz on 2020-02-07 16:13:23 +0100) * Upstream changes: - v0.0.55 - * Make content length mandatory. - * Make 'visible' the default status for present Contents. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 07 Feb 2020 15:16:58 +0000 swh-model (0.0.54-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.54 - (tagged by Valentin Lorentz on 2020-02-06 13:15:45 +0100) * Upstream changes: - v0.0.54 - * Split Content class into two classes, for missing and non-missing contents. 
-- Software Heritage autobuilder (on jenkins-debian1) Thu, 06 Feb 2020 12:18:04 +0000 swh-model (0.0.53-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.53 - (tagged by Valentin Lorentz on 2020-02-03 15:58:31 +0100) * Upstream changes: - v0.0.53 - * hypothesis_strategies/snapshots: Explain last post-processing step - * cli: add support for reading a file content from stdin in 'swh identify' command - * model: Update revision date types to be optional -- Software Heritage autobuilder (on jenkins-debian1) Mon, 03 Feb 2020 15:01:26 +0000 swh-model (0.0.52-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.52 - (tagged by Antoine Lambert on 2019-11-29 16:27:24 +0100) * Upstream changes: - version 0.0.52 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 29 Nov 2019 15:30:57 +0000 swh-model (0.0.51-1~swh3) unstable-swh; urgency=medium * Add manual pytz dependency -- Nicolas Dandrimont Wed, 30 Oct 2019 17:52:33 +0100 swh-model (0.0.51-1~swh2) unstable-swh; urgency=medium * Add missing build-dependency on pytz -- Nicolas Dandrimont Wed, 30 Oct 2019 17:25:55 +0100 swh-model (0.0.51-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.51 - (tagged by Valentin Lorentz on 2019-10-30 15:03:19 +0100) * Upstream changes: - v0.0.51 - Make OriginVisit.origin a string instead of a dict. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 30 Oct 2019 14:05:55 +0000 swh-model (0.0.50-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.50 - (tagged by David Douard on 2019-10-30 09:30:17 +0100) * Upstream changes: - v0.0.50 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 30 Oct 2019 08:32:50 +0000 swh-model (0.0.49-1~swh2) unstable-swh; urgency=medium * Add missing dependency on dulwich for tests -- Nicolas Dandrimont Wed, 23 Oct 2019 14:37:45 +0200 swh-model (0.0.49-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.49 - (tagged by Nicolas Dandrimont on 2019-10-23 14:28:01 +0200) * Upstream changes: - Release swh.model v0.0.49 - Add symbolic refs to swh identify -t snapshot - Cleanup model.BaseModel.to_dict() recursion -- Software Heritage autobuilder (on jenkins-debian1) Wed, 23 Oct 2019 12:30:41 +0000 swh-model (0.0.48-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.48 - (tagged by Nicolas Dandrimont on 2019-10-18 17:06:59 +0200) * Upstream changes: - Release swh.model 0.0.48 - Split CLI dependencies to another subpackage - Stop exporting origin.type in models - Document origin PIDs -- Software Heritage autobuilder (on jenkins-debian1) Fri, 18 Oct 2019 15:11:01 +0000 swh-model (0.0.47-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.47 - (tagged by Stefano Zacchiroli on 2019-09-27 10:20:40 +0200) * Upstream changes: - v0.0.47 - init.py: switch to documented way of extending path -- Software Heritage autobuilder (on jenkins-debian1) Fri, 27 Sep 2019 08:22:54 +0000 swh-model (0.0.46-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.46 - (tagged by Stefano Zacchiroli on 2019-09-20 15:51:17 +0200) * Upstream changes: - v0.0.46 - MANIFEST.in: ship py.typed -- Software Heritage autobuilder (on jenkins-debian1) Fri, 20 Sep 2019 13:53:45 +0000 swh-model (0.0.45-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.45 - (tagged by Stefano Zacchiroli on 2019-09-20 15:09:47 +0200) * Upstream changes: - v0.0.45 - * identifiers.py: do not inherit from on-the-fly namedtuple - * mypy: ignore django- stubs, needed only by hypothesis - * mypy.ini: remove left-over sample section - 
* typing: minimal changes to make a no-op mypy run pass - * fix indentation and spelling: make "make check" happy -- Software Heritage autobuilder (on jenkins-debian1) Fri, 20 Sep 2019 13:12:10 +0000 swh-model (0.0.44-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.44 - (tagged by Valentin Lorentz on 2019-09-04 14:36:01 +0200) * Upstream changes: - Fix Revision.from_dict to allow optional fields. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 04 Sep 2019 13:07:59 +0000 swh-model (0.0.43-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.43 - (tagged by Antoine R. Dumont (@ardumont) on 2019-09-03 14:04:44 +0200) * Upstream changes: - v0.0.43 - swh identify: add support for origin PIDs - identifiers.py: add constants for 'swh:1' and sanitize namespace -- Software Heritage autobuilder (on jenkins-debian1) Tue, 03 Sep 2019 12:09:04 +0000 swh-model (0.0.42-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.42 - (tagged by Valentin Lorentz on 2019-08-22 14:04:03 +0200) * Upstream changes: - v0.0.42 - Tweak swh.model.model to be closer to what swh-storage - accepts for releases and origin visits. -- Software Heritage autobuilder (on jenkins-debian1) Thu, 22 Aug 2019 12:12:22 +0000 swh-model (0.0.41-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.41 - (tagged by Valentin Lorentz on 2019-08-20 11:46:13 +0200) * Upstream changes: - tweaks to swh.model.model to support more valid inputs - * Allow -1 as Content length. - * Add optional 'ctime' field to Content. - * Generated content with status=hidden should have a data field. - * Add a get_hash helper method to Content. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 20 Aug 2019 09:50:09 +0000 swh-model (0.0.40-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.40 - (tagged by Valentin Lorentz on 2019-08-06 14:36:37 +0200) * Upstream changes: - Add SHA1_SIZE constant. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 06 Aug 2019 12:38:36 +0000 swh-model (0.0.39-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.39 - (tagged by Valentin Lorentz on 2019-07-18 12:28:42 +0200) * Upstream changes: - * fix pyblake2 dependency * origin persistent identifiers * release metadata -- Software Heritage autobuilder (on jenkins-debian1) Thu, 18 Jul 2019 10:31:00 +0000 swh-model (0.0.38-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.38 - (tagged by Valentin Lorentz on 2019-06-18 13:40:20 +0200) * Upstream changes: - Remove dependency on swh-core. - This is a fix to workaround pip's inability to correctly solve - extra requirements (swh-model depends on swh-core[], but if other - packages depend on swh-model and swh-core[http], the 'http' extra - does not always get installed). -- Software Heritage autobuilder (on jenkins-debian1) Tue, 18 Jun 2019 11:50:14 +0000 swh-model (0.0.37-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.37 - (tagged by David Douard on 2019-05-15 15:44:21 +0200) * Upstream changes: - cli: add support for --help on the 'identify' cli tool -- Software Heritage autobuilder (on jenkins-debian1) Thu, 13 Jun 2019 14:40:16 +0000 swh-model (0.0.36-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.36 - (tagged by Valentin Lorentz on 2019-04-26 13:33:29 +0200) * Upstream changes: - Prevent from_dict() from changing its input dict. 
-- Software Heritage autobuilder (on jenkins-debian1) Fri, 26 Apr 2019 11:57:45 +0000 swh-model (0.0.35-1~swh2) unstable-swh; urgency=medium * Remove hypothesis directory -- Nicolas Dandrimont Thu, 18 Apr 2019 18:27:33 +0200 swh-model (0.0.35-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.35 - (tagged by Nicolas Dandrimont on 2019-04-11 12:05:11 +0200) * Upstream changes: - Release swh.model v0.0.35 - Fix hypothesis strategies to work in non-UTC timezones -- Software Heritage autobuilder (on jenkins-debian1) Thu, 11 Apr 2019 10:08:14 +0000 swh-model (0.0.34-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.34 - (tagged by Valentin Lorentz on 2019-04-09 18:30:50 +0200) * Upstream changes: - Limit Content.length to what the pgsql storage supports. -- Software Heritage autobuilder (on jenkins-debian1) Wed, 10 Apr 2019 07:45:31 +0000 swh-model (0.0.33-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.33 - (tagged by Valentin Lorentz on 2019-04-08 21:46:28 +0200) * Upstream changes: - Tune the model generation to work with the pgsql storage. -- Software Heritage autobuilder (on jenkins-debian1) Tue, 09 Apr 2019 15:11:51 +0000 swh-model (0.0.32-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.32 - (tagged by Valentin Lorentz on 2019-04-05 19:15:16 +0200) * Upstream changes: - Add a model based using 'attrs' and Hypothesis strategies to generate it. -- Software Heritage autobuilder (on jenkins-debian1) Mon, 08 Apr 2019 12:57:45 +0000 swh-model (0.0.31-1~swh2) unstable-swh; urgency=medium * Add new dependencies on python3-attr and python3-hypothesis -- Nicolas Dandrimont Mon, 08 Apr 2019 14:55:50 +0200 swh-model (0.0.31-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.31 - (tagged by Valentin Lorentz on 2019-04-04 20:46:15 +0200) * Upstream changes: - Make snapshot_identifier add the cycle to the exception's arguments when it detects one. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 05 Apr 2019 09:07:35 +0000 swh-model (0.0.30-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.30 - (tagged by David Douard on 2019-01-08 12:28:35 +0100) * Upstream changes: - v0.0.30 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 09 Jan 2019 17:31:53 +0000 swh-model (0.0.29-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.29 * Reference iPRES paper in PID documentation * Remove deprecated swh.model.hashutil.hash_* functions * Split debian packaging to separate branch -- Nicolas Dandrimont Wed, 31 Oct 2018 18:26:32 +0100 swh-model (0.0.28-1~swh1) unstable-swh; urgency=medium * v0.0.28 * setup: prepare for pypi upload * tests: Initialize tox use * tests: Migrate to pytest * docs: Improve basic repository information * docs: document PID resolution possibilities other than Web UI / * hashutil: Migrate towards MultiHash api -- Antoine R. Dumont (@ardumont) Tue, 23 Oct 2018 16:24:21 +0200 swh-model (0.0.27-1~swh1) unstable-swh; urgency=medium * v0.0.27 * Refactor: Add MultiHash class to improve hash computations * swh.model.hashutil: Improve and clarify docstrings * swh.model.hashutil: Mark hash_* function as deprecated -- Antoine R. 
Dumont (@ardumont) Mon, 17 Sep 2018 12:07:59 +0200 swh-model (0.0.26-1~swh1) unstable-swh; urgency=medium * v0.0.26 * swh.model.identifiers: Open metadata in persistent_identifier method * refactor CLI tests to avoid duplicate assertion pairs * swh-identify: follow symlinks for CLI arguments (by default) * cli.py: prefer os.fsdecode() over manual fiddling with locale.getpref... * swh-identify: add support for passing multiple CLI arguments -- Antoine R. Dumont (@ardumont) Mon, 23 Jul 2018 14:29:54 +0200 swh-model (0.0.25-1~swh1) unstable-swh; urgency=medium * version 0.0.25 -- Antoine Lambert Fri, 29 Jun 2018 11:49:25 +0200 swh-model (0.0.24-1~swh1) unstable-swh; urgency=medium * v0.0.24 * swh.model.cli: Catch specific exception during identifiers check * identifiers: Validate input * identifiers: Raise when error during parsing persistent identifiers * Update blake2 support to be less Debian-specific * add swh-identify CLI tool to compute persistent identifiers * docs: Update high-level documentation (Merkle DAG description, * contextual information for persistent IDs, etc...) -- Antoine R. Dumont (@ardumont) Fri, 22 Jun 2018 15:38:32 +0200 swh-model (0.0.23-1~swh1) unstable-swh; urgency=medium * version 0.0.23 -- Antoine Lambert Tue, 29 May 2018 14:08:45 +0200 swh-model (0.0.22-1~swh1) unstable-swh; urgency=medium * version 0.0.22 -- Antoine Pietri Tue, 30 Jan 2018 18:22:42 +0100 swh-model (0.0.21-1~swh1) unstable-swh; urgency=medium * v0.0.21 * swh.model.identifiers: Add persistent identifier function * docs: document the naming scheme for persistent identifiers * bin/swh-hash-file: new binary to compute SWH-style content identifiers -- Antoine R. Dumont (@ardumont) Wed, 17 Jan 2018 11:06:33 +0100 swh-model (0.0.20-1~swh1) unstable-swh; urgency=medium * v0.0.20 * swh.model.hashutil.hash_data: Optionally integrate length in result * hashutil: add `snapshot` object type for git hashes * docs: add absolute anchor to documentation index -- Antoine R. Dumont (@ardumont) Wed, 20 Dec 2017 10:47:10 +0100 swh-model (0.0.19-1~swh1) unstable-swh; urgency=medium * Release swh.model version 0.0.19 * Update packaging runes -- Nicolas Dandrimont Thu, 12 Oct 2017 18:07:59 +0200 swh-model (0.0.18-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.18 * Replace swh.model.git with swh.model.from_disk (T709). * Clean up documentation -- Nicolas Dandrimont Thu, 05 Oct 2017 20:48:29 +0200 swh-model (0.0.17-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.17 * Clean up pyblake2 requirement for Python 3.5+ -- Nicolas Dandrimont Mon, 26 Jun 2017 14:41:49 +0200 swh-model (0.0.16-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.16 * Make sure we generate proper permissions in directories -- Nicolas Dandrimont Fri, 07 Apr 2017 14:32:34 +0200 swh-model (0.0.15-1~swh1) unstable-swh; urgency=medium * v0.0.15 * Add possibility to compute new blake2 hashes * Add blake2s256 hash as default new hash computation algorithm -- Antoine R. Dumont (@ardumont) Fri, 24 Mar 2017 16:32:35 +0100 swh-model (0.0.14-1~swh1) unstable-swh; urgency=medium * v0.0.14 * Migrate functions from swh.core.hashutil to swh.model.hashutil -- Antoine R. 
Dumont (@ardumont) Wed, 15 Mar 2017 16:00:56 +0100 swh-model (0.0.13-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.13 * Timestamps are now fully integer values -- Nicolas Dandrimont Tue, 14 Feb 2017 19:32:24 +0100 swh-model (0.0.12-1~swh1) unstable-swh; urgency=medium * Release swh.model v0.0.12 * Add more tests to git tree hash computations -- Nicolas Dandrimont Tue, 14 Jun 2016 17:08:20 +0200 swh-model (0.0.11-1~swh1) unstable-swh; urgency=medium * v0.0.11 * Open git.children_hashes api * Rename git.walk_and_compute_sha1_from_directory_2 to git.compute_hashes_from_directory * Remove dead code -- Antoine R. Dumont (@ardumont) Sat, 11 Jun 2016 02:23:19 +0200 swh-model (0.0.10-1~swh1) unstable-swh; urgency=medium * v0.0.10 * Add objects_per_type api * Open a new walk_and_compute_sha1_from_directory_2 api * Improve internal api regarding directory and tree hash computations -- Antoine R. Dumont (@ardumont) Wed, 08 Jun 2016 15:54:59 +0200 swh-model (0.0.9-1~swh1) unstable-swh; urgency=medium * v0.0.9 * Add coverage on edge case * Optimize git hash walk -- Antoine R. Dumont (@ardumont) Thu, 26 May 2016 12:56:17 +0200 swh-model (0.0.8-1~swh1) unstable-swh; urgency=medium * v0.0.8 * Add coverage on edge case * Optimize git hash walk -- Antoine R. Dumont (@ardumont) Thu, 26 May 2016 12:33:59 +0200 swh-model (0.0.7-1~swh1) unstable-swh; urgency=medium * v0.0.7 * Improve corner case policy about walking and computing hash tree (+ update) -- Antoine R. Dumont (@ardumont) Wed, 25 May 2016 23:47:19 +0200 swh-model (0.0.6-1~swh1) unstable-swh; urgency=medium * v0.0.6 * Improve corner case on git hash memory update function * debian packaging: Ignore fs tests for packaging -- Antoine R. Dumont (@ardumont) Tue, 24 May 2016 17:01:06 +0200 swh-model (0.0.5-1~swh1) unstable-swh; urgency=medium * v0.0.5 * Add update git hash computation from existing data * Add revision identifier data for hash identifier computation (extra- headers) -- Antoine R. Dumont (@ardumont) Fri, 15 Apr 2016 12:51:21 +0200 swh-model (0.0.4-1~swh1) unstable-swh; urgency=medium * v0.0.4 * Migrate swh.loader.dir.git module to swh.model.git -- Antoine R. Dumont (@ardumont) Mon, 21 Mar 2016 15:20:28 +0100 swh-model (0.0.3-1~swh1) unstable-swh; urgency=medium * v0.0.3 * Release name is now in bytes -- Antoine R. 
Dumont (@ardumont) Wed, 27 Jan 2016 15:50:08 +0100 swh-model (0.0.2-1~swh1) unstable-swh; urgency=medium * Prepare release of v0.0.2 * Import the rest of swh.core.hashutil -- Nicolas Dandrimont Wed, 16 Dec 2015 18:30:12 +0100 swh-model (0.0.1-1~swh1) unstable-swh; urgency=medium * Initial release * Prepare swh.model release v0.0.1 -- Nicolas Dandrimont Mon, 07 Dec 2015 18:26:58 +0100 diff --git a/swh.model.egg-info/PKG-INFO b/swh.model.egg-info/PKG-INFO index e632705..8b07a86 100644 --- a/swh.model.egg-info/PKG-INFO +++ b/swh.model.egg-info/PKG-INFO @@ -1,46 +1,46 @@ Metadata-Version: 2.1 Name: swh.model -Version: 6.1.0 +Version: 6.2.0 Summary: Software Heritage data model Home-page: https://forge.softwareheritage.org/diffusion/DMOD/ Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-model Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-model/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 5 - Production/Stable Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: cli Provides-Extra: testing-minimal Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-model ========= Implementation of the Data model of the Software Heritage project, used to archive source code artifacts. This module defines the notion of SoftWare Heritage persistent IDentifiers (SWHIDs) and provides tools to compute them: ```sh $ swh-identify fork.c kmod.c sched/deadline.c swh:1:cnt:2e391c754ae730bd2d8520c2ab497c403220c6e3 fork.c swh:1:cnt:0277d1216f80ae1adeed84a686ed34c9b2931fc2 kmod.c swh:1:cnt:57b939c81bce5d06fa587df8915f05affbe22b82 sched/deadline.c $ swh-identify --no-filename /usr/src/linux/kernel/ swh:1:dir:f9f858a48d663b3809c9e2f336412717496202ab ``` diff --git a/swh/model/git_objects.py b/swh/model/git_objects.py index d0f7bf8..566aaa3 100644 --- a/swh/model/git_objects.py +++ b/swh/model/git_objects.py @@ -1,638 +1,651 @@ # Copyright (C) 2015-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information """ Converts SWH model objects to git(-like) objects Most of the functions in this module take as argument an object from :mod:`swh.model.model`, and format it like a git object. They are the inverse functions of those in :mod:`swh.loader.git.converters`, but with extensions, as SWH's model is a superset of Git's: * extensions of existing types (eg. revision/commit and release/tag dates can be expressed with precision up to milliseconds, to support formatting Mercurial objects) * new types, for SWH's specific needs (:class:`swh.model.model.RawExtrinsicMetadata` and :class:`swh.model.model.ExtID`) * support for somewhat corrupted git objects that we need to reproduce This is used for two purposes: * Format manifests that can be hashed to produce :ref:`intrinsic identifiers ` * Write git objects to reproduce git repositories that were ingested in the archive. 
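As a minimal sketch of the first use case (the sample payload below is purely illustrative, and :func:`swh.model.model.Content.from_data` is assumed to behave as documented), a content's ``sha1_git`` can be reproduced by hashing the output of :func:`content_git_object` directly::

    import hashlib

    from swh.model.git_objects import content_git_object
    from swh.model.model import Content

    content = Content.from_data(b"hello world\n")  # illustrative data only
    manifest = content_git_object(content)  # b"blob 12\x00hello world\n"
    assert hashlib.sha1(manifest).hexdigest() == content.sha1_git.hex()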
""" from __future__ import annotations import datetime from functools import lru_cache from typing import Dict, Iterable, List, Optional, Tuple, Union, cast import warnings from . import model from .collections import ImmutableDict from .hashutil import git_object_header, hash_to_bytehex +def content_git_object(content: model.Content) -> bytes: + """Formats a content as a git blob. + + A content's identifier is the blob sha1 à la git of the tagged content. + """ + content = cast(model.Content, content) + + if content.data is None: + raise model.MissingData("Content data is None, cannot format.") + + return git_object_header("blob", len(content.data)) + content.data + + def directory_entry_sort_key(entry: model.DirectoryEntry): """The sorting key for tree entries""" if isinstance(entry, dict): # For backward compatibility entry = model.DirectoryEntry.from_dict(entry) if entry.type == "dir": return entry.name + b"/" else: return entry.name @lru_cache() def _perms_to_bytes(perms): """Convert the perms value to its canonical bytes representation""" oc = oct(perms)[2:] return oc.encode("ascii") def escape_newlines(snippet): """Escape the newlines present in snippet according to git rules. New lines in git manifests are escaped by indenting the next line by one space. """ if b"\n" in snippet: return b"\n ".join(snippet.split(b"\n")) else: return snippet def format_date(date: model.Timestamp) -> bytes: """Convert a date object into an UTC timestamp encoded as ascii bytes. Git stores timestamps as an integer number of seconds since the UNIX epoch. However, Software Heritage stores timestamps as an integer number of microseconds (postgres type "datetime with timezone"). Therefore, we print timestamps with no microseconds as integers, and timestamps with microseconds as floating point values. We elide the trailing zeroes from microsecond values, to "future-proof" our representation if we ever need more precision in timestamps. """ if isinstance(date, dict): # For backward compatibility date = model.Timestamp.from_dict(date) if not date.microseconds: return str(date.seconds).encode() else: float_value = "%d.%06d" % (date.seconds, date.microseconds) return float_value.rstrip("0").encode() def normalize_timestamp(time_representation): """Normalize a time representation for processing by Software Heritage This function supports a numeric timestamp (representing a number of seconds since the UNIX epoch, 1970-01-01 at 00:00 UTC), a :obj:`datetime.datetime` object (with timezone information), or a normalized Software Heritage time representation (idempotency). Args: time_representation: the representation of a timestamp Returns: dict: a normalized dictionary with three keys: - timestamp: a dict with two optional keys: - seconds: the integral number of seconds since the UNIX epoch - microseconds: the integral number of microseconds - offset: the timezone offset as a number of minutes relative to UTC - negative_utc: a boolean representing whether the offset is -0000 when offset = 0. """ if time_representation is None: return None else: return model.TimestampWithTimezone.from_dict(time_representation).to_dict() def directory_git_object(directory: Union[Dict, model.Directory]) -> bytes: """Formats a directory as a git tree. A directory's identifier is the tree sha1 à la git of a directory listing, using the following algorithm, which is equivalent to the git algorithm for trees: 1. Entries of the directory are sorted using the name (or the name with '/' appended for directory entries) as key, in bytes order. 
2. For each entry of the directory, the following bytes are output: - the octal representation of the permissions for the entry (stored in the 'perms' member), which is a representation of the entry type: - b'100644' (int 33188) for files - b'100755' (int 33261) for executable files - b'120000' (int 40960) for symbolic links - b'40000' (int 16384) for directories - b'160000' (int 57344) for references to revisions - an ascii space (b'\x20') - the entry's name (as raw bytes), stored in the 'name' member - a null byte (b'\x00') - the 20 byte long identifier of the object pointed at by the entry, stored in the 'target' member: - for files or executable files: their blob sha1_git - for symbolic links: the blob sha1_git of a file containing the link destination - for directories: their intrinsic identifier - for revisions: their intrinsic identifier (Note that there is no separator between entries) """ if isinstance(directory, dict): # For backward compatibility warnings.warn( "directory_git_object's argument should be a swh.model.model.Directory " "object.", DeprecationWarning, stacklevel=2, ) directory = model.Directory.from_dict(directory) directory = cast(model.Directory, directory) components = [] for entry in sorted(directory.entries, key=directory_entry_sort_key): components.extend( [ _perms_to_bytes(entry.perms), b"\x20", entry.name, b"\x00", entry.target, ] ) return format_git_object_from_parts("tree", components) def format_git_object_from_headers( git_type: str, headers: Iterable[Tuple[bytes, bytes]], message: Optional[bytes] = None, ) -> bytes: """Format a git_object comprised of a git header and a manifest, which is itself a sequence of `headers`, and an optional `message`. The git_object format, compatible with the git format for tag and commit objects, is as follows: - for each `key`, `value` in `headers`, emit: - the `key`, literally - an ascii space (``\\x20``) - the `value`, with newlines escaped using :func:`escape_newlines`, - an ascii newline (``\\x0a``) - if the `message` is not None, emit: - an ascii newline (``\\x0a``) - the `message`, literally Args: headers: a sequence of key/value headers stored in the manifest; message: an optional message used to trail the manifest. Returns: the formatted git_object as bytes """ entries: List[bytes] = [] for key, value in headers: entries.extend((key, b" ", escape_newlines(value), b"\n")) if message is not None: entries.extend((b"\n", message)) concatenated_entries = b"".join(entries) header = git_object_header(git_type, len(concatenated_entries)) return header + concatenated_entries def format_git_object_from_parts(git_type: str, parts: Iterable[bytes]) -> bytes: """Similar to :func:`format_git_object_from_headers`, but for manifests made of a flat list of entries, instead of key-value + message, ie. trees and snapshots.""" concatenated_parts = b"".join(parts) header = git_object_header(git_type, len(concatenated_parts)) return header + concatenated_parts def format_author_data( author: model.Person, date_offset: Optional[model.TimestampWithTimezone] ) -> bytes: """Format authorship data according to git standards. Git authorship data has two components: - an author specification, usually a name and email, but in practice an arbitrary bytestring - optionally, a timestamp with a UTC offset specification The authorship data is formatted thus:: `name and email`[ `timestamp` `utc_offset`] The timestamp is encoded as a (decimal) number of seconds since the UNIX epoch (1970-01-01 at 00:00 UTC). 
As an extension to the git format, we support fractional timestamps, using a dot as the separator for the decimal part. The utc offset is a number of minutes encoded as '[+-]HHMM'. Note that some tools can pass a negative offset corresponding to the UTC timezone ('-0000'), which is valid and is encoded as such. Returns: the byte string containing the authorship data """ ret = [author.fullname] if date_offset is not None: date_f = format_date(date_offset.timestamp) ret.extend([b" ", date_f, b" ", date_offset.offset_bytes]) return b"".join(ret) def revision_git_object(revision: Union[Dict, model.Revision]) -> bytes: """Formats a revision as a git commit. The fields used for the revision identifier computation are: - directory - parents - author - author_date - committer - committer_date - extra_headers or metadata -> extra_headers - message A revision's identifier is the 'git'-checksum of a commit manifest constructed as follows (newlines are a single ASCII newline character):: tree <directory identifier> [for each parent in parents] parent <parent identifier> [end for each parents] author <author> <author_date> committer <committer> <committer_date> [for each key, value in extra_headers] <key> <encoded value> [end for each extra_headers] <message> The directory identifier is the ascii representation of its hexadecimal encoding. Author and committer are formatted using the :attr:`Person.fullname` attribute only. Dates are formatted with the :func:`format_author_data` function. Extra headers are an ordered list of [key, value] pairs. Keys are strings and get encoded to utf-8 for identifier computation. Values are either byte strings, unicode strings (that get encoded to utf-8), or integers (that get encoded to their utf-8 decimal representation). Multiline extra header values are escaped by indenting the continuation lines with one ascii space. If the message is None, the manifest ends with the last header. Else, the message is appended to the headers after an empty line. The checksum of the full manifest is computed using the 'commit' git object type.
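A short sketch of that commit manifest (the author, dates and directory hash are made-up example values; the hash is the one reused throughout the test suite below):

```python
from swh.model.git_objects import revision_git_object
from swh.model.hashutil import hash_to_bytes
from swh.model.model import Person, Revision, RevisionType, TimestampWithTimezone

author = Person.from_fullname(b"Jane Doe <jane@example.org>")
date = TimestampWithTimezone.from_iso8601("2022-04-27 18:36:27+0200")

revision = Revision(
    message=b"Initial commit\n",
    author=author,
    committer=author,
    date=date,
    committer_date=date,
    type=RevisionType.GIT,
    directory=hash_to_bytes("94a9ed024d3859793618152ea559a168bbcbb5e2"),
    synthetic=False,
)

raw = revision_git_object(revision)
# The manifest starts with the hex-encoded tree header and, since the message
# is not None, ends with the message after an empty line, like a git commit.
assert raw.split(b"\x00", 1)[1].startswith(b"tree 94a9ed02")
assert raw.endswith(b"\n\nInitial commit\n")
```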
""" if isinstance(revision, dict): # For backward compatibility warnings.warn( "revision_git_object's argument should be a swh.model.model.Revision " "object.", DeprecationWarning, stacklevel=2, ) revision = model.Revision.from_dict(revision) revision = cast(model.Revision, revision) headers = [(b"tree", hash_to_bytehex(revision.directory))] for parent in revision.parents: if parent: headers.append((b"parent", hash_to_bytehex(parent))) if revision.author is not None: headers.append((b"author", format_author_data(revision.author, revision.date))) if revision.committer is not None: headers.append( ( b"committer", format_author_data(revision.committer, revision.committer_date), ) ) # Handle extra headers metadata = revision.metadata or ImmutableDict() extra_headers = revision.extra_headers or () if not extra_headers and "extra_headers" in metadata: extra_headers = metadata["extra_headers"] headers.extend(extra_headers) return format_git_object_from_headers("commit", headers, revision.message) def target_type_to_git(target_type: model.ObjectType) -> bytes: """Convert a software heritage target type to a git object type""" return { model.ObjectType.CONTENT: b"blob", model.ObjectType.DIRECTORY: b"tree", model.ObjectType.REVISION: b"commit", model.ObjectType.RELEASE: b"tag", model.ObjectType.SNAPSHOT: b"refs", }[target_type] def release_git_object(release: Union[Dict, model.Release]) -> bytes: if isinstance(release, dict): # For backward compatibility warnings.warn( "release_git_object's argument should be a swh.model.model.Directory " "object.", DeprecationWarning, stacklevel=2, ) release = model.Release.from_dict(release) release = cast(model.Release, release) headers = [ (b"object", hash_to_bytehex(release.target)), (b"type", target_type_to_git(release.target_type)), (b"tag", release.name), ] if release.author is not None: headers.append((b"tagger", format_author_data(release.author, release.date))) return format_git_object_from_headers("tag", headers, release.message) def snapshot_git_object(snapshot: Union[Dict, model.Snapshot]) -> bytes: """Formats a snapshot as a git-like object. Snapshots are a set of named branches, which are pointers to objects at any level of the Software Heritage DAG. As well as pointing to other objects in the Software Heritage DAG, branches can also be *alias*es, in which case their target is the name of another branch in the same snapshot, or *dangling*, in which case the target is unknown (and represented by the ``None`` value). A snapshot identifier is a salted sha1 (using the git hashing algorithm with the ``snapshot`` object type) of a manifest following the algorithm: 1. Branches are sorted using the name as key, in bytes order. 2. 
For each branch, the following bytes are output: - the type of the branch target: - ``content``, ``directory``, ``revision``, ``release`` or ``snapshot`` for the corresponding entries in the DAG; - ``alias`` for branches referencing another branch; - ``dangling`` for dangling branches - an ascii space (``\\x20``) - the branch name (as raw bytes) - a null byte (``\\x00``) - the length of the target identifier, as an ascii-encoded decimal number (``20`` for current intrinsic identifiers, ``0`` for dangling branches, the length of the target branch name for branch aliases) - a colon (``:``) - the identifier of the target object pointed at by the branch, stored in the 'target' member: - for contents: their *sha1_git* - for directories, revisions, releases or snapshots: their intrinsic identifier - for branch aliases, the name of the target branch (as raw bytes) - for dangling branches, the empty string Note that, akin to directory manifests, there is no separator between entries. Because of symbolic branches, identifiers are of arbitrary length but are length-encoded to avoid ambiguity. """ if isinstance(snapshot, dict): # For backward compatibility warnings.warn( "snapshot_git_object's argument should be a swh.model.model.Snapshot " "object.", DeprecationWarning, stacklevel=2, ) snapshot = model.Snapshot.from_dict(snapshot) snapshot = cast(model.Snapshot, snapshot) unresolved = [] lines = [] for name, target in sorted(snapshot.branches.items()): if not target: target_type = b"dangling" target_id = b"" elif target.target_type == model.TargetType.ALIAS: target_type = b"alias" target_id = target.target if target_id not in snapshot.branches or target_id == name: unresolved.append((name, target_id)) else: target_type = target.target_type.value.encode() target_id = target.target lines.extend( [ target_type, b"\x20", name, b"\x00", ("%d:" % len(target_id)).encode(), target_id, ] ) if unresolved: raise ValueError( "Branch aliases unresolved: %s" % ", ".join("%r -> %r" % x for x in unresolved), unresolved, ) return format_git_object_from_parts("snapshot", lines) def raw_extrinsic_metadata_git_object( metadata: Union[Dict, model.RawExtrinsicMetadata] ) -> bytes: """Formats RawExtrinsicMetadata as a git-like object. A raw_extrinsic_metadata identifier is a salted sha1 (using the git hashing algorithm with the ``raw_extrinsic_metadata`` object type) of a manifest following the format:: target $ExtendedSwhid discovery_date $Timestamp authority $StrWithoutSpaces $IRI fetcher $Str $Version format $StrWithoutSpaces origin $IRI <- optional visit $IntInDecimal <- optional snapshot $CoreSwhid <- optional release $CoreSwhid <- optional revision $CoreSwhid <- optional path $Bytes <- optional directory $CoreSwhid <- optional $MetadataBytes $IRI must be RFC 3987 IRIs (so they may contain newlines, that are escaped as described below) $StrWithoutSpaces and $Version are ASCII strings, and may not contain spaces. $Str is an UTF-8 string. $CoreSwhid are core SWHIDs, as defined in :ref:`persistent-identifiers`. $ExtendedSwhid is a core SWHID, with extra types allowed ('ori' for origins and 'emd' for raw extrinsic metadata) $Timestamp is a decimal representation of the rounded-down integer number of seconds since the UNIX epoch (1970-01-01 00:00:00 UTC), with no leading '0' (unless the timestamp value is zero) and no timezone. It may be negative by prefixing it with a '-', which must not be followed by a '0'. Newlines in $Bytes, $Str, and $Iri are escaped as with other git fields, ie. by adding a space after them. 
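To make the branch encoding concrete, the following sketch builds a two-branch snapshot, one branch being an alias (the branch names and the revision hash are arbitrary example values):

```python
import hashlib

from swh.model.collections import ImmutableDict
from swh.model.git_objects import snapshot_git_object
from swh.model.hashutil import hash_to_bytes
from swh.model.model import Snapshot, SnapshotBranch, TargetType

snapshot = Snapshot(
    branches=ImmutableDict(
        {
            b"refs/heads/main": SnapshotBranch(
                target=hash_to_bytes("94a9ed024d3859793618152ea559a168bbcbb5e2"),
                target_type=TargetType.REVISION,
            ),
            # Alias branch: its target is the *name* of another branch.
            b"HEAD": SnapshotBranch(
                target=b"refs/heads/main",
                target_type=TargetType.ALIAS,
            ),
        }
    )
)

raw = snapshot_git_object(snapshot)
# Branches are sorted by name, so HEAD comes first; its alias target is
# length-encoded with the length of the target branch name (15 bytes).
assert raw.split(b"\x00", 1)[1].startswith(b"alias HEAD\x0015:refs/heads/main")
assert hashlib.sha1(raw).digest() == snapshot.id
```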
""" if isinstance(metadata, dict): # For backward compatibility warnings.warn( "raw_extrinsic_metadata_git_object's argument should be a " "swh.model.model.RawExtrinsicMetadata object.", DeprecationWarning, stacklevel=2, ) metadata = model.RawExtrinsicMetadata.from_dict(metadata) metadata = cast(model.RawExtrinsicMetadata, metadata) # equivalent to using math.floor(dt.timestamp()) to round down, # as int(dt.timestamp()) rounds toward zero, # which would map two seconds on the 0 timestamp. # # This should never be an issue in practice as Software Heritage didn't # start collecting metadata before 2015. timestamp = ( metadata.discovery_date.astimezone(datetime.timezone.utc) .replace(microsecond=0) .timestamp() ) assert timestamp.is_integer() headers = [ (b"target", str(metadata.target).encode()), (b"discovery_date", str(int(timestamp)).encode("ascii")), ( b"authority", f"{metadata.authority.type.value} {metadata.authority.url}".encode(), ), ( b"fetcher", f"{metadata.fetcher.name} {metadata.fetcher.version}".encode(), ), (b"format", metadata.format.encode()), ] for key in ( "origin", "visit", "snapshot", "release", "revision", "path", "directory", ): if getattr(metadata, key, None) is not None: value: bytes if key == "path": value = getattr(metadata, key) else: value = str(getattr(metadata, key)).encode() headers.append((key.encode("ascii"), value)) return format_git_object_from_headers( "raw_extrinsic_metadata", headers, metadata.metadata ) def extid_git_object(extid: model.ExtID) -> bytes: """Formats an extid as a gi-like object. An ExtID identifier is a salted sha1 (using the git hashing algorithm with the ``extid`` object type) of a manifest following the format: ``` extid_type $StrWithoutSpaces [extid_version $Str] extid $Bytes target $CoreSwhid ``` $StrWithoutSpaces is an ASCII string, and may not contain spaces. Newlines in $Bytes are escaped as with other git fields, ie. by adding a space after them. The extid_version line is only generated if the version is non-zero. 
""" headers = [ (b"extid_type", extid.extid_type.encode("ascii")), ] extid_version = extid.extid_version if extid_version != 0: headers.append((b"extid_version", str(extid_version).encode("ascii"))) headers.extend( [ (b"extid", extid.extid), (b"target", str(extid.target).encode("ascii")), ] ) return format_git_object_from_headers("extid", headers) diff --git a/swh/model/tests/test_model.py b/swh/model/tests/test_model.py index 8c058b9..590e4b4 100644 --- a/swh/model/tests/test_model.py +++ b/swh/model/tests/test_model.py @@ -1,1641 +1,1649 @@ # Copyright (C) 2019-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import collections import copy import datetime import hashlib from typing import Any, List, Optional, Tuple, Union import attr from attrs_strict import AttributeTypeError import dateutil from hypothesis import given from hypothesis.strategies import binary import pytest from swh.model.collections import ImmutableDict from swh.model.from_disk import DentryPerms import swh.model.git_objects from swh.model.hashutil import MultiHash, hash_to_bytes import swh.model.hypothesis_strategies as strategies import swh.model.model from swh.model.model import ( BaseModel, Content, Directory, DirectoryEntry, MetadataAuthority, MetadataAuthorityType, MetadataFetcher, MissingData, Origin, OriginVisit, OriginVisitStatus, Person, RawExtrinsicMetadata, Release, Revision, SkippedContent, Snapshot, TargetType, Timestamp, TimestampWithTimezone, type_validator, ) import swh.model.swhids from swh.model.swhids import CoreSWHID, ExtendedSWHID, ObjectType from swh.model.tests.swh_model_data import TEST_OBJECTS from swh.model.tests.test_identifiers import ( TS_DATETIMES, TS_TIMEZONES, directory_example, metadata_example, release_example, revision_example, snapshot_example, ) EXAMPLE_HASH = hash_to_bytes("94a9ed024d3859793618152ea559a168bbcbb5e2") @given(strategies.objects()) def test_todict_inverse_fromdict(objtype_and_obj): (obj_type, obj) = objtype_and_obj if obj_type in ("origin", "origin_visit"): return obj_as_dict = obj.to_dict() obj_as_dict_copy = copy.deepcopy(obj_as_dict) # Check the composition of to_dict and from_dict is the identity assert obj == type(obj).from_dict(obj_as_dict) # Check from_dict() does not change the input dict assert obj_as_dict == obj_as_dict_copy # Check the composition of from_dict and to_dict is the identity assert obj_as_dict == type(obj).from_dict(obj_as_dict).to_dict() @given(strategies.objects()) def test_repr(objtype_and_obj): """Checks every model object has a working repr(), and that it can be eval()uated (so that printed objects can be copy-pasted to write test cases.)""" (obj_type, obj) = objtype_and_obj r = repr(obj) env = { "tzutc": lambda: datetime.timezone.utc, "tzfile": dateutil.tz.tzfile, "hash_to_bytes": hash_to_bytes, **swh.model.swhids.__dict__, **swh.model.model.__dict__, } assert eval(r, env) == obj @attr.s class Cls1: pass @attr.s class Cls2(Cls1): pass _custom_namedtuple = collections.namedtuple("_custom_namedtuple", "a b") class _custom_tuple(tuple): pass # List of (type, valid_values, invalid_values) _TYPE_VALIDATOR_PARAMETERS: List[Tuple[Any, List[Any], List[Any]]] = [ # base types: ( bool, [True, False], [-1, 0, 1, 42, 1000, None, "123", 0.0, (), ("foo",), ImmutableDict()], ), ( int, [-1, 0, 1, 42, 1000, DentryPerms.directory, True, False], [None, "123", 0.0, (), 
ImmutableDict()], ), ( float, [-1.0, 0.0, 1.0, float("infinity"), float("NaN")], [True, False, None, 1, "1.2", (), ImmutableDict()], ), ( bytes, [b"", b"123"], [None, bytearray(b"\x12\x34"), "123", 0, 123, (), (1, 2, 3), ImmutableDict()], ), (str, ["", "123"], [None, b"123", b"", 0, (), (1, 2, 3), ImmutableDict()]), (None, [None], [b"", b"123", "", "foo", 0, 123, ImmutableDict(), float("NaN")]), # unions: ( Optional[int], [None, -1, 0, 1, 42, 1000, DentryPerms.directory], ["123", 0.0, (), ImmutableDict()], ), ( Optional[bytes], [None, b"", b"123"], ["123", "", 0, (), (1, 2, 3), ImmutableDict()], ), ( Union[str, bytes], ["", "123", b"123", b""], [None, 0, (), (1, 2, 3), ImmutableDict()], ), ( Union[str, bytes, None], ["", "123", b"123", b"", None], [0, (), (1, 2, 3), ImmutableDict()], ), # tuples ( Tuple[str, str], [("foo", "bar"), ("", ""), _custom_namedtuple("", ""), _custom_tuple(("", ""))], [("foo",), ("foo", "bar", "baz"), ("foo", 42), (42, "foo")], ), ( Tuple[str, ...], [ ("foo",), ("foo", "bar"), ("", ""), ("foo", "bar", "baz"), _custom_namedtuple("", ""), _custom_tuple(("", "")), ], [("foo", 42), (42, "foo")], ), # composite generic: ( Tuple[Union[str, int], Union[str, int]], [("foo", "foo"), ("foo", 42), (42, "foo"), (42, 42)], [("foo", b"bar"), (b"bar", "foo")], ), ( Union[Tuple[str, str], Tuple[int, int]], [("foo", "foo"), (42, 42)], [("foo", b"bar"), (b"bar", "foo"), ("foo", 42), (42, "foo")], ), ( Tuple[Tuple[bytes, bytes], ...], [(), ((b"foo", b"bar"),), ((b"foo", b"bar"), (b"baz", b"qux"))], [((b"foo", "bar"),), ((b"foo", b"bar"), ("baz", b"qux"))], ), # standard types: ( datetime.datetime, [ datetime.datetime(2021, 12, 15, 12, 59, 27), datetime.datetime(2021, 12, 15, 12, 59, 27, tzinfo=datetime.timezone.utc), ], [None, 123], ), # ImmutableDict ( ImmutableDict[str, int], [ ImmutableDict(), ImmutableDict({"foo": 42}), ImmutableDict({"foo": 42, "bar": 123}), ], [ImmutableDict({"foo": "bar"}), ImmutableDict({42: 123})], ), # Any: ( object, [-1, 0, 1, 42, 1000, None, "123", 0.0, (), ImmutableDict()], [], ), ( Any, [-1, 0, 1, 42, 1000, None, "123", 0.0, (), ImmutableDict()], [], ), ( ImmutableDict[Any, int], [ ImmutableDict(), ImmutableDict({"foo": 42}), ImmutableDict({"foo": 42, "bar": 123}), ImmutableDict({42: 123}), ], [ImmutableDict({"foo": "bar"})], ), ( ImmutableDict[str, Any], [ ImmutableDict(), ImmutableDict({"foo": 42}), ImmutableDict({"foo": "bar"}), ImmutableDict({"foo": 42, "bar": 123}), ], [ImmutableDict({42: 123})], ), # attr objects: ( Timestamp, [ Timestamp(seconds=123, microseconds=0), ], [None, "2021-09-28T11:27:59", 123], ), ( Cls1, [Cls1(), Cls2()], [None, b"abcd"], ), # enums: ( TargetType, [TargetType.CONTENT, TargetType.ALIAS], ["content", "alias", 123, None], ), ] @pytest.mark.parametrize( "type_,value", [ pytest.param(type_, value, id=f"type={type_}, value={value}") for (type_, values, _) in _TYPE_VALIDATOR_PARAMETERS for value in values ], ) def test_type_validator_valid(type_, value): type_validator()(None, attr.ib(type=type_), value) @pytest.mark.parametrize( "type_,value", [ pytest.param(type_, value, id=f"type={type_}, value={value}") for (type_, _, values) in _TYPE_VALIDATOR_PARAMETERS for value in values ], ) def test_type_validator_invalid(type_, value): with pytest.raises(AttributeTypeError): type_validator()(None, attr.ib(type=type_), value) @pytest.mark.parametrize("object_type, objects", TEST_OBJECTS.items()) def test_swh_model_todict_fromdict(object_type, objects): """checks model objects in swh_model_data are in correct shape""" assert 
objects for obj in objects: # Check the composition of from_dict and to_dict is the identity obj_as_dict = obj.to_dict() assert obj == type(obj).from_dict(obj_as_dict) assert obj_as_dict == type(obj).from_dict(obj_as_dict).to_dict() def test_unique_key(): url = "http://example.org/" date = datetime.datetime.now(tz=datetime.timezone.utc) id_ = b"42" * 10 assert Origin(url=url).unique_key() == {"url": url} assert OriginVisit(origin=url, date=date, type="git").unique_key() == { "origin": url, "date": str(date), } assert OriginVisitStatus( origin=url, visit=42, date=date, status="created", snapshot=None ).unique_key() == { "origin": url, "visit": "42", "date": str(date), } assert Snapshot.from_dict({**snapshot_example, "id": id_}).unique_key() == id_ assert Release.from_dict({**release_example, "id": id_}).unique_key() == id_ assert Revision.from_dict({**revision_example, "id": id_}).unique_key() == id_ assert Directory.from_dict({**directory_example, "id": id_}).unique_key() == id_ assert ( RawExtrinsicMetadata.from_dict({**metadata_example, "id": id_}).unique_key() == id_ ) cont = Content.from_data(b"foo") assert cont.unique_key().hex() == "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33" kwargs = { **cont.to_dict(), "reason": "foo", "status": "absent", } del kwargs["data"] assert SkippedContent(**kwargs).unique_key() == cont.hashes() # Anonymization @given(strategies.objects()) def test_anonymization(objtype_and_obj): (obj_type, obj) = objtype_and_obj def check_person(p): if p is not None: assert p.name is None assert p.email is None assert len(p.fullname) == 32 anon_obj = obj.anonymize() if obj_type == "person": assert anon_obj is not None check_person(anon_obj) elif obj_type == "release": assert anon_obj is not None check_person(anon_obj.author) elif obj_type == "revision": assert anon_obj is not None check_person(anon_obj.author) check_person(anon_obj.committer) else: assert anon_obj is None # Origin, OriginVisit, OriginVisitStatus @given(strategies.origins()) def test_todict_origins(origin): obj = origin.to_dict() assert "type" not in obj assert type(origin)(url=origin.url) == type(origin).from_dict(obj) @given(strategies.origin_visits()) def test_todict_origin_visits(origin_visit): obj = origin_visit.to_dict() assert origin_visit == type(origin_visit).from_dict(obj) def test_origin_visit_naive_datetime(): with pytest.raises(ValueError, match="must be a timezone-aware datetime"): OriginVisit( origin="http://foo/", date=datetime.datetime.now(), type="git", ) @given(strategies.origin_visit_statuses()) def test_todict_origin_visit_statuses(origin_visit_status): obj = origin_visit_status.to_dict() assert origin_visit_status == type(origin_visit_status).from_dict(obj) def test_origin_visit_status_naive_datetime(): with pytest.raises(ValueError, match="must be a timezone-aware datetime"): OriginVisitStatus( origin="http://foo/", visit=42, date=datetime.datetime.now(), status="ongoing", snapshot=None, ) # Timestamp @given(strategies.timestamps()) def test_timestamps_strategy(timestamp): attr.validate(timestamp) def test_timestamp_seconds(): attr.validate(Timestamp(seconds=0, microseconds=0)) with pytest.raises(AttributeTypeError): Timestamp(seconds="0", microseconds=0) attr.validate(Timestamp(seconds=2**63 - 1, microseconds=0)) with pytest.raises(ValueError): Timestamp(seconds=2**63, microseconds=0) attr.validate(Timestamp(seconds=-(2**63), microseconds=0)) with pytest.raises(ValueError): Timestamp(seconds=-(2**63) - 1, microseconds=0) def test_timestamp_microseconds(): 
attr.validate(Timestamp(seconds=0, microseconds=0)) with pytest.raises(AttributeTypeError): Timestamp(seconds=0, microseconds="0") attr.validate(Timestamp(seconds=0, microseconds=10**6 - 1)) with pytest.raises(ValueError): Timestamp(seconds=0, microseconds=10**6) with pytest.raises(ValueError): Timestamp(seconds=0, microseconds=-1) def test_timestamp_from_dict(): assert Timestamp.from_dict({"seconds": 10, "microseconds": 5}) with pytest.raises(AttributeTypeError): Timestamp.from_dict({"seconds": "10", "microseconds": 5}) with pytest.raises(AttributeTypeError): Timestamp.from_dict({"seconds": 10, "microseconds": "5"}) with pytest.raises(ValueError): Timestamp.from_dict({"seconds": 0, "microseconds": -1}) Timestamp.from_dict({"seconds": 0, "microseconds": 10**6 - 1}) with pytest.raises(ValueError): Timestamp.from_dict({"seconds": 0, "microseconds": 10**6}) # TimestampWithTimezone def test_timestampwithtimezone(): ts = Timestamp(seconds=0, microseconds=0) tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0000") attr.validate(tstz) assert tstz.offset_minutes() == 0 assert tstz.offset_bytes == b"+0000" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+0010") attr.validate(tstz) assert tstz.offset_minutes() == 10 assert tstz.offset_bytes == b"+0010" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"-0010") attr.validate(tstz) assert tstz.offset_minutes() == -10 assert tstz.offset_bytes == b"-0010" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"-0000") attr.validate(tstz) assert tstz.offset_minutes() == 0 assert tstz.offset_bytes == b"-0000" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"-1030") attr.validate(tstz) assert tstz.offset_minutes() == -630 assert tstz.offset_bytes == b"-1030" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+1320") attr.validate(tstz) assert tstz.offset_minutes() == 800 assert tstz.offset_bytes == b"+1320" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+200") attr.validate(tstz) assert tstz.offset_minutes() == 120 assert tstz.offset_bytes == b"+200" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+02") attr.validate(tstz) assert tstz.offset_minutes() == 120 assert tstz.offset_bytes == b"+02" tstz = TimestampWithTimezone(timestamp=ts, offset_bytes=b"+2000000000") attr.validate(tstz) assert tstz.offset_minutes() == 0 assert tstz.offset_bytes == b"+2000000000" with pytest.raises(AttributeTypeError): TimestampWithTimezone(timestamp=datetime.datetime.now(), offset_bytes=b"+0000") with pytest.raises((AttributeTypeError, TypeError)): TimestampWithTimezone(timestamp=ts, offset_bytes=0) def test_timestampwithtimezone_from_datetime(): # Typical case tz = datetime.timezone(datetime.timedelta(minutes=+60)) date = datetime.datetime(2020, 2, 27, 14, 39, 19, tzinfo=tz) tstz = TimestampWithTimezone.from_datetime(date) assert tstz == TimestampWithTimezone( timestamp=Timestamp( seconds=1582810759, microseconds=0, ), offset_bytes=b"+0100", ) # Typical case (close to epoch) tz = datetime.timezone(datetime.timedelta(minutes=+60)) date = datetime.datetime(1970, 1, 1, 1, 0, 5, tzinfo=tz) tstz = TimestampWithTimezone.from_datetime(date) assert tstz == TimestampWithTimezone( timestamp=Timestamp( seconds=5, microseconds=0, ), offset_bytes=b"+0100", ) # non-integer number of seconds before UNIX epoch date = datetime.datetime( 1969, 12, 31, 23, 59, 59, 100000, tzinfo=datetime.timezone.utc ) tstz = TimestampWithTimezone.from_datetime(date) assert tstz == TimestampWithTimezone( timestamp=Timestamp( seconds=-1, 
microseconds=100000, ), offset_bytes=b"+0000", ) # non-integer number of seconds in both the timestamp and the offset tz = datetime.timezone(datetime.timedelta(microseconds=-600000)) date = datetime.datetime(1969, 12, 31, 23, 59, 59, 600000, tzinfo=tz) tstz = TimestampWithTimezone.from_datetime(date) assert tstz == TimestampWithTimezone( timestamp=Timestamp( seconds=0, microseconds=200000, ), offset_bytes=b"+0000", ) # timezone offset with non-integer number of seconds, for dates before epoch # we round down to the previous second, so it should be the same as # 1969-01-01T23:59:59Z tz = datetime.timezone(datetime.timedelta(microseconds=900000)) date = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tz) tstz = TimestampWithTimezone.from_datetime(date) assert tstz == TimestampWithTimezone( timestamp=Timestamp( seconds=-1, microseconds=100000, ), offset_bytes=b"+0000", ) def test_timestampwithtimezone_from_naive_datetime(): date = datetime.datetime(2020, 2, 27, 14, 39, 19) with pytest.raises(ValueError, match="datetime without timezone"): TimestampWithTimezone.from_datetime(date) def test_timestampwithtimezone_from_iso8601(): date = "2020-02-27 14:39:19.123456+0100" tstz = TimestampWithTimezone.from_iso8601(date) assert tstz == TimestampWithTimezone( timestamp=Timestamp( seconds=1582810759, microseconds=123456, ), offset_bytes=b"+0100", ) def test_timestampwithtimezone_from_iso8601_negative_utc(): date = "2020-02-27 13:39:19-0000" tstz = TimestampWithTimezone.from_iso8601(date) assert tstz == TimestampWithTimezone( timestamp=Timestamp( seconds=1582810759, microseconds=0, ), offset_bytes=b"-0000", ) @pytest.mark.parametrize("date", TS_DATETIMES) @pytest.mark.parametrize("tz", TS_TIMEZONES) @pytest.mark.parametrize("microsecond", [0, 1, 10, 100, 1000, 999999]) def test_timestampwithtimezone_to_datetime(date, tz, microsecond): date = date.replace(tzinfo=tz, microsecond=microsecond) tstz = TimestampWithTimezone.from_datetime(date) assert tstz.to_datetime() == date assert tstz.to_datetime().utcoffset() == date.utcoffset() def test_person_from_fullname(): """The author should have name, email and fullname filled.""" actual_person = Person.from_fullname(b"tony ") assert actual_person == Person( fullname=b"tony ", name=b"tony", email=b"ynot@dagobah", ) def test_person_from_fullname_no_email(): """The author and fullname should be the same as the input (author).""" actual_person = Person.from_fullname(b"tony") assert actual_person == Person( fullname=b"tony", name=b"tony", email=None, ) def test_person_from_fullname_empty_person(): """Empty person has only its fullname filled with the empty byte-string. 
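Tying these two helpers back to the git_objects module, a brief sketch (the author line and ISO date follow the same style as the fixtures used in these tests) of how a `Person` and a `TimestampWithTimezone` combine into a git authorship line via `format_author_data`:

```python
from swh.model.git_objects import format_author_data
from swh.model.model import Person, TimestampWithTimezone

person = Person.from_fullname(b"tony <ynot@dagobah>")
tstz = TimestampWithTimezone.from_iso8601("2020-02-27 14:39:19+0100")

# "<fullname> <seconds since the epoch> <offset bytes>", as emitted in the
# author, committer and tagger headers of commit and tag manifests.
assert format_author_data(person, tstz) == b"tony <ynot@dagobah> 1582810759 +0100"
```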
""" actual_person = Person.from_fullname(b"") assert actual_person == Person( fullname=b"", name=None, email=None, ) def test_git_author_line_to_author(): # edge case out of the way with pytest.raises(TypeError): Person.from_fullname(None) tests = { b"a ": Person( name=b"a", email=b"b@c.com", fullname=b"a ", ), b"": Person( name=None, email=b"foo@bar.com", fullname=b"", ), b"malformed ': Person( name=b"malformed", email=b'"', ), b"trailing ": Person( name=b"trailing", email=b"sp@c.e", fullname=b"trailing ", ), b"no": Person( name=b"no", email=b"sp@c.e", fullname=b"no", ), b" more ": Person( name=b"more", email=b"sp@c.es", fullname=b" more ", ), b" <>": Person( name=None, email=None, fullname=b" <>", ), } for person in sorted(tests): expected_person = tests[person] assert expected_person == Person.from_fullname(person) def test_person_comparison(): """Check only the fullname attribute is used to compare Person objects""" person = Person(fullname=b"p1", name=None, email=None) assert attr.evolve(person, name=b"toto") == person assert attr.evolve(person, email=b"toto@example.com") == person person = Person(fullname=b"", name=b"toto", email=b"toto@example.com") assert attr.evolve(person, fullname=b"dude") != person # Content def test_content_get_hash(): hashes = dict(sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux") c = Content(length=42, status="visible", **hashes) for (hash_name, hash_) in hashes.items(): assert c.get_hash(hash_name) == hash_ def test_content_hashes(): hashes = dict(sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux") c = Content(length=42, status="visible", **hashes) assert c.hashes() == hashes def test_content_data(): c = Content( length=42, status="visible", data=b"foo", sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux", ) assert c.with_data() == c def test_content_data_missing(): c = Content( length=42, status="visible", sha1=b"foo", sha1_git=b"bar", sha256=b"baz", blake2s256=b"qux", ) with pytest.raises(MissingData): c.with_data() @given(strategies.present_contents_d()) def test_content_from_dict(content_d): c = Content.from_data(**content_d) assert c assert c.ctime == content_d["ctime"] content_d2 = c.to_dict() c2 = Content.from_dict(content_d2) assert c2.ctime == c.ctime def test_content_from_dict_str_ctime(): # test with ctime as a string n = datetime.datetime(2020, 5, 6, 12, 34, tzinfo=datetime.timezone.utc) content_d = { "ctime": n.isoformat(), "data": b"", "length": 0, "sha1": b"\x00", "sha256": b"\x00", "sha1_git": b"\x00", "blake2s256": b"\x00", } c = Content.from_dict(content_d) assert c.ctime == n def test_content_from_dict_str_naive_ctime(): # test with ctime as a string n = datetime.datetime(2020, 5, 6, 12, 34) content_d = { "ctime": n.isoformat(), "data": b"", "length": 0, "sha1": b"\x00", "sha256": b"\x00", "sha1_git": b"\x00", "blake2s256": b"\x00", } with pytest.raises(ValueError, match="must be a timezone-aware datetime."): Content.from_dict(content_d) @given(binary(max_size=4096)) def test_content_from_data(data): c = Content.from_data(data) assert c.data == data assert c.length == len(data) assert c.status == "visible" for key, value in MultiHash.from_data(data).digest().items(): assert getattr(c, key) == value @given(binary(max_size=4096)) def test_hidden_content_from_data(data): c = Content.from_data(data, status="hidden") assert c.data == data assert c.length == len(data) assert c.status == "hidden" for key, value in MultiHash.from_data(data).digest().items(): assert getattr(c, key) == value def 
test_content_naive_datetime(): c = Content.from_data(b"foo") with pytest.raises(ValueError, match="must be a timezone-aware datetime"): Content( **c.to_dict(), ctime=datetime.datetime.now(), ) +@given(strategies.present_contents().filter(lambda cnt: cnt.data is not None)) +def test_content_git_roundtrip(content): + assert content.data is not None + raw = swh.model.git_objects.content_git_object(content) + sha1_git = hashlib.new("sha1", raw).digest() + assert content.sha1_git == sha1_git + + # SkippedContent @given(binary(max_size=4096)) def test_skipped_content_from_data(data): c = SkippedContent.from_data(data, reason="reason") assert c.reason == "reason" assert c.length == len(data) assert c.status == "absent" for key, value in MultiHash.from_data(data).digest().items(): assert getattr(c, key) == value @given(strategies.skipped_contents_d()) def test_skipped_content_origin_is_str(skipped_content_d): assert SkippedContent.from_dict(skipped_content_d) skipped_content_d["origin"] = "http://path/to/origin" assert SkippedContent.from_dict(skipped_content_d) skipped_content_d["origin"] = Origin(url="http://path/to/origin") with pytest.raises(ValueError, match="origin"): SkippedContent.from_dict(skipped_content_d) def test_skipped_content_naive_datetime(): c = SkippedContent.from_data(b"foo", reason="reason") with pytest.raises(ValueError, match="must be a timezone-aware datetime"): SkippedContent( **c.to_dict(), ctime=datetime.datetime.now(), ) # Directory @given(strategies.directories().filter(lambda d: d.raw_manifest is None)) def test_directory_check(directory): directory.check() directory2 = attr.evolve(directory, id=b"\x00" * 20) with pytest.raises(ValueError, match="does not match recomputed hash"): directory2.check() directory2 = attr.evolve( directory, raw_manifest=swh.model.git_objects.directory_git_object(directory) ) with pytest.raises( ValueError, match="non-none raw_manifest attribute, but does not need it." 
): directory2.check() @given(strategies.directories().filter(lambda d: d.raw_manifest is None)) def test_directory_raw_manifest(directory): assert "raw_manifest" not in directory.to_dict() raw_manifest = b"foo" id_ = hashlib.new("sha1", raw_manifest).digest() directory2 = attr.evolve(directory, raw_manifest=raw_manifest) assert directory2.to_dict()["raw_manifest"] == raw_manifest with pytest.raises(ValueError, match="does not match recomputed hash"): directory2.check() directory2 = attr.evolve(directory, raw_manifest=raw_manifest, id=id_) assert directory2.id is not None assert directory2.id == id_ != directory.id assert directory2.to_dict()["raw_manifest"] == raw_manifest directory2.check() def test_directory_entry_name_validation(): with pytest.raises(ValueError, match="valid directory entry name."): DirectoryEntry(name=b"foo/", type="dir", target=b"\x00" * 20, perms=0), def test_directory_duplicate_entry_name(): entries = ( DirectoryEntry(name=b"foo", type="file", target=b"\x00" * 20, perms=0), DirectoryEntry(name=b"foo", type="dir", target=b"\x01" * 20, perms=1), ) with pytest.raises(ValueError, match="duplicated entry name"): Directory(entries=entries) entries = ( DirectoryEntry(name=b"foo", type="file", target=b"\x00" * 20, perms=0), DirectoryEntry(name=b"foo", type="file", target=b"\x00" * 20, perms=0), ) with pytest.raises(ValueError, match="duplicated entry name"): Directory(entries=entries) # Release @given(strategies.releases().filter(lambda rel: rel.raw_manifest is None)) def test_release_check(release): release.check() release2 = attr.evolve(release, id=b"\x00" * 20) with pytest.raises(ValueError, match="does not match recomputed hash"): release2.check() release2 = attr.evolve( release, raw_manifest=swh.model.git_objects.release_git_object(release) ) with pytest.raises( ValueError, match="non-none raw_manifest attribute, but does not need it." ): release2.check() @given(strategies.releases().filter(lambda rev: rev.raw_manifest is None)) def test_release_raw_manifest(release): raw_manifest = b"foo" id_ = hashlib.new("sha1", raw_manifest).digest() release2 = attr.evolve(release, raw_manifest=raw_manifest) assert release2.to_dict()["raw_manifest"] == raw_manifest with pytest.raises(ValueError, match="does not match recomputed hash"): release2.check() release2 = attr.evolve(release, raw_manifest=raw_manifest, id=id_) assert release2.id is not None assert release2.id == id_ != release.id assert release2.to_dict()["raw_manifest"] == raw_manifest release2.check() # Revision @given(strategies.revisions().filter(lambda rev: rev.raw_manifest is None)) def test_revision_check(revision): revision.check() revision2 = attr.evolve(revision, id=b"\x00" * 20) with pytest.raises(ValueError, match="does not match recomputed hash"): revision2.check() revision2 = attr.evolve( revision, raw_manifest=swh.model.git_objects.revision_git_object(revision) ) with pytest.raises( ValueError, match="non-none raw_manifest attribute, but does not need it." 
): revision2.check() @given(strategies.revisions().filter(lambda rev: rev.raw_manifest is None)) def test_revision_raw_manifest(revision): raw_manifest = b"foo" id_ = hashlib.new("sha1", raw_manifest).digest() revision2 = attr.evolve(revision, raw_manifest=raw_manifest) assert revision2.to_dict()["raw_manifest"] == raw_manifest with pytest.raises(ValueError, match="does not match recomputed hash"): revision2.check() revision2 = attr.evolve(revision, raw_manifest=raw_manifest, id=id_) assert revision2.id is not None assert revision2.id == id_ != revision.id assert revision2.to_dict()["raw_manifest"] == raw_manifest revision2.check() def test_revision_extra_headers_no_headers(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_model = Revision(**rev_dict) assert rev_model.metadata is None assert rev_model.extra_headers == () rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } rev_model = Revision(**rev_dict) assert rev_model.metadata == rev_dict["metadata"] assert rev_model.extra_headers == () def test_revision_extra_headers_with_headers(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\x00"), (b"header1", b"again"), ) rev_dict["extra_headers"] = extra_headers rev_model = Revision(**rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_in_metadata(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\x00"), (b"header1", b"again"), ) # check the bw-compat init hook does the job # ie. 
extra_headers are given in the metadata field rev_dict["metadata"]["extra_headers"] = extra_headers rev_model = Revision(**rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_as_lists(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) rev_dict = attr.asdict(rev, recurse=False) rev_dict["metadata"] = {} extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\x00"), (b"header1", b"again"), ) # check Revision.extra_headers tuplify does the job rev_dict["extra_headers"] = [list(x) for x in extra_headers] rev_model = Revision(**rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_type_error(): rev_dict = revision_example.copy() rev_dict.pop("id") rev = Revision.from_dict(rev_dict) orig_rev_dict = attr.asdict(rev, recurse=False) orig_rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( ("header1", b"value1"), (b"header2", 42), ("header1", "again"), ) # check headers one at a time # if given as extra_header for extra_header in extra_headers: rev_dict = copy.deepcopy(orig_rev_dict) rev_dict["extra_headers"] = (extra_header,) with pytest.raises(AttributeTypeError): Revision(**rev_dict) # if given as metadata for extra_header in extra_headers: rev_dict = copy.deepcopy(orig_rev_dict) rev_dict["metadata"]["extra_headers"] = (extra_header,) with pytest.raises(AttributeTypeError): Revision(**rev_dict) def test_revision_extra_headers_from_dict(): rev_dict = revision_example.copy() rev_dict.pop("id") rev_model = Revision.from_dict(rev_dict) assert rev_model.metadata is None assert rev_model.extra_headers == () rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } rev_model = Revision.from_dict(rev_dict) assert rev_model.metadata == rev_dict["metadata"] assert rev_model.extra_headers == () extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\nmaybe\x00\xff"), (b"header1", b"again"), ) rev_dict["extra_headers"] = extra_headers rev_model = Revision.from_dict(rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_in_metadata_from_dict(): rev_dict = revision_example.copy() rev_dict.pop("id") rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\nmaybe\x00\xff"), (b"header1", b"again"), ) # check the bw-compat init hook does the job rev_dict["metadata"]["extra_headers"] = extra_headers rev_model = Revision.from_dict(rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def test_revision_extra_headers_as_lists_from_dict(): rev_dict = revision_example.copy() rev_dict.pop("id") rev_model = Revision.from_dict(rev_dict) rev_dict["metadata"] = { "something": "somewhere", "some other thing": "stranger", } extra_headers = ( (b"header1", b"value1"), (b"header2", b"42"), (b"header3", b"should I?\nmaybe\x00\xff"), (b"header1", b"again"), ) # check Revision.extra_headers converter does the job rev_dict["extra_headers"] = [list(x) for x in extra_headers] rev_model = Revision.from_dict(rev_dict) assert "extra_headers" not in rev_model.metadata assert rev_model.extra_headers == extra_headers def 
test_revision_no_author_or_committer_from_dict(): rev_dict = revision_example.copy() rev_dict["author"] = rev_dict["date"] = None rev_dict["committer"] = rev_dict["committer_date"] = None rev_model = Revision.from_dict(rev_dict) assert rev_model.to_dict() == { **rev_dict, "parents": tuple(rev_dict["parents"]), "extra_headers": (), "metadata": None, } def test_revision_none_author_or_committer(): rev_dict = revision_example.copy() rev_dict["author"] = None with pytest.raises(ValueError, match=".*date must be None if author is None.*"): Revision.from_dict(rev_dict) rev_dict = revision_example.copy() rev_dict["committer"] = None with pytest.raises( ValueError, match=".*committer_date must be None if committer is None.*" ): Revision.from_dict(rev_dict) @given(strategies.objects(split_content=True)) def test_object_type(objtype_and_obj): obj_type, obj = objtype_and_obj assert obj_type == obj.object_type def test_object_type_is_final(): object_types = set() def check_final(cls): if hasattr(cls, "object_type"): assert cls.object_type not in object_types object_types.add(cls.object_type) if cls.__subclasses__(): assert not hasattr(cls, "object_type") for subcls in cls.__subclasses__(): check_final(subcls) check_final(BaseModel) _metadata_authority = MetadataAuthority( type=MetadataAuthorityType.FORGE, url="https://forge.softwareheritage.org", ) _metadata_fetcher = MetadataFetcher( name="test-fetcher", version="0.0.1", ) _content_swhid = ExtendedSWHID.from_string( "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2" ) _origin_url = "https://forge.softwareheritage.org/source/swh-model.git" _origin_swhid = ExtendedSWHID.from_string( "swh:1:ori:94a9ed024d3859793618152ea559a168bbcbb5e2" ) _dummy_qualifiers = {"origin": "https://example.com", "lines": "42"} _common_metadata_fields = dict( discovery_date=datetime.datetime( 2021, 1, 29, 13, 57, 9, tzinfo=datetime.timezone.utc ), authority=_metadata_authority, fetcher=_metadata_fetcher, format="json", metadata=b'{"origin": "https://example.com", "lines": "42"}', ) def test_metadata_valid(): """Checks valid RawExtrinsicMetadata objects don't raise an error.""" # Simplest case RawExtrinsicMetadata(target=_origin_swhid, **_common_metadata_fields) # Object with an SWHID RawExtrinsicMetadata( target=_content_swhid, **_common_metadata_fields, ) def test_metadata_to_dict(): """Checks valid RawExtrinsicMetadata objects don't raise an error.""" common_fields = { "authority": {"type": "forge", "url": "https://forge.softwareheritage.org"}, "fetcher": { "name": "test-fetcher", "version": "0.0.1", }, "discovery_date": _common_metadata_fields["discovery_date"], "format": "json", "metadata": b'{"origin": "https://example.com", "lines": "42"}', } m = RawExtrinsicMetadata( target=_origin_swhid, **_common_metadata_fields, ) assert m.to_dict() == { "target": str(_origin_swhid), "id": b"@j\xc9\x01\xbc\x1e#p*\xf3q9\xa7u\x97\x00\x14\x02xa", **common_fields, } assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m m = RawExtrinsicMetadata( target=_content_swhid, **_common_metadata_fields, ) assert m.to_dict() == { "target": "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2", "id": b"\xbc\xa3U\xddf\x19U\xc5\xd2\xd7\xdfK\xd7c\x1f\xa8\xfeh\x992", **common_fields, } assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m hash_hex = "6162" * 10 hash_bin = b"ab" * 10 m = RawExtrinsicMetadata( target=_content_swhid, **_common_metadata_fields, origin="https://example.org/", snapshot=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=hash_bin), 
release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=hash_bin), revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=hash_bin), path=b"/foo/bar", directory=CoreSWHID(object_type=ObjectType.DIRECTORY, object_id=hash_bin), ) assert m.to_dict() == { "target": "swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2", "id": b"\x14l\xb0\x1f\xb9\xc0{)\xc7\x0f\xbd\xc0*,YZ\xf5C\xab\xfc", **common_fields, "origin": "https://example.org/", "snapshot": f"swh:1:snp:{hash_hex}", "release": f"swh:1:rel:{hash_hex}", "revision": f"swh:1:rev:{hash_hex}", "path": b"/foo/bar", "directory": f"swh:1:dir:{hash_hex}", } assert RawExtrinsicMetadata.from_dict(m.to_dict()) == m def test_metadata_invalid_target(): """Checks various invalid values for the 'target' field.""" # SWHID passed as string instead of SWHID with pytest.raises(ValueError, match="target must be.*ExtendedSWHID"): RawExtrinsicMetadata( target="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2", **_common_metadata_fields, ) def test_metadata_naive_datetime(): with pytest.raises(ValueError, match="must be a timezone-aware datetime"): RawExtrinsicMetadata( target=_origin_swhid, **{**_common_metadata_fields, "discovery_date": datetime.datetime.now()}, ) def test_metadata_validate_context_origin(): """Checks validation of RawExtrinsicMetadata.origin.""" # Origins can't have an 'origin' context with pytest.raises( ValueError, match="Unexpected 'origin' context for origin object" ): RawExtrinsicMetadata( target=_origin_swhid, origin=_origin_url, **_common_metadata_fields, ) # but all other types can RawExtrinsicMetadata( target=_content_swhid, origin=_origin_url, **_common_metadata_fields, ) # SWHIDs aren't valid origin URLs with pytest.raises(ValueError, match="SWHID used as context origin URL"): RawExtrinsicMetadata( target=_content_swhid, origin="swh:1:cnt:94a9ed024d3859793618152ea559a168bbcbb5e2", **_common_metadata_fields, ) def test_metadata_validate_context_visit(): """Checks validation of RawExtrinsicMetadata.visit.""" # Origins can't have a 'visit' context with pytest.raises( ValueError, match="Unexpected 'visit' context for origin object" ): RawExtrinsicMetadata( target=_origin_swhid, visit=42, **_common_metadata_fields, ) # but all other types can RawExtrinsicMetadata( target=_content_swhid, origin=_origin_url, visit=42, **_common_metadata_fields, ) # Missing 'origin' with pytest.raises(ValueError, match="'origin' context must be set if 'visit' is"): RawExtrinsicMetadata( target=_content_swhid, visit=42, **_common_metadata_fields, ) # visit id must be positive with pytest.raises(ValueError, match="Nonpositive visit id"): RawExtrinsicMetadata( target=_content_swhid, origin=_origin_url, visit=-42, **_common_metadata_fields, ) def test_metadata_validate_context_snapshot(): """Checks validation of RawExtrinsicMetadata.snapshot.""" # Origins can't have a 'snapshot' context with pytest.raises( ValueError, match="Unexpected 'snapshot' context for origin object" ): RawExtrinsicMetadata( target=_origin_swhid, snapshot=CoreSWHID( object_type=ObjectType.SNAPSHOT, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) # but content can RawExtrinsicMetadata( target=_content_swhid, snapshot=CoreSWHID(object_type=ObjectType.SNAPSHOT, object_id=EXAMPLE_HASH), **_common_metadata_fields, ) # SWHID type doesn't match the expected type of this context key with pytest.raises( ValueError, match="Expected SWHID type 'snapshot', got 'content'" ): RawExtrinsicMetadata( target=_content_swhid, snapshot=CoreSWHID( object_type=ObjectType.CONTENT, 
object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) def test_metadata_validate_context_release(): """Checks validation of RawExtrinsicMetadata.release.""" # Origins can't have a 'release' context with pytest.raises( ValueError, match="Unexpected 'release' context for origin object" ): RawExtrinsicMetadata( target=_origin_swhid, release=CoreSWHID( object_type=ObjectType.RELEASE, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) # but content can RawExtrinsicMetadata( target=_content_swhid, release=CoreSWHID(object_type=ObjectType.RELEASE, object_id=EXAMPLE_HASH), **_common_metadata_fields, ) # SWHID type doesn't match the expected type of this context key with pytest.raises( ValueError, match="Expected SWHID type 'release', got 'content'" ): RawExtrinsicMetadata( target=_content_swhid, release=CoreSWHID( object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) def test_metadata_validate_context_revision(): """Checks validation of RawExtrinsicMetadata.revision.""" # Origins can't have a 'revision' context with pytest.raises( ValueError, match="Unexpected 'revision' context for origin object" ): RawExtrinsicMetadata( target=_origin_swhid, revision=CoreSWHID( object_type=ObjectType.REVISION, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) # but content can RawExtrinsicMetadata( target=_content_swhid, revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=EXAMPLE_HASH), **_common_metadata_fields, ) # SWHID type doesn't match the expected type of this context key with pytest.raises( ValueError, match="Expected SWHID type 'revision', got 'content'" ): RawExtrinsicMetadata( target=_content_swhid, revision=CoreSWHID( object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) def test_metadata_validate_context_path(): """Checks validation of RawExtrinsicMetadata.path.""" # Origins can't have a 'path' context with pytest.raises(ValueError, match="Unexpected 'path' context for origin object"): RawExtrinsicMetadata( target=_origin_swhid, path=b"/foo/bar", **_common_metadata_fields, ) # but content can RawExtrinsicMetadata( target=_content_swhid, path=b"/foo/bar", **_common_metadata_fields, ) def test_metadata_validate_context_directory(): """Checks validation of RawExtrinsicMetadata.directory.""" # Origins can't have a 'directory' context with pytest.raises( ValueError, match="Unexpected 'directory' context for origin object" ): RawExtrinsicMetadata( target=_origin_swhid, directory=CoreSWHID( object_type=ObjectType.DIRECTORY, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) # but content can RawExtrinsicMetadata( target=_content_swhid, directory=CoreSWHID( object_type=ObjectType.DIRECTORY, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) # SWHID type doesn't match the expected type of this context key with pytest.raises( ValueError, match="Expected SWHID type 'directory', got 'content'" ): RawExtrinsicMetadata( target=_content_swhid, directory=CoreSWHID( object_type=ObjectType.CONTENT, object_id=EXAMPLE_HASH, ), **_common_metadata_fields, ) def test_metadata_normalize_discovery_date(): fields_copy = {**_common_metadata_fields} truncated_date = fields_copy.pop("discovery_date") assert truncated_date.microsecond == 0 # Check for TypeError on disabled object type: we removed attrs_strict's # type_validator with pytest.raises(TypeError): RawExtrinsicMetadata( target=_content_swhid, discovery_date="not a datetime", **fields_copy ) # Check for truncation to integral second date_with_us = 
truncated_date.replace(microsecond=42) md = RawExtrinsicMetadata( target=_content_swhid, discovery_date=date_with_us, **fields_copy, ) assert md.discovery_date == truncated_date assert md.discovery_date.tzinfo == datetime.timezone.utc # Check that the timezone gets normalized. Timezones can be offset by a # non-integral number of seconds, so we need to handle that. timezone = datetime.timezone(offset=datetime.timedelta(hours=2)) date_with_tz = truncated_date.astimezone(timezone) assert date_with_tz.tzinfo != datetime.timezone.utc md = RawExtrinsicMetadata( target=_content_swhid, discovery_date=date_with_tz, **fields_copy, ) assert md.discovery_date == truncated_date assert md.discovery_date.tzinfo == datetime.timezone.utc diff --git a/tox.ini b/tox.ini index cf034c9..5198d08 100644 --- a/tox.ini +++ b/tox.ini @@ -1,83 +1,83 @@ [tox] envlist=black,flake8,mypy,py3-{minimal,full} [testenv] extras = full: testing minimal: testing-minimal deps = pytest-cov commands = pytest \ --doctest-modules \ full: --cov={envsitepackagesdir}/swh/model --cov-branch {posargs} \ full: {envsitepackagesdir}/swh/model minimal: {envsitepackagesdir}/swh/model/tests/test_cli.py -m 'not requires_optional_deps' [testenv:py3] skip_install = true deps = tox commands = tox -e py3-full -- {posargs} tox -e py3-minimal -- {posargs} [testenv:black] skip_install = true deps = black==22.3.0 commands = {envpython} -m black --check swh [testenv:flake8] skip_install = true deps = flake8==4.0.1 flake8-bugbear==22.3.23 commands = {envpython} -m flake8 [testenv:mypy] extras = testing deps = - mypy==0.920 + mypy==0.942 commands = mypy swh # build documentation outside swh-environment using the current # git HEAD of swh-docs, is executed on CI for each diff to prevent # breaking doc build [testenv:sphinx] whitelist_externals = make usedevelop = true extras = testing deps = # fetch and install swh-docs in develop mode -e git+https://forge.softwareheritage.org/source/swh-docs#egg=swh.docs setenv = SWH_PACKAGE_DOC_TOX_BUILD = 1 # turn warnings into errors SPHINXOPTS = -W commands = make -I ../.tox/sphinx/src/swh-docs/swh/ -C docs # build documentation only inside swh-environment using local state # of swh-docs package [testenv:sphinx-dev] whitelist_externals = make usedevelop = true extras = testing deps = # install swh-docs in develop mode -e ../swh-docs setenv = SWH_PACKAGE_DOC_TOX_BUILD = 1 # turn warnings into errors SPHINXOPTS = -W commands = make -I ../.tox/sphinx-dev/src/swh-docs/swh/ -C docs