diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3cc45b3..05398bb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,49 +1,42 @@ repos: -- repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.4.0 - hooks: - - id: trailing-whitespace - - id: check-json - - id: check-yaml + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: trailing-whitespace + - id: check-json + - id: check-yaml -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.3 - hooks: - - id: flake8 + - repo: https://gitlab.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 -- repo: https://github.com/codespell-project/codespell - rev: v1.16.0 - hooks: - - id: codespell + - repo: https://github.com/codespell-project/codespell + rev: v2.1.0 + hooks: + - id: codespell + name: Check source code spelling + stages: [commit] + - id: codespell + name: Check commit message spelling + stages: [commit-msg] -- repo: local - hooks: - - id: mypy - name: mypy - entry: mypy - args: [swh] - pass_filenames: false - language: system - types: [python] + - repo: local + hooks: + - id: mypy + name: mypy + entry: mypy + args: [swh] + pass_filenames: false + language: system + types: [python] -- repo: https://github.com/PyCQA/isort - rev: 5.5.2 - hooks: - - id: isort - -- repo: https://github.com/python/black - rev: 19.10b0 - hooks: - - id: black - -# unfortunately, we are far from being able to enable this... -# - repo: https://github.com/PyCQA/pydocstyle.git -# rev: 4.0.0 -# hooks: -# - id: pydocstyle -# name: pydocstyle -# description: pydocstyle is a static analysis tool for checking compliance with Python docstring conventions. -# entry: pydocstyle --convention=google -# language: python -# types: [python] + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + - repo: https://github.com/python/black + rev: 19.10b0 + hooks: + - id: black diff --git a/PKG-INFO b/PKG-INFO index 717b711..b9d6de0 100644 --- a/PKG-INFO +++ b/PKG-INFO @@ -1,90 +1,90 @@ Metadata-Version: 2.1 Name: swh.search -Version: 0.11.6 +Version: 0.13.0 Summary: Software Heritage search service Home-page: https://forge.softwareheritage.org/diffusion/DSEA Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-search Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-search/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 3 - Alpha Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-search ========== Search service for the Software Heritage archive. It is similar to swh-storage in what it contains, but provides different ways to query it: while swh-storage is mostly a key-value store that returns an object from a primary key, swh-search is focused on reverse indices, to allow finding objects that match some criteria; for example full-text search. 
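To make the contrast concrete, here is a minimal sketch of that criteria-based interface using the in-memory backend bundled with this package (illustrative only; the hosted service runs against Elasticsearch, and the example document carries just the mandatory `url` field):

```python
from swh.search.in_memory import InMemorySearch

# Index a single origin document, then find it back by a URL keyword.
search = InMemorySearch()
search.initialize()
search.origin_update([{"url": "https://github.com/Django/django"}])
search.flush()  # no-op for the in-memory backend, kept for interface fidelity

page = search.origin_search(url_pattern="django")
print([result["url"] for result in page.results])
# ['https://github.com/Django/django']
```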
Currently uses ElasticSearch, and provides only origin search (by URL and metadata). ## Dependencies - Python tests for this module include tests that cannot be run without a local ElasticSearch instance, so you need the ElasticSearch server executable on your machine (no need to have a running ElasticSearch server). - Debian-like host The elasticsearch package is required. As it's not part of debian-stable, [an additional Debian repository must be configured](https://www.elastic.co/guide/en/elasticsearch/reference/current/deb.html#deb-repo) - Non Debian-like host The tests expect: - `/usr/share/elasticsearch/jdk/bin/java` to exist. - `org.elasticsearch.bootstrap.Elasticsearch` to be in java's classpath. - Emscripten is required for generating the tree-sitter WASM module. The following commands need to be executed for the setup: ```bash cd /opt && git clone https://github.com/emscripten-core/emsdk.git && cd emsdk && \ ./emsdk install latest && ./emsdk activate latest PATH="${PATH}:/opt/emsdk/upstream/emscripten" ``` **Note:** If emsdk isn't found in the PATH, the tree-sitter CLI automatically pulls the `emscripten/emsdk` image from Docker Hub when `make ts-build-wasm` or `make ts-build` is used. ## Make targets Below is the list of available make targets that can be executed from the root directory of swh-search in order to build and/or run swh-search under various configurations: * **ts-install**: Install node_modules and the emscripten SDK required for TreeSitter * **ts-generate**: Generate parser files (C and JSON) from the grammar * **ts-repl**: Starts a web-based playground for the TreeSitter grammar. It's the recommended way to develop the TreeSitter grammar. * **ts-dev**: Parse the `query_language/sample_query` and print the corresponding syntax expression along with the start and end positions of all the nodes. * **ts-dev sanitize=1**: Same as **ts-dev** but without the start and end positions of the nodes. This format is expected by TreeSitter's native test command. `sanitize=1` cleans the output of **ts-dev** using `sed` to achieve the desired format. * **ts-test**: Executes TreeSitter's native tests * **ts-build-so**: Generates the `swh_ql.so` file from the previously generated parser using py-tree-sitter * **ts-build-wasm**: Generates the `swh_ql.wasm` file from the previously generated parser using emscripten * **ts-build**: Executes both **ts-build-so** and **ts-build-wasm** diff --git a/debian/changelog b/debian/changelog index d7f1970..eef2d9d 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,339 +1,367 @@ -swh-search (0.11.6-1~swh1~bpo10+1) buster-swh; urgency=medium +swh-search (0.13.0-1~swh1) unstable-swh; urgency=medium - * Rebuild for buster-swh + * New upstream release 0.13.0 - (tagged by Valentin Lorentz + on 2022-02-16 13:12:20 +0100) * Upstream changes: - v0.13.0 - * Use ':' for substring + matching instead of '=' - * translator: Fix 'visited = false' + queries to actually return results.
- * grammar: Prevent + 'isoDateTime' rule from being too greedy - -- Software Heritage autobuilder (on jenkins-debian1) Wed, 29 Sep 2021 13:56:57 +0000 + -- Software Heritage autobuilder (on jenkins-debian1) Wed, 16 Feb 2022 12:17:38 +0000 + +swh-search (0.12.1-1~swh1) unstable-swh; urgency=medium + + * New upstream release 0.12.1 - (tagged by Valentin Lorentz + on 2022-02-14 15:26:46 +0100) + * Upstream changes: - v0.12.1 - * Make RemoteSearch reraise + specific exceptions instead of generic RemoteException - * Fix + crash when no filter but the main query is given + + -- Software Heritage autobuilder (on jenkins-debian1) Mon, 14 Feb 2022 14:31:40 +0000 + +swh-search (0.12.0-1~swh1) unstable-swh; urgency=medium + + * New upstream release 0.12.0 - (tagged by Valentin Lorentz + on 2022-01-12 13:55:44 +0100) + * Upstream changes: - v0.12.0 - * search: Ensure CodeMeta + dates are properly formatted - * setup.py: use yarnpkg instead + of yarn if present in PATH - * swh.search.utils: Fix type - + * conftest: Fix tests hang since elasticsearch 7.16 release - * + Unpin tree-sitter dependency - * tests: Use + TimestampWithTimezone.from_datetime() instead of the constructor + + -- Software Heritage autobuilder (on jenkins-debian1) Wed, 12 Jan 2022 13:00:33 +0000 swh-search (0.11.6-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.6 - (tagged by Antoine Lambert on 2021-09-29 15:47:53 +0200) * Upstream changes: - version 0.11.6 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 29 Sep 2021 13:53:44 +0000 swh-search (0.11.5-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.5 - (tagged by Antoine Lambert on 2021-09-28 17:39:15 +0200) * Upstream changes: - version 0.11.5 -- Software Heritage autobuilder (on jenkins-debian1) Tue, 28 Sep 2021 15:48:00 +0000 swh-search (0.11.4-2~swh1) unstable-swh; urgency=medium * Use --no-ext-rename in dh_python3 to avoid renaming swh_ql.so -- Nicolas Dandrimont Wed, 01 Sep 2021 17:12:49 +0200 swh-search (0.11.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.4 - (tagged by Valentin Lorentz on 2021-08-31 15:01:41 +0200) * Upstream changes: - v0.11.4 - * Fix debian build -- Software Heritage autobuilder (on jenkins-debian1) Tue, 31 Aug 2021 13:15:08 +0000 swh-search (0.11.3-3~swh1) unstable-swh; urgency=medium * This package is now architecture-dependent * Make pytest more verbose -- Nicolas Dandrimont Tue, 31 Aug 2021 15:00:42 +0200 swh-search (0.11.3-2~swh1) unstable-swh; urgency=medium * Add python3-tree-sitter build-dependency -- Nicolas Dandrimont Tue, 31 Aug 2021 14:18:43 +0200 swh-search (0.11.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.3 - (tagged by Valentin Lorentz on 2021-08-31 14:04:03 +0200) * Upstream changes: - v0.11.3 - * clean up sdist -- Software Heritage autobuilder (on jenkins-debian1) Tue, 31 Aug 2021 12:14:47 +0000 swh-search (0.11.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.2 - (tagged by Valentin Lorentz on 2021-08-18 12:02:09 +0200) * Upstream changes: - v0.11.2 - * cli.py: Add rpc-serve command - * grammar.js: Improve grammar and export tokens -- Software Heritage autobuilder (on jenkins-debian1) Wed, 18 Aug 2021 10:07:04 +0000 swh-search (0.11.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.1 - (tagged by Vincent SELLIER on 2021-08-16 18:33:00 +0200) * Upstream changes: - v0.11.1 - fix the tree-sitter dependency management during the pypi build -- Software Heritage autobuilder (on jenkins-debian1) Mon, 16 Aug 2021 16:40:38 
+0000 swh-search (0.11.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.11.0 - (tagged by Valentin Lorentz on 2021-08-09 17:27:33 +0200) * Upstream changes: - v0.11.0 - * Add logging for search terms in debug mode - * journal_client: use origin_visit_status.type instead of origin_visit - * Add query language - * Disable fetch_last_revision_release_date outside tests -- Software Heritage autobuilder (on jenkins-debian1) Fri, 13 Aug 2021 14:42:01 +0000 swh-search (0.10.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.10.0 - (tagged by Nicolas Dandrimont on 2021-07-21 10:35:59 +0200) * Upstream changes: - Release swh.search v0.10.0 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 21 Jul 2021 08:41:27 +0000 swh-search (0.9.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.9.0 - (tagged by Vincent SELLIER on 2021-06-17 16:54:50 +0200) * Upstream changes: - v0.9.0 - Changelog: - * Fix boolean mapping in metadata document - * Store nb_visits and last_visit_date - * test_origin_intrinsic_metadata_long_description: Re-increase description size - * tests/test_search: Use a reasonably long description value - * tests/elasticsearch: Catch painless script errors and pretty print them - * mypy: Fix errors with release >= v0.900 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 17 Jun 2021 15:01:42 +0000 swh-search (0.8.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.8.1 - (tagged by Antoine Lambert on 2021-04-29 14:36:43 +0200) * Upstream changes: - version 0.8.1 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 29 Apr 2021 12:41:23 +0000 swh-search (0.8.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.8.0 - (tagged by Nicolas Dandrimont on 2021-04-08 17:37:41 +0200) * Upstream changes: - Release swh.search 0.8.0 - Implement a blocklist for origin results - Fix docs typesetting -- Software Heritage autobuilder (on jenkins-debian1) Thu, 08 Apr 2021 15:42:22 +0000 swh-search (0.7.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.1 - (tagged by Vincent SELLIER on 2021-03-04 15:59:28 +0100) * Upstream changes: - v0.7.1 - Changelog: - * Allow to instantiate the service with default indexes configuration -- Software Heritage autobuilder (on jenkins-debian1) Thu, 04 Mar 2021 15:06:34 +0000 swh-search (0.7.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.7.0 - (tagged by Vincent SELLIER on 2021-03-04 12:09:12 +0100) * Upstream changes: - v0.7.0 - Changelog: - * Ensure the elasticsearch indexes are initialized before the first request - * Use elasticsearch aliases to simplify maintenance operations - * search.cli: Drop unused and untested rpc-serve cli entrypoint - * api.wsgi: Drop unused wsgi module - * Add missing server tests - * Add typing to origin_update's argument and origin_search's return -- Software Heritage autobuilder (on jenkins-debian1) Thu, 04 Mar 2021 11:19:29 +0000 swh-search (0.6.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.1 - (tagged by Antoine Lambert on 2021-02-18 18:55:56 +0100) * Upstream changes: - version 0.6.1 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 18 Feb 2021 18:00:51 +0000 swh-search (0.6.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.6.0 - (tagged by Antoine Lambert on 2021-02-18 15:28:07 +0100) * Upstream changes: - version 0.6.0 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 18 Feb 2021 14:31:07 +0000 swh-search (0.5.0-1~swh1) unstable-swh; urgency=medium * New upstream release 
0.5.0 - (tagged by Vincent SELLIER on 2021-02-18 11:20:43 +0100) * Upstream changes: - v0.5.0 - Add monitoring metrics -- Software Heritage autobuilder (on jenkins-debian1) Thu, 18 Feb 2021 10:25:39 +0000 swh-search (0.4.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.2 - (tagged by Antoine Lambert on 2021-02-17 11:09:21 +0100) * Upstream changes: - version 0.4.2 -- Software Heritage autobuilder (on jenkins-debian1) Wed, 17 Feb 2021 10:14:16 +0000 swh-search (0.4.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.1 - (tagged by Vincent SELLIER on 2021-01-07 16:15:23 +0100) * Upstream changes: - v0.4.1 -- Software Heritage autobuilder (on jenkins-debian1) Thu, 07 Jan 2021 15:18:24 +0000 swh-search (0.4.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.4.0 - (tagged by Vincent SELLIER on 2020-12-23 16:37:18 +0100) * Upstream changes: - Support an index name prefix -- Software Heritage autobuilder (on jenkins-debian1) Wed, 23 Dec 2020 15:41:09 +0000 swh-search (0.3.5-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.5 - (tagged by Valentin Lorentz on 2020-12-22 17:32:26 +0100) * Upstream changes: - v0.3.5 - * Write some basic documentation to describe what swh-search is. - * Add more comments in elasticsearch.py -- Software Heritage autobuilder (on jenkins-debian1) Tue, 22 Dec 2020 16:38:29 +0000 swh-search (0.3.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.4 - (tagged by Antoine R. Dumont (@ardumont) on 2020-12-17 12:13:49 +0100) * Upstream changes: - v0.3.4 - search.journal_client: Actually filter on full origin_visit_status -- Software Heritage autobuilder (on jenkins-debian1) Thu, 17 Dec 2020 11:16:32 +0000 swh-search (0.3.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.3 - (tagged by Antoine R. Dumont (@ardumont) on 2020-12-11 15:20:01 +0100) * Upstream changes: - v0.3.3 - Use cross-field search. - Normalize Codemeta documents by expanding them. - Add test for long descriptions. -- Software Heritage autobuilder (on jenkins-debian1) Fri, 11 Dec 2020 14:22:59 +0000 swh-search (0.3.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.2 - (tagged by Antoine R. Dumont (@ardumont) on 2020-12-10 09:49:35 +0100) * Upstream changes: - v0.3.2 - search.journal_client: Fix key error - test_journal_client: Migrate to pytest -- Software Heritage autobuilder (on jenkins-debian1) Thu, 10 Dec 2020 08:54:53 +0000 swh-search (0.3.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.1 - (tagged by Antoine R. Dumont (@ardumont) on 2020-12-09 18:21:33 +0100) * Upstream changes: - v0.3.1 - Allow configuration through cli or config file -- Software Heritage autobuilder (on jenkins-debian1) Wed, 09 Dec 2020 18:53:39 +0000 swh-search (0.3.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.3.0 - (tagged by Antoine R. 
Dumont (@ardumont) on 2020-12-08 11:30:33 +0100) * Upstream changes: - v0.3.0 - cli: Subscribe journal client to origin_intrinsic_metadata topic - cli: Subscribe journal client to origin_visit_status - cli: Allow topic prefix declaration through cli or configuration - cli: Allow object- type declaration through cli or configuration - tox.ini: pin black to the pre-commit version (19.10b0) to avoid flip-flops - Run isort after the CLI import changes -- Software Heritage autobuilder (on jenkins-debian1) Tue, 08 Dec 2020 10:33:30 +0000 swh-search (0.2.3-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.3 - (tagged by David Douard on 2020-09-25 12:51:11 +0200) * Upstream changes: - v0.2.3 -- Software Heritage autobuilder (on jenkins-debian1) Fri, 25 Sep 2020 10:53:12 +0000 swh-search (0.2.2-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.2 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-03 11:58:53 +0200) * Upstream changes: - v0.2.2 - Fix test_cli.invoke for old PyYAML versions (such as 3.13, in Debian 10). -- Software Heritage autobuilder (on jenkins-debian1) Mon, 03 Aug 2020 10:00:05 +0000 swh-search (0.2.1-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.1 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-03 10:59:31 +0200) * Upstream changes: - v0.2.1 - setup.py: Migrate from vcversioner to setuptools-scm -- Software Heritage autobuilder (on jenkins-debian1) Mon, 03 Aug 2020 09:00:39 +0000 swh-search (0.2.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.2.0 - (tagged by Antoine R. Dumont (@ardumont) on 2020-08-03 10:40:39 +0200) * Upstream changes: - v0.2.0 - swh.search: Define an interface for search backends and use it - swh.search.get_search: Simplify instantiation -- Software Heritage autobuilder (on jenkins-debian1) Mon, 03 Aug 2020 08:42:45 +0000 swh-search (0.1.0-1~swh1) unstable-swh; urgency=medium * New upstream release 0.1.0 - (tagged by Antoine R. Dumont (@ardumont) on 2020-07-31 14:05:22 +0200) * Upstream changes: - v0.1.0 - Type origin_search(...) -> PagedResult[Dict] - README: Update necessary dependencies for test purposes - Fixes on journal updates - Blackify strings - setup: Update the minimum required runtime python3 version -- Software Heritage autobuilder (on jenkins-debian1) Fri, 31 Jul 2020 12:10:22 +0000 swh-search (0.0.4-1~swh1) unstable-swh; urgency=medium * New upstream release 0.0.4 - (tagged by Antoine R. Dumont (@ardumont) on 2020-01-23 15:00:50 +0100) * Upstream changes: - v0.0.4 docs: Remove swh-py-template label - Only return results where all terms match. - Don't use refresh='wait_for' when updating origins. - Add a 'sha1' field to origin documents, used for sorting. - Add a pre-commit config file - Migrate tox.ini to extras = xxx instead of deps = .[testing] - De- specify testenv:py3 - Include all requirements in MANIFEST.in -- Software Heritage autobuilder (on jenkins-debian1) Thu, 23 Jan 2020 14:04:17 +0000 swh-search (0.0.3-1~swh2) unstable-swh; urgency=medium * Filter out swh/__init__.py from package -- Nicolas Dandrimont Tue, 14 Jan 2020 16:38:23 +0100 swh-search (0.0.3-1~swh1) unstable-swh; urgency=medium * Initial packaging -- Nicolas Dandrimont Mon, 13 Jan 2020 16:59:11 +0100 diff --git a/docs/query-language.rst b/docs/query-language.rst index ed6623a..11dba04 100644 --- a/docs/query-language.rst +++ b/docs/query-language.rst @@ -1,190 +1,190 @@ Search Query Language ===================== Every query is composed of filters separated by ``and`` or ``or``. 
These filters have 3 components in the order: ``Name Operator Value``. Some examples are: - * ``origin = django and language in [python] and visits >= 5`` + * ``origin : plasma and language in [python] and visits >= 5`` * ``last_revision > 2020-01-01 and limit = 10`` * ``last_visit > 2021-01-01 or last_visit < 2020-01-01`` - * ``visited = false and metadata = "kubernetes" or origin = "minikube"`` + * ``visited = false and metadata = "kubernetes" or origin : "minikube"`` * ``keyword in ["orchestration", "kubectl"] and language in ["go", "rust"]`` - * ``(origin = debian or visit_type = ["deb"]) and license in ["GPL-3"]`` + * ``(origin : debian or visit_type = ["deb"]) and license in ["GPL-3"]`` **Note**: * Whitespace is optional between the three components of a filter. * The conjunction operators are left-associative. Therefore ``foo and bar and baz`` means ``(foo and bar) and baz`` * ``and`` has higher precedence than ``or``. Therefore ``foo or bar and baz`` means ``foo or (bar and baz)`` * Precedence can be overridden using parentheses: ``(`` and ``)``. For example, you can override the default precedence in the previous query as: ``(foo or bar) and baz`` * To actually search for ``and`` or ``or`` as strings, just put them within quotes. Example: ``metadata : "vcs history and metadata"``, or even just ``metadata : "and"`` to search for the string ``and`` in the metadata. The filters below are classified based on the type of value they expect. Pattern filters --------------- Returns origins having the given keywords in their URL or intrinsic metadata * Name: * ``origin``: Keywords from the origin URL * ``metadata``: Keywords from all the intrinsic metadata fields - * Operator: ``=`` + * Operator: ``:`` * Value: String wrapped in quotation marks (``"`` or ``'``) **Note:** If a string has no whitespace, the quotation marks become optional. **Examples:** - * ``origin = https://github.com/Django/django`` - * ``origin = kubernetes`` - * ``origin = "github python"`` - * ``metadata = orchestration`` - * ``metadata = "javascript language"`` + * ``origin : https://github.com/Django/django`` + * ``origin : kubernetes`` + * ``origin : "github python"`` + * ``metadata : orchestration`` + * ``metadata : "javascript language"`` Boolean filters --------------- Returns origins having their boolean type values equal to the given values * Name: ``visited`` : Whether the origin has been visited * Operator: ``=`` * Value: ``true`` or ``false`` **Examples:** * ``visited = true`` * ``visited = false`` Numeric filters --------------- Returns origins having their numeric type values in the given range * Name: ``visits`` : Number of visits of an origin * Operator: ``<`` ``<=`` ``=`` ``!=`` ``>`` ``>=`` * Value: Positive integer **Examples:** * ``visits > 2`` * ``visits = 5`` * ``visits <= 10`` Un-bounded List filters ----------------------- Returns origins that satisfy the criteria based on a given list * Name: * ``language`` : Programming languages used * ``license`` : License used * ``keyword`` : keywords (often same as tags) or description (includes README) from the metadata * Operator: ``in`` ``not in`` * Value: Array of strings **Note:** * If a string has no whitespace, the quotation marks become optional. * The ``keyword`` filter gives more priority to the keywords field of the intrinsic metadata than to the description field, so origins having the queried term in their intrinsic metadata keywords will appear first.
**Examples:** * ``language in [python, js]`` * ``license in ["GPL 3.0 or later", MIT]`` * ``keyword in ["Software Heritage", swh]`` Bounded List filters -------------------- Returns origins that satisfy the criteria based on a list of fixed options **visit_type** * Name: ``visit_type`` : Returns only origins with at least one of the specified visit types * Operator: ``=`` * Value: Array of the following values: ``any`` ``cran`` ``deb`` ``deposit`` ``ftp`` ``hg`` ``git`` ``nixguix`` ``npm`` ``pypi`` ``svn`` ``tar`` **sort_by** * Name: ``sort_by`` : Sorts origins based on the given list of origin attributes * Operator: ``=`` * Value: Array of the following values: ``visits`` ``last_visit`` ``last_eventful_visit`` ``last_revision`` ``last_release`` ``created`` ``modified`` ``published`` **Examples:** * ``visit_type = [svn, npm]`` * ``visit_type = [nixguix, "ftp"]`` * ``sort_by = ["last_visit", created]`` * ``sort_by = [visits, modified]`` Date filters ------------ Returns origins having their date type values in the given range * Name: * ``last_visit`` : Latest visit date * ``last_eventful_visit`` : Latest visit date where a new snapshot was detected * ``last_revision`` : Latest commit date * ``last_release`` : Latest release date * ``created`` : Creation date * ``modified`` : Modification date * ``published`` : Published date * Operator: ``<`` ``<=`` ``=`` ``!=`` ``>`` ``>=`` * Value: Date in standard ISO format **Note:** The last three date filters are based on metadata that has to be manually entered by the repository authors. So they might not be correct or up-to-date. **Examples:** * ``last_visit > 2001-01-01 and last_visit < 2101-01-01`` * ``last_revision = "2000-01-01 18:35Z"`` * ``last_release != "2021-07-17T18:35:00Z"`` * ``created <= "2021-07-17 18:35"`` Limit filter ------------ Limits the number of results to at most N * Name: ``limit`` * Operator: ``=`` * Value: Positive integer **Note:** The default value of the limit is 50. **Examples:** * ``limit = 1`` * ``limit = 15`` diff --git a/mypy.ini b/mypy.ini index 02b0e9f..35e2cdd 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,24 +1,27 @@ [mypy] namespace_packages = True warn_unused_ignores = True # 3rd party libraries without stubs (yet) [mypy-confluent_kafka.*] ignore_missing_imports = True [mypy-elasticsearch.*] ignore_missing_imports = True [mypy-msgpack.*] ignore_missing_imports = True [mypy-pkg_resources.*] ignore_missing_imports = True [mypy-pytest.*] ignore_missing_imports = True [mypy-tree_sitter.*] ignore_missing_imports = True + +[mypy-iso8601.*] +ignore_missing_imports = True diff --git a/requirements-test.txt b/requirements-test.txt index 7b5b9a1..4bc7eb9 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,8 +1,8 @@ -pytest +pytest < 7.0.0 # v7.0.0 removed _pytest.tmpdir.TempdirFactory, which is used by some of the pytest plugins we use pytest-mock confluent-kafka types-click types-pytz types-pyyaml types-requests types-setuptools diff --git a/requirements.txt b/requirements.txt index 422247f..4c08657 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,8 @@ # Add here external Python modules dependencies, one per line. Module names # should match https://pypi.python.org/pypi names.
For the full spec of # dependency lines, see https://pip.readthedocs.org/en/1.1/requirements.html click elasticsearch>=7.0.0,<8.0.0 typing-extensions tree_sitter +iso8601 diff --git a/setup.py b/setup.py index 1e7e26e..2126810 100755 --- a/setup.py +++ b/setup.py @@ -1,188 +1,206 @@ #!/usr/bin/env python3 # Copyright (C) 2015-2020 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from distutils.cmd import Command from distutils.command.build import build +import glob from io import open import os import shutil import subprocess from setuptools import find_packages, setup from setuptools.command.develop import develop from setuptools.command.sdist import sdist here = os.path.abspath(os.path.dirname(__file__)) # Get the long description from the README file with open(os.path.join(here, "README.md"), encoding="utf-8") as f: long_description = f.read() def parse_requirements(name=None): if name: reqf = "requirements-%s.txt" % name else: reqf = "requirements.txt" requirements = [] if not os.path.exists(reqf): return requirements with open(reqf) as f: for line in f.readlines(): line = line.strip() if not line or line.startswith("#"): continue requirements.append(line) return requirements -yarn = os.environ.get("YARN", "yarn") +def needs_regen(dest, sources) -> bool: + """Returns whether any of the 'sources' files was modified after 'dest'.""" + if not os.path.exists(dest): + return True + + dest_mtime = os.stat(dest).st_mtime + + for source in sources: + if os.stat(source).st_mtime > dest_mtime: + return True + + return False + + +yarn = os.environ.get("YARN", "yarnpkg" if shutil.which("yarnpkg") else "yarn") class TSCommand(Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass class TSInstallCommand(TSCommand): description = "Installs node_modules related to query language" def run(self): subprocess.run([yarn, "install"], check=True) class TSBuildSoCommand(TSCommand): description = "Builds swh_ql.so" def initialize_options(self): self.build_lib = None super().initialize_options() def finalize_options(self): self.set_undefined_options("build", ("build_lib", "build_lib")) super().finalize_options() def run(self): ql_dir = os.path.join(self.build_lib, "swh/search/query_language") copy_ql_tree(ql_dir) - if not os.path.exists(os.path.join(ql_dir, "src/parser.c")): + if needs_regen( + os.path.join(ql_dir, "src/parser.c"), + glob.glob("swh/search/query_language/**/*", recursive=True), + ): print("parser.c missing or outdated in build dir; regenerating.") self.run_command("ts_install") generate_parser(ql_dir) static_dir = os.path.join(self.build_lib, "swh/search/static") os.makedirs(static_dir, exist_ok=True) # This import cannot be toplevel, as setuptools installs it after the script # starts running from tree_sitter import Language Language.build_library(os.path.join(static_dir, "swh_ql.so"), [ql_dir]) print("swh_ql.so file generated") class TSBuildCommand(TSCommand): description = "Builds swh_ql.so" def run(self): self.run_command("ts_build_so") class custom_build(build): def run(self): super().run() if not self.dry_run: self.run_command("ts_build") class custom_sdist(sdist): def make_release_tree(self, base_dir, files): super().make_release_tree(base_dir, files) dist_ql_path = os.path.join(base_dir, "swh/search/query_language") if not self.dry_run: self.run_command("ts_install") copy_ql_tree(dist_ql_path)
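# Why generate here (sketch of the rationale, inferred from the surrounding
# diff rather than upstream text): shipping a pre-generated parser.c in the
# sdist means installs from the released tarball only need the tree_sitter
# Python package to build swh_ql.so; the yarn/node toolchain is required
# only on the machine that cuts the release.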
generate_parser(dist_ql_path) class custom_develop(develop): def run(self): super().run() if not self.dry_run: self.run_command("ts_install") generate_parser("swh/search/query_language") def copy_ql_tree(dest_path): # FIXME: setuptools should copy this itself... print("Copying parser files") if os.path.exists(dest_path): shutil.rmtree(dest_path) shutil.copytree("swh/search/query_language", dest_path) def generate_parser(dest_path): print("Getting path") path = subprocess.check_output([yarn, "bin"]).decode().strip() env = {**os.environ, "PATH": os.pathsep.join([path, os.environ["PATH"]])} print("Generating") subprocess.run(["tree-sitter", "generate", "--no-bindings"], cwd=dest_path, env=env) setup( name="swh.search", description="Software Heritage search service", long_description=long_description, long_description_content_type="text/markdown", python_requires=">=3.7", author="Software Heritage developers", author_email="swh-devel@inria.fr", url="https://forge.softwareheritage.org/diffusion/DSEA", packages=find_packages(), # package's modules install_requires=parse_requirements() + parse_requirements("swh"), tests_require=parse_requirements("test"), entry_points=""" [swh.cli.subcommands] search=swh.search.cli """, - setup_requires=["setuptools-scm", "tree-sitter==0.19.0"], + setup_requires=["setuptools-scm", "tree-sitter"], use_scm_version=True, extras_require={"testing": parse_requirements("test")}, include_package_data=True, classifiers=[ "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", "Operating System :: OS Independent", "Development Status :: 3 - Alpha", ], project_urls={ "Bug Reports": "https://forge.softwareheritage.org/maniphest", "Funding": "https://www.softwareheritage.org/donate", "Source": "https://forge.softwareheritage.org/source/swh-search", "Documentation": "https://docs.softwareheritage.org/devel/swh-search/", }, cmdclass={ "build": custom_build, "sdist": custom_sdist, "develop": custom_develop, "ts_install": TSInstallCommand, "ts_build_so": TSBuildSoCommand, "ts_build": TSBuildCommand, }, zip_safe=False, ) diff --git a/swh.search.egg-info/PKG-INFO b/swh.search.egg-info/PKG-INFO index 717b711..b9d6de0 100644 --- a/swh.search.egg-info/PKG-INFO +++ b/swh.search.egg-info/PKG-INFO @@ -1,90 +1,90 @@ Metadata-Version: 2.1 Name: swh.search -Version: 0.11.6 +Version: 0.13.0 Summary: Software Heritage search service Home-page: https://forge.softwareheritage.org/diffusion/DSEA Author: Software Heritage developers Author-email: swh-devel@inria.fr License: UNKNOWN Project-URL: Bug Reports, https://forge.softwareheritage.org/maniphest Project-URL: Funding, https://www.softwareheritage.org/donate Project-URL: Source, https://forge.softwareheritage.org/source/swh-search Project-URL: Documentation, https://docs.softwareheritage.org/devel/swh-search/ Platform: UNKNOWN Classifier: Programming Language :: Python :: 3 Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3) Classifier: Operating System :: OS Independent Classifier: Development Status :: 3 - Alpha Requires-Python: >=3.7 Description-Content-Type: text/markdown Provides-Extra: testing License-File: LICENSE License-File: AUTHORS swh-search ========== Search service for the Software Heritage archive.
It is similar to swh-storage in what it contains, but provides different ways to query it: while swh-storage is mostly a key-value store that returns an object from a primary key, swh-search is focused on reverse indices, to allow finding objects that match some criteria; for example full-text search. Currently uses ElasticSearch, and provides only origin search (by URL and metadata). ## Dependencies - Python tests for this module include tests that cannot be run without a local ElasticSearch instance, so you need the ElasticSearch server executable on your machine (no need to have a running ElasticSearch server). - Debian-like host The elasticsearch package is required. As it's not part of debian-stable, [an additional Debian repository must be configured](https://www.elastic.co/guide/en/elasticsearch/reference/current/deb.html#deb-repo) - Non Debian-like host The tests expect: - `/usr/share/elasticsearch/jdk/bin/java` to exist. - `org.elasticsearch.bootstrap.Elasticsearch` to be in java's classpath. - Emscripten is required for generating the tree-sitter WASM module. The following commands need to be executed for the setup: ```bash cd /opt && git clone https://github.com/emscripten-core/emsdk.git && cd emsdk && \ ./emsdk install latest && ./emsdk activate latest PATH="${PATH}:/opt/emsdk/upstream/emscripten" ``` **Note:** If emsdk isn't found in the PATH, the tree-sitter CLI automatically pulls the `emscripten/emsdk` image from Docker Hub when `make ts-build-wasm` or `make ts-build` is used. ## Make targets Below is the list of available make targets that can be executed from the root directory of swh-search in order to build and/or run swh-search under various configurations: * **ts-install**: Install node_modules and the emscripten SDK required for TreeSitter * **ts-generate**: Generate parser files (C and JSON) from the grammar * **ts-repl**: Starts a web-based playground for the TreeSitter grammar. It's the recommended way to develop the TreeSitter grammar. * **ts-dev**: Parse the `query_language/sample_query` and print the corresponding syntax expression along with the start and end positions of all the nodes. * **ts-dev sanitize=1**: Same as **ts-dev** but without the start and end positions of the nodes. This format is expected by TreeSitter's native test command. `sanitize=1` cleans the output of **ts-dev** using `sed` to achieve the desired format.
* **ts-test**: Executes TreeSitter's native tests * **ts-build-so**: Generates the `swh_ql.so` file from the previously generated parser using py-tree-sitter * **ts-build-wasm**: Generates the `swh_ql.wasm` file from the previously generated parser using emscripten * **ts-build**: Executes both **ts-build-so** and **ts-build-wasm** diff --git a/swh.search.egg-info/SOURCES.txt b/swh.search.egg-info/SOURCES.txt index 156c662..ee72f20 100644 --- a/swh.search.egg-info/SOURCES.txt +++ b/swh.search.egg-info/SOURCES.txt @@ -1,69 +1,71 @@ .gitignore .pre-commit-config.yaml AUTHORS CODE_OF_CONDUCT.md CONTRIBUTORS LICENSE MANIFEST.in Makefile README.md mypy.ini package.json pyproject.toml pytest.ini requirements-swh.txt requirements-test.txt requirements.txt setup.cfg setup.py tox.ini yarn.lock docs/.gitignore docs/Makefile docs/cli.rst docs/conf.py docs/index.rst docs/query-language.rst docs/_static/.placeholder docs/_templates/.placeholder es_config/elasticsearch.keystore es_config/elasticsearch.yml es_config/jvm.options es_config/log4j2.properties swh/__init__.py swh.search.egg-info/PKG-INFO swh.search.egg-info/SOURCES.txt swh.search.egg-info/dependency_links.txt swh.search.egg-info/entry_points.txt swh.search.egg-info/not-zip-safe swh.search.egg-info/requires.txt swh.search.egg-info/top_level.txt swh/search/__init__.py swh/search/cli.py swh/search/elasticsearch.py +swh/search/exc.py swh/search/in_memory.py swh/search/interface.py swh/search/journal_client.py swh/search/metrics.py swh/search/py.typed swh/search/translator.py swh/search/utils.py swh/search/api/__init__.py swh/search/api/client.py swh/search/api/server.py swh/search/query_language/.gitignore swh/search/query_language/grammar.js swh/search/query_language/sample_query swh/search/query_language/tokens.js swh/search/query_language/test/corpus/combinations.txt swh/search/tests/__init__.py swh/search/tests/conftest.py swh/search/tests/test_api_client.py swh/search/tests/test_cli.py swh/search/tests/test_elasticsearch.py swh/search/tests/test_in_memory.py swh/search/tests/test_init.py swh/search/tests/test_journal_client.py swh/search/tests/test_search.py swh/search/tests/test_server.py -swh/search/tests/test_translator.py \ No newline at end of file +swh/search/tests/test_translator.py +swh/search/tests/test_utils.py \ No newline at end of file diff --git a/swh.search.egg-info/requires.txt b/swh.search.egg-info/requires.txt index 1e51e34..bbde510 100644 --- a/swh.search.egg-info/requires.txt +++ b/swh.search.egg-info/requires.txt @@ -1,18 +1,19 @@ click elasticsearch<8.0.0,>=7.0.0 typing-extensions tree_sitter +iso8601 swh.core[http]>=0.3.0 swh.indexer swh.journal>=0.1.0 swh.model [testing] -pytest +pytest<7.0.0 pytest-mock confluent-kafka types-click types-pytz types-pyyaml types-requests types-setuptools diff --git a/swh/__init__.py b/swh/__init__.py index 030e022..b36383a 100644 --- a/swh/__init__.py +++ b/swh/__init__.py @@ -1,9 +1,3 @@ -# Copyright (C) 2019-2021 The Software Heritage developers -# See the AUTHORS file at the top-level directory of this distribution -# License: GNU General Public License version 3, or any later version -# See top-level LICENSE file for more information - from pkgutil import extend_path -from typing import List -__path__: List[str] = extend_path(__path__, __name__) +__path__ = extend_path(__path__, __name__) diff --git a/swh/search/api/client.py b/swh/search/api/client.py index bd2bdee..80b63b8 100644 --- a/swh/search/api/client.py +++ b/swh/search/api/client.py @@ -1,14 +1,16 @@ -# Copyright (C) 2019-2020 The
Software Heritage developers +# Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from swh.core.api import RPCClient +from .. import exc from ..interface import SearchInterface class RemoteSearch(RPCClient): """Proxy to a remote search API""" backend_class = SearchInterface + reraise_exceptions = [getattr(exc, exc_name) for exc_name in exc.__all__] diff --git a/swh/search/elasticsearch.py b/swh/search/elasticsearch.py index dc04fa5..5cc0451 100644 --- a/swh/search/elasticsearch.py +++ b/swh/search/elasticsearch.py @@ -1,549 +1,555 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import base64 from collections import Counter import logging import pprint from textwrap import dedent from typing import Any, Dict, Iterable, List, Optional from elasticsearch import Elasticsearch, helpers import msgpack from swh.indexer import codemeta from swh.model import model from swh.model.hashutil import hash_to_hex from swh.search.interface import ( SORT_BY_OPTIONS, MinimalOriginDict, OriginDict, PagedResult, ) from swh.search.metrics import send_metric, timed from swh.search.translator import Translator -from swh.search.utils import escape, get_expansion, is_date_parsable +from swh.search.utils import escape, get_expansion, parse_and_format_date logger = logging.getLogger(__name__) INDEX_NAME_PARAM = "index" READ_ALIAS_PARAM = "read_alias" WRITE_ALIAS_PARAM = "write_alias" ORIGIN_DEFAULT_CONFIG = { INDEX_NAME_PARAM: "origin", READ_ALIAS_PARAM: "origin-read", WRITE_ALIAS_PARAM: "origin-write", } def _sanitize_origin(origin): origin = origin.copy() # Whitelist fields to be saved in Elasticsearch res = {"url": origin.pop("url")} for field_name in ( "blocklisted", "has_visits", "intrinsic_metadata", "visit_types", "nb_visits", "snapshot_id", "last_visit_date", "last_eventful_visit_date", "last_revision_date", "last_release_date", ): if field_name in origin: res[field_name] = origin.pop(field_name) # Run the JSON-LD expansion algorithm # to normalize the Codemeta metadata. # This is required as Elasticsearch needs each field to have a consistent # type across documents to be searchable; and non-expanded JSON-LD documents # can have various types in the same field. For example, all these are # equivalent in JSON-LD: # * {"author": "Jane Doe"} # * {"author": ["Jane Doe"]} # * {"author": {"@value": "Jane Doe"}} # * {"author": [{"@value": "Jane Doe"}]} # and JSON-LD expansion will convert them all to the last one.
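# For intuition, a sketch of the expanded shape (illustrative, not literal
# library output): after expansion, {"author": "Jane Doe"} is carried as
# [{"http://schema.org/author": [{"@value": "Jane Doe"}]}] -- every property
# keyed by its full URI, with values as lists of {"@value": ...}/{"@id": ...}
# dicts, which is what gives each Elasticsearch field a single consistent type.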
if "intrinsic_metadata" in res: intrinsic_metadata = res["intrinsic_metadata"] for date_field in ["dateCreated", "dateModified", "datePublished"]: if date_field in intrinsic_metadata: date = intrinsic_metadata[date_field] # If date{Created,Modified,Published} value isn't parsable # It gets rejected and isn't stored (unlike other fields) - if not is_date_parsable(date): + formatted_date = parse_and_format_date(date) + if formatted_date is None: intrinsic_metadata.pop(date_field) + else: + intrinsic_metadata[date_field] = formatted_date res["intrinsic_metadata"] = codemeta.expand(intrinsic_metadata) return res def token_encode(index_to_tokenize: Dict[bytes, Any]) -> str: """Tokenize as string an index page result from a search""" page_token = base64.b64encode(msgpack.dumps(index_to_tokenize)) return page_token.decode() def token_decode(page_token: str) -> Dict[bytes, Any]: """Read the page_token""" return msgpack.loads(base64.b64decode(page_token.encode()), raw=True) class ElasticSearch: def __init__(self, hosts: List[str], indexes: Dict[str, Dict[str, str]] = {}): self._backend = Elasticsearch(hosts=hosts) self._translator = Translator() # Merge current configuration with default values origin_config = indexes.get("origin", {}) self.origin_config = {**ORIGIN_DEFAULT_CONFIG, **origin_config} def _get_origin_index(self) -> str: return self.origin_config[INDEX_NAME_PARAM] def _get_origin_read_alias(self) -> str: return self.origin_config[READ_ALIAS_PARAM] def _get_origin_write_alias(self) -> str: return self.origin_config[WRITE_ALIAS_PARAM] @timed def check(self): return self._backend.ping() def deinitialize(self) -> None: """Removes all indices from the Elasticsearch backend""" self._backend.indices.delete(index="*") def initialize(self) -> None: """Declare Elasticsearch indices, aliases and mappings""" if not self._backend.indices.exists(index=self._get_origin_index()): self._backend.indices.create(index=self._get_origin_index()) if not self._backend.indices.exists_alias(name=self._get_origin_read_alias()): self._backend.indices.put_alias( index=self._get_origin_index(), name=self._get_origin_read_alias() ) if not self._backend.indices.exists_alias(name=self._get_origin_write_alias()): self._backend.indices.put_alias( index=self._get_origin_index(), name=self._get_origin_write_alias() ) self._backend.indices.put_mapping( index=self._get_origin_index(), body={ "dynamic_templates": [ { "booleans_as_string": { # All fields stored as string in the metadata # even the booleans "match_mapping_type": "boolean", "path_match": "intrinsic_metadata.*", "mapping": {"type": "keyword"}, } } ], "date_detection": False, "properties": { # sha1 of the URL; used as the document id "sha1": {"type": "keyword", "doc_values": True,}, # Used both to search URLs, and as the result to return # as a response to queries "url": { "type": "text", # To split URLs into tokens on any character # that is not alphanumeric "analyzer": "simple", # 2-gram and partial-3-gram search (i.e.
with the end of the # third word potentially missing) "fields": { "as_you_type": { "type": "search_as_you_type", "analyzer": "simple", } }, }, "visit_types": {"type": "keyword"}, # used to filter out origins that were never visited "has_visits": {"type": "boolean",}, "nb_visits": {"type": "integer"}, "snapshot_id": {"type": "keyword"}, "last_visit_date": {"type": "date"}, "last_eventful_visit_date": {"type": "date"}, "last_release_date": {"type": "date"}, "last_revision_date": {"type": "date"}, "intrinsic_metadata": { "type": "nested", "properties": { "@context": { # don't bother indexing tokens in these URIs, as they # are used as namespaces "type": "keyword", }, "http://schema": { "properties": { "org/dateCreated": { "properties": {"@value": {"type": "date",}} }, "org/dateModified": { "properties": {"@value": {"type": "date",}} }, "org/datePublished": { "properties": {"@value": {"type": "date",}} }, } }, }, }, # Has this origin been taken down? "blocklisted": {"type": "boolean",}, }, }, ) @timed def flush(self) -> None: self._backend.indices.refresh(index=self._get_origin_write_alias()) @timed def origin_update(self, documents: Iterable[OriginDict]) -> None: write_index = self._get_origin_write_alias() documents = map(_sanitize_origin, documents) documents_with_sha1 = ( (hash_to_hex(model.Origin(url=document["url"]).id), document) for document in documents ) # painless script that will be executed when updating an origin document update_script = dedent( """ // utility function to get and parse date ZonedDateTime getDate(def ctx, String date_field) { String default_date = "0001-01-01T00:00:00Z"; String date = ctx._source.getOrDefault(date_field, default_date); return ZonedDateTime.parse(date); } // backup current visit_types field value List visit_types = ctx._source.getOrDefault("visit_types", []); int nb_visits = ctx._source.getOrDefault("nb_visits", 0); ZonedDateTime last_visit_date = getDate(ctx, "last_visit_date"); String snapshot_id = ctx._source.getOrDefault("snapshot_id", ""); ZonedDateTime last_eventful_visit_date = getDate(ctx, "last_eventful_visit_date"); ZonedDateTime last_revision_date = getDate(ctx, "last_revision_date"); ZonedDateTime last_release_date = getDate(ctx, "last_release_date"); // update origin document with new field values ctx._source.putAll(params); // restore previous visit types after visit_types field overriding if (ctx._source.containsKey("visit_types")) { for (int i = 0; i < visit_types.length; ++i) { if (!ctx._source.visit_types.contains(visit_types[i])) { ctx._source.visit_types.add(visit_types[i]); } } } // Undo overwrite if incoming nb_visits is smaller if (ctx._source.containsKey("nb_visits")) { int incoming_nb_visits = ctx._source.getOrDefault("nb_visits", 0); if(incoming_nb_visits < nb_visits){ ctx._source.nb_visits = nb_visits; } } // Undo overwrite if incoming last_visit_date is older if (ctx._source.containsKey("last_visit_date")) { ZonedDateTime incoming_last_visit_date = getDate(ctx, "last_visit_date"); int difference = // returns -1, 0 or 1 incoming_last_visit_date.compareTo(last_visit_date); if(difference < 0){ ctx._source.last_visit_date = last_visit_date; } } // Undo update of last_eventful_date and snapshot_id if // snapshot_id hasn't changed OR incoming_last_eventful_visit_date is older if (ctx._source.containsKey("snapshot_id")) { String incoming_snapshot_id = ctx._source.getOrDefault("snapshot_id", ""); ZonedDateTime incoming_last_eventful_visit_date = getDate(ctx, "last_eventful_visit_date"); int difference = // returns -1, 0 or 1
incoming_last_eventful_visit_date.compareTo(last_eventful_visit_date); if(snapshot_id == incoming_snapshot_id || difference < 0){ ctx._source.snapshot_id = snapshot_id; ctx._source.last_eventful_visit_date = last_eventful_visit_date; } } // Undo overwrite if incoming last_revision_date is older if (ctx._source.containsKey("last_revision_date")) { ZonedDateTime incoming_last_revision_date = getDate(ctx, "last_revision_date"); int difference = // returns -1, 0 or 1 incoming_last_revision_date.compareTo(last_revision_date); if(difference < 0){ ctx._source.last_revision_date = last_revision_date; } } // Undo overwrite if incoming last_release_date is older if (ctx._source.containsKey("last_release_date")) { ZonedDateTime incoming_last_release_date = getDate(ctx, "last_release_date"); // returns -1, 0 or 1 int difference = incoming_last_release_date.compareTo(last_release_date); if(difference < 0){ ctx._source.last_release_date = last_release_date; } } """ # noqa ) actions = [ { "_op_type": "update", "_id": sha1, "_index": write_index, "scripted_upsert": True, "upsert": {**document, "sha1": sha1,}, "retry_on_conflict": 10, "script": { "source": update_script, "lang": "painless", "params": document, }, } for (sha1, document) in documents_with_sha1 ] indexed_count, errors = helpers.bulk(self._backend, actions, index=write_index) assert isinstance(errors, List) # Make mypy happy send_metric("document:index", count=indexed_count, method_name="origin_update") send_metric( "document:index_error", count=len(errors), method_name="origin_update" ) @timed def origin_search( self, *, query: str = "", url_pattern: Optional[str] = None, metadata_pattern: Optional[str] = None, with_visit: bool = False, visit_types: Optional[List[str]] = None, min_nb_visits: int = 0, min_last_visit_date: str = "", min_last_eventful_visit_date: str = "", min_last_revision_date: str = "", min_last_release_date: str = "", min_date_created: str = "", min_date_modified: str = "", min_date_published: str = "", programming_languages: Optional[List[str]] = None, licenses: Optional[List[str]] = None, keywords: Optional[List[str]] = None, sort_by: Optional[List[str]] = None, page_token: Optional[str] = None, limit: int = 50, ) -> PagedResult[MinimalOriginDict]: query_clauses: List[Dict[str, Any]] = [] query_filters = [] if url_pattern: - query_filters.append(f"origin = {escape(url_pattern)}") + query_filters.append(f"origin : {escape(url_pattern)}") if metadata_pattern: - query_filters.append(f"metadata = {escape(metadata_pattern)}") + query_filters.append(f"metadata : {escape(metadata_pattern)}") # if not query_clauses: # raise ValueError( # "At least one of url_pattern and metadata_pattern must be provided." 
# ) if with_visit: query_filters.append(f"visited = {'true' if with_visit else 'false'}") if min_nb_visits: query_filters.append(f"visits >= {min_nb_visits}") if min_last_visit_date: query_filters.append( f"last_visit >= {min_last_visit_date.replace('Z', '+00:00')}" ) if min_last_eventful_visit_date: query_filters.append( "last_eventful_visit >= " f"{min_last_eventful_visit_date.replace('Z', '+00:00')}" ) if min_last_revision_date: query_filters.append( f"last_revision >= {min_last_revision_date.replace('Z', '+00:00')}" ) if min_last_release_date: query_filters.append( f"last_release >= {min_last_release_date.replace('Z', '+00:00')}" ) if keywords: query_filters.append(f"keyword in {escape(keywords)}") if licenses: query_filters.append(f"license in {escape(licenses)}") if programming_languages: query_filters.append(f"language in {escape(programming_languages)}") if min_date_created: query_filters.append( f"created >= {min_date_created.replace('Z', '+00:00')}" ) if min_date_modified: query_filters.append( f"modified >= {min_date_modified.replace('Z', '+00:00')}" ) if min_date_published: query_filters.append( f"published >= {min_date_published.replace('Z', '+00:00')}" ) if visit_types is not None: query_filters.append(f"visit_type = {escape(visit_types)}") - combined_filters = f"({' and '.join(query_filters)})" - query = f"{combined_filters}{' and ' if query != '' else ' '}{query}" + combined_filters = " and ".join(query_filters) + if combined_filters and query: + query = f"{combined_filters} and {query}" + else: + query = combined_filters or query parsed_query = self._translator.parse_query(query) query_clauses.append(parsed_query["filters"]) field_map = { "visits": "nb_visits", "last_visit": "last_visit_date", "last_eventful_visit": "last_eventful_visit_date", "last_revision": "last_revision_date", "last_release": "last_release_date", "created": "date_created", "modified": "date_modified", "published": "date_published", } if "sortBy" in parsed_query: if sort_by is None: sort_by = [] for sort_by_option in parsed_query["sortBy"]: if sort_by_option[0] == "-": sort_by.append("-" + field_map[sort_by_option[1:]]) else: sort_by.append(field_map[sort_by_option]) if parsed_query.get("limit", 0): limit = parsed_query["limit"] sorting_params: List[Dict[str, Any]] = [] if sort_by: for field in sort_by: order = "asc" if field and field[0] == "-": field = field[1:] order = "desc" if field in ["date_created", "date_modified", "date_published"]: sorting_params.append( { get_expansion(field, "."): { "nested_path": "intrinsic_metadata", "order": order, } } ) elif field in SORT_BY_OPTIONS: sorting_params.append({field: order}) sorting_params.extend( [{"_score": "desc"}, {"sha1": "asc"},] ) body = { "query": { "bool": { "must": query_clauses, "must_not": [{"term": {"blocklisted": True}}], } }, "sort": sorting_params, } if page_token: # TODO: use ElasticSearch's scroll API? 
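# How this pagination works (explanatory note): the opaque page token
# msgpack-encodes the (score, sha1) sort key of the previous page's last
# hit; feeding it back as Elasticsearch's search_after resumes the query
# strictly after that hit under the same sort order, with no server-side
# cursor to keep alive (unlike the scroll API mentioned above).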
page_token_content = token_decode(page_token) body["search_after"] = [ page_token_content[b"score"], page_token_content[b"sha1"].decode("ascii"), ] if logger.isEnabledFor(logging.DEBUG): formatted_body = pprint.pformat(body) logger.debug("Search query body: %s", formatted_body) res = self._backend.search( index=self._get_origin_read_alias(), body=body, size=limit ) hits = res["hits"]["hits"] next_page_token: Optional[str] = None if len(hits) == limit: # There are more results after this page; return a pagination token # to get them in a future query last_hit = hits[-1] next_page_token_content = { b"score": last_hit["_score"], b"sha1": last_hit["_source"]["sha1"], } next_page_token = token_encode(next_page_token_content) assert len(hits) <= limit return PagedResult( results=[{"url": hit["_source"]["url"]} for hit in hits], next_page_token=next_page_token, ) def visit_types_count(self) -> Counter: body = { "aggs": { "not_blocklisted": { "filter": {"bool": {"must_not": [{"term": {"blocklisted": True}}]}}, "aggs": { "visit_types": {"terms": {"field": "visit_types", "size": 1000}} }, } } } res = self._backend.search( index=self._get_origin_read_alias(), body=body, size=0 ) buckets = ( res.get("aggregations", {}) .get("not_blocklisted", {}) .get("visit_types", {}) .get("buckets", []) ) return Counter({bucket["key"]: bucket["doc_count"] for bucket in buckets}) diff --git a/swh/search/exc.py b/swh/search/exc.py new file mode 100644 index 0000000..0dd6d9f --- /dev/null +++ b/swh/search/exc.py @@ -0,0 +1,18 @@ +# Copyright (C) 2022 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +__all__ = ("SearchException", "SearchQuerySyntaxError") + + +class SearchException(Exception): + """Base exception for errors specific to swh-search""" + + pass + + +class SearchQuerySyntaxError(SearchException): + """Raised when the 'query' argument of origin_search cannot be parsed""" + + pass diff --git a/swh/search/in_memory.py b/swh/search/in_memory.py index b185636..3c20dda 100644 --- a/swh/search/in_memory.py +++ b/swh/search/in_memory.py @@ -1,517 +1,520 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from collections import Counter, defaultdict from datetime import datetime, timezone from itertools import chain import re from typing import Any, Dict, Iterable, Iterator, List, Optional from swh.indexer import codemeta from swh.model import model from swh.model.hashutil import hash_to_hex from swh.search.interface import ( SORT_BY_OPTIONS, MinimalOriginDict, OriginDict, PagedResult, ) -from swh.search.utils import get_expansion, is_date_parsable +from swh.search.utils import get_expansion, parse_and_format_date _words_regexp = re.compile(r"\w+") def _dict_words_set(d): """Recursively extract set of words from dict content.""" values = set() def extract(obj, words): if isinstance(obj, dict): for k, v in obj.items(): extract(v, words) elif isinstance(obj, list): for item in obj: extract(item, words) else: words.update(_words_regexp.findall(str(obj).lower())) return words return extract(d, values) def _nested_get(nested_dict, nested_keys, default=""): """Extracts values from deeply nested dictionary nested_dict using the nested_keys and 
returns a list of all of the values discovered in the process. >>> nested_dict = [ ... {"name": [{"@value": {"first": "f1", "last": "l1"}}], "address": "XYZ"}, ... {"name": [{"@value": {"first": "f2", "last": "l2"}}], "address": "ABC"}, ... ] >>> _nested_get(nested_dict, ["name", "@value", "last"]) ['l1', 'l2'] >>> _nested_get(nested_dict, ["address"]) ['XYZ', 'ABC'] It doesn't allow fetching intermediate values and returns "" for such cases >>> _nested_get(nested_dict, ["name", "@value"]) ['', ''] """ def _nested_get_recursive(nested_dict, nested_keys): try: curr_obj = nested_dict type_curr_obj = type(curr_obj) for i, key in enumerate(nested_keys): if key in curr_obj: curr_obj = curr_obj[key] type_curr_obj = type(curr_obj) else: if type_curr_obj == list: curr_obj = [ _nested_get_recursive(obj, nested_keys[i:]) for obj in curr_obj ] # If value isn't a list or string or integer elif type_curr_obj != str and type_curr_obj != int: return default # If only one element is present in the list, take it out # This ensures a flat array every time if type_curr_obj == list and len(curr_obj) == 1: curr_obj = curr_obj[0] return curr_obj except Exception: return default res = _nested_get_recursive(nested_dict, nested_keys) if type(res) != list: return [res] return res def _tokenize(x): return x.lower().replace(",", " ").split() def _get_sorting_key(origin, field): """Get value of the field from an origin for sorting origins. Here field should be a member of SORT_BY_OPTIONS. If "-" is present at the start of field then invert the value in a way that it reverses the sorting order. """ reversed = False if field[0] == "-": field = field[1:] reversed = True DATETIME_OBJ_MAX = datetime.max.replace(tzinfo=timezone.utc) DATETIME_MIN = "0001-01-01T00:00:00Z" DATE_OBJ_MAX = datetime.max DATE_MIN = "0001-01-01" if field == "score": if reversed: return -origin.get(field, 0) else: return origin.get(field, 0) if field in ["date_created", "date_modified", "date_published"]: date = datetime.strptime( _nested_get(origin, get_expansion(field), DATE_MIN)[0], "%Y-%m-%d" ) if reversed: return DATE_OBJ_MAX - date else: return date elif field in ["nb_visits"]: # unlike other options, nb_visits is of type integer if reversed: return -origin.get(field, 0) else: return origin.get(field, 0) elif field in SORT_BY_OPTIONS: date = datetime.fromisoformat( origin.get(field, DATETIME_MIN).replace("Z", "+00:00") ) if reversed: return DATETIME_OBJ_MAX - date else: return date class InMemorySearch: def __init__(self): pass def check(self): return True def deinitialize(self) -> None: if hasattr(self, "_origins"): del self._origins del self._origin_ids def initialize(self) -> None: self._origins: Dict[str, Dict[str, Any]] = defaultdict(dict) self._origin_ids: List[str] = [] def flush(self) -> None: pass _url_splitter = re.compile(r"\W") def origin_update(self, documents: Iterable[OriginDict]) -> None: for source_document in documents: document: Dict[str, Any] = dict(source_document) id_ = hash_to_hex(model.Origin(url=document["url"]).id) if "url" in document: document["_url_tokens"] = set( self._url_splitter.split(source_document["url"]) ) if "visit_types" in document: document["visit_types"] = set(source_document["visit_types"]) if "visit_types" in self._origins[id_]: document["visit_types"].update(self._origins[id_]["visit_types"]) if "nb_visits" in document: document["nb_visits"] = max( document["nb_visits"], self._origins[id_].get("nb_visits", 0) ) if "last_visit_date" in document: document["last_visit_date"] = max( 
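# note: datetime.fromisoformat() rejects a trailing "Z" before Python 3.11,
# hence the replace("Z", "+00:00") normalization below; max() then keeps the
# more recent of the incoming and previously stored timestamps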
datetime.fromisoformat(document["last_visit_date"]), datetime.fromisoformat( self._origins[id_] .get("last_visit_date", "0001-01-01T00:00:00.000000Z",) .replace("Z", "+00:00") ), ).isoformat() if "snapshot_id" in document and "last_eventful_visit_date" in document: incoming_date = datetime.fromisoformat( document["last_eventful_visit_date"] ) current_date = datetime.fromisoformat( self._origins[id_] .get("last_eventful_visit_date", "0001-01-01T00:00:00Z",) .replace("Z", "+00:00") ) incoming_snapshot_id = document["snapshot_id"] current_snapshot_id = self._origins[id_].get("snapshot_id", "") if ( incoming_snapshot_id == current_snapshot_id or incoming_date < current_date ): # update not required so override the incoming_values document["snapshot_id"] = current_snapshot_id document["last_eventful_visit_date"] = current_date.isoformat() if "last_revision_date" in document: document["last_revision_date"] = max( datetime.fromisoformat(document["last_revision_date"]), datetime.fromisoformat( self._origins[id_] .get("last_revision_date", "0001-01-01T00:00:00Z",) .replace("Z", "+00:00") ), ).isoformat() if "last_release_date" in document: document["last_release_date"] = max( datetime.fromisoformat(document["last_release_date"]), datetime.fromisoformat( self._origins[id_] .get("last_release_date", "0001-01-01T00:00:00Z",) .replace("Z", "+00:00") ), ).isoformat() if "intrinsic_metadata" in document: intrinsic_metadata = document["intrinsic_metadata"] for date_field in ["dateCreated", "dateModified", "datePublished"]: if date_field in intrinsic_metadata: date = intrinsic_metadata[date_field] # If date{Created,Modified,Published} value isn't parsable # It gets rejected and isn't stored (unlike other fields) - if not is_date_parsable(date): + formatted_date = parse_and_format_date(date) + if formatted_date is None: intrinsic_metadata.pop(date_field) + else: + intrinsic_metadata[date_field] = formatted_date document["intrinsic_metadata"] = codemeta.expand(intrinsic_metadata) if len(document["intrinsic_metadata"]) != 1: continue metadata = document["intrinsic_metadata"][0] if "http://schema.org/license" in metadata: metadata["http://schema.org/license"] = [ {"@id": license["@id"].lower()} for license in metadata["http://schema.org/license"] ] if "http://schema.org/programmingLanguage" in metadata: metadata["http://schema.org/programmingLanguage"] = [ {"@value": license["@value"].lower()} for license in metadata["http://schema.org/programmingLanguage"] ] self._origins[id_].update(document) if id_ not in self._origin_ids: self._origin_ids.append(id_) def origin_search( self, *, query: str = "", url_pattern: Optional[str] = None, metadata_pattern: Optional[str] = None, with_visit: bool = False, visit_types: Optional[List[str]] = None, min_nb_visits: int = 0, min_last_visit_date: str = "", min_last_eventful_visit_date: str = "", min_last_revision_date: str = "", min_last_release_date: str = "", min_date_created: str = "", min_date_modified: str = "", min_date_published: str = "", programming_languages: Optional[List[str]] = None, licenses: Optional[List[str]] = None, keywords: Optional[List[str]] = None, sort_by: Optional[List[str]] = None, page_token: Optional[str] = None, limit: int = 50, ) -> PagedResult[MinimalOriginDict]: hits = self._get_hits() if url_pattern: tokens = set(self._url_splitter.split(url_pattern)) def predicate(match): missing_tokens = tokens - match["_url_tokens"] if len(missing_tokens) == 0: return True elif len(missing_tokens) > 1: return False else: # There is one missing token, look 
up by prefix. (missing_token,) = missing_tokens return any( token.startswith(missing_token) for token in match["_url_tokens"] ) hits = filter(predicate, hits) if metadata_pattern: metadata_pattern_words = set( _words_regexp.findall(metadata_pattern.lower()) ) def predicate(match): if "intrinsic_metadata" not in match: return False return metadata_pattern_words.issubset( _dict_words_set(match["intrinsic_metadata"]) ) hits = filter(predicate, hits) if not url_pattern and not metadata_pattern: raise ValueError( "At least one of url_pattern and metadata_pattern must be provided." ) next_page_token: Optional[str] = None if with_visit: hits = filter(lambda o: o.get("has_visits"), hits) if min_nb_visits: hits = filter(lambda o: o.get("nb_visits", 0) >= min_nb_visits, hits) if min_last_visit_date: hits = filter( lambda o: datetime.fromisoformat( o.get("last_visit_date", "0001-01-01T00:00:00Z").replace( "Z", "+00:00" ) ) >= datetime.fromisoformat(min_last_visit_date), hits, ) if min_last_eventful_visit_date: hits = filter( lambda o: datetime.fromisoformat( o.get("last_eventful_visit_date", "0001-01-01T00:00:00Z").replace( "Z", "+00:00" ) ) >= datetime.fromisoformat(min_last_eventful_visit_date), hits, ) if min_last_revision_date: hits = filter( lambda o: datetime.fromisoformat( o.get("last_revision_date", "0001-01-01T00:00:00Z").replace( "Z", "+00:00" ) ) >= datetime.fromisoformat(min_last_revision_date), hits, ) if min_last_release_date: hits = filter( lambda o: datetime.fromisoformat( o.get("last_release_date", "0001-01-01T00:00:00Z").replace( "Z", "+00:00" ) ) >= datetime.fromisoformat(min_last_release_date), hits, ) if min_date_created: min_date_created_obj = datetime.strptime(min_date_created, "%Y-%m-%d") hits = filter( lambda o: datetime.strptime( _nested_get(o, get_expansion("date_created"))[0], "%Y-%m-%d" ) >= min_date_created_obj, hits, ) if min_date_modified: min_date_modified_obj = datetime.strptime(min_date_modified, "%Y-%m-%d") hits = filter( lambda o: datetime.strptime( _nested_get(o, get_expansion("date_modified"))[0], "%Y-%m-%d" ) >= min_date_modified_obj, hits, ) if min_date_published: min_date_published_obj = datetime.strptime(min_date_published, "%Y-%m-%d") hits = filter( lambda o: datetime.strptime( _nested_get(o, get_expansion("date_published"))[0], "%Y-%m-%d" ) >= min_date_published_obj, hits, ) if licenses: queried_licenses = [license_keyword.lower() for license_keyword in licenses] hits = filter( lambda o: any( # If any of the queried licenses are found, include the origin any( # returns True if queried_license_keyword is found # in any of the licenses of the origin queried_license_keyword in origin_license for origin_license in _nested_get(o, get_expansion("licenses")) ) for queried_license_keyword in queried_licenses ), hits, ) if programming_languages: queried_programming_languages = [ lang_keyword.lower() for lang_keyword in programming_languages ] hits = filter( lambda o: any( # If any of the queried languages are found, include the origin any( # returns True if queried_lang_keyword is found # in any of the langs of the origin queried_lang_keyword in origin_lang for origin_lang in _nested_get( o, get_expansion("programming_languages") ) ) for queried_lang_keyword in queried_programming_languages ), hits, ) if keywords: if sort_by: sort_by.append("-score") else: sort_by = ["-score"] from copy import deepcopy hits_list = deepcopy(list(hits)) for origin in hits_list: origin_keywords = [ _tokenize(keyword) for keyword in _nested_get(origin, get_expansion("keywords")) ] 
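# relevance scoring used by the loop below: each query keyword adds 2 to an
# origin's score when it matches one of the origin's tokenized keywords, and
# 1 when it appears in a description; origins left at score 0 are dropped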
origin_descriptions = [ _tokenize(description) for description in _nested_get( origin, get_expansion("descriptions") ) ] for q_keyword in keywords: for origin_keyword_tokens in origin_keywords: if q_keyword in origin_keyword_tokens: origin["score"] = origin.get("score", 0) + 2 for origin_description_token in origin_descriptions: if q_keyword in origin_description_token: origin["score"] = origin.get("score", 0) + 1 hits = (origin for origin in hits_list if origin.get("score", 0) > 0) if visit_types is not None: visit_types_set = set(visit_types) hits = filter( lambda o: visit_types_set.intersection(o.get("visit_types", set())), hits, ) hits_list = list(hits) if sort_by: sort_by_list = list(sort_by) hits_list.sort( key=lambda o: tuple( _get_sorting_key(o, field) for field in sort_by_list ) ) start_at_index = int(page_token) if page_token else 0 origins = [ {"url": hit["url"]} for hit in hits_list[start_at_index : start_at_index + limit] ] if len(origins) == limit: next_page_token = str(start_at_index + limit) assert len(origins) <= limit return PagedResult(results=origins, next_page_token=next_page_token,) def visit_types_count(self) -> Counter: hits = self._get_hits() return Counter(chain(*[hit.get("visit_types", []) for hit in hits])) def _get_hits(self) -> Iterator[Dict[str, Any]]: return ( self._origins[id_] for id_ in self._origin_ids if not self._origins[id_].get("blocklisted") ) diff --git a/swh/search/query_language/grammar.js b/swh/search/query_language/grammar.js index 594a934..7dca22d 100644 --- a/swh/search/query_language/grammar.js +++ b/swh/search/query_language/grammar.js @@ -1,192 +1,193 @@ // Copyright (C) 2021 The Software Heritage developers // See the AUTHORS file at the top-level directory of this distribution // License: GNU General Public License version 3, or any later version // See top-level LICENSE file for more information const { visitTypeField, sortByField, limitField } = require("./tokens.js"); const { patternFields, booleanFields, numericFields, listFields, dateFields } = require("./tokens.js"); -const { equalOp, rangeOp, choiceOp } = require("./tokens.js"); +const { equalOp, containOp, rangeOp, choiceOp } = require("./tokens.js"); const { sortByOptions, visitTypeOptions } = require("./tokens.js"); const { OR, AND, TRUE, FALSE } = require("./tokens.js"); const PRECEDENCE = { or: 2, and: 3, bracket: 4, } module.exports = grammar({ name: 'swh_search_ql', rules: { query: $ => seq( $.filters, optional(seq( optional($.and), choice( seq($.sortBy, optional($.and), optional($.limit)), seq($.limit, optional($.and), optional($.sortBy)), ), )) ), filters: $ => choice( prec.left(PRECEDENCE.and, seq( field('left', $.filters), field('operator', $.and), field('right', $.filters), ) ), prec.left(PRECEDENCE.or, seq( field('left', $.filters), field('operator', $.or), field('right', $.filters), ) ), prec.left(PRECEDENCE.bracket, seq("(", $.filters, ")"), ), $.filter ), sortBy: $ => annotateFilter($.sortByField, $.sortByOp, $.sortByVal), sortByField: $ => token(sortByField), sortByOp: $ => $.equalOp, sortByVal: $ => createArray(optionalWrapWith($.sortByOptions, ["'", '"'])), sortByOptions: $ => seq( optional('-'), choice(...sortByOptions) ), limit: $ => annotateFilter($.limitField, $.equalOp, $.number), limitField: $ => token(limitField), filter: $ => field('category', choice( $.patternFilter, $.booleanFilter, $.numericFilter, $.boundedListFilter, $.unboundedListFilter, $.dateFilter )), patternFilter: $ => annotateFilter($.patternField, $.patternOp, $.patternVal), patternField: $ 
=> token(choice(...patternFields)), - patternOp: $ => $.equalOp, + patternOp: $ => $.containOp, patternVal: $ => $.string, booleanFilter: $ => annotateFilter($.booleanField, $.booleanOp, $.booleanVal), booleanField: $ => token(choice(...booleanFields)), booleanOp: $ => $.equalOp, booleanVal: $ => choice($.booleanTrue, $.booleanFalse), numericFilter: $ => annotateFilter($.numericField, $.numericOp, $.numberVal), numericField: $ => token(choice(...numericFields)), numericOp: $ => $.rangeOp, numberVal: $ => $.number, // Array members must be from the given options boundedListFilter: $ => choice($.visitTypeFilter), visitTypeFilter: $ => annotateFilter($.visitTypeField, $.visitTypeOp, $.visitTypeVal), visitTypeField: $ => token(visitTypeField), visitTypeOp: $ => $.equalOp, visitTypeVal: $ => createArray(optionalWrapWith($.visitTypeOptions, ["'", '"'])), visitTypeOptions: $ => choice(...visitTypeOptions), // TODO: fetch visitTypeOptions choices dynamically from other swh services? // Array members can be any string unboundedListFilter: $ => annotateFilter($.listField, $.listOp, $.listVal), listField: $ => token(choice(...listFields)), listOp: $ => $.choiceOp, listVal: $ => createArray($.string), dateFilter: $ => annotateFilter($.dateField, $.dateOp, $.dateVal), dateField: $ => token(choice(...dateFields)), dateOp: $ => $.rangeOp, dateVal: $ => $.isoDateTime, rangeOp: $ => token(choice(...rangeOp)), equalOp: $ => token(choice(...equalOp)), + containOp: $ => token(choice(...containOp)), choiceOp: $ => token(choice(...choiceOp)), isoDateTime: $ => { const dateRegex = (/\d{4}[-]\d{2}[-]\d{2}/).source const dateTimeSepRegex = (/(\s|T)*/).source - const timeRegex = (/(\d{2}:\d{2}(:\d{2}(\.\d{6})?)?)?/).source - const timezoneRegex = (/(\+\d{2}:\d{2}|Z)?/).source - return new RegExp(dateRegex + dateTimeSepRegex + timeRegex + timezoneRegex) + const timeRegex = (/\d{2}:\d{2}(:\d{2}(\.\d{6})?)?/).source + const timezoneRegex = (/\+\d{2}:\d{2}|Z/).source + return new RegExp(`${dateRegex}(${dateTimeSepRegex}${timeRegex}(${timezoneRegex})?)?`) }, string: $ => choice(wrapWith($.stringContent, ["'", '"']), $.singleWord), number: $ => /\d+/, booleanTrue: $ => TRUE, booleanFalse: $ => FALSE, or: $ => OR, and: $ => AND, singleWord: $ => /[^\s"'\[\]\(\),]+/, // Based on tree-sitter-json grammar: stringContent: $ => repeat1(choice( token.immediate(/[^\\'"\n]+/), $.escape_sequence )), escape_sequence: $ => token.immediate(seq( '\\', /(\"|\'|\\|\/|b|n|r|t|u)/ )), } }); function joinBySep1(rule, sep) { // At least one repetition of the rule separated by `sep` return seq(rule, repeat(seq(sep, optional(rule)))) } function joinBySep(rule, sep = ",") { // Any number of repetitions of the rule separated by `sep` return optional(joinBySep1(rule, sep)) } function createArray(rule) { // An array having `rule` as its member return seq( "[", joinBySep( field('array_member', rule), "," ), "]" ) } function wrapWith(rule, wrappers = ["'", '"']) { // The rule must be wrapped with one of the wrappers const wrappedRules = wrappers.map(wrapper => seq(wrapper, rule, wrapper)) return choice(...wrappedRules) } function optionalWrapWith(rule, wrappers = ["'", '"']) { // The rule may or may not be wrapped with the wrappers return choice(wrapWith(rule, wrappers), rule) } function annotateFilter(filterField, filterOp, filterVal) { return seq( field('field', filterField), field('op', filterOp), field('value', filterVal) ); } diff --git a/swh/search/query_language/sample_query b/swh/search/query_language/sample_query index 3d8c08d..1ea3ad5 
100644 --- a/swh/search/query_language/sample_query +++ b/swh/search/query_language/sample_query @@ -1,6 +1,6 @@ -(origin = django/django and language in ["python"] or visits >= 5) or +(origin : django/django and language in ["python"] or visits >= 5) or (last_revision > 2020-01-01 and limit = 10) or (last_visit > 2021-01-01 or last_visit < 2020-01-01) or -(visited = false and metadata = "gitlab") or +(visited = false and metadata : "gitlab") or (keyword in ["orchestration", "kubectl"] and language in ["go", "rust"]) or (visit_type = [deb] and license in ["GPL-3"]) diff --git a/swh/search/query_language/src/grammar.json b/swh/search/query_language/src/grammar.json index 22ced21..61b54e7 100644 --- a/swh/search/query_language/src/grammar.json +++ b/swh/search/query_language/src/grammar.json @@ -1,1314 +1,1326 @@ { "name": "swh_search_ql", "rules": { "query": { "type": "SEQ", "members": [ { "type": "SYMBOL", "name": "filters" }, { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "and" }, { "type": "BLANK" } ] }, { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "SYMBOL", "name": "sortBy" }, { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "and" }, { "type": "BLANK" } ] }, { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "limit" }, { "type": "BLANK" } ] } ] }, { "type": "SEQ", "members": [ { "type": "SYMBOL", "name": "limit" }, { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "and" }, { "type": "BLANK" } ] }, { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "sortBy" }, { "type": "BLANK" } ] } ] } ] } ] }, { "type": "BLANK" } ] } ] }, "filters": { "type": "CHOICE", "members": [ { "type": "PREC_LEFT", "value": 3, "content": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "left", "content": { "type": "SYMBOL", "name": "filters" } }, { "type": "FIELD", "name": "operator", "content": { "type": "SYMBOL", "name": "and" } }, { "type": "FIELD", "name": "right", "content": { "type": "SYMBOL", "name": "filters" } } ] } }, { "type": "PREC_LEFT", "value": 2, "content": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "left", "content": { "type": "SYMBOL", "name": "filters" } }, { "type": "FIELD", "name": "operator", "content": { "type": "SYMBOL", "name": "or" } }, { "type": "FIELD", "name": "right", "content": { "type": "SYMBOL", "name": "filters" } } ] } }, { "type": "PREC_LEFT", "value": 4, "content": { "type": "SEQ", "members": [ { "type": "STRING", "value": "(" }, { "type": "SYMBOL", "name": "filters" }, { "type": "STRING", "value": ")" } ] } }, { "type": "SYMBOL", "name": "filter" } ] }, "sortBy": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "sortByField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "sortByOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "sortByVal" } } ] }, "sortByField": { "type": "TOKEN", "content": { "type": "STRING", "value": "sort_by" } }, "sortByOp": { "type": "SYMBOL", "name": "equalOp" }, "sortByVal": { "type": "SEQ", "members": [ { "type": "STRING", "value": "[" }, { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "FIELD", "name": "array_member", "content": { "type": "CHOICE", "members": [ { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "STRING", "value": "'" }, { "type": "SYMBOL", "name": "sortByOptions" }, { "type": "STRING", "value": "'" } ] 
}, { "type": "SEQ", "members": [ { "type": "STRING", "value": "\"" }, { "type": "SYMBOL", "name": "sortByOptions" }, { "type": "STRING", "value": "\"" } ] } ] }, { "type": "SYMBOL", "name": "sortByOptions" } ] } }, { "type": "REPEAT", "content": { "type": "SEQ", "members": [ { "type": "STRING", "value": "," }, { "type": "CHOICE", "members": [ { "type": "FIELD", "name": "array_member", "content": { "type": "CHOICE", "members": [ { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "STRING", "value": "'" }, { "type": "SYMBOL", "name": "sortByOptions" }, { "type": "STRING", "value": "'" } ] }, { "type": "SEQ", "members": [ { "type": "STRING", "value": "\"" }, { "type": "SYMBOL", "name": "sortByOptions" }, { "type": "STRING", "value": "\"" } ] } ] }, { "type": "SYMBOL", "name": "sortByOptions" } ] } }, { "type": "BLANK" } ] } ] } } ] }, { "type": "BLANK" } ] }, { "type": "STRING", "value": "]" } ] }, "sortByOptions": { "type": "SEQ", "members": [ { "type": "CHOICE", "members": [ { "type": "STRING", "value": "-" }, { "type": "BLANK" } ] }, { "type": "CHOICE", "members": [ { "type": "STRING", "value": "visits" }, { "type": "STRING", "value": "last_visit" }, { "type": "STRING", "value": "last_eventful_visit" }, { "type": "STRING", "value": "last_revision" }, { "type": "STRING", "value": "last_release" }, { "type": "STRING", "value": "created" }, { "type": "STRING", "value": "modified" }, { "type": "STRING", "value": "published" } ] } ] }, "limit": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "limitField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "equalOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "number" } } ] }, "limitField": { "type": "TOKEN", "content": { "type": "STRING", "value": "limit" } }, "filter": { "type": "FIELD", "name": "category", "content": { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "patternFilter" }, { "type": "SYMBOL", "name": "booleanFilter" }, { "type": "SYMBOL", "name": "numericFilter" }, { "type": "SYMBOL", "name": "boundedListFilter" }, { "type": "SYMBOL", "name": "unboundedListFilter" }, { "type": "SYMBOL", "name": "dateFilter" } ] } }, "patternFilter": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "patternField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "patternOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "patternVal" } } ] }, "patternField": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "origin" }, { "type": "STRING", "value": "metadata" } ] } }, "patternOp": { "type": "SYMBOL", - "name": "equalOp" + "name": "containOp" }, "patternVal": { "type": "SYMBOL", "name": "string" }, "booleanFilter": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "booleanField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "booleanOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "booleanVal" } } ] }, "booleanField": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "visited" } ] } }, "booleanOp": { "type": "SYMBOL", "name": "equalOp" }, "booleanVal": { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "booleanTrue" }, { "type": "SYMBOL", "name": "booleanFalse" } ] }, 
"numericFilter": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "numericField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "numericOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "numberVal" } } ] }, "numericField": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "visits" } ] } }, "numericOp": { "type": "SYMBOL", "name": "rangeOp" }, "numberVal": { "type": "SYMBOL", "name": "number" }, "boundedListFilter": { "type": "CHOICE", "members": [ { "type": "SYMBOL", "name": "visitTypeFilter" } ] }, "visitTypeFilter": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "visitTypeField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "visitTypeOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "visitTypeVal" } } ] }, "visitTypeField": { "type": "TOKEN", "content": { "type": "STRING", "value": "visit_type" } }, "visitTypeOp": { "type": "SYMBOL", "name": "equalOp" }, "visitTypeVal": { "type": "SEQ", "members": [ { "type": "STRING", "value": "[" }, { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "FIELD", "name": "array_member", "content": { "type": "CHOICE", "members": [ { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "STRING", "value": "'" }, { "type": "SYMBOL", "name": "visitTypeOptions" }, { "type": "STRING", "value": "'" } ] }, { "type": "SEQ", "members": [ { "type": "STRING", "value": "\"" }, { "type": "SYMBOL", "name": "visitTypeOptions" }, { "type": "STRING", "value": "\"" } ] } ] }, { "type": "SYMBOL", "name": "visitTypeOptions" } ] } }, { "type": "REPEAT", "content": { "type": "SEQ", "members": [ { "type": "STRING", "value": "," }, { "type": "CHOICE", "members": [ { "type": "FIELD", "name": "array_member", "content": { "type": "CHOICE", "members": [ { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "STRING", "value": "'" }, { "type": "SYMBOL", "name": "visitTypeOptions" }, { "type": "STRING", "value": "'" } ] }, { "type": "SEQ", "members": [ { "type": "STRING", "value": "\"" }, { "type": "SYMBOL", "name": "visitTypeOptions" }, { "type": "STRING", "value": "\"" } ] } ] }, { "type": "SYMBOL", "name": "visitTypeOptions" } ] } }, { "type": "BLANK" } ] } ] } } ] }, { "type": "BLANK" } ] }, { "type": "STRING", "value": "]" } ] }, "visitTypeOptions": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "any" }, { "type": "STRING", "value": "bzr" }, { "type": "STRING", "value": "cran" }, { "type": "STRING", "value": "cvs" }, { "type": "STRING", "value": "deb" }, { "type": "STRING", "value": "deposit" }, { "type": "STRING", "value": "ftp" }, { "type": "STRING", "value": "hg" }, { "type": "STRING", "value": "git" }, { "type": "STRING", "value": "nixguix" }, { "type": "STRING", "value": "npm" }, { "type": "STRING", "value": "opam" }, { "type": "STRING", "value": "pypi" }, { "type": "STRING", "value": "svn" }, { "type": "STRING", "value": "tar" } ] }, "unboundedListFilter": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "listField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "listOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "listVal" } } ] }, "listField": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ 
{ "type": "STRING", "value": "language" }, { "type": "STRING", "value": "license" }, { "type": "STRING", "value": "keyword" } ] } }, "listOp": { "type": "SYMBOL", "name": "choiceOp" }, "listVal": { "type": "SEQ", "members": [ { "type": "STRING", "value": "[" }, { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "FIELD", "name": "array_member", "content": { "type": "SYMBOL", "name": "string" } }, { "type": "REPEAT", "content": { "type": "SEQ", "members": [ { "type": "STRING", "value": "," }, { "type": "CHOICE", "members": [ { "type": "FIELD", "name": "array_member", "content": { "type": "SYMBOL", "name": "string" } }, { "type": "BLANK" } ] } ] } } ] }, { "type": "BLANK" } ] }, { "type": "STRING", "value": "]" } ] }, "dateFilter": { "type": "SEQ", "members": [ { "type": "FIELD", "name": "field", "content": { "type": "SYMBOL", "name": "dateField" } }, { "type": "FIELD", "name": "op", "content": { "type": "SYMBOL", "name": "dateOp" } }, { "type": "FIELD", "name": "value", "content": { "type": "SYMBOL", "name": "dateVal" } } ] }, "dateField": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "last_visit" }, { "type": "STRING", "value": "last_eventful_visit" }, { "type": "STRING", "value": "last_revision" }, { "type": "STRING", "value": "last_release" }, { "type": "STRING", "value": "created" }, { "type": "STRING", "value": "modified" }, { "type": "STRING", "value": "published" } ] } }, "dateOp": { "type": "SYMBOL", "name": "rangeOp" }, "dateVal": { "type": "SYMBOL", "name": "isoDateTime" }, "rangeOp": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "<" }, { "type": "STRING", "value": "<=" }, { "type": "STRING", "value": "=" }, { "type": "STRING", "value": "!=" }, { "type": "STRING", "value": ">=" }, { "type": "STRING", "value": ">" } ] } }, "equalOp": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "=" } ] } }, + "containOp": { + "type": "TOKEN", + "content": { + "type": "CHOICE", + "members": [ + { + "type": "STRING", + "value": ":" + } + ] + } + }, "choiceOp": { "type": "TOKEN", "content": { "type": "CHOICE", "members": [ { "type": "STRING", "value": "in" }, { "type": "STRING", "value": "not in" } ] } }, "isoDateTime": { "type": "PATTERN", - "value": "\\d{4}[-]\\d{2}[-]\\d{2}(\\s|T)*(\\d{2}:\\d{2}(:\\d{2}(\\.\\d{6})?)?)?(\\+\\d{2}:\\d{2}|Z)?" + "value": "\\d{4}[-]\\d{2}[-]\\d{2}((\\s|T)*\\d{2}:\\d{2}(:\\d{2}(\\.\\d{6})?)?(\\+\\d{2}:\\d{2}|Z)?)?" 
}, "string": { "type": "CHOICE", "members": [ { "type": "CHOICE", "members": [ { "type": "SEQ", "members": [ { "type": "STRING", "value": "'" }, { "type": "SYMBOL", "name": "stringContent" }, { "type": "STRING", "value": "'" } ] }, { "type": "SEQ", "members": [ { "type": "STRING", "value": "\"" }, { "type": "SYMBOL", "name": "stringContent" }, { "type": "STRING", "value": "\"" } ] } ] }, { "type": "SYMBOL", "name": "singleWord" } ] }, "number": { "type": "PATTERN", "value": "\\d+" }, "booleanTrue": { "type": "STRING", "value": "true" }, "booleanFalse": { "type": "STRING", "value": "false" }, "or": { "type": "STRING", "value": "or" }, "and": { "type": "STRING", "value": "and" }, "singleWord": { "type": "PATTERN", "value": "[^\\s\"'\\[\\]\\(\\),]+" }, "stringContent": { "type": "REPEAT1", "content": { "type": "CHOICE", "members": [ { "type": "IMMEDIATE_TOKEN", "content": { "type": "PATTERN", "value": "[^\\\\'\"\\n]+" } }, { "type": "SYMBOL", "name": "escape_sequence" } ] } }, "escape_sequence": { "type": "IMMEDIATE_TOKEN", "content": { "type": "SEQ", "members": [ { "type": "STRING", "value": "\\" }, { "type": "PATTERN", "value": "(\\\"|\\'|\\\\|\\/|b|n|r|t|u)" } ] } } }, "extras": [ { "type": "PATTERN", "value": "\\s" } ], "conflicts": [], "precedences": [], "externals": [], "inline": [], "supertypes": [] } diff --git a/swh/search/query_language/src/node-types.json b/swh/search/query_language/src/node-types.json index 71d51a5..aab470f 100644 --- a/swh/search/query_language/src/node-types.json +++ b/swh/search/query_language/src/node-types.json @@ -1,899 +1,903 @@ [ { "type": "booleanFilter", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "booleanField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "booleanOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "booleanVal", "named": true } ] } } }, { "type": "booleanOp", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "equalOp", "named": true } ] } }, { "type": "booleanVal", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "booleanFalse", "named": true }, { "type": "booleanTrue", "named": true } ] } }, { "type": "boundedListFilter", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "visitTypeFilter", "named": true } ] } }, { "type": "dateFilter", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "dateField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "dateOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "dateVal", "named": true } ] } } }, { "type": "dateOp", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "rangeOp", "named": true } ] } }, { "type": "dateVal", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "isoDateTime", "named": true } ] } }, { "type": "filter", "named": true, "fields": { "category": { "multiple": false, "required": true, "types": [ { "type": "booleanFilter", "named": true }, { "type": "boundedListFilter", "named": true }, { "type": "dateFilter", "named": true }, { "type": "numericFilter", "named": true }, { "type": "patternFilter", "named": true }, { "type": "unboundedListFilter", "named": true } ] } } }, 
{ "type": "filters", "named": true, "fields": { "left": { "multiple": false, "required": false, "types": [ { "type": "filters", "named": true } ] }, "operator": { "multiple": false, "required": false, "types": [ { "type": "and", "named": true }, { "type": "or", "named": true } ] }, "right": { "multiple": false, "required": false, "types": [ { "type": "filters", "named": true } ] } }, "children": { "multiple": false, "required": false, "types": [ { "type": "filter", "named": true }, { "type": "filters", "named": true } ] } }, { "type": "limit", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "limitField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "equalOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "number", "named": true } ] } } }, { "type": "listOp", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "choiceOp", "named": true } ] } }, { "type": "listVal", "named": true, "fields": { "array_member": { "multiple": true, "required": false, "types": [ { "type": "string", "named": true } ] } } }, { "type": "numberVal", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "number", "named": true } ] } }, { "type": "numericFilter", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "numericField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "numericOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "numberVal", "named": true } ] } } }, { "type": "numericOp", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "rangeOp", "named": true } ] } }, { "type": "patternFilter", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "patternField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "patternOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "patternVal", "named": true } ] } } }, { "type": "patternOp", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { - "type": "equalOp", + "type": "containOp", "named": true } ] } }, { "type": "patternVal", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "string", "named": true } ] } }, { "type": "query", "named": true, "fields": {}, "children": { "multiple": true, "required": true, "types": [ { "type": "and", "named": true }, { "type": "filters", "named": true }, { "type": "limit", "named": true }, { "type": "sortBy", "named": true } ] } }, { "type": "sortBy", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "sortByField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "sortByOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "sortByVal", "named": true } ] } } }, { "type": "sortByOp", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "equalOp", "named": true } ] } }, { "type": "sortByOptions", "named": true, "fields": {} }, { "type": "sortByVal", "named": true, "fields": { "array_member": { "multiple": true, "required": false, "types": [ { "type": "\"", 
"named": false }, { "type": "'", "named": false }, { "type": "sortByOptions", "named": true } ] } } }, { "type": "string", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "singleWord", "named": true }, { "type": "stringContent", "named": true } ] } }, { "type": "stringContent", "named": true, "fields": {}, "children": { "multiple": true, "required": false, "types": [ { "type": "escape_sequence", "named": true } ] } }, { "type": "unboundedListFilter", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "listField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "listOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "listVal", "named": true } ] } } }, { "type": "visitTypeFilter", "named": true, "fields": { "field": { "multiple": false, "required": true, "types": [ { "type": "visitTypeField", "named": true } ] }, "op": { "multiple": false, "required": true, "types": [ { "type": "visitTypeOp", "named": true } ] }, "value": { "multiple": false, "required": true, "types": [ { "type": "visitTypeVal", "named": true } ] } } }, { "type": "visitTypeOp", "named": true, "fields": {}, "children": { "multiple": false, "required": true, "types": [ { "type": "equalOp", "named": true } ] } }, { "type": "visitTypeOptions", "named": true, "fields": {} }, { "type": "visitTypeVal", "named": true, "fields": { "array_member": { "multiple": true, "required": false, "types": [ { "type": "\"", "named": false }, { "type": "'", "named": false }, { "type": "visitTypeOptions", "named": true } ] } } }, { "type": "\"", "named": false }, { "type": "'", "named": false }, { "type": "(", "named": false }, { "type": ")", "named": false }, { "type": ",", "named": false }, { "type": "-", "named": false }, { "type": "[", "named": false }, { "type": "]", "named": false }, { "type": "and", "named": true }, { "type": "any", "named": false }, { "type": "booleanFalse", "named": true }, { "type": "booleanField", "named": true }, { "type": "booleanTrue", "named": true }, { "type": "bzr", "named": false }, { "type": "choiceOp", "named": true }, + { + "type": "containOp", + "named": true + }, { "type": "cran", "named": false }, { "type": "created", "named": false }, { "type": "cvs", "named": false }, { "type": "dateField", "named": true }, { "type": "deb", "named": false }, { "type": "deposit", "named": false }, { "type": "equalOp", "named": true }, { "type": "escape_sequence", "named": true }, { "type": "ftp", "named": false }, { "type": "git", "named": false }, { "type": "hg", "named": false }, { "type": "isoDateTime", "named": true }, { "type": "last_eventful_visit", "named": false }, { "type": "last_release", "named": false }, { "type": "last_revision", "named": false }, { "type": "last_visit", "named": false }, { "type": "limitField", "named": true }, { "type": "listField", "named": true }, { "type": "modified", "named": false }, { "type": "nixguix", "named": false }, { "type": "npm", "named": false }, { "type": "number", "named": true }, { "type": "numericField", "named": true }, { "type": "opam", "named": false }, { "type": "or", "named": true }, { "type": "patternField", "named": true }, { "type": "published", "named": false }, { "type": "pypi", "named": false }, { "type": "rangeOp", "named": true }, { "type": "singleWord", "named": true }, { "type": "sortByField", "named": true }, { "type": "svn", "named": false }, { "type": "tar", "named": false }, { 
"type": "visitTypeField", "named": true }, { "type": "visits", "named": false } ] \ No newline at end of file diff --git a/swh/search/query_language/src/parser.c b/swh/search/query_language/src/parser.c index 32d2988..b2095d2 100644 --- a/swh/search/query_language/src/parser.c +++ b/swh/search/query_language/src/parser.c @@ -1,3385 +1,3404 @@ #include #if defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wmissing-field-initializers" #endif #ifdef _MSC_VER #pragma optimize("", off) #elif defined(__clang__) #pragma clang optimize off #elif defined(__GNUC__) #pragma GCC optimize ("O0") #endif #define LANGUAGE_VERSION 13 #define STATE_COUNT 126 #define LARGE_STATE_COUNT 2 -#define SYMBOL_COUNT 86 +#define SYMBOL_COUNT 87 #define ALIAS_COUNT 0 -#define TOKEN_COUNT 52 +#define TOKEN_COUNT 53 #define EXTERNAL_TOKEN_COUNT 0 #define FIELD_COUNT 8 #define MAX_ALIAS_SEQUENCE_LENGTH 6 #define PRODUCTION_ID_COUNT 9 enum { anon_sym_LPAREN = 1, anon_sym_RPAREN = 2, sym_sortByField = 3, anon_sym_LBRACK = 4, anon_sym_SQUOTE = 5, anon_sym_DQUOTE = 6, anon_sym_COMMA = 7, anon_sym_RBRACK = 8, anon_sym_DASH = 9, anon_sym_visits = 10, anon_sym_last_visit = 11, anon_sym_last_eventful_visit = 12, anon_sym_last_revision = 13, anon_sym_last_release = 14, anon_sym_created = 15, anon_sym_modified = 16, anon_sym_published = 17, sym_limitField = 18, sym_patternField = 19, sym_booleanField = 20, sym_numericField = 21, sym_visitTypeField = 22, anon_sym_any = 23, anon_sym_bzr = 24, anon_sym_cran = 25, anon_sym_cvs = 26, anon_sym_deb = 27, anon_sym_deposit = 28, anon_sym_ftp = 29, anon_sym_hg = 30, anon_sym_git = 31, anon_sym_nixguix = 32, anon_sym_npm = 33, anon_sym_opam = 34, anon_sym_pypi = 35, anon_sym_svn = 36, anon_sym_tar = 37, sym_listField = 38, sym_dateField = 39, sym_rangeOp = 40, sym_equalOp = 41, - sym_choiceOp = 42, - sym_isoDateTime = 43, - sym_number = 44, - sym_booleanTrue = 45, - sym_booleanFalse = 46, - sym_or = 47, - sym_and = 48, - sym_singleWord = 49, - aux_sym_stringContent_token1 = 50, - sym_escape_sequence = 51, - sym_query = 52, - sym_filters = 53, - sym_sortBy = 54, - sym_sortByOp = 55, - sym_sortByVal = 56, - sym_sortByOptions = 57, - sym_limit = 58, - sym_filter = 59, - sym_patternFilter = 60, - sym_patternOp = 61, - sym_patternVal = 62, - sym_booleanFilter = 63, - sym_booleanOp = 64, - sym_booleanVal = 65, - sym_numericFilter = 66, - sym_numericOp = 67, - sym_numberVal = 68, - sym_boundedListFilter = 69, - sym_visitTypeFilter = 70, - sym_visitTypeOp = 71, - sym_visitTypeVal = 72, - sym_visitTypeOptions = 73, - sym_unboundedListFilter = 74, - sym_listOp = 75, - sym_listVal = 76, - sym_dateFilter = 77, - sym_dateOp = 78, - sym_dateVal = 79, - sym_string = 80, - sym_stringContent = 81, - aux_sym_sortByVal_repeat1 = 82, - aux_sym_visitTypeVal_repeat1 = 83, - aux_sym_listVal_repeat1 = 84, - aux_sym_stringContent_repeat1 = 85, + sym_containOp = 42, + sym_choiceOp = 43, + sym_isoDateTime = 44, + sym_number = 45, + sym_booleanTrue = 46, + sym_booleanFalse = 47, + sym_or = 48, + sym_and = 49, + sym_singleWord = 50, + aux_sym_stringContent_token1 = 51, + sym_escape_sequence = 52, + sym_query = 53, + sym_filters = 54, + sym_sortBy = 55, + sym_sortByOp = 56, + sym_sortByVal = 57, + sym_sortByOptions = 58, + sym_limit = 59, + sym_filter = 60, + sym_patternFilter = 61, + sym_patternOp = 62, + sym_patternVal = 63, + sym_booleanFilter = 64, + sym_booleanOp = 65, + sym_booleanVal = 66, + sym_numericFilter = 67, + sym_numericOp = 68, + sym_numberVal = 69, + 
sym_boundedListFilter = 70, + sym_visitTypeFilter = 71, + sym_visitTypeOp = 72, + sym_visitTypeVal = 73, + sym_visitTypeOptions = 74, + sym_unboundedListFilter = 75, + sym_listOp = 76, + sym_listVal = 77, + sym_dateFilter = 78, + sym_dateOp = 79, + sym_dateVal = 80, + sym_string = 81, + sym_stringContent = 82, + aux_sym_sortByVal_repeat1 = 83, + aux_sym_visitTypeVal_repeat1 = 84, + aux_sym_listVal_repeat1 = 85, + aux_sym_stringContent_repeat1 = 86, }; static const char * const ts_symbol_names[] = { [ts_builtin_sym_end] = "end", [anon_sym_LPAREN] = "(", [anon_sym_RPAREN] = ")", [sym_sortByField] = "sortByField", [anon_sym_LBRACK] = "[", [anon_sym_SQUOTE] = "'", [anon_sym_DQUOTE] = "\"", [anon_sym_COMMA] = ",", [anon_sym_RBRACK] = "]", [anon_sym_DASH] = "-", [anon_sym_visits] = "visits", [anon_sym_last_visit] = "last_visit", [anon_sym_last_eventful_visit] = "last_eventful_visit", [anon_sym_last_revision] = "last_revision", [anon_sym_last_release] = "last_release", [anon_sym_created] = "created", [anon_sym_modified] = "modified", [anon_sym_published] = "published", [sym_limitField] = "limitField", [sym_patternField] = "patternField", [sym_booleanField] = "booleanField", [sym_numericField] = "numericField", [sym_visitTypeField] = "visitTypeField", [anon_sym_any] = "any", [anon_sym_bzr] = "bzr", [anon_sym_cran] = "cran", [anon_sym_cvs] = "cvs", [anon_sym_deb] = "deb", [anon_sym_deposit] = "deposit", [anon_sym_ftp] = "ftp", [anon_sym_hg] = "hg", [anon_sym_git] = "git", [anon_sym_nixguix] = "nixguix", [anon_sym_npm] = "npm", [anon_sym_opam] = "opam", [anon_sym_pypi] = "pypi", [anon_sym_svn] = "svn", [anon_sym_tar] = "tar", [sym_listField] = "listField", [sym_dateField] = "dateField", [sym_rangeOp] = "rangeOp", [sym_equalOp] = "equalOp", + [sym_containOp] = "containOp", [sym_choiceOp] = "choiceOp", [sym_isoDateTime] = "isoDateTime", [sym_number] = "number", [sym_booleanTrue] = "booleanTrue", [sym_booleanFalse] = "booleanFalse", [sym_or] = "or", [sym_and] = "and", [sym_singleWord] = "singleWord", [aux_sym_stringContent_token1] = "stringContent_token1", [sym_escape_sequence] = "escape_sequence", [sym_query] = "query", [sym_filters] = "filters", [sym_sortBy] = "sortBy", [sym_sortByOp] = "sortByOp", [sym_sortByVal] = "sortByVal", [sym_sortByOptions] = "sortByOptions", [sym_limit] = "limit", [sym_filter] = "filter", [sym_patternFilter] = "patternFilter", [sym_patternOp] = "patternOp", [sym_patternVal] = "patternVal", [sym_booleanFilter] = "booleanFilter", [sym_booleanOp] = "booleanOp", [sym_booleanVal] = "booleanVal", [sym_numericFilter] = "numericFilter", [sym_numericOp] = "numericOp", [sym_numberVal] = "numberVal", [sym_boundedListFilter] = "boundedListFilter", [sym_visitTypeFilter] = "visitTypeFilter", [sym_visitTypeOp] = "visitTypeOp", [sym_visitTypeVal] = "visitTypeVal", [sym_visitTypeOptions] = "visitTypeOptions", [sym_unboundedListFilter] = "unboundedListFilter", [sym_listOp] = "listOp", [sym_listVal] = "listVal", [sym_dateFilter] = "dateFilter", [sym_dateOp] = "dateOp", [sym_dateVal] = "dateVal", [sym_string] = "string", [sym_stringContent] = "stringContent", [aux_sym_sortByVal_repeat1] = "sortByVal_repeat1", [aux_sym_visitTypeVal_repeat1] = "visitTypeVal_repeat1", [aux_sym_listVal_repeat1] = "listVal_repeat1", [aux_sym_stringContent_repeat1] = "stringContent_repeat1", }; static const TSSymbol ts_symbol_map[] = { [ts_builtin_sym_end] = ts_builtin_sym_end, [anon_sym_LPAREN] = anon_sym_LPAREN, [anon_sym_RPAREN] = anon_sym_RPAREN, [sym_sortByField] = sym_sortByField, [anon_sym_LBRACK] = 
anon_sym_LBRACK, [anon_sym_SQUOTE] = anon_sym_SQUOTE, [anon_sym_DQUOTE] = anon_sym_DQUOTE, [anon_sym_COMMA] = anon_sym_COMMA, [anon_sym_RBRACK] = anon_sym_RBRACK, [anon_sym_DASH] = anon_sym_DASH, [anon_sym_visits] = anon_sym_visits, [anon_sym_last_visit] = anon_sym_last_visit, [anon_sym_last_eventful_visit] = anon_sym_last_eventful_visit, [anon_sym_last_revision] = anon_sym_last_revision, [anon_sym_last_release] = anon_sym_last_release, [anon_sym_created] = anon_sym_created, [anon_sym_modified] = anon_sym_modified, [anon_sym_published] = anon_sym_published, [sym_limitField] = sym_limitField, [sym_patternField] = sym_patternField, [sym_booleanField] = sym_booleanField, [sym_numericField] = sym_numericField, [sym_visitTypeField] = sym_visitTypeField, [anon_sym_any] = anon_sym_any, [anon_sym_bzr] = anon_sym_bzr, [anon_sym_cran] = anon_sym_cran, [anon_sym_cvs] = anon_sym_cvs, [anon_sym_deb] = anon_sym_deb, [anon_sym_deposit] = anon_sym_deposit, [anon_sym_ftp] = anon_sym_ftp, [anon_sym_hg] = anon_sym_hg, [anon_sym_git] = anon_sym_git, [anon_sym_nixguix] = anon_sym_nixguix, [anon_sym_npm] = anon_sym_npm, [anon_sym_opam] = anon_sym_opam, [anon_sym_pypi] = anon_sym_pypi, [anon_sym_svn] = anon_sym_svn, [anon_sym_tar] = anon_sym_tar, [sym_listField] = sym_listField, [sym_dateField] = sym_dateField, [sym_rangeOp] = sym_rangeOp, [sym_equalOp] = sym_equalOp, + [sym_containOp] = sym_containOp, [sym_choiceOp] = sym_choiceOp, [sym_isoDateTime] = sym_isoDateTime, [sym_number] = sym_number, [sym_booleanTrue] = sym_booleanTrue, [sym_booleanFalse] = sym_booleanFalse, [sym_or] = sym_or, [sym_and] = sym_and, [sym_singleWord] = sym_singleWord, [aux_sym_stringContent_token1] = aux_sym_stringContent_token1, [sym_escape_sequence] = sym_escape_sequence, [sym_query] = sym_query, [sym_filters] = sym_filters, [sym_sortBy] = sym_sortBy, [sym_sortByOp] = sym_sortByOp, [sym_sortByVal] = sym_sortByVal, [sym_sortByOptions] = sym_sortByOptions, [sym_limit] = sym_limit, [sym_filter] = sym_filter, [sym_patternFilter] = sym_patternFilter, [sym_patternOp] = sym_patternOp, [sym_patternVal] = sym_patternVal, [sym_booleanFilter] = sym_booleanFilter, [sym_booleanOp] = sym_booleanOp, [sym_booleanVal] = sym_booleanVal, [sym_numericFilter] = sym_numericFilter, [sym_numericOp] = sym_numericOp, [sym_numberVal] = sym_numberVal, [sym_boundedListFilter] = sym_boundedListFilter, [sym_visitTypeFilter] = sym_visitTypeFilter, [sym_visitTypeOp] = sym_visitTypeOp, [sym_visitTypeVal] = sym_visitTypeVal, [sym_visitTypeOptions] = sym_visitTypeOptions, [sym_unboundedListFilter] = sym_unboundedListFilter, [sym_listOp] = sym_listOp, [sym_listVal] = sym_listVal, [sym_dateFilter] = sym_dateFilter, [sym_dateOp] = sym_dateOp, [sym_dateVal] = sym_dateVal, [sym_string] = sym_string, [sym_stringContent] = sym_stringContent, [aux_sym_sortByVal_repeat1] = aux_sym_sortByVal_repeat1, [aux_sym_visitTypeVal_repeat1] = aux_sym_visitTypeVal_repeat1, [aux_sym_listVal_repeat1] = aux_sym_listVal_repeat1, [aux_sym_stringContent_repeat1] = aux_sym_stringContent_repeat1, }; static const TSSymbolMetadata ts_symbol_metadata[] = { [ts_builtin_sym_end] = { .visible = false, .named = true, }, [anon_sym_LPAREN] = { .visible = true, .named = false, }, [anon_sym_RPAREN] = { .visible = true, .named = false, }, [sym_sortByField] = { .visible = true, .named = true, }, [anon_sym_LBRACK] = { .visible = true, .named = false, }, [anon_sym_SQUOTE] = { .visible = true, .named = false, }, [anon_sym_DQUOTE] = { .visible = true, .named = false, }, [anon_sym_COMMA] = { .visible = true, .named 
= false, }, [anon_sym_RBRACK] = { .visible = true, .named = false, }, [anon_sym_DASH] = { .visible = true, .named = false, }, [anon_sym_visits] = { .visible = true, .named = false, }, [anon_sym_last_visit] = { .visible = true, .named = false, }, [anon_sym_last_eventful_visit] = { .visible = true, .named = false, }, [anon_sym_last_revision] = { .visible = true, .named = false, }, [anon_sym_last_release] = { .visible = true, .named = false, }, [anon_sym_created] = { .visible = true, .named = false, }, [anon_sym_modified] = { .visible = true, .named = false, }, [anon_sym_published] = { .visible = true, .named = false, }, [sym_limitField] = { .visible = true, .named = true, }, [sym_patternField] = { .visible = true, .named = true, }, [sym_booleanField] = { .visible = true, .named = true, }, [sym_numericField] = { .visible = true, .named = true, }, [sym_visitTypeField] = { .visible = true, .named = true, }, [anon_sym_any] = { .visible = true, .named = false, }, [anon_sym_bzr] = { .visible = true, .named = false, }, [anon_sym_cran] = { .visible = true, .named = false, }, [anon_sym_cvs] = { .visible = true, .named = false, }, [anon_sym_deb] = { .visible = true, .named = false, }, [anon_sym_deposit] = { .visible = true, .named = false, }, [anon_sym_ftp] = { .visible = true, .named = false, }, [anon_sym_hg] = { .visible = true, .named = false, }, [anon_sym_git] = { .visible = true, .named = false, }, [anon_sym_nixguix] = { .visible = true, .named = false, }, [anon_sym_npm] = { .visible = true, .named = false, }, [anon_sym_opam] = { .visible = true, .named = false, }, [anon_sym_pypi] = { .visible = true, .named = false, }, [anon_sym_svn] = { .visible = true, .named = false, }, [anon_sym_tar] = { .visible = true, .named = false, }, [sym_listField] = { .visible = true, .named = true, }, [sym_dateField] = { .visible = true, .named = true, }, [sym_rangeOp] = { .visible = true, .named = true, }, [sym_equalOp] = { .visible = true, .named = true, }, + [sym_containOp] = { + .visible = true, + .named = true, + }, [sym_choiceOp] = { .visible = true, .named = true, }, [sym_isoDateTime] = { .visible = true, .named = true, }, [sym_number] = { .visible = true, .named = true, }, [sym_booleanTrue] = { .visible = true, .named = true, }, [sym_booleanFalse] = { .visible = true, .named = true, }, [sym_or] = { .visible = true, .named = true, }, [sym_and] = { .visible = true, .named = true, }, [sym_singleWord] = { .visible = true, .named = true, }, [aux_sym_stringContent_token1] = { .visible = false, .named = false, }, [sym_escape_sequence] = { .visible = true, .named = true, }, [sym_query] = { .visible = true, .named = true, }, [sym_filters] = { .visible = true, .named = true, }, [sym_sortBy] = { .visible = true, .named = true, }, [sym_sortByOp] = { .visible = true, .named = true, }, [sym_sortByVal] = { .visible = true, .named = true, }, [sym_sortByOptions] = { .visible = true, .named = true, }, [sym_limit] = { .visible = true, .named = true, }, [sym_filter] = { .visible = true, .named = true, }, [sym_patternFilter] = { .visible = true, .named = true, }, [sym_patternOp] = { .visible = true, .named = true, }, [sym_patternVal] = { .visible = true, .named = true, }, [sym_booleanFilter] = { .visible = true, .named = true, }, [sym_booleanOp] = { .visible = true, .named = true, }, [sym_booleanVal] = { .visible = true, .named = true, }, [sym_numericFilter] = { .visible = true, .named = true, }, [sym_numericOp] = { .visible = true, .named = true, }, [sym_numberVal] = { .visible = true, .named = true, }, [sym_boundedListFilter] 
= { .visible = true, .named = true, }, [sym_visitTypeFilter] = { .visible = true, .named = true, }, [sym_visitTypeOp] = { .visible = true, .named = true, }, [sym_visitTypeVal] = { .visible = true, .named = true, }, [sym_visitTypeOptions] = { .visible = true, .named = true, }, [sym_unboundedListFilter] = { .visible = true, .named = true, }, [sym_listOp] = { .visible = true, .named = true, }, [sym_listVal] = { .visible = true, .named = true, }, [sym_dateFilter] = { .visible = true, .named = true, }, [sym_dateOp] = { .visible = true, .named = true, }, [sym_dateVal] = { .visible = true, .named = true, }, [sym_string] = { .visible = true, .named = true, }, [sym_stringContent] = { .visible = true, .named = true, }, [aux_sym_sortByVal_repeat1] = { .visible = false, .named = false, }, [aux_sym_visitTypeVal_repeat1] = { .visible = false, .named = false, }, [aux_sym_listVal_repeat1] = { .visible = false, .named = false, }, [aux_sym_stringContent_repeat1] = { .visible = false, .named = false, }, }; enum { field_array_member = 1, field_category = 2, field_field = 3, field_left = 4, field_op = 5, field_operator = 6, field_right = 7, field_value = 8, }; static const char * const ts_field_names[] = { [0] = NULL, [field_array_member] = "array_member", [field_category] = "category", [field_field] = "field", [field_left] = "left", [field_op] = "op", [field_operator] = "operator", [field_right] = "right", [field_value] = "value", }; static const TSFieldMapSlice ts_field_map_slices[PRODUCTION_ID_COUNT] = { [1] = {.index = 0, .length = 1}, [2] = {.index = 1, .length = 3}, [3] = {.index = 4, .length = 3}, [4] = {.index = 7, .length = 1}, [5] = {.index = 8, .length = 2}, [6] = {.index = 10, .length = 2}, [7] = {.index = 12, .length = 3}, [8] = {.index = 15, .length = 4}, }; static const TSFieldMapEntry ts_field_map_entries[] = { [0] = {field_category, 0}, [1] = {field_field, 0}, {field_op, 1}, {field_value, 2}, [4] = {field_left, 0}, {field_operator, 1}, {field_right, 2}, [7] = {field_array_member, 1}, [8] = {field_array_member, 1}, {field_array_member, 2, .inherited = true}, [10] = {field_array_member, 0, .inherited = true}, {field_array_member, 1, .inherited = true}, [12] = {field_array_member, 1}, {field_array_member, 2}, {field_array_member, 3}, [15] = {field_array_member, 1}, {field_array_member, 2}, {field_array_member, 3}, {field_array_member, 4, .inherited = true}, }; static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = { [0] = {0}, }; static const uint16_t ts_non_terminal_alias_map[] = { 0, }; static inline bool sym_singleWord_character_set_1(int32_t c) { return (c < '"' ? (c < '\r' ? (c < '\t' ? c == 0 : c <= '\n') : (c <= '\r' || c == ' ')) : (c <= '"' || (c < '[' ? (c < ',' ? 
(c >= '\'' && c <= ')') : c <= ',') : (c <= '[' || c == ']')))); } static bool ts_lex(TSLexer *lexer, TSStateId state) { START_LEXER(); eof = lexer->eof(lexer); switch (state) { case 0: - if (eof) ADVANCE(241); + if (eof) ADVANCE(242); if (lookahead == '!') ADVANCE(10); - if (lookahead == '"') ADVANCE(247); - if (lookahead == '\'') ADVANCE(246); - if (lookahead == '(') ADVANCE(242); - if (lookahead == ')') ADVANCE(243); - if (lookahead == ',') ADVANCE(248); - if (lookahead == '-') ADVANCE(250); - if (lookahead == '<') ADVANCE(282); - if (lookahead == '=') ADVANCE(281); - if (lookahead == '>') ADVANCE(282); - if (lookahead == '[') ADVANCE(245); - if (lookahead == '\\') ADVANCE(216); - if (lookahead == ']') ADVANCE(249); + if (lookahead == '"') ADVANCE(248); + if (lookahead == '\'') ADVANCE(247); + if (lookahead == '(') ADVANCE(243); + if (lookahead == ')') ADVANCE(244); + if (lookahead == ',') ADVANCE(249); + if (lookahead == '-') ADVANCE(251); + if (lookahead == ':') ADVANCE(285); + if (lookahead == '<') ADVANCE(283); + if (lookahead == '=') ADVANCE(282); + if (lookahead == '>') ADVANCE(283); + if (lookahead == '[') ADVANCE(246); + if (lookahead == '\\') ADVANCE(217); + if (lookahead == ']') ADVANCE(250); if (lookahead == 'a') ADVANCE(126); if (lookahead == 'b') ADVANCE(215); if (lookahead == 'c') ADVANCE(150); if (lookahead == 'd') ADVANCE(49); if (lookahead == 'f') ADVANCE(18); if (lookahead == 'g') ADVANCE(85); if (lookahead == 'h') ADVANCE(78); if (lookahead == 'i') ADVANCE(127); if (lookahead == 'k') ADVANCE(50); if (lookahead == 'l') ADVANCE(19); if (lookahead == 'm') ADVANCE(59); if (lookahead == 'n') ADVANCE(86); if (lookahead == 'o') ADVANCE(146); if (lookahead == 'p') ADVANCE(198); if (lookahead == 's') ADVANCE(141); if (lookahead == 't') ADVANCE(23); if (lookahead == 'v') ADVANCE(90); if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' || - lookahead == ' ') SKIP(239) - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(293); + lookahead == ' ') SKIP(240) + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(295); END_STATE(); case 1: if (lookahead == '\n') SKIP(4) - if (lookahead == '"') ADVANCE(247); - if (lookahead == '\'') ADVANCE(246); - if (lookahead == '\\') ADVANCE(216); + if (lookahead == '"') ADVANCE(248); + if (lookahead == '\'') ADVANCE(247); + if (lookahead == '\\') ADVANCE(217); if (lookahead == '\t' || lookahead == '\r' || - lookahead == ' ') ADVANCE(301); - if (lookahead != 0) ADVANCE(302); + lookahead == ' ') ADVANCE(303); + if (lookahead != 0) ADVANCE(304); END_STATE(); case 2: if (lookahead == ' ') ADVANCE(89); END_STATE(); case 3: - if (lookahead == '"') ADVANCE(247); - if (lookahead == '\'') ADVANCE(246); - if (lookahead == ',') ADVANCE(248); - if (lookahead == ']') ADVANCE(249); + if (lookahead == '"') ADVANCE(248); + if (lookahead == '\'') ADVANCE(247); + if (lookahead == ',') ADVANCE(249); + if (lookahead == ']') ADVANCE(250); if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' || lookahead == ' ') SKIP(3) if (lookahead != 0 && lookahead != '(' && lookahead != ')' && - lookahead != '[') ADVANCE(300); + lookahead != '[') ADVANCE(302); END_STATE(); case 4: - if (lookahead == '"') ADVANCE(247); - if (lookahead == '\'') ADVANCE(246); + if (lookahead == '"') ADVANCE(248); + if (lookahead == '\'') ADVANCE(247); if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' || lookahead == ' ') SKIP(4) END_STATE(); case 5: - if (lookahead == '(') ADVANCE(242); - if (lookahead == '=') ADVANCE(283); + if (lookahead == '(') ADVANCE(243); + 
if (lookahead == '=') ADVANCE(284); if (lookahead == 'c') ADVANCE(157); if (lookahead == 'k') ADVANCE(50); if (lookahead == 'l') ADVANCE(32); if (lookahead == 'm') ADVANCE(60); if (lookahead == 'o') ADVANCE(156); if (lookahead == 'p') ADVANCE(203); if (lookahead == 's') ADVANCE(140); if (lookahead == 'v') ADVANCE(113); if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' || lookahead == ' ') SKIP(5) - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(294); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(296); END_STATE(); case 6: - if (lookahead == '-') ADVANCE(224); + if (lookahead == '-') ADVANCE(225); END_STATE(); case 7: - if (lookahead == '-') ADVANCE(225); + if (lookahead == '-') ADVANCE(226); END_STATE(); case 8: - if (lookahead == ':') ADVANCE(226); + if (lookahead == ':') ADVANCE(227); END_STATE(); case 9: - if (lookahead == ':') ADVANCE(227); + if (lookahead == ':') ADVANCE(229); END_STATE(); case 10: - if (lookahead == '=') ADVANCE(281); + if (lookahead == '=') ADVANCE(282); END_STATE(); case 11: if (lookahead == '_') ADVANCE(53); END_STATE(); case 12: if (lookahead == '_') ADVANCE(35); END_STATE(); case 13: if (lookahead == '_') ADVANCE(206); END_STATE(); case 14: if (lookahead == '_') ADVANCE(189); if (lookahead == 'e') ADVANCE(42); - if (lookahead == 's') ADVANCE(251); + if (lookahead == 's') ADVANCE(252); END_STATE(); case 15: if (lookahead == '_') ADVANCE(189); if (lookahead == 'e') ADVANCE(42); - if (lookahead == 's') ADVANCE(262); + if (lookahead == 's') ADVANCE(263); END_STATE(); case 16: if (lookahead == '_') ADVANCE(73); END_STATE(); case 17: if (lookahead == '_') ADVANCE(207); END_STATE(); case 18: if (lookahead == 'a') ADVANCE(116); if (lookahead == 't') ADVANCE(147); END_STATE(); case 19: if (lookahead == 'a') ADVANCE(133); if (lookahead == 'i') ADVANCE(37); END_STATE(); case 20: - if (lookahead == 'a') ADVANCE(260); + if (lookahead == 'a') ADVANCE(261); END_STATE(); case 21: if (lookahead == 'a') ADVANCE(47); END_STATE(); case 22: if (lookahead == 'a') ADVANCE(124); END_STATE(); case 23: if (lookahead == 'a') ADVANCE(152); if (lookahead == 'r') ADVANCE(200); END_STATE(); case 24: if (lookahead == 'a') ADVANCE(129); if (lookahead == 'e') ADVANCE(27); END_STATE(); case 25: if (lookahead == 'a') ADVANCE(82); END_STATE(); case 26: if (lookahead == 'a') ADVANCE(162); if (lookahead == 'i') ADVANCE(125); END_STATE(); case 27: if (lookahead == 'a') ADVANCE(191); END_STATE(); case 28: if (lookahead == 'a') ADVANCE(194); END_STATE(); case 29: if (lookahead == 'a') ADVANCE(192); END_STATE(); case 30: if (lookahead == 'a') ADVANCE(166); END_STATE(); case 31: if (lookahead == 'a') ADVANCE(167); END_STATE(); case 32: if (lookahead == 'a') ADVANCE(134); if (lookahead == 'i') ADVANCE(37); END_STATE(); case 33: - if (lookahead == 'b') ADVANCE(268); + if (lookahead == 'b') ADVANCE(269); if (lookahead == 'p') ADVANCE(145); END_STATE(); case 34: if (lookahead == 'b') ADVANCE(119); END_STATE(); case 35: if (lookahead == 'b') ADVANCE(213); END_STATE(); case 36: if (lookahead == 'b') ADVANCE(120); END_STATE(); case 37: if (lookahead == 'c') ADVANCE(63); if (lookahead == 'm') ADVANCE(97); END_STATE(); case 38: - if (lookahead == 'd') ADVANCE(299); + if (lookahead == 'd') ADVANCE(301); END_STATE(); case 39: - if (lookahead == 'd') ADVANCE(299); - if (lookahead == 'y') ADVANCE(264); + if (lookahead == 'd') ADVANCE(301); + if (lookahead == 'y') ADVANCE(265); END_STATE(); case 40: - if (lookahead == 'd') ADVANCE(256); + if (lookahead == 'd') ADVANCE(257); END_STATE(); case 
41: - if (lookahead == 'd') ADVANCE(279); + if (lookahead == 'd') ADVANCE(280); END_STATE(); case 42: - if (lookahead == 'd') ADVANCE(261); + if (lookahead == 'd') ADVANCE(262); END_STATE(); case 43: - if (lookahead == 'd') ADVANCE(257); + if (lookahead == 'd') ADVANCE(258); END_STATE(); case 44: - if (lookahead == 'd') ADVANCE(258); + if (lookahead == 'd') ADVANCE(259); END_STATE(); case 45: - if (lookahead == 'd') ADVANCE(280); + if (lookahead == 'd') ADVANCE(281); END_STATE(); case 46: if (lookahead == 'd') ADVANCE(87); END_STATE(); case 47: if (lookahead == 'd') ADVANCE(29); END_STATE(); case 48: if (lookahead == 'd') ADVANCE(109); END_STATE(); case 49: if (lookahead == 'e') ADVANCE(33); END_STATE(); case 50: if (lookahead == 'e') ADVANCE(212); END_STATE(); case 51: - if (lookahead == 'e') ADVANCE(295); + if (lookahead == 'e') ADVANCE(297); END_STATE(); case 52: - if (lookahead == 'e') ADVANCE(296); + if (lookahead == 'e') ADVANCE(298); END_STATE(); case 53: if (lookahead == 'e') ADVANCE(205); if (lookahead == 'r') ADVANCE(55); if (lookahead == 'v') ADVANCE(111); END_STATE(); case 54: - if (lookahead == 'e') ADVANCE(279); + if (lookahead == 'e') ADVANCE(280); END_STATE(); case 55: if (lookahead == 'e') ADVANCE(118); END_STATE(); case 56: - if (lookahead == 'e') ADVANCE(263); + if (lookahead == 'e') ADVANCE(264); END_STATE(); case 57: - if (lookahead == 'e') ADVANCE(255); + if (lookahead == 'e') ADVANCE(256); END_STATE(); case 58: - if (lookahead == 'e') ADVANCE(280); + if (lookahead == 'e') ADVANCE(281); END_STATE(); case 59: if (lookahead == 'e') ADVANCE(190); if (lookahead == 'o') ADVANCE(46); END_STATE(); case 60: if (lookahead == 'e') ADVANCE(190); if (lookahead == 'o') ADVANCE(48); END_STATE(); case 61: if (lookahead == 'e') ADVANCE(27); END_STATE(); case 62: if (lookahead == 'e') ADVANCE(40); END_STATE(); case 63: if (lookahead == 'e') ADVANCE(137); END_STATE(); case 64: if (lookahead == 'e') ADVANCE(43); END_STATE(); case 65: if (lookahead == 'e') ADVANCE(136); END_STATE(); case 66: if (lookahead == 'e') ADVANCE(44); END_STATE(); case 67: if (lookahead == 'e') ADVANCE(30); END_STATE(); case 68: if (lookahead == 'e') ADVANCE(45); END_STATE(); case 69: if (lookahead == 'e') ADVANCE(28); END_STATE(); case 70: if (lookahead == 'e') ADVANCE(31); END_STATE(); case 71: if (lookahead == 'e') ADVANCE(121); END_STATE(); case 72: if (lookahead == 'e') ADVANCE(138); END_STATE(); case 73: if (lookahead == 'e') ADVANCE(208); if (lookahead == 'r') ADVANCE(71); if (lookahead == 'v') ADVANCE(114); END_STATE(); case 74: if (lookahead == 'f') ADVANCE(199); END_STATE(); case 75: if (lookahead == 'f') ADVANCE(99); END_STATE(); case 76: if (lookahead == 'f') ADVANCE(103); END_STATE(); case 77: if (lookahead == 'f') ADVANCE(204); END_STATE(); case 78: - if (lookahead == 'g') ADVANCE(271); + if (lookahead == 'g') ADVANCE(272); END_STATE(); case 79: if (lookahead == 'g') ADVANCE(201); END_STATE(); case 80: if (lookahead == 'g') ADVANCE(202); END_STATE(); case 81: if (lookahead == 'g') ADVANCE(96); END_STATE(); case 82: if (lookahead == 'g') ADVANCE(54); END_STATE(); case 83: if (lookahead == 'h') ADVANCE(66); END_STATE(); case 84: if (lookahead == 'h') ADVANCE(68); END_STATE(); case 85: if (lookahead == 'i') ADVANCE(177); END_STATE(); case 86: if (lookahead == 'i') ADVANCE(211); if (lookahead == 'o') ADVANCE(178); if (lookahead == 'p') ADVANCE(123); END_STATE(); case 87: if (lookahead == 'i') ADVANCE(75); END_STATE(); case 88: - if (lookahead == 'i') ADVANCE(276); + if (lookahead == 'i') ADVANCE(277); 
END_STATE(); case 89: if (lookahead == 'i') ADVANCE(127); END_STATE(); case 90: if (lookahead == 'i') ADVANCE(170); END_STATE(); case 91: if (lookahead == 'i') ADVANCE(210); END_STATE(); case 92: if (lookahead == 'i') ADVANCE(81); END_STATE(); case 93: if (lookahead == 'i') ADVANCE(143); END_STATE(); case 94: if (lookahead == 'i') ADVANCE(160); END_STATE(); case 95: if (lookahead == 'i') ADVANCE(165); END_STATE(); case 96: if (lookahead == 'i') ADVANCE(130); END_STATE(); case 97: if (lookahead == 'i') ADVANCE(180); END_STATE(); case 98: if (lookahead == 'i') ADVANCE(181); END_STATE(); case 99: if (lookahead == 'i') ADVANCE(64); END_STATE(); case 100: if (lookahead == 'i') ADVANCE(182); END_STATE(); case 101: if (lookahead == 'i') ADVANCE(183); END_STATE(); case 102: if (lookahead == 'i') ADVANCE(184); END_STATE(); case 103: if (lookahead == 'i') ADVANCE(68); END_STATE(); case 104: if (lookahead == 'i') ADVANCE(188); END_STATE(); case 105: if (lookahead == 'i') ADVANCE(185); END_STATE(); case 106: if (lookahead == 'i') ADVANCE(193); END_STATE(); case 107: if (lookahead == 'i') ADVANCE(144); END_STATE(); case 108: if (lookahead == 'i') ADVANCE(168); END_STATE(); case 109: if (lookahead == 'i') ADVANCE(76); END_STATE(); case 110: if (lookahead == 'i') ADVANCE(169); END_STATE(); case 111: if (lookahead == 'i') ADVANCE(172); END_STATE(); case 112: if (lookahead == 'i') ADVANCE(173); END_STATE(); case 113: if (lookahead == 'i') ADVANCE(174); END_STATE(); case 114: if (lookahead == 'i') ADVANCE(175); END_STATE(); case 115: if (lookahead == 'i') ADVANCE(176); END_STATE(); case 116: if (lookahead == 'l') ADVANCE(163); END_STATE(); case 117: if (lookahead == 'l') ADVANCE(13); END_STATE(); case 118: if (lookahead == 'l') ADVANCE(67); if (lookahead == 'v') ADVANCE(95); END_STATE(); case 119: if (lookahead == 'l') ADVANCE(94); END_STATE(); case 120: if (lookahead == 'l') ADVANCE(108); END_STATE(); case 121: if (lookahead == 'l') ADVANCE(70); if (lookahead == 'v') ADVANCE(110); END_STATE(); case 122: if (lookahead == 'l') ADVANCE(17); END_STATE(); case 123: - if (lookahead == 'm') ADVANCE(274); + if (lookahead == 'm') ADVANCE(275); END_STATE(); case 124: - if (lookahead == 'm') ADVANCE(275); + if (lookahead == 'm') ADVANCE(276); END_STATE(); case 125: if (lookahead == 'm') ADVANCE(97); END_STATE(); case 126: if (lookahead == 'n') ADVANCE(39); END_STATE(); case 127: - if (lookahead == 'n') ADVANCE(284); + if (lookahead == 'n') ADVANCE(286); END_STATE(); case 128: - if (lookahead == 'n') ADVANCE(277); + if (lookahead == 'n') ADVANCE(278); END_STATE(); case 129: - if (lookahead == 'n') ADVANCE(266); + if (lookahead == 'n') ADVANCE(267); END_STATE(); case 130: - if (lookahead == 'n') ADVANCE(260); + if (lookahead == 'n') ADVANCE(261); END_STATE(); case 131: - if (lookahead == 'n') ADVANCE(254); + if (lookahead == 'n') ADVANCE(255); END_STATE(); case 132: - if (lookahead == 'n') ADVANCE(280); + if (lookahead == 'n') ADVANCE(281); END_STATE(); case 133: if (lookahead == 'n') ADVANCE(79); if (lookahead == 's') ADVANCE(179); END_STATE(); case 134: if (lookahead == 'n') ADVANCE(79); if (lookahead == 's') ADVANCE(195); END_STATE(); case 135: if (lookahead == 'n') ADVANCE(38); END_STATE(); case 136: if (lookahead == 'n') ADVANCE(187); END_STATE(); case 137: if (lookahead == 'n') ADVANCE(164); END_STATE(); case 138: if (lookahead == 'n') ADVANCE(196); END_STATE(); case 139: if (lookahead == 'o') ADVANCE(46); END_STATE(); case 140: if (lookahead == 'o') ADVANCE(155); END_STATE(); case 141: if (lookahead == 'o') 
ADVANCE(155); if (lookahead == 'v') ADVANCE(128); END_STATE(); case 142: if (lookahead == 'o') ADVANCE(154); END_STATE(); case 143: if (lookahead == 'o') ADVANCE(131); END_STATE(); case 144: if (lookahead == 'o') ADVANCE(132); END_STATE(); case 145: if (lookahead == 'o') ADVANCE(171); END_STATE(); case 146: if (lookahead == 'p') ADVANCE(22); - if (lookahead == 'r') ADVANCE(298); + if (lookahead == 'r') ADVANCE(300); END_STATE(); case 147: - if (lookahead == 'p') ADVANCE(270); + if (lookahead == 'p') ADVANCE(271); END_STATE(); case 148: if (lookahead == 'p') ADVANCE(88); END_STATE(); case 149: if (lookahead == 'p') ADVANCE(56); END_STATE(); case 150: if (lookahead == 'r') ADVANCE(24); if (lookahead == 'v') ADVANCE(159); END_STATE(); case 151: - if (lookahead == 'r') ADVANCE(265); + if (lookahead == 'r') ADVANCE(266); END_STATE(); case 152: - if (lookahead == 'r') ADVANCE(278); + if (lookahead == 'r') ADVANCE(279); END_STATE(); case 153: - if (lookahead == 'r') ADVANCE(297); + if (lookahead == 'r') ADVANCE(299); END_STATE(); case 154: if (lookahead == 'r') ADVANCE(41); END_STATE(); case 155: if (lookahead == 'r') ADVANCE(186); END_STATE(); case 156: if (lookahead == 'r') ADVANCE(92); END_STATE(); case 157: if (lookahead == 'r') ADVANCE(69); END_STATE(); case 158: if (lookahead == 'r') ADVANCE(61); END_STATE(); case 159: - if (lookahead == 's') ADVANCE(267); + if (lookahead == 's') ADVANCE(268); END_STATE(); case 160: if (lookahead == 's') ADVANCE(83); END_STATE(); case 161: - if (lookahead == 's') ADVANCE(251); + if (lookahead == 's') ADVANCE(252); END_STATE(); case 162: if (lookahead == 's') ADVANCE(179); END_STATE(); case 163: if (lookahead == 's') ADVANCE(52); END_STATE(); case 164: if (lookahead == 's') ADVANCE(54); END_STATE(); case 165: if (lookahead == 's') ADVANCE(93); END_STATE(); case 166: if (lookahead == 's') ADVANCE(57); END_STATE(); case 167: if (lookahead == 's') ADVANCE(58); END_STATE(); case 168: if (lookahead == 's') ADVANCE(84); END_STATE(); case 169: if (lookahead == 's') ADVANCE(107); END_STATE(); case 170: if (lookahead == 's') ADVANCE(98); END_STATE(); case 171: if (lookahead == 's') ADVANCE(100); END_STATE(); case 172: if (lookahead == 's') ADVANCE(101); END_STATE(); case 173: if (lookahead == 's') ADVANCE(102); END_STATE(); case 174: if (lookahead == 's') ADVANCE(104); END_STATE(); case 175: if (lookahead == 's') ADVANCE(105); END_STATE(); case 176: if (lookahead == 's') ADVANCE(106); END_STATE(); case 177: - if (lookahead == 't') ADVANCE(272); + if (lookahead == 't') ADVANCE(273); END_STATE(); case 178: if (lookahead == 't') ADVANCE(2); END_STATE(); case 179: if (lookahead == 't') ADVANCE(11); END_STATE(); case 180: - if (lookahead == 't') ADVANCE(259); + if (lookahead == 't') ADVANCE(260); END_STATE(); case 181: if (lookahead == 't') ADVANCE(14); END_STATE(); case 182: - if (lookahead == 't') ADVANCE(269); + if (lookahead == 't') ADVANCE(270); END_STATE(); case 183: - if (lookahead == 't') ADVANCE(252); + if (lookahead == 't') ADVANCE(253); END_STATE(); case 184: - if (lookahead == 't') ADVANCE(253); + if (lookahead == 't') ADVANCE(254); END_STATE(); case 185: - if (lookahead == 't') ADVANCE(280); + if (lookahead == 't') ADVANCE(281); END_STATE(); case 186: if (lookahead == 't') ADVANCE(12); END_STATE(); case 187: if (lookahead == 't') ADVANCE(74); END_STATE(); case 188: if (lookahead == 't') ADVANCE(15); END_STATE(); case 189: if (lookahead == 't') ADVANCE(214); END_STATE(); case 190: if (lookahead == 't') ADVANCE(21); END_STATE(); case 191: if (lookahead == 't') 
ADVANCE(62); END_STATE(); case 192: if (lookahead == 't') ADVANCE(20); END_STATE(); case 193: if (lookahead == 't') ADVANCE(161); END_STATE(); case 194: if (lookahead == 't') ADVANCE(68); END_STATE(); case 195: if (lookahead == 't') ADVANCE(16); END_STATE(); case 196: if (lookahead == 't') ADVANCE(77); END_STATE(); case 197: if (lookahead == 'u') ADVANCE(34); END_STATE(); case 198: if (lookahead == 'u') ADVANCE(34); if (lookahead == 'y') ADVANCE(148); END_STATE(); case 199: if (lookahead == 'u') ADVANCE(117); END_STATE(); case 200: if (lookahead == 'u') ADVANCE(51); END_STATE(); case 201: if (lookahead == 'u') ADVANCE(25); END_STATE(); case 202: if (lookahead == 'u') ADVANCE(91); END_STATE(); case 203: if (lookahead == 'u') ADVANCE(36); END_STATE(); case 204: if (lookahead == 'u') ADVANCE(122); END_STATE(); case 205: if (lookahead == 'v') ADVANCE(65); END_STATE(); case 206: if (lookahead == 'v') ADVANCE(112); END_STATE(); case 207: if (lookahead == 'v') ADVANCE(114); END_STATE(); case 208: if (lookahead == 'v') ADVANCE(72); END_STATE(); case 209: if (lookahead == 'w') ADVANCE(142); END_STATE(); case 210: - if (lookahead == 'x') ADVANCE(273); + if (lookahead == 'x') ADVANCE(274); END_STATE(); case 211: if (lookahead == 'x') ADVANCE(80); END_STATE(); case 212: if (lookahead == 'y') ADVANCE(209); END_STATE(); case 213: - if (lookahead == 'y') ADVANCE(244); + if (lookahead == 'y') ADVANCE(245); END_STATE(); case 214: if (lookahead == 'y') ADVANCE(149); END_STATE(); case 215: if (lookahead == 'z') ADVANCE(151); END_STATE(); case 216: + if (lookahead == '\t' || + lookahead == '\n' || + lookahead == '\r' || + lookahead == ' ' || + lookahead == 'T') ADVANCE(216); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(220); + END_STATE(); + case 217: if (lookahead == '"' || lookahead == '\'' || lookahead == '/' || lookahead == '\\' || lookahead == 'b' || lookahead == 'n' || lookahead == 'r' || lookahead == 't' || - lookahead == 'u') ADVANCE(303); - END_STATE(); - case 217: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(7); + lookahead == 'u') ADVANCE(305); END_STATE(); case 218: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(289); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(7); END_STATE(); case 219: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(285); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(291); END_STATE(); case 220: if (('0' <= lookahead && lookahead <= '9')) ADVANCE(8); END_STATE(); case 221: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(287); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(289); END_STATE(); case 222: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(286); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(287); END_STATE(); case 223: if (('0' <= lookahead && lookahead <= '9')) ADVANCE(288); END_STATE(); case 224: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(217); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(290); END_STATE(); case 225: if (('0' <= lookahead && lookahead <= '9')) ADVANCE(218); END_STATE(); case 226: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(221); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(219); END_STATE(); case 227: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(219); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(221); END_STATE(); case 228: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(222); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(223); END_STATE(); case 229: - if (('0' <= lookahead && lookahead <= '9')) 
ADVANCE(223); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(222); END_STATE(); case 230: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(6); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(224); END_STATE(); case 231: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(230); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(6); END_STATE(); case 232: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(229); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(231); END_STATE(); case 233: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(231); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(230); END_STATE(); case 234: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(9); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(232); END_STATE(); case 235: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(234); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(9); END_STATE(); case 236: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(232); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(235); END_STATE(); case 237: - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(236); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(233); END_STATE(); case 238: if (('0' <= lookahead && lookahead <= '9')) ADVANCE(237); END_STATE(); case 239: - if (eof) ADVANCE(241); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(238); + END_STATE(); + case 240: + if (eof) ADVANCE(242); if (lookahead == '!') ADVANCE(10); - if (lookahead == '"') ADVANCE(247); - if (lookahead == '\'') ADVANCE(246); - if (lookahead == '(') ADVANCE(242); - if (lookahead == ')') ADVANCE(243); - if (lookahead == ',') ADVANCE(248); - if (lookahead == '-') ADVANCE(250); - if (lookahead == '<') ADVANCE(282); - if (lookahead == '=') ADVANCE(281); - if (lookahead == '>') ADVANCE(282); - if (lookahead == '[') ADVANCE(245); - if (lookahead == ']') ADVANCE(249); + if (lookahead == '"') ADVANCE(248); + if (lookahead == '\'') ADVANCE(247); + if (lookahead == '(') ADVANCE(243); + if (lookahead == ')') ADVANCE(244); + if (lookahead == ',') ADVANCE(249); + if (lookahead == '-') ADVANCE(251); + if (lookahead == ':') ADVANCE(285); + if (lookahead == '<') ADVANCE(283); + if (lookahead == '=') ADVANCE(282); + if (lookahead == '>') ADVANCE(283); + if (lookahead == '[') ADVANCE(246); + if (lookahead == ']') ADVANCE(250); if (lookahead == 'a') ADVANCE(126); if (lookahead == 'b') ADVANCE(215); if (lookahead == 'c') ADVANCE(150); if (lookahead == 'd') ADVANCE(49); if (lookahead == 'f') ADVANCE(18); if (lookahead == 'g') ADVANCE(85); if (lookahead == 'h') ADVANCE(78); if (lookahead == 'i') ADVANCE(127); if (lookahead == 'k') ADVANCE(50); if (lookahead == 'l') ADVANCE(19); if (lookahead == 'm') ADVANCE(59); if (lookahead == 'n') ADVANCE(86); if (lookahead == 'o') ADVANCE(146); if (lookahead == 'p') ADVANCE(198); if (lookahead == 's') ADVANCE(141); if (lookahead == 't') ADVANCE(23); if (lookahead == 'v') ADVANCE(90); if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' || - lookahead == ' ') SKIP(239) - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(293); + lookahead == ' ') SKIP(240) + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(295); END_STATE(); - case 240: - if (eof) ADVANCE(241); + case 241: + if (eof) ADVANCE(242); if (lookahead == '!') ADVANCE(10); - if (lookahead == '"') ADVANCE(247); - if (lookahead == '\'') ADVANCE(246); - if (lookahead == ')') ADVANCE(243); - if (lookahead == ',') ADVANCE(248); - if (lookahead == '-') ADVANCE(250); - if (lookahead == '<') 
ADVANCE(282); - if (lookahead == '=') ADVANCE(281); - if (lookahead == '>') ADVANCE(282); - if (lookahead == ']') ADVANCE(249); + if (lookahead == '"') ADVANCE(248); + if (lookahead == '\'') ADVANCE(247); + if (lookahead == ')') ADVANCE(244); + if (lookahead == ',') ADVANCE(249); + if (lookahead == '-') ADVANCE(251); + if (lookahead == '<') ADVANCE(283); + if (lookahead == '=') ADVANCE(282); + if (lookahead == '>') ADVANCE(283); + if (lookahead == ']') ADVANCE(250); if (lookahead == 'a') ADVANCE(135); if (lookahead == 'c') ADVANCE(158); if (lookahead == 'l') ADVANCE(26); if (lookahead == 'm') ADVANCE(139); if (lookahead == 'o') ADVANCE(153); if (lookahead == 'p') ADVANCE(197); if (lookahead == 's') ADVANCE(140); if (lookahead == 'v') ADVANCE(115); if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' || - lookahead == ' ') SKIP(240) - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(233); - END_STATE(); - case 241: - ACCEPT_TOKEN(ts_builtin_sym_end); + lookahead == ' ') SKIP(241) + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(234); END_STATE(); case 242: - ACCEPT_TOKEN(anon_sym_LPAREN); + ACCEPT_TOKEN(ts_builtin_sym_end); END_STATE(); case 243: - ACCEPT_TOKEN(anon_sym_RPAREN); + ACCEPT_TOKEN(anon_sym_LPAREN); END_STATE(); case 244: - ACCEPT_TOKEN(sym_sortByField); + ACCEPT_TOKEN(anon_sym_RPAREN); END_STATE(); case 245: - ACCEPT_TOKEN(anon_sym_LBRACK); + ACCEPT_TOKEN(sym_sortByField); END_STATE(); case 246: - ACCEPT_TOKEN(anon_sym_SQUOTE); + ACCEPT_TOKEN(anon_sym_LBRACK); END_STATE(); case 247: - ACCEPT_TOKEN(anon_sym_DQUOTE); + ACCEPT_TOKEN(anon_sym_SQUOTE); END_STATE(); case 248: - ACCEPT_TOKEN(anon_sym_COMMA); + ACCEPT_TOKEN(anon_sym_DQUOTE); END_STATE(); case 249: - ACCEPT_TOKEN(anon_sym_RBRACK); + ACCEPT_TOKEN(anon_sym_COMMA); END_STATE(); case 250: - ACCEPT_TOKEN(anon_sym_DASH); + ACCEPT_TOKEN(anon_sym_RBRACK); END_STATE(); case 251: - ACCEPT_TOKEN(anon_sym_visits); + ACCEPT_TOKEN(anon_sym_DASH); END_STATE(); case 252: - ACCEPT_TOKEN(anon_sym_last_visit); + ACCEPT_TOKEN(anon_sym_visits); END_STATE(); case 253: - ACCEPT_TOKEN(anon_sym_last_eventful_visit); + ACCEPT_TOKEN(anon_sym_last_visit); END_STATE(); case 254: - ACCEPT_TOKEN(anon_sym_last_revision); + ACCEPT_TOKEN(anon_sym_last_eventful_visit); END_STATE(); case 255: - ACCEPT_TOKEN(anon_sym_last_release); + ACCEPT_TOKEN(anon_sym_last_revision); END_STATE(); case 256: - ACCEPT_TOKEN(anon_sym_created); + ACCEPT_TOKEN(anon_sym_last_release); END_STATE(); case 257: - ACCEPT_TOKEN(anon_sym_modified); + ACCEPT_TOKEN(anon_sym_created); END_STATE(); case 258: - ACCEPT_TOKEN(anon_sym_published); + ACCEPT_TOKEN(anon_sym_modified); END_STATE(); case 259: - ACCEPT_TOKEN(sym_limitField); + ACCEPT_TOKEN(anon_sym_published); END_STATE(); case 260: - ACCEPT_TOKEN(sym_patternField); + ACCEPT_TOKEN(sym_limitField); END_STATE(); case 261: - ACCEPT_TOKEN(sym_booleanField); + ACCEPT_TOKEN(sym_patternField); END_STATE(); case 262: - ACCEPT_TOKEN(sym_numericField); + ACCEPT_TOKEN(sym_booleanField); END_STATE(); case 263: - ACCEPT_TOKEN(sym_visitTypeField); + ACCEPT_TOKEN(sym_numericField); END_STATE(); case 264: - ACCEPT_TOKEN(anon_sym_any); + ACCEPT_TOKEN(sym_visitTypeField); END_STATE(); case 265: - ACCEPT_TOKEN(anon_sym_bzr); + ACCEPT_TOKEN(anon_sym_any); END_STATE(); case 266: - ACCEPT_TOKEN(anon_sym_cran); + ACCEPT_TOKEN(anon_sym_bzr); END_STATE(); case 267: - ACCEPT_TOKEN(anon_sym_cvs); + ACCEPT_TOKEN(anon_sym_cran); END_STATE(); case 268: - ACCEPT_TOKEN(anon_sym_deb); + ACCEPT_TOKEN(anon_sym_cvs); END_STATE(); case 269: - 
ACCEPT_TOKEN(anon_sym_deposit); + ACCEPT_TOKEN(anon_sym_deb); END_STATE(); case 270: - ACCEPT_TOKEN(anon_sym_ftp); + ACCEPT_TOKEN(anon_sym_deposit); END_STATE(); case 271: - ACCEPT_TOKEN(anon_sym_hg); + ACCEPT_TOKEN(anon_sym_ftp); END_STATE(); case 272: - ACCEPT_TOKEN(anon_sym_git); + ACCEPT_TOKEN(anon_sym_hg); END_STATE(); case 273: - ACCEPT_TOKEN(anon_sym_nixguix); + ACCEPT_TOKEN(anon_sym_git); END_STATE(); case 274: - ACCEPT_TOKEN(anon_sym_npm); + ACCEPT_TOKEN(anon_sym_nixguix); END_STATE(); case 275: - ACCEPT_TOKEN(anon_sym_opam); + ACCEPT_TOKEN(anon_sym_npm); END_STATE(); case 276: - ACCEPT_TOKEN(anon_sym_pypi); + ACCEPT_TOKEN(anon_sym_opam); END_STATE(); case 277: - ACCEPT_TOKEN(anon_sym_svn); + ACCEPT_TOKEN(anon_sym_pypi); END_STATE(); case 278: - ACCEPT_TOKEN(anon_sym_tar); + ACCEPT_TOKEN(anon_sym_svn); END_STATE(); case 279: - ACCEPT_TOKEN(sym_listField); + ACCEPT_TOKEN(anon_sym_tar); END_STATE(); case 280: - ACCEPT_TOKEN(sym_dateField); + ACCEPT_TOKEN(sym_listField); END_STATE(); case 281: - ACCEPT_TOKEN(sym_rangeOp); + ACCEPT_TOKEN(sym_dateField); END_STATE(); case 282: ACCEPT_TOKEN(sym_rangeOp); - if (lookahead == '=') ADVANCE(281); END_STATE(); case 283: - ACCEPT_TOKEN(sym_equalOp); + ACCEPT_TOKEN(sym_rangeOp); + if (lookahead == '=') ADVANCE(282); END_STATE(); case 284: - ACCEPT_TOKEN(sym_choiceOp); + ACCEPT_TOKEN(sym_equalOp); END_STATE(); case 285: - ACCEPT_TOKEN(sym_isoDateTime); + ACCEPT_TOKEN(sym_containOp); END_STATE(); case 286: - ACCEPT_TOKEN(sym_isoDateTime); - if (lookahead == '+') ADVANCE(235); - if (lookahead == '.') ADVANCE(238); - if (lookahead == 'Z') ADVANCE(285); + ACCEPT_TOKEN(sym_choiceOp); END_STATE(); case 287: ACCEPT_TOKEN(sym_isoDateTime); - if (lookahead == '+') ADVANCE(235); - if (lookahead == ':') ADVANCE(228); - if (lookahead == 'Z') ADVANCE(285); END_STATE(); case 288: ACCEPT_TOKEN(sym_isoDateTime); - if (lookahead == '+') ADVANCE(235); - if (lookahead == 'Z') ADVANCE(285); + if (lookahead == '+') ADVANCE(236); + if (lookahead == '.') ADVANCE(239); + if (lookahead == 'Z') ADVANCE(287); END_STATE(); case 289: ACCEPT_TOKEN(sym_isoDateTime); - if (lookahead == '+') ADVANCE(235); + if (lookahead == '+') ADVANCE(236); + if (lookahead == ':') ADVANCE(228); + if (lookahead == 'Z') ADVANCE(287); + END_STATE(); + case 290: + ACCEPT_TOKEN(sym_isoDateTime); + if (lookahead == '+') ADVANCE(236); + if (lookahead == 'Z') ADVANCE(287); + END_STATE(); + case 291: + ACCEPT_TOKEN(sym_isoDateTime); if (lookahead == '\t' || lookahead == '\n' || lookahead == '\r' || lookahead == ' ' || - lookahead == 'T') ADVANCE(289); - if (lookahead == 'Z') ADVANCE(285); + lookahead == 'T') ADVANCE(216); if (('0' <= lookahead && lookahead <= '9')) ADVANCE(220); END_STATE(); - case 290: - ACCEPT_TOKEN(sym_number); - if (lookahead == '-') ADVANCE(224); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(294); - END_STATE(); - case 291: - ACCEPT_TOKEN(sym_number); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(290); - END_STATE(); case 292: ACCEPT_TOKEN(sym_number); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(291); + if (lookahead == '-') ADVANCE(225); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(296); END_STATE(); case 293: ACCEPT_TOKEN(sym_number); if (('0' <= lookahead && lookahead <= '9')) ADVANCE(292); END_STATE(); case 294: ACCEPT_TOKEN(sym_number); - if (('0' <= lookahead && lookahead <= '9')) ADVANCE(294); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(293); END_STATE(); case 295: - ACCEPT_TOKEN(sym_booleanTrue); + ACCEPT_TOKEN(sym_number); + 
if (('0' <= lookahead && lookahead <= '9')) ADVANCE(294); END_STATE(); case 296: - ACCEPT_TOKEN(sym_booleanFalse); + ACCEPT_TOKEN(sym_number); + if (('0' <= lookahead && lookahead <= '9')) ADVANCE(296); END_STATE(); case 297: - ACCEPT_TOKEN(sym_or); + ACCEPT_TOKEN(sym_booleanTrue); END_STATE(); case 298: + ACCEPT_TOKEN(sym_booleanFalse); + END_STATE(); + case 299: + ACCEPT_TOKEN(sym_or); + END_STATE(); + case 300: ACCEPT_TOKEN(sym_or); if (lookahead == 'i') ADVANCE(81); END_STATE(); - case 299: + case 301: ACCEPT_TOKEN(sym_and); END_STATE(); - case 300: + case 302: ACCEPT_TOKEN(sym_singleWord); - if (!sym_singleWord_character_set_1(lookahead)) ADVANCE(300); + if (!sym_singleWord_character_set_1(lookahead)) ADVANCE(302); END_STATE(); - case 301: + case 303: ACCEPT_TOKEN(aux_sym_stringContent_token1); if (lookahead == '\t' || lookahead == '\r' || - lookahead == ' ') ADVANCE(301); + lookahead == ' ') ADVANCE(303); if (lookahead != 0 && lookahead != '\n' && lookahead != '"' && lookahead != '\'' && - lookahead != '\\') ADVANCE(302); + lookahead != '\\') ADVANCE(304); END_STATE(); - case 302: + case 304: ACCEPT_TOKEN(aux_sym_stringContent_token1); if (lookahead != 0 && lookahead != '\n' && lookahead != '"' && lookahead != '\'' && - lookahead != '\\') ADVANCE(302); + lookahead != '\\') ADVANCE(304); END_STATE(); - case 303: + case 305: ACCEPT_TOKEN(sym_escape_sequence); END_STATE(); default: return false; } } static const TSLexMode ts_lex_modes[STATE_COUNT] = { [0] = {.lex_state = 0}, [1] = {.lex_state = 5}, [2] = {.lex_state = 5}, [3] = {.lex_state = 0}, [4] = {.lex_state = 0}, [5] = {.lex_state = 5}, [6] = {.lex_state = 0}, [7] = {.lex_state = 5}, [8] = {.lex_state = 0}, [9] = {.lex_state = 5}, [10] = {.lex_state = 0}, [11] = {.lex_state = 0}, - [12] = {.lex_state = 240}, - [13] = {.lex_state = 240}, - [14] = {.lex_state = 240}, - [15] = {.lex_state = 240}, - [16] = {.lex_state = 240}, - [17] = {.lex_state = 240}, - [18] = {.lex_state = 240}, - [19] = {.lex_state = 240}, - [20] = {.lex_state = 240}, - [21] = {.lex_state = 240}, - [22] = {.lex_state = 240}, - [23] = {.lex_state = 240}, - [24] = {.lex_state = 240}, - [25] = {.lex_state = 240}, - [26] = {.lex_state = 240}, - [27] = {.lex_state = 240}, - [28] = {.lex_state = 240}, - [29] = {.lex_state = 240}, - [30] = {.lex_state = 240}, - [31] = {.lex_state = 240}, - [32] = {.lex_state = 240}, - [33] = {.lex_state = 240}, - [34] = {.lex_state = 240}, - [35] = {.lex_state = 240}, - [36] = {.lex_state = 240}, - [37] = {.lex_state = 240}, - [38] = {.lex_state = 240}, - [39] = {.lex_state = 240}, - [40] = {.lex_state = 240}, - [41] = {.lex_state = 240}, - [42] = {.lex_state = 240}, - [43] = {.lex_state = 240}, + [12] = {.lex_state = 241}, + [13] = {.lex_state = 241}, + [14] = {.lex_state = 241}, + [15] = {.lex_state = 241}, + [16] = {.lex_state = 241}, + [17] = {.lex_state = 241}, + [18] = {.lex_state = 241}, + [19] = {.lex_state = 241}, + [20] = {.lex_state = 241}, + [21] = {.lex_state = 241}, + [22] = {.lex_state = 241}, + [23] = {.lex_state = 241}, + [24] = {.lex_state = 241}, + [25] = {.lex_state = 241}, + [26] = {.lex_state = 241}, + [27] = {.lex_state = 241}, + [28] = {.lex_state = 241}, + [29] = {.lex_state = 241}, + [30] = {.lex_state = 241}, + [31] = {.lex_state = 241}, + [32] = {.lex_state = 241}, + [33] = {.lex_state = 241}, + [34] = {.lex_state = 241}, + [35] = {.lex_state = 241}, + [36] = {.lex_state = 241}, + [37] = {.lex_state = 241}, + [38] = {.lex_state = 241}, + [39] = {.lex_state = 241}, + [40] = {.lex_state = 241}, + [41] = 
{.lex_state = 241}, + [42] = {.lex_state = 241}, + [43] = {.lex_state = 241}, [44] = {.lex_state = 3}, - [45] = {.lex_state = 240}, - [46] = {.lex_state = 240}, + [45] = {.lex_state = 241}, + [46] = {.lex_state = 241}, [47] = {.lex_state = 3}, [48] = {.lex_state = 3}, [49] = {.lex_state = 1}, [50] = {.lex_state = 1}, [51] = {.lex_state = 0}, [52] = {.lex_state = 1}, [53] = {.lex_state = 0}, [54] = {.lex_state = 0}, [55] = {.lex_state = 0}, [56] = {.lex_state = 0}, [57] = {.lex_state = 0}, [58] = {.lex_state = 0}, [59] = {.lex_state = 1}, [60] = {.lex_state = 0}, [61] = {.lex_state = 0}, [62] = {.lex_state = 0}, [63] = {.lex_state = 0}, [64] = {.lex_state = 0}, [65] = {.lex_state = 0}, [66] = {.lex_state = 0}, [67] = {.lex_state = 0}, [68] = {.lex_state = 0}, [69] = {.lex_state = 0}, [70] = {.lex_state = 0}, [71] = {.lex_state = 0}, [72] = {.lex_state = 0}, [73] = {.lex_state = 0}, [74] = {.lex_state = 0}, [75] = {.lex_state = 0}, [76] = {.lex_state = 0}, [77] = {.lex_state = 0}, [78] = {.lex_state = 0}, [79] = {.lex_state = 0}, - [80] = {.lex_state = 240}, + [80] = {.lex_state = 241}, [81] = {.lex_state = 3}, [82] = {.lex_state = 0}, [83] = {.lex_state = 0}, [84] = {.lex_state = 0}, [85] = {.lex_state = 0}, [86] = {.lex_state = 0}, [87] = {.lex_state = 0}, [88] = {.lex_state = 0}, [89] = {.lex_state = 5}, [90] = {.lex_state = 0}, - [91] = {.lex_state = 5}, + [91] = {.lex_state = 0}, [92] = {.lex_state = 0}, [93] = {.lex_state = 0}, [94] = {.lex_state = 5}, [95] = {.lex_state = 0}, [96] = {.lex_state = 0}, [97] = {.lex_state = 0}, [98] = {.lex_state = 0}, [99] = {.lex_state = 0}, [100] = {.lex_state = 5}, - [101] = {.lex_state = 240}, + [101] = {.lex_state = 241}, [102] = {.lex_state = 5}, - [103] = {.lex_state = 240}, - [104] = {.lex_state = 240}, + [103] = {.lex_state = 241}, + [104] = {.lex_state = 241}, [105] = {.lex_state = 0}, [106] = {.lex_state = 0}, - [107] = {.lex_state = 240}, + [107] = {.lex_state = 241}, [108] = {.lex_state = 0}, [109] = {.lex_state = 0}, [110] = {.lex_state = 0}, [111] = {.lex_state = 0}, [112] = {.lex_state = 0}, [113] = {.lex_state = 0}, [114] = {.lex_state = 0}, [115] = {.lex_state = 5}, [116] = {.lex_state = 0}, [117] = {.lex_state = 5}, [118] = {.lex_state = 0}, [119] = {.lex_state = 0}, [120] = {.lex_state = 0}, [121] = {.lex_state = 0}, [122] = {.lex_state = 0}, [123] = {.lex_state = 0}, [124] = {.lex_state = 0}, [125] = {.lex_state = 5}, }; static const uint16_t ts_parse_table[LARGE_STATE_COUNT][SYMBOL_COUNT] = { [0] = { [ts_builtin_sym_end] = ACTIONS(1), [anon_sym_LPAREN] = ACTIONS(1), [anon_sym_RPAREN] = ACTIONS(1), [sym_sortByField] = ACTIONS(1), [anon_sym_LBRACK] = ACTIONS(1), [anon_sym_SQUOTE] = ACTIONS(1), [anon_sym_DQUOTE] = ACTIONS(1), [anon_sym_COMMA] = ACTIONS(1), [anon_sym_RBRACK] = ACTIONS(1), [anon_sym_DASH] = ACTIONS(1), [anon_sym_visits] = ACTIONS(1), [anon_sym_last_visit] = ACTIONS(1), [anon_sym_last_eventful_visit] = ACTIONS(1), [anon_sym_last_revision] = ACTIONS(1), [anon_sym_last_release] = ACTIONS(1), [anon_sym_created] = ACTIONS(1), [anon_sym_modified] = ACTIONS(1), [anon_sym_published] = ACTIONS(1), [sym_limitField] = ACTIONS(1), [sym_patternField] = ACTIONS(1), [sym_booleanField] = ACTIONS(1), [sym_numericField] = ACTIONS(1), [sym_visitTypeField] = ACTIONS(1), [anon_sym_any] = ACTIONS(1), [anon_sym_bzr] = ACTIONS(1), [anon_sym_cran] = ACTIONS(1), [anon_sym_cvs] = ACTIONS(1), [anon_sym_deb] = ACTIONS(1), [anon_sym_deposit] = ACTIONS(1), [anon_sym_ftp] = ACTIONS(1), [anon_sym_hg] = ACTIONS(1), [anon_sym_git] = ACTIONS(1), 
[anon_sym_nixguix] = ACTIONS(1), [anon_sym_npm] = ACTIONS(1), [anon_sym_opam] = ACTIONS(1), [anon_sym_pypi] = ACTIONS(1), [anon_sym_svn] = ACTIONS(1), [anon_sym_tar] = ACTIONS(1), [sym_listField] = ACTIONS(1), [sym_dateField] = ACTIONS(1), [sym_rangeOp] = ACTIONS(1), [sym_equalOp] = ACTIONS(1), + [sym_containOp] = ACTIONS(1), [sym_choiceOp] = ACTIONS(1), [sym_isoDateTime] = ACTIONS(1), [sym_number] = ACTIONS(1), [sym_booleanTrue] = ACTIONS(1), [sym_booleanFalse] = ACTIONS(1), [sym_or] = ACTIONS(1), [sym_and] = ACTIONS(1), [sym_escape_sequence] = ACTIONS(1), }, [1] = { [sym_query] = STATE(116), [sym_filters] = STATE(21), [sym_filter] = STATE(31), [sym_patternFilter] = STATE(30), [sym_booleanFilter] = STATE(30), [sym_numericFilter] = STATE(30), [sym_boundedListFilter] = STATE(30), [sym_visitTypeFilter] = STATE(29), [sym_unboundedListFilter] = STATE(30), [sym_dateFilter] = STATE(30), [anon_sym_LPAREN] = ACTIONS(3), [sym_patternField] = ACTIONS(5), [sym_booleanField] = ACTIONS(7), [sym_numericField] = ACTIONS(9), [sym_visitTypeField] = ACTIONS(11), [sym_listField] = ACTIONS(13), [sym_dateField] = ACTIONS(15), }, }; static const uint16_t ts_small_parse_table[] = { [0] = 15, ACTIONS(3), 1, anon_sym_LPAREN, ACTIONS(5), 1, sym_patternField, ACTIONS(7), 1, sym_booleanField, ACTIONS(9), 1, sym_numericField, ACTIONS(11), 1, sym_visitTypeField, ACTIONS(13), 1, sym_listField, ACTIONS(15), 1, sym_dateField, ACTIONS(17), 1, sym_sortByField, ACTIONS(19), 1, sym_limitField, STATE(25), 1, sym_filters, STATE(29), 1, sym_visitTypeFilter, STATE(31), 1, sym_filter, STATE(55), 1, sym_sortBy, STATE(56), 1, sym_limit, STATE(30), 6, sym_patternFilter, sym_booleanFilter, sym_numericFilter, sym_boundedListFilter, sym_unboundedListFilter, sym_dateFilter, [51] = 5, ACTIONS(21), 1, anon_sym_SQUOTE, ACTIONS(23), 1, anon_sym_DQUOTE, STATE(96), 1, sym_visitTypeOptions, ACTIONS(25), 2, anon_sym_COMMA, anon_sym_RBRACK, ACTIONS(27), 15, anon_sym_any, anon_sym_bzr, anon_sym_cran, anon_sym_cvs, anon_sym_deb, anon_sym_deposit, anon_sym_ftp, anon_sym_hg, anon_sym_git, anon_sym_nixguix, anon_sym_npm, anon_sym_opam, anon_sym_pypi, anon_sym_svn, anon_sym_tar, [82] = 5, ACTIONS(29), 1, anon_sym_SQUOTE, ACTIONS(31), 1, anon_sym_DQUOTE, ACTIONS(33), 1, anon_sym_RBRACK, STATE(83), 1, sym_visitTypeOptions, ACTIONS(27), 15, anon_sym_any, anon_sym_bzr, anon_sym_cran, anon_sym_cvs, anon_sym_deb, anon_sym_deposit, anon_sym_ftp, anon_sym_hg, anon_sym_git, anon_sym_nixguix, anon_sym_npm, anon_sym_opam, anon_sym_pypi, anon_sym_svn, anon_sym_tar, [112] = 11, ACTIONS(3), 1, anon_sym_LPAREN, ACTIONS(5), 1, sym_patternField, ACTIONS(7), 1, sym_booleanField, ACTIONS(9), 1, sym_numericField, ACTIONS(11), 1, sym_visitTypeField, ACTIONS(13), 1, sym_listField, ACTIONS(15), 1, sym_dateField, STATE(29), 1, sym_visitTypeFilter, STATE(31), 1, sym_filter, STATE(80), 1, sym_filters, STATE(30), 6, sym_patternFilter, sym_booleanFilter, sym_numericFilter, sym_boundedListFilter, sym_unboundedListFilter, sym_dateFilter, [151] = 2, STATE(106), 1, sym_visitTypeOptions, ACTIONS(27), 15, anon_sym_any, anon_sym_bzr, anon_sym_cran, anon_sym_cvs, anon_sym_deb, anon_sym_deposit, anon_sym_ftp, anon_sym_hg, anon_sym_git, anon_sym_nixguix, anon_sym_npm, anon_sym_opam, anon_sym_pypi, anon_sym_svn, anon_sym_tar, [172] = 11, ACTIONS(3), 1, anon_sym_LPAREN, ACTIONS(5), 1, sym_patternField, ACTIONS(7), 1, sym_booleanField, ACTIONS(9), 1, sym_numericField, ACTIONS(11), 1, sym_visitTypeField, ACTIONS(13), 1, sym_listField, ACTIONS(15), 1, sym_dateField, STATE(25), 1, sym_filters, 
STATE(29), 1, sym_visitTypeFilter, STATE(31), 1, sym_filter, STATE(30), 6, sym_patternFilter, sym_booleanFilter, sym_numericFilter, sym_boundedListFilter, sym_unboundedListFilter, sym_dateFilter, [211] = 2, STATE(114), 1, sym_visitTypeOptions, ACTIONS(27), 15, anon_sym_any, anon_sym_bzr, anon_sym_cran, anon_sym_cvs, anon_sym_deb, anon_sym_deposit, anon_sym_ftp, anon_sym_hg, anon_sym_git, anon_sym_nixguix, anon_sym_npm, anon_sym_opam, anon_sym_pypi, anon_sym_svn, anon_sym_tar, [232] = 11, ACTIONS(3), 1, anon_sym_LPAREN, ACTIONS(5), 1, sym_patternField, ACTIONS(7), 1, sym_booleanField, ACTIONS(9), 1, sym_numericField, ACTIONS(11), 1, sym_visitTypeField, ACTIONS(13), 1, sym_listField, ACTIONS(15), 1, sym_dateField, STATE(29), 1, sym_visitTypeFilter, STATE(31), 1, sym_filter, STATE(35), 1, sym_filters, STATE(30), 6, sym_patternFilter, sym_booleanFilter, sym_numericFilter, sym_boundedListFilter, sym_unboundedListFilter, sym_dateFilter, [271] = 2, STATE(111), 1, sym_visitTypeOptions, ACTIONS(27), 15, anon_sym_any, anon_sym_bzr, anon_sym_cran, anon_sym_cvs, anon_sym_deb, anon_sym_deposit, anon_sym_ftp, anon_sym_hg, anon_sym_git, anon_sym_nixguix, anon_sym_npm, anon_sym_opam, anon_sym_pypi, anon_sym_svn, anon_sym_tar, [292] = 2, STATE(110), 1, sym_visitTypeOptions, ACTIONS(27), 15, anon_sym_any, anon_sym_bzr, anon_sym_cran, anon_sym_cvs, anon_sym_deb, anon_sym_deposit, anon_sym_ftp, anon_sym_hg, anon_sym_git, anon_sym_nixguix, anon_sym_npm, anon_sym_opam, anon_sym_pypi, anon_sym_svn, anon_sym_tar, [313] = 6, ACTIONS(35), 1, anon_sym_SQUOTE, ACTIONS(37), 1, anon_sym_DQUOTE, ACTIONS(41), 1, anon_sym_DASH, STATE(88), 1, sym_sortByOptions, ACTIONS(39), 2, anon_sym_COMMA, anon_sym_RBRACK, ACTIONS(43), 8, anon_sym_visits, anon_sym_last_visit, anon_sym_last_eventful_visit, anon_sym_last_revision, anon_sym_last_release, anon_sym_created, anon_sym_modified, anon_sym_published, [340] = 6, ACTIONS(41), 1, anon_sym_DASH, ACTIONS(45), 1, anon_sym_SQUOTE, ACTIONS(47), 1, anon_sym_DQUOTE, ACTIONS(49), 1, anon_sym_RBRACK, STATE(86), 1, sym_sortByOptions, ACTIONS(43), 8, anon_sym_visits, anon_sym_last_visit, anon_sym_last_eventful_visit, anon_sym_last_revision, anon_sym_last_release, anon_sym_created, anon_sym_modified, anon_sym_published, [366] = 3, ACTIONS(41), 1, anon_sym_DASH, STATE(108), 1, sym_sortByOptions, ACTIONS(43), 8, anon_sym_visits, anon_sym_last_visit, anon_sym_last_eventful_visit, anon_sym_last_revision, anon_sym_last_release, anon_sym_created, anon_sym_modified, anon_sym_published, [383] = 3, ACTIONS(41), 1, anon_sym_DASH, STATE(123), 1, sym_sortByOptions, ACTIONS(43), 8, anon_sym_visits, anon_sym_last_visit, anon_sym_last_eventful_visit, anon_sym_last_revision, anon_sym_last_release, anon_sym_created, anon_sym_modified, anon_sym_published, [400] = 3, ACTIONS(41), 1, anon_sym_DASH, STATE(122), 1, sym_sortByOptions, ACTIONS(43), 8, anon_sym_visits, anon_sym_last_visit, anon_sym_last_eventful_visit, anon_sym_last_revision, anon_sym_last_release, anon_sym_created, anon_sym_modified, anon_sym_published, [417] = 3, ACTIONS(41), 1, anon_sym_DASH, STATE(113), 1, sym_sortByOptions, ACTIONS(43), 8, anon_sym_visits, anon_sym_last_visit, anon_sym_last_eventful_visit, anon_sym_last_revision, anon_sym_last_release, anon_sym_created, anon_sym_modified, anon_sym_published, [434] = 1, ACTIONS(51), 8, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, anon_sym_COMMA, anon_sym_RBRACK, sym_limitField, sym_or, sym_and, [445] = 1, ACTIONS(53), 8, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, anon_sym_COMMA, 
anon_sym_RBRACK, sym_limitField, sym_or, sym_and, [456] = 1, ACTIONS(55), 8, anon_sym_visits, anon_sym_last_visit, anon_sym_last_eventful_visit, anon_sym_last_revision, anon_sym_last_release, anon_sym_created, anon_sym_modified, anon_sym_published, [467] = 7, ACTIONS(17), 1, sym_sortByField, ACTIONS(19), 1, sym_limitField, ACTIONS(57), 1, ts_builtin_sym_end, ACTIONS(59), 1, sym_or, ACTIONS(61), 1, sym_and, STATE(51), 1, sym_sortBy, STATE(57), 1, sym_limit, [489] = 1, ACTIONS(63), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [498] = 1, ACTIONS(65), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [507] = 1, ACTIONS(67), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [516] = 1, ACTIONS(69), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [525] = 1, ACTIONS(71), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [534] = 1, ACTIONS(73), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [543] = 1, ACTIONS(75), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [552] = 1, ACTIONS(77), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [561] = 1, ACTIONS(79), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [570] = 1, ACTIONS(81), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [579] = 1, ACTIONS(83), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [588] = 1, ACTIONS(85), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [597] = 1, ACTIONS(87), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [606] = 2, ACTIONS(89), 1, sym_and, ACTIONS(69), 5, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, [617] = 1, ACTIONS(91), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [626] = 1, ACTIONS(93), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [635] = 1, ACTIONS(95), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [644] = 1, ACTIONS(97), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [653] = 1, ACTIONS(99), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [662] = 1, ACTIONS(101), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [671] = 1, ACTIONS(103), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [680] = 1, ACTIONS(105), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [689] = 5, ACTIONS(107), 1, anon_sym_SQUOTE, ACTIONS(109), 1, anon_sym_DQUOTE, ACTIONS(113), 1, sym_singleWord, STATE(99), 1, sym_string, ACTIONS(111), 2, anon_sym_COMMA, anon_sym_RBRACK, [706] = 1, ACTIONS(115), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [715] = 1, ACTIONS(117), 6, ts_builtin_sym_end, anon_sym_RPAREN, sym_sortByField, sym_limitField, sym_or, sym_and, [724] = 5, ACTIONS(107), 1, anon_sym_SQUOTE, ACTIONS(109), 1, anon_sym_DQUOTE, ACTIONS(113), 1, sym_singleWord, ACTIONS(119), 1, anon_sym_RBRACK, 
STATE(69), 1, sym_string, [740] = 5, ACTIONS(107), 1, anon_sym_SQUOTE, ACTIONS(109), 1, anon_sym_DQUOTE, ACTIONS(113), 1, sym_singleWord, STATE(38), 1, sym_patternVal, STATE(39), 1, sym_string, [756] = 3, STATE(50), 1, aux_sym_stringContent_repeat1, ACTIONS(121), 2, anon_sym_SQUOTE, anon_sym_DQUOTE, ACTIONS(123), 2, aux_sym_stringContent_token1, sym_escape_sequence, [768] = 3, STATE(50), 1, aux_sym_stringContent_repeat1, ACTIONS(125), 2, anon_sym_SQUOTE, anon_sym_DQUOTE, ACTIONS(127), 2, aux_sym_stringContent_token1, sym_escape_sequence, [780] = 4, ACTIONS(19), 1, sym_limitField, ACTIONS(130), 1, ts_builtin_sym_end, ACTIONS(132), 1, sym_and, STATE(119), 1, sym_limit, [793] = 3, STATE(49), 1, aux_sym_stringContent_repeat1, STATE(124), 1, sym_stringContent, ACTIONS(134), 2, aux_sym_stringContent_token1, sym_escape_sequence, [804] = 1, ACTIONS(136), 4, anon_sym_SQUOTE, anon_sym_DQUOTE, anon_sym_COMMA, anon_sym_RBRACK, [811] = 1, ACTIONS(138), 4, anon_sym_SQUOTE, anon_sym_DQUOTE, anon_sym_COMMA, anon_sym_RBRACK, [818] = 4, ACTIONS(19), 1, sym_limitField, ACTIONS(140), 1, ts_builtin_sym_end, ACTIONS(142), 1, sym_and, STATE(109), 1, sym_limit, [831] = 4, ACTIONS(17), 1, sym_sortByField, ACTIONS(140), 1, ts_builtin_sym_end, ACTIONS(144), 1, sym_and, STATE(109), 1, sym_sortBy, [844] = 4, ACTIONS(17), 1, sym_sortByField, ACTIONS(130), 1, ts_builtin_sym_end, ACTIONS(146), 1, sym_and, STATE(119), 1, sym_sortBy, [857] = 1, ACTIONS(148), 4, anon_sym_SQUOTE, anon_sym_DQUOTE, anon_sym_COMMA, anon_sym_RBRACK, [864] = 3, STATE(49), 1, aux_sym_stringContent_repeat1, STATE(120), 1, sym_stringContent, ACTIONS(134), 2, aux_sym_stringContent_token1, sym_escape_sequence, [875] = 2, STATE(41), 1, sym_booleanVal, ACTIONS(150), 2, sym_booleanTrue, sym_booleanFalse, [883] = 3, ACTIONS(19), 1, sym_limitField, ACTIONS(152), 1, ts_builtin_sym_end, STATE(112), 1, sym_limit, [893] = 1, ACTIONS(154), 3, ts_builtin_sym_end, sym_limitField, sym_and, [899] = 3, ACTIONS(17), 1, sym_sortByField, ACTIONS(140), 1, ts_builtin_sym_end, STATE(109), 1, sym_sortBy, [909] = 3, ACTIONS(156), 1, anon_sym_COMMA, ACTIONS(158), 1, anon_sym_RBRACK, STATE(66), 1, aux_sym_sortByVal_repeat1, [919] = 1, ACTIONS(160), 3, ts_builtin_sym_end, sym_limitField, sym_and, [925] = 3, ACTIONS(162), 1, anon_sym_COMMA, ACTIONS(165), 1, anon_sym_RBRACK, STATE(66), 1, aux_sym_sortByVal_repeat1, [935] = 3, ACTIONS(167), 1, anon_sym_COMMA, ACTIONS(169), 1, anon_sym_RBRACK, STATE(73), 1, aux_sym_visitTypeVal_repeat1, [945] = 1, ACTIONS(171), 3, ts_builtin_sym_end, sym_limitField, sym_and, [951] = 3, ACTIONS(173), 1, anon_sym_COMMA, ACTIONS(175), 1, anon_sym_RBRACK, STATE(85), 1, aux_sym_listVal_repeat1, [961] = 3, ACTIONS(156), 1, anon_sym_COMMA, ACTIONS(177), 1, anon_sym_RBRACK, STATE(64), 1, aux_sym_sortByVal_repeat1, [971] = 1, ACTIONS(179), 3, ts_builtin_sym_end, sym_limitField, sym_and, [977] = 1, ACTIONS(181), 3, ts_builtin_sym_end, sym_sortByField, sym_and, [983] = 3, ACTIONS(167), 1, anon_sym_COMMA, ACTIONS(183), 1, anon_sym_RBRACK, STATE(79), 1, aux_sym_visitTypeVal_repeat1, [993] = 3, ACTIONS(19), 1, sym_limitField, ACTIONS(140), 1, ts_builtin_sym_end, STATE(109), 1, sym_limit, [1003] = 3, ACTIONS(17), 1, sym_sortByField, ACTIONS(152), 1, ts_builtin_sym_end, STATE(112), 1, sym_sortBy, [1013] = 3, ACTIONS(156), 1, anon_sym_COMMA, ACTIONS(185), 1, anon_sym_RBRACK, STATE(66), 1, aux_sym_sortByVal_repeat1, [1023] = 1, ACTIONS(187), 3, ts_builtin_sym_end, sym_limitField, sym_and, [1029] = 3, ACTIONS(189), 1, anon_sym_COMMA, ACTIONS(192), 1, 
anon_sym_RBRACK, STATE(78), 1, aux_sym_listVal_repeat1, [1039] = 3, ACTIONS(194), 1, anon_sym_COMMA, ACTIONS(197), 1, anon_sym_RBRACK, STATE(79), 1, aux_sym_visitTypeVal_repeat1, [1049] = 3, ACTIONS(59), 1, sym_or, ACTIONS(89), 1, sym_and, ACTIONS(199), 1, anon_sym_RPAREN, [1059] = 1, ACTIONS(201), 3, anon_sym_SQUOTE, anon_sym_DQUOTE, sym_singleWord, [1065] = 3, ACTIONS(167), 1, anon_sym_COMMA, ACTIONS(203), 1, anon_sym_RBRACK, STATE(79), 1, aux_sym_visitTypeVal_repeat1, [1075] = 3, ACTIONS(167), 1, anon_sym_COMMA, ACTIONS(205), 1, anon_sym_RBRACK, STATE(82), 1, aux_sym_visitTypeVal_repeat1, [1085] = 1, ACTIONS(207), 3, ts_builtin_sym_end, sym_limitField, sym_and, [1091] = 3, ACTIONS(173), 1, anon_sym_COMMA, ACTIONS(209), 1, anon_sym_RBRACK, STATE(78), 1, aux_sym_listVal_repeat1, [1101] = 3, ACTIONS(156), 1, anon_sym_COMMA, ACTIONS(211), 1, anon_sym_RBRACK, STATE(76), 1, aux_sym_sortByVal_repeat1, [1111] = 2, ACTIONS(213), 1, sym_choiceOp, STATE(98), 1, sym_listOp, [1118] = 1, ACTIONS(215), 2, anon_sym_COMMA, anon_sym_RBRACK, [1123] = 2, ACTIONS(217), 1, sym_number, STATE(43), 1, sym_numberVal, [1130] = 1, ACTIONS(219), 2, anon_sym_COMMA, anon_sym_RBRACK, [1135] = 2, ACTIONS(221), 1, - sym_equalOp, + sym_containOp, STATE(48), 1, sym_patternOp, [1142] = 2, ACTIONS(223), 1, anon_sym_LBRACK, STATE(71), 1, sym_sortByVal, [1149] = 1, ACTIONS(225), 2, anon_sym_COMMA, anon_sym_RBRACK, [1154] = 2, ACTIONS(227), 1, sym_equalOp, STATE(60), 1, sym_booleanOp, [1161] = 2, ACTIONS(229), 1, anon_sym_LBRACK, STATE(45), 1, sym_visitTypeVal, [1168] = 1, ACTIONS(231), 2, anon_sym_COMMA, anon_sym_RBRACK, [1173] = 1, ACTIONS(233), 2, sym_booleanTrue, sym_booleanFalse, [1178] = 2, ACTIONS(235), 1, anon_sym_LBRACK, STATE(46), 1, sym_listVal, [1185] = 1, ACTIONS(237), 2, anon_sym_COMMA, anon_sym_RBRACK, [1190] = 2, ACTIONS(239), 1, sym_equalOp, STATE(92), 1, sym_sortByOp, [1197] = 2, ACTIONS(241), 1, sym_rangeOp, STATE(89), 1, sym_numericOp, [1204] = 2, ACTIONS(243), 1, sym_equalOp, STATE(95), 1, sym_visitTypeOp, [1211] = 2, ACTIONS(245), 1, sym_isoDateTime, STATE(26), 1, sym_dateVal, [1218] = 2, ACTIONS(247), 1, sym_rangeOp, STATE(103), 1, sym_dateOp, [1225] = 1, ACTIONS(249), 1, anon_sym_LBRACK, [1229] = 1, ACTIONS(251), 1, anon_sym_SQUOTE, [1233] = 1, ACTIONS(253), 1, sym_isoDateTime, [1237] = 1, ACTIONS(255), 1, anon_sym_DQUOTE, [1241] = 1, ACTIONS(152), 1, ts_builtin_sym_end, [1245] = 1, ACTIONS(257), 1, anon_sym_SQUOTE, [1249] = 1, ACTIONS(257), 1, anon_sym_DQUOTE, [1253] = 1, ACTIONS(259), 1, ts_builtin_sym_end, [1257] = 1, ACTIONS(255), 1, anon_sym_SQUOTE, [1261] = 1, ACTIONS(251), 1, anon_sym_DQUOTE, [1265] = 1, ACTIONS(261), 1, sym_number, [1269] = 1, ACTIONS(263), 1, ts_builtin_sym_end, [1273] = 1, ACTIONS(265), 1, sym_equalOp, [1277] = 1, ACTIONS(267), 1, anon_sym_LBRACK, [1281] = 1, ACTIONS(140), 1, ts_builtin_sym_end, [1285] = 1, ACTIONS(269), 1, anon_sym_DQUOTE, [1289] = 1, ACTIONS(271), 1, anon_sym_LBRACK, [1293] = 1, ACTIONS(273), 1, anon_sym_SQUOTE, [1297] = 1, ACTIONS(273), 1, anon_sym_DQUOTE, [1301] = 1, ACTIONS(269), 1, anon_sym_SQUOTE, [1305] = 1, ACTIONS(275), 1, sym_number, }; static const uint32_t ts_small_parse_table_map[] = { [SMALL_STATE(2)] = 0, [SMALL_STATE(3)] = 51, [SMALL_STATE(4)] = 82, [SMALL_STATE(5)] = 112, [SMALL_STATE(6)] = 151, [SMALL_STATE(7)] = 172, [SMALL_STATE(8)] = 211, [SMALL_STATE(9)] = 232, [SMALL_STATE(10)] = 271, [SMALL_STATE(11)] = 292, [SMALL_STATE(12)] = 313, [SMALL_STATE(13)] = 340, [SMALL_STATE(14)] = 366, [SMALL_STATE(15)] = 383, [SMALL_STATE(16)] = 400, 
[SMALL_STATE(17)] = 417, [SMALL_STATE(18)] = 434, [SMALL_STATE(19)] = 445, [SMALL_STATE(20)] = 456, [SMALL_STATE(21)] = 467, [SMALL_STATE(22)] = 489, [SMALL_STATE(23)] = 498, [SMALL_STATE(24)] = 507, [SMALL_STATE(25)] = 516, [SMALL_STATE(26)] = 525, [SMALL_STATE(27)] = 534, [SMALL_STATE(28)] = 543, [SMALL_STATE(29)] = 552, [SMALL_STATE(30)] = 561, [SMALL_STATE(31)] = 570, [SMALL_STATE(32)] = 579, [SMALL_STATE(33)] = 588, [SMALL_STATE(34)] = 597, [SMALL_STATE(35)] = 606, [SMALL_STATE(36)] = 617, [SMALL_STATE(37)] = 626, [SMALL_STATE(38)] = 635, [SMALL_STATE(39)] = 644, [SMALL_STATE(40)] = 653, [SMALL_STATE(41)] = 662, [SMALL_STATE(42)] = 671, [SMALL_STATE(43)] = 680, [SMALL_STATE(44)] = 689, [SMALL_STATE(45)] = 706, [SMALL_STATE(46)] = 715, [SMALL_STATE(47)] = 724, [SMALL_STATE(48)] = 740, [SMALL_STATE(49)] = 756, [SMALL_STATE(50)] = 768, [SMALL_STATE(51)] = 780, [SMALL_STATE(52)] = 793, [SMALL_STATE(53)] = 804, [SMALL_STATE(54)] = 811, [SMALL_STATE(55)] = 818, [SMALL_STATE(56)] = 831, [SMALL_STATE(57)] = 844, [SMALL_STATE(58)] = 857, [SMALL_STATE(59)] = 864, [SMALL_STATE(60)] = 875, [SMALL_STATE(61)] = 883, [SMALL_STATE(62)] = 893, [SMALL_STATE(63)] = 899, [SMALL_STATE(64)] = 909, [SMALL_STATE(65)] = 919, [SMALL_STATE(66)] = 925, [SMALL_STATE(67)] = 935, [SMALL_STATE(68)] = 945, [SMALL_STATE(69)] = 951, [SMALL_STATE(70)] = 961, [SMALL_STATE(71)] = 971, [SMALL_STATE(72)] = 977, [SMALL_STATE(73)] = 983, [SMALL_STATE(74)] = 993, [SMALL_STATE(75)] = 1003, [SMALL_STATE(76)] = 1013, [SMALL_STATE(77)] = 1023, [SMALL_STATE(78)] = 1029, [SMALL_STATE(79)] = 1039, [SMALL_STATE(80)] = 1049, [SMALL_STATE(81)] = 1059, [SMALL_STATE(82)] = 1065, [SMALL_STATE(83)] = 1075, [SMALL_STATE(84)] = 1085, [SMALL_STATE(85)] = 1091, [SMALL_STATE(86)] = 1101, [SMALL_STATE(87)] = 1111, [SMALL_STATE(88)] = 1118, [SMALL_STATE(89)] = 1123, [SMALL_STATE(90)] = 1130, [SMALL_STATE(91)] = 1135, [SMALL_STATE(92)] = 1142, [SMALL_STATE(93)] = 1149, [SMALL_STATE(94)] = 1154, [SMALL_STATE(95)] = 1161, [SMALL_STATE(96)] = 1168, [SMALL_STATE(97)] = 1173, [SMALL_STATE(98)] = 1178, [SMALL_STATE(99)] = 1185, [SMALL_STATE(100)] = 1190, [SMALL_STATE(101)] = 1197, [SMALL_STATE(102)] = 1204, [SMALL_STATE(103)] = 1211, [SMALL_STATE(104)] = 1218, [SMALL_STATE(105)] = 1225, [SMALL_STATE(106)] = 1229, [SMALL_STATE(107)] = 1233, [SMALL_STATE(108)] = 1237, [SMALL_STATE(109)] = 1241, [SMALL_STATE(110)] = 1245, [SMALL_STATE(111)] = 1249, [SMALL_STATE(112)] = 1253, [SMALL_STATE(113)] = 1257, [SMALL_STATE(114)] = 1261, [SMALL_STATE(115)] = 1265, [SMALL_STATE(116)] = 1269, [SMALL_STATE(117)] = 1273, [SMALL_STATE(118)] = 1277, [SMALL_STATE(119)] = 1281, [SMALL_STATE(120)] = 1285, [SMALL_STATE(121)] = 1289, [SMALL_STATE(122)] = 1293, [SMALL_STATE(123)] = 1297, [SMALL_STATE(124)] = 1301, [SMALL_STATE(125)] = 1305, }; static const TSParseActionEntry ts_parse_actions[] = { [0] = {.entry = {.count = 0, .reusable = false}}, [1] = {.entry = {.count = 1, .reusable = false}}, RECOVER(), [3] = {.entry = {.count = 1, .reusable = true}}, SHIFT(5), [5] = {.entry = {.count = 1, .reusable = true}}, SHIFT(91), [7] = {.entry = {.count = 1, .reusable = true}}, SHIFT(94), [9] = {.entry = {.count = 1, .reusable = true}}, SHIFT(101), [11] = {.entry = {.count = 1, .reusable = true}}, SHIFT(102), [13] = {.entry = {.count = 1, .reusable = true}}, SHIFT(87), [15] = {.entry = {.count = 1, .reusable = true}}, SHIFT(104), [17] = {.entry = {.count = 1, .reusable = true}}, SHIFT(100), [19] = {.entry = {.count = 1, .reusable = true}}, SHIFT(117), [21] = {.entry = {.count = 1, 
.reusable = true}}, SHIFT(11), [23] = {.entry = {.count = 1, .reusable = true}}, SHIFT(10), [25] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_visitTypeVal_repeat1, 1), [27] = {.entry = {.count = 1, .reusable = true}}, SHIFT(58), [29] = {.entry = {.count = 1, .reusable = true}}, SHIFT(6), [31] = {.entry = {.count = 1, .reusable = true}}, SHIFT(8), [33] = {.entry = {.count = 1, .reusable = true}}, SHIFT(28), [35] = {.entry = {.count = 1, .reusable = true}}, SHIFT(16), [37] = {.entry = {.count = 1, .reusable = true}}, SHIFT(15), [39] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_sortByVal_repeat1, 1), [41] = {.entry = {.count = 1, .reusable = true}}, SHIFT(20), [43] = {.entry = {.count = 1, .reusable = true}}, SHIFT(54), [45] = {.entry = {.count = 1, .reusable = true}}, SHIFT(17), [47] = {.entry = {.count = 1, .reusable = true}}, SHIFT(14), [49] = {.entry = {.count = 1, .reusable = true}}, SHIFT(84), [51] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_string, 1), [53] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_string, 3), [55] = {.entry = {.count = 1, .reusable = true}}, SHIFT(53), [57] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_query, 1), [59] = {.entry = {.count = 1, .reusable = true}}, SHIFT(9), [61] = {.entry = {.count = 1, .reusable = true}}, SHIFT(2), [63] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_listVal, 4, .production_id = 5), [65] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_visitTypeVal, 5, .production_id = 7), [67] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_visitTypeVal, 3, .production_id = 4), [69] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_filters, 3, .production_id = 3), [71] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_dateFilter, 3, .production_id = 2), [73] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_listVal, 2), [75] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_visitTypeVal, 2), [77] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_boundedListFilter, 1), [79] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_filter, 1, .production_id = 1), [81] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_filters, 1), [83] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_listVal, 3, .production_id = 4), [85] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_filters, 3), [87] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_visitTypeVal, 4, .production_id = 5), [89] = {.entry = {.count = 1, .reusable = true}}, SHIFT(7), [91] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_visitTypeVal, 6, .production_id = 8), [93] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_dateVal, 1), [95] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_patternFilter, 3, .production_id = 2), [97] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_patternVal, 1), [99] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_booleanVal, 1), [101] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_booleanFilter, 3, .production_id = 2), [103] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_numberVal, 1), [105] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_numericFilter, 3, .production_id = 2), [107] = {.entry = {.count = 1, .reusable = true}}, SHIFT(52), [109] = {.entry = {.count = 1, .reusable = true}}, SHIFT(59), [111] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_listVal_repeat1, 1), [113] = {.entry = {.count = 1, .reusable = true}}, SHIFT(18), [115] = {.entry = 
{.count = 1, .reusable = true}}, REDUCE(sym_visitTypeFilter, 3, .production_id = 2), [117] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_unboundedListFilter, 3, .production_id = 2), [119] = {.entry = {.count = 1, .reusable = true}}, SHIFT(27), [121] = {.entry = {.count = 1, .reusable = false}}, REDUCE(sym_stringContent, 1), [123] = {.entry = {.count = 1, .reusable = true}}, SHIFT(50), [125] = {.entry = {.count = 1, .reusable = false}}, REDUCE(aux_sym_stringContent_repeat1, 2), [127] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_stringContent_repeat1, 2), SHIFT_REPEAT(50), [130] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_query, 2), [132] = {.entry = {.count = 1, .reusable = true}}, SHIFT(74), [134] = {.entry = {.count = 1, .reusable = true}}, SHIFT(49), [136] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByOptions, 2), [138] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByOptions, 1), [140] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_query, 3), [142] = {.entry = {.count = 1, .reusable = true}}, SHIFT(61), [144] = {.entry = {.count = 1, .reusable = true}}, SHIFT(75), [146] = {.entry = {.count = 1, .reusable = true}}, SHIFT(63), [148] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_visitTypeOptions, 1), [150] = {.entry = {.count = 1, .reusable = true}}, SHIFT(40), [152] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_query, 4), [154] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByVal, 6, .production_id = 8), [156] = {.entry = {.count = 1, .reusable = true}}, SHIFT(12), [158] = {.entry = {.count = 1, .reusable = true}}, SHIFT(62), [160] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByVal, 5, .production_id = 7), [162] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_sortByVal_repeat1, 2, .production_id = 6), SHIFT_REPEAT(12), [165] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_sortByVal_repeat1, 2, .production_id = 6), [167] = {.entry = {.count = 1, .reusable = true}}, SHIFT(3), [169] = {.entry = {.count = 1, .reusable = true}}, SHIFT(23), [171] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByVal, 4, .production_id = 5), [173] = {.entry = {.count = 1, .reusable = true}}, SHIFT(44), [175] = {.entry = {.count = 1, .reusable = true}}, SHIFT(32), [177] = {.entry = {.count = 1, .reusable = true}}, SHIFT(65), [179] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortBy, 3, .production_id = 2), [181] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_limit, 3, .production_id = 2), [183] = {.entry = {.count = 1, .reusable = true}}, SHIFT(36), [185] = {.entry = {.count = 1, .reusable = true}}, SHIFT(68), [187] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByVal, 3, .production_id = 4), [189] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_listVal_repeat1, 2, .production_id = 6), SHIFT_REPEAT(44), [192] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_listVal_repeat1, 2, .production_id = 6), [194] = {.entry = {.count = 2, .reusable = true}}, REDUCE(aux_sym_visitTypeVal_repeat1, 2, .production_id = 6), SHIFT_REPEAT(3), [197] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_visitTypeVal_repeat1, 2, .production_id = 6), [199] = {.entry = {.count = 1, .reusable = true}}, SHIFT(33), [201] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_patternOp, 1), [203] = {.entry = {.count = 1, .reusable = true}}, SHIFT(34), [205] = {.entry = {.count = 1, .reusable = true}}, SHIFT(24), [207] = 
{.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByVal, 2), [209] = {.entry = {.count = 1, .reusable = true}}, SHIFT(22), [211] = {.entry = {.count = 1, .reusable = true}}, SHIFT(77), [213] = {.entry = {.count = 1, .reusable = true}}, SHIFT(118), [215] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_sortByVal_repeat1, 2, .production_id = 4), [217] = {.entry = {.count = 1, .reusable = true}}, SHIFT(42), [219] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_sortByVal_repeat1, 4, .production_id = 7), [221] = {.entry = {.count = 1, .reusable = true}}, SHIFT(81), [223] = {.entry = {.count = 1, .reusable = true}}, SHIFT(13), [225] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_visitTypeVal_repeat1, 4, .production_id = 7), [227] = {.entry = {.count = 1, .reusable = true}}, SHIFT(97), [229] = {.entry = {.count = 1, .reusable = true}}, SHIFT(4), [231] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_visitTypeVal_repeat1, 2, .production_id = 4), [233] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_booleanOp, 1), [235] = {.entry = {.count = 1, .reusable = true}}, SHIFT(47), [237] = {.entry = {.count = 1, .reusable = true}}, REDUCE(aux_sym_listVal_repeat1, 2, .production_id = 4), [239] = {.entry = {.count = 1, .reusable = true}}, SHIFT(121), [241] = {.entry = {.count = 1, .reusable = true}}, SHIFT(125), [243] = {.entry = {.count = 1, .reusable = true}}, SHIFT(105), [245] = {.entry = {.count = 1, .reusable = true}}, SHIFT(37), [247] = {.entry = {.count = 1, .reusable = true}}, SHIFT(107), [249] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_visitTypeOp, 1), [251] = {.entry = {.count = 1, .reusable = true}}, SHIFT(67), [253] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_dateOp, 1), [255] = {.entry = {.count = 1, .reusable = true}}, SHIFT(70), [257] = {.entry = {.count = 1, .reusable = true}}, SHIFT(93), [259] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_query, 5), [261] = {.entry = {.count = 1, .reusable = true}}, SHIFT(72), [263] = {.entry = {.count = 1, .reusable = true}}, ACCEPT_INPUT(), [265] = {.entry = {.count = 1, .reusable = true}}, SHIFT(115), [267] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_listOp, 1), [269] = {.entry = {.count = 1, .reusable = true}}, SHIFT(19), [271] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_sortByOp, 1), [273] = {.entry = {.count = 1, .reusable = true}}, SHIFT(90), [275] = {.entry = {.count = 1, .reusable = true}}, REDUCE(sym_numericOp, 1), }; #ifdef __cplusplus extern "C" { #endif #ifdef _WIN32 #define extern __declspec(dllexport) #endif extern const TSLanguage *tree_sitter_swh_search_ql(void) { static const TSLanguage language = { .version = LANGUAGE_VERSION, .symbol_count = SYMBOL_COUNT, .alias_count = ALIAS_COUNT, .token_count = TOKEN_COUNT, .external_token_count = EXTERNAL_TOKEN_COUNT, .state_count = STATE_COUNT, .large_state_count = LARGE_STATE_COUNT, .production_id_count = PRODUCTION_ID_COUNT, .field_count = FIELD_COUNT, .max_alias_sequence_length = MAX_ALIAS_SEQUENCE_LENGTH, .parse_table = &ts_parse_table[0][0], .small_parse_table = ts_small_parse_table, .small_parse_table_map = ts_small_parse_table_map, .parse_actions = ts_parse_actions, .symbol_names = ts_symbol_names, .field_names = ts_field_names, .field_map_slices = ts_field_map_slices, .field_map_entries = ts_field_map_entries, .symbol_metadata = ts_symbol_metadata, .public_symbol_map = ts_symbol_map, .alias_map = ts_non_terminal_alias_map, .alias_sequences = &ts_alias_sequences[0][0], 
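/* The remaining fields wire in the generated lexer: ts_lex_modes selects a
   lexing mode for each parse state, and ts_lex is the generated lexing
   function itself. */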
.lex_modes = ts_lex_modes, .lex_fn = ts_lex, }; return &language; } #ifdef __cplusplus } #endif diff --git a/swh/search/query_language/test/corpus/combinations.txt b/swh/search/query_language/test/corpus/combinations.txt index 999ed76..7953e96 100644 --- a/swh/search/query_language/test/corpus/combinations.txt +++ b/swh/search/query_language/test/corpus/combinations.txt @@ -1,82 +1,82 @@ ============================== Empty query (should throw error) ============================== --- (ERROR) ================== Origins with django as keyword, python language, and more than 5 visits ================== -origin = django and language in ["python"] and visits >= 5 +origin : django and language in ["python"] and visits >= 5 --- -(query (filters (filters (filters (filter (patternFilter (patternField) (patternOp (equalOp)) (patternVal (string (singleWord)))))) (and) (filters (filter (unboundedListFilter (listField) (listOp (choiceOp)) (listVal (string (stringContent))))))) (and) (filters (filter (numericFilter (numericField) (numericOp (rangeOp)) (numberVal (number))))))) +(query (filters (filters (filters (filter (patternFilter (patternField) (patternOp (containOp)) (patternVal (string (singleWord)))))) (and) (filters (filter (unboundedListFilter (listField) (listOp (choiceOp)) (listVal (string (stringContent))))))) (and) (filters (filter (numericFilter (numericField) (numericOp (rangeOp)) (numberVal (number))))))) ================== 10 origins with latest revision after 2020-01-01 ================== last_revision > 2020-01-01 limit = 10 --- (query (filters (filter (dateFilter (dateField) (dateOp (rangeOp)) (dateVal (isoDateTime))))) (limit (limitField) (equalOp) (number))) ================== Origins with last visit date not in 2020-2021 (sorted by number of visits) ================== last_visit > 2021-01-01 or last_visit < 2020-01-01 sort_by = ["visits"] --- (query (filters (filters (filter (dateFilter (dateField) (dateOp (rangeOp)) (dateVal (isoDateTime))))) (or) (filters (filter (dateFilter (dateField) (dateOp (rangeOp)) (dateVal (isoDateTime)))))) (sortBy (sortByField) (sortByOp (equalOp)) (sortByVal (sortByOptions)))) ================== Unvisited origins with kubernetes in metadata or minikube in url ================== -visited = false and metadata = "kubernetes" or origin = "minikube" +visited = false and metadata : "kubernetes" or origin : "minikube" --- -(query (filters (filters (filters (filter (booleanFilter (booleanField) (booleanOp (equalOp)) (booleanVal (booleanFalse))))) (and) (filters (filter (patternFilter (patternField) (patternOp (equalOp)) (patternVal (string (stringContent))))))) (or) (filters (filter (patternFilter (patternField) (patternOp (equalOp)) (patternVal (string (stringContent)))))))) +(query (filters (filters (filters (filter (booleanFilter (booleanField) (booleanOp (equalOp)) (booleanVal (booleanFalse))))) (and) (filters (filter (patternFilter (patternField) (patternOp (containOp)) (patternVal (string (stringContent))))))) (or) (filters (filter (patternFilter (patternField) (patternOp (containOp)) (patternVal (string (stringContent)))))))) ================== Origins with "orchestration" or "kubectl" as keywords and language as "go" or "rust" ================== keyword in ["orchestration", "kubectl"] and language in ["go", "rust"] --- (query (filters (filters (filter (unboundedListFilter (listField) (listOp (choiceOp)) (listVal (string (stringContent)) (string (stringContent)))))) (and) (filters (filter (unboundedListFilter (listField) (listOp (choiceOp)) (listVal 
(string (stringContent)) (string (stringContent)))))))) ================== Origins with a GPL-3 license that have "debian" in their url or have visit type as "deb" ================== -(origin = debian or visit_type = ["deb"]) and license in ["GPL-3"] +(origin : debian or visit_type = ["deb"]) and license in ["GPL-3"] --- -(query (filters (filters (filters (filters (filter (patternFilter (patternField) (patternOp (equalOp)) (patternVal (string (singleWord)))))) (or) (filters (filter (boundedListFilter (visitTypeFilter (visitTypeField) (visitTypeOp (equalOp)) (visitTypeVal (visitTypeOptions)))))))) (and) (filters (filter (unboundedListFilter (listField) (listOp (choiceOp)) (listVal (string (stringContent)))))))) +(query (filters (filters (filters (filters (filter (patternFilter (patternField) (patternOp (containOp)) (patternVal (string (singleWord)))))) (or) (filters (filter (boundedListFilter (visitTypeFilter (visitTypeField) (visitTypeOp (equalOp)) (visitTypeVal (visitTypeOptions)))))))) (and) (filters (filter (unboundedListFilter (listField) (listOp (choiceOp)) (listVal (string (stringContent)))))))) ================== Origins with `and` and `or` inside filter values ================== -(origin = "foo and bar or baz") +(origin : "foo and bar or baz") --- -(query (filters (filters (filter (patternFilter (patternField) (patternOp (equalOp)) (patternVal (string (stringContent)))))))) +(query (filters (filters (filter (patternFilter (patternField) (patternOp (containOp)) (patternVal (string (stringContent)))))))) ================== Origins with `'` and `"` inside filter values ================== -(origin = "foo \\ \'bar\' \"baz\" ") +(origin : "foo \\ \'bar\' \"baz\" ") --- -(query (filters (filters (filter (patternFilter (patternField) (patternOp (equalOp)) (patternVal (string (stringContent (escape_sequence) (escape_sequence) (escape_sequence) (escape_sequence) (escape_sequence))))))))) +(query (filters (filters (filter (patternFilter (patternField) (patternOp (containOp)) (patternVal (string (stringContent (escape_sequence) (escape_sequence) (escape_sequence) (escape_sequence) (escape_sequence))))))))) ================== Incomplete conjunction operators should throw error ================== visits > 5 and --- (query (filters (filter (numericFilter (numericField) (numericOp (rangeOp)) (numberVal (number))))) (ERROR (and))) diff --git a/swh/search/query_language/tokens.js b/swh/search/query_language/tokens.js index 9b29164..c8859cb 100644 --- a/swh/search/query_language/tokens.js +++ b/swh/search/query_language/tokens.js @@ -1,108 +1,110 @@ // Copyright (C) 2021 The Software Heritage developers // See the AUTHORS file at the top-level directory of this distribution // License: GNU General Public License version 3, or any later version // See top-level LICENSE file for more information // Field tokens const visitTypeField = 'visit_type'; const sortByField = 'sort_by'; const limitField = 'limit'; // Field categories const patternFields = ['origin', 'metadata']; const booleanFields = ['visited']; const numericFields = ['visits']; const boundedListFields = [visitTypeField]; const listFields = ['language', 'license', 'keyword']; const dateFields = [ 'last_visit', 'last_eventful_visit', 'last_revision', 'last_release', 'created', 'modified', 'published' ]; const fields = [].concat( patternFields, booleanFields, numericFields, boundedListFields, listFields, dateFields ); // Operators const equalOp = ['=']; +const containOp = [':']; const rangeOp = ['<', '<=', '=', '!=', '>=', '>']; const choiceOp = 
['in', 'not in']; // Values const sortByOptions = [ 'visits', 'last_visit', 'last_eventful_visit', 'last_revision', 'last_release', 'created', 'modified', 'published' ]; const visitTypeOptions = [ "any", "bzr", "cran", "cvs", "deb", "deposit", "ftp", "hg", "git", "nixguix", "npm", "opam", "pypi", "svn", "tar" ]; // Extra tokens const OR = "or"; const AND = "and"; const TRUE = "true"; const FALSE = "false"; module.exports = { // Field tokens visitTypeField, sortByField, limitField, // Field categories patternFields, booleanFields, numericFields, boundedListFields, listFields, dateFields, fields, // Operators equalOp, + containOp, rangeOp, choiceOp, // Values sortByOptions, visitTypeOptions, // Extra tokens OR, AND, TRUE, FALSE } diff --git a/swh/search/tests/conftest.py b/swh/search/tests/conftest.py index 62de0a2..22276a5 100644 --- a/swh/search/tests/conftest.py +++ b/swh/search/tests/conftest.py @@ -1,139 +1,142 @@ -# Copyright (C) 2019-2020 The Software Heritage developers +# Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import socket import subprocess import time import elasticsearch import pytest from swh.search import get_search logger = logging.getLogger(__name__) def free_port(): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.bind(("127.0.0.1", 0)) port = sock.getsockname()[1] sock.close() return port def wait_for_peer(addr, port): while True: try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect((addr, port)) except ConnectionRefusedError: time.sleep(0.1) else: sock.close() break CONFIG_TEMPLATE = """ node.name: node-1 path.data: {data} path.logs: {logs} network.host: 127.0.0.1 http.port: {http_port} transport.port: {transport_port} """ -def _run_elasticsearch(conf_dir, data_dir, logs_dir, http_port, transport_port): +def _run_elasticsearch( + conf_dir, data_dir, logs_dir, http_port, transport_port, libffi_tmpdir +): es_home = "/usr/share/elasticsearch" with open(conf_dir + "/elasticsearch.yml", "w") as fd: fd.write( CONFIG_TEMPLATE.format( data=data_dir, logs=logs_dir, http_port=http_port, transport_port=transport_port, ) ) with open(conf_dir + "/log4j2.properties", "w") as fd: pass cmd = [ "/usr/share/elasticsearch/jdk/bin/java", "-Des.path.home={}".format(es_home), "-Des.path.conf={}".format(conf_dir), "-Des.bundled_jdk=true", "-Dlog4j2.disable.jmx=true", "-cp", "{}/lib/*".format(es_home), "org.elasticsearch.bootstrap.Elasticsearch", ] host = "127.0.0.1:{}".format(http_port) with open(logs_dir + "/output.txt", "w") as fd: - p = subprocess.Popen(cmd) + p = subprocess.Popen(cmd, env={"LIBFFI_TMPDIR": libffi_tmpdir}) wait_for_peer("127.0.0.1", http_port) client = elasticsearch.Elasticsearch([host]) assert client.ping() return p @pytest.fixture(scope="session") def elasticsearch_session(tmpdir_factory): tmpdir = tmpdir_factory.mktemp("elasticsearch") es_conf = tmpdir.mkdir("conf") http_port = free_port() transport_port = free_port() p = _run_elasticsearch( conf_dir=str(es_conf), data_dir=str(tmpdir.mkdir("data")), logs_dir=str(tmpdir.mkdir("logs")), http_port=http_port, transport_port=transport_port, + libffi_tmpdir=str(tmpdir.mkdir("libffi")), ) yield "127.0.0.1:{}".format(http_port) # Check ES didn't stop assert p.returncode is None, p.returncode p.kill() p.wait() @pytest.fixture(scope="class") def 
elasticsearch_host(elasticsearch_session): yield elasticsearch_session @pytest.fixture def swh_search(elasticsearch_host): """Instantiate a search client, initialize the elasticsearch instance, and return it """ logger.debug("swh_search: elasticsearch_host: %s", elasticsearch_host) search = get_search( "elasticsearch", hosts=[elasticsearch_host], indexes={ "origin": { "index": "test", "read_alias": "test-read", "write_alias": "test-write", } }, ) search.deinitialize() # To reset internal state from previous runs search.initialize() # install required index yield search diff --git a/swh/search/tests/test_elasticsearch.py b/swh/search/tests/test_elasticsearch.py index 943f9ed..7e460d5 100644 --- a/swh/search/tests/test_elasticsearch.py +++ b/swh/search/tests/test_elasticsearch.py @@ -1,167 +1,278 @@ -# Copyright (C) 2019-2021 The Software Heritage developers +# Copyright (C) 2019-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from datetime import datetime, timedelta, timezone from textwrap import dedent import types import unittest from elasticsearch.helpers.errors import BulkIndexError import pytest +from swh.search.exc import SearchQuerySyntaxError from swh.search.metrics import OPERATIONS_METRIC from .test_search import CommonSearchTest +now = datetime.now(tz=timezone.utc).isoformat() +now_minus_5_days = (datetime.now(tz=timezone.utc) - timedelta(days=5)).isoformat() +now_plus_5_days = (datetime.now(tz=timezone.utc) + timedelta(days=5)).isoformat() + +ORIGINS = [ + { + "url": "http://foobar.1.com", + "nb_visits": 1, + "last_visit_date": now_minus_5_days, + "last_eventful_visit_date": now_minus_5_days, + }, + { + "url": "http://foobar.2.com", + "nb_visits": 2, + "last_visit_date": now, + "last_eventful_visit_date": now, + }, + { + "url": "http://foobar.3.com", + "nb_visits": 3, + "last_visit_date": now_plus_5_days, + "last_eventful_visit_date": now_minus_5_days, + }, + { + "url": "http://barbaz.4.com", + "nb_visits": 3, + "last_visit_date": now_plus_5_days, + "last_eventful_visit_date": now_minus_5_days, + }, +] + class BaseElasticsearchTest(unittest.TestCase): @pytest.fixture(autouse=True) def _instantiate_search(self, swh_search, elasticsearch_host, mocker): self._elasticsearch_host = elasticsearch_host self.search = swh_search self.mocker = mocker # override self.search.origin_update to catch painless script errors # and pretty print them origin_update = self.search.origin_update def _origin_update(self, *args, **kwargs): script_error = False error_detail = "" try: origin_update(*args, **kwargs) except BulkIndexError as e: error = e.errors[0].get("update", {}).get("error", {}).get("caused_by") if error and "script_stack" in error: script_error = True error_detail = dedent( f""" Painless update script failed ({error.get('reason')}).
error type: {error.get('caused_by', {}).get('type')} error reason: {error.get('caused_by', {}).get('reason')} script stack: """ ) error_detail += "\n".join(error["script_stack"]) else: raise e assert script_error is False, error_detail[1:] self.search.origin_update = types.MethodType(_origin_update, self.search) def reset(self): self.search.deinitialize() self.search.initialize() class TestElasticsearchSearch(CommonSearchTest, BaseElasticsearchTest): def test_metrics_update_duration(self): mock = self.mocker.patch("swh.search.metrics.statsd.timing") for url in ["http://foobar.bar", "http://foobar.baz"]: self.search.origin_update([{"url": url}]) assert mock.call_count == 2 def test_metrics_search_duration(self): mock = self.mocker.patch("swh.search.metrics.statsd.timing") for url_pattern in ["foobar", "foobaz"]: self.search.origin_search(url_pattern=url_pattern, with_visit=True) assert mock.call_count == 2 def test_metrics_indexation_counters(self): mock_es = self.mocker.patch("elasticsearch.helpers.bulk") mock_es.return_value = 2, ["error"] mock_metrics = self.mocker.patch("swh.search.metrics.statsd.increment") self.search.origin_update([{"url": "http://foobar.baz"}]) assert mock_metrics.call_count == 2 mock_metrics.assert_any_call( OPERATIONS_METRIC, 2, tags={ "endpoint": "origin_update", "object_type": "document", "operation": "index", }, ) mock_metrics.assert_any_call( OPERATIONS_METRIC, 1, tags={ "endpoint": "origin_update", "object_type": "document", "operation": "index_error", }, ) def test_write_alias_usage(self): mock = self.mocker.patch("elasticsearch.helpers.bulk") mock.return_value = 2, ["result"] self.search.origin_update([{"url": "http://foobar.baz"}]) assert mock.call_args[1]["index"] == "test-write" def test_read_alias_usage(self): mock = self.mocker.patch("elasticsearch.Elasticsearch.search") mock.return_value = {"hits": {"hits": []}} self.search.origin_search(url_pattern="foobar.baz") assert mock.call_args[1]["index"] == "test-read" def test_sort_by_and_limit_query(self): - now = datetime.now(tz=timezone.utc).isoformat() - now_minus_5_hours = ( - datetime.now(tz=timezone.utc) - timedelta(hours=5) - ).isoformat() - now_plus_5_hours = ( - datetime.now(tz=timezone.utc) + timedelta(hours=5) - ).isoformat() - - ORIGINS = [ - { - "url": "http://foobar.1.com", - "nb_visits": 1, - "last_visit_date": now_minus_5_hours, - "last_eventful_visit_date": now_minus_5_hours, - }, - { - "url": "http://foobar.2.com", - "nb_visits": 2, - "last_visit_date": now, - "last_eventful_visit_date": now, - }, - { - "url": "http://foobar.3.com", - "nb_visits": 3, - "last_visit_date": now_plus_5_hours, - "last_eventful_visit_date": now_minus_5_hours, - }, - ] self.search.origin_update(ORIGINS) self.search.flush() def _check_results(query, origin_indices): page = self.search.origin_search(url_pattern="foobar", query=query) results = [r["url"] for r in page.results] assert results == [ORIGINS[index]["url"] for index in origin_indices] _check_results("sort_by = [-visits]", [2, 1, 0]) _check_results("sort_by = [last_visit]", [0, 1, 2]) _check_results("sort_by = [-last_eventful_visit, visits]", [1, 0, 2]) _check_results("sort_by = [last_eventful_visit,-last_visit]", [2, 0, 1]) _check_results("sort_by = [-visits] limit = 1", [2]) _check_results("sort_by = [last_visit] and limit = 2", [0, 1]) _check_results("sort_by = [-last_eventful_visit, visits] limit = 3", [1, 0, 2]) + + def test_search_ql_simple(self): + self.search.origin_update(ORIGINS) + self.search.flush() + + results = { + r["url"] + for r in 
self.search.origin_search(query='origin : "foobar"').results + } + assert results == { + "http://foobar.1.com", + "http://foobar.2.com", + "http://foobar.3.com", + } + + def test_search_ql_datetimes(self): + self.search.origin_update(ORIGINS) + self.search.flush() + + now_minus_5_minutes = ( + datetime.now(tz=timezone.utc) - timedelta(minutes=5) + ).isoformat() + now_plus_5_minutes = ( + datetime.now(tz=timezone.utc) + timedelta(minutes=5) + ).isoformat() + + results = { + r["url"] + for r in self.search.origin_search( + query=( + f"last_visit < {now_minus_5_minutes} " + f"or last_visit > {now_plus_5_minutes}" + ) + ).results + } + assert results == { + "http://foobar.1.com", + "http://foobar.3.com", + "http://barbaz.4.com", + } + + def test_search_ql_dates(self): + self.search.origin_update(ORIGINS) + self.search.flush() + + now_minus_2_days = ( + (datetime.now(tz=timezone.utc) - timedelta(days=2)).date().isoformat() + ) + now_plus_2_days = ( + (datetime.now(tz=timezone.utc) + timedelta(days=2)).date().isoformat() + ) + + results = { + r["url"] + for r in self.search.origin_search( + query=( + f"last_visit < {now_minus_2_days} " + f"or last_visit > {now_plus_2_days}" + ) + ).results + } + assert results == { + "http://foobar.1.com", + "http://foobar.3.com", + "http://barbaz.4.com", + } + + def test_search_ql_visited(self): + self.search.origin_update( + [ + { + "url": "http://foobar.1.com", + "has_visits": True, + "nb_visits": 1, + "last_visit_date": now_minus_5_days, + "last_eventful_visit_date": now_minus_5_days, + }, + {"url": "http://foobar.2.com",}, + {"url": "http://foobar.3.com", "has_visits": False,}, + ] + ) + self.search.flush() + + assert { + r["url"] for r in self.search.origin_search(query="visited = true").results + } == {"http://foobar.1.com"} + assert { + r["url"] for r in self.search.origin_search(query="visited = false").results + } == {"http://foobar.2.com", "http://foobar.3.com"} + + assert ( + self.search.origin_search( + query="visited = true and visited = false" + ).results + == [] + ) + assert ( + self.search.origin_search(query="visited = false", with_visit=True).results + == [] + ) + + def test_query_syntax_error(self): + self.search.origin_update(ORIGINS) + self.search.flush() + + with pytest.raises(SearchQuerySyntaxError): + self.search.origin_search(query="foobar") diff --git a/swh/search/tests/test_journal_client.py b/swh/search/tests/test_journal_client.py index b7a66a4..f1bd668 100644 --- a/swh/search/tests/test_journal_client.py +++ b/swh/search/tests/test_journal_client.py @@ -1,300 +1,291 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information -from datetime import datetime, timezone +from datetime import datetime, timedelta, timezone import functools from unittest.mock import MagicMock import pytest from swh.model.hashutil import hash_to_bytes from swh.model.model import ( ObjectType, Person, Release, Revision, RevisionType, Snapshot, SnapshotBranch, TargetType, - Timestamp, TimestampWithTimezone, ) from swh.search.journal_client import ( fetch_last_revision_release_date, process_journal_objects, ) from swh.storage import get_storage DATES = [ - TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567891, microseconds=0,), - offset=120, - negative_utc=False, + TimestampWithTimezone.from_datetime( + datetime(2009, 2, 14, 1, 31, 31, 
tzinfo=timezone(timedelta(hours=2))) ), - TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567892, microseconds=0,), - offset=120, - negative_utc=False, + TimestampWithTimezone.from_datetime( + datetime(2009, 2, 14, 1, 31, 32, tzinfo=timezone(timedelta(hours=2))) ), - TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567893, microseconds=0,), - offset=120, - negative_utc=False, + TimestampWithTimezone.from_datetime( + datetime(2009, 2, 14, 1, 31, 33, tzinfo=timezone(timedelta(hours=2))) ), - TimestampWithTimezone( - timestamp=Timestamp(seconds=1234567894, microseconds=0,), - offset=120, - negative_utc=False, + TimestampWithTimezone.from_datetime( + datetime(2009, 2, 14, 1, 31, 34, tzinfo=timezone(timedelta(hours=2))) ), ] COMMITTERS = [ Person(fullname=b"foo", name=b"foo", email=b""), Person(fullname=b"bar", name=b"bar", email=b""), ] REVISIONS = [ Revision( message=b"revision_1_message", date=DATES[0], committer=COMMITTERS[0], author=COMMITTERS[0], committer_date=DATES[0], type=RevisionType.GIT, directory=b"\x01" * 20, synthetic=False, metadata=None, parents=( hash_to_bytes("9b918dd063cec85c2bc63cc7f167e29f5894dcbc"), hash_to_bytes("757f38bdcd8473aaa12df55357f5e2f1a318e672"), ), ), Revision( message=b"revision_2_message", date=DATES[1], committer=COMMITTERS[1], author=COMMITTERS[1], committer_date=DATES[1], type=RevisionType.MERCURIAL, directory=b"\x02" * 20, synthetic=False, metadata=None, parents=(), extra_headers=((b"foo", b"bar"),), ), Revision( message=b"revision_3_message", date=DATES[2], committer=COMMITTERS[0], author=COMMITTERS[0], committer_date=DATES[2], type=RevisionType.GIT, directory=b"\x03" * 20, synthetic=False, metadata=None, parents=(), ), ] RELEASES = [ Release( name=b"v0.0.1", date=DATES[1], author=COMMITTERS[0], target_type=ObjectType.REVISION, target=b"\x04" * 20, message=b"foo", synthetic=False, ), Release( name=b"v0.0.2", date=DATES[2], author=COMMITTERS[1], target_type=ObjectType.REVISION, target=b"\x05" * 20, message=b"bar", synthetic=False, ), Release( name=b"v0.0.3", date=DATES[3], author=COMMITTERS[1], target_type=ObjectType.REVISION, target=b"\x05" * 20, message=b"foobar", synthetic=False, ), ] SNAPSHOTS = [ Snapshot( branches={ b"target/revision1": SnapshotBranch( target_type=TargetType.REVISION, target=REVISIONS[0].id, ), b"target/revision2": SnapshotBranch( target_type=TargetType.REVISION, target=REVISIONS[1].id, ), b"target/revision3": SnapshotBranch( target_type=TargetType.REVISION, target=REVISIONS[2].id, ), b"target/release1": SnapshotBranch( target_type=TargetType.RELEASE, target=RELEASES[0].id ), b"target/release2": SnapshotBranch( target_type=TargetType.RELEASE, target=RELEASES[1].id ), b"target/release3": SnapshotBranch( target_type=TargetType.RELEASE, target=RELEASES[2].id ), b"target/alias": SnapshotBranch( target_type=TargetType.ALIAS, target=b"target/revision1" ), }, ), Snapshot( branches={ b"target/revision1": SnapshotBranch( target_type=TargetType.REVISION, target=REVISIONS[0].id, ) }, ), Snapshot( branches={ b"target/release1": SnapshotBranch( target_type=TargetType.RELEASE, target=RELEASES[0].id ) }, ), Snapshot(branches={}), ] @pytest.fixture def storage(): storage = get_storage("memory") storage.revision_add(REVISIONS) storage.release_add(RELEASES) storage.snapshot_add(SNAPSHOTS) return storage def test_journal_client_origin_from_journal(): search_mock = MagicMock() worker_fn = functools.partial(process_journal_objects, search=search_mock,) worker_fn({"origin": [{"url": "http://foobar.baz"},]}) 
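    # process_journal_objects with only a search backend forwards origin dicts
    # to search.origin_update unchanged; the assertions below check that
    # pass-through for a one-element and then a two-element batch.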
search_mock.origin_update.assert_called_once_with( [{"url": "http://foobar.baz"},] ) search_mock.reset_mock() worker_fn({"origin": [{"url": "http://foobar.baz"}, {"url": "http://barbaz.qux"},]}) search_mock.origin_update.assert_called_once_with( [{"url": "http://foobar.baz"}, {"url": "http://barbaz.qux"},] ) def test_journal_client_origin_visit_status_from_journal(storage): search_mock = MagicMock() worker_fn = functools.partial( process_journal_objects, search=search_mock, storage=storage ) current_datetime = datetime.now(tz=timezone.utc) worker_fn( { "origin_visit_status": [ { "origin": "http://foobar.baz", "status": "full", "type": "git", "visit": 5, "date": current_datetime, "snapshot": SNAPSHOTS[0].id, } # full visits ok ] } ) search_mock.origin_update.assert_called_once_with( [ { "url": "http://foobar.baz", "visit_types": ["git"], "has_visits": True, "nb_visits": 5, "snapshot_id": SNAPSHOTS[0].id.hex(), "last_visit_date": current_datetime.isoformat(), "last_eventful_visit_date": current_datetime.isoformat(), "last_revision_date": "2009-02-14T01:31:33+02:00", "last_release_date": "2009-02-14T01:31:34+02:00", }, ] ) search_mock.reset_mock() # non-full visits only set the visit_types attribute worker_fn( { "origin_visit_status": [ { "origin": "http://foobar.baz", "type": "git", "status": "partial", "visit": 5, "date": current_datetime, } ] } ) search_mock.origin_update.assert_called_once_with( [{"url": "http://foobar.baz", "visit_types": ["git"]}] ) def test_journal_client_origin_metadata_from_journal(): search_mock = MagicMock() worker_fn = functools.partial(process_journal_objects, search=search_mock,) worker_fn( { "origin_intrinsic_metadata": [ { "id": "http://foobar.baz", "metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", "programmingLanguage": "python", "license": "MIT", }, }, ] } ) search_mock.origin_update.assert_called_once_with( [ { "url": "http://foobar.baz", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", "programmingLanguage": "python", "license": "MIT", }, }, ] ) def test_fetch_last_revision_release_date(storage): for snapshot in SNAPSHOTS: assert fetch_last_revision_release_date(snapshot.id, storage) is not None diff --git a/swh/search/tests/test_search.py b/swh/search/tests/test_search.py index 1559ddb..5653685 100644 --- a/swh/search/tests/test_search.py +++ b/swh/search/tests/test_search.py @@ -1,1189 +1,1235 @@ # Copyright (C) 2019-2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information from collections import Counter from datetime import datetime, timedelta, timezone from itertools import permutations from hypothesis import given, settings, strategies import pytest from swh.core.api.classes import stream_results class CommonSearchTest: def test_origin_url_unique_word_prefix(self): origin_foobar_baz = {"url": "http://foobar.baz"} origin_barbaz_qux = {"url": "http://barbaz.qux"} origin_qux_quux = {"url": "http://qux.quux"} origins = [origin_foobar_baz, origin_barbaz_qux, origin_qux_quux] self.search.origin_update(origins) self.search.flush() actual_page = self.search.origin_search(url_pattern="foobar") assert actual_page.next_page_token is None assert actual_page.results == [origin_foobar_baz] actual_page = self.search.origin_search(url_pattern="barb") assert actual_page.next_page_token is 
None assert actual_page.results == [origin_barbaz_qux] # 'bar' is part of 'foobar', but is not the beginning of it actual_page = self.search.origin_search(url_pattern="bar") assert actual_page.next_page_token is None assert actual_page.results == [origin_barbaz_qux] actual_page = self.search.origin_search(url_pattern="barbaz") assert actual_page.next_page_token is None assert actual_page.results == [origin_barbaz_qux] def test_origin_url_unique_word_prefix_multiple_results(self): origin_foobar_baz = {"url": "http://foobar.baz"} origin_barbaz_qux = {"url": "http://barbaz.qux"} origin_qux_quux = {"url": "http://qux.quux"} self.search.origin_update( [origin_foobar_baz, origin_barbaz_qux, origin_qux_quux] ) self.search.flush() actual_page = self.search.origin_search(url_pattern="qu") assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [o["url"] for o in [origin_qux_quux, origin_barbaz_qux]] assert sorted(results) == sorted(expected_results) actual_page = self.search.origin_search(url_pattern="qux") assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [o["url"] for o in [origin_qux_quux, origin_barbaz_qux]] assert sorted(results) == sorted(expected_results) def test_origin_url_all_terms(self): origin_foo_bar_baz = {"url": "http://foo.bar/baz"} origin_foo_bar_foo_bar = {"url": "http://foo.bar/foo.bar"} origins = [origin_foo_bar_baz, origin_foo_bar_foo_bar] self.search.origin_update(origins) self.search.flush() # Only results containing all terms should be returned. actual_page = self.search.origin_search(url_pattern="foo bar baz") assert actual_page.next_page_token is None assert actual_page.results == [origin_foo_bar_baz] def test_origin_with_visit(self): origin_foobar_baz = {"url": "http://foobar/baz"} self.search.origin_update( [{**o, "has_visits": True} for o in [origin_foobar_baz]] ) self.search.flush() actual_page = self.search.origin_search(url_pattern="foobar", with_visit=True) assert actual_page.next_page_token is None assert actual_page.results == [origin_foobar_baz] def test_origin_with_visit_added(self): origin_foobar_baz = {"url": "http://foobar.baz"} self.search.origin_update([origin_foobar_baz]) self.search.flush() actual_page = self.search.origin_search(url_pattern="foobar", with_visit=True) assert actual_page.next_page_token is None assert actual_page.results == [] self.search.origin_update( [{**o, "has_visits": True} for o in [origin_foobar_baz]] ) self.search.flush() actual_page = self.search.origin_search(url_pattern="foobar", with_visit=True) assert actual_page.next_page_token is None assert actual_page.results == [origin_foobar_baz] def test_origin_no_visit_types_search(self): origins = [{"url": "http://foobar.baz"}] self.search.origin_update(origins) self.search.flush() actual_page = self.search.origin_search(url_pattern="http", visit_types=["git"]) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [] assert sorted(results) == sorted(expected_results) actual_page = self.search.origin_search(url_pattern="http", visit_types=None) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin["url"] for origin in origins] assert sorted(results) == sorted(expected_results) def test_origin_visit_types_search(self): origins = [ {"url": "http://foobar.baz", "visit_types": ["git"]}, {"url": "http://barbaz.qux", "visit_types": ["svn"]}, {"url": 
"http://qux.quux", "visit_types": ["hg"]}, ] self.search.origin_update(origins) self.search.flush() for origin in origins: actual_page = self.search.origin_search( url_pattern="http", visit_types=origin["visit_types"] ) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin["url"]] assert sorted(results) == sorted(expected_results) actual_page = self.search.origin_search(url_pattern="http", visit_types=None) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin["url"] for origin in origins] assert sorted(results) == sorted(expected_results) def test_origin_visit_types_update_search(self): origin_url = "http://foobar.baz" self.search.origin_update([{"url": origin_url}]) self.search.flush() def _add_visit_type(visit_type): self.search.origin_update( [{"url": origin_url, "visit_types": [visit_type]}] ) self.search.flush() def _check_visit_types(visit_types_list): for visit_types in visit_types_list: actual_page = self.search.origin_search( url_pattern="http", visit_types=visit_types ) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin_url] assert sorted(results) == sorted(expected_results) _add_visit_type("git") _check_visit_types([["git"], ["git", "hg"]]) _add_visit_type("svn") _check_visit_types([["git"], ["svn"], ["svn", "git"], ["git", "hg", "svn"]]) _add_visit_type("hg") _check_visit_types( [ ["git"], ["svn"], ["hg"], ["svn", "git"], ["hg", "git"], ["hg", "svn"], ["git", "hg", "svn"], ] ) def test_origin_nb_visits_update_search(self): origin_url = "http://foobar.baz" self.search.origin_update([{"url": origin_url}]) self.search.flush() def _update_nb_visits(nb_visits): self.search.origin_update([{"url": origin_url, "nb_visits": nb_visits}]) self.search.flush() def _check_min_nb_visits(min_nb_visits): actual_page = self.search.origin_search( url_pattern=origin_url, min_nb_visits=min_nb_visits, ) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin_url] assert sorted(results) == sorted(expected_results) _update_nb_visits(2) _check_min_nb_visits(2) # Works for = 2 _check_min_nb_visits(1) # Works for < 2 with pytest.raises(AssertionError): _check_min_nb_visits( 5 ) # No results for nb_visits >= 5 (should throw error) _update_nb_visits(5) _check_min_nb_visits(5) # Works for = 5 _check_min_nb_visits(3) # Works for < 5 def test_origin_last_visit_date_update_search(self): origin_url = "http://foobar.baz" self.search.origin_update([{"url": origin_url}]) self.search.flush() def _update_last_visit_date(last_visit_date): self.search.origin_update( [{"url": origin_url, "last_visit_date": last_visit_date}] ) self.search.flush() def _check_min_last_visit_date(min_last_visit_date): actual_page = self.search.origin_search( url_pattern=origin_url, min_last_visit_date=min_last_visit_date, ) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin_url] assert sorted(results) == sorted(expected_results) now = datetime.now(tz=timezone.utc).isoformat() now_minus_5_hours = ( datetime.now(tz=timezone.utc) - timedelta(hours=5) ).isoformat() now_plus_5_hours = ( datetime.now(tz=timezone.utc) + timedelta(hours=5) ).isoformat() _update_last_visit_date(now) _check_min_last_visit_date(now) # Works for = _check_min_last_visit_date(now_minus_5_hours) # Works for < with 
pytest.raises(AssertionError): _check_min_last_visit_date(now_plus_5_hours) # Fails for > _update_last_visit_date(now_plus_5_hours) _check_min_last_visit_date(now_plus_5_hours) # Works for = _check_min_last_visit_date(now) # Works for < def test_journal_client_origin_visit_status_permutation(self): NOW = datetime.now(tz=timezone.utc).isoformat() NOW_MINUS_5_HOURS = ( datetime.now(tz=timezone.utc) - timedelta(hours=5) ).isoformat() NOW_PLUS_5_HOURS = ( datetime.now(tz=timezone.utc) + timedelta(hours=5) ).isoformat() VISIT_STATUSES = [ { "url": "http://foobar.baz", "snapshot_id": "SNAPSHOT_1", "last_eventful_visit_date": NOW, }, { "url": "http://foobar.baz", "snapshot_id": "SNAPSHOT_1", "last_eventful_visit_date": NOW_MINUS_5_HOURS, }, { "url": "http://foobar.baz", "snapshot_id": "SNAPSHOT_2", "last_eventful_visit_date": NOW_PLUS_5_HOURS, }, ] for visit_statuses in permutations(VISIT_STATUSES, len(VISIT_STATUSES)): self.search.origin_update(visit_statuses) self.search.flush() origin_url = "http://foobar.baz" actual_page = self.search.origin_search( url_pattern=origin_url, min_last_eventful_visit_date=NOW_PLUS_5_HOURS, ) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin_url] assert sorted(results) == sorted(expected_results) self.reset() def test_origin_last_eventful_visit_date_update_search(self): origin_url = "http://foobar.baz" self.search.origin_update([{"url": origin_url}]) self.search.flush() def _update_last_eventful_visit_date(snapshot_id, last_eventful_visit_date): self.search.origin_update( [ { "url": origin_url, "snapshot_id": snapshot_id, "last_eventful_visit_date": last_eventful_visit_date, } ] ) self.search.flush() def _check_min_last_eventful_visit_date(min_last_eventful_visit_date): actual_page = self.search.origin_search( url_pattern=origin_url, min_last_eventful_visit_date=min_last_eventful_visit_date, ) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin_url] assert sorted(results) == sorted(expected_results) now = datetime.now(tz=timezone.utc).isoformat() now_minus_5_hours = ( datetime.now(tz=timezone.utc) - timedelta(hours=5) ).isoformat() now_plus_5_hours = ( datetime.now(tz=timezone.utc) + timedelta(hours=5) ).isoformat() snapshot_1 = "SNAPSHOT_1" snapshot_2 = "SNAPSHOT_2" _update_last_eventful_visit_date(snapshot_1, now) _check_min_last_eventful_visit_date(now) # Works for = _check_min_last_eventful_visit_date(now_minus_5_hours) # Works for < with pytest.raises(AssertionError): _check_min_last_eventful_visit_date(now_plus_5_hours) # Fails for > _update_last_eventful_visit_date( snapshot_1, now_plus_5_hours ) # Revisit(not eventful) same origin _check_min_last_eventful_visit_date( now ) # Should remain the same because recent visit wasn't eventful with pytest.raises(AssertionError): _check_min_last_eventful_visit_date(now_plus_5_hours) _update_last_eventful_visit_date( snapshot_2, now_plus_5_hours ) # Revisit(eventful) same origin _check_min_last_eventful_visit_date(now_plus_5_hours) # Works for = _check_min_last_eventful_visit_date(now) # Works for < def _test_origin_last_revision_release_date_update_search(self, date_type): origin_url = "http://foobar.baz" self.search.origin_update([{"url": origin_url}]) self.search.flush() def _update_last_revision_release_date(date): self.search.origin_update([{"url": origin_url, date_type: date,}]) self.search.flush() def _check_min_last_revision_release_date(date): actual_page = 
self.search.origin_search( url_pattern=origin_url, **{f"min_{date_type}": date}, ) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin_url] assert sorted(results) == sorted(expected_results) now = datetime.now(tz=timezone.utc).isoformat() now_minus_5_hours = ( datetime.now(tz=timezone.utc) - timedelta(hours=5) ).isoformat() now_plus_5_hours = ( datetime.now(tz=timezone.utc) + timedelta(hours=5) ).isoformat() _update_last_revision_release_date(now) _check_min_last_revision_release_date(now) _check_min_last_revision_release_date(now_minus_5_hours) with pytest.raises(AssertionError): _check_min_last_revision_release_date(now_plus_5_hours) _update_last_revision_release_date(now_plus_5_hours) _check_min_last_revision_release_date(now_plus_5_hours) _check_min_last_revision_release_date(now) def test_origin_last_revision_date_update_search(self): self._test_origin_last_revision_release_date_update_search( date_type="last_revision_date" ) def test_origin_last_release_date_update_search(self): self._test_origin_last_revision_release_date_update_search( date_type="last_release_date" ) def test_origin_instrinsic_metadata_dates_filter_sorting_search(self): DATE_0 = "1999-06-28" DATE_1 = "2001-02-13" DATE_2 = "2005-10-02" ORIGINS = [ { "url": "http://foobar.0.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "dateCreated": DATE_0, "dateModified": DATE_1, "datePublished": DATE_2, }, }, { "url": "http://foobar.1.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "dateCreated": DATE_1, "dateModified": DATE_2, "datePublished": DATE_2, }, }, { "url": "http://foobar.2.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "dateCreated": DATE_2, "dateModified": DATE_2, "datePublished": DATE_2, }, }, ] self.search.origin_update(ORIGINS) self.search.flush() def _check_results(origin_indices, sort_results=True, **kwargs): page = self.search.origin_search(url_pattern="foobar", **kwargs) results = [r["url"] for r in page.results] if sort_results: assert sorted(results) == sorted( [ORIGINS[index]["url"] for index in origin_indices] ) else: assert results == [ORIGINS[index]["url"] for index in origin_indices] _check_results(min_date_created=DATE_0, origin_indices=[0, 1, 2]) _check_results(min_date_created=DATE_1, origin_indices=[1, 2]) _check_results(min_date_created=DATE_2, origin_indices=[2]) _check_results(min_date_modified=DATE_0, origin_indices=[0, 1, 2]) _check_results(min_date_modified=DATE_1, origin_indices=[0, 1, 2]) _check_results(min_date_modified=DATE_2, origin_indices=[1, 2]) _check_results(min_date_published=DATE_0, origin_indices=[0, 1, 2]) _check_results(min_date_published=DATE_1, origin_indices=[0, 1, 2]) _check_results(min_date_published=DATE_2, origin_indices=[0, 1, 2]) # Sorting _check_results( sort_by=["-date_created"], origin_indices=[2, 1, 0], sort_results=False ) _check_results( sort_by=["date_created"], origin_indices=[0, 1, 2], sort_results=False ) + def test_origin_instrinsic_metadata_dates_processing(self): + + DATE_0 = "foo" # will be discarded + DATE_1 = "2001-2-13" # will be formatted to 2001-02-13 + DATE_2 = "2005-10-2" # will be formatted to 2005-10-02 + + ORIGINS = [ + { + "url": "http://foobar.0.com", + "intrinsic_metadata": { + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "dateCreated": DATE_0, + "dateModified": DATE_1, + "datePublished": DATE_2, + }, + }, + { + "url":
"http://foobar.1.com", + "intrinsic_metadata": { + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "dateCreated": DATE_1, + "dateModified": DATE_2, + "datePublished": DATE_2, + }, + }, + { + "url": "http://foobar.2.com", + "intrinsic_metadata": { + "@context": "https://doi.org/10.5063/schema/codemeta-2.0", + "dateCreated": DATE_2, + "dateModified": DATE_2, + "datePublished": DATE_2, + }, + }, + ] + self.search.origin_update(ORIGINS) + self.search.flush() + + # check origins have been successfully processed + page = self.search.origin_search(url_pattern="foobar") + assert {r["url"] for r in page.results} == { + "http://foobar.0.com", + "http://foobar.2.com", + "http://foobar.1.com", + } + def test_origin_keywords_search(self): ORIGINS = [ { "url": "http://foobar.1.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "Django is a backend framework for applications", "keywords": "django,backend,server,web,framework", }, }, { "url": "http://foobar.2.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "Native Android applications are fast", "keywords": "android,mobile,ui", }, }, { "url": "http://foobar.3.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "React framework helps you build web applications", "keywords": "react,web,ui", }, }, ] self.search.origin_update(ORIGINS) self.search.flush() def _check_results(keywords, origin_indices, sorting=False): page = self.search.origin_search(url_pattern="foobar", keywords=keywords) results = [r["url"] for r in page.results] if sorting: assert sorted(results) == sorted( [ORIGINS[index]["url"] for index in origin_indices] ) else: assert results == [ORIGINS[index]["url"] for index in origin_indices] _check_results(["build"], [2]) _check_results(["web"], [2, 0]) _check_results(["ui"], [1, 2]) # Following tests ensure that boosts work properly # Baseline: "applications" is common in all origin descriptions _check_results(["applications"], [1, 0, 2], True) # ORIGINS[0] has 'framework' in: keyword + description # ORIGINS[2] has 'framework' in: description # ORIGINS[1] has 'framework' in: None _check_results(["framework", "applications"], [0, 2, 1]) # ORIGINS[1] has 'ui' in: keyword # ORIGINS[1] has 'ui' in: keyword # ORIGINS[0] has 'ui' in: None _check_results(["applications", "ui"], [1, 2, 0]) # ORIGINS[2] has 'web' in: keyword + description # ORIGINS[0] has 'web' in: keyword # ORIGINS[1] has 'web' in: None _check_results(["web", "applications"], [2, 0, 1]) def test_origin_sort_by_search(self): now = datetime.now(tz=timezone.utc).isoformat() now_minus_5_hours = ( datetime.now(tz=timezone.utc) - timedelta(hours=5) ).isoformat() now_plus_5_hours = ( datetime.now(tz=timezone.utc) + timedelta(hours=5) ).isoformat() ORIGINS = [ { "url": "http://foobar.1.com", "nb_visits": 1, "last_visit_date": now_minus_5_hours, }, {"url": "http://foobar.2.com", "nb_visits": 2, "last_visit_date": now,}, { "url": "http://foobar.3.com", "nb_visits": 3, "last_visit_date": now_plus_5_hours, }, ] self.search.origin_update(ORIGINS) self.search.flush() def _check_results(sort_by, origins): page = self.search.origin_search(url_pattern="foobar", sort_by=sort_by) results = [r["url"] for r in page.results] assert results == [origin["url"] for origin in origins] _check_results(["nb_visits"], ORIGINS) _check_results(["-nb_visits"], ORIGINS[::-1]) _check_results(["last_visit_date"], ORIGINS) 
_check_results(["-last_visit_date"], ORIGINS[::-1]) _check_results(["nb_visits", "-last_visit_date"], ORIGINS) _check_results(["-last_visit_date", "nb_visits"], ORIGINS[::-1]) def test_origin_instrinsic_metadata_license_search(self): ORIGINS = [ { "url": "http://foobar.1.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", "license": "https://spdx.org/licenses/MIT", }, }, { "url": "http://foobar.2.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", "license": "BSD-3-Clause", }, }, ] self.search.origin_update(ORIGINS) self.search.flush() def _check_results(licenses, origin_indices): page = self.search.origin_search(url_pattern="foobar", licenses=licenses) results = [r["url"] for r in page.results] assert sorted(results) == sorted( [ORIGINS[i]["url"] for i in origin_indices] ) _check_results(["MIT"], [0]) _check_results(["bsd"], [1]) _check_results(["mit", "3-Clause"], [0, 1]) def test_origin_instrinsic_metadata_programming_language_search(self): ORIGINS = [ { "url": "http://foobar.1.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", "programmingLanguage": "python", }, }, { "url": "http://foobar.2.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", "programmingLanguage": "javascript", }, }, ] self.search.origin_update(ORIGINS) self.search.flush() def _check_results(programming_languages, origin_indices): page = self.search.origin_search( url_pattern="foobar", programming_languages=programming_languages ) results = [r["url"] for r in page.results] assert sorted(results) == sorted( [ORIGINS[i]["url"] for i in origin_indices] ) _check_results(["python"], [0]) _check_results(["javascript"], [1]) _check_results(["python", "javascript"], [0, 1]) def test_origin_instrinsic_metadata_multiple_field_search(self): ORIGINS = [ { "url": "http://foobar.1.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar 1", "programmingLanguage": "python", "license": "https://spdx.org/licenses/MIT", }, }, { "url": "http://foobar.2.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar 2", "programmingLanguage": ["javascript", "html", "css"], "license": [ "https://spdx.org/licenses/CC-BY-1.0", "https://spdx.org/licenses/Apache-1.0", ], }, }, { "url": "http://foobar.3.com", "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar 3", "programmingLanguage": ["Cpp", "c"], "license": "https://spdx.org/licenses/LGPL-2.0-only", }, }, ] self.search.origin_update(ORIGINS) self.search.flush() def _check_result(programming_languages, licenses, origin_indices): page = self.search.origin_search( url_pattern="foobar", programming_languages=programming_languages, licenses=licenses, ) results = [r["url"] for r in page.results] assert sorted(results) == sorted( [ORIGINS[i]["url"] for i in origin_indices] ) _check_result(["javascript"], ["CC"], [1]) _check_result(["css"], ["CC"], [1]) _check_result(["css"], ["CC", "apache"], [1]) _check_result(["python", "javascript"], ["MIT"], [0]) _check_result(["c", "python"], ["LGPL", "mit"], [2, 0]) def test_origin_update_with_no_visit_types(self): """ Update an origin with visit types first then with no visit types, check origin can still be searched with visit types afterwards. 
""" origin_url = "http://foobar.baz" self.search.origin_update([{"url": origin_url, "visit_types": ["git"]}]) self.search.flush() self.search.origin_update([{"url": origin_url}]) self.search.flush() actual_page = self.search.origin_search(url_pattern="http", visit_types=["git"]) assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [origin_url] assert results == expected_results def test_origin_intrinsic_metadata_description(self): origin1_nothin = {"url": "http://origin1"} origin2_foobar = {"url": "http://origin2"} origin3_barbaz = {"url": "http://origin3"} self.search.origin_update( [ {**origin1_nothin, "intrinsic_metadata": {},}, { **origin2_foobar, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", }, }, { **origin3_barbaz, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "bar baz", }, }, ] ) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="foo") assert actual_page.next_page_token is None assert actual_page.results == [origin2_foobar] actual_page = self.search.origin_search(metadata_pattern="foo bar") assert actual_page.next_page_token is None assert actual_page.results == [origin2_foobar] actual_page = self.search.origin_search(metadata_pattern="bar baz") assert actual_page.next_page_token is None assert actual_page.results == [origin3_barbaz] def test_origin_intrinsic_metadata_all_terms(self): origin1_foobarfoobar = {"url": "http://origin1"} origin3_foobarbaz = {"url": "http://origin2"} self.search.origin_update( [ { **origin1_foobarfoobar, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar foo bar", }, }, { **origin3_foobarbaz, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar baz", }, }, ] ) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="foo bar baz") assert actual_page.next_page_token is None assert actual_page.results == [origin3_foobarbaz] def test_origin_intrinsic_metadata_long_description(self): """Checks ElasticSearch does not try to store large values untokenize, which would be inefficient and crash it with: Document contains at least one immense term in field="intrinsic_metadata.http://schema.org/description.@value" (whose UTF8 encoding is longer than the max length 32766), all of which were skipped. 
""" # noqa origin1 = {"url": "http://origin1"} self.search.origin_update( [ { **origin1, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": " ".join(f"foo{i}" for i in range(100000)), }, }, ] ) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="foo42") assert actual_page.next_page_token is None assert actual_page.results == [origin1] def test_origin_intrinsic_metadata_matches_cross_fields(self): """Checks the backend finds results even if the two words in the query are each in a different field.""" origin1 = {"url": "http://origin1"} self.search.origin_update( [ { **origin1, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "description": "foo bar", "author": "John Doe", }, }, ] ) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="foo John") assert actual_page.next_page_token is None assert actual_page.results == [origin1] def test_origin_intrinsic_metadata_nested(self): origin1_nothin = {"url": "http://origin1"} origin2_foobar = {"url": "http://origin2"} origin3_barbaz = {"url": "http://origin3"} self.search.origin_update( [ {**origin1_nothin, "intrinsic_metadata": {},}, { **origin2_foobar, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "keywords": ["foo", "bar"], }, }, { **origin3_barbaz, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "keywords": ["bar", "baz"], }, }, ] ) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="foo") assert actual_page.next_page_token is None assert actual_page.results == [origin2_foobar] actual_page = self.search.origin_search(metadata_pattern="foo bar") assert actual_page.next_page_token is None assert actual_page.results == [origin2_foobar] actual_page = self.search.origin_search(metadata_pattern="bar baz") assert actual_page.next_page_token is None assert actual_page.results == [origin3_barbaz] def test_origin_intrinsic_metadata_inconsistent_type(self): """Checks the same field can have a concrete value, an object, or an array in different documents.""" origin1_foobar = {"url": "http://origin1"} origin2_barbaz = {"url": "http://origin2"} origin3_bazqux = {"url": "http://origin3"} self.search.origin_update( [ { **origin1_foobar, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "author": {"familyName": "Foo", "givenName": "Bar",}, }, }, ] ) self.search.flush() self.search.origin_update( [ { **origin2_barbaz, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "author": "Bar Baz", }, }, { **origin3_bazqux, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "author": ["Baz", "Qux"], }, }, ] ) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="bar") assert actual_page.next_page_token is None results = [r["url"] for r in actual_page.results] expected_results = [o["url"] for o in [origin2_barbaz, origin1_foobar]] assert sorted(results) == sorted(expected_results) actual_page = self.search.origin_search(metadata_pattern="baz") assert actual_page.next_page_token is None assert actual_page.results == [origin2_barbaz, origin3_bazqux] actual_page = self.search.origin_search(metadata_pattern="foo") assert actual_page.next_page_token is None assert actual_page.results == [origin1_foobar] actual_page = self.search.origin_search(metadata_pattern="bar baz") assert actual_page.next_page_token is None assert 
actual_page.results == [origin2_barbaz] actual_page = self.search.origin_search(metadata_pattern="qux") assert actual_page.next_page_token is None assert actual_page.results == [origin3_bazqux] actual_page = self.search.origin_search(metadata_pattern="baz qux") assert actual_page.next_page_token is None assert actual_page.results == [origin3_bazqux] actual_page = self.search.origin_search(metadata_pattern="foo bar") assert actual_page.next_page_token is None assert actual_page.results == [origin1_foobar] def test_origin_intrinsic_metadata_string_mapping(self): """Checks that inserting a date-like value in a field does not update the mapping to require that every document use a date in that field, or that search queries on it use a date. Likewise for numeric and boolean fields.""" origin1 = {"url": "http://origin1"} origin2 = {"url": "http://origin2"} self.search.origin_update( [ { **origin1, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "dateCreated": "2021-02-18T10:16:52", "version": "1.0", "isAccessibleForFree": True, }, } ] ) self.search.flush() self.search.origin_update( [ { **origin2, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "dateCreated": "a long time ago", "address": "in a galaxy far, far away", "version": "a new hope", "isAccessibleForFree": "it depends", }, }, ] ) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="1.0") assert actual_page.next_page_token is None assert actual_page.results == [origin1] actual_page = self.search.origin_search(metadata_pattern="long") assert actual_page.next_page_token is None assert ( actual_page.results == [] ) # "%Y-%m-%d" not followed, so value is rejected actual_page = self.search.origin_search(metadata_pattern="true") assert actual_page.next_page_token is None assert actual_page.results == [origin1] actual_page = self.search.origin_search(metadata_pattern="it depends") assert actual_page.next_page_token is None assert actual_page.results == [origin2] def test_origin_intrinsic_metadata_update(self): origin = {"url": "http://origin1"} origin_data = { **origin, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "author": "John Doe", }, } self.search.origin_update([origin_data]) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="John") assert actual_page.next_page_token is None assert actual_page.results == [origin] origin_data["intrinsic_metadata"]["author"] = "Jane Doe" self.search.origin_update([origin_data]) self.search.flush() actual_page = self.search.origin_search(metadata_pattern="Jane") assert actual_page.next_page_token is None assert actual_page.results == [origin] # TODO: add more tests with more codemeta terms # TODO: add more tests with edge cases @settings(deadline=None) @given(strategies.integers(min_value=1, max_value=4)) def test_origin_url_paging(self, limit): # TODO: no hypothesis origin1_foo = {"url": "http://origin1/foo"} origin2_foobar = {"url": "http://origin2/foo/bar"} origin3_foobarbaz = {"url": "http://origin3/foo/bar/baz"} self.reset() self.search.origin_update([origin1_foo, origin2_foobar, origin3_foobarbaz]) self.search.flush() results = stream_results( self.search.origin_search, url_pattern="foo bar baz", limit=limit ) results = [res["url"] for res in results] expected_results = [o["url"] for o in [origin3_foobarbaz]] assert sorted(results[0 : len(expected_results)]) == sorted(expected_results) results = stream_results( self.search.origin_search, url_pattern="foo bar",
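# Editorial note, not part of the upstream diff: stream_results (imported at
# the top of this test module, presumably from swh.core) is assumed to drive
# pagination by re-calling origin_search with each returned next_page_token
# until it is None, so running these checks with limit values from 1 to 4
# exercises the page_token plumbing rather than single-page results.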
limit=limit ) results = [res["url"] for res in results] expected_results = [o["url"] for o in [origin2_foobar, origin3_foobarbaz]] assert sorted(results[0 : len(expected_results)]) == sorted(expected_results) results = stream_results( self.search.origin_search, url_pattern="foo", limit=limit ) results = [res["url"] for res in results] expected_results = [ o["url"] for o in [origin1_foo, origin2_foobar, origin3_foobarbaz] ] assert sorted(results[0 : len(expected_results)]) == sorted(expected_results) @settings(deadline=None) @given(strategies.integers(min_value=1, max_value=4)) def test_origin_intrinsic_metadata_paging(self, limit): # TODO: no hypothesis origin1_foo = {"url": "http://origin1"} origin2_foobar = {"url": "http://origin2"} origin3_foobarbaz = {"url": "http://origin3"} self.reset() self.search.origin_update( [ { **origin1_foo, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "keywords": ["foo"], }, }, { **origin2_foobar, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "keywords": ["foo", "bar"], }, }, { **origin3_foobarbaz, "intrinsic_metadata": { "@context": "https://doi.org/10.5063/schema/codemeta-2.0", "keywords": ["foo", "bar", "baz"], }, }, ] ) self.search.flush() results = stream_results( self.search.origin_search, metadata_pattern="foo bar baz", limit=limit ) assert list(results) == [origin3_foobarbaz] results = stream_results( self.search.origin_search, metadata_pattern="foo bar", limit=limit ) assert list(results) == [origin2_foobar, origin3_foobarbaz] results = stream_results( self.search.origin_search, metadata_pattern="foo", limit=limit ) assert list(results) == [origin1_foo, origin2_foobar, origin3_foobarbaz] def test_search_blocklisted_results(self): origin1 = {"url": "http://origin1"} origin2 = {"url": "http://origin2", "blocklisted": True} self.search.origin_update([origin1, origin2]) self.search.flush() actual_page = self.search.origin_search(url_pattern="origin") assert actual_page.next_page_token is None assert actual_page.results == [origin1] def test_search_blocklisted_update(self): origin1 = {"url": "http://origin1"} self.search.origin_update([origin1]) self.search.flush() result_page = self.search.origin_search(url_pattern="origin") assert result_page.next_page_token is None assert result_page.results == [origin1] self.search.origin_update([{**origin1, "blocklisted": True}]) self.search.flush() result_page = self.search.origin_search(url_pattern="origin") assert result_page.next_page_token is None assert result_page.results == [] self.search.origin_update( [{**origin1, "has_visits": True, "visit_types": ["git"]}] ) self.search.flush() result_page = self.search.origin_search(url_pattern="origin") assert result_page.next_page_token is None assert result_page.results == [] def test_filter_keyword_in_filter(self): origin1 = { "url": "foo language in ['foo baz'] bar", } self.search.origin_update([origin1]) self.search.flush() result_page = self.search.origin_search(url_pattern="language in ['foo bar']") assert result_page.next_page_token is None assert result_page.results == [origin1] result_page = self.search.origin_search(url_pattern="baaz") assert result_page.next_page_token is None assert result_page.results == [] def test_visit_types_count(self): assert self.search.visit_types_count() == Counter() origins = [ {"url": "http://foobar.baz", "visit_types": ["git"], "blocklisted": True} ] for idx, visit_type in enumerate(["git", "hg", "svn"]): for i in range(idx + 1): origins.append( { "url": 
f"http://{visit_type}.foobar.baz.{i}", "visit_types": [visit_type], } ) self.search.origin_update(origins) self.search.flush() assert self.search.visit_types_count() == Counter(git=1, hg=2, svn=3) diff --git a/swh/search/tests/test_translator.py b/swh/search/tests/test_translator.py index af0b675..9789c62 100644 --- a/swh/search/tests/test_translator.py +++ b/swh/search/tests/test_translator.py @@ -1,400 +1,442 @@ +# Copyright (C) 2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + import pytest from swh.search.translator import Translator from swh.search.utils import get_expansion def _test_results(query, expected): output = Translator().parse_query(query) assert output == expected def test_empty_query(): query = "" with pytest.raises(Exception): _test_results(query, {}) def test_conjunction_operators(): query = "visited = true or visits > 2 and visits < 5" expected = { "filters": { "bool": { "should": [ {"term": {"has_visits": True}}, { "bool": { "must": [ {"range": {"nb_visits": {"gt": 2}}}, {"range": {"nb_visits": {"lt": 5}}}, ] } }, ] } } } _test_results(query, expected) +def test_visited(): + query = "visited = true" + expected = { + "filters": {"term": {"has_visits": True}}, + } + _test_results(query, expected) + + query = "visited = false" + expected = { + "filters": { + "bool": { + "should": [ + {"term": {"has_visits": False}}, + {"bool": {"must_not": {"exists": {"field": "has_visits"}}}}, + ] + } + } + } + _test_results(query, expected) + + def test_conjunction_op_precedence_override(): - query = "(visited = false or visits > 2) and visits < 5" + query = "(visited = true or visits > 2) and visits < 5" expected = { "filters": { "bool": { "must": [ { "bool": { "should": [ - {"term": {"has_visits": False}}, + {"term": {"has_visits": True}}, {"range": {"nb_visits": {"gt": 2}}}, ] } }, {"range": {"nb_visits": {"lt": 5}}}, ] } } } _test_results(query, expected) def test_limit_and_sortby(): query = "visited = true sort_by = [-visits,last_visit] limit = 15" expected = { "filters": {"term": {"has_visits": True}}, "sortBy": ["-visits", "last_visit"], "limit": 15, } _test_results(query, expected) def test_deeply_nested_filters(): query = "(((visited = true and visits > 0)))" expected = { "filters": { "bool": { "must": [ {"term": {"has_visits": True},}, {"range": {"nb_visits": {"gt": 0}}}, ] } }, } _test_results(query, expected) def test_origin_and_metadata_filters(): - query = 'origin = django or metadata = "framework and web"' + query = 'origin : django or metadata : "framework and web"' expected = { "filters": { "bool": { "should": [ { "multi_match": { "query": "django", "type": "bool_prefix", "operator": "and", "fields": [ "url.as_you_type", "url.as_you_type._2gram", "url.as_you_type._3gram", ], } }, { "nested": { "path": "intrinsic_metadata", "query": { "multi_match": { "query": "framework and web", "type": "cross_fields", "operator": "and", "fields": ["intrinsic_metadata.*"], "lenient": True, } }, } }, ] } } } _test_results(query, expected) def test_visits_not_equal_to_filter(): query = "visits != 5" expected = { "filters": { "bool": {"must_not": [{"range": {"nb_visits": {"gte": 5, "lte": 5}}},]} }, } _test_results(query, expected) def test_visit_type_filter(): query = 'visit_type = [git,"pypi"]' expected = {"filters": {"terms": {"visit_types": ["git", "pypi"]}}} _test_results(query, expected) def 
test_keyword_filter(): query = r"""keyword in [word1, "word2 \" \' word3"]""" expected = { "filters": { "nested": { "path": "intrinsic_metadata", "query": { "multi_match": { "query": r"""word1 word2 " ' word3""", "fields": [ get_expansion("keywords", ".") + "^2", get_expansion("descriptions", "."), ], } }, } } } _test_results(query, expected) def test_language_filter(): query = 'language in [python, "go lang", cpp]' expected = { "filters": { "nested": { "path": "intrinsic_metadata", "query": { "bool": { "should": [ { "match": { get_expansion( "programming_languages", "." ): "python" } }, { "match": { get_expansion( "programming_languages", "." ): "go lang" } }, { "match": { get_expansion("programming_languages", "."): "cpp" } }, ] } }, } } } _test_results(query, expected) def test_license_filter(): query = 'license in ["GPL 3", Apache, MIT]' expected = { "filters": { "nested": { "path": "intrinsic_metadata", "query": { "bool": { "should": [ {"match": {get_expansion("licenses", "."): "GPL 3"}}, {"match": {get_expansion("licenses", "."): "Apache"}}, {"match": {get_expansion("licenses", "."): "MIT"}}, ] } }, } } } _test_results(query, expected) def test_date_created_not_equal_to_filter(): query = "created != 2020-01-01" expected = { "filters": { "nested": { "path": "intrinsic_metadata", "query": { "bool": { "must_not": [ { "range": { get_expansion("date_created", "."): { "gte": "2020-01-01", "lte": "2020-01-01", } } } ] } }, } } } _test_results(query, expected) def test_date_created_greater_than_filter(): query = "created >= 2020-01-01" expected = { "filters": { "nested": { "path": "intrinsic_metadata", "query": { "bool": { "must": [ { "range": { get_expansion("date_created", "."): { "gte": "2020-01-01", } } } ] } }, } } } _test_results(query, expected) +def test_visit_date_range(): + query = "last_visit >= 2020-01-01 and last_visit < 2021-01-01" + expected = { + "filters": { + "bool": { + "must": [ + {"range": {"last_visit_date": {"gte": "2020-01-01"}}}, + {"range": {"last_visit_date": {"lt": "2021-01-01"}}}, + ] + } + }, + } + + _test_results(query, expected) + + def test_last_eventful_visit_not_equal_to_filter(): query = "last_visit != 2020-01-01" expected = { "filters": { "bool": { "must_not": [ { "range": { "last_visit_date": { "gte": "2020-01-01", "lte": "2020-01-01", } } } ] } } } _test_results(query, expected) def test_last_eventful_visit_less_than_to_filter(): query = "last_visit < 2020-01-01" expected = {"filters": {"range": {"last_visit_date": {"lt": "2020-01-01"}}}} _test_results(query, expected) def test_keyword_no_escape_inside_filter(): # any keyword (filter name/operator/value) inside a filter # must be considered a string. 
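# Editorial sketch, not part of the upstream diff: the quoting relied on here
# matches the escape()/unescape() helpers from swh.search.utils shown later
# in this diff; per their doctests,
#   print(escape("language in ['go lang', python]"))
# would output
#   "language in [\'go lang\', python]"
# i.e. exactly the quoted value embedded in the query below, which the parser
# then treats as one opaque string.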
- query = r'''origin = "language in [\'go lang\', python]"''' + query = r'''origin : "language in [\'go lang\', python]"''' expected = { "filters": { "multi_match": { "query": r"""language in ['go lang', python]""", "type": "bool_prefix", "operator": "and", "fields": [ "url.as_you_type", "url.as_you_type._2gram", "url.as_you_type._3gram", ], } } } _test_results(query, expected) def test_escaped_punctuation_parsing(): query = r"""keyword in ["foo \'\" bar"]""" expected = { "filters": { "nested": { "path": "intrinsic_metadata", "query": { "multi_match": { "query": r"""foo '" bar""", "fields": [ get_expansion("keywords", ".") + "^2", get_expansion("descriptions", "."), ], } }, } } } _test_results(query, expected) def test_nonascii(): query = r"""keyword in ["café"]""" expected = { "filters": { "nested": { "path": "intrinsic_metadata", "query": { "multi_match": { "query": r"""café""", "fields": [ get_expansion("keywords", ".") + "^2", get_expansion("descriptions", "."), ], } }, } } } _test_results(query, expected) def test_nonascii_before_operator(): query = r"""keyword in ["🐍"] and visited = true""" expected = { "filters": { "bool": { "must": [ { "nested": { "path": "intrinsic_metadata", "query": { "multi_match": { "query": r"""🐍""", "fields": [ get_expansion("keywords", ".") + "^2", get_expansion("descriptions", "."), ], } }, }, }, {"term": {"has_visits": True,},}, ], } } } _test_results(query, expected) diff --git a/swh/search/tests/test_utils.py b/swh/search/tests/test_utils.py new file mode 100644 index 0000000..8db4838 --- /dev/null +++ b/swh/search/tests/test_utils.py @@ -0,0 +1,23 @@ +# Copyright (C) 2021 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import pytest + +from swh.search.utils import parse_and_format_date + + +@pytest.mark.parametrize( + "date_str", + ["2021-07-03", "2021-7-03", "2021-07-3", "2021-7-3", "2021-07-03T15:17:08Z"], +) +def test_parse_and_format_date_success(date_str): + assert parse_and_format_date(date_str) == "2021-07-03" + + +@pytest.mark.parametrize( + "date_str", ["foo", "2021/07/03", "2021+07+03T15,17,08Z"], +) +def test_parse_and_format_date_failure(date_str): + assert parse_and_format_date(date_str) is None diff --git a/swh/search/translator.py b/swh/search/translator.py index af3b16d..2e29c71 100644 --- a/swh/search/translator.py +++ b/swh/search/translator.py @@ -1,307 +1,324 @@ -# Copyright (C) 2021 The Software Heritage developers +# Copyright (C) 2021-2022 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import os import tempfile from pkg_resources import resource_filename from tree_sitter import Language, Parser +from swh.search.exc import SearchQuerySyntaxError from swh.search.utils import get_expansion, unescape logger = logging.getLogger(__name__) class Translator: RANGE_OPERATOR_MAP = { ">": "gt", "<": "lt", ">=": "gte", "<=": "lte", } def __init__(self): ql_path = resource_filename("swh.search", "static/swh_ql.so") if not os.path.exists(ql_path): logging.info("%s does not exist, building in temporary directory", ql_path) self._build_dir = tempfile.TemporaryDirectory(prefix="swh.search-build") source_path = resource_filename("swh.search", "query_language") ql_path = 
os.path.join(self._build_dir.name, "swh_ql.so") Language.build_library(ql_path, [source_path]) search_ql = Language(ql_path, "swh_search_ql") self.parser = Parser() self.parser.set_language(search_ql) self.query = "" def parse_query(self, query): self.query = query.encode() tree = self.parser.parse(self.query) self.query_node = tree.root_node if self.query_node.has_error: - raise Exception("Invalid query") + raise SearchQuerySyntaxError("Invalid query") return self._traverse(self.query_node) def _traverse(self, node): if len(node.children) == 3 and node.children[1].type == "filters": # filters => ( filters ) return self._traverse(node.children[1]) # Go past the () brackets if node.type == "query": result = {} for child in node.children: # query => filters sort_by limit result[child.type] = self._traverse(child) return result if node.type == "filters": if len(node.children) == 1: # query => filters # filters => filters # filters => filter # Current node is just a wrapper, so go one level deep return self._traverse(node.children[0]) if len(node.children) == 3: # filters => filters conj_op filters filters1 = self._traverse(node.children[0]) conj_op = self._get_value(node.children[1]) filters2 = self._traverse(node.children[2]) if conj_op == "and": # "must" is equivalent to "AND" return {"bool": {"must": [filters1, filters2]}} if conj_op == "or": # "should" is equivalent to "OR" return {"bool": {"should": [filters1, filters2]}} if node.type == "filter": filter_category = node.children[0] return self._parse_filter(filter_category) if node.type == "sortBy": return self._parse_filter(node) if node.type == "limit": return self._parse_filter(node) return Exception( f"Unknown node type ({node.type}) " f"or unexpected number of children ({node.children})" ) def _get_value(self, node): if ( len(node.children) > 0 and node.children[0].type == "[" and node.children[-1].type == "]" ): # array return [self._get_value(child) for child in node.children if child.is_named] start = node.start_point[1] end = node.end_point[1] value = self.query[start:end].decode() if len(value) > 1 and ( (value[0] == "'" and value[-1] == "'") or (value[0] and value[-1] == '"') ): return unescape(value[1:-1]) if node.type in ["number", "numberVal"]: return int(value) return unescape(value) def _parse_filter(self, filter): if filter.type == "boundedListFilter": filter = filter.children[0] children = filter.children assert len(children) == 3 category = filter.type name, op, value = [self._get_value(child) for child in children] if category == "patternFilter": if name == "origin": return { "multi_match": { "query": value, "type": "bool_prefix", "operator": "and", "fields": [ "url.as_you_type", "url.as_you_type._2gram", "url.as_you_type._3gram", ], } } elif name == "metadata": return { "nested": { "path": "intrinsic_metadata", "query": { "multi_match": { "query": value, # Makes it so that the "foo bar" query returns # documents which contain "foo" in a field and "bar" # in a different field "type": "cross_fields", # All keywords must be found in a document for it to # be considered a match. # TODO: allow missing keywords? "operator": "and", # Searches on all fields of the intrinsic_metadata dict, # recursively. 
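# Editorial note, not part of the upstream diff: combined with the
# "cross_fields" type and "and" operator above, this wildcard field list
# means a query such as metadata : "foo john" can match a document whose
# description contains "foo" and whose author contains "john", even though
# no single field contains both terms (see the
# test_origin_intrinsic_metadata_matches_cross_fields test earlier in this
# diff).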
"fields": ["intrinsic_metadata.*"], # date{Created,Modified,Published} are of type date "lenient": True, } }, } } if category == "booleanFilter": if name == "visited": - return {"term": {"has_visits": value == "true"}} + if value == "true": + return {"term": {"has_visits": True}} + else: + # non-visited origins will typically not have "has_visits" set + # at all + return { + "bool": { + "should": [ + {"term": {"has_visits": False}}, + { + "bool": { + "must_not": {"exists": {"field": "has_visits"}} + } + }, + ] + } + } if category == "numericFilter": if name == "visits": if op in ["=", "!="]: return { "bool": { ("must" if op == "=" else "must_not"): [ {"range": {"nb_visits": {"gte": value, "lte": value}}} ] } } else: return { "range": {"nb_visits": {self.RANGE_OPERATOR_MAP[op]: value}} } if category == "visitTypeFilter": if name == "visit_type": return {"terms": {"visit_types": value}} if category == "unboundedListFilter": value_array = value if name == "keyword": return { "nested": { "path": "intrinsic_metadata", "query": { "multi_match": { "query": " ".join(value_array), "fields": [ get_expansion("keywords", ".") + "^2", get_expansion("descriptions", "."), # "^2" boosts an origin's score by 2x # if it the queried keywords are # found in its intrinsic_metadata.keywords ], } }, } } elif name in ["language", "license"]: name_mapping = { "language": "programming_languages", "license": "licenses", } name = name_mapping[name] return { "nested": { "path": "intrinsic_metadata", "query": { "bool": { "should": [ {"match": {get_expansion(name, "."): val}} for val in value_array ], } }, } } if category == "dateFilter": if name in ["created", "modified", "published"]: if op in ["=", "!="]: return { "nested": { "path": "intrinsic_metadata", "query": { "bool": { ("must" if op == "=" else "must_not"): [ { "range": { get_expansion(f"date_{name}", "."): { "gte": value, "lte": value, } } } ], } }, } } return { "nested": { "path": "intrinsic_metadata", "query": { "bool": { "must": [ { "range": { get_expansion(f"date_{name}", "."): { self.RANGE_OPERATOR_MAP[op]: value, } } } ], } }, } } else: if op in ["=", "!="]: return { "bool": { ("must" if op == "=" else "must_not"): [ { "range": { f"{name}_date": {"gte": value, "lte": value,} } } ], } } return { "range": { f"{name}_date": { self.RANGE_OPERATOR_MAP[op]: value.replace("Z", "+00:00"), } } } if category == "sortBy": return value if category == "limit": return value - raise Exception(f"Unknown filter {category}.{name}") + raise SearchQuerySyntaxError(f"Unknown filter {category}.{name}") diff --git a/swh/search/utils.py b/swh/search/utils.py index e55b26a..464c435 100644 --- a/swh/search/utils.py +++ b/swh/search/utils.py @@ -1,112 +1,111 @@ # Copyright (C) 2021 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import codecs from datetime import datetime +from typing import Optional -import iso8601 # type: ignore +import iso8601 def get_expansion(field, sep=None): METADATA_FIELDS = { "licenses": ["intrinsic_metadata", "http://schema.org/license", "@id"], "programming_languages": [ "intrinsic_metadata", "http://schema.org/programmingLanguage", "@value", ], "keywords": ["intrinsic_metadata", "http://schema.org/keywords", "@value",], "descriptions": [ "intrinsic_metadata", "http://schema.org/description", "@value", ], "date_created": [ "intrinsic_metadata", "http://schema.org/dateCreated", 
"@value", ], "date_modified": [ "intrinsic_metadata", "http://schema.org/dateModified", "@value", ], "date_published": [ "intrinsic_metadata", "http://schema.org/datePublished", "@value", ], } if sep: return sep.join(METADATA_FIELDS[field]) return METADATA_FIELDS[field] -def is_date_parsable(date_str): +def parse_and_format_date(date_str: str) -> Optional[str]: """ - Return True if date_str is in the format - %Y-%m-%d or the standard ISO format. - Otherwise return False. + Parses a string date in the format %Y-%m-%d or ISO8601 and returns + a new string date in the format YYYY-mm-dd if the parsing succeeded + otherwise None. """ try: - datetime.strptime(date_str, "%Y-%m-%d") - return True + return datetime.strptime(date_str, "%Y-%m-%d").strftime("%Y-%m-%d") except Exception: try: - iso8601.parse_date(date_str) - return True + return iso8601.parse_date(date_str).strftime("%Y-%m-%d") except Exception: - return False + return None def escape(obj): r"""Makes the object directly injectable into the query language by converting the escapable parts of the object into escape sequences. For strings, appends \ before special characters like ', ", and \ For arrays, applies the same transformation on each element, joins the elements and returns a string-like representation of the list. >>> print(escape("foo ' bar")) "foo \' bar" >>> print(escape([r"foo ' bar", r"bar \\\' baz", r'foo " baz'])) ["foo \' bar", "bar \\\\\\\' baz", "foo \" baz"] """ if type(obj) == list: items = [escape(item) for item in obj] return "[" + ", ".join(items) + "]" elif type(obj) == str: return ( '"' + obj.translate({ord("'"): r"\'", ord('"'): r"\"", ord("\\"): r"\\",}) + '"' ) else: raise Exception(f"Unexpected item type {type(obj)}") def unescape(string): r"""Processes the escaped special characters >>> unescape(r'''foo " bar''') == r'''foo " bar''' True >>> unescape(r'''foo \" bar''') == r'''foo " bar''' True >>> unescape(r'''foo \\" bar''') == r'''foo \" bar''' True >>> unescape(r'''foo \\\" bar''') == r'''foo \" bar''' True >>> unescape(r'''foo \\\\" bar''') == r'''foo \\" bar''' True >>> unescape(r'''café \" foo''') == r'''café " foo''' True """ return codecs.escape_decode(string.encode())[0].decode() diff --git a/tox.ini b/tox.ini index b70d51c..19ff8b2 100644 --- a/tox.ini +++ b/tox.ini @@ -1,74 +1,74 @@ [tox] envlist=black,flake8,mypy,py3 [testenv] passenv = YARN extras = testing deps = pytest-cov commands = pytest --doctest-modules \ {envsitepackagesdir}/swh/search \ --cov={envsitepackagesdir}/swh/search \ --cov-branch {posargs} [testenv:black] skip_install = true deps = black==19.10b0 commands = {envpython} -m black --check swh [testenv:flake8] skip_install = true deps = flake8 commands = {envpython} -m flake8 [testenv:mypy] extras = testing deps = - mypy + mypy==0.920 commands = mypy swh # build documentation outside swh-environment using the current # git HEAD of swh-docs, is executed on CI for each diff to prevent # breaking doc build [testenv:sphinx] whitelist_externals = make usedevelop = true extras = testing deps = # fetch and install swh-docs in develop mode -e git+https://forge.softwareheritage.org/source/swh-docs#egg=swh.docs setenv = SWH_PACKAGE_DOC_TOX_BUILD = 1 # turn warnings into errors SPHINXOPTS = -W commands = make -I ../.tox/sphinx/src/swh-docs/swh/ -C docs # build documentation only inside swh-environment using local state # of swh-docs package [testenv:sphinx-dev] whitelist_externals = make usedevelop = true extras = testing deps = # install swh-docs in develop mode -e ../swh-docs setenv = 
  SWH_PACKAGE_DOC_TOX_BUILD = 1
  # turn warnings into errors
  SPHINXOPTS = -W
commands = make -I ../.tox/sphinx-dev/src/swh-docs/swh/ -C docs
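# Editorial sketch, not part of the upstream diff: with the environments
# defined above, a typical local run would be (assuming tox is installed)
#
#   tox -e py3 -- swh/search/tests/test_translator.py
#
# where everything after "--" is forwarded to pytest through {posargs} in the
# base testenv's commands.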