diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 90037e2..f91265d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,20 +1,17 @@ [bumpversion] commit = True tag = True message = "Release {new_version}" -current_version = 2.1.0 +current_version = 2.4.2 -[bumpversion:file:setup.py] -search = version='{current_version}' -replace = version='{new_version}' +[bumpversion:file:setup.cfg] +search = version = {current_version} +replace = version = {new_version} [bumpversion:file:src/mirakuru/__init__.py] -[bumpversion:file:README.rst] - [bumpversion:file:CHANGES.rst] search = unreleased ---------- replace = {new_version} ---------- - diff --git a/.dependabot/config.yml b/.dependabot/config.yml deleted file mode 100644 index c77885a..0000000 --- a/.dependabot/config.yml +++ /dev/null @@ -1,8 +0,0 @@ -version: 1 -update_configs: - - package_manager: "python" - directory: "/" - update_schedule: "daily" - automerged_updates: - - match: - dependency_name: "*" \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..c1d7b42 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,14 @@ +version: 2 +updates: +- package-ecosystem: pip + directory: "/" + schedule: + interval: daily + time: "04:00" + open-pull-requests-limit: 4 +- package-ecosystem: github-actions + directory: "/" + schedule: + interval: weekly + time: "04:00" + open-pull-requests-limit: 2 diff --git a/.github/workflows/automerge.yml b/.github/workflows/automerge.yml new file mode 100644 index 0000000..c5db063 --- /dev/null +++ b/.github/workflows/automerge.yml @@ -0,0 +1,38 @@ +name: Merge me test dependencies! + +on: + workflow_run: + types: + - completed + workflows: + # List all required workflow names here. + - 'Run linters' + - 'Run tests' + - 'Run tests on macos' + - 'Test build package' + +jobs: + merge-me: + name: Merge me! + runs-on: ubuntu-latest + steps: + - # It is often a desired behavior to merge only when a workflow execution + # succeeds. This can be changed as needed. + if: ${{ github.event.workflow_run.conclusion == 'success' }} + name: Merge me! + uses: ridedott/merge-me-action@v2.9.105 + with: + # Depending on branch protection rules, a manually populated + # `GITHUB_TOKEN_WORKAROUND` secret with permissions to push to + # a protected branch must be used. This secret can have an arbitrary + # name, as an example, this repository uses `DOTTBOTT_TOKEN`. + # + # When using a custom token, it is recommended to leave the following + # comment for other developers to be aware of the reasoning behind it: + # + # This must be used as GitHub Actions token does not support pushing + # to protected branches. 
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + MERGE_METHOD: MERGE + PRESET: DEPENDABOT_MINOR + ENABLED_FOR_MANUAL_CHANGES: 'true' diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..440b193 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,11 @@ +name: Test build package + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + +jobs: + build: + uses: fizyk/actions-reuse/.github/workflows/pypi.yml@v1.3.1 diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml new file mode 100644 index 0000000..d53dc91 --- /dev/null +++ b/.github/workflows/linters.yml @@ -0,0 +1,18 @@ +name: Run linters + +on: + push: + branches: [ main ] + paths: + - '**.py' + - .github/workflows/linters.yml + - requirements-lint.txt + pull_request: + branches: [ main ] + +jobs: + lint: + uses: fizyk/actions-reuse/.github/workflows/linters-python.yml@v1.3.1 + with: + mypy: true + rst: true diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml new file mode 100644 index 0000000..7c3de6c --- /dev/null +++ b/.github/workflows/pypi.yml @@ -0,0 +1,15 @@ +name: Package and publish +on: + push: + tags: + - v* + + + +jobs: + build: + uses: fizyk/actions-reuse/.github/workflows/pypi.yml@v1.3.1 + with: + publish: true + secrets: + pypi_token: ${{ secrets.pypi_password }} diff --git a/.github/workflows/tests-macos.yml b/.github/workflows/tests-macos.yml new file mode 100644 index 0000000..b44a176 --- /dev/null +++ b/.github/workflows/tests-macos.yml @@ -0,0 +1,19 @@ +name: Run tests on macos + +on: + push: + branches: [ main ] + paths: + - '**.py' + - .github/workflows/tests-macos.yml + - requirements-test.txt + pull_request: + branches: [ main ] + +jobs: + macostests: + uses: fizyk/actions-reuse/.github/workflows/tests-pytests.yml@v1.3.1 + with: + python-versions: '["3.7", "3.8", "3.9", "3.10", "pypy-3.8"]' + cover_package: src/mirakuru + os: macos-latest diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..ddd84d1 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,18 @@ +name: Run tests + +on: + push: + branches: [ main ] + paths: + - '**.py' + - .github/workflows/tests.yml + - requirements-test.txt + pull_request: + branches: [ main ] + +jobs: + tests: + uses: fizyk/actions-reuse/.github/workflows/tests-pytests.yml@v1.3.1 + with: + python-versions: '["3.7", "3.8", "3.9", "3.10", "pypy-3.8"]' + cover_package: src/mirakuru diff --git a/.gitignore b/.gitignore index cac04c5..ec16128 100644 --- a/.gitignore +++ b/.gitignore @@ -1,12 +1,13 @@ .cache/* *.egg-info dist build *.pyc .coverage +venv/* .idea/ atlassian-ide-plugin.xml /.pytest_cache/ /.mypy_cache/ diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 16e2918..0000000 --- a/.travis.yml +++ /dev/null @@ -1,57 +0,0 @@ -dist: xenial -language: python -conditions: v1 -python: -- 3.6 -- 3.7 -- 3.8-dev -- pypy3 -# blocklist branches -branches: - except: - - requires-io-master - - /^dependabot.*$/ -install: - - pip install "setuptools>=21" - - pip install "pip>=9" - - pip install -r requirements-test.txt - - pip install coveralls -script: - - pytest -after_success: - - coveralls -jobs: - include: - - stage: linters - python: 3.7 - install: - - pip install -r requirements-lint.txt - script: - - pycodestyle - - pydocstyle - - pylint mirakuru tests - - mypy src tests - - rst-lint *.rst - after_success: skip - - stage: osx - language: generic - os: osx - before_install: - - pip3 install virtualenv 
-    - virtualenv venv -p python3
-    - source venv/bin/activate
-  script:
-    - pytest
-  - stage: deploy
-    python: 3.7
-    if: tag IS present
-    script: skip
-    deploy:
-      provider: pypi
-      user: fizyk
-      password:
-        secure: IBVXG0zLKsBkzdeoC33Lxir01jbvDHdjQ81CPC8PbDPCmUozXgf9eqRFV5VOIYQOboTBzQYRq7RB8efeNKSH3nKf73iahwIYf4ezIxRzUaMzoY4GkyrC/0fQhMk1lAjexrRM1f2o7TIAALPUDyB/EaRcPCBEghxscQEeTlAw08c=
-      on:
-        tags: true
-        repo: ClearcodeHQ/mirakuru
-      distributions: sdist bdist_wheel
diff --git a/AUTHORS.rst b/AUTHORS.rst
index c76d5b6..d5799ce 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -1,16 +1,17 @@
 Authors
 =======
 
 This file contains the list of people involved in the development of mirakuru along its history.
 
 * Mateusz Lenik
 * Tomasz Święcicki
 * Tomasz Krzyszczyk
 * Grzegorz Śliwiński
 * Paweł Wilczyński
 * Daniel O'Connell
 * Michał Pawłowski
 * Grégoire Détrez
+* Lars Gohr
 
 Great thanks to `Mateusz Lenik `_ for original package!
diff --git a/CHANGES.rst b/CHANGES.rst
index 2f1dd27..9508e1a 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,183 +1,246 @@
 CHANGELOG
 =========
 
+2.4.2
+----------
+
+Misc
+++++
+
++ Added Python 3.10 to classifiers
+
+2.4.1
+----------
+
+Misc
+++++
+
+- Use stricter mypy checks
+
+2.4.0
+----------
+
+Features
+++++++++
+
+- Replace `exp_sig` executor parameter with `expected_returncode`.
+  The parameter description already assumed that; however, handling it assumed full
+  POSIX compatibility on the process side. Now POSIX is only assumed if no
+  `expected_returncode` is passed to the executor, and the returncode is simply that,
+  a returncode, nothing more.
+
+2.3.1
+----------
+
+Misc
+++++
+
+- Moved CI to Github Actions
+- Blackified codebase
+- Compacted documentation into the readme (it was pretty small anyway)
+
+2.3.0
+----------
+
+- [enhancement] Ability to set up an expected exit code for an executor. In Java, exit codes 1-127 have
+  special meaning, and the regular exit codes are offset by those of special meaning.
+
+2.2.0
+----------
+
+- [enhancement] If the process is being closed and the shutdown won't be clean (won't return exit code 0),
+  mirakuru will now raise a ProcessFinishedWithError exception with exit_code
+
+2.1.2
+----------
+
+- [bugfix][macos] Fixed typing issue on macOS
+
+2.1.1
+----------
+
+- [bug] Always close connection for HTTPExecutor after_start_check
+- [enhancement] Log a debug message if an exception occurred during
+  HTTPExecutor start check
+- [enhancement] adjust typing handling in HTTPExecutor
+
 2.1.0
 ----------
 
 - [feature] Drop support for python 3.5. Rely on typing syntax and fstrings
   that is available since python 3.6 only
 - [ehnancement] For output executor on MacOs fallback to `select.select` for OutputExecutor.
   Increases compatibility with MacOS where presence of `select.poll` depends on the compiler used.
 - [enhancement] Apply shelx.quote on command parts if command is given as a list
   Should result in similar results when running such command with or without shell.
 
 2.0.1
 ----------
 
 - [repackage] - mark python 3.5 as required. Should disallow installing on python 2
 
 2.0.0
 ----------
 
 - [feature] Add UnixSocketExecutor for executors that communicate with Unix Sockets
 - [feature] Mirakuru is now fully type hinted
 - [feature] Drop support for python 2
 - [feature] Allow for configuring process outputs to pipe to
 - [feature] OutputExecutor can now check for banner in stderr
 - [feature] HTTPEecutor now can check status on different method.
   Along with properly configured payload and headers.
- [feature] Ability to set custom env vars for orchestrated process - [feature] Ability to set custom cwd path for orchestrated process - [enhancement] psutil is no longer required on cygwin 1.1.0 ---------- - [enhancement] Executor's timeout to be set for both executor's start and stop - [enhancement] It's no longer possible to hang indefinitely on the start or stop. Timeout is set to 3600 seconds by default, with values possible between `0` and `sys.maxsize` with the latter still bit longer than `2924712086` centuries. 1.0.0 ---------- - [enhancement] Do not fail if processes child throw EPERM error during clean up phase - [enhancement] Run subprocesses in shell by default on Windows - [ehnancement] Do not pass preexec_fn on windows 0.9.0 ---------- - [enhancement] Fallback to kill through SIGTERM on Windows, since SIGKILL is not available - [enhancement] detect cases where during stop process already exited, and simply clean up afterwards 0.8.3 ---------- - [enhancement] when killing the process ignore OsError with errno `no such process` as the process have already died. - [enhancement] small context manager code cleanup 0.8.2 ---------- - [bugfix] atexit cleanup_subprocesses() function now reimports needed functions 0.8.1 ---------- - [bugfix] Handle IOErrors from psutil (#112) - [bugfix] Pass global vars to atexit cleanup_subprocesses function (#111) 0.8.0 ---------- - [feature] Kill all running mirakuru subprocesses on python exit. - [enhancement] Prefer psutil library (>=4.0.0) over calling 'ps xe' command to find leaked subprocesses. 0.7.0 ---------- - [feature] HTTPExecutor enriched with the 'status' argument. It allows to define which HTTP status code(s) signify that a HTTP server is running. - [feature] Changed executor methods to return itself to allow method chaining. - [feature] Context Manager to return Executor instance, allows creating Executor instance on the fly. - [style] Migrated `%` string formating to `format()`. - [style] Explicitly numbered replacement fields in string. - [docs] Added documentation for timeouts. 0.6.1 ---------- - [refactoring] Moved source to src directory. - [fix, feature] Python 3.5 fixes. - [fix] Docstring changes for updated pep257. 0.6.0 ---------- - [fix] Modify MANIFEST to prune tests folder. - [feature] HTTPExecutor will now set the default 80 if not present in a URL. - [feature] Detect subprocesses exiting erroneously while polling the checks and error early. - [fix] Make test_forgotten_stop pass by preventing the shell from optimizing forking out. 0.5.0 ---------- - [style] Corrected code to conform with W503, D210 and E402 linters errors as reported by pylama `6.3.1`. - [feature] Introduced a hack that kills all subprocesses of executor process. It requires 'ps xe -ww' command being available in OS otherwise logs error. - [refactoring] Classes name convention change. Executor class got renamed into SimpleExecutor and StartCheckExecutor class got renamed into Executor. 0.4.0 ------- - [feature] Ability to set up custom signal for stopping and killing processes managed by executors. - [feature] Replaced explicit parameters with keywords for kwargs handled by basic Executor init method. - [feature] Executor now accepts both list and string as a command. - [fix] Even it's not recommended to import all but `from mirakuru import *` didn't worked. Now it's fixed. - [tests] increased tests coverage. Even test cover 100% of code it doesn't mean they cover 100% of use cases! - [code quality] Increased Pylint code evaluation. 
 0.3.0
 -------
 
 - [feature] Introduced PidExecutor that waits for specified file to be created.
 - [feature] Provided PyPy compatibility.
 - [fix] Closing all resources explicitly.
 
 0.2.0
 -------
 
 - [fix] Kill all children processes of Executor started with shell=True.
 - [feature] Executors are now context managers - to start executors for given context.
 - [feature] Executor.stopped - context manager for stopping executors for given context.
 - [feature] HTTPExecutor and TCPExecutor before .start() check whether port
   is already used by other processes and raise AlreadyRunning if detects it.
 - [refactoring] Moved python version conditional imports into compat.py module.
 
 0.1.4
 -------
 
 - [fix] Fixed an issue where setting shell to True would execute only part of the command.
 
 0.1.3
 -------
 
 - [fix] Fixed an issue where OutputExecutor would hang, if started process stopped producing output.
 
 0.1.2
 -------
 
 - [fix] Removed leftover sleep from TCPExecutor._wait_for_connection.
 
 0.1.1
 -------
 
 - [fix] Fixed `MANIFEST.in`.
 - Updated packaging options.
 
 0.1.0
 -------
 
 - Exposed process attribute on Executor.
 - Exposed port and host on TCPExecutor.
 - Exposed URL on HTTPExecutor.
 - Simplified package structure.
 - Simplified executors operating API.
 - Updated documentation.
 - Added docblocks for every function.
 - Applied license headers.
 - Stripped orchestrators.
 - Forked off from `summon_process`.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 734216b..6b0ee63 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -1,44 +1,48 @@
 Contribute to mirakuru
 ======================
 
 Thank you for taking time to contribute to mirakuru!
 
 The following is a set of guidelines for contributing to mirakuru.
 These are just guidelines, not rules, use your best judgment and feel free
 to propose changes to this document in a pull request.
 
 Bug Reports
 -----------
 
 #. Use a clear and descriptive title for the issue - it'll be much easier to identify the problem.
 #. Describe the steps to reproduce the problems in as many details as possible.
 #. If possible, provide a code snippet to reproduce the issue.
 
 Feature requests/proposals
 --------------------------
 
 #. Use a clear and descriptive title for the proposal
 #. Provide as detailed description as possible
 
    * Use case is great to have
 
 #. There'll be a bit of discussion for the feature. Don't worry, if it is to be accepted,
    we'd like to support it, so we need to understand it thoroughly.
 
 Pull requests
 -------------
 
 #. Start with a bug report or feature request
 #. Use a clear and descriptive title
 #. Provide a description - which issue does it refers to, and what part of the issue is being solved
 #. Be ready for code review :)
 
 Commits
 -------
 
 #. Make sure commits are atomic, and each atomic change is being followed by test.
 #. If the commit solves part of the issue reported, include *refs #[Issue number]* in a commit message.
 #. If the commit solves whole issue reported, please refer to
    `Closing issues via commit messages `_ for ways to close issues when commits will be merged.
-
 Coding style
 ------------
 
-#. All python coding style are being enforced by `Pylama `_ and configured in pylama.ini file.
-#. Additional, not always mandatory checks are being performed by `QuantifiedCode `_
\ No newline at end of file
+#. Coding style is handled by black and double-checked by pycodestyle and pydocstyle
+
+Testing
+-------
+
+#. Tests are written using pytest.
+#. PR tests run on GitHub Actions.
diff --git a/README.rst b/README.rst index acf6ae5..8ebc23b 100644 --- a/README.rst +++ b/README.rst @@ -1,123 +1,292 @@ +.. image:: https://raw.githubusercontent.com/ClearcodeHQ/mirakuru/master/logo.png + :height: 100px + mirakuru ======== Mirakuru is a process orchestration tool designed for functional and integration tests. Maybe you want to be able to start a database before you start your program or maybe you just need to set additional services up for your tests. This is where you should consider using **mirakuru** to add superpowers to your program or tests. .. image:: https://img.shields.io/pypi/v/mirakuru.svg :target: https://pypi.python.org/pypi/mirakuru/ :alt: Latest PyPI version -.. image:: https://readthedocs.org/projects/mirakuru/badge/?version=v2.1.0 - :target: http://mirakuru.readthedocs.io/en/v2.1.0/ - :alt: Documentation Status - .. image:: https://img.shields.io/pypi/wheel/mirakuru.svg :target: https://pypi.python.org/pypi/mirakuru/ :alt: Wheel Status .. image:: https://img.shields.io/pypi/pyversions/mirakuru.svg :target: https://pypi.python.org/pypi/mirakuru/ :alt: Supported Python Versions .. image:: https://img.shields.io/pypi/l/mirakuru.svg :target: https://pypi.python.org/pypi/mirakuru/ :alt: License -Package status --------------- - -.. image:: https://travis-ci.org/ClearcodeHQ/mirakuru.svg?branch=v2.1.0 - :target: https://travis-ci.org/ClearcodeHQ/mirakuru - :alt: Tests - -.. image:: https://coveralls.io/repos/ClearcodeHQ/mirakuru/badge.png?branch=v2.1.0 - :target: https://coveralls.io/r/ClearcodeHQ/mirakuru?branch=v2.1.0 - :alt: Coverage Status -.. image:: https://requires.io/github/ClearcodeHQ/mirakuru/requirements.svg?tag=v2.1.0 - :target: https://requires.io/github/ClearcodeHQ/mirakuru/requirements/?tag=v2.1.0 - :alt: Requirements Status - - -About +Usage ----- In a project that relies on multiple processes there might be a need to guard code with tests that verify interprocess communication. So one needs to set up all of required databases, auxiliary and application services to verify their cooperation. Synchronising (or orchestrating) test procedure with tested processes might be a hell. If so, then **mirakuru** is what you need. ``Mirakuru`` starts your process and waits for the clear indication that it's running. Library provides seven executors to fit different cases: * **SimpleExecutor** - starts a process and does not wait for anything. It is useful to stop or kill a process and its subprocesses. Base class for all the rest of executors. * **Executor** - base class for executors verifying if a process has started. * **OutputExecutor** - waits for a specified output to be printed by a process. * **TCPExecutor** - waits for the ability to connect through TCP with a process. * **UnixSocketExecutor** - waits for the ability to connect through Unix socket with a process * **HTTPExecutor** - waits for a successful HEAD request (and TCP before). * **PidExecutor** - waits for a specified .pid file to exist. +SimpleExecutor +++++++++++++++ + +The simplest executor implementation. +It simply starts the process passed to constructor, and reports it as running. + +.. code-block:: python + + from mirakuru import SimpleExecutor + + process = SimpleExecutor('my_special_process') + process.start() + + # Here you can do your stuff, e.g. 
communicate with the started process
+
+    process.stop()
+
+OutputExecutor
+++++++++++++++
+
+OutputExecutor is the executor that starts the process,
+but does not report it as started unless it receives the specified marker/banner in the
+process output.
+
+.. code-block:: python
+
+    from mirakuru import OutputExecutor
+
+    process = OutputExecutor('my_special_process', banner='processed!')
+    process.start()
+
+    # Here you can do your stuff, e.g. communicate with the started process
+
+    process.stop()
+
+What happens during start here is that the executor constantly checks the output
+produced by the started process and looks for the banner occurring within that
+output.
+Once the banner is identified (in this example, once `processed!` is found in the output),
+the process is considered started and the executor releases your script to continue.
+
+
+TCPExecutor
++++++++++++
+
+TCPExecutor is the executor that should be used to start
+processes that communicate over a TCP connection. This executor tries to connect to
+the process on the given host:port to see if it started accepting connections. Once it
+does, it reports the process as started and the code returns to normal execution.
+
+.. code-block:: python
+
+    from mirakuru import TCPExecutor
+
+    process = TCPExecutor('my_special_process', host='localhost', port=1234)
+    process.start()
+
+    # Here you can do your stuff, e.g. communicate with the started process
+
+    process.stop()
+
+HTTPExecutor
+++++++++++++
+
+HTTPExecutor is the executor that will be used, for example, to start web applications.
+To start it, apart from the command, you need to pass a URL.
+This URL will be used to make a (by default) HEAD request. Once successful,
+the executor will be considered started, and the code will return to normal execution.
+
+.. code-block:: python
+
+    from mirakuru import HTTPExecutor
+
+    process = HTTPExecutor('my_special_process', url='http://localhost:6543/status')
+    process.start()
+
+    # Here you can do your stuff, e.g. communicate with the started process
+
+    process.stop()
+
+This executor, however, apart from the HEAD request, also inherits from TCPExecutor,
+so it'll try to connect to the process over TCP first, to determine
+whether it can already attempt the HEAD request.
+
+By default HTTPExecutor waits until its subprocess responds with a 2XX HTTP status code.
+If you consider other codes as valid, you need to specify them in the 'status' argument.
+
+.. code-block:: python
+
+    from mirakuru import HTTPExecutor
+
+    process = HTTPExecutor('my_special_process', url='http://localhost:6543/status', status='(200|404)')
+    process.start()
+
+The "status" argument can be a single code integer like 200, 404, 500 or a regular expression string -
+'^(2|4)00$', '2\d\d', '\d{3}', etc.
+
+It is also possible to change the request method used to perform the request to the server.
+By default it's HEAD, but GET, POST and others are also possible.
+
+.. code-block:: python
+
+    from mirakuru import HTTPExecutor
+
+    process = HTTPExecutor('my_special_process', url='http://localhost:6543/status', status='(200|404)', method='GET')
+    process.start()
+
+
+PidExecutor
++++++++++++
+
+PidExecutor is an executor that starts the given
+process, and then waits for a given file to be found before it gives back control.
+An example use for this class is writing integration tests for processes that
+announce they are running by creating a .pid file.
+
+.. code-block:: python
+
+    from mirakuru import PidExecutor
+
+    process = PidExecutor('my_special_process', filename='/var/msp/my_special_process.pid')
+    process.start()
+
+    # Here you can do your stuff, e.g. communicate with the started process
+
+    process.stop()
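+
+UnixSocketExecutor
+++++++++++++++++++
+
+UnixSocketExecutor works like TCPExecutor, but waits until it can connect to the
+process through a Unix domain socket instead of a TCP port.
+A minimal sketch of its use (it assumes the socket path is passed as the
+``socket_name`` argument and that ``my_special_process`` creates
+``/var/msp/my_special_process.sock`` once it is ready) could look like this:
+
+.. code-block:: python
+
+    from mirakuru import UnixSocketExecutor
+
+    # Assumed socket path and keyword name - adjust them to your process.
+    process = UnixSocketExecutor(
+        'my_special_process', socket_name='/var/msp/my_special_process.sock'
+    )
+    process.start()
+
+    # Here you can do your stuff, e.g. communicate with the started process
+
+    process.stop()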
+
+A more complete example, using HTTPExecutor in a test, could look like this:
+
 .. code-block:: python
 
     from mirakuru import HTTPExecutor
     from httplib import HTTPConnection, OK
 
 
     def test_it_works():
         # The ``./http_server`` here launches some HTTP server on the 6543 port,
         # but naturally it is not immediate and takes a non-deterministic time:
         executor = HTTPExecutor("./http_server", url="http://127.0.0.1:6543/")
 
         # Start the server and wait for it to run (blocking):
         executor.start()
 
         # Here the server should be running!
         conn = HTTPConnection("127.0.0.1", 6543)
         conn.request("GET", "/")
         assert conn.getresponse().status is OK
         executor.stop()
 
 A command by which executor spawns a process can be defined by either string or list.
 
 .. code-block:: python
 
     # command as string
     TCPExecutor('python -m smtpd -n -c DebuggingServer localhost:1025', host='localhost', port=1025)
 
     # command as list
     TCPExecutor(
         ['python', '-m', 'smtpd', '-n', '-c', 'DebuggingServer', 'localhost:1025'],
         host='localhost', port=1025
     )
 
-Authors
--------
+Use as a Context manager
+------------------------
+
+Starting
+++++++++
+
+Mirakuru executors can also work as context managers.
+
+.. code-block:: python
+
+    from mirakuru import HTTPExecutor
+
+    with HTTPExecutor('my_special_process', url='http://localhost:6543/status') as process:
-The project was firstly developed by `Mateusz Lenik `_
-as the `summon_process `_.
-Later forked, renamed into **mirakuru** and tended to by The A Room @ `Clearcode `_
-and `the other authors `_.
+        # Here you can do your stuff, e.g. communicate with the started process
+        assert process.running() is True
-License
--------
+
+    assert process.running() is False
-``mirakuru`` is licensed under LGPL license, version 3.
+
+The defined process starts upon entering the context, and exits upon leaving it.
+
+Stopping
+++++++++
+
+Mirakuru also allows stopping a process for a given context.
+To do this, simply use the built-in stopped context manager.
+
+.. code-block:: python
+
+    from mirakuru import HTTPExecutor
+
+    process = HTTPExecutor('my_special_process', url='http://localhost:6543/status').start()
+
+    # Here you can do your stuff, e.g. communicate with the started process
+
+    with process.stopped():
+
+        # Here you will not be able to communicate with the process as it is killed here
+        assert process.running() is False
+
+    assert process.running() is True
+
+The defined process stops upon entering the context, and starts again upon leaving it.
+
+
+Method chaining
+++++++++++++++++
+
+Mirakuru encourages method chaining so you can inline some operations, e.g.:
+
+.. code-block:: python
+
+    from mirakuru import SimpleExecutor
+
+    command_stdout = SimpleExecutor('my_special_process').start().stop().output
 
 Contributing and reporting bugs
 -------------------------------
 
 Source code is available at: `ClearcodeHQ/mirakuru `_.
 
 Issue tracker is located at `GitHub Issues `_.
 
 Projects `PyPI page `_.
 
-When contributing, don't forget to add your name to the AUTHORS.rst file.
+Windows support
+---------------
+
+Frankly, there is none. Python's support differs a bit in the required places
+and the team has no experience in developing for Windows.
+However, we'd welcome contributions that would enable Windows support.
+
+See:
+
+* `#392 `_
+* `#336 `_
+
+Also, with the introduction of `WSL `_
+the need for raw Windows support might not be that urgent... If you've got any thoughts or are willing to contribute,
+please start with the issues listed above.
diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index fe20658..0000000 --- a/docs/Makefile +++ /dev/null @@ -1,153 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = build - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source - -.PHONY: help clean html html_venv dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Subscribepyramidplugin.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Subscribepyramidplugin.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." 
- @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/Subscribepyramidplugin" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Subscribepyramidplugin" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 0c5f8f6..0000000 --- a/docs/make.bat +++ /dev/null @@ -1,190 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source -set I18NSPHINXOPTS=%SPHINXOPTS% source -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. 
man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Subscribepyramidplugin.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Subscribepyramidplugin.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 
- goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -:end diff --git a/docs/source/api.rst b/docs/source/api.rst deleted file mode 100644 index e9ea4d5..0000000 --- a/docs/source/api.rst +++ /dev/null @@ -1,8 +0,0 @@ -Api -=== - -.. toctree:: - :maxdepth: 2 - - api/executors - api/exceptions diff --git a/docs/source/api/exceptions.rst b/docs/source/api/exceptions.rst deleted file mode 100644 index 3650687..0000000 --- a/docs/source/api/exceptions.rst +++ /dev/null @@ -1,5 +0,0 @@ -Exceptions -========== - -.. automodule:: mirakuru.exceptions - :members: diff --git a/docs/source/api/executors.rst b/docs/source/api/executors.rst deleted file mode 100644 index 38ef03f..0000000 --- a/docs/source/api/executors.rst +++ /dev/null @@ -1,20 +0,0 @@ -Basic executors -=============== - -.. automodule:: mirakuru.base - :private-members: - -.. automodule:: mirakuru.output - :private-members: - -.. automodule:: mirakuru.unixsocket - :private-members: - -.. automodule:: mirakuru.tcp - :private-members: - -.. automodule:: mirakuru.http - :private-members: - -.. automodule:: mirakuru.pid - :private-members: diff --git a/docs/source/authors.rst b/docs/source/authors.rst deleted file mode 100644 index 0181789..0000000 --- a/docs/source/authors.rst +++ /dev/null @@ -1,3 +0,0 @@ -.. _authors: - -.. include:: ../../AUTHORS.rst diff --git a/docs/source/basic.rst b/docs/source/basic.rst deleted file mode 100644 index e06dda6..0000000 --- a/docs/source/basic.rst +++ /dev/null @@ -1,195 +0,0 @@ -Basic executors -=============== - -Mirakuru :class:`~mirakuru.base.Executor` is something that you will use when you -need to make some code dependant from other process being run, and in certain state, -and you wouldn't want this process to be running all the time. - -Tests would be best example here or a script that sets up processes and databases -for dev environment with one simple run. - - -SimpleExecutor --------------- - -:class:`mirakuru.base.SimpleExecutor` is the simplest executor implementation. -It simply starts the process passed to constructor, and reports it as running. - -.. code-block:: python - - from mirakuru import SimpleExecutor - - process = SimpleExecutor('my_special_process') - process.start() - - # Here you can do your stuff, e.g. communicate with the started process - - process.stop() - -OutputExecutor --------------- - -:class:`mirakuru.output.OutputExecutor` is the executor that starts the process, -but does not report it as started, unless it receives specified marker/banner in -process output. - -.. code-block:: python - - from mirakuru import OutputExecutor - - process = OutputExecutor('my_special_process', banner='processed!') - process.start() - - # Here you can do your stuff, e.g. 
communicate with the started process - - process.stop() - -What happens during start here, is that the executor constantly checks output -produced by started process, and looks for the banner part occurring within the -output. -Once the output is identified, as in example `processed!` is found in output. -It is considered as started, and executor releases your script from wait to work. - - -TCPExecutor ------------ - -:class:`mirakuru.tcp.TCPExecutor` is the executor that should be used to start -processes that are using TCP connection. This executor tries to connect with -the process on given host:port to see if it started accepting connections. Once it -does, it reports the process as started and a code returns to normal execution. - -.. code-block:: python - - from mirakuru import TCPExecutor - - process = TCPExecutor('my_special_process', host='localhost', port=1234) - process.start() - - # Here you can do your stuff, e.g. communicate with the started process - - process.stop() - - -HTTPExecutor ------------- - -:class:`mirakuru.http.HTTPExecutor` is executor that will be used to start -web applications for example. To start it, you apart from command, you need to pass a URL. -This URL will be used to make a (by default) HEAD request. Once successful, -the executor will be considered started, and a code will return to normal execution. - -.. code-block:: python - - from mirakuru import HTTPExecutor - - process = HTTPExecutor('my_special_process', url='http://localhost:6543/status') - process.start() - - # Here you can do your stuff, e.g. communicate with the started process - - process.stop() - -This executor, however, apart from HEAD request, also inherits TCPExecutor, -so it'll try to connect to process over TCP first, to determine, -if it can try to make a HEAD request already. - -By default HTTPExecutor waits until its subprocess responds with 2XX HTTP status code. -If you consider other codes as valid you need to specify them in 'status' argument. - -.. code-block:: python - - from mirakuru import HTTPExecutor - - process = HTTPExecutor('my_special_process', url='http://localhost:6543/status', status='(200|404)') - process.start() - -The "status" argument can be a single code integer like 200, 404, 500 or a regular expression string - -'^(2|4)00$', '2\d\d', '\d{3}', etc. - -There's also a possibility to change the request method used to perform request to the server. -By default it's HEAD, but GET, POST or other are also possible. - -.. code-block:: python - - from mirakuru import HTTPExecutor - - process = HTTPExecutor('my_special_process', url='http://localhost:6543/status', status='(200|404)', method='GET') - process.start() - - -PidExecutor ------------ - -:class:`mirakuru.pid.PidExecutor` is an executor that starts the given -process, and then waits for a given file to be found before it gives back control. -An example use for this class is writing integration tests for processes that -notify their running by creating a .pid file. - -.. code-block:: python - - from mirakuru import PidExecutor - - process = PidExecutor('my_special_process', filename='/var/msp/my_special_process.pid') - process.start() - - # Here you can do your stuff, e.g. communicate with the started process - - process.stop() - - -As a Context manager --------------------- - -Starting -++++++++ - -Mirakuru executors can also work as a context managers. - -.. 
code-block:: python - - from mirakuru import HTTPExecutor - - with HTTPExecutor('my_special_process', url='http://localhost:6543/status') as process: - - # Here you can do your stuff, e.g. communicate with the started process - assert process.running() is True - - assert process.running() is False - -Defined process starts upon entering context, and exit upon exiting it. - -Stopping -++++++++ - -Mirakuru also allows to stop process for given context. -To do this, simply use built-in stopped context manager. - -.. code-block:: python - - from mirakuru import HTTPExecutor - - process = HTTPExecutor('my_special_process', url='http://localhost:6543/status').start() - - # Here you can do your stuff, e.g. communicate with the started process - - with process.stopped(): - - # Here you will not be able to communicate with the process as it is killed here - assert process.running() is False - - assert process.running() is True - -Defined process stops upon entering context, and starts upon exiting it. - - -Methods chaining ----------------- - -Mirakuru encourages methods chaining so you can inline some operations, e.g.: - -.. code-block:: python - - from mirakuru import SimpleExecutor - - command_stdout = SimpleExecutor('my_special_process').start().stop().output diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst deleted file mode 100644 index cca3726..0000000 --- a/docs/source/changelog.rst +++ /dev/null @@ -1,3 +0,0 @@ -.. _changelog: - -.. include:: ../../CHANGES.rst diff --git a/docs/source/conf.py b/docs/source/conf.py deleted file mode 100644 index 9e33941..0000000 --- a/docs/source/conf.py +++ /dev/null @@ -1,305 +0,0 @@ -# Copyright (C) 2014 by Clearcode -# and associates (see AUTHORS). - -# This file is part of mirakuru. - -# mirakuru is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. - -# mirakuru is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. - -# You should have received a copy of the GNU Lesser General Public License -# along with mirakuru. If not, see . - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', - 'sphinx.ext.viewcode', - 'sphinx.ext.intersphinx'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. 
-project = u'mirakuru' -basename = ''.join(project.split('.')) -author = u'The A Room @ Clearcode' -copyright = u'2014, ' + author - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. - -from mirakuru import __version__ - -# The full version, including alpha/beta/rc tags. -release = __version__ - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = [] - -# The reST default role (used for this markup: `text`) to use for all documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. 
-# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = basename + 'doc' - - -# -- Options for LaTeX output -------------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - #'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', basename + '.tex', project + ' Documentation', - author, 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', basename, project + u' Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------------ - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', basename, project + u' Documentation', - author, basename, 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - - -# -- Options for Epub output --------------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = project -epub_author = author -epub_publisher = author -epub_copyright = u'2014, ' + author - -# The language of the text. It defaults to the language option -# or en if the language is not set. -# epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. 
-# epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -# epub_identifier = '' - -# A unique identification for the text. -# epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -# epub_cover = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. -# epub_pre_files = [] - -# HTML files shat should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -# epub_post_files = [] - -# A list of files that should not be packed into the epub file. -# epub_exclude_files = [] - -# The depth of the table of contents in toc.ncx. -# epub_tocdepth = 3 - -# Allow duplicate toc entries. -# epub_tocdup = True - -# Autodoc configuration: - -autoclass_content = 'both' -autodoc_default_flags = ['members', 'show-inheritance'] - -# Intersphinx configuration -intersphinx_mapping = {'python': ('http://docs.python.org/', None)} diff --git a/docs/source/contributing.rst b/docs/source/contributing.rst deleted file mode 100644 index 2b6578f..0000000 --- a/docs/source/contributing.rst +++ /dev/null @@ -1,3 +0,0 @@ -.. _contributing: - -.. include:: ../../CONTRIBUTING.rst diff --git a/docs/source/index.rst b/docs/source/index.rst deleted file mode 100644 index a1a7197..0000000 --- a/docs/source/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. include:: ../../README.rst - -Contents --------- - -.. toctree:: - :maxdepth: 2 - - basic - api - contributing - changelog - - -License -------- - -Copyright (c) 2014 by Clearcode, mirakuru authors and contributors. See :ref:`authors` - -This module is part of mirakuru and is released under the LGPL license, version 3. diff --git a/logo.png b/logo.png new file mode 100644 index 0000000..8190747 Binary files /dev/null and b/logo.png differ diff --git a/logo.svg b/logo.svg new file mode 100644 index 0000000..8cdd981 --- /dev/null +++ b/logo.svg @@ -0,0 +1,100 @@ + + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + MIRAKURU + diff --git a/mypy.ini b/mypy.ini index d3dbfcf..a4c20b8 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,15 +1,26 @@ [mypy] +allow_redefinition = False +allow_untyped_globals = False check_untyped_defs = True -mypy_path = src - -[mypy-mirakuru.*] +disallow_incomplete_defs = True +disallow_subclassing_any = True +disallow_untyped_calls = True +disallow_untyped_decorators = True disallow_untyped_defs = True +follow_imports = silent +ignore_missing_imports = False +implicit_reexport = False +no_implicit_optional = True +pretty = True +show_error_codes = True +strict_equality = True +warn_no_return = True +warn_return_any = True +warn_unreachable = True +warn_unused_ignores = True [mypy-daemon.*] ignore_missing_imports = True [mypy-psutil.*] ignore_missing_imports = True - -[mypy-pytest.*] -ignore_missing_imports = True diff --git a/pylintrc b/pylintrc deleted file mode 100644 index 8631c94..0000000 --- a/pylintrc +++ /dev/null @@ -1,473 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-whitelist= - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. 
-ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=1 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=superfluous-parens, - inconsistent-return-statements, - print-statement, - useless-object-inheritance, - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=optparse.Values,sys.exit - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. 
-ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[BASIC] - -# Naming style matching correct argument names -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style -#argument-rgx= - -# Naming style matching correct attribute names -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style -#class-attribute-rgx= - -# Naming style matching correct class names -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming-style -#class-rgx= - -# Naming style matching correct constant names -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style -function-rgx=(([a-z_][a-z0-9_]{2,50})|(_[a-z0-9_]*)|(__[a-z][a-z0-9_]+__))$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=yes - -# Naming style matching correct inline iteration names -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style -#inlinevar-rgx= - -# Naming style matching correct method names -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style -#method-rgx= - -# Naming style matching correct module names -module-naming-style=snake_case - -# Regular expression matching correct module names. 
Overrides module-naming- -# style -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style -#variable-rgx= - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. 
Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,io,builtins - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=9 - -# Maximum number of attributes for a class (see R0902). -max-attributes=12 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of statements in function / method body -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=yes - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub, - TERMIOS, - Bastion, - rexec - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. 
-known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..7790d5e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,8 @@ +[build-system] +requires = ["setuptools >= 40.6.0", "wheel"] +build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 80 +target-version = ['py38'] +include = '.*\.pyi?$' diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 5588693..0000000 --- a/pytest.ini +++ /dev/null @@ -1,5 +0,0 @@ -[pytest] -addopts = -vvv --capture=no --showlocals --verbose --cov src/mirakuru --cov tests -testpaths = tests/ -filterwarnings = error -xfail_strict = True diff --git a/requirements-lint.txt b/requirements-lint.txt index 3de3d3a..24a01f4 100644 --- a/requirements-lint.txt +++ b/requirements-lint.txt @@ -1,8 +1,8 @@ # linters -pycodestyle==2.5.0 -pydocstyle==4.0.1 -pylint==2.3.1 +pycodestyle==2.8.0 +pydocstyle==6.1.1 pygments -restructuredtext-lint==1.3.0 -mypy==0.720 +restructuredtext-lint==1.3.2 +mypy==0.931 +black==22.1.0 -r requirements-test.txt \ No newline at end of file diff --git a/requirements-test.txt b/requirements-test.txt index 5d3164c..1be3dfd 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,8 +1,8 @@ # test runs requirements (versions we'll be testing against) - automatically updated -psutil==5.6.3 -pytest==5.1.2 # tests framework used -pytest-cov==2.7.1 # coverage reports to verify tests quality -coverage==4.5.4 # pytest-cov -python-daemon==2.2.3 # used in test for easy creation of daemons +psutil==5.9.0 +pytest==7.0.0 # tests framework used +pytest-cov==3.0.0 # coverage reports to verify tests quality +coverage==6.3.1 # pytest-cov +python-daemon==2.3.0 # used in test for easy creation of daemons docutils # needed for python-daemon -e .[tests] diff --git a/setup.cfg b/setup.cfg index b570f82..f273926 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,7 +1,60 @@ +[metadata] +name = mirakuru +version = 2.4.2 +url = https://github.com/ClearcodeHQ/mirakuru +description = Process executor (not only) for tests. +long_description = file: README.rst, CHANGES.rst +long_description_content_type = text/x-rst +keywords = process, executor, tests, orchestration +license = LGPLv3+ +maintainer = Grzegorz Śliwiński +maintainer_email = fizyk+pypi@fizyk.net.pl +classifiers = + Development Status :: 5 - Production/Stable + Intended Audience :: Developers + License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+) + Natural Language :: English + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3 :: Only + +[options] +zip_safe = False +include_package_data = True +python_requires = >= 3.7 +packages = find: +package_dir = + =src +install_requires = + # psutil is used to find processes leaked during termination. + # It runs on many platforms but not Cygwin: + # . 
+ psutil>=4.0.0; sys_platform != "cygwin" + +[options.packages.find] +where = src + +[options.extras_require] +tests = + pytest + pytest-cov + python-daemon + [pycodestyle] max-line-length = 80 exclude = docs/*,build/*,venv/* [pydocstyle] ignore = D203,D212 match = '(?!docs|build|venv).*\.py' + +[tool:pytest] +addopts = -vvv --capture=no --showlocals --verbose --cov src/mirakuru --cov tests +testpaths = tests/ +filterwarnings = error +xfail_strict = True \ No newline at end of file diff --git a/setup.py b/setup.py index a519f18..c62c8c5 100644 --- a/setup.py +++ b/setup.py @@ -1,91 +1,21 @@ -# Copyright (C) 2014 by Clearcode +# Copyright (C) 2014-2021 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """Mirakuru installation module.""" +from setuptools import setup -import os -from setuptools import setup, find_packages - - -here = os.path.dirname(__file__) - - -requirements = [ - # psutil is used to find processes leaked during termination. - # It runs on many platforms but not Cygwin: - # . - 'psutil>=4.0.0; sys_platform != "cygwin"', -] - -tests_require = ( - 'pytest', # tests framework used - 'pytest-cov', # coverage reports to verify tests quality - 'python-daemon', # used in test for easy creation of daemons -) -extras_require = { - 'docs': ['sphinx'], - 'tests': tests_require, -} - - -def read(fname): - """ - Read filename. 
- - :param str fname: name of a file to read - """ - return open(os.path.join(here, fname)).read() - - -setup( - name='mirakuru', - version='2.1.0', - description='Process executor for tests.', - long_description=( - read('README.rst') + '\n\n' + read('CHANGES.rst') - ), - keywords='process executor tests summon_process', - url='https://github.com/ClearcodeHQ/mirakuru', - author='Clearcode - The A Room', - author_email='thearoom@clearcode.cc', - license='LGPL', - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Web Environment', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: ' - 'GNU Lesser General Public License v3 or later (LGPLv3+)', - 'Natural Language :: English', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: Implementation :: PyPy', - 'Topic :: Software Development :: Testing', - ], - package_dir={'': 'src'}, - packages=find_packages('src'), - install_requires=requirements, - tests_require=tests_require, - test_suite='tests', - include_package_data=True, - zip_safe=False, - extras_require=extras_require, -) +setup() diff --git a/src/mirakuru/__init__.py b/src/mirakuru/__init__.py index 8d32c83..96c8927 100644 --- a/src/mirakuru/__init__.py +++ b/src/mirakuru/__init__.py @@ -1,53 +1,53 @@ # Copyright (C) 2014 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """Mirakuru main module.""" import logging from mirakuru.base import Executor, SimpleExecutor from mirakuru.output import OutputExecutor from mirakuru.tcp import TCPExecutor from mirakuru.http import HTTPExecutor from mirakuru.pid import PidExecutor from mirakuru.exceptions import ( ExecutorError, TimeoutExpired, AlreadyRunning, ProcessExitedWithError, ) -__version__ = '2.1.0' +__version__ = "2.4.2" __all__ = ( - 'Executor', - 'SimpleExecutor', - 'OutputExecutor', - 'TCPExecutor', - 'HTTPExecutor', - 'PidExecutor', - 'ExecutorError', - 'TimeoutExpired', - 'AlreadyRunning', - 'ProcessExitedWithError', + "Executor", + "SimpleExecutor", + "OutputExecutor", + "TCPExecutor", + "HTTPExecutor", + "PidExecutor", + "ExecutorError", + "TimeoutExpired", + "AlreadyRunning", + "ProcessExitedWithError", ) # Set default logging handler to avoid "No handler found" warnings. logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/src/mirakuru/base.py b/src/mirakuru/base.py index 4a281e2..8147ac9 100644 --- a/src/mirakuru/base.py +++ b/src/mirakuru/base.py @@ -1,520 +1,558 @@ # Copyright (C) 2014 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. 
# mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """Executor with the core functionality.""" import atexit from contextlib import contextmanager import logging import os import shlex import signal import subprocess import time import uuid import errno import platform from types import TracebackType from typing import ( - Union, IO, Any, List, Tuple, Optional, Dict, TypeVar, Type, Set, Iterator, - Callable + Union, + IO, + Any, + List, + Tuple, + Optional, + Dict, + TypeVar, + Type, + Set, + Iterator, + Callable, ) from mirakuru.base_env import processes_with_env from mirakuru.exceptions import ( AlreadyRunning, ProcessExitedWithError, + ProcessFinishedWithError, TimeoutExpired, ) from mirakuru.compat import SIGKILL -log = logging.getLogger(__name__) # pylint: disable=invalid-name +LOG = logging.getLogger(__name__) - -ENV_UUID = 'mirakuru_uuid' +ENV_UUID = "mirakuru_uuid" """ Name of the environment variable used by mirakuru to mark its subprocesses. """ IGNORED_ERROR_CODES = [errno.ESRCH] -if platform.system() == 'Darwin': +if platform.system() == "Darwin": IGNORED_ERROR_CODES = [errno.ESRCH, errno.EPERM] # Type variables used for self in functions returning self, so it's correctly # typed in derived classes. 
SimpleExecutorType = TypeVar("SimpleExecutorType", bound="SimpleExecutor") ExecutorType = TypeVar("ExecutorType", bound="Executor") @atexit.register def cleanup_subprocesses() -> None: """On python exit: find possibly running subprocesses and kill them.""" - # pylint: disable=redefined-outer-name, reimported # atexit functions tends to loose global imports sometimes so reimport # everything what is needed again here: import os import errno from mirakuru.base_env import processes_with_env from mirakuru.compat import SIGKILL pids = processes_with_env(ENV_UUID, str(os.getpid())) for pid in pids: try: os.kill(pid, SIGKILL) except OSError as err: if err.errno != errno.ESRCH: print("Can not kill the", pid, "leaked process", err) class SimpleExecutor: # pylint:disable=too-many-instance-attributes """Simple subprocess executor with start/stop/kill functionality.""" def __init__( # pylint:disable=too-many-arguments - self, - command: Union[str, List[str], Tuple[str, ...]], - cwd: Optional[str] = None, - shell: bool = False, - timeout: Union[int, float] = 3600, - sleep: float = 0.1, - sig_stop: int = signal.SIGTERM, - sig_kill: int = SIGKILL, - envvars: Optional[Dict[str, str]] = None, - stdin: Union[None, int, IO[Any]] = subprocess.PIPE, - stdout: Union[None, int, IO[Any]] = subprocess.PIPE, - stderr: Union[None, int, IO[Any]] = None + self, + command: Union[str, List[str], Tuple[str, ...]], + cwd: Optional[str] = None, + shell: bool = False, + timeout: Union[int, float] = 3600, + sleep: float = 0.1, + stop_signal: int = signal.SIGTERM, + kill_signal: int = SIGKILL, + expected_returncode: Optional[int] = None, + envvars: Optional[Dict[str, str]] = None, + stdin: Union[None, int, IO[Any]] = subprocess.PIPE, + stdout: Union[None, int, IO[Any]] = subprocess.PIPE, + stderr: Union[None, int, IO[Any]] = None, ) -> None: """ Initialize executor. :param (str, list) command: command to be run by the subprocess :param str cwd: current working directory to be set for executor :param bool shell: same as the `subprocess.Popen` shell definition. On Windows always set to True. :param int timeout: number of seconds to wait for the process to start or stop. :param float sleep: how often to check for start/stop condition - :param int sig_stop: signal used to stop process run by the executor. + :param int stop_signal: signal used to stop process run by the executor. default is `signal.SIGTERM` - :param int sig_kill: signal used to kill process run by the executor. + :param int kill_signal: signal used to kill process run by the executor. default is `signal.SIGKILL` (`signal.SIGTERM` on Windows) + :param int expected_returncode: expected exit code. + default is None which means, Executor will determine a POSIX + compatible return code based on signal sent. :param dict envvars: Additional environment variables :param int stdin: file descriptor for stdin :param int stdout: file descriptor for stdout :param int stderr: file descriptor for stderr .. note:: **timeout** set for an executor is valid for all the level of waits on the way up. That means that if some more advanced executor establishes the timeout to 10 seconds and it will take 5 seconds for the first check, second check will only have 5 seconds left. Your executor will raise an exception if something goes wrong during this time. The default value of timeout is ``None``, so it is a good practice to set this. 
""" if isinstance(command, (list, tuple)): - self.command = ' '.join((shlex.quote(c) for c in command)) + self.command = " ".join((shlex.quote(c) for c in command)) """Command that the executor runs.""" self.command_parts = command else: self.command = command self.command_parts = shlex.split(command) self._cwd = cwd self._shell = True - if platform.system() != 'Windows': + if platform.system() != "Windows": self._shell = shell self._timeout = timeout self._sleep = sleep - self._sig_stop = sig_stop - self._sig_kill = sig_kill + self._stop_signal = stop_signal + self._kill_signal = kill_signal + self._expected_returncode = expected_returncode self._envvars = envvars or {} self._stdin = stdin self._stdout = stdout self._stderr = stderr self._endtime: Optional[float] = None self.process: Optional[subprocess.Popen] = None """A :class:`subprocess.Popen` instance once process is started.""" - self._uuid = f'{os.getpid()}:{uuid.uuid4()}' + self._uuid = f"{os.getpid()}:{uuid.uuid4()}" def __enter__(self: SimpleExecutorType) -> SimpleExecutorType: """ Enter context manager starting the subprocess. :returns: itself :rtype: SimpleExecutor """ return self.start() - def __exit__(self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType]) -> None: + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: """Exit context manager stopping the subprocess.""" self.stop() def running(self) -> bool: """ Check if executor is running. - :returns: SimpleExecutorTyperue if process is running, False otherwise + :returns: True if process is running, False otherwise :rtype: bool """ if self.process is None: + LOG.debug("There is no process running!") return False return self.process.poll() is None @property def _popen_kwargs(self) -> Dict[str, Any]: """ Get kwargs for the process instance. .. note:: We want to open ``stdin``, ``stdout`` and ``stderr`` as text streams in universal newlines mode, so we have to set ``universal_newlines`` to ``True``. :return: """ kwargs: Dict[str, Any] = {} if self._stdin: - kwargs['stdin'] = self._stdin + kwargs["stdin"] = self._stdin if self._stdout: - kwargs['stdout'] = self._stdout + kwargs["stdout"] = self._stdout if self._stderr: - kwargs['stderr'] = self._stderr - kwargs['universal_newlines'] = True + kwargs["stderr"] = self._stderr + kwargs["universal_newlines"] = True - kwargs['shell'] = self._shell + kwargs["shell"] = self._shell env = os.environ.copy() env.update(self._envvars) # Trick with marking subprocesses with an environment variable. # # There is no easy way to recognize all subprocesses that were # spawned during lifetime of a certain subprocess so mirakuru does # this hack in order to mark who was the original parent. Even if # some subprocess got daemonized or changed original process group # mirakuru will be able to find it by this environment variable. # # There may be a situation when some subprocess will abandon # original envs from parents and then it won't be later found. env[ENV_UUID] = self._uuid - kwargs['env'] = env + kwargs["env"] = env - kwargs['cwd'] = self._cwd - if platform.system() != 'Windows': - kwargs['preexec_fn'] = os.setsid + kwargs["cwd"] = self._cwd + if platform.system() != "Windows": + kwargs["preexec_fn"] = os.setsid return kwargs def start(self: SimpleExecutorType) -> SimpleExecutorType: """ Start defined process. After process gets started, timeout countdown begins as well. 
:returns: itself :rtype: SimpleExecutor """ if self.process is None: command: Union[str, List[str], Tuple[str, ...]] = self.command if not self._shell: command = self.command_parts - - self.process = subprocess.Popen( - command, - **self._popen_kwargs - ) + LOG.debug("Starting process: %s", command) + self.process = subprocess.Popen(command, **self._popen_kwargs) self._set_timeout() return self def _set_timeout(self) -> None: """Set timeout for possible wait.""" self._endtime = time.time() + self._timeout def _clear_process(self) -> None: """ Close stdin/stdout of subprocess. It is required because of ResourceWarning in Python 3. """ if self.process: self.process.__exit__(None, None, None) self.process = None self._endtime = None def _kill_all_kids(self, sig: int) -> Set[int]: """ Kill all subprocesses (and its subprocesses) that executor started. This function tries to kill all leftovers in process tree that current executor may have left. It uses environment variable to recognise if process have origin in this Executor so it does not give 100 % and some daemons fired by subprocess may still be running. :param int sig: signal used to stop process run by executor. :return: process ids (pids) of killed processes :rtype: set """ pids = processes_with_env(ENV_UUID, self._uuid) for pid in pids: - log.debug("Killing process %d ...", pid) + LOG.debug("Killing process %d ...", pid) try: os.kill(pid, sig) except OSError as err: if err.errno in IGNORED_ERROR_CODES: # the process has died before we tried to kill it. pass else: raise - log.debug("Killed process %d.", pid) + LOG.debug("Killed process %d.", pid) return pids - def stop(self: SimpleExecutorType, sig: int = None) -> SimpleExecutorType: + def stop( + self: SimpleExecutorType, + stop_signal: Optional[int] = None, + expected_returncode: Optional[int] = None, + ) -> SimpleExecutorType: """ Stop process running. Wait 10 seconds for the process to end, then just kill it. - :param int sig: signal used to stop process run by executor. + :param int stop_signal: signal used to stop process run by executor. None for default. - :returns: itself + :param int expected_returncode: expected exit code. + None for default - POSIX compatible behaviour. + :returns: self :rtype: SimpleExecutor .. note:: When gathering coverage for the subprocess in tests, you have to allow subprocesses to end gracefully. """ if self.process is None: return self - if sig is None: - sig = self._sig_stop + if stop_signal is None: + stop_signal = self._stop_signal try: - os.killpg(self.process.pid, sig) + os.killpg(self.process.pid, stop_signal) except OSError as err: if err.errno in IGNORED_ERROR_CODES: pass else: raise def process_stopped() -> bool: """Return True only only when self.process is not running.""" return self.running() is False self._set_timeout() try: self.wait_for(process_stopped) except TimeoutExpired: # at this moment, process got killed, pass - self._kill_all_kids(sig) + if self.process is None: + # the process has already been force killed and cleaned up by the + # `wait_for` above. + return self # type: ignore[unreachable] + self._kill_all_kids(stop_signal) + exit_code = self.process.wait() self._clear_process() + + if expected_returncode is None: + expected_returncode = self._expected_returncode + if expected_returncode is None: + # Assume a POSIX approach where sending a SIGNAL means + # that the process should exist with -SIGNAL exit code. 
+ # https://docs.python.org/3/library/subprocess.html#subprocess.Popen.returncode + expected_returncode = -stop_signal + + if exit_code and exit_code != expected_returncode: + raise ProcessFinishedWithError(self, exit_code) + return self @contextmanager def stopped(self: SimpleExecutorType) -> Iterator[SimpleExecutorType]: """ Stop process for given context and starts it afterwards. Allows for easier writing resistance integration tests whenever one of the service fails. :yields: itself :rtype: SimpleExecutor """ if self.running(): self.stop() yield self self.start() def kill( - self: SimpleExecutorType, - wait: bool = True, - sig: Optional[int] = None) -> SimpleExecutorType: + self: SimpleExecutorType, wait: bool = True, sig: Optional[int] = None + ) -> SimpleExecutorType: """ Kill the process if running. :param bool wait: set to `True` to wait for the process to end, or False, to simply proceed after sending signal. :param int sig: signal used to kill process run by the executor. None by default. :returns: itself :rtype: SimpleExecutor """ if sig is None: - sig = self._sig_kill + sig = self._kill_signal if self.process and self.running(): os.killpg(self.process.pid, sig) if wait: self.process.wait() self._kill_all_kids(sig) self._clear_process() return self def output(self) -> Optional[IO[Any]]: """Return subprocess output.""" if self.process is not None: return self.process.stdout return None # pragma: no cover def err_output(self) -> Optional[IO[Any]]: """Return subprocess stderr.""" if self.process is not None: return self.process.stderr return None # pragma: no cover def wait_for( - self: SimpleExecutorType, - wait_for: Callable[[], bool]) -> SimpleExecutorType: + self: SimpleExecutorType, wait_for: Callable[[], bool] + ) -> SimpleExecutorType: """ Wait for callback to return True. Simply returns if wait_for condition has been met, raises TimeoutExpired otherwise and kills the process. :param callback wait_for: callback to call :raises: mirakuru.exceptions.TimeoutExpired :returns: itself :rtype: SimpleExecutor """ while self.check_timeout(): if wait_for(): return self time.sleep(self._sleep) self.kill() raise TimeoutExpired(self, timeout=self._timeout) def check_timeout(self) -> bool: """ Check if timeout has expired. Returns True if there is no timeout set or the timeout has not expired. Kills the process and raises TimeoutExpired exception otherwise. This method should be used in while loops waiting for some data. :return: True if timeout expired, False if not :rtype: bool """ return self._endtime is None or time.time() <= self._endtime def __del__(self) -> None: """Cleanup subprocesses created during Executor lifetime.""" try: if self.process: self.kill() except Exception: # pragma: no cover print("*" * 80) - print("Exception while deleting Executor. '" - "It is strongly suggested that you use") + print( + "Exception while deleting Executor. '" + "It is strongly suggested that you use" + ) print("it as a context manager instead.") print("*" * 80) raise def __repr__(self) -> str: """Return unambiguous executor representation.""" command = self.command if len(command) > 10: - command = command[:10] + '...' + command = command[:10] + "..." 
module = self.__class__.__module__ executor = self.__class__.__name__ return f'<{module}.{executor}: "{command}" {hex(id(self))}>' def __str__(self) -> str: """Return readable executor representation.""" module = self.__class__.__module__ executor = self.__class__.__name__ return f'<{module}.{executor}: "{self.command}" {hex(id(self))}>' class Executor(SimpleExecutor): """Base class for executors with a pre- and after-start checks.""" def pre_start_check(self) -> bool: """ Check process before the start of executor. Should be overridden in order to return True when some other executor (or process) has already started with the same configuration. :rtype: bool """ raise NotImplementedError def start(self: ExecutorType) -> ExecutorType: """ Start executor with additional checks. Checks if previous executor isn't running then start process (executor) and wait until it's started. :returns: itself :rtype: Executor """ if self.pre_start_check(): # Some other executor (or process) is running with same config: raise AlreadyRunning(self) - super(Executor, self).start() + super().start() self.wait_for(self.check_subprocess) return self def check_subprocess(self) -> bool: """ Make sure the process didn't exit with an error and run the checks. :rtype: bool :return: the actual check status or False before starting the process :raise ProcessExitedWithError: when the main process exits with an error """ if self.process is None: # pragma: no cover # No process was started. return False exit_code = self.process.poll() if exit_code is not None and exit_code != 0: # The main process exited with an error. Clean up the children # if any. - self._kill_all_kids(self._sig_kill) + self._kill_all_kids(self._kill_signal) self._clear_process() raise ProcessExitedWithError(self, exit_code) return self.after_start_check() def after_start_check(self) -> bool: """ Check process after the start of executor. Should be overridden in order to return boolean value if executor can be treated as started. :rtype: bool """ raise NotImplementedError diff --git a/src/mirakuru/base_env.py b/src/mirakuru/base_env.py index fe265a3..adb32c5 100644 --- a/src/mirakuru/base_env.py +++ b/src/mirakuru/base_env.py @@ -1,115 +1,116 @@ # Copyright (C) 2016 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . 
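The base.py changes above rename ``sig_stop``/``sig_kill`` to ``stop_signal``/``kill_signal`` and add an ``expected_returncode`` argument to the constructor and to ``stop()``. A minimal usage sketch against that updated API, assuming a POSIX environment; the ``sleep`` command and the SIGINT choice are illustrative only, not part of this changeset::

    import signal

    from mirakuru import SimpleExecutor

    # Stop the process with SIGINT instead of the default SIGTERM. With
    # expected_returncode left at its default (None), stop() follows the
    # POSIX convention and expects the process to exit with -SIGINT.
    executor = SimpleExecutor("sleep 300", stop_signal=signal.SIGINT)
    with executor:
        assert executor.running()
    # Leaving the context manager calls stop(); an unexpected non-zero
    # exit code would raise mirakuru.exceptions.ProcessFinishedWithError.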
"""Module contains functions used for finding process descendants.""" import errno import logging import re import subprocess -from typing import Set +from typing import Set, List try: import psutil except ImportError: psutil = None -log = logging.getLogger(__name__) # pylint: disable=invalid-name +LOG = logging.getLogger(__name__) -PS_XE_PID_MATCH = re.compile(r'^.*?(\d+).+$') +PS_XE_PID_MATCH = re.compile(r"^.*?(\d+).+$") """_sre.SRE_Pattern matching PIDs in result from `$ ps xe -o pid,cmd`.""" def processes_with_env_psutil(env_name: str, env_value: str) -> Set[int]: """ Find PIDs of processes having environment variable matching given one. Internally it uses `psutil` library. :param str env_name: name of environment variable to be found :param str env_value: environment variable value prefix :return: process identifiers (PIDs) of processes that have certain environment variable equal certain value :rtype: set """ pids = set() for proc in psutil.process_iter(): try: - pinfo = proc.as_dict(attrs=['pid', 'environ']) + pinfo = proc.as_dict(attrs=["pid", "environ"]) except (psutil.NoSuchProcess, IOError): # can't do much if psutil is not able to get this process details pass else: - penv = pinfo.get('environ') - if penv and env_value in penv.get(env_name, ''): - pids.add(pinfo['pid']) + penv = pinfo.get("environ") + if penv and env_value in penv.get(env_name, ""): + pids.add(pinfo["pid"]) return pids def processes_with_env_ps(env_name: str, env_value: str) -> Set[int]: """ Find PIDs of processes having environment variable matching given one. It uses `$ ps xe -o pid,cmd` command so it works only on systems having such command available (Linux, MacOS). If not available function - will just log error. + will just LOG error. :param str env_name: name of environment variable to be found :param str env_value: environment variable value prefix :return: process identifiers (PIDs) of processes that have certain environment variable equal certain value :rtype: set """ pids: Set[int] = set() - ps_xe = '' + ps_xe: List[bytes] = [] try: - cmd = 'ps', 'xe', '-o', 'pid,cmd' + cmd = "ps", "xe", "-o", "pid,cmd" ps_xe = subprocess.check_output(cmd).splitlines() except OSError as err: if err.errno == errno.ENOENT: - log.error("`$ ps xe -o pid,cmd` command was called but it is not " - "available on this operating system. Mirakuru will not " - "be able to list the process tree and find if there are " - "any leftovers of the Executor.") + LOG.error( + "`$ ps xe -o pid,cmd` command was called but it is not " + "available on this operating system. Mirakuru will not " + "be able to list the process tree and find if there are " + "any leftovers of the Executor." + ) return pids except subprocess.CalledProcessError: - log.error("`$ ps xe -o pid,cmd` command exited with non-zero code.") + LOG.error("`$ ps xe -o pid,cmd` command exited with non-zero code.") - env = f'{env_name}={env_value}' + env = f"{env_name}={env_value}" for line in ps_xe: - line = str(line) - if env in line: - match = PS_XE_PID_MATCH.match(line) + sline = str(line) + if env in sline: + match = PS_XE_PID_MATCH.match(sline) # This always matches: all lines other than the header (not # containing our environment variable) have a PID required by the # reggex. Still check it for mypy. 
if match: pids.add(int(match.group(1))) return pids -# pylint: disable=invalid-name if psutil: processes_with_env = processes_with_env_psutil else: # In case psutil can't be imported (on pypy3) we try to use '$ ps xe' processes_with_env = processes_with_env_ps diff --git a/src/mirakuru/compat.py b/src/mirakuru/compat.py index 62cd7c6..1ad90e0 100644 --- a/src/mirakuru/compat.py +++ b/src/mirakuru/compat.py @@ -1,27 +1,25 @@ # Copyright (C) 2014 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """Mirakuru compatibility module.""" import signal # Windows does not have SIGKILL, fall back to SIGTERM. -SIGKILL = getattr(signal, 'SIGKILL', signal.SIGTERM) +SIGKILL = getattr(signal, "SIGKILL", signal.SIGTERM) -__all__ = ( - 'SIGKILL', -) +__all__ = ("SIGKILL",) diff --git a/src/mirakuru/exceptions.py b/src/mirakuru/exceptions.py index 58d627b..c5e870c 100644 --- a/src/mirakuru/exceptions.py +++ b/src/mirakuru/exceptions.py @@ -1,103 +1,119 @@ """Mirakuru exceptions.""" from typing import Union, TYPE_CHECKING if TYPE_CHECKING: # pragma: no cover from mirakuru.base import SimpleExecutor # pylint:disable=cyclic-import class ExecutorError(Exception): """Base exception for executor failures.""" def __init__(self, executor: "SimpleExecutor") -> None: """ Exception initialization. :param mirakuru.base.SimpleExecutor executor: for which exception occurred """ - super(ExecutorError, self).__init__(self) + super().__init__(self) self.executor = executor class TimeoutExpired(ExecutorError): """Is raised when the timeout expires while starting an executor.""" - def __init__(self, - executor: "SimpleExecutor", - timeout: Union[int, float]) -> None: + def __init__( + self, executor: "SimpleExecutor", timeout: Union[int, float] + ) -> None: """ Exception initialization with an extra ``timeout`` argument. :param mirakuru.base.SimpleExecutor executor: for which exception occurred :param int timeout: timeout for which exception occurred """ - super(TimeoutExpired, self).__init__(executor) + super().__init__(executor) self.timeout = timeout def __str__(self) -> str: """ Return Exception's string representation. :returns: string representation :rtype: str """ return ( - f'Executor {self.executor} timed out after {self.timeout} seconds' + f"Executor {self.executor} timed out after {self.timeout} seconds" ) class AlreadyRunning(ExecutorError): """ Is raised when the executor seems to be already running. When some other process (not necessary executor) seems to be started with same configuration we can't bind to same port. """ def __str__(self) -> str: """ Return Exception's string representation. :returns: string representation :rtype: str """ - port = getattr(self.executor, 'port') - return (f"Executor {self.executor} seems to be already running. " - f"It looks like the previous executor process hasn't been " - f"terminated or killed." 
- + ("" if port is None else - f" Also there might be some completely " - f"different service listening on {port} port.")) + port = getattr(self.executor, "port") + return ( + f"Executor {self.executor} seems to be already running. " + f"It looks like the previous executor process hasn't been " + f"terminated or killed." + + ( + "" + if port is None + else f" Also there might be some completely " + f"different service listening on {port} port." + ) + ) class ProcessExitedWithError(ExecutorError): """ Raised when the process invoked by the executor returns a non-zero code. We allow the process to exit with zero because we support daemonizing subprocesses. We assume that when double-forking, the parent process will exit with 0 in case of successful daemonization. """ def __init__(self, executor: "SimpleExecutor", exit_code: int) -> None: """ Exception initialization with an extra ``exit_code`` argument. :param mirakuru.base.SimpleExecutor executor: for which exception occurred :param int exit_code: code the subprocess exited with """ - super(ProcessExitedWithError, self).__init__(executor) + super().__init__(executor) self.exit_code = exit_code def __str__(self) -> str: """ Return Exception's string representation. :returns: string representation :rtype: str """ - return (f"The process invoked by the {self.executor} executor has " - f"exited with a non-zero code: {self.exit_code}.") + return ( + f"The process invoked by the {self.executor} executor has " + f"exited with a non-zero code: {self.exit_code}." + ) + + +class ProcessFinishedWithError(ProcessExitedWithError): + """ + Raised when the process invoked by the executor fails when stopping. + + When a process is stopped, it should shut down cleanly and return zero as + exit code. When is returns a non-zero exit code, this exception is raised. + """ diff --git a/src/mirakuru/http.py b/src/mirakuru/http.py index 35207c9..bf44a84 100644 --- a/src/mirakuru/http.py +++ b/src/mirakuru/http.py @@ -1,111 +1,120 @@ # Copyright (C) 2014 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . 
"""HTTP enabled process executor.""" import re import socket +from logging import getLogger from urllib.parse import urlparse, urlencode from http.client import HTTPConnection, HTTPException from typing import Union, List, Tuple, Optional, Dict, Any from mirakuru.tcp import TCPExecutor +LOG = getLogger(__name__) + class HTTPExecutor(TCPExecutor): """Http enabled process executor.""" DEFAULT_PORT = 80 """Default TCP port for the HTTP protocol.""" def __init__( - self, - command: Union[str, List[str], Tuple[str, ...]], - url: str, - status: str = r'^2\d\d$', - method: str = 'HEAD', - payload: Optional[Dict[str, str]] = None, - headers: Optional[Dict[str, str]] = None, - **kwargs: Any + self, + command: Union[str, List[str], Tuple[str, ...]], + url: str, + status: Union[str, int] = r"^2\d\d$", + method: str = "HEAD", + payload: Optional[Dict[str, str]] = None, + headers: Optional[Dict[str, str]] = None, + **kwargs: Any, ) -> None: """ Initialize HTTPExecutor executor. :param (str, list) command: command to be run by the subprocess :param str url: URL that executor checks to verify if process has already started. :param bool shell: same as the `subprocess.Popen` shell definition :param str|int status: HTTP status code(s) that an endpoint must return for the executor being considered as running. This argument is interpreted as a single status code - e.g. '200' or '404' but also it can be a regular expression - e.g. '4..' or '(200|404)'. Default: any 2XX HTTP status code. :param str method: request method to check status on. Defaults to HEAD. :param dict payload: Payload to send along the request :param dict headers: :param int timeout: number of seconds to wait for the process to start or stop. If None or False, wait indefinitely. :param float sleep: how often to check for start/stop condition :param int sig_stop: signal used to stop process run by the executor. default is `signal.SIGTERM` :param int sig_kill: signal used to kill process run by the executor. default is `signal.SIGKILL` """ self.url = urlparse(url) """ An :func:`urlparse.urlparse` representation of an url. It'll be used to check process status on. 
""" + if not self.url.hostname: + raise ValueError("Url provided does not contain hostname") + port = self.url.port if port is None: port = self.DEFAULT_PORT self.status = str(status) self.status_re = re.compile(str(status)) self.method = method self.payload = payload self.headers = headers - super(HTTPExecutor, self).__init__( - command, host=self.url.hostname, port=port, **kwargs - ) + super().__init__(command, host=self.url.hostname, port=port, **kwargs) def after_start_check(self) -> bool: - """Check if defined URL returns expected status to a HEAD request.""" + """Check if defined URL returns expected status to a check request.""" + conn = HTTPConnection(self.host, self.port) try: - conn = HTTPConnection(self.host, self.port) body = urlencode(self.payload) if self.payload else None headers = self.headers if self.headers else {} conn.request( self.method, self.url.path, body, headers, ) - status = str(conn.getresponse().status) + try: + status = str(conn.getresponse().status) + finally: + conn.close() if status == self.status or self.status_re.match(status): - conn.close() return True return False - except (HTTPException, socket.timeout, socket.error): + except (HTTPException, socket.timeout, socket.error) as ex: + LOG.debug( + "Encounter %s while trying to check if service has started.", ex + ) return False diff --git a/src/mirakuru/output.py b/src/mirakuru/output.py index 22c4d28..9cc94aa 100644 --- a/src/mirakuru/output.py +++ b/src/mirakuru/output.py @@ -1,154 +1,157 @@ # Copyright (C) 2014 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """Executor that awaits for appearance of a predefined banner in output.""" import platform import re import select from typing import Union, List, Any, TypeVar, Tuple, IO, Optional from mirakuru.base import SimpleExecutor -IS_DARWIN = platform.system() == 'Darwin' +IS_DARWIN = platform.system() == "Darwin" OutputExecutorType = TypeVar("OutputExecutorType", bound="OutputExecutor") class OutputExecutor(SimpleExecutor): """Executor that awaits for string output being present in output.""" - def __init__(self, - command: Union[str, List[str], Tuple[str, ...]], - banner: str, - **kwargs: Any) -> None: + def __init__( + self, + command: Union[str, List[str], Tuple[str, ...]], + banner: str, + **kwargs: Any, + ) -> None: """ Initialize OutputExecutor executor. :param (str, list) command: command to be run by the subprocess :param str banner: string that has to appear in process output - should compile to regular expression. :param bool shell: same as the `subprocess.Popen` shell definition :param int timeout: number of seconds to wait for the process to start or stop. If None or False, wait indefinitely. :param float sleep: how often to check for start/stop condition :param int sig_stop: signal used to stop process run by the executor. 
default is `signal.SIGTERM` :param int sig_kill: signal used to kill process run by the executor. default is `signal.SIGKILL` (`signal.SIGTERM` on Windows) """ - super(OutputExecutor, self).__init__(command, **kwargs) + super().__init__(command, **kwargs) self._banner = re.compile(banner) if not any((self._stdout, self._stderr)): raise TypeError( - 'At least one of stdout or stderr has to be initialized' + "At least one of stdout or stderr has to be initialized" ) def start(self: OutputExecutorType) -> OutputExecutorType: """ Start process. :returns: itself :rtype: OutputExecutor .. note:: Process will be considered started, when defined banner will appear in process output. """ - super(OutputExecutor, self).start() + super().start() if not IS_DARWIN: polls: List[Tuple[select.poll, IO[Any]]] = [] for output_handle, output_method in ( - (self._stdout, self.output), - (self._stderr, self.err_output) + (self._stdout, self.output), + (self._stderr, self.err_output), ): if output_handle is not None: # get a polling object std_poll = select.poll() output_file = output_method() if output_file is None: raise ValueError( "The process is started but " "the output file is None" ) # register a file descriptor # POLLIN because we will wait for data to read std_poll.register(output_file, select.POLLIN) polls.append((std_poll, output_file)) try: + def await_for_output() -> bool: return self._wait_for_output(*polls) self.wait_for(await_for_output) for poll, output in polls: # unregister the file descriptor # and delete the polling object poll.unregister(output) finally: for poll_and_output in polls: del poll_and_output else: outputs = [] for output_handle, output_method in ( - (self._stdout, self.output), - (self._stderr, self.err_output) + (self._stdout, self.output), + (self._stderr, self.err_output), ): if output_handle is not None: outputs.append(output_method()) def await_for_output() -> bool: return self._wait_for_darwin_output(*outputs) self.wait_for(await_for_output) return self def _wait_for_darwin_output(self, *fds: Optional[IO[Any]]) -> bool: """Select implementation to be used on MacOSX""" rlist, _, _ = select.select(fds, [], [], 0) for output in rlist: line = output.readline() if self._banner.match(line): return True return False - def _wait_for_output(self, *polls: Tuple[select.poll, IO[Any]]) -> bool: + def _wait_for_output(self, *polls: Tuple["select.poll", IO[Any]]) -> bool: """ Check if output matches banner. .. warning:: Waiting for I/O completion. It does not work on Windows. Sorry. """ for poll, output in polls: # Here we should get an empty list or list with a tuple # [(fd, event)]. When we get list with a tuple we can use readline # method on the file descriptor. poll_result = poll.poll(0) if poll_result: line = output.readline() if self._banner.match(line): return True return False diff --git a/src/mirakuru/pid.py b/src/mirakuru/pid.py index 855f580..e2b32ab 100644 --- a/src/mirakuru/pid.py +++ b/src/mirakuru/pid.py @@ -1,84 +1,86 @@ # Copyright (C) 2014 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """Pid executor definition.""" import os.path from typing import Union, List, Tuple, Any from mirakuru.base import Executor class PidExecutor(Executor): """ File existence checking process executor. Used to start processes that create pid files (or any other for that matter). Starts the given process and waits for the given file to be created. """ - def __init__(self, - command: Union[str, List[str], Tuple[str, ...]], - filename: str, - **kwargs: Any) -> None: + def __init__( + self, + command: Union[str, List[str], Tuple[str, ...]], + filename: str, + **kwargs: Any, + ) -> None: """ Initialize the PidExecutor executor. If the filename is empty, a ValueError is thrown. :param (str, list) command: command to be run by the subprocess :param str filename: the file which is to exist :param bool shell: same as the `subprocess.Popen` shell definition :param int timeout: number of seconds to wait for the process to start or stop. If None or False, wait indefinitely. :param float sleep: how often to check for start/stop condition :param int sig_stop: signal used to stop process run by the executor. default is `signal.SIGTERM` :param int sig_kill: signal used to kill process run by the executor. default is `signal.SIGKILL` (`signal.SIGTERM` on Windows) :raises: ValueError """ - super(PidExecutor, self).__init__(command, **kwargs) + super().__init__(command, **kwargs) if not filename: raise ValueError("filename must be defined") self.filename = filename """the name of the file which the process is to create.""" def pre_start_check(self) -> bool: """ Check if the specified file has been created. .. note:: The process will be considered started when it will have created the specified file as defined in the initializer. """ return os.path.isfile(self.filename) def after_start_check(self) -> bool: """ Check if the process has created the specified file. .. note:: The process will be considered started when it will have created the specified file as defined in the initializer. """ return self.pre_start_check() # we can reuse logic from `pre_start()` diff --git a/src/mirakuru/tcp.py b/src/mirakuru/tcp.py index 8b668bd..a126582 100644 --- a/src/mirakuru/tcp.py +++ b/src/mirakuru/tcp.py @@ -1,89 +1,91 @@ # Copyright (C) 2014 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """TCP executor definition.""" import socket from typing import Union, List, Tuple, Any from mirakuru.base import Executor class TCPExecutor(Executor): """ TCP-listening process executor. Used to start (and wait to actually be running) processes that can accept TCP connections. 
""" - def __init__(self, - command: Union[str, List[str], Tuple[str, ...]], - host: str, - port: int, - **kwargs: Any) -> None: + def __init__( + self, + command: Union[str, List[str], Tuple[str, ...]], + host: str, + port: int, + **kwargs: Any, + ) -> None: """ Initialize TCPExecutor executor. :param (str, list) command: command to be run by the subprocess :param str host: host under which process is accessible :param int port: port under which process is accessible :param bool shell: same as the `subprocess.Popen` shell definition :param int timeout: number of seconds to wait for the process to start or stop. If None or False, wait indefinitely. :param float sleep: how often to check for start/stop condition :param int sig_stop: signal used to stop process run by the executor. default is `signal.SIGTERM` :param int sig_kill: signal used to kill process run by the executor. default is `signal.SIGKILL` (`signal.SIGTERM` on Windows) """ - super(TCPExecutor, self).__init__(command, **kwargs) + super().__init__(command, **kwargs) self.host = host """Host name, process is listening on.""" self.port = port """Port number, process is listening on.""" def pre_start_check(self) -> bool: """ Check if process accepts connections. .. note:: Process will be considered started, when it'll be able to accept TCP connections as defined in initializer. """ try: sock = socket.socket() sock.connect((self.host, self.port)) return True except (socket.error, socket.timeout): return False finally: # close socket manually for sake of PyPy sock.close() def after_start_check(self) -> bool: """ Check if process accepts connections. .. note:: Process will be considered started, when it'll be able to accept TCP connections as defined in initializer. """ return self.pre_start_check() # we can reuse logic from `pre_start()` diff --git a/src/mirakuru/unixsocket.py b/src/mirakuru/unixsocket.py index d33163c..3dd1f9f 100644 --- a/src/mirakuru/unixsocket.py +++ b/src/mirakuru/unixsocket.py @@ -1,88 +1,88 @@ # Copyright (C) 2019 by Clearcode # and associates (see AUTHORS). # This file is part of mirakuru. # mirakuru is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # mirakuru is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public License # along with mirakuru. If not, see . """TCP Socket executor definition.""" import logging import socket from typing import Union, List, Tuple, Any from mirakuru import Executor LOG = logging.getLogger(__name__) class UnixSocketExecutor(Executor): """ Unixsocket listening process executor. Used to start (and wait to actually be running) processes that can accept stream Unix socket connections. """ def __init__( - self, - command: Union[str, List[str], Tuple[str, ...]], - socket_name: str, - **kwargs: Any + self, + command: Union[str, List[str], Tuple[str, ...]], + socket_name: str, + **kwargs: Any, ) -> None: """ Initialize UnixSocketExecutor executor. 
:param (str, list) command: command to be run by the subprocess :param str socket_name: unix socket path :param bool shell: same as the `subprocess.Popen` shell definition :param int timeout: number of seconds to wait for the process to start or stop. If None or False, wait indefinitely. :param float sleep: how often to check for start/stop condition :param int sig_stop: signal used to stop process run by the executor. default is `signal.SIGTERM` :param int sig_kill: signal used to kill process run by the executor. default is `signal.SIGKILL` (`signal.SIGTERM` on Windows) """ super().__init__(command, **kwargs) self.socket = socket_name def pre_start_check(self) -> bool: """ Check if process accepts connections. .. note:: Process will be considered started, when it'll be able to accept Unix Socket connections as defined in initializer. """ exec_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) try: exec_sock.connect(self.socket) return True except socket.error as msg: - LOG.debug('Can not connect to socket: %s', msg) + LOG.debug("Can not connect to socket: %s", msg) return False finally: # close socket manually for sake of PyPy exec_sock.close() def after_start_check(self) -> bool: """ Check if process accepts connections. .. note:: Process will be considered started, when it'll be able to accept Unix Socket connections as defined in initializer. """ return self.pre_start_check() # we can reuse logic from `pre_start()` diff --git a/tests/__init__.py b/tests/__init__.py index 086a51d..6105b41 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,27 +1,27 @@ """ Package of tests for mirakuru. Tests are written using py.test framework which dictates patterns that should be followed in test cases. """ import sys from os import path from subprocess import check_output TEST_PATH = path.abspath(path.dirname(__file__)) TEST_SERVER_PATH = path.join(TEST_PATH, "server_for_tests.py") -TEST_SOCKET_SERVER_PATH = path.join(TEST_PATH, 'unixsocketserver_for_tests.py') +TEST_SOCKET_SERVER_PATH = path.join(TEST_PATH, "unixsocketserver_for_tests.py") SAMPLE_DAEMON_PATH = path.join(TEST_PATH, "sample_daemon.py") HTTP_SERVER_CMD = f"{sys.executable} -m http.server" -def ps_aux(): +def ps_aux() -> str: """ Return output of systems `ps aux -w` call. 
:rtype str """ - return str(check_output(('ps', 'aux', '-w'))) + return str(check_output(("ps", "aux", "-w"))) diff --git a/tests/executors/test_executor.py b/tests/executors/test_executor.py index 8500e4b..c049124 100644 --- a/tests/executors/test_executor.py +++ b/tests/executors/test_executor.py @@ -1,242 +1,268 @@ # mypy: no-strict-optional """Test basic executor functionality.""" import gc import shlex import signal from subprocess import check_output import uuid from unittest import mock +from typing import Union, List import pytest from mirakuru import Executor from mirakuru.base import SimpleExecutor from mirakuru.exceptions import ProcessExitedWithError, TimeoutExpired from tests import SAMPLE_DAEMON_PATH, ps_aux +from tests.retry import retry -SLEEP_300 = 'sleep 300' +SLEEP_300 = "sleep 300" -@pytest.mark.parametrize('command', (SLEEP_300, SLEEP_300.split())) -def test_running_process(command): +@pytest.mark.parametrize("command", (SLEEP_300, SLEEP_300.split())) +def test_running_process(command: Union[str, List[str]]) -> None: """Start process and shuts it down.""" executor = SimpleExecutor(command) executor.start() assert executor.running() is True executor.stop() assert executor.running() is False # check proper __str__ and __repr__ rendering: - assert 'SimpleExecutor' in repr(executor) + assert "SimpleExecutor" in repr(executor) assert SLEEP_300 in str(executor) -@pytest.mark.parametrize('command', (SLEEP_300, SLEEP_300.split())) -def test_command(command): +@pytest.mark.parametrize("command", (SLEEP_300, SLEEP_300.split())) +def test_command(command: Union[str, List[str]]) -> None: """Check that the command and command parts are equivalent.""" executor = SimpleExecutor(command) assert executor.command == SLEEP_300 assert executor.command_parts == SLEEP_300.split() -def test_custom_signal_stop(): +def test_custom_signal_stop() -> None: """Start process and shuts it down using signal SIGQUIT.""" - executor = SimpleExecutor(SLEEP_300, sig_stop=signal.SIGQUIT) + executor = SimpleExecutor(SLEEP_300, stop_signal=signal.SIGQUIT) executor.start() assert executor.running() is True executor.stop() assert executor.running() is False -def test_stop_custom_signal_stop(): +def test_stop_custom_signal_stop() -> None: """Start process and shuts it down using signal SIGQUIT passed to stop.""" executor = SimpleExecutor(SLEEP_300) executor.start() assert executor.running() is True - executor.stop(sig=signal.SIGQUIT) + executor.stop(stop_signal=signal.SIGQUIT) assert executor.running() is False -def test_running_context(): +def test_stop_custom_exit_signal_stop() -> None: + """Start process and expect it to finish with custom signal.""" + executor = SimpleExecutor("false", shell=True) + executor.start() + # false exits instant, so there should not be a process to stop + retry( + lambda: executor.stop( + stop_signal=signal.SIGQUIT, expected_returncode=-3 + ) + ) + assert executor.running() is False + + +def test_stop_custom_exit_signal_context() -> None: + """Start process and expect custom exit signal in context manager.""" + with SimpleExecutor( + "false", expected_returncode=-3, shell=True + ) as executor: + executor.stop(stop_signal=signal.SIGQUIT) + assert executor.running() is False + + +def test_running_context() -> None: """Start process and shuts it down.""" executor = SimpleExecutor(SLEEP_300) with executor: assert executor.running() is True assert executor.running() is False -def test_executor_in_context_only(): +def test_executor_in_context_only() -> None: """Start process and shuts 
it down only in context.""" with SimpleExecutor(SLEEP_300) as executor: assert executor.running() is True -def test_context_stopped(): +def test_context_stopped() -> None: """Start for context, and shuts it for nested context.""" executor = SimpleExecutor(SLEEP_300) with executor: assert executor.running() is True with executor.stopped(): assert executor.running() is False assert executor.running() is True assert executor.running() is False ECHO_FOOBAR = 'echo "foobar"' -@pytest.mark.parametrize('command', (ECHO_FOOBAR, shlex.split(ECHO_FOOBAR))) -def test_process_output(command): +@pytest.mark.parametrize("command", (ECHO_FOOBAR, shlex.split(ECHO_FOOBAR))) +def test_process_output(command: Union[str, List[str]]) -> None: """Start process, check output and shut it down.""" executor = SimpleExecutor(command) executor.start() - assert executor.output().read() == 'foobar\n' + assert executor.output().read() == "foobar\n" executor.stop() -@pytest.mark.parametrize('command', (ECHO_FOOBAR, shlex.split(ECHO_FOOBAR))) -def test_process_output_shell(command): +@pytest.mark.parametrize("command", (ECHO_FOOBAR, shlex.split(ECHO_FOOBAR))) +def test_process_output_shell(command: Union[str, List[str]]) -> None: """Start process, check output and shut it down with shell set to True.""" executor = SimpleExecutor(command, shell=True) executor.start() - assert executor.output().read().strip() == 'foobar' + assert executor.output().read().strip() == "foobar" executor.stop() -def test_start_check_executor(): +def test_start_check_executor() -> None: """Validate Executor base class having NotImplemented methods.""" executor = Executor(SLEEP_300) with pytest.raises(NotImplementedError): executor.pre_start_check() with pytest.raises(NotImplementedError): executor.after_start_check() -def test_stopping_not_yet_running_executor(): +def test_stopping_not_yet_running_executor() -> None: """ Test if SimpleExecutor can be stopped even it was never running. We must make sure that it's possible to call .stop() and SimpleExecutor will not raise any exception and .start() can be called afterwards. """ executor = SimpleExecutor(SLEEP_300) executor.stop() executor.start() assert executor.running() is True executor.stop() -def test_forgotten_stop(): +def test_forgotten_stop() -> None: """ Test if SimpleExecutor subprocess is killed after an instance is deleted. Existence can end because of context scope end or by calling 'del'. If someone forgot to stop() or kill() subprocess it should be killed by default on instance cleanup. """ mark = str(uuid.uuid1()) # We cannot simply do `sleep 300 #` in a shell because in that # case bash (default shell on some systems) does `execve` without cloning # itself - that means there will be no process with commandline like: # '/bin/sh -c sleep 300 && true #' - instead that process would # get substituted with 'sleep 300' and the marked commandline would be # overwritten. # Injecting some flow control (`&&`) forces bash to fork properly. - marked_command = f'sleep 300 && true #{mark!s}' + marked_command = f"sleep 300 && true #{mark!s}" executor = SimpleExecutor(marked_command, shell=True) executor.start() assert executor.running() is True assert mark in ps_aux(), "The test process should be running." del executor gc.collect() # to force 'del' immediate effect - assert mark not in ps_aux(), \ - "The test process should not be running at this point." + assert ( + mark not in ps_aux() + ), "The test process should not be running at this point." 
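# --- Editor's note: illustrative sketch, not part of the patch above. ---
# The tests above exercise the keyword arguments renamed/introduced in this
# release: `stop_signal`/`kill_signal` (formerly `sig_stop`/`sig_kill`) and
# context-manager usage of SimpleExecutor. A minimal, stand-alone usage
# example of that API might look as follows; it is the editor's assumption
# based on the tests shown in this diff, not code from the repository.
import signal

from mirakuru import SimpleExecutor

# Explicit start/stop with a custom stop signal, mirroring
# test_custom_signal_stop above.
executor = SimpleExecutor("sleep 300", stop_signal=signal.SIGQUIT)
executor.start()
assert executor.running() is True
# Sends SIGQUIT; mirakuru falls back to the kill signal if the process
# ignores it (see test_stopping_brutally later in this diff).
executor.stop()
assert executor.running() is False

# The same executor as a context manager: started on __enter__, stopped and
# cleaned up on __exit__, mirroring test_process_output_shell above.
with SimpleExecutor('echo "foobar"', shell=True) as echo:
    print(echo.output().read().strip())  # -> foobar
# --- end of editor's note ---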
-def test_executor_raises_if_process_exits_with_error(): +def test_executor_raises_if_process_exits_with_error() -> None: """ Test process exit detection. If the process exits with an error while checks are being polled, executor should raise an exception. """ error_code = 12 failing_executor = Executor( - ['bash', '-c', f'exit {error_code!s}'], - timeout=5 + ["bash", "-c", f"exit {error_code!s}"], timeout=5 ) failing_executor.pre_start_check = mock.Mock( # type: ignore - return_value=False) + return_value=False + ) # After-start check will keep returning False to let the process terminate. failing_executor.after_start_check = mock.Mock( # type: ignore - return_value=False) + return_value=False + ) with pytest.raises(ProcessExitedWithError) as exc: failing_executor.start() assert exc.value.exit_code == 12 - error_msg = f'exited with a non-zero code: {error_code!s}' + error_msg = f"exited with a non-zero code: {error_code!s}" assert error_msg in str(exc.value) # Pre-start check should have been called - after-start check might or # might not have been called - depending on the timing. - assert failing_executor.pre_start_check.called is True # type: ignore + assert failing_executor.pre_start_check.called is True -def test_executor_ignores_processes_exiting_with_0(): +def test_executor_ignores_processes_exiting_with_0() -> None: """ Test process exit detection. Subprocess exiting with zero should be tolerated in order to support double-forking applications. """ # We execute a process that will return zero. In order to give the process # enough time to return we keep the polling loop spinning for a second. - executor = Executor(['bash', '-c', 'exit 0'], timeout=1.0) + executor = Executor(["bash", "-c", "exit 0"], timeout=1.0) executor.pre_start_check = mock.Mock(return_value=False) # type: ignore executor.after_start_check = mock.Mock(return_value=False) # type: ignore with pytest.raises(TimeoutExpired): # We keep the post-checks spinning forever so it eventually times out. executor.start() # Both checks should have been called. 
- assert executor.pre_start_check.called is True # type: ignore - assert executor.after_start_check.called is True # type: ignore + assert executor.pre_start_check.called is True + assert executor.after_start_check.called is True -def test_executor_methods_returning_self(): +def test_executor_methods_returning_self() -> None: """Test if SimpleExecutor lets to chain start, stop and kill methods.""" executor = SimpleExecutor(SLEEP_300).start().stop().kill().stop() assert not executor.running() # Check if context manager returns executor to use it in 'as' phrase: with SimpleExecutor(SLEEP_300) as executor: assert executor.running() with SimpleExecutor(SLEEP_300).start().stopped() as executor: assert not executor.running() assert SimpleExecutor(SLEEP_300).start().stop().output -def test_mirakuru_cleanup(): +def test_mirakuru_cleanup() -> None: """Test if cleanup_subprocesses is fired correctly on python exit.""" - cmd = f''' + cmd = f""" python -c 'from mirakuru import SimpleExecutor; from time import sleep; import gc; gc.disable(); ex = SimpleExecutor( ("python", "{SAMPLE_DAEMON_PATH}")).start(); sleep(1); ' - ''' - check_output(shlex.split(cmd.replace('\n', ''))) + """ + check_output(shlex.split(cmd.replace("\n", ""))) assert SAMPLE_DAEMON_PATH not in ps_aux() diff --git a/tests/executors/test_executor_kill.py b/tests/executors/test_executor_kill.py index ce5f42e..ae58616 100644 --- a/tests/executors/test_executor_kill.py +++ b/tests/executors/test_executor_kill.py @@ -1,123 +1,125 @@ # mypy: no-strict-optional """Tests that check various kill behaviours.""" import signal import time import sys +from typing import NoReturn, Set import errno import os from unittest.mock import patch + import pytest from mirakuru import SimpleExecutor, HTTPExecutor from mirakuru.compat import SIGKILL +from mirakuru.exceptions import ProcessFinishedWithError from tests import SAMPLE_DAEMON_PATH, ps_aux, TEST_SERVER_PATH -SLEEP_300 = 'sleep 300' +SLEEP_300 = "sleep 300" -def test_custom_signal_kill(): +def test_custom_signal_kill() -> None: """Start process and shuts it down using signal SIGQUIT.""" - executor = SimpleExecutor(SLEEP_300, sig_kill=signal.SIGQUIT) + executor = SimpleExecutor(SLEEP_300, kill_signal=signal.SIGQUIT) executor.start() assert executor.running() is True executor.kill() assert executor.running() is False -def test_kill_custom_signal_kill(): +def test_kill_custom_signal_kill() -> None: """Start process and shuts it down using signal SIGQUIT passed to kill.""" executor = SimpleExecutor(SLEEP_300) executor.start() assert executor.running() is True executor.kill(sig=signal.SIGQUIT) assert executor.running() is False -def test_already_closed(): +def test_already_closed() -> None: """Check that the executor cleans after itself after it exited earlier.""" - with SimpleExecutor('python') as executor: - assert executor.running() - os.killpg(executor.process.pid, SIGKILL) - - def process_stopped(): - """Return True only only when self.process is not running.""" - return executor.running() is False - executor.wait_for(process_stopped) - assert executor.process + with pytest.raises(ProcessFinishedWithError) as excinfo: + with SimpleExecutor("python") as executor: + assert executor.running() + os.killpg(executor.process.pid, SIGKILL) + + def process_stopped() -> bool: + """Return True only only when self.process is not running.""" + return executor.running() is False + + executor.wait_for(process_stopped) + assert executor.process + assert excinfo.value.exit_code == -9 assert not executor.process 
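# --- Editor's note: illustrative sketch, not part of the patch above. ---
# The exit-detection tests above rely on mirakuru raising
# ProcessExitedWithError (carrying an `exit_code` attribute) when the command
# dies with a non-zero status while the start-up checks are still polling,
# while a clean exit code 0 is tolerated. A hedged, minimal reproduction with
# a concrete executor (assuming `bash` is available on the test machine):
from mirakuru import TCPExecutor
from mirakuru.exceptions import ProcessExitedWithError

failing = TCPExecutor(
    ["bash", "-c", "exit 12"],  # never starts listening, exits with 12
    host="localhost",
    port=3000,
    timeout=5,
)
try:
    failing.start()
except ProcessExitedWithError as error:
    # The non-zero status of the failed command is preserved on the exception.
    print(f"command exited early with code {error.exit_code}")
# --- end of editor's note ---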
-@pytest.mark.xfail( - condition=sys.version_info >= (3, 8), - reason='python-daemon 2.2.3 fails with ' - '; ' - 'unxfail when a newer version is used') -def test_daemons_killing(): +def test_daemons_killing() -> None: """ Test if all subprocesses of SimpleExecutor can be killed. The most problematic subprocesses are daemons or other services that change the process group ID. This test verifies that daemon process is killed after executor's kill(). """ - executor = SimpleExecutor(('python', SAMPLE_DAEMON_PATH), shell=True) + executor = SimpleExecutor(("python", SAMPLE_DAEMON_PATH), shell=True) executor.start() time.sleep(2) - assert executor.running() is not True, \ - "Executor should not have subprocess running as it started a daemon." + assert ( + executor.running() is not True + ), "Executor should not have subprocess running as it started a daemon." assert SAMPLE_DAEMON_PATH in ps_aux() executor.kill() assert SAMPLE_DAEMON_PATH not in ps_aux() -def test_stopping_brutally(): +def test_stopping_brutally() -> None: """ Test if SimpleExecutor is stopping insubordinate process. Check if the process that doesn't react to SIGTERM signal will be killed by executor with SIGKILL automatically. """ host_port = "127.0.0.1:8000" - cmd = f'{sys.executable} {TEST_SERVER_PATH} {host_port} True' - executor = HTTPExecutor(cmd, f'http://{host_port!s}/', timeout=20) + cmd = f"{sys.executable} {TEST_SERVER_PATH} {host_port} True" + executor = HTTPExecutor(cmd, f"http://{host_port!s}/", timeout=20) executor.start() assert executor.running() is True stop_at = time.time() + 10 executor.stop() assert executor.running() is False assert stop_at <= time.time(), "Subprocess killed earlier than in 10 secs" -def test_stopping_children_of_stopped_process(): +def test_stopping_children_of_stopped_process() -> None: """ Check that children exiting between listing and killing are ignored. Given: Executor is running and it's process spawn children, and we requested it's stop, and it's stopped When: At the time of the check for subprocesses they're still active, but before we start killing them, they are already dead. Then: We ignore and skip OsError indicates there's no such process. 
""" # pylint: disable=protected-access, missing-docstring - def raise_os_error(*_, **__): + def raise_os_error(*_: int, **__: int) -> NoReturn: os_error = OSError() os_error.errno = errno.ESRCH raise os_error - def processes_with_env_mock(*_, **__): - return [1] + def processes_with_env_mock(*_: str, **__: str) -> Set[int]: + return {1} with patch( - 'mirakuru.base.processes_with_env', new=processes_with_env_mock - ), patch('os.kill', new=raise_os_error): + "mirakuru.base.processes_with_env", new=processes_with_env_mock + ), patch("os.kill", new=raise_os_error): executor = SimpleExecutor(SLEEP_300) - executor._kill_all_kids(executor._sig_stop) + executor._kill_all_kids(executor._stop_signal) diff --git a/tests/executors/test_http_executor.py b/tests/executors/test_http_executor.py index 06c502f..00c01c3 100644 --- a/tests/executors/test_http_executor.py +++ b/tests/executors/test_http_executor.py @@ -1,220 +1,217 @@ """HTTP Executor tests.""" import sys import socket from functools import partial from http.client import HTTPConnection, OK -from typing import Dict, Any +from typing import Dict, Any, Union from unittest.mock import patch import pytest from mirakuru import HTTPExecutor, TCPExecutor from mirakuru import TimeoutExpired, AlreadyRunning from tests import TEST_SERVER_PATH, HTTP_SERVER_CMD HOST = "127.0.0.1" PORT = 7987 -HTTP_NORMAL_CMD = f'{HTTP_SERVER_CMD} {PORT}' -HTTP_SLOW_CMD = f'{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT}' +HTTP_NORMAL_CMD = f"{HTTP_SERVER_CMD} {PORT}" +HTTP_SLOW_CMD = f"{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT}" slow_server_executor = partial( # pylint: disable=invalid-name HTTPExecutor, HTTP_SLOW_CMD, - f'http://{HOST}:{PORT}/', + f"http://{HOST}:{PORT}/", ) -def connect_to_server(): +def connect_to_server() -> None: """Connect to http server and assert 200 response.""" conn = HTTPConnection(HOST, PORT) - conn.request('GET', '/') + conn.request("GET", "/") assert conn.getresponse().status == OK conn.close() -def test_executor_starts_and_waits(): +def test_executor_starts_and_waits() -> None: """Test if process awaits for HEAD request to be completed.""" command = f'bash -c "sleep 3 && {HTTP_NORMAL_CMD}"' - executor = HTTPExecutor( - command, - f'http://{HOST}:{PORT}/', - timeout=20 - ) + executor = HTTPExecutor(command, f"http://{HOST}:{PORT}/", timeout=20) executor.start() assert executor.running() is True connect_to_server() executor.stop() # check proper __str__ and __repr__ rendering: - assert 'HTTPExecutor' in repr(executor) + assert "HTTPExecutor" in repr(executor) assert command in str(executor) -def test_shell_started_server_stops(): +def test_shell_started_server_stops() -> None: """Test if executor terminates properly executor with shell=True.""" executor = HTTPExecutor( - HTTP_NORMAL_CMD, - f'http://{HOST}:{PORT}/', - timeout=20, - shell=True + HTTP_NORMAL_CMD, f"http://{HOST}:{PORT}/", timeout=20, shell=True ) with pytest.raises(socket.error): connect_to_server() with executor: assert executor.running() is True connect_to_server() assert executor.running() is False with pytest.raises(socket.error): connect_to_server() -@pytest.mark.parametrize('method', ( - 'HEAD', 'GET', 'POST' -)) -def test_slow_method_server_starting(method): +@pytest.mark.parametrize("method", ("HEAD", "GET", "POST")) +def test_slow_method_server_starting(method: str) -> None: """ Test whether or not executor awaits for slow starting servers. Simple example. You run Gunicorn and it is working but you have to wait for worker processes. 
""" http_method_slow_cmd = ( - f'{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT} False {method}' + f"{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT} False {method}" ) with HTTPExecutor( - http_method_slow_cmd, - f'http://{HOST}:{PORT}/', method=method, timeout=30 + http_method_slow_cmd, + f"http://{HOST}:{PORT}/", + method=method, + timeout=30, ) as executor: assert executor.running() is True connect_to_server() -def test_slow_post_payload_server_starting(): +def test_slow_post_payload_server_starting() -> None: """ Test whether or not executor awaits for slow starting servers. Simple example. You run Gunicorn and it is working but you have to wait for worker processes. """ http_method_slow_cmd = ( - f'{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT} False Key' + f"{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT} False Key" ) with HTTPExecutor( - http_method_slow_cmd, - f'http://{HOST}:{PORT}/', - method='POST', - timeout=30, - payload={'key': 'hole'} + http_method_slow_cmd, + f"http://{HOST}:{PORT}/", + method="POST", + timeout=30, + payload={"key": "hole"}, ) as executor: assert executor.running() is True connect_to_server() -@pytest.mark.parametrize('method', ( - 'HEAD', 'GET', 'POST' -)) -def test_slow_method_server_timed_out(method): +@pytest.mark.parametrize("method", ("HEAD", "GET", "POST")) +def test_slow_method_server_timed_out(method: str) -> None: """Check if timeout properly expires.""" http_method_slow_cmd = ( - f'{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT} False {method}' + f"{sys.executable} {TEST_SERVER_PATH} {HOST}:{PORT} False {method}" ) executor = HTTPExecutor( - http_method_slow_cmd, - f'http://{HOST}:{PORT}/', method=method, timeout=1 + http_method_slow_cmd, f"http://{HOST}:{PORT}/", method=method, timeout=1 ) with pytest.raises(TimeoutExpired) as exc: executor.start() assert executor.running() is False - assert 'timed out after' in str(exc.value) + assert "timed out after" in str(exc.value) -def test_fail_if_other_running(): +def test_fail_if_other_running() -> None: """Test raising AlreadyRunning exception when port is blocked.""" executor = HTTPExecutor( - HTTP_NORMAL_CMD, f'http://{HOST}:{PORT}/', + HTTP_NORMAL_CMD, + f"http://{HOST}:{PORT}/", ) executor2 = HTTPExecutor( - HTTP_NORMAL_CMD, f'http://{HOST}:{PORT}/', + HTTP_NORMAL_CMD, + f"http://{HOST}:{PORT}/", ) with executor: assert executor.running() is True with pytest.raises(AlreadyRunning): executor2.start() with pytest.raises(AlreadyRunning) as exc: with executor2: pass - assert 'seems to be already running' in str(exc.value) + assert "seems to be already running" in str(exc.value) -@patch.object(HTTPExecutor, 'DEFAULT_PORT', PORT) -def test_default_port(): +@patch.object(HTTPExecutor, "DEFAULT_PORT", PORT) +def test_default_port() -> None: """ Test default port for the base TCP check. Check if HTTP executor fills in the default port for the TCP check from the base class if no port is provided in the URL. 
""" - executor = HTTPExecutor(HTTP_NORMAL_CMD, f'http://{HOST}/') + executor = HTTPExecutor(HTTP_NORMAL_CMD, f"http://{HOST}/") assert executor.url.port is None assert executor.port == PORT assert TCPExecutor.pre_start_check(executor) is False executor.start() assert TCPExecutor.pre_start_check(executor) is True executor.stop() -@pytest.mark.parametrize('accepted_status, expected_timeout', ( - # default behaviour - only 2XX HTTP status codes are accepted - (None, True), - # one explicit integer status code - (200, True), - # one explicit status code as a string - ('404', False), - # status codes as a regular expression - (r'(2|4)\d\d', False), - # status codes as a regular expression - ('(200|404)', False), -)) -def test_http_status_codes(accepted_status, expected_timeout): +@pytest.mark.parametrize( + "accepted_status, expected_timeout", + ( + # default behaviour - only 2XX HTTP status codes are accepted + (None, True), + # one explicit integer status code + (200, True), + # one explicit status code as a string + ("404", False), + # status codes as a regular expression + (r"(2|4)\d\d", False), + # status codes as a regular expression + ("(200|404)", False), + ), +) +def test_http_status_codes( + accepted_status: Union[None, int, str], expected_timeout: bool +) -> None: """ Test how 'status' argument influences executor start. :param int|str accepted_status: Executor 'status' value :param bool expected_timeout: if Executor raises TimeoutExpired or not """ kwargs: Dict[str, Any] = { - 'command': HTTP_NORMAL_CMD, - 'url': f'http://{HOST}:{PORT}/badpath', - 'timeout': 2 + "command": HTTP_NORMAL_CMD, + "url": f"http://{HOST}:{PORT}/badpath", + "timeout": 2, } if accepted_status: - kwargs['status'] = accepted_status + kwargs["status"] = accepted_status executor = HTTPExecutor(**kwargs) if not expected_timeout: executor.start() executor.stop() else: with pytest.raises(TimeoutExpired): executor.start() executor.stop() diff --git a/tests/executors/test_output_executor.py b/tests/executors/test_output_executor.py index f20d40d..6b3c643 100644 --- a/tests/executors/test_output_executor.py +++ b/tests/executors/test_output_executor.py @@ -1,50 +1,50 @@ # mypy: no-strict-optional """Output executor test.""" import subprocess import pytest from mirakuru import OutputExecutor from mirakuru.exceptions import TimeoutExpired -def test_executor_waits_for_process_output(): +def test_executor_waits_for_process_output() -> None: """Check if executor waits for specified output.""" command = 'bash -c "sleep 2 && echo foo && echo bar && sleep 100"' - executor = OutputExecutor(command, 'foo', timeout=10).start() + executor = OutputExecutor(command, "foo", timeout=10).start() assert executor.running() is True # foo has been used for start as a banner. 
- assert executor.output().readline() == 'bar\n' + assert executor.output().readline() == "bar\n" executor.stop() # check proper __str__ and __repr__ rendering: - assert 'OutputExecutor' in repr(executor) - assert 'foo' in str(executor) + assert "OutputExecutor" in repr(executor) + assert "foo" in str(executor) -def test_executor_waits_for_process_err_output(): +def test_executor_waits_for_process_err_output() -> None: """Check if executor waits for specified error output.""" command = 'bash -c "sleep 2 && >&2 echo foo && >&2 echo bar && sleep 100"' executor = OutputExecutor( - command, 'foo', timeout=10, stdin=None, stderr=subprocess.PIPE + command, "foo", timeout=10, stdin=None, stderr=subprocess.PIPE ).start() assert executor.running() is True # foo has been used for start as a banner. - assert executor.err_output().readline() == 'bar\n' + assert executor.err_output().readline() == "bar\n" executor.stop() # check proper __str__ and __repr__ rendering: - assert 'OutputExecutor' in repr(executor) - assert 'foo' in str(executor) + assert "OutputExecutor" in repr(executor) + assert "foo" in str(executor) -def test_executor_dont_start(): +def test_executor_dont_start() -> None: """Executor should not start.""" command = 'bash -c "sleep 2 && echo foo && echo bar && sleep 100"' - executor = OutputExecutor(command, 'foobar', timeout=3) + executor = OutputExecutor(command, "foobar", timeout=3) with pytest.raises(TimeoutExpired): executor.start() assert executor.running() is False diff --git a/tests/executors/test_pid_executor.py b/tests/executors/test_pid_executor.py index e513f76..69c5240 100644 --- a/tests/executors/test_pid_executor.py +++ b/tests/executors/test_pid_executor.py @@ -1,82 +1,83 @@ """PidExecutor tests.""" import os +from typing import Iterator, Optional import pytest from mirakuru import PidExecutor from mirakuru import TimeoutExpired, AlreadyRunning FILENAME = f"pid-test-tmp{os.getpid()}" SLEEP = f'bash -c "sleep 1 && touch {FILENAME} && sleep 1"' -@pytest.yield_fixture(autouse=True) -def run_around_tests(): +@pytest.fixture(autouse=True) +def run_around_tests() -> Iterator[None]: """ Make sure the **FILENAME** file is not present. This executor actually removes FILENAME as process used to test PidExecutor only creates it. 
""" try: os.remove(FILENAME) except OSError: pass yield try: os.remove(FILENAME) except OSError: pass -def test_start_and_wait(): +def test_start_and_wait() -> None: """Test if the executor will await for the process to create a file.""" process = f'bash -c "sleep 2 && touch {FILENAME} && sleep 10"' with PidExecutor(process, FILENAME, timeout=5) as executor: assert executor.running() is True # check proper __str__ and __repr__ rendering: - assert 'PidExecutor' in repr(executor) + assert "PidExecutor" in repr(executor) assert process in str(executor) -@pytest.mark.parametrize('pid_file', (None, "")) -def test_empty_filename(pid_file): +@pytest.mark.parametrize("pid_file", (None, "")) +def test_empty_filename(pid_file: Optional[str]) -> None: """Check whether an exception is raised if an empty FILENAME is given.""" with pytest.raises(ValueError): - PidExecutor(SLEEP, pid_file) + PidExecutor(SLEEP, pid_file) # type: ignore[arg-type] -def test_if_file_created(): +def test_if_file_created() -> None: """Check whether the process really created the given file.""" assert os.path.isfile(FILENAME) is False executor = PidExecutor(SLEEP, FILENAME) with executor: assert os.path.isfile(FILENAME) is True -def test_timeout_error(): +def test_timeout_error() -> None: """Check if timeout properly expires.""" executor = PidExecutor(SLEEP, FILENAME, timeout=1) with pytest.raises(TimeoutExpired): executor.start() assert executor.running() is False -def test_fail_if_other_executor_running(): +def test_fail_if_other_executor_running() -> None: """Test raising AlreadyRunning exception when port is blocked.""" process = f'bash -c "sleep 2 && touch {FILENAME} && sleep 10"' executor = PidExecutor(process, FILENAME) executor2 = PidExecutor(process, FILENAME) with executor: assert executor.running() is True with pytest.raises(AlreadyRunning): executor2.start() diff --git a/tests/executors/test_tcp_executor.py b/tests/executors/test_tcp_executor.py index 2dd2920..3c07c6f 100644 --- a/tests/executors/test_tcp_executor.py +++ b/tests/executors/test_tcp_executor.py @@ -1,58 +1,66 @@ """ TCPExecutor tests. Some of these tests run ``nc``: when running Debian, make sure the ``netcat-openbsd`` package is used, not ``netcat-traditional``. 
""" +import logging + import pytest +from _pytest.logging import LogCaptureFixture + from mirakuru import TCPExecutor from mirakuru import TimeoutExpired, AlreadyRunning from tests import HTTP_SERVER_CMD PORT = 7986 -HTTP_SERVER = f'{HTTP_SERVER_CMD} {PORT}' +HTTP_SERVER = f"{HTTP_SERVER_CMD} {PORT}" +NC_COMMAND = 'bash -c "sleep 2 && nc -lk 3000"' -def test_start_and_wait(): +def test_start_and_wait(caplog: LogCaptureFixture) -> None: """Test if executor await for process to accept connections.""" - command = 'bash -c "sleep 2 && nc -l 3000"' - executor = TCPExecutor(command, 'localhost', port=3000, timeout=5) + caplog.set_level(logging.DEBUG, logger="mirakuru") + executor = TCPExecutor(NC_COMMAND, "localhost", port=3000, timeout=5) executor.start() - assert executor.running() is True executor.stop() + +def test_repr_and_str() -> None: + """Check the proper str and repr conversion.""" + executor = TCPExecutor(NC_COMMAND, "localhost", port=3000, timeout=5) # check proper __str__ and __repr__ rendering: - assert 'TCPExecutor' in repr(executor) - assert command in str(executor) + assert "TCPExecutor" in repr(executor) + assert NC_COMMAND in str(executor) -def test_it_raises_error_on_timeout(): +def test_it_raises_error_on_timeout() -> None: """Check if TimeoutExpired gets raised correctly.""" - command = 'bash -c "sleep 10 && nc -l 3000"' - executor = TCPExecutor(command, host='localhost', port=3000, timeout=2) + command = 'bash -c "sleep 10 && nc -lk 3000"' + executor = TCPExecutor(command, host="localhost", port=3000, timeout=2) with pytest.raises(TimeoutExpired): executor.start() assert executor.running() is False -def test_fail_if_other_executor_running(): +def test_fail_if_other_executor_running() -> None: """Test raising AlreadyRunning exception.""" - executor = TCPExecutor(HTTP_SERVER, host='localhost', port=PORT) - executor2 = TCPExecutor(HTTP_SERVER, host='localhost', port=PORT) + executor = TCPExecutor(HTTP_SERVER, host="localhost", port=PORT) + executor2 = TCPExecutor(HTTP_SERVER, host="localhost", port=PORT) with executor: assert executor.running() is True with pytest.raises(AlreadyRunning): executor2.start() with pytest.raises(AlreadyRunning): with executor2: pass diff --git a/tests/executors/test_unixsocket_executor.py b/tests/executors/test_unixsocket_executor.py index 8f317e3..61befd5 100644 --- a/tests/executors/test_unixsocket_executor.py +++ b/tests/executors/test_unixsocket_executor.py @@ -1,38 +1,38 @@ """ TCPExecutor tests. Some of these tests run ``nc``: when running Debian, make sure the ``netcat-openbsd`` package is used, not ``netcat-traditional``. 
""" import sys import pytest from mirakuru import TimeoutExpired from mirakuru.unixsocket import UnixSocketExecutor from tests import TEST_SOCKET_SERVER_PATH -SOCKET_PATH = '/tmp/mirakuru.sock' +SOCKET_PATH = "/tmp/mirakuru.sock" SOCKET_SERVER_CMD = f"{sys.executable} {TEST_SOCKET_SERVER_PATH} {SOCKET_PATH}" -def test_start_and_wait(): +def test_start_and_wait() -> None: """Test if executor await for process to accept connections.""" executor = UnixSocketExecutor( SOCKET_SERVER_CMD + " 2", socket_name=SOCKET_PATH, timeout=5 ) with executor: assert executor.running() is True -def test_start_and_timeout(): +def test_start_and_timeout() -> None: """Test if executor will properly times out.""" executor = UnixSocketExecutor( SOCKET_SERVER_CMD + " 10", socket_name=SOCKET_PATH, timeout=5 ) with pytest.raises(TimeoutExpired): executor.start() assert executor.running() is False diff --git a/tests/retry.py b/tests/retry.py new file mode 100644 index 0000000..a8f22f9 --- /dev/null +++ b/tests/retry.py @@ -0,0 +1,34 @@ +"""Small retry callable in case of specific error occured""" + +from datetime import datetime, timedelta +from time import sleep +from typing import TypeVar, Callable, Type + +from mirakuru import ExecutorError + + +T = TypeVar("T") + + +def retry( + func: Callable[[], T], + timeout: int = 60, + possible_exception: Type[Exception] = ExecutorError, +) -> T: + """ + Attempt to retry the function for timeout time. + """ + time: datetime = datetime.utcnow() + timeout_diff: timedelta = timedelta(seconds=timeout) + i = 0 + while True: + i += 1 + try: + res = func() + return res + except possible_exception as e: + if time + timeout_diff < datetime.utcnow(): + raise TimeoutError( + "Failed after {i} attempts".format(i=i) + ) from e + sleep(1) diff --git a/tests/sample_daemon.py b/tests/sample_daemon.py index 0dea205..79e625a 100644 --- a/tests/sample_daemon.py +++ b/tests/sample_daemon.py @@ -1,26 +1,26 @@ """ Daemon sample application for tests purposes. Stopping this process is possible only by the SIGKILL signal. Usage: python tests/sample_daemon.py """ import os import sys import time import daemon sys.path.append(os.getcwd()) # noqa from tests.signals import block_signals # pylint:disable=wrong-import-position with daemon.DaemonContext(initgroups=False): block_signals() while True: - print('Sleeping mirakuru daemon...') + print("Sleeping mirakuru daemon...") time.sleep(1) diff --git a/tests/server_for_tests.py b/tests/server_for_tests.py index 3837210..aee8d27 100644 --- a/tests/server_for_tests.py +++ b/tests/server_for_tests.py @@ -1,145 +1,146 @@ """ HTTP server that responses with delays used for tests. Example usage: python tests/slow_server.py [HOST:PORT] - run HTTP Server, HOST and PORT are optional python tests/slow_server.py [HOST:PORT] True - run IMMORTAL server (stopping process only by SIGKILL) """ import ast import sys import os import time from http.server import HTTPServer, BaseHTTPRequestHandler from urllib.parse import parse_qs sys.path.append(os.getcwd()) # noqa # pylint:disable=wrong-import-position from tests.signals import block_signals + # pylint:enable=wrong-import-position class SlowServerHandler(BaseHTTPRequestHandler): """Slow server handler.""" timeout = 2 endtime = None - def do_GET(self): # pylint:disable=invalid-name + def do_GET(self) -> None: # pylint:disable=invalid-name """Serve GET request.""" self.send_response(200) self.send_header("Content-type", "text/html") self.end_headers() - self.wfile.write(b'Hi. I am very slow.') + self.wfile.write(b"Hi. 
I am very slow.") - def do_HEAD(self): # pylint:disable=invalid-name + def do_HEAD(self) -> None: # pylint:disable=invalid-name """ Serve HEAD request. but count to wait and return 500 response if wait time not exceeded due to the fact that HTTPServer will hang waiting for response to return otherwise if none response will be returned. """ self.timeout_status() self.end_headers() - def timeout_status(self): + def timeout_status(self) -> None: """Set proper response status based on timeout.""" if self.count_timeout(): self.send_response(200) else: self.send_response(500) - def count_timeout(self): # pylint: disable=no-self-use + def count_timeout(self) -> bool: # pylint: disable=no-self-use """Count down the timeout time.""" if SlowServerHandler.endtime is None: SlowServerHandler.endtime = time.time() + SlowServerHandler.timeout return time.time() >= SlowServerHandler.endtime class SlowGetServerHandler(SlowServerHandler): """Responds only on GET after a while.""" - def do_GET(self): # pylint:disable=invalid-name + def do_GET(self) -> None: # pylint:disable=invalid-name "Serve GET request." self.timeout_status() self.send_header("Content-type", "text/html") self.end_headers() - self.wfile.write(b'Hi. I am very slow.') + self.wfile.write(b"Hi. I am very slow.") - def do_HEAD(self): # pylint:disable=invalid-name + def do_HEAD(self) -> None: # pylint:disable=invalid-name "Serve HEAD request." self.send_response(500) self.end_headers() class SlowPostServerHandler(SlowServerHandler): """Responds only on POST after a while.""" - def do_POST(self): # pylint:disable=invalid-name + def do_POST(self) -> None: # pylint:disable=invalid-name "Serve POST request." self.timeout_status() self.end_headers() - self.wfile.write(b'Hi. I am very slow.') + self.wfile.write(b"Hi. I am very slow.") - def do_HEAD(self): # pylint:disable=invalid-name + def do_HEAD(self) -> None: # pylint:disable=invalid-name "Serve HEAD request." self.send_response(500) self.end_headers() class SlowPostKeyServerHandler(SlowServerHandler): """Responds only on POST after a while.""" - def do_POST(self): # pylint:disable=invalid-name + def do_POST(self) -> None: # pylint:disable=invalid-name "Serve POST request." - content_len = int(self.headers.get('Content-Length')) + content_len = int(self.headers.get("Content-Length")) post_body = self.rfile.read(content_len) form = parse_qs(post_body) - if form.get(b'key') == [b'hole']: + if form.get(b"key") == [b"hole"]: self.timeout_status() else: self.send_response(500) self.end_headers() - self.wfile.write(b'Hi. I am very slow.') + self.wfile.write(b"Hi. I am very slow.") - def do_HEAD(self): # pylint:disable=invalid-name + def do_HEAD(self) -> None: # pylint:disable=invalid-name "Serve HEAD request." 
self.send_response(500) self.end_headers() HANDLERS = { - 'HEAD': SlowServerHandler, - 'GET': SlowGetServerHandler, - 'POST': SlowPostServerHandler, - 'Key': SlowPostKeyServerHandler, + "HEAD": SlowServerHandler, + "GET": SlowGetServerHandler, + "POST": SlowPostServerHandler, + "Key": SlowPostKeyServerHandler, } if __name__ == "__main__": - HOST, PORT, IMMORTAL, METHOD = "127.0.0.1", "8000", "False", 'HEAD' + HOST, PORT, IMMORTAL, METHOD = "127.0.0.1", "8000", "False", "HEAD" if len(sys.argv) >= 2: HOST, PORT = sys.argv[1].split(":") if len(sys.argv) >= 3: IMMORTAL = sys.argv[2] if len(sys.argv) == 4: METHOD = sys.argv[3] if ast.literal_eval(IMMORTAL): block_signals() - server = HTTPServer( # pylint: disable=invalid-name + server = HTTPServer( (HOST, int(PORT)), HANDLERS[METHOD] - ) + ) # pylint: disable=invalid-name print(f"Starting slow server on {HOST}:{PORT}...") server.serve_forever() diff --git a/tests/signals.py b/tests/signals.py index 4b2e188..4e4fc19 100644 --- a/tests/signals.py +++ b/tests/signals.py @@ -1,22 +1,24 @@ """Contains `block_signals` function for tests purposes.""" import signal +from typing import Any -def block_signals(): +def block_signals() -> None: """ Catch all of the signals that it is possible. Reject their default behaviour. The process is actually mortal but the only way to kill is to send SIGKILL signal (kill -9). """ - def sighandler(signum, _): + + def sighandler(signum: int, _: Any) -> None: """Signal handling function.""" - print(f'Tried to kill with signal {signum}.') + print(f"Tried to kill with signal {signum}.") for sgn in [x for x in dir(signal) if x.startswith("SIG")]: try: signum = getattr(signal, sgn) signal.signal(signum, sighandler) except (ValueError, RuntimeError, OSError): pass diff --git a/tests/test_base.py b/tests/test_base.py index 7032417..a37331b 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -1,17 +1,17 @@ """General mirakuru library tests.""" # pylint: disable=wildcard-import,unused-wildcard-import from mirakuru import * -def test_importing_mirakuru(): +def test_importing_mirakuru() -> None: """Test if all most commonly used classes are imported by default.""" - assert 'Executor' in globals() - assert 'SimpleExecutor' in globals() - assert 'OutputExecutor' in globals() - assert 'TCPExecutor' in globals() - assert 'HTTPExecutor' in globals() - assert 'PidExecutor' in globals() - assert 'ExecutorError' in globals() - assert 'TimeoutExpired' in globals() - assert 'AlreadyRunning' in globals() - assert 'ProcessExitedWithError' in globals() + assert "Executor" in globals() + assert "SimpleExecutor" in globals() + assert "OutputExecutor" in globals() + assert "TCPExecutor" in globals() + assert "HTTPExecutor" in globals() + assert "PidExecutor" in globals() + assert "ExecutorError" in globals() + assert "TimeoutExpired" in globals() + assert "AlreadyRunning" in globals() + assert "ProcessExitedWithError" in globals() diff --git a/tests/unixsocketserver_for_tests.py b/tests/unixsocketserver_for_tests.py index 17f3f74..fee73f2 100644 --- a/tests/unixsocketserver_for_tests.py +++ b/tests/unixsocketserver_for_tests.py @@ -1,78 +1,78 @@ # Copyright (c) 2015, Doug Hellmann, All Rights Reserved # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. 
# * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Sample unixsocket server with small modifications.""" import socket import sys import os from time import sleep -SOCKET_ADDRESS = './uds_socket' +SOCKET_ADDRESS = "./uds_socket" SLEEP = 0 if len(sys.argv) >= 2: SOCKET_ADDRESS = sys.argv[1] if len(sys.argv) >= 3: SLEEP = int(sys.argv[2]) # Make sure the socket does not already exist try: os.unlink(SOCKET_ADDRESS) except OSError: if os.path.exists(SOCKET_ADDRESS): raise # Create a UDS socket SOCK = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) # Bind the socket to the address -print(f'starting up on {SOCKET_ADDRESS}') +print(f"starting up on {SOCKET_ADDRESS}") SOCK.bind(SOCKET_ADDRESS) sleep(SLEEP) # Listen for incoming connections SOCK.listen(1) while True: # Wait for a connection - print('waiting for a connection') + print("waiting for a connection") CONNECTION, CLIENT_ADDRESS = SOCK.accept() try: - print('connection from', CLIENT_ADDRESS) + print("connection from", CLIENT_ADDRESS) # Receive the data in small chunks and retransmit it while True: RECEIVED_DATA = CONNECTION.recv(16) - print(f'received {RECEIVED_DATA!r}') + print(f"received {RECEIVED_DATA!r}") if RECEIVED_DATA: - print('sending data back to the client') + print("sending data back to the client") CONNECTION.sendall(RECEIVED_DATA) else: - print('no data from', CLIENT_ADDRESS) + print("no data from", CLIENT_ADDRESS) break finally: # Clean up the connection CONNECTION.close()
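# --- Editor's note: illustrative sketch appended by the editor, not part of
# the patch. It shows how the sample Unix-socket server above is driven by
# UnixSocketExecutor in the test suite; the script path and the optional
# sleep argument are assumptions based on tests/unixsocketserver_for_tests.py
# and tests/executors/test_unixsocket_executor.py shown in this diff.
import sys

from mirakuru.unixsocket import UnixSocketExecutor

SOCKET_PATH = "/tmp/mirakuru.sock"
server_cmd = (
    f"{sys.executable} tests/unixsocketserver_for_tests.py {SOCKET_PATH} 2"
)

# start() returns once the pre/after-start checks can connect to the socket,
# i.e. once the server above has called bind()/listen().
with UnixSocketExecutor(
    server_cmd, socket_name=SOCKET_PATH, timeout=5
) as executor:
    assert executor.running() is True
# --- end of editor's note ---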