diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..ecb10a8 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,15 @@ +# editorconfig.org + +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +tab_width = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/.fixtures.yml b/.fixtures.yml index 2c850bd..f1a9996 100644 --- a/.fixtures.yml +++ b/.fixtures.yml @@ -1,35 +1,13 @@ fixtures: - forge_modules: - archive: - repo: puppet/archive - ref: 0.5.1 - augeas_core: - repo: puppetlabs/augeas_core - ref: 1.0.4 - stdlib: - repo: puppetlabs/stdlib - ref: 4.13.1 - java: - repo: puppetlabs/java - ref: 6.5.0 - concat: - repo: puppetlabs/concat - ref: 2.2.1 - datacat: - repo: richardc/datacat - ref: 0.6.2 - apt: - repo: puppetlabs/apt - ref: 7.4.1 - zypprepo: - repo: puppet/zypprepo - ref: 2.2.2 - yumrepo_core: - repo: puppetlabs/yumrepo_core - ref: 1.0.3 - java_ks: puppetlabs/java_ks - elastic_stack: - repo: elastic/elastic_stack - ref: 6.1.0 - symlinks: - elasticsearch: "#{source_dir}" + repositories: + apt: https://github.com/puppetlabs/puppetlabs-apt.git + archive: https://github.com/voxpupuli/puppet-archive.git + augeas_core: https://github.com/puppetlabs/puppetlabs-augeas_core.git + concat: https://github.com/puppetlabs/puppetlabs-concat.git + datacat: https://github.com/richardc/puppet-datacat.git + elastic_stack: https://github.com/voxpupuli/puppet-elastic_stack.git + java: https://github.com/puppetlabs/puppetlabs-java.git + java_ks: https://github.com/puppetlabs/puppetlabs-java_ks.git + stdlib: https://github.com/puppetlabs/puppetlabs-stdlib.git + yumrepo_core: https://github.com/puppetlabs/puppetlabs-yumrepo_core.git + zypprepo: https://github.com/voxpupuli/puppet-zypprepo.git diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 
0000000..048d2b5 --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,252 @@ +# Contribution guidelines + +## Table of contents + +* [Contributing](#contributing) +* [Writing proper commits - short version](#writing-proper-commits-short-version) +* [Writing proper commits - long version](#writing-proper-commits-long-version) +* [Dependencies](#dependencies) + * [Note for OS X users](#note-for-os-x-users) +* [The test matrix](#the-test-matrix) +* [Syntax and style](#syntax-and-style) +* [Running the unit tests](#running-the-unit-tests) +* [Unit tests in docker](#unit-tests-in-docker) +* [Integration tests](#integration-tests) + +This module has grown over time based on a range of contributions from +people using it. If you follow these contributing guidelines your patch +will likely make it into a release a little more quickly. + +## Contributing + +Please note that this project is released with a Contributor Code of Conduct. +By participating in this project you agree to abide by its terms. +[Contributor Code of Conduct](https://voxpupuli.org/coc/). + +* Fork the repo. +* Create a separate branch for your change. +* We only take pull requests with passing tests, and documentation. [GitHub Actions](https://docs.github.com/en/actions) run the tests for us. You can also execute them locally. This is explained [in a later section](#the-test-matrix). +* Checkout [our docs](https://voxpupuli.org/docs/reviewing_pr/) we use to review a module and the [official styleguide](https://puppet.com/docs/puppet/6.0/style_guide.html). They provide some guidance for new code that might help you before you submit a pull request. +* Add a test for your change. Only refactoring and documentation changes require no new tests. If you are adding functionality or fixing a bug, please add a test. +* Squash your commits down into logical components. Make sure to rebase against our current master. +* Push the branch to your fork and submit a pull request. 
+ +Please be prepared to repeat some of these steps as our contributors review your code. + +Also consider sending in your profile code that calls this component module as an acceptance test or provide it via an issue. This helps reviewers a lot to test your use case and prevents future regressions! + +## Writing proper commits - short version + +* Make commits of logical units. +* Check for unnecessary whitespace with "git diff --check" before committing. +* Commit using Unix line endings (check the settings around "crlf" in git-config(1)). +* Do not check in commented out code or unneeded files. +* The first line of the commit message should be a short description (50 characters is the soft limit, excluding ticket number(s)), and should skip the full stop. +* Associate the issue in the message. The first line should include the issue number in the form "(#XXXX) Rest of message". +* The body should provide a meaningful commit message, which: + *uses the imperative, present tense: `change`, not `changed` or `changes`. + * includes motivation for the change, and contrasts its implementation with the previous behavior. + * Make sure that you have tests for the bug you are fixing, or feature you are adding. + * Make sure the test suites passes after your commit: + * When introducing a new feature, make sure it is properly documented in the README.md + +## Writing proper commits - long version + + 1. Make separate commits for logically separate changes. + + Please break your commits down into logically consistent units + which include new or changed tests relevant to the rest of the + change. The goal of doing this is to make the diff easier to + read for whoever is reviewing your code. In general, the easier + your diff is to read, the more likely someone will be happy to + review it and get it into the code base. + + If you are going to refactor a piece of code, please do so as a + separate commit from your feature or bug fix changes. 
+ + We also really appreciate changes that include tests to make + sure the bug is not re-introduced, and that the feature is not + accidentally broken. + + Describe the technical detail of the change(s). If your + description starts to get too long, that is a good sign that you + probably need to split up your commit into more finely grained + pieces. + + Commits which plainly describe the things which help + reviewers check the patch and future developers understand the + code are much more likely to be merged in with a minimum of + bike-shedding or requested changes. Ideally, the commit message + would include information, and be in a form suitable for + inclusion in the release notes for the version of Puppet that + includes them. + + Please also check that you are not introducing any trailing + whitespace or other "whitespace errors". You can do this by + running "git diff --check" on your changes before you commit. + + 2. Sending your patches + + To submit your changes via a GitHub pull request, we _highly_ + recommend that you have them on a topic branch, instead of + directly on `master`. + It makes things much easier to keep track of, especially if + you decide to work on another thing before your first change + is merged in. + + GitHub has some pretty good + [general documentation](http://help.github.com/) on using + their site. They also have documentation on + [creating pull requests](http://help.github.com/send-pull-requests/). + + In general, after pushing your topic branch up to your + repository on GitHub, you can switch to the branch in the + GitHub UI and click "Pull Request" towards the top of the page + in order to open a pull request. + + + 3. Update the related GitHub issue. + + If there is a GitHub issue associated with the change you + submitted, then you should update the ticket to include the + location of your branch, along with any other commentary you + may wish to make. 
+ +## Dependencies + +The testing and development tools have a bunch of dependencies, +all managed by [bundler](http://bundler.io/) according to the +[Puppet support matrix](http://docs.puppetlabs.com/guides/platforms.html#ruby-versions). + +By default the tests use a baseline version of Puppet. + +If you have Ruby 2.x or want a specific version of Puppet, +you must set an environment variable such as: + +```sh +export PUPPET_VERSION="~> 5.5.6" +``` + +You can install all needed gems for spec tests into the modules directory by +running: + +```sh +bundle install --path .vendor/ --without development system_tests release --jobs "$(nproc)" +``` + +If you also want to run acceptance tests: + +```sh +bundle install --path .vendor/ --with system_tests --without development release --jobs "$(nproc)" +``` + +Our all in one solution if you don't know if you need to install or update gems: + +```sh +bundle install --path .vendor/ --with system_tests --without development release --jobs "$(nproc)"; bundle update; bundle clean +``` + +As an alternative to the `--jobs "$(nproc)` parameter, you can set an +environment variable: + +```sh +BUNDLE_JOBS="$(nproc)" +``` + +### Note for OS X users + +`nproc` isn't a valid command under OS x. As an alternative, you can do: + +```sh +--jobs "$(sysctl -n hw.ncpu)" +``` + +## The test matrix + +### Syntax and style + +The test suite will run [Puppet Lint](http://puppet-lint.com/) and +[Puppet Syntax](https://github.com/gds-operations/puppet-syntax) to +check various syntax and style things. You can run these locally with: + +```sh +bundle exec rake lint +bundle exec rake validate +``` + +It will also run some [Rubocop](http://batsov.com/rubocop/) tests +against it. You can run those locally ahead of time with: + +```sh +bundle exec rake rubocop +``` + +### Running the unit tests + +The unit test suite covers most of the code, as mentioned above please +add tests if you're adding new functionality. 
If you've not used +[rspec-puppet](http://rspec-puppet.com/) before then feel free to ask +about how best to test your new feature. + +To run the linter, the syntax checker and the unit tests: + +```sh +bundle exec rake test +``` + +To run your all the unit tests + +```sh +bundle exec rake spec +``` + +To run a specific spec test set the `SPEC` variable: + +```sh +bundle exec rake spec SPEC=spec/foo_spec.rb +``` + +#### Unit tests in docker + +Some people don't want to run the dependencies locally or don't want to install +ruby. We ship a Dockerfile that enables you to run all unit tests and linting. +You only need to run: + +```sh +docker build . +``` + +Please ensure that a docker daemon is running and that your user has the +permission to talk to it. You can specify a remote docker host by setting the +`DOCKER_HOST` environment variable. it will copy the content of the module into +the docker image. So it will not work if a Gemfile.lock exists. + +### Integration tests + +The unit tests just check the code runs, not that it does exactly what +we want on a real machine. For that we're using +[beaker](https://github.com/puppetlabs/beaker). + +This fires up a new virtual machine (using vagrant) and runs a series of +simple tests against it after applying the module. You can run this +with: + +```sh +BEAKER_setfile=debian10-x64 bundle exec rake beaker +``` + +You can replace the string `debian10` with any common operating system. +The following strings are known to work: + +* ubuntu1604 +* ubuntu1804 +* ubuntu2004 +* debian9 +* debian10 +* centos7 +* centos8 + +For more information and tips & tricks, see [voxpupuli-acceptance's documentation](https://github.com/voxpupuli/voxpupuli-acceptance#running-tests). + +The source of this file is in our [modulesync_config](https://github.com/voxpupuli/modulesync_config/blob/master/moduleroot/.github/CONTRIBUTING.md.erb) +repository. 
diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 1279361..593e7aa 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -1,30 +1,26 @@ +Thank you for contributing to this project! - -* Module version: -* Puppet version: -* OS and version: +## Affected Puppet, Ruby, OS and module versions/distributions -## Bug description +- Puppet: +- Ruby: +- Distribution: +- Module version: - +## How to reproduce (e.g Puppet code you use) + +## What are you seeing + +## What behaviour did you expect instead + +## Output log -## Feature Description +## Any additional information you'd like to impart diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 2b0d43c..342807b 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,9 +1,20 @@ - + +#### Pull Request (PR) description + + +#### This Pull Request (PR) fixes the following issues + diff --git a/.github/SECURITY.md b/.github/SECURITY.md new file mode 100644 index 0000000..cacadf2 --- /dev/null +++ b/.github/SECURITY.md @@ -0,0 +1,3 @@ +# Vox Pupuli Security Policy + +Our vulnerabilities reporting process is at https://voxpupuli.org/security/ diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..d08d05e --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,90 @@ +--- +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +name: CI + +on: pull_request + +concurrency: + group: ${{ github.head_ref }} + cancel-in-progress: true + +jobs: + setup_matrix: + name: 'Setup Test Matrix' + runs-on: ubuntu-latest + timeout-minutes: 40 + outputs: + puppet_unit_test_matrix: ${{ steps.get-outputs.outputs.puppet_unit_test_matrix }} + github_action_test_matrix: ${{ steps.get-outputs.outputs.github_action_test_matrix }} + env: + BUNDLE_WITHOUT: development:system_tests:release + steps: + - uses: actions/checkout@v2 + - name: Setup ruby 
+ uses: ruby/setup-ruby@v1 + with: + ruby-version: '3.0' + bundler-cache: true + - name: Run static validations + run: bundle exec rake validate lint check + - name: Run rake rubocop + run: bundle exec rake rubocop + - name: Setup Test Matrix + id: get-outputs + run: bundle exec metadata2gha --use-fqdn --pidfile-workaround false + + unit: + needs: setup_matrix + runs-on: ubuntu-latest + timeout-minutes: 40 + strategy: + fail-fast: false + matrix: + include: ${{fromJson(needs.setup_matrix.outputs.puppet_unit_test_matrix)}} + env: + BUNDLE_WITHOUT: development:system_tests:release + PUPPET_VERSION: "~> ${{ matrix.puppet }}.0" + name: Puppet ${{ matrix.puppet }} (Ruby ${{ matrix.ruby }}) + steps: + - uses: actions/checkout@v2 + - name: Setup ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.ruby }} + bundler-cache: true + - name: Run tests + run: bundle exec rake parallel_spec + + acceptance: + needs: setup_matrix + runs-on: ubuntu-latest + env: + BUNDLE_WITHOUT: development:test:release + strategy: + fail-fast: false + matrix: + include: ${{fromJson(needs.setup_matrix.outputs.github_action_test_matrix)}} + name: ${{ matrix.puppet.name }} - ${{ matrix.setfile.name }} + steps: + - uses: actions/checkout@v2 + - name: Setup ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: '3.0' + bundler-cache: true + - name: Run tests + run: bundle exec rake beaker + env: + BEAKER_PUPPET_COLLECTION: ${{ matrix.puppet.collection }} + BEAKER_setfile: ${{ matrix.setfile.value }} + + tests: + needs: + - unit + - acceptance + runs-on: ubuntu-latest + name: Test suite + steps: + - run: echo Test suite completed diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..664ba69 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,34 @@ +--- +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +name: Release + +on: + push: + tags: + - '*' + +env: + 
BUNDLE_WITHOUT: development:test:system_tests + +jobs: + deploy: + name: 'deploy to forge' + runs-on: ubuntu-latest + if: github.repository_owner == 'voxpupuli' + steps: + - name: Checkout repository + uses: actions/checkout@v2 + - name: Setup Ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: '2.7' + bundler-cache: true + - name: Build and Deploy + env: + # Configure secrets here: + # https://docs.github.com/en/free-pro-team@latest/actions/reference/encrypted-secrets + BLACKSMITH_FORGE_USERNAME: '${{ secrets.PUPPET_FORGE_USERNAME }}' + BLACKSMITH_FORGE_API_KEY: '${{ secrets.PUPPET_FORGE_API_KEY }}' + run: bundle exec rake module:push diff --git a/.gitignore b/.gitignore index 218e73e..9b95224 100644 --- a/.gitignore +++ b/.gitignore @@ -1,20 +1,23 @@ -*.deb -*.lock -*.rpm -*.sha1 -*.tar.gz -*.zip -.bundle -.ruby-version -.swp -.vagrant -.vendor -.yardoc -doc -log +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + pkg/ -spec/fixtures/manifests -spec/fixtures/modules -spec/logs -spec/reports/ -vendor +Gemfile.lock +Gemfile.local +vendor/ +.vendor/ +spec/fixtures/manifests/ +spec/fixtures/modules/ +.vagrant/ +.bundle/ +.ruby-version +coverage/ +log/ +.idea/ +.dependencies/ +.librarian/ +Puppetfile.lock +*.iml +.*.sw? +.yardoc/ +Guardfile diff --git a/.msync.yml b/.msync.yml new file mode 100644 index 0000000..a83abd9 --- /dev/null +++ b/.msync.yml @@ -0,0 +1,5 @@ +--- +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +modulesync_config_version: '5.1.0' diff --git a/.overcommit.yml b/.overcommit.yml new file mode 100644 index 0000000..d367ada --- /dev/null +++ b/.overcommit.yml @@ -0,0 +1,65 @@ +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ +# +# Hooks are only enabled if you take action. 
+# +# To enable the hooks run: +# +# ``` +# bundle exec overcommit --install +# # ensure .overcommit.yml does not harm to you and then +# bundle exec overcommit --sign +# ``` +# +# (it will manage the .git/hooks directory): +# +# Examples howto skip a test for a commit or push: +# +# ``` +# SKIP=RuboCop git commit +# SKIP=PuppetLint git commit +# SKIP=RakeTask git push +# ``` +# +# Don't invoke overcommit at all: +# +# ``` +# OVERCOMMIT_DISABLE=1 git commit +# ``` +# +# Read more about overcommit: https://github.com/brigade/overcommit +# +# To manage this config yourself in your module add +# +# ``` +# .overcommit.yml: +# unmanaged: true +# ``` +# +# to your modules .sync.yml config +--- +PreCommit: + RuboCop: + enabled: true + description: 'Runs rubocop on modified files only' + command: ['bundle', 'exec', 'rubocop'] + PuppetLint: + enabled: true + description: 'Runs puppet-lint on modified files only' + command: ['bundle', 'exec', 'puppet-lint'] + YamlSyntax: + enabled: true + JsonSyntax: + enabled: true + TrailingWhitespace: + enabled: true + +PrePush: + RakeTarget: + enabled: true + description: 'Run rake targets' + targets: + - 'validate' + - 'test' + - 'rubocop' + command: ['bundle', 'exec', 'rake'] diff --git a/.pmtignore b/.pmtignore index 2ba009d..65f5051 100644 --- a/.pmtignore +++ b/.pmtignore @@ -1,11 +1,37 @@ -*.swp -*.org +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +docs/ +pkg/ Gemfile Gemfile.lock -Makefile +Gemfile.local +vendor/ +.vendor/ +spec/ Rakefile -doc/ -junit/ +.vagrant/ +.bundle/ +.ruby-version +coverage/ log/ -logs/ -spec/ +.idea/ +.dependencies/ +.github/ +.librarian/ +Puppetfile.lock +*.iml +.editorconfig +.fixtures.yml +.gitignore +.msync.yml +.overcommit.yml +.pmtignore +.rspec +.rspec_parallel +.rubocop.yml +.sync.yml +.*.sw? 
+.yardoc/ +.yardopts +Dockerfile diff --git a/.puppet-lint.rc b/.puppet-lint.rc new file mode 100644 index 0000000..02a3e71 --- /dev/null +++ b/.puppet-lint.rc @@ -0,0 +1 @@ +--fail-on-warnings diff --git a/.rspec b/.rspec new file mode 100644 index 0000000..f634583 --- /dev/null +++ b/.rspec @@ -0,0 +1,5 @@ +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +--format documentation +--color diff --git a/.rspec_parallel b/.rspec_parallel new file mode 100644 index 0000000..a9a84f8 --- /dev/null +++ b/.rspec_parallel @@ -0,0 +1,4 @@ +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +--format progress diff --git a/.rubocop.yml b/.rubocop.yml index 4e8824c..53ac189 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,66 +1,6 @@ -AllCops: - Exclude: - - pkg/**/* - - spec/fixtures/**/* - - vendor/**/* - Include: - - Rakefile - TargetRubyVersion: 1.9 +--- +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ -Metrics/AbcSize: - Enabled: false - -Metrics/LineLength: - Enabled: false - -Metrics/MethodLength: - Enabled: false - -Metrics/ParameterLists: - Enabled: false - -Style/AndOr: - Enabled: false - -Style/BlockDelimiters: - Enabled: false - -Style/BlockEndNewline: - Enabled: false - -Style/ClassAndModuleCamelCase: - Enabled: false - -Style/ClassAndModuleChildren: - Enabled: false - -Style/FirstParameterIndentation: - Enabled: false - -Style/FrozenStringLiteralComment: - Enabled: false - -Style/HashSyntax: - Enabled: false - -Style/IndentArray: - Enabled: false - -Style/Lambda: - Enabled: false - -Style/MultilineBlockChain: - Enabled: false - -Style/MultilineBlockLayout: - Enabled: false - -Style/Not: - Enabled: false - -# This is the default post-0.41.2 rubocop, set it to be forward-compatible here. 
-Style/PercentLiteralDelimiters: - PreferredDelimiters: - '%r': '{}' - '%w': '[]' - '%W': '[]' +inherit_gem: + voxpupuli-test: rubocop.yml diff --git a/.sync.yml b/.sync.yml new file mode 100644 index 0000000..994a69d --- /dev/null +++ b/.sync.yml @@ -0,0 +1,18 @@ +--- +.puppet-lint.rc: + enabled_lint_checks: + - parameter_documentation + - parameter_types +Gemfile: + optional: + ':test': + - gem: bcrypt + - gem: webmock + ':system_tests': + - gem: bcrypt + - gem: rspec-retry + - gem: simp-beaker-helpers +spec/spec_helper.rb: + hiera_config: "'spec/fixtures/hiera/hiera.yaml'" +spec/spec_helper_acceptance.rb: + unmanaged: false diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e3cf307 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,24 @@ +# MANAGED BY MODULESYNC +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +FROM ruby:2.7 + +WORKDIR /opt/puppet + +# https://github.com/puppetlabs/puppet/blob/06ad255754a38f22fb3a22c7c4f1e2ce453d01cb/lib/puppet/provider/service/runit.rb#L39 +RUN mkdir -p /etc/sv + +ARG PUPPET_VERSION="~> 6.0" +ARG PARALLEL_TEST_PROCESSORS=4 + +# Cache gems +COPY Gemfile . +RUN bundle install --without system_tests development release --path=${BUNDLE_PATH:-vendor/bundle} + +COPY . . 
+ +RUN bundle install +RUN bundle exec rake release_checks + +# Container should not saved +RUN exit 1 diff --git a/Gemfile b/Gemfile index 6abd16b..8edfc7b 100644 --- a/Gemfile +++ b/Gemfile @@ -1,58 +1,39 @@ -source ENV['GEM_SOURCE'] || 'https://rubygems.org' +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +source ENV['GEM_SOURCE'] || "https://rubygems.org" group :test do - gem 'puppet', (ENV['PUPPET_VERSION'] || '~> 6.0'), :require => false - - gem 'metadata-json-lint' - gem 'specinfra', '~> 2.60' - gem 'xmlrpc' - - gem 'ci_reporter_rspec' - gem 'facter', "~> 2.4" - gem 'pry' - gem 'puppet-lint' - gem 'puppet-strings' - gem 'puppet-syntax' - gem 'puppetlabs_spec_helper', '>= 2.7.0' - gem 'rake' - gem 'rspec', '~> 3.0' - gem 'rspec-puppet', '~> 2.6' - gem 'rspec-puppet-facts' - gem 'rspec-puppet-utils' - gem 'rspec-retry' - # Required to test against Ruby 1.9 - gem 'rubocop', '~> 0.41.2' - gem 'rubysl-securerandom' - gem 'webmock' - - # Extra Puppet-lint gems - gem 'puppet-lint-appends-check', - :git => 'https://github.com/voxpupuli/puppet-lint-appends-check', - :ref => '07be8ce22d69353db055820b60bb77fe020238a6', - :require => false - gem 'puppet-lint-empty_string-check', :require => false - gem 'puppet-lint-file_ensure-check', :require => false - gem 'puppet-lint-leading_zero-check', :require => false - gem 'puppet-lint-param-docs', :require => false - gem 'puppet-lint-trailing_comma-check', :require => false - gem 'puppet-lint-undef_in_function-check', :require => false - gem 'puppet-lint-unquoted_string-check', :require => false - gem 'puppet-lint-version_comparison-check', :require => false + gem 'voxpupuli-test', '~> 5.0', :require => false + gem 'coveralls', :require => false + gem 'simplecov-console', :require => false + gem 'puppet_metadata', '~> 1.0', :require => false + gem 'bcrypt', :require => false + gem 'webmock', :require => false end group :development do - gem 'puppet-blacksmith' + 
gem 'guard-rake', :require => false + gem 'overcommit', '>= 0.39.1', :require => false end group :system_tests do - gem 'bcrypt' - gem 'beaker', '>= 4.2.0' - gem 'beaker-rspec', '~> 6.0' - gem 'beaker-docker' - gem 'beaker-puppet' - gem 'beaker-puppet_install_helper' - gem 'simp-beaker-helpers' - gem 'docker-api', '~> 1.0' - gem 'infrataster' - gem 'vault' + gem 'voxpupuli-acceptance', '~> 1.0', :require => false + gem 'bcrypt', :require => false + gem 'rspec-retry', :require => false + gem 'simp-beaker-helpers', :require => false +end + +group :release do + gem 'github_changelog_generator', '>= 1.16.1', :require => false if RUBY_VERSION >= '2.5' + gem 'voxpupuli-release', '>= 1.2.0', :require => false + gem 'puppet-strings', '>= 2.2', :require => false end + +gem 'rake', :require => false +gem 'facter', ENV['FACTER_GEM_VERSION'], :require => false, :groups => [:test] + +puppetversion = ENV['PUPPET_VERSION'] || '>= 6.0' +gem 'puppet', puppetversion, :require => false, :groups => [:test] + +# vim: syntax=ruby diff --git a/README.md b/README.md index 13bc974..099b5ab 100644 --- a/README.md +++ b/README.md @@ -1,845 +1,845 @@ # Elasticsearch Puppet Module [![Puppet Forge endorsed](https://img.shields.io/puppetforge/e/elastic/elasticsearch.svg)](https://forge.puppetlabs.com/elastic/elasticsearch) [![Puppet Forge Version](https://img.shields.io/puppetforge/v/elastic/elasticsearch.svg)](https://forge.puppetlabs.com/elastic/elasticsearch) [![Puppet Forge Downloads](https://img.shields.io/puppetforge/dt/elastic/elasticsearch.svg)](https://forge.puppetlabs.com/elastic/elasticsearch) [![Puppet Forge Score](https://img.shields.io/puppetforge/f/elastic/elasticsearch.svg)](https://forge.puppetlabs.com/elastic/elasticsearch) [![Build Status](https://travis-ci.org/elastic/puppet-elasticsearch.png?branch=master)](https://travis-ci.org/elastic/puppet-elasticsearch) #### Table of Contents 1. [Module description - What the module does and why it is useful](#module-description) 2. 
[Setup - The basics of getting started with Elasticsearch](#setup) * [The module manages the following](#the-module-manages-the-following) * [Requirements](#requirements) 3. [Usage - Configuration options and additional functionality](#usage) 4. [Advanced features - Extra information on advanced usage](#advanced-features) 5. [Reference - An under-the-hood peek at what the module is doing and how](#reference) 6. [Limitations - OS compatibility, etc.](#limitations) 7. [Development - Guide for contributing to the module](#development) 8. [Support - When you need help with this module](#support) ## Module description This module sets up [Elasticsearch](https://www.elastic.co/overview/elasticsearch/) instances with additional resource for plugins, templates, and more. This module is actively tested against Elasticsearch 2.x, 5.x, and 6.x. # WARNING: The 7.x major release of this module contains breaking changes! In order to simplify the management of Elasticsearch moving forward, and add support for both Elasticsearch 6.x and 7.x, support for running multiple instances of Elasticsearch has been removed. -This module also does not currently handle the migration from the instance based configuration to the new single deployment model. -Therefore in-place upgrades from version 6.x of this module to 7.x, or migrations from multi-instance to single deployment is not currently supported. +This module also does not currently handle the migration from the instance based configuration to the new single deployment model. +Therefore in-place upgrades from version 6.x of this module to 7.x, or migrations from multi-instance to single deployment is not currently supported. We hope to add support for this in a future release. Therefore please ensure that you test this major release in your environment before using it in production! ## Setup ### The module manages the following * Elasticsearch repository files. * Elasticsearch package. * Elasticsearch configuration file. 
* Elasticsearch service. * Elasticsearch plugins. * Elasticsearch snapshot repositories. * Elasticsearch templates. * Elasticsearch ingest pipelines. * Elasticsearch index settings. * Elasticsearch users, roles, and certificates. * Elasticsearch licenses. * Elasticsearch keystores. ### Requirements * The [stdlib](https://forge.puppetlabs.com/puppetlabs/stdlib) Puppet library. * [richardc/datacat](https://forge.puppetlabs.com/richardc/datacat) * [Augeas](http://augeas.net/) * [puppetlabs-java_ks](https://forge.puppetlabs.com/puppetlabs/java_ks) for certificate management (optional). -Beginning with Elasticsearch 7.0.0, a Java JDK has been bundled as part of the elasticsearch package. -However there still needs to be a version of Java present on the system being managed in order for Puppet to be able to run various utilities. +Beginning with Elasticsearch 7.0.0, a Java JDK has been bundled as part of the elasticsearch package. +However there still needs to be a version of Java present on the system being managed in order for Puppet to be able to run various utilities. We recommend managing your Java installation with the [puppetlabs-java](https://forge.puppetlabs.com/puppetlabs/java) module. #### Repository management When using the repository management, the following module dependencies are required: * General: [Elastic/elastic_stack](https://forge.puppet.com/elastic/elastic_stack) * Debian/Ubuntu: [Puppetlabs/apt](https://forge.puppetlabs.com/puppetlabs/apt) * openSUSE/SLES: [puppet/zypprepo](https://forge.puppetlabs.com/puppet/zypprepo) ### Beginning with Elasticsearch Declare the top-level `elasticsearch` class (managing repositories) and set up an instance: ```puppet include ::java class { 'elasticsearch': } ``` ## Usage ### Main class Most top-level parameters in the `elasticsearch` class are set to reasonable defaults. 
The following are some parameters that may be useful to override: #### Install a specific version ```puppet class { 'elasticsearch': version => '7.9.3' } ``` Note: This will only work when using the repository. #### Automatically restarting the service (default set to false) By default, the module will not restart Elasticsearch when the configuration file, package, or plugins change. This can be overridden globally with the following option: ```puppet class { 'elasticsearch': restart_on_change => true } ``` Or controlled with the more granular options: `restart_config_change`, `restart_package_change`, and `restart_plugin_change.` #### Automatic upgrades (default set to false) ```puppet class { 'elasticsearch': autoupgrade => true } ``` #### Removal/Decommissioning ```puppet class { 'elasticsearch': ensure => 'absent' } ``` #### Install everything but disable service(s) afterwards ```puppet class { 'elasticsearch': status => 'disabled' } ``` #### API Settings Some resources, such as `elasticsearch::template`, require communicating with the Elasticsearch REST API. By default, these API settings are set to: ```puppet class { 'elasticsearch': api_protocol => 'http', api_host => 'localhost', api_port => 9200, api_timeout => 10, api_basic_auth_username => undef, api_basic_auth_password => undef, api_ca_file => undef, api_ca_path => undef, validate_tls => true, } ``` Each of these can be set at the top-level `elasticsearch` class and inherited for each resource or overridden on a per-resource basis. #### Dynamically Created Resources This module supports managing all of its defined types through top-level parameters to better support Hiera and Puppet Enterprise. 
For example, to manage an index template directly from the `elasticsearch` class: ```puppet class { 'elasticsearch': templates => { 'logstash' => { 'content' => { 'template' => 'logstash-*', 'settings' => { 'number_of_replicas' => 0 } } } } } ``` ### Plugins This module can help manage [a variety of plugins](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-plugins.html#known-plugins). Note that `module_dir` is where the plugin will install itself to and must match that published by the plugin author; it is not where you would like to install it yourself. #### From an official repository ```puppet elasticsearch::plugin { 'x-pack': } ``` #### From a custom url ```puppet elasticsearch::plugin { 'jetty': url => 'https://oss-es-plugins.s3.amazonaws.com/elasticsearch-jetty/elasticsearch-jetty-1.2.1.zip' } ``` #### Using a proxy You can also use a proxy if required by setting the `proxy_host` and `proxy_port` options: ```puppet elasticsearch::plugin { 'lmenezes/elasticsearch-kopf': proxy_host => 'proxy.host.com', proxy_port => 3128 } ``` Proxies that require usernames and passwords are similarly supported with the `proxy_username` and `proxy_password` parameters. Plugin name formats that are supported include: * `elasticsearch/plugin/version` (for official elasticsearch plugins downloaded from download.elastic.co) * `groupId/artifactId/version` (for community plugins downloaded from maven central or OSS Sonatype) * `username/repository` (for site plugins downloaded from github master) #### Upgrading plugins When you specify a certain plugin version, you can upgrade that plugin by specifying the new version. ```puppet elasticsearch::plugin { 'elasticsearch/elasticsearch-cloud-aws/2.1.1': } ``` And to upgrade, you would simply change it to ```puppet elasticsearch::plugin { 'elasticsearch/elasticsearch-cloud-aws/2.4.1': } ``` Please note that this does not work when you specify 'latest' as a version number. 
#### ES 6.x and 7.x official plugins For the Elasticsearch commercial plugins you can refer to them by their simple name. See [Plugin installation](https://www.elastic.co/guide/en/elasticsearch/plugins/current/installation.html) for more details. ### Scripts Installs [scripts](http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html) to be used by Elasticsearch. These scripts are shared across all defined instances on the same host. ```puppet elasticsearch::script { 'myscript': ensure => 'present', source => 'puppet:///path/to/my/script.groovy' } ``` Script directories can also be recursively managed for large collections of scripts: ```puppet elasticsearch::script { 'myscripts_dir': ensure => 'directory', source => 'puppet:///path/to/myscripts_dir', recurse => 'remote', } ``` ### Templates By default templates use the top-level `elasticsearch::api_*` settings to communicate with Elasticsearch. The following is an example of how to override these settings: ```puppet elasticsearch::template { 'templatename': api_protocol => 'https', api_host => $::ipaddress, api_port => 9201, api_timeout => 60, api_basic_auth_username => 'admin', api_basic_auth_password => 'adminpassword', api_ca_file => '/etc/ssl/certs', api_ca_path => '/etc/pki/certs', validate_tls => false, source => 'puppet:///path/to/template.json', } ``` #### Add a new template using a file This will install and/or replace the template in Elasticsearch: ```puppet elasticsearch::template { 'templatename': source => 'puppet:///path/to/template.json', } ``` #### Add a new template using content This will install and/or replace the template in Elasticsearch: ```puppet elasticsearch::template { 'templatename': content => { 'template' => "*", 'settings' => { 'number_of_replicas' => 0 } } } ``` Plain JSON strings are also supported. 
```puppet elasticsearch::template { 'templatename': content => '{"template":"*","settings":{"number_of_replicas":0}}' } ``` #### Delete a template ```puppet elasticsearch::template { 'templatename': ensure => 'absent' } ``` ### Ingestion Pipelines Pipelines behave similar to templates in that their contents can be controlled over the Elasticsearch REST API with a custom Puppet resource. API parameters follow the same rules as templates (those settings can either be controlled at the top-level in the `elasticsearch` class or set per-resource). #### Adding a new pipeline This will install and/or replace an ingestion pipeline in Elasticsearch (ingestion settings are compared against the present configuration): ```puppet elasticsearch::pipeline { 'addfoo': content => { 'description' => 'Add the foo field', 'processors' => [{ 'set' => { 'field' => 'foo', 'value' => 'bar' } }] } } ``` #### Delete a pipeline ```puppet elasticsearch::pipeline { 'addfoo': ensure => 'absent' } ``` ### Index Settings This module includes basic support for ensuring an index is present or absent with optional index settings. API access settings follow the pattern previously mentioned for templates. #### Creating an index At the time of this writing, only index settings are supported. Note that some settings (such as `number_of_shards`) can only be set at index creation time. ```puppet elasticsearch::index { 'foo': settings => { 'index' => { 'number_of_replicas' => 0 } } } ``` #### Delete an index ```puppet elasticsearch::index { 'foo': ensure => 'absent' } ``` ### Snapshot Repositories By default snapshot_repositories use the top-level `elasticsearch::api_*` settings to communicate with Elasticsearch. 
The following is an example of how to override these settings: ```puppet elasticsearch::snapshot_repository { 'backups': api_protocol => 'https', api_host => $::ipaddress, api_port => 9201, api_timeout => 60, api_basic_auth_username => 'admin', api_basic_auth_password => 'adminpassword', api_ca_file => '/etc/ssl/certs', api_ca_path => '/etc/pki/certs', validate_tls => false, location => '/backups', } ``` #### Delete a snapshot repository ```puppet elasticsearch::snapshot_repository { 'backups': ensure => 'absent', location => '/backup' } ``` ### Connection Validator This module offers a way to make sure an instance has been started and is up and running before doing a next action. This is done via the use of the `es_instance_conn_validator` resource. ```puppet es_instance_conn_validator { 'myinstance' : server => 'es.example.com', port => '9200', } ``` A common use would be for example : ```puppet class { 'kibana4' : require => Es_Instance_Conn_Validator['myinstance'], } ``` ### Package installation There are two different ways of installing Elasticsearch: #### Repository ##### Choosing an Elasticsearch major version This module uses the `elastic/elastic_stack` module to manage package repositories. Because there is a separate repository for each major version of the Elastic stack, selecting which version to configure is necessary to change the default repository value, like this: ```puppet class { 'elastic_stack::repo': version => 6, } class { 'elasticsearch': version => '6.8.12', } ``` This module defaults to the upstream package repositories, which as of Elasticsearch 6.3, includes X-Pack. In order to use the purely OSS (open source) package and repository, the appropriate `oss` flag must be set on the `elastic_stack::repo` and `elasticsearch` classes: ```puppet class { 'elastic_stack::repo': oss => true, } class { 'elasticsearch': oss => true, } ``` ##### Manual repository management You may want to manage repositories manually. 
You can disable automatic repository management like this: ```puppet class { 'elasticsearch': manage_repo => false, } ``` #### Remote package source When a repository is not available or preferred you can install the packages from a remote source: ##### http/https/ftp ```puppet class { 'elasticsearch': package_url => 'https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.2.deb', proxy_url => 'http://proxy.example.com:8080/', } ``` Setting `proxy_url` to a location will enable download using the provided proxy server. This parameter is also used by `elasticsearch::plugin`. Setting the port in the `proxy_url` is mandatory. `proxy_url` defaults to `undef` (proxy disabled). ##### puppet:// ```puppet class { 'elasticsearch': package_url => 'puppet:///path/to/elasticsearch-1.4.2.deb' } ``` ##### Local file ```puppet class { 'elasticsearch': package_url => 'file:/path/to/elasticsearch-1.4.2.deb' } ``` ### JVM Configuration When configuring Elasticsearch's memory usage, you can modify it by setting `jvm_options`: ```puppet class { 'elasticsearch': jvm_options => [ '-Xms4g', '-Xmx4g' ] } ``` ### Service management Currently only the basic SysV-style [init](https://en.wikipedia.org/wiki/Init) and [Systemd](http://en.wikipedia.org/wiki/Systemd) service providers are supported, but other systems could be implemented as necessary (pull requests welcome). #### Defaults File The *defaults* file (`/etc/defaults/elasticsearch` or `/etc/sysconfig/elasticsearch`) for the Elasticsearch service can be populated as necessary. This can either be a static file resource or a simple key value-style [hash](http://docs.puppetlabs.com/puppet/latest/reference/lang_datatypes.html#hashes) object, the latter being particularly well-suited to pulling out of a data source such as Hiera. 
##### File source ```puppet class { 'elasticsearch': init_defaults_file => 'puppet:///path/to/defaults' } ``` ##### Hash representation ```puppet $config_hash = { 'ES_HEAP_SIZE' => '30g', } class { 'elasticsearch': init_defaults => $config_hash } ``` Note: `init_defaults` hash can be passed to the main class and to the instance. ## Advanced features ### Security File-based users, roles, and certificates can be managed by this module. **Note**: If you are planning to use these features, it is *highly recommended* you read the following documentation to understand the caveats and extent of the resources available to you. #### Roles Roles in the file realm can be managed using the `elasticsearch::role` type. For example, to create a role called `myrole`, you could use the following resource: ```puppet elasticsearch::role { 'myrole': privileges => { 'cluster' => [ 'monitor' ], 'indices' => [{ 'names' => [ '*' ], 'privileges' => [ 'read' ], }] } } ``` This role would grant users access to cluster monitoring and read access to all indices. See the [Security](https://www.elastic.co/guide/en/elasticsearch/reference/current/elasticsearch-security.html) documentation for your version to determine what `privileges` to use and how to format them (the Puppet hash representation will simply be translated into yaml.) **Note**: The Puppet provider for `elasticsearch_user` has fine-grained control over the `roles.yml` file and thus will leave the default roles in-place. If you would like to explicitly purge the default roles (leaving only roles managed by puppet), you can do so by including the following in your manifest: ```puppet resources { 'elasticsearch_role': purge => true, } ``` ##### Mappings Associating mappings with a role for file-based management is done by passing an array of strings to the `mappings` parameter of the `elasticsearch::role` type. 
For example, to define a role with mappings: ```puppet elasticsearch::role { 'logstash': mappings => [ 'cn=group,ou=devteam', ], privileges => { 'cluster' => 'manage_index_templates', 'indices' => [{ 'names' => ['logstash-*'], 'privileges' => [ 'write', 'delete', 'create_index', ], }], }, } ``` If you'd like to keep the mappings file purged of entries not under Puppet's control, you should use the following `resources` declaration because mappings are a separate low-level type: ```puppet resources { 'elasticsearch_role_mapping': purge => true, } ``` #### Users Users can be managed using the `elasticsearch::user` type. For example, to create a user `myuser` with membership in `myrole`: ```puppet elasticsearch::user { 'myuser': password => 'mypassword', roles => ['myrole'], } ``` The `password` parameter will also accept password hashes generated from the `esusers`/`users` utility and ensure the password is kept in-sync with the Shield `users` file for all Elasticsearch instances. ```puppet elasticsearch::user { 'myuser': password => '$2a$10$IZMnq6DF4DtQ9c4sVovgDubCbdeH62XncmcyD1sZ4WClzFuAdqspy', roles => ['myrole'], } ``` **Note**: When using the `esusers`/`users` provider (the default for plaintext passwords), Puppet has no way to determine whether the given password is in-sync with the password hashed by Elasticsearch. In order to work around this, the `elasticsearch::user` resource has been designed to accept refresh events in order to update password values. This is not ideal, but allows you to instruct the resource to change the password when needed. For example, to update the aforementioned user's password, you could include the following in your manifest: ```puppet notify { 'update password': } ~> elasticsearch::user { 'myuser': password => 'mynewpassword', roles => ['myrole'], } ``` #### Certificates SSL/TLS can be enabled by providing the appropriate class params with paths to the certificate and private key files, and a password for the keystore. 
```puppet class { 'elasticsearch' : ssl => true, ca_certificate => '/path/to/ca.pem', certificate => '/path/to/cert.pem', private_key => '/path/to/key.pem', keystore_password => 'keystorepassword', } ``` **Note**: Setting up a proper CA and certificate infrastructure is outside the scope of this documentation, see the aforementioned security guide for more information regarding the generation of these certificate files. The module will set up a keystore file for the node to use and set the relevant options in `elasticsearch.yml` to enable TLS/SSL using the certificates and key provided. #### System Keys System keys can be passed to the module, where they will be placed into individual instance configuration directories. This can be set at the `elasticsearch` class and inherited across all instances: ```puppet class { 'elasticsearch': system_key => 'puppet:///path/to/key', } ``` ### Licensing If you use the aforementioned security features, you may need to install a user license to leverage particular features outside of a trial license. This module can handle installation of licenses without the need to write custom `exec` or `curl` code to install license data. You may instruct the module to install a license through the `elasticsearch::license` parameter: ```puppet class { 'elasticsearch': license => $license, } ``` The `license` parameter will accept either a Puppet hash representation of the license file json or a plain json string that will be parsed into a native Puppet hash. Although dependencies are automatically created to ensure that the Elasticsearch service is listening and ready before API calls are made, you may need to set the appropriate `api_*` parameters to ensure that the module can interact with the Elasticsearch API over the appropriate port, protocol, and with sufficient user rights to install the license. 
The native provider for licenses will _not_ print license signatures as part of Puppet's changelog to ensure that sensitive values are not included in console output or Puppet reports. Any fields present in the `license` parameter that differ from the license installed in a cluster will trigger a flush of the resource and new `POST` to the Elasticsearch API with the license content, though the sensitive `signature` field is not compared as it is not returned from the Elasticsearch licensing APIs. ### Data directories There are several different ways of setting data directories for Elasticsearch. In every case the required configuration options are placed in the `elasticsearch.yml` file. #### Default By default we use: /var/lib/elasticsearch Which mirrors the upstream defaults. #### Single global data directory It is possible to override the default data directory by specifying the `datadir` param: ```puppet class { 'elasticsearch': datadir => '/var/lib/elasticsearch-data' } ``` #### Multiple Global data directories It's also possible to specify multiple data directories using the `datadir` param: ```puppet class { 'elasticsearch': datadir => [ '/var/lib/es-data1', '/var/lib/es-data2'] } ``` See [the Elasticsearch documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-node.html#max-local-storage-nodes) for additional information regarding this configuration. ### Elasticsearch configuration The `config` option can be used to provide additional configuration options to Elasticsearch. 
#### Configuration writeup The `config` hash can be written in 2 different ways: ##### Full hash writeup Instead of writing the full hash representation: ```puppet class { 'elasticsearch': config => { 'cluster' => { 'name' => 'ClusterName', 'routing' => { 'allocation' => { 'awareness' => { 'attributes' => 'rack' } } } } } } ``` ##### Short hash writeup ```puppet class { 'elasticsearch': config => { 'cluster' => { 'name' => 'ClusterName', 'routing.allocation.awareness.attributes' => 'rack' } } } ``` #### Keystore Settings Recent versions of Elasticsearch include the [elasticsearch-keystore](https://www.elastic.co/guide/en/elasticsearch/reference/current/secure-settings.html) utility to create and manage the `elasticsearch.keystore` file which can store sensitive values for certain settings. The settings and values for this file can be controlled by this module. Settings follow the behavior of the `config` parameter for the top-level Elasticsearch class and `elasticsearch::instance` defined types. That is, you may define keystore settings globally, and all values will be merged with instance-specific settings for final inclusion in the `elasticsearch.keystore` file. Note that each hash key is passed to the `elasticsearch-keystore` utility in a straightforward manner, so you should specify the hash passed to `secrets` in flattened form (that is, without full nested hash representation). For example, to define cloud plugin credentials for all instances: ```puppet class { 'elasticsearch': secrets => { 'cloud.aws.access_key' => 'AKIA....', 'cloud.aws.secret_key' => 'AKIA....', } } ``` ##### Purging Secrets By default, if a secret setting exists on-disk that is not present in the `secrets` hash, this module will leave it intact. If you prefer to keep only secrets in the keystore that are specified in the `secrets` hash, use the `purge_secrets` boolean parameter either on the `elasticsearch` class to set it globally or per-instance. 
##### Notifying Services Any changes to keystore secrets will notify running elasticsearch services by respecting the `restart_on_change` and `restart_config_change` parameters. ## Reference Class parameters are available in [the auto-generated documentation pages](https://elastic.github.io/puppet-elasticsearch/puppet_classes/elasticsearch.html). Autogenerated documentation for types, providers, and ruby helpers is also available on the same documentation site. ## Limitations This module is built upon and tested against the versions of Puppet listed in the metadata.json file (i.e. the listed compatible versions on the Puppet Forge). The module has been tested on: * Amazon Linux 1/2 * Debian 8/9/10 * CentOS 7/8 * OracleLinux 7/8 * Ubuntu 16.04, 18.04, 20.04 * SLES 12 Testing on other platforms has been light and cannot be guaranteed. ## Development Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for instructions regarding development environments and testing. ## Support The Puppet Elasticsearch module is community supported and not officially supported by Elastic Support. For questions about the module, open a topic in the [Discuss](http://discuss.elastic.co/) forums or join us in [#elasticsearch](https://webchat.freenode.net/?channels=%23elasticsearch) on Freenode IRC. For bugs or feature requests, open an issue in [Github](https://github.com/elastic/puppet-elasticsearch/issues). 
diff --git a/Rakefile b/Rakefile index 715ae18..f92f051 100644 --- a/Rakefile +++ b/Rakefile @@ -1,219 +1,72 @@ -require 'digest/sha1' -require 'rubygems' -require 'puppetlabs_spec_helper/rake_tasks' -require 'puppet_blacksmith/rake_tasks' -require 'net/http' -require 'uri' -require 'fileutils' -require 'rspec/core/rake_task' -require 'puppet-strings' -require 'puppet-strings/tasks' -require 'yaml' -require 'json' -require_relative 'spec/spec_utilities' - -ENV['VAULT_APPROLE_ROLE_ID'] ||= '48adc137-3270-fc4a-ae65-1306919d4bb0' -oss_package = ENV['OSS_PACKAGE'] and ENV['OSS_PACKAGE'] == 'true' - -elasticsearch_default_version = '7.10.1' - -# Workaround for certain rspec/beaker versions -module TempFixForRakeLastComment - def last_comment - last_description +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +# Attempt to load voxpupuli-test (which pulls in puppetlabs_spec_helper), +# otherwise attempt to load it directly. +begin + require 'voxpupuli/test/rake' +rescue LoadError + begin + require 'puppetlabs_spec_helper/rake_tasks' + rescue LoadError end end -Rake::Application.send :include, TempFixForRakeLastComment - -exclude_paths = [ - 'pkg/**/*', - 'vendor/**/*', - 'spec/**/*' -] - -require 'puppet-lint/tasks/puppet-lint' -require 'puppet-syntax/tasks/puppet-syntax' - -PuppetSyntax.exclude_paths = exclude_paths -PuppetSyntax.future_parser = true if ENV['FUTURE_PARSER'] == 'true' - -%w[ - 80chars - class_inherits_from_params_class - class_parameter_defaults - single_quote_string_with_variable -].each do |check| - PuppetLint.configuration.send("disable_#{check}") -end - -PuppetLint.configuration.ignore_paths = exclude_paths -PuppetLint.configuration.log_format = \ - '%{path}:%{line}:%{check}:%{KIND}:%{message}' - -# Append custom cleanup tasks to :clean -task :clean => [ - :'artifact:clean', - :spec_clean -] - -desc 'remove outdated module fixtures' -task :spec_prune do - mods = 'spec/fixtures/modules' - 
fixtures = YAML.load_file '.fixtures.yml' - fixtures['fixtures']['forge_modules'].each do |mod, params| - next unless params.is_a? Hash \ - and params.key? 'ref' \ - and File.exist? "#{mods}/#{mod}" - - metadata = JSON.parse(File.read("#{mods}/#{mod}/metadata.json")) - FileUtils.rm_rf "#{mods}/#{mod}" unless metadata['version'] == params['ref'] - end -end -task :spec_prep => [:spec_prune] - -RSpec::Core::RakeTask.new(:spec_verbose) do |t| - t.pattern = 'spec/{classes,defines,unit,functions,templates}/**/*_spec.rb' - t.rspec_opts = [ - '--format documentation', - '--require "ci/reporter/rspec"', - '--format CI::Reporter::RSpecFormatter', - '--color' - ] -end -task :spec_verbose => :spec_prep -RSpec::Core::RakeTask.new(:spec_puppet) do |t| - t.pattern = 'spec/{classes,defines,functions,templates,unit/facter}/**/*_spec.rb' - t.rspec_opts = ['--color'] +# load optional tasks for acceptance +# only available if gem group releases is installed +begin + require 'voxpupuli/acceptance/rake' +rescue LoadError end -task :spec_puppet => :spec_prep -RSpec::Core::RakeTask.new(:spec_unit) do |t| - t.pattern = 'spec/unit/{type,provider}/**/*_spec.rb' - t.rspec_opts = ['--color'] +# load optional tasks for releases +# only available if gem group releases is installed +begin + require 'voxpupuli/release/rake_tasks' +rescue LoadError end -task :spec_unit => :spec_prep - -task :beaker => [:spec_prep] - -desc 'Run all linting/unit tests.' -task :intake => [ - :syntax, - :rubocop, - :lint, - :validate, - :spec_unit, - :spec_puppet -] - -# Plumbing for snapshot tests -desc 'Run the snapshot tests' -RSpec::Core::RakeTask.new('beaker:snapshot', [:filter]) do |task, args| - task.rspec_opts = ['--color'] - task.pattern = 'spec/acceptance/tests/acceptance_spec.rb' - task.rspec_opts = [] - task.rspec_opts << '--format documentation' if ENV['CI'].nil? - task.rspec_opts << "--example '#{args[:filter]}'" if args[:filter] - ENV['SNAPSHOT_TEST'] = 'true' - if Rake::Task.task_defined? 
'artifact:snapshot:not_found' - puts 'No snapshot artifacts found, skipping snapshot tests.' - exit(0) +desc "Run main 'test' task and report merged results to coveralls" +task test_with_coveralls: [:test] do + if Dir.exist?(File.expand_path('../lib', __FILE__)) + require 'coveralls/rake/task' + Coveralls::RakeTask.new + Rake::Task['coveralls:push'].invoke + else + puts 'Skipping reporting to coveralls. Module has no lib dir' end end -beaker_node_sets.each do |node| - desc "Run the snapshot tests against the #{node} nodeset" - task "beaker:#{node}:snapshot", [:filter] => %w[ - spec_prep - artifact:snapshot:deb - artifact:snapshot:rpm - ] do |_task, args| - ENV['BEAKER_set'] = node - Rake::Task['beaker:snapshot'].reenable - Rake::Task['beaker:snapshot'].invoke args[:filter] - end - - desc "Run acceptance tests against #{node}" - RSpec::Core::RakeTask.new( - "beaker:#{node}:acceptance", [:version, :filter] => [:spec_prep] - ) do |task, args| - ENV['BEAKER_set'] = node - args.with_defaults(:version => elasticsearch_default_version, :filter => nil) - task.pattern = 'spec/acceptance/tests/acceptance_spec.rb' - task.rspec_opts = [] - task.rspec_opts << '--format documentation' - task.rspec_opts << "--example '#{args[:filter]}'" if args[:filter] - ENV['ELASTICSEARCH_VERSION'] ||= args[:version] - Rake::Task['artifact:fetch'].invoke(ENV['ELASTICSEARCH_VERSION']) - end +desc 'Generate REFERENCE.md' +task :reference, [:debug, :backtrace] do |t, args| + patterns = '' + Rake::Task['strings:generate:reference'].invoke(patterns, args[:debug], args[:backtrace]) end -namespace :artifact do - desc 'Fetch specific installation artifacts' - task :fetch, [:version] do |_t, args| - fetch_archives( - derive_artifact_urls_for(args[:version]) - ) +begin + require 'github_changelog_generator/task' + require 'puppet_blacksmith' + GitHubChangelogGenerator::RakeTask.new :changelog do |config| + metadata = Blacksmith::Modulefile.new + config.future_release = "v#{metadata.version}" if 
metadata.version =~ /^\d+\.\d+.\d+$/ + config.header = "# Changelog\n\nAll notable changes to this project will be documented in this file.\nEach new release typically also includes the latest modulesync defaults.\nThese should not affect the functionality of the module." + config.exclude_labels = %w{duplicate question invalid wontfix wont-fix modulesync skip-changelog} + config.user = 'voxpupuli' + config.project = metadata.metadata['name'] end - namespace :snapshot do - snapshot_version = JSON.parse(http_retry('https://artifacts-api.elastic.co/v1/versions'))['versions'].reject do |version| - version.include? 'alpha' - end.last - - ENV['snapshot_version'] = snapshot_version - - downloads = JSON.parse(http_retry("https://artifacts-api.elastic.co/v1/search/#{snapshot_version}/elasticsearch"))['packages'].select do |pkg, _| - pkg =~ /(?:deb|rpm)/ and (oss_package ? pkg =~ /oss/ : pkg !~ /oss/) - end.map do |package, urls| - [package.split('.').last, urls] - end.to_h - - # We end up with something like: - # { - # 'rpm' => {'url' => 'https://...', 'sha_url' => 'https://...'}, - # 'deb' => {'url' => 'https://...', 'sha_url' => 'https://...'} - # } - # Note that checksums are currently broken on the Elastic unified release - # side; once they start working we can verify them. - - if downloads.empty? - puts 'No snapshot release available; skipping snapshot download' - %w[deb rpm].each { |ext| task ext } - task 'not_found' - else - # Download snapshot files - downloads.each_pair do |extension, urls| - filename = artifact urls['url'] - checksum = artifact urls['sha_url'] - link = artifact "elasticsearch-snapshot.#{extension}" - FileUtils.rm link if File.exist? link - - task extension => link - file link => filename do - unless File.exist?(link) and File.symlink?(link) \ - and File.readlink(link) == filename - File.delete link if File.exist? 
link - File.symlink File.basename(filename), link - end - end - - # file filename => checksum do - file filename do - get urls['url'], filename - end - - task checksum do - File.delete checksum if File.exist? checksum - get urls['sha_url'], checksum - end - end + # Workaround for https://github.com/github-changelog-generator/github-changelog-generator/issues/715 + require 'rbconfig' + if RbConfig::CONFIG['host_os'] =~ /linux/ + task :changelog do + puts 'Fixing line endings...' + changelog_file = File.join(__dir__, 'CHANGELOG.md') + changelog_txt = File.read(changelog_file) + new_contents = changelog_txt.gsub(%r{\r\n}, "\n") + File.open(changelog_file, "w") {|file| file.puts new_contents } end end - desc 'Purge fetched artifacts' - task :clean do - FileUtils.rm_rf(Dir.glob('spec/fixtures/artifacts/*')) - end +rescue LoadError end +# vim: syntax=ruby diff --git a/lib/facter/es_facts.rb b/lib/facter/es_facts.rb index eb587f4..002f289 100644 --- a/lib/facter/es_facts.rb +++ b/lib/facter/es_facts.rb @@ -1,137 +1,136 @@ +# frozen_string_literal: true + require 'net/http' require 'json' require 'yaml' # Helper module to encapsulate custom fact injection module EsFacts # Add a fact to the catalog of host facts def self.add_fact(prefix, key, value) key = "#{prefix}_#{key}".to_sym ::Facter.add(key) do setcode { value } end end def self.ssl?(config) tls_keys = [ 'xpack.security.http.ssl.enabled' ] tls_keys.any? { |key| (config.key? key) && (config[key] == true) } end # Helper to determine the instance http.port number def self.get_httpport(config) enabled = 'http.enabled' httpport = 'http.port' return false, false if !config[enabled].nil? && config[enabled] == 'false' return config[httpport], ssl?(config) unless config[httpport].nil? + ['9200', ssl?(config)] end # Entrypoint for custom fact populator # # This is a super old function but works; disable a bunch of checks. 
- # rubocop:disable Lint/HandleExceptions - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def self.run dir_prefix = '/etc/elasticsearch' # httpports is a hash of port_number => ssl? transportports = [] http_bound_addresses = [] transport_bound_addresses = [] transport_publish_addresses = [] nodes = {} # only when the directory exists we need to process the stuff return unless File.directory?(dir_prefix) if File.readable?("#{dir_prefix}/elasticsearch.yml") config_data = YAML.load_file("#{dir_prefix}/elasticsearch.yml") httpport, ssl = get_httpport(config_data) end begin add_fact('elasticsearch', 'port', httpport) unless ssl key_prefix = 'elasticsearch' # key_prefix = "elasticsearch_#{httpport}" uri = URI("http://localhost:#{httpport}") http = Net::HTTP.new(uri.host, uri.port) http.read_timeout = 10 http.open_timeout = 2 response = http.get('/') json_data = JSON.parse(response.body) if json_data['status'] && json_data['status'] == 200 add_fact(key_prefix, 'name', json_data['name']) add_fact(key_prefix, 'version', json_data['version']['number']) uri2 = URI("http://localhost:#{httpport}/_nodes/#{json_data['name']}") http2 = Net::HTTP.new(uri2.host, uri2.port) http2.read_timeout = 10 http2.open_timeout = 2 response2 = http2.get(uri2.path) json_data_node = JSON.parse(response2.body) add_fact(key_prefix, 'cluster_name', json_data_node['cluster_name']) node_data = json_data_node['nodes'].first add_fact(key_prefix, 'node_id', node_data[0]) nodes_data = json_data_node['nodes'][node_data[0]] process = nodes_data['process'] add_fact(key_prefix, 'mlockall', process['mlockall']) plugins = nodes_data['plugins'] plugin_names = [] plugins.each do |plugin| plugin_names << plugin['name'] plugin.each do |key, value| prefix = "#{key_prefix}_plugin_#{plugin['name']}" add_fact(prefix, key, value) unless key == 'name' end end add_fact(key_prefix, 'plugins', plugin_names.join(',')) nodes_data['http']['bound_address'].each { |i| 
http_bound_addresses << i } nodes_data['transport']['bound_address'].each { |i| transport_bound_addresses << i } transport_publish_addresses << nodes_data['transport']['publish_address'] unless nodes_data['transport']['publish_address'].nil? - transportports << nodes_data['settings']['transport']['tcp']['port'] unless nodes_data['settings']['transport']['tcp'].nil? or nodes_data['settings']['transport']['tcp']['port'].nil? + transportports << nodes_data['settings']['transport']['tcp']['port'] unless nodes_data['settings']['transport']['tcp'].nil? || nodes_data['settings']['transport']['tcp']['port'].nil? node = { - 'http_ports' => httpports.keys, - 'transport_ports' => transportports, - 'http_bound_addresses' => http_bound_addresses, - 'transport_bound_addresses' => transport_bound_addresses, + 'http_ports' => httpports.keys, + 'transport_ports' => transportports, + 'http_bound_addresses' => http_bound_addresses, + 'transport_bound_addresses' => transport_bound_addresses, 'transport_publish_addresses' => transport_publish_addresses, - json_data['name'] => { - 'settings' => nodes_data['settings'], - 'http' => nodes_data['http'], + json_data['name'] => { + 'settings' => nodes_data['settings'], + 'http' => nodes_data['http'], 'transport' => nodes_data['transport'] } } nodes.merge! node end end - rescue + rescue StandardError + # ignore end Facter.add(:elasticsearch) do setcode do nodes end nodes unless nodes.empty? 
end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity end EsFacts.run diff --git a/lib/puppet/feature/elasticsearch_shield_users_native.rb b/lib/puppet/feature/elasticsearch_shield_users_native.rb index d79e364..26d7f04 100644 --- a/lib/puppet/feature/elasticsearch_shield_users_native.rb +++ b/lib/puppet/feature/elasticsearch_shield_users_native.rb @@ -1,16 +1,22 @@ +# frozen_string_literal: true + require 'puppet/util/feature' require 'puppet/util/package' shield_plugin_dir = '/usr/share/elasticsearch/plugins/shield' -Puppet.features.add(:elasticsearch_shield_users_native) { - File.exist? shield_plugin_dir and - Dir[shield_plugin_dir + '/*.jar'].map do |file| - File.basename(file, '.jar').split('-') - end.select do |parts| - parts.include? 'shield' - end.any? do |parts| - parts.last =~ /^[\d.]+$/ and - Puppet::Util::Package.versioncmp(parts.last, '2.3') >= 0 - end -} +Puppet.features.add(:elasticsearch_shield_users_native) do + return false unless File.exist?(shield_plugin_dir) + + jars = Dir["#{shield_plugin_dir}/*.jar"] + jar_parts = jars.map do |file| + File.basename(file, '.jar').split('-') + end + shield_components = jar_parts.select do |parts| + parts.include? 'shield' + end + shield_components.any? do |parts| + parts.last =~ %r{^[\d.]+$} && + Puppet::Util::Package.versioncmp(parts.last, '2.3') >= 0 + end +end diff --git a/lib/puppet/parser/functions/array_suffix.rb b/lib/puppet/parser/functions/array_suffix.rb index 0e4ce3b..3afad3d 100644 --- a/lib/puppet/parser/functions/array_suffix.rb +++ b/lib/puppet/parser/functions/array_suffix.rb @@ -1,46 +1,44 @@ +# frozen_string_literal: true + # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :array_suffix, - :type => :rvalue, - :doc => <<-EOS -This function applies a suffix to all elements in an array. + type: :rvalue, + doc: <<~EOS + This function applies a suffix to all elements in an array. 
-*Examples:* + *Examples:* - array_suffix(['a','b','c'], 'p') + array_suffix(['a','b','c'], 'p') -Will return: ['ap','bp','cp'] + Will return: ['ap','bp','cp'] -@return Array + @return Array EOS ) do |arguments| # Technically we support two arguments but only first is mandatory ... - raise(Puppet::ParseError, 'array_suffix(): Wrong number of arguments ' \ - "given (#{arguments.size} for 1)") if arguments.empty? + if arguments.empty? + raise(Puppet::ParseError, 'array_suffix(): Wrong number of arguments ' \ + "given (#{arguments.size} for 1)") + end array = arguments[0] - unless array.is_a?(Array) - raise Puppet::ParseError, "array_suffix(): expected first argument to be an Array, got #{array.inspect}" - end + raise Puppet::ParseError, "array_suffix(): expected first argument to be an Array, got #{array.inspect}" unless array.is_a?(Array) suffix = arguments[1] if arguments[1] - if suffix - unless suffix.is_a? String - raise Puppet::ParseError, "array_suffix(): expected second argument to be a String, got #{suffix.inspect}" - end - end + raise Puppet::ParseError, "array_suffix(): expected second argument to be a String, got #{suffix.inspect}" if suffix && !(suffix.is_a? String) # Turn everything into string same as join would do ... - result = array.collect do |i| + result = array.map do |i| i = i.to_s suffix ? i + suffix : i end return result end end # vim: set ts=2 sw=2 et : diff --git a/lib/puppet/parser/functions/concat_merge.rb b/lib/puppet/parser/functions/concat_merge.rb index cddb7e2..27b20c7 100644 --- a/lib/puppet/parser/functions/concat_merge.rb +++ b/lib/puppet/parser/functions/concat_merge.rb @@ -1,50 +1,48 @@ +# frozen_string_literal: true + # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :concat_merge, - :type => :rvalue, - :doc => <<-'ENDHEREDOC') do |args| + type: :rvalue, + doc: <<-'ENDHEREDOC') do |args| Merges two or more hashes together concatenating duplicate keys with array values and returns the resulting hash. 
For example: $hash1 = {'a' => [1]} $hash2 = {'a' => [2]} concat_merge($hash1, $hash2) # The resulting hash is equivalent to: # { 'a' => [1, 2] } When there is a duplicate key that is not an array, the key in the rightmost hash will "win." @return String ENDHEREDOC - if args.length < 2 - raise Puppet::ParseError, "concat_merge(): wrong number of arguments (#{args.length}; must be at least 2)" - end + raise Puppet::ParseError, "concat_merge(): wrong number of arguments (#{args.length}; must be at least 2)" if args.length < 2 concat_merge = proc do |hash1, hash2| hash1.merge(hash2) do |_key, old_value, new_value| if old_value.is_a?(Array) && new_value.is_a?(Array) old_value + new_value else new_value end end end result = {} args.each do |arg| - next if arg.is_a? String and arg.empty? # empty string is synonym for puppet's undef + next if arg.is_a?(String) && arg.empty? # empty string is synonym for puppet's undef # If the argument was not a hash, skip it. - unless arg.is_a?(Hash) - raise Puppet::ParseError, "concat_merge: unexpected argument type #{arg.class}, only expects hash arguments" - end + raise Puppet::ParseError, "concat_merge: unexpected argument type #{arg.class}, only expects hash arguments" unless arg.is_a?(Hash) result = concat_merge.call(result, arg) end result end end diff --git a/lib/puppet/parser/functions/deep_implode.rb b/lib/puppet/parser/functions/deep_implode.rb index 0cba5d6..92dce02 100644 --- a/lib/puppet/parser/functions/deep_implode.rb +++ b/lib/puppet/parser/functions/deep_implode.rb @@ -1,46 +1,44 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet_x/elastic/deep_implode' # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :deep_implode, - :type => :rvalue, - :doc => <<-'ENDHEREDOC') do |args| + type: :rvalue, + doc: <<-'ENDHEREDOC') do |args| Recursively flattens all keys of a hash into a dot-notated hash, deeply merging duplicate key values by 
natively combining them and returns the resulting hash. That is confusing, look at the examples for more clarity. For example: $hash = {'top' => {'sub' => [1]}, 'top.sub' => [2] } $flattened_hash = deep_implode($hash) # The resulting hash is equivalent to: # { 'top.sub' => [1, 2] } When the function encounters array or hash values, they are concatenated or merged, respectively. When duplace paths for a key are generated, the function will prefer to retain keys with the longest root key. @return Hash ENDHEREDOC - if args.length != 1 - raise Puppet::ParseError, "deep_implode(): wrong number of arguments (#{args.length}; must be 1)" - end + raise Puppet::ParseError, "deep_implode(): wrong number of arguments (#{args.length}; must be 1)" if args.length != 1 arg = args[0] - unless arg.is_a? Hash - raise Puppet::ParseError, 'deep_implode: unexpected argument type, only expects hashes' - end + raise Puppet::ParseError, 'deep_implode: unexpected argument type, only expects hashes' unless arg.is_a? Hash return {} if arg.empty? Puppet_X::Elastic.deep_implode arg end end diff --git a/lib/puppet/parser/functions/es_plugin_name.rb b/lib/puppet/parser/functions/es_plugin_name.rb index 680b943..33813e2 100644 --- a/lib/puppet/parser/functions/es_plugin_name.rb +++ b/lib/puppet/parser/functions/es_plugin_name.rb @@ -1,42 +1,44 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet_x/elastic/plugin_parsing' # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :es_plugin_name, - :type => :rvalue, - :doc => <<-'ENDHEREDOC') do |args| + type: :rvalue, + doc: <<-'ENDHEREDOC') do |args| Given a string, return the best guess at what the directory name will be for the given plugin. Any arguments past the first will be fallbacks (using the same logic) should the first fail. 
For example, all the following return values are "plug": es_plugin_name('plug') es_plugin_name('foo/plug') es_plugin_name('foo/plug/1.0.0') es_plugin_name('foo/elasticsearch-plug') es_plugin_name('foo/es-plug/1.3.2') @return String ENDHEREDOC if args.empty? raise Puppet::ParseError, 'wrong number of arguments, at least one value required' end ret = args.select do |arg| - arg.is_a? String and not arg.empty? + arg.is_a?(String) && !arg.empty? end.first if ret Puppet_X::Elastic.plugin_name ret else raise Puppet::Error, 'could not determine plugin name' end end end diff --git a/lib/puppet/parser/functions/plugin_dir.rb b/lib/puppet/parser/functions/plugin_dir.rb index aee8174..fadba1c 100644 --- a/lib/puppet/parser/functions/plugin_dir.rb +++ b/lib/puppet/parser/functions/plugin_dir.rb @@ -1,43 +1,38 @@ +# frozen_string_literal: true + # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :plugin_dir, - :type => :rvalue, - :doc => <<-EOS + type: :rvalue, + doc: <<-EOS Extracts the end plugin directory of the name @return String EOS ) do |arguments| - if arguments.empty? - raise(Puppet::ParseError, 'plugin_dir(): No arguments given') - elsif arguments.size > 2 - raise(Puppet::ParseError, "plugin_dir(): Too many arguments given (#{arguments.size})") - else - - unless arguments[0].is_a?(String) - raise(Puppet::ParseError, 'plugin_dir(): Requires string as first argument') - end + raise(Puppet::ParseError, 'plugin_dir(): No arguments given') if arguments.empty? 
+ raise(Puppet::ParseError, "plugin_dir(): Too many arguments given (#{arguments.size})") if arguments.size > 2 + raise(Puppet::ParseError, 'plugin_dir(): Requires string as first argument') unless arguments[0].is_a?(String) - plugin_name = arguments[0] - items = plugin_name.split('/') + plugin_name = arguments[0] + items = plugin_name.split('/') - return items[0] if items.count == 1 + return items[0] if items.count == 1 - plugin = items[1] - endname = if plugin.include?('-') # example elasticsearch-head - if plugin.start_with?('elasticsearch-') - plugin.gsub('elasticsearch-', '') - elsif plugin.start_with?('es-') - plugin.gsub('es-', '') - else - plugin - end + plugin = items[1] + endname = if plugin.include?('-') # example elasticsearch-head + if plugin.start_with?('elasticsearch-') + plugin.gsub('elasticsearch-', '') + elsif plugin.start_with?('es-') + plugin.gsub('es-', '') else plugin end + else + plugin + end - return endname - end + return endname end end diff --git a/lib/puppet/provider/elastic_parsedfile.rb b/lib/puppet/provider/elastic_parsedfile.rb index cc2a6b8..c13e6a5 100644 --- a/lib/puppet/provider/elastic_parsedfile.rb +++ b/lib/puppet/provider/elastic_parsedfile.rb @@ -1,12 +1,14 @@ +# frozen_string_literal: true + require 'puppet/provider/parsedfile' # Parent class for Elasticsearch-based providers that need to access # specific configuration directories. class Puppet::Provider::ElasticParsedFile < Puppet::Provider::ParsedFile # Find/set an x-pack configuration file. 
# # @return String def self.xpack_config(val) - @default_target ||= "/etc/elasticsearch/#{val}" + @xpack_config ||= "/etc/elasticsearch/#{val}" end end diff --git a/lib/puppet/provider/elastic_plugin.rb b/lib/puppet/provider/elastic_plugin.rb index 97e4d6c..2ddf9f2 100644 --- a/lib/puppet/provider/elastic_plugin.rb +++ b/lib/puppet/provider/elastic_plugin.rb @@ -1,161 +1,166 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'uri' require 'puppet_x/elastic/es_versioning' require 'puppet_x/elastic/plugin_parsing' # Generalized parent class for providers that behave like Elasticsearch's plugin # command line tool. -# rubocop:disable Metrics/ClassLength class Puppet::Provider::ElasticPlugin < Puppet::Provider # Elasticsearch's home directory. # # @return String def homedir case Facter.value('osfamily') when 'OpenBSD' '/usr/local/elasticsearch' else '/usr/share/elasticsearch' end end def exists? # First, attempt to list whether the named plugin exists by finding a # plugin descriptor file, which each plugin should have. We must wildcard # the name to match meta plugins, see upstream issue for this change: # https://github.com/elastic/elasticsearch/pull/28022 properties_files = Dir[File.join(@resource[:plugin_dir], plugin_path, '**', '*plugin-descriptor.properties')] return false if properties_files.empty? begin # Use the basic name format that the plugin tool supports in order to # determine the version from the resource name. plugin_version = Puppet_X::Elastic.plugin_version(@resource[:name]) # Naively parse the Java .properties file to check version equality. # Because we don't have the luxury of installing arbitrary gems, perform # simple parse with a degree of safety checking in the call chain # # Note that x-pack installs "meta" plugins which bundle multiple plugins # in one. Therefore, we need to find the first "sub" plugin that # indicates which version of x-pack this is. 
properties = properties_files.sort.map do |prop_file| - IO.readlines(prop_file).map(&:strip).reject do |line| - line.start_with?('#') or line.empty? - end.map do |property| + lines = File.readlines(prop_file).map(&:strip).reject do |line| + line.start_with?('#') || line.empty? + end + lines = lines.map do |property| property.split('=') - end.reject do |pairs| - pairs.length != 2 - end.to_h - end.find { |prop| prop.key? 'version' } + end + lines = lines.select do |pairs| + pairs.length == 2 + end + lines.to_h + end + properties = properties.find { |prop| prop.key? 'version' } - if properties and properties['version'] != plugin_version + if properties && properties['version'] != plugin_version debug "Elasticsearch plugin #{@resource[:name]} not version #{plugin_version}, reinstalling" destroy return false end rescue ElasticPluginParseFailure debug "Failed to parse plugin version for #{@resource[:name]}" end # If there is no version string, we do not check version equality debug "No version found in #{@resource[:name]}, not enforcing any version" true end def plugin_path @resource[:plugin_path] || Puppet_X::Elastic.plugin_name(@resource[:name]) end # Intelligently returns the correct installation arguments for Elasticsearch. # # @return [Array] # arguments to pass to the plugin installation utility def install_args if !@resource[:url].nil? [@resource[:url]] elsif !@resource[:source].nil? ["file://#{@resource[:source]}"] else [@resource[:name]] end end # Format proxy arguments for consumption by the elasticsearch plugin # management tool (i.e., Java properties). # # @return Array # of flags for command-line tools def proxy_args(url) parsed = URI(url) %w[http https].map do |schema| - [:host, :port, :user, :password].map do |param| + %i[host port user password].map do |param| option = parsed.send(param) "-D#{schema}.proxy#{param.to_s.capitalize}=#{option}" unless option.nil? end end.flatten.compact end # Install this plugin on the host. 
def create commands = [] commands += proxy_args(@resource[:proxy]) if @resource[:proxy] commands << 'install' commands << '--batch' commands += install_args debug("Commands: #{commands.inspect}") retry_count = 3 retry_times = 0 begin with_environment do plugin(commands) end rescue Puppet::ExecutionFailure => e retry_times += 1 debug("Failed to install plugin. Retrying... #{retry_times} of #{retry_count}") sleep 2 retry if retry_times < retry_count raise "Failed to install plugin. Received error: #{e.inspect}" end end # Remove this plugin from the host. def destroy with_environment do plugin(['remove', Puppet_X::Elastic.plugin_name(@resource[:name])]) end end # Run a command wrapped in necessary env vars def with_environment(&block) env_vars = { 'ES_JAVA_OPTS' => @resource[:java_opts], 'ES_PATH_CONF' => @resource[:configdir] } saved_vars = {} # Use 'java_home' param if supplied, otherwise default to Elasticsearch shipped JDK - env_vars['JAVA_HOME'] = if @resource[:java_home].nil? or @resource[:java_home] == '' + env_vars['JAVA_HOME'] = if @resource[:java_home].nil? || @resource[:java_home] == '' "#{homedir}/jdk" else @resource[:java_home] end env_vars['ES_JAVA_OPTS'] = env_vars['ES_JAVA_OPTS'].join(' ') env_vars.each do |env_var, value| saved_vars[env_var] = ENV[env_var] ENV[env_var] = value end ret = block.yield saved_vars.each do |env_var, value| ENV[env_var] = value end ret end end diff --git a/lib/puppet/provider/elastic_rest.rb b/lib/puppet/provider/elastic_rest.rb index 0b993fb..2a56ed3 100644 --- a/lib/puppet/provider/elastic_rest.rb +++ b/lib/puppet/provider/elastic_rest.rb @@ -1,303 +1,286 @@ +# frozen_string_literal: true + require 'json' require 'net/http' require 'openssl' # Parent class encapsulating general-use functions for children REST-based # providers. 
-# rubocop:disable Metrics/ClassLength class Puppet::Provider::ElasticREST < Puppet::Provider class << self - attr_accessor :api_discovery_uri - attr_accessor :api_resource_style - attr_accessor :api_uri - attr_accessor :discrete_resource_creation - attr_accessor :metadata - attr_accessor :metadata_pipeline - attr_accessor :query_string + attr_accessor :api_discovery_uri, :api_resource_style, :api_uri, :discrete_resource_creation, :metadata, :metadata_pipeline, :query_string end # Fetch arbitrary metadata for the class from an instance object. # # @return String def metadata self.class.metadata end # Retrieve the class query_string variable # # @return String def query_string self.class.query_string end # Perform a REST API request against the indicated endpoint. # # @return Net::HTTPResponse - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity - def self.rest(http, \ - req, \ - validate_tls = true, \ - timeout = 10, \ - username = nil, \ - password = nil) - - if username and password + def self.rest(http, + req, + timeout = 10, + username = nil, + password = nil, + validate_tls: true) + + if username && password req.basic_auth username, password - elsif username or password + elsif username || password Puppet.warning( 'username and password must both be defined, skipping basic auth' ) end req['Accept'] = 'application/json' http.read_timeout = timeout http.open_timeout = timeout http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless validate_tls begin http.request req rescue EOFError => e # Because the provider attempts a best guess at API access, we # only fail when HTTP operations fail for mutating methods. unless %w[GET OPTIONS HEAD].include? req.method raise Puppet::Error, "Received '#{e}' from the Elasticsearch API. Are your API settings correct?" 
end end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity # Helper to format a remote URL request for Elasticsearch which takes into # account path ordering, et cetera. def self.format_uri(resource_path, property_flush = {}) - return api_uri if resource_path.nil? or api_resource_style == :bare - if discrete_resource_creation and not property_flush[:ensure].nil? + return api_uri if resource_path.nil? || api_resource_style == :bare + + if discrete_resource_creation && !property_flush[:ensure].nil? resource_path else case api_resource_style when :prefix - resource_path + '/' + api_uri + "#{resource_path}/#{api_uri}" else - api_uri + '/' + resource_path + "#{api_uri}/#{resource_path}" end end end # Fetch Elasticsearch API objects. Accepts a variety of argument functions # dictating how to connect to the Elasticsearch API. # # @return Array # an array of Hashes representing the found API objects, whether they be # templates, pipelines, et cetera. - def self.api_objects(protocol = 'http', \ - validate_tls = true, \ - host = 'localhost', \ - port = 9200, \ - timeout = 10, \ - username = nil, \ - password = nil, \ - ca_file = nil, \ - ca_path = nil) + def self.api_objects(protocol = 'http', + host = 'localhost', + port = 9200, + timeout = 10, + username = nil, + password = nil, + ca_file = nil, + ca_path = nil, + validate_tls: true) uri = URI("#{protocol}://#{host}:#{port}/#{format_uri(api_discovery_uri)}") http = Net::HTTP.new uri.host, uri.port req = Net::HTTP::Get.new uri.request_uri http.use_ssl = uri.scheme == 'https' [[ca_file, :ca_file=], [ca_path, :ca_path=]].each do |arg, method| - http.send method, arg if arg and http.respond_to? method + http.send method, arg if arg && http.respond_to?(method) end - response = rest http, req, validate_tls, timeout, username, password + response = rest http, req, timeout, username, password, validate_tls: validate_tls results = [] - if response.respond_to? 
:code and response.code.to_i == 200 - results = process_body(response.body) - end + results = process_body(response.body) if response.respond_to?(:code) && response.code.to_i == 200 results end # Process the JSON response body def self.process_body(body) - results = JSON.parse(body).map do |object_name, api_object| + JSON.parse(body).map do |object_name, api_object| { - :name => object_name, - :ensure => :present, - metadata => process_metadata(api_object), + :name => object_name, + :ensure => :present, + metadata => process_metadata(api_object), :provider => name } end - - results end # Passes API objects through arbitrary Procs/lambdas in order to postprocess # API responses. def self.process_metadata(raw_metadata) - if metadata_pipeline.is_a? Array and !metadata_pipeline.empty? + if metadata_pipeline.is_a?(Array) && !metadata_pipeline.empty? metadata_pipeline.reduce(raw_metadata) do |md, processor| processor.call md end else raw_metadata end end # Fetch an array of provider objects from the Elasticsearch API. def self.instances api_objects.map { |resource| new resource } end # Unlike a typical #prefetch, which just ties discovered #instances to the # correct resources, we need to quantify all the ways the resources in the # catalog know about Elasticsearch API access and use those settings to # fetch any templates we can before associating resources and providers. def self.prefetch(resources) # Get all relevant API access methods from the resources we know about - resources.map do |_, resource| + res = resources.map do |_, resource| p = resource.parameters [ p[:protocol].value, - p[:validate_tls].value, p[:host].value, p[:port].value, p[:timeout].value, (p.key?(:username) ? p[:username].value : nil), (p.key?(:password) ? p[:password].value : nil), (p.key?(:ca_file) ? p[:ca_file].value : nil), - (p.key?(:ca_path) ? p[:ca_path].value : nil) + (p.key?(:ca_path) ? 
p[:ca_path].value : nil), + { validate_tls: p[:validate_tls].value }, ] # Deduplicate identical settings, and fetch templates - end.uniq.map do |api| + end.uniq + res = res.map do |api| api_objects(*api) # Flatten and deduplicate the array, instantiate providers, and do the # typical association dance - end.flatten.uniq.map { |resource| new resource }.each do |prov| + end + res.flatten.uniq.map { |resource| new resource }.each do |prov| if (resource = resources[prov.name]) resource.provider = prov end end end def initialize(value = {}) super(value) @property_flush = {} end # Generate a request body def generate_body JSON.generate( - if metadata != :content and @property_flush[:ensure] == :present + if metadata != :content && @property_flush[:ensure] == :present { metadata.to_s => resource[metadata] } else resource[metadata] end ) end # Call Elasticsearch's REST API to appropriately PUT/DELETE/or otherwise # update any managed API objects. - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def flush Puppet.debug('Got to flush') uri = URI( format( '%s://%s:%d/%s', resource[:protocol], resource[:host], resource[:port], self.class.format_uri(resource[:name], @property_flush) ) ) uri.query = URI.encode_www_form query_string if query_string Puppet.debug("Generated URI = #{uri.inspect}") case @property_flush[:ensure] when :absent req = Net::HTTP::Delete.new uri.request_uri else req = Net::HTTP::Put.new uri.request_uri req.body = generate_body Puppet.debug("Generated body looks like: #{req.body.inspect}") # As of Elasticsearch 6.x, required when requesting with a payload (so we # set it always to be safe) req['Content-Type'] = 'application/json' if req['Content-Type'].nil? end http = Net::HTTP.new uri.host, uri.port http.use_ssl = uri.scheme == 'https' - [:ca_file, :ca_path].each do |arg| - if !resource[arg].nil? and http.respond_to? 
arg - http.send "#{arg}=".to_sym, resource[arg] - end + %i[ca_file ca_path].each do |arg| + http.send "#{arg}=".to_sym, resource[arg] if !resource[arg].nil? && http.respond_to?(arg) end response = self.class.rest( http, req, - resource[:validate_tls], resource[:timeout], resource[:username], - resource[:password] + resource[:password], + validate_tls: resource[:validate_tls] ) # Attempt to return useful error output unless response.code.to_i == 200 Puppet.debug("Non-OK reponse: Body = #{response.body.inspect}") json = JSON.parse(response.body) err_msg = if json.key? 'error' - if json['error'].is_a? Hash \ - and json['error'].key? 'root_cause' + if json['error'].is_a?(Hash) \ + && json['error'].key?('root_cause') # Newer versions have useful output json['error']['root_cause'].first['reason'] else # Otherwise fallback to old-style error messages json['error'] end else # As a last resort, return the response error code "HTTP #{response.code}" end raise Puppet::Error, "Elasticsearch API responded with: #{err_msg}" end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity - @property_hash = self.class.api_objects( resource[:protocol], - resource[:validate_tls], resource[:host], resource[:port], resource[:timeout], resource[:username], resource[:password], resource[:ca_file], - resource[:ca_path] - ).detect do |t| + resource[:ca_path], + validate_tls: resource[:validate_tls] + ).find do |t| t[:name] == resource[:name] end end # Set this provider's `:ensure` property to `:present`. def create @property_flush[:ensure] = :present end def exists? @property_hash[:ensure] == :present end # Set this provider's `:ensure` property to `:absent`. 
def destroy @property_flush[:ensure] = :absent end -end # of class +end diff --git a/lib/puppet/provider/elastic_user_command.rb b/lib/puppet/provider/elastic_user_command.rb index b6ca8e7..6c76e8c 100644 --- a/lib/puppet/provider/elastic_user_command.rb +++ b/lib/puppet/provider/elastic_user_command.rb @@ -1,125 +1,131 @@ +# frozen_string_literal: true + # Parent provider for Elasticsearch Shield/X-Pack file-based user management # tools. class Puppet::Provider::ElasticUserCommand < Puppet::Provider attr_accessor :homedir # Elasticsearch's home directory. # # @return String def self.homedir @homedir ||= case Facter.value('osfamily') when 'OpenBSD' '/usr/local/elasticsearch' else '/usr/share/elasticsearch' end end # Run the user management command with specified tool arguments. def self.command_with_path(args, configdir = nil) options = { - :combine => true, - :custom_environment => { + combine: true, + custom_environment: { 'ES_PATH_CONF' => configdir || '/etc/elasticsearch' }, - :failonfail => true + failonfail: true } execute( [command(:users_cli)] + (args.is_a?(Array) ? args : [args]), options ) end # Gather local file-based users into an array of Hash objects. def self.fetch_users begin output = command_with_path('list') rescue Puppet::ExecutionFailure => e debug("#fetch_users had an error: #{e.inspect}") return nil end debug("Raw command output: #{output}") - output.split("\n").select { |u| + matching_lines = output.split("\n").select do |u| # Keep only expected "user : role1,role2" formatted lines - u[/^[^:]+:\s+\S+$/] - }.map { |u| + u[%r{^[^:]+:\s+\S+$}] + end + + users = matching_lines.map do |u| # Break into ["user ", " role1,role2"] u.split(':').first.strip - }.map do |user| + end + + users.map do |user| { - :name => user, - :ensure => :present, - :provider => name + name: user, + ensure: :present, + provider: name } end end # Fetch an array of provider objects from the the list of local users. 
def self.instances fetch_users.map do |user| new user end end # Generic prefetch boilerplate. def self.prefetch(resources) instances.each do |prov| if (resource = resources[prov.name]) resource.provider = prov end end end def initialize(value = {}) super(value) @property_flush = {} end # Enforce the desired state for this user on-disk. def flush arguments = [] case @property_flush[:ensure] when :absent arguments << 'userdel' arguments << resource[:name] else arguments << 'useradd' arguments << resource[:name] arguments << '-p' << resource[:password] end self.class.command_with_path(arguments, resource[:configdir]) - @property_hash = self.class.fetch_users.detect do |u| + @property_hash = self.class.fetch_users.find do |u| u[:name] == resource[:name] end end # Set this provider's `:ensure` property to `:present`. def create @property_flush[:ensure] = :present end def exists? @property_hash[:ensure] == :present end # Set this provider's `:ensure` property to `:absent`. def destroy @property_flush[:ensure] = :absent end # Manually set this user's password. def passwd self.class.command_with_path( [ 'passwd', resource[:name], '-p', resource[:password] ], resource[:configdir] ) end end diff --git a/lib/puppet/provider/elastic_user_roles.rb b/lib/puppet/provider/elastic_user_roles.rb index ee86a4c..9ce660a 100644 --- a/lib/puppet/provider/elastic_user_roles.rb +++ b/lib/puppet/provider/elastic_user_roles.rb @@ -1,49 +1,59 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_yaml' # Provider to help manage file-based X-Pack user/role configuration # files. class Puppet::Provider::ElasticUserRoles < Puppet::Provider::ElasticYaml # Override the ancestor `parse` method to process a users/roles file # managed by the Elasticsearch user tools. def self.parse(text) - text.split("\n").map(&:strip).select do |line| + lines = text.split("\n").map(&:strip).select do |line| # Strip comments - not line.start_with? '#' and not line.empty? 
- end.map do |line| + (!line.start_with? '#') && !line.empty? + end + lines = lines.map do |line| # Turn array of roles into array of users that have the role role, users = line.split(':') users.split(',').map do |user| { user => [role] } end - end.flatten.inject({}) do |hash, user| + end + lines = lines.flatten.reduce({}) do |hash, user| # Gather up user => role hashes by append-merging role lists hash.merge(user) { |_, o, n| o + n } - end.map do |user, roles| + end + lines = lines.map do |user, roles| # Map those hashes into what the provider expects { - :name => user, - :roles => roles + name: user, + roles: roles } - end.to_a + end + lines.to_a end # Represent this user/role record as a correctly-formatted config file. def self.to_file(records) debug "Flushing: #{records.inspect}" records.map do |record| record[:roles].map do |r| { [record[:name]] => r } end - end.flatten.map(&:invert).inject({}) do |acc, role| + end + records = records.flatten.map(&:invert).reduce({}) do |acc, role| acc.merge(role) { |_, o, n| o + n } - end.delete_if do |_, users| + end + records = records.delete_if do |_, users| users.empty? - end.map do |role, users| + end + records = records.map do |role, users| "#{role}:#{users.join(',')}" - end.join("\n") + "\n" + end + "#{records.join("\n")}\n" end def self.skip_record?(_record) false end end diff --git a/lib/puppet/provider/elastic_yaml.rb b/lib/puppet/provider/elastic_yaml.rb index 0d855fb..57d6681 100644 --- a/lib/puppet/provider/elastic_yaml.rb +++ b/lib/puppet/provider/elastic_yaml.rb @@ -1,58 +1,61 @@ -# -*- coding: utf-8 -*- +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet/provider/elastic_parsedfile' require 'puppet/util/package' require 'puppet_x/elastic/hash' # Provider for yaml-based Elasticsearch configuration files. 
class Puppet::Provider::ElasticYaml < Puppet::Provider::ElasticParsedFile class << self attr_accessor :metadata end # Transform a given string into a Hash-based representation of the # provider. def self.parse(text) - yaml = YAML.load text + yaml = YAML.safe_load text if yaml yaml.map do |key, metadata| { :name => key, :ensure => :present, @metadata => metadata } end else [] end end # Transform a given list of provider records into yaml-based # representation. def self.to_file(records) yaml = records.map do |record| # Convert top-level symbols to strings - Hash[record.map { |k, v| [k.to_s, v] }] - end.inject({}) do |hash, record| + record.transform_keys(&:to_s) + end + yaml = yaml.reduce({}) do |hash, record| # Flatten array of hashes into single hash hash.merge(record['name'] => record.delete(@metadata.to_s)) - end.extend(Puppet_X::Elastic::SortedHash).to_yaml.split("\n") + end + yaml = yaml.extend(Puppet_X::Elastic::SortedHash).to_yaml.split("\n") - yaml.shift if yaml.first =~ /---/ + yaml.shift if yaml.first =~ %r{---} yaml = yaml.join("\n") yaml << "\n" end def self.skip_record?(_record) false end # This is ugly, but it's overridden in ParsedFile with abstract # functionality we don't need for our simple provider class. # This has been observed to break in Puppet version 3/4 switches. def self.valid_attr?(klass, attr_name) klass.is_a? Class ? 
klass.parameters.include?(attr_name) : true end end diff --git a/lib/puppet/provider/elasticsearch_index/ruby.rb b/lib/puppet/provider/elasticsearch_index/ruby.rb index 0fa171b..e97b58e 100644 --- a/lib/puppet/provider/elasticsearch_index/ruby.rb +++ b/lib/puppet/provider/elasticsearch_index/ruby.rb @@ -1,25 +1,27 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/provider/elastic_rest' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' Puppet::Type.type(:elasticsearch_index).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :metadata => :settings, - :metadata_pipeline => [ - lambda { |data| data['settings'] }, - lambda { |data| Puppet_X::Elastic.deep_to_s data }, - lambda { |data| Puppet_X::Elastic.deep_to_i data } + parent: Puppet::Provider::ElasticREST, + metadata: :settings, + metadata_pipeline: [ + ->(data) { data['settings'] }, + ->(data) { Puppet_X::Elastic.deep_to_s data }, + ->(data) { Puppet_X::Elastic.deep_to_i data } ], - :api_uri => '_settings', - :api_discovery_uri => '_all', - :api_resource_style => :prefix, - :discrete_resource_creation => true + api_uri: '_settings', + api_discovery_uri: '_all', + api_resource_style: :prefix, + discrete_resource_creation: true ) do desc 'A REST API based provider to manage Elasticsearch index settings.' mk_resource_methods end diff --git a/lib/puppet/provider/elasticsearch_keystore/ruby.rb b/lib/puppet/provider/elasticsearch_keystore/ruby.rb index 6233564..9c39eaf 100644 --- a/lib/puppet/provider/elasticsearch_keystore/ruby.rb +++ b/lib/puppet/provider/elasticsearch_keystore/ruby.rb @@ -1,167 +1,168 @@ +# frozen_string_literal: true + Puppet::Type.type(:elasticsearch_keystore).provide( :elasticsearch_keystore ) do desc 'Provider for `elasticsearch-keystore` based secret management.' 
def self.defaults_dir @defaults_dir ||= case Facter.value('osfamily') when 'RedHat' '/etc/sysconfig' else '/etc/default' end end def self.home_dir @home_dir ||= case Facter.value('osfamily') when 'OpenBSD' '/usr/local/elasticsearch' else '/usr/share/elasticsearch' end end attr_accessor :defaults_dir, :home_dir - commands :keystore => "#{home_dir}/bin/elasticsearch-keystore" + commands keystore: "#{home_dir}/bin/elasticsearch-keystore" def self.run_keystore(args, instance, configdir = '/etc/elasticsearch', stdin = nil) options = { - :custom_environment => { - 'ES_INCLUDE' => File.join(defaults_dir, "elasticsearch-#{instance}"), + custom_environment: { + 'ES_INCLUDE' => File.join(defaults_dir, "elasticsearch-#{instance}"), 'ES_PATH_CONF' => "#{configdir}/#{instance}" }, - :uid => 'elasticsearch', - :gid => 'elasticsearch', - :failonfail => true + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true } unless stdin.nil? stdinfile = Tempfile.new('elasticsearch-keystore') stdinfile << stdin stdinfile.flush options[:stdinfile] = stdinfile.path end begin stdout = execute([command(:keystore)] + args, options) ensure unless stdin.nil? stdinfile.close stdinfile.unlink end end stdout.exitstatus.zero? ? stdout : raise(Puppet::Error, stdout) end def self.present_keystores - Dir[File.join(%w[/ etc elasticsearch *])].select do |directory| + files = Dir[File.join(%w[/ etc elasticsearch *])].select do |directory| File.exist? 
File.join(directory, 'elasticsearch.keystore') - end.map do |instance| + end + + files.map do |instance| settings = run_keystore(['list'], File.basename(instance)).split("\n") { - :name => File.basename(instance), - :ensure => :present, - :provider => name, - :settings => settings + name: File.basename(instance), + ensure: :present, + provider: name, + settings: settings } end end def self.instances present_keystores.map do |keystore| new keystore end end def self.prefetch(resources) instances.each do |prov| if (resource = resources[prov.name]) resource.provider = prov end end end def initialize(value = {}) super(value) @property_flush = {} end - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def flush case @property_flush[:ensure] when :present debug(self.class.run_keystore(['create'], resource[:name], resource[:configdir])) @property_flush[:settings] = resource[:settings] when :absent File.delete(File.join([ - '/', 'etc', 'elasticsearch', resource[:instance], 'elasticsearch.keystore' - ])) + '/', 'etc', 'elasticsearch', resource[:instance], 'elasticsearch.keystore' + ])) end # Note that since the property is :array_matching => :all, we have to # expect that the hash is wrapped in an array. - if @property_flush[:settings] and not @property_flush[:settings].first.empty? + if @property_flush[:settings] && !@property_flush[:settings].first.empty? # Flush properties that _should_ be present @property_flush[:settings].first.each_pair do |setting, value| next unless @property_hash[:settings].nil? \ - or not @property_hash[:settings].include? setting + || (!@property_hash[:settings].include? setting) + debug(self.class.run_keystore( - ['add', '--force', '--stdin', setting], resource[:name], resource[:configdir], value - )) + ['add', '--force', '--stdin', setting], resource[:name], resource[:configdir], value + )) end # Remove properties that are no longer present - if resource[:purge] and not (@property_hash.nil? 
or @property_hash[:settings].nil?) + if resource[:purge] && !(@property_hash.nil? || @property_hash[:settings].nil?) (@property_hash[:settings] - @property_flush[:settings].first.keys).each do |setting| debug(self.class.run_keystore( - ['remove', setting], resource[:name], resource[:configdir] - )) + ['remove', setting], resource[:name], resource[:configdir] + )) end end end - @property_hash = self.class.present_keystores.detect do |u| + @property_hash = self.class.present_keystores.find do |u| u[:name] == resource[:name] end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity # settings property setter # # @return [Hash] settings def settings=(new_settings) @property_flush[:settings] = new_settings end # settings property getter # # @return [Hash] settings def settings @property_hash[:settings] end # Sets the ensure property in the @property_flush hash. # # @return [Symbol] :present def create @property_flush[:ensure] = :present end # Determine whether this resource is present on the system. # # @return [Boolean] def exists? @property_hash[:ensure] == :present end # Set flushed ensure property to absent. 
# # @return [Symbol] :absent def destroy @property_flush[:ensure] = :absent end end diff --git a/lib/puppet/provider/elasticsearch_license/xpack.rb b/lib/puppet/provider/elasticsearch_license/xpack.rb index d7dda2c..4bae734 100644 --- a/lib/puppet/provider/elasticsearch_license/xpack.rb +++ b/lib/puppet/provider/elasticsearch_license/xpack.rb @@ -1,31 +1,33 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_rest' Puppet::Type.type(:elasticsearch_license).provide( :xpack, - :api_resource_style => :bare, - :parent => Puppet::Provider::ElasticREST, - :metadata => :content, - :metadata_pipeline => [ - lambda { |data| Puppet_X::Elastic.deep_to_s data }, - lambda { |data| Puppet_X::Elastic.deep_to_i data } + api_resource_style: :bare, + parent: Puppet::Provider::ElasticREST, + metadata: :content, + metadata_pipeline: [ + ->(data) { Puppet_X::Elastic.deep_to_s data }, + ->(data) { Puppet_X::Elastic.deep_to_i data } ], - :api_uri => '_xpack/license', - :query_string => { + api_uri: '_xpack/license', + query_string: { 'acknowledge' => 'true' } ) do desc 'A REST API based provider to manage Elasticsearch X-Pack licenses.' 
mk_resource_methods def self.process_body(body) JSON.parse(body).map do |_object_name, api_object| { - :name => name.to_s, - :ensure => :present, - metadata => { 'license' => process_metadata(api_object) }, + :name => name.to_s, + :ensure => :present, + metadata => { 'license' => process_metadata(api_object) }, :provider => name } end end end diff --git a/lib/puppet/provider/elasticsearch_pipeline/ruby.rb b/lib/puppet/provider/elasticsearch_pipeline/ruby.rb index c277dc8..ed0bd50 100644 --- a/lib/puppet/provider/elasticsearch_pipeline/ruby.rb +++ b/lib/puppet/provider/elasticsearch_pipeline/ruby.rb @@ -1,12 +1,14 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_rest' Puppet::Type.type(:elasticsearch_pipeline).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :metadata => :content, - :api_uri => '_ingest/pipeline' + parent: Puppet::Provider::ElasticREST, + metadata: :content, + api_uri: '_ingest/pipeline' ) do desc 'A REST API based provider to manage Elasticsearch ingest pipelines.' mk_resource_methods end diff --git a/lib/puppet/provider/elasticsearch_plugin/ruby.rb b/lib/puppet/provider/elasticsearch_plugin/ruby.rb index adf5a73..f57d512 100644 --- a/lib/puppet/provider/elasticsearch_plugin/ruby.rb +++ b/lib/puppet/provider/elasticsearch_plugin/ruby.rb @@ -1,21 +1,23 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_plugin' Puppet::Type.type(:elasticsearch_plugin).provide( :elasticsearch_plugin, - :parent => Puppet::Provider::ElasticPlugin + parent: Puppet::Provider::ElasticPlugin ) do desc <<-END Post-5.x provider for Elasticsearch bin/elasticsearch-plugin command operations.' 
END case Facter.value('osfamily') when 'OpenBSD' - commands :plugin => '/usr/local/elasticsearch/bin/elasticsearch-plugin' - commands :es => '/usr/local/elasticsearch/bin/elasticsearch' - commands :javapathhelper => '/usr/local/bin/javaPathHelper' + commands plugin: '/usr/local/elasticsearch/bin/elasticsearch-plugin' + commands es: '/usr/local/elasticsearch/bin/elasticsearch' + commands javapathhelper: '/usr/local/bin/javaPathHelper' else - commands :plugin => '/usr/share/elasticsearch/bin/elasticsearch-plugin' - commands :es => '/usr/share/elasticsearch/bin/elasticsearch' + commands plugin: '/usr/share/elasticsearch/bin/elasticsearch-plugin' + commands es: '/usr/share/elasticsearch/bin/elasticsearch' end end diff --git a/lib/puppet/provider/elasticsearch_role/ruby.rb b/lib/puppet/provider/elasticsearch_role/ruby.rb index d05e87e..19f5bdf 100644 --- a/lib/puppet/provider/elasticsearch_role/ruby.rb +++ b/lib/puppet/provider/elasticsearch_role/ruby.rb @@ -1,11 +1,13 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_yaml' Puppet::Type.type(:elasticsearch_role).provide( :ruby, - :parent => Puppet::Provider::ElasticYaml, - :metadata => :privileges + parent: Puppet::Provider::ElasticYaml, + metadata: :privileges ) do desc 'Provider for X-Pack role resources.' xpack_config 'roles.yml' end diff --git a/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb b/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb index c73ea00..2e3a796 100644 --- a/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb +++ b/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb @@ -1,11 +1,13 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_yaml' Puppet::Type.type(:elasticsearch_role_mapping).provide( :ruby, - :parent => Puppet::Provider::ElasticYaml, - :metadata => :mappings + parent: Puppet::Provider::ElasticYaml, + metadata: :mappings ) do desc 'Provider for X-Pack role mappings.' 
xpack_config 'role_mapping.yml' end diff --git a/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb b/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb index 9b5e6e3..f866d65 100644 --- a/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb +++ b/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb @@ -1,52 +1,53 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/provider/elastic_rest' Puppet::Type.type(:elasticsearch_snapshot_repository).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :api_uri => '_snapshot' + parent: Puppet::Provider::ElasticREST, + api_uri: '_snapshot' ) do desc 'A REST API based provider to manage Elasticsearch snapshot repositories.' mk_resource_methods def self.process_body(body) Puppet.debug('Got to snapshot_repository.process_body') - results = JSON.parse(body).map do |object_name, api_object| + JSON.parse(body).map do |object_name, api_object| { - :name => object_name, - :ensure => :present, - :type => api_object['type'], - :compress => api_object['settings']['compress'], - :location => api_object['settings']['location'], - :chunk_size => api_object['settings']['chunk_size'], - :max_restore_rate => api_object['settings']['max_restore_rate'], - :max_snapshot_rate => api_object['settings']['max_snapshot_rate'], - :provider => name - }.reject { |_k, v| v.nil? 
} + name: object_name, + ensure: :present, + type: api_object['type'], + compress: api_object['settings']['compress'], + location: api_object['settings']['location'], + chunk_size: api_object['settings']['chunk_size'], + max_restore_rate: api_object['settings']['max_restore_rate'], + max_snapshot_rate: api_object['settings']['max_snapshot_rate'], + provider: name + }.compact end - results end def generate_body Puppet.debug('Got to snapshot_repository.generate_body') # Build core request body body = { - 'type' => resource[:type], + 'type' => resource[:type], 'settings' => { 'compress' => resource[:compress], 'location' => resource[:location] } } # Add optional values body['settings']['chunk_size'] = resource[:chunk_size] unless resource[:chunk_size].nil? body['settings']['max_restore_rate'] = resource[:max_restore_rate] unless resource[:max_restore_rate].nil? body['settings']['max_snapshot_rate'] = resource[:max_snapshot_rate] unless resource[:max_snapshot_rate].nil? # Convert to JSON and return JSON.generate(body) end end diff --git a/lib/puppet/provider/elasticsearch_template/ruby.rb b/lib/puppet/provider/elasticsearch_template/ruby.rb index 2512992..7bc6d8e 100644 --- a/lib/puppet/provider/elasticsearch_template/ruby.rb +++ b/lib/puppet/provider/elasticsearch_template/ruby.rb @@ -1,21 +1,23 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/provider/elastic_rest' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' Puppet::Type.type(:elasticsearch_template).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :api_uri => '_template', - :metadata => :content, - :metadata_pipeline => [ - lambda { |data| Puppet_X::Elastic.deep_to_s data }, - lambda { |data| Puppet_X::Elastic.deep_to_i data } + parent: Puppet::Provider::ElasticREST, + api_uri: '_template', + metadata: :content, + metadata_pipeline: [ + ->(data) { Puppet_X::Elastic.deep_to_s data }, + ->(data) { 
Puppet_X::Elastic.deep_to_i data } ] ) do desc 'A REST API based provider to manage Elasticsearch templates.' mk_resource_methods end diff --git a/lib/puppet/provider/elasticsearch_user/ruby.rb b/lib/puppet/provider/elasticsearch_user/ruby.rb index d12c4f6..3a9fd10 100644 --- a/lib/puppet/provider/elasticsearch_user/ruby.rb +++ b/lib/puppet/provider/elasticsearch_user/ruby.rb @@ -1,15 +1,17 @@ +# frozen_string_literal: true + require File.join(File.dirname(__FILE__), '..', '..', '..', 'puppet/provider/elastic_user_command') Puppet::Type.type(:elasticsearch_user).provide( :ruby, - :parent => Puppet::Provider::ElasticUserCommand + parent: Puppet::Provider::ElasticUserCommand ) do desc 'Provider for X-Pack user resources.' has_feature :manages_plaintext_passwords mk_resource_methods - commands :users_cli => "#{homedir}/bin/elasticsearch-users" - commands :es => "#{homedir}/bin/elasticsearch" + commands users_cli: "#{homedir}/bin/elasticsearch-users" + commands es: "#{homedir}/bin/elasticsearch" end diff --git a/lib/puppet/provider/elasticsearch_user_file/ruby.rb b/lib/puppet/provider/elasticsearch_user_file/ruby.rb index 7438be0..bc35132 100644 --- a/lib/puppet/provider/elasticsearch_user_file/ruby.rb +++ b/lib/puppet/provider/elasticsearch_user_file/ruby.rb @@ -1,28 +1,30 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_parsedfile' Puppet::Type.type(:elasticsearch_user_file).provide( :ruby, - :parent => Puppet::Provider::ElasticParsedFile + parent: Puppet::Provider::ElasticParsedFile ) do desc 'Provider for X-Pack elasticsearch users using plain files.' xpack_config 'users' has_feature :manages_encrypted_passwords text_line :comment, - :match => /^\s*#/ + match: %r{^\s*#} record_line :ruby, - :fields => %w[name hashed_password], - :separator => ':', - :joiner => ':' + fields: %w[name hashed_password], + separator: ':', + joiner: ':' def self.valid_attr?(klass, attr_name) if klass.respond_to? 
:parameters klass.parameters.include?(attr_name) else true end end end diff --git a/lib/puppet/provider/elasticsearch_user_roles/ruby.rb b/lib/puppet/provider/elasticsearch_user_roles/ruby.rb index a5da043..f130454 100644 --- a/lib/puppet/provider/elasticsearch_user_roles/ruby.rb +++ b/lib/puppet/provider/elasticsearch_user_roles/ruby.rb @@ -1,10 +1,12 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_user_roles' Puppet::Type.type(:elasticsearch_user_roles).provide( :ruby, - :parent => Puppet::Provider::ElasticUserRoles + parent: Puppet::Provider::ElasticUserRoles ) do desc 'Provider for X-Pack user roles (parsed file.)' xpack_config 'users_roles' end diff --git a/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb b/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb index e492712..93e72d5 100644 --- a/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb +++ b/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb @@ -1,51 +1,53 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/util/es_instance_validator' # This file contains a provider for the resource type `es_instance_conn_validator`, # which validates the Elasticsearch connection by attempting a tcp connection. Puppet::Type.type(:es_instance_conn_validator).provide(:tcp_port) do desc "A provider for the resource type `es_instance_conn_validator`, which validates the connection by attempting an https connection to Elasticsearch." def exists? start_time = Time.now timeout = resource[:timeout] sleep_interval = resource[:sleep_interval] success = validator.attempt_connection while success == false && ((Time.now - start_time) < timeout) # It can take several seconds for the Elasticsearch to start up; # especially on the first install. Therefore, our first connection attempt # may fail. Here we have somewhat arbitrarily chosen to retry every 10 # seconds until the configurable timeout has expired. 
Puppet.debug("Failed to connect to Elasticsearch; sleeping #{sleep_interval} seconds before retry") sleep sleep_interval success = validator.attempt_connection end if success Puppet.debug("Connected to the Elasticsearch in #{Time.now - start_time} seconds.") else Puppet.notice("Failed to connect to the Elasticsearch within timeout window of #{timeout} seconds; giving up.") end success end def create # If `#create` is called, that means that `#exists?` returned false, which # means that the connection could not be established... so we need to # cause a failure here. raise Puppet::Error, "Unable to connect to Elasticsearch! (#{@validator.instance_server}:#{@validator.instance_port})" end private # @api private def validator @validator ||= Puppet::Util::EsInstanceValidator.new(resource[:server], resource[:port]) end end diff --git a/lib/puppet/type/elasticsearch_index.rb b/lib/puppet/type/elasticsearch_index.rb index 942ee6a..4d7aa7f 100644 --- a/lib/puppet/type/elasticsearch_index.rb +++ b/lib/puppet/type/elasticsearch_index.rb @@ -1,34 +1,36 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/asymmetric_compare' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_index) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch index settings.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Index name.' end newproperty(:settings) do desc 'Structured settings for the index in hash form.' - def insync?(is) - Puppet_X::Elastic.asymmetric_compare(should, is) + def insync?(value) + Puppet_X::Elastic.asymmetric_compare(should, value) end munge do |value| Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(value)) end validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a? 
Hash end end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_keystore.rb b/lib/puppet/type/elasticsearch_keystore.rb index 46f5d7d..6f87b6e 100644 --- a/lib/puppet/type/elasticsearch_keystore.rb +++ b/lib/puppet/type/elasticsearch_keystore.rb @@ -1,64 +1,66 @@ +# frozen_string_literal: true + require 'puppet/parameter/boolean' Puppet::Type.newtype(:elasticsearch_keystore) do desc 'Manages an Elasticsearch keystore settings file.' ensurable - newparam(:instance, :namevar => true) do + newparam(:instance, namevar: true) do desc 'Elasticsearch instance this keystore belongs to.' end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' defaultto '/etc/elasticsearch' end - newparam(:purge, :boolean => true, :parent => Puppet::Parameter::Boolean) do + newparam(:purge, boolean: true, parent: Puppet::Parameter::Boolean) do desc <<-EOS Whether to proactively remove settings that exist in the keystore but are not present in this resource's settings. EOS defaultto false end - newproperty(:settings, :array_matching => :all) do + newproperty(:settings, array_matching: :all) do desc 'A key/value hash of settings names and values.' # The keystore utility can only retrieve a list of stored settings, # so here we only compare the existing settings (sorted) with the # desired settings' keys - def insync?(is) + def insync?(value) if resource[:purge] - is.sort == @should.first.keys.sort + value.sort == @should.first.keys.sort else - (@should.first.keys - is).empty? + (@should.first.keys - value).empty? end end def change_to_s(currentvalue, newvalue_raw) ret = '' newvalue = newvalue_raw.first.keys added_settings = newvalue - currentvalue ret << "added: #{added_settings.join(', ')} " unless added_settings.empty? removed_settings = currentvalue - newvalue unless removed_settings.empty? 
ret << if resource[:purge] "removed: #{removed_settings.join(', ')}" else "would have removed: #{removed_settings.join(', ')}, but purging is disabled" end end ret end end autorequire(:augeas) do "defaults_#{self[:name]}" end end diff --git a/lib/puppet/type/elasticsearch_license.rb b/lib/puppet/type/elasticsearch_license.rb index 6611836..0e43f30 100644 --- a/lib/puppet/type/elasticsearch_license.rb +++ b/lib/puppet/type/elasticsearch_license.rb @@ -1,52 +1,51 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/asymmetric_compare' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_license) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch licenses.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Pipeline name.' end newproperty(:content) do desc 'Structured hash for license content data.' - def insync?(is) + def insync?(value) Puppet_X::Elastic.asymmetric_compare( - should.map { |k, v| [k, v.is_a?(Hash) ? (v.reject { |s, _| s == 'signature' }) : v] }.to_h, - is + should.transform_values { |v| v.is_a?(Hash) ? (v.reject { |s, _| s == 'signature' }) : v }, + value ) end def should_to_s(newvalue) - newvalue.map do |license, license_data| - [ - license, - if license_data.is_a? Hash - license_data.map do |field, value| - [field, field == 'signature' ? '[redacted]' : value] - end.to_h - else - v - end - ] - end.to_h.to_s + newvalue.transform_values do |license_data| + if license_data.is_a? Hash + license_data.map do |field, value| + [field, field == 'signature' ? '[redacted]' : value] + end.to_h + else + license_data + end + end.to_s end validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a?
Hash end munge do |value| Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(value)) end end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_pipeline.rb b/lib/puppet/type/elasticsearch_pipeline.rb index 993f94f..8c51058 100644 --- a/lib/puppet/type/elasticsearch_pipeline.rb +++ b/lib/puppet/type/elasticsearch_pipeline.rb @@ -1,29 +1,31 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_pipeline) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch ingest pipelines.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Pipeline name.' end newproperty(:content) do desc 'Structured content of pipeline.' validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a? Hash end munge do |value| Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(value)) end end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_plugin.rb b/lib/puppet/type/elasticsearch_plugin.rb index 1b52b50..cf22902 100644 --- a/lib/puppet/type/elasticsearch_plugin.rb +++ b/lib/puppet/type/elasticsearch_plugin.rb @@ -1,52 +1,54 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_plugin) do @doc = 'Plugin installation type' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'An arbitrary name used as the identity of the resource.' end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' defaultto '/etc/elasticsearch' validate do |value| raise Puppet::Error, 'path expected' if value.nil? end end newparam(:elasticsearch_package_name) do desc 'Name of the system Elasticsearch package.' end newparam(:java_opts) do desc 'Optional array of Java options for ES_JAVA_OPTS.' 
defaultto [] end newparam(:java_home) do desc 'Optional string to set the environment variable JAVA_HOME.' end newparam(:url) do desc 'Url of the package' end newparam(:source) do desc 'Source of the package. puppet:// or file:// resource' end newparam(:proxy) do desc 'Proxy Host' end newparam(:plugin_dir) do desc 'Path to the Plugins directory' defaultto '/usr/share/elasticsearch/plugins' end newparam(:plugin_path) do desc 'Override name of the directory created for the plugin' end end diff --git a/lib/puppet/type/elasticsearch_role.rb b/lib/puppet/type/elasticsearch_role.rb index 2248ab5..8cf2d68 100644 --- a/lib/puppet/type/elasticsearch_role.rb +++ b/lib/puppet/type/elasticsearch_role.rb @@ -1,15 +1,17 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_role) do desc 'Type to model Elasticsearch roles.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Role name.' - newvalues(/^[a-zA-Z_]{1}[-\w@.$]{0,39}$/) + newvalues(%r{^[a-zA-Z_]{1}[-\w@.$]{0,39}$}) end newproperty(:privileges) do desc 'Security privileges of the given role.' end end diff --git a/lib/puppet/type/elasticsearch_role_mapping.rb b/lib/puppet/type/elasticsearch_role_mapping.rb index 6bc669c..47b0f69 100644 --- a/lib/puppet/type/elasticsearch_role_mapping.rb +++ b/lib/puppet/type/elasticsearch_role_mapping.rb @@ -1,15 +1,17 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_role_mapping) do desc 'Type to model Elasticsearch role mappings.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Role name.' - newvalues(/^[a-zA-Z_]{1}[-\w@.$]{0,39}$/) + newvalues(%r{^[a-zA-Z_]{1}[-\w@.$]{0,39}$}) end - newproperty(:mappings, :array_matching => :all) do + newproperty(:mappings, array_matching: :all) do desc 'List of role mappings.' 
end end diff --git a/lib/puppet/type/elasticsearch_snapshot_repository.rb b/lib/puppet/type/elasticsearch_snapshot_repository.rb index 17357a9..d23368d 100644 --- a/lib/puppet/type/elasticsearch_snapshot_repository.rb +++ b/lib/puppet/type/elasticsearch_snapshot_repository.rb @@ -1,51 +1,53 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_snapshot_repository) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch snapshot repositories.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Repository name.' end newparam(:type) do desc 'Repository type' defaultto 'fs' validate do |value| raise Puppet::Error, 'string expected' unless value.is_a? String end end # newproperty(:compress, :boolean => true, :parent => Puppet::Property::Boolean) do - newproperty(:compress, :boolean => true) do + newproperty(:compress, boolean: true) do desc 'Compress the repository data' - defaultto :true + defaultto true end newproperty(:location) do desc 'Repository location' end newproperty(:chunk_size) do desc 'File chunk size' end newproperty(:max_restore_rate) do desc 'Maximum Restore rate' end newproperty(:max_snapshot_rate) do desc 'Maximum Snapshot rate' end validate do raise ArgumentError, 'Location is required.' if self[:location].nil? 
end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_template.rb b/lib/puppet/type/elasticsearch_template.rb index c44a5bd..bc37b59 100644 --- a/lib/puppet/type/elasticsearch_template.rb +++ b/lib/puppet/type/elasticsearch_template.rb @@ -1,114 +1,117 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet/file_serving/content' require 'puppet/file_serving/metadata' require 'puppet_x/elastic/deep_implode' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_template) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch index templates.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Template name.' end newproperty(:content) do desc 'Structured content of template.' validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a? Hash end munge do |value| # The Elasticsearch API will return default empty values for # order, aliases, and mappings if they aren't defined in the # user mapping, so we need to set defaults here to keep the # `in` and `should` states consistent if the user hasn't # provided any. # # The value is first stringified, then integers are parse out as # necessary, since the Elasticsearch API enforces some fields to be # integers. # # We also need to fully qualify index settings, since users # can define those with the index json key absent, but the API # always fully qualifies them. { 'order' => 0, 'aliases' => {}, 'mappings' => {} }.merge( Puppet_X::Elastic.deep_to_i( Puppet_X::Elastic.deep_to_s( value.tap do |val| if val.key? 'settings' val['settings']['index'] = {} unless val['settings'].key? 'index' (val['settings'].keys - ['index']).each do |setting| new_key = if setting.start_with? 'index.' 
setting[6..-1] else setting end val['settings']['index'][new_key] = \ val['settings'].delete setting end end end ) ) ) end - def insync?(is) - Puppet_X::Elastic.deep_implode(is) == \ + def insync?(value) + Puppet_X::Elastic.deep_implode(value) == \ Puppet_X::Elastic.deep_implode(should) end end newparam(:source) do desc 'Puppet source to file containing template contents.' validate do |value| raise Puppet::Error, 'string expected' unless value.is_a? String end end # rubocop:disable Style/SignalException validate do # Ensure that at least one source of template content has been provided if self[:ensure] == :present fail Puppet::ParseError, '"content" or "source" required' \ - if self[:content].nil? and self[:source].nil? - if !self[:content].nil? and !self[:source].nil? + if self[:content].nil? && self[:source].nil? + + if !self[:content].nil? && !self[:source].nil? fail( Puppet::ParseError, "'content' and 'source' cannot be simultaneously defined" ) end end # If a source was passed, retrieve the source content from Puppet's # FileServing indirection and set the content property unless self[:source].nil? - unless Puppet::FileServing::Metadata.indirection.find(self[:source]) - fail(format('Could not retrieve source %s', self[:source])) - end + fail(format('Could not retrieve source %s', self[:source])) unless Puppet::FileServing::Metadata.indirection.find(self[:source]) tmp = if !catalog.nil? 
\ - and catalog.respond_to?(:environment_instance) + && catalog.respond_to?(:environment_instance) Puppet::FileServing::Content.indirection.find( self[:source], - :environment => catalog.environment_instance + environment: catalog.environment_instance ) else Puppet::FileServing::Content.indirection.find(self[:source]) end fail(format('Could not find any content at %s', self[:source])) unless tmp + self[:content] = PSON.load(tmp.content) end end -end # of newtype + # rubocop:enable Style/SignalException +end diff --git a/lib/puppet/type/elasticsearch_user.rb b/lib/puppet/type/elasticsearch_user.rb index 01b0a18..02b44d1 100644 --- a/lib/puppet/type/elasticsearch_user.rb +++ b/lib/puppet/type/elasticsearch_user.rb @@ -1,51 +1,51 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_user) do desc 'Type to model Elasticsearch users.' feature :manages_plaintext_passwords, 'The provider can control the password in plaintext form.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'User name.' end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' validate do |value| raise Puppet::Error, 'path expected' if value.nil? end end newparam( :password, - :required_features => :manages_plaintext_passwords + required_features: :manages_plaintext_passwords ) do desc 'Plaintext password for user.' 
validate do |value| - if value.length < 6 - raise ArgumentError, 'Password must be at least 6 characters long' - end + raise ArgumentError, 'Password must be at least 6 characters long' if value.length < 6 end # rubocop:disable Style/PredicateName def is_to_s(_currentvalue) '[old password hash redacted]' end # rubocop:enable Style/PredicateName def should_to_s(_newvalue) '[new password hash redacted]' end end def refresh if @parameters[:ensure] provider.passwd else debug 'skipping password set' end end end diff --git a/lib/puppet/type/elasticsearch_user_file.rb b/lib/puppet/type/elasticsearch_user_file.rb index 250563d..5fc31f8 100644 --- a/lib/puppet/type/elasticsearch_user_file.rb +++ b/lib/puppet/type/elasticsearch_user_file.rb @@ -1,30 +1,32 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_user_file) do desc 'Type to model Elasticsearch users.' feature :manages_encrypted_passwords, 'The provider can control the password hash without a need to explicitly refresh.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'User name.' end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' validate do |value| raise Puppet::Error, 'path expected' if value.nil? end end newproperty( :hashed_password, - :required_features => :manages_encrypted_passwords + required_features: :manages_encrypted_passwords ) do desc 'Hashed password for user.' - newvalues(/^[$]2a[$].{56}$/) + newvalues(%r{^[$]2a[$].{56}$}) end end diff --git a/lib/puppet/type/elasticsearch_user_roles.rb b/lib/puppet/type/elasticsearch_user_roles.rb index fb8a86a..28cf661 100644 --- a/lib/puppet/type/elasticsearch_user_roles.rb +++ b/lib/puppet/type/elasticsearch_user_roles.rb @@ -1,20 +1,22 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_user_roles) do desc 'Type to model Elasticsearch user roles.' 
ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'User name.' end - newproperty(:roles, :array_matching => :all) do + newproperty(:roles, array_matching: :all) do desc 'Array of roles that the user should belong to.' - def insync?(is) - is.sort == should.sort + def insync?(value) + value.sort == should.sort end end autorequire(:elasticsearch_user) do self[:name] end end diff --git a/lib/puppet/type/es_instance_conn_validator.rb b/lib/puppet/type/es_instance_conn_validator.rb index 938c626..afd5f1b 100644 --- a/lib/puppet/type/es_instance_conn_validator.rb +++ b/lib/puppet/type/es_instance_conn_validator.rb @@ -1,45 +1,47 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:es_instance_conn_validator) do @doc = "Verify that a connection can be successfully established between a node and Elasticsearch. It could potentially be used for other purposes such as monitoring." ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'An arbitrary name used as the identity of the resource.' end newparam(:server) do desc 'DNS name or IP address of the server where Elasticsearch should be running.' defaultto 'localhost' end newparam(:port) do desc 'The port that the Elasticsearch instance should be listening on.' defaultto 9200 end newparam(:timeout) do desc 'The max number of seconds that the validator should wait before giving up and deciding that Elasticsearch is not running; defaults to 60 seconds.' defaultto 60 validate do |value| # This will raise an error if the string is not convertible to an integer Integer(value) end munge do |value| Integer(value) end end newparam(:sleep_interval) do desc 'The number of seconds that the validator should wait before retrying the connection to Elasticsearch; defaults to 10 seconds.' 
defaultto 10 validate do |value| # This will raise an error if the string is not convertible to an integer Integer(value) end munge do |value| Integer(value) end end end diff --git a/lib/puppet/util/es_instance_validator.rb b/lib/puppet/util/es_instance_validator.rb index 77f0b3c..d9585d9 100644 --- a/lib/puppet/util/es_instance_validator.rb +++ b/lib/puppet/util/es_instance_validator.rb @@ -1,44 +1,43 @@ +# frozen_string_literal: true + require 'socket' require 'timeout' module Puppet # Namespace for miscellaneous tools module Util # Helper class to assist with talking to the Elasticsearch service ports. class EsInstanceValidator - attr_reader :instance_server - attr_reader :instance_port + attr_reader :instance_server, :instance_port def initialize(instance_server, instance_port) @instance_server = instance_server @instance_port = instance_port # Avoid deprecation warnings in Puppet versions < 4 @timeout = if Facter.value(:puppetversion).split('.').first.to_i < 4 Puppet[:configtimeout] else Puppet[:http_connect_timeout] end end # Utility method; attempts to make an https connection to the Elasticsearch instance. # This is abstracted out into a method so that it can be called multiple times # for retry attempts. # # @return true if the connection is successful, false otherwise. 
def attempt_connection Timeout.timeout(@timeout) do - begin - TCPSocket.new(@instance_server, @instance_port).close - true - rescue Errno::EADDRNOTAVAIL, Errno::ECONNREFUSED, Errno::EHOSTUNREACH => e - Puppet.debug "Unable to connect to Elasticsearch instance (#{@instance_server}:#{@instance_port}): #{e.message}" - false - end + TCPSocket.new(@instance_server, @instance_port).close + true + rescue Errno::EADDRNOTAVAIL, Errno::ECONNREFUSED, Errno::EHOSTUNREACH => e + Puppet.debug "Unable to connect to Elasticsearch instance (#{@instance_server}:#{@instance_port}): #{e.message}" + false end rescue Timeout::Error false end end end end diff --git a/lib/puppet_x/elastic/asymmetric_compare.rb b/lib/puppet_x/elastic/asymmetric_compare.rb index 850d885..8662550 100644 --- a/lib/puppet_x/elastic/asymmetric_compare.rb +++ b/lib/puppet_x/elastic/asymmetric_compare.rb @@ -1,24 +1,26 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom Elastic functions module Elastic # Certain Elasticsearch APIs return fields that are present in responses # but not present when sending API requests such as creation time, and so # on. When comparing desired settings and extant settings, only indicate # that a value differs from another when user-desired settings differ from # existing settings - we ignore keys that exist in the response that aren't # being explicitly controlled by Puppet. def self.asymmetric_compare(should_val, is_val) should_val.reduce(true) do |is_synced, (should_key, should_setting)| if is_val.key? should_key if is_val[should_key].is_a? 
Hash asymmetric_compare(should_setting, is_val[should_key]) else is_synced && is_val[should_key] == should_setting end else is_synced && true end end end - end # of Elastic -end # of Puppet_X + end +end diff --git a/lib/puppet_x/elastic/deep_implode.rb b/lib/puppet_x/elastic/deep_implode.rb index 3a51355..103967d 100644 --- a/lib/puppet_x/elastic/deep_implode.rb +++ b/lib/puppet_x/elastic/deep_implode.rb @@ -1,33 +1,35 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom ruby for some Elastic utilities. module Elastic # Recursively implode a hash into dot-delimited structure of Hash # keys/values. def self.deep_implode(hash) ret = {} implode ret, hash ret end # Recursively descend into hash values, flattening the key structure into # dot-delimited keyed Hash. def self.implode(new_hash, hash, path = []) hash.sort_by { |k, _v| k.length }.reverse.each do |key, value| new_path = path + [key] case value when Hash implode(new_hash, value, new_path) else new_key = new_path.join('.') - if value.is_a? Array \ - and new_hash.key? new_key \ - and new_hash[new_key].is_a? 
Array + if value.is_a?(Array) \ + && new_hash.key?(new_key) \ + && new_hash[new_key].is_a?(Array) new_hash[new_key] += value else new_hash[new_key] ||= value end end end - end # of deep_implode - end # of Elastic -end # of Puppet_X + end + end +end diff --git a/lib/puppet_x/elastic/deep_to_i.rb b/lib/puppet_x/elastic/deep_to_i.rb index 32f9a1f..8fc5d26 100644 --- a/lib/puppet_x/elastic/deep_to_i.rb +++ b/lib/puppet_x/elastic/deep_to_i.rb @@ -1,20 +1,22 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom Elastic functions module Elastic # This ugly hack is required due to the fact Puppet passes in the # puppet-native hash with stringified numerics, which causes the # decoded JSON from the Elasticsearch API to be seen as out-of-sync # when the parsed template hash is compared against the puppet hash. def self.deep_to_i(obj) - if obj.is_a? String and obj =~ /^-?[0-9]+$/ + if obj.is_a?(String) && obj =~ %r{^-?[0-9]+$} obj.to_i elsif obj.is_a? Array obj.map { |element| deep_to_i(element) } elsif obj.is_a? Hash obj.merge(obj) { |_key, val| deep_to_i(val) } else obj end end - end # of Elastic -end # of Puppet_X + end +end diff --git a/lib/puppet_x/elastic/deep_to_s.rb b/lib/puppet_x/elastic/deep_to_s.rb index 2d32f17..6ed8a28 100644 --- a/lib/puppet_x/elastic/deep_to_s.rb +++ b/lib/puppet_x/elastic/deep_to_s.rb @@ -1,20 +1,22 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom Elastic functions module Elastic # When given a hash, this method recurses deeply into all values to convert # any that aren't data structures into strings. This is necessary when # comparing results from Elasticsearch API calls, because values like # integers and booleans are in string form. def self.deep_to_s(obj) if obj.is_a? Array obj.map { |element| deep_to_s(element) } elsif obj.is_a? 
Hash obj.merge(obj) { |_key, val| deep_to_s(val) } - elsif (not obj.is_a? String) and (not [true, false].include?(obj)) and obj.respond_to? :to_s + elsif (!obj.is_a? String) && ![true, false].include?(obj) && obj.respond_to?(:to_s) obj.to_s else obj end end - end # of Elastic -end # of Puppet_X + end +end diff --git a/lib/puppet_x/elastic/elasticsearch_rest_resource.rb b/lib/puppet_x/elastic/elasticsearch_rest_resource.rb index b00d5c2..d3a3a1d 100644 --- a/lib/puppet_x/elastic/elasticsearch_rest_resource.rb +++ b/lib/puppet_x/elastic/elasticsearch_rest_resource.rb @@ -1,93 +1,91 @@ +# frozen_string_literal: true + require 'puppet/parameter/boolean' # Provides common properties and parameters for REST-based Elasticsearch types module ElasticsearchRESTResource - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def self.extended(extender) extender.newparam(:ca_file) do desc 'Absolute path to a CA file to authenticate server certs against.' end extender.newparam(:ca_path) do desc 'Absolute path to a directory containing CA files.' end extender.newparam(:host) do desc 'Hostname or address of Elasticsearch instance.' defaultto 'localhost' validate do |value| - unless value.is_a? String - raise Puppet::Error, 'invalid parameter, expected string' - end + raise Puppet::Error, 'invalid parameter, expected string' unless value.is_a? String end end extender.newparam(:password) do desc 'Optional HTTP basic auth plaintext password for Elasticsearch.' end extender.newparam(:port) do desc 'Port to use for Elasticsearch HTTP API operations.' defaultto 9200 munge do |value| - if value.is_a? String + case value + when String value.to_i - elsif value.is_a? 
Integer + when Integer value else raise Puppet::Error, "unknown '#{value}' timeout type #{value.class}" end end validate do |value| raise Puppet::Error, "invalid port value '#{value}'" \ - unless value.to_s =~ /^([0-9]+)$/ + unless value.to_s =~ %r{^([0-9]+)$} raise Puppet::Error, "invalid port value '#{value}'" \ - unless (0 < Regexp.last_match[0].to_i) \ - and (Regexp.last_match[0].to_i < 65_535) + unless Regexp.last_match[0].to_i.positive? \ + && (Regexp.last_match[0].to_i < 65_535) end end extender.newparam(:protocol) do desc 'Protocol to use for communication with Elasticsearch.' defaultto 'http' end extender.newparam(:timeout) do desc 'HTTP timeout for reading/writing content to Elasticsearch.' defaultto 10 munge do |value| - if value.is_a? String + case value + when String value.to_i - elsif value.is_a? Integer + when Integer value else raise Puppet::Error, "unknown '#{value}' timeout type #{value.class}" end end validate do |value| - if value.to_s !~ /^\d+$/ - raise Puppet::Error, 'timeout must be a positive integer' - end + raise Puppet::Error, 'timeout must be a positive integer' if value.to_s !~ %r{^\d+$} end end extender.newparam(:username) do desc 'Optional HTTP basic auth username for Elasticsearch.' end extender.newparam( :validate_tls, - :boolean => true, - :parent => Puppet::Parameter::Boolean + boolean: true, + parent: Puppet::Parameter::Boolean ) do desc 'Whether to verify TLS/SSL certificates.' defaultto true end end -end # of newtype +end diff --git a/lib/puppet_x/elastic/es_versioning.rb b/lib/puppet_x/elastic/es_versioning.rb index f284170..bf01c02 100644 --- a/lib/puppet_x/elastic/es_versioning.rb +++ b/lib/puppet_x/elastic/es_versioning.rb @@ -1,68 +1,71 @@ +# frozen_string_literal: true + class ElasticsearchPackageNotFoundError < StandardError; end -module Puppet_X +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase module Elastic # Assists with discerning the locally installed version of Elasticsearch. 
# Implemented in a way to be called from native types and providers in order # to lazily fetch the package version from various arcane Puppet mechanisms. class EsVersioning # All of the default options we'll set for Elasticsearch's command # invocation. DEFAULT_OPTS = { 'home' => 'ES_HOME', 'logs' => 'LOG_DIR', 'data' => 'DATA_DIR', 'work' => 'WORK_DIR', 'conf' => 'CONF_DIR' }.freeze # Create an array of command-line flags to append to an `elasticsearch` # startup command. def self.opt_flags(package_name, catalog, opts = DEFAULT_OPTS.dup) opt_flag = opt_flag(min_version('5.0.0', package_name, catalog)) opts.delete 'work' if min_version '5.0.0', package_name, catalog opts.delete 'home' if min_version '5.4.0', package_name, catalog opt_args = if min_version '6.0.0', package_name, catalog [] else opts.map do |k, v| "-#{opt_flag}default.path.#{k}=${#{v}}" end.sort end opt_args << '--quiet' if min_version '5.0.0', package_name, catalog [opt_flag, opt_args] end # Get the correct option flag depending on whether Elasticsearch is post # version 5. def self.opt_flag(v5_or_later) v5_or_later ? 'E' : 'Des.' end # Predicate to determine whether a package is at least a certain version. def self.min_version(ver, package_name, catalog) Puppet::Util::Package.versioncmp( version(package_name, catalog), ver ) >= 0 end # Fetch the package version for a locally installed package. def self.version(package_name, catalog) es_pkg = catalog.resource("Package[#{package_name}]") raise Puppet::Error, "could not find `Package[#{package_name}]` resource" unless es_pkg + [ es_pkg.provider.properties[:version], es_pkg.provider.properties[:ensure] ].each do |property| return property if property.is_a? 
String end Puppet.warning("could not find valid version for `Package[#{package_name}]` resource") raise ElasticsearchPackageNotFoundError end end end end diff --git a/lib/puppet_x/elastic/hash.rb b/lib/puppet_x/elastic/hash.rb index 47f9685..44c2d32 100644 --- a/lib/puppet_x/elastic/hash.rb +++ b/lib/puppet_x/elastic/hash.rb @@ -1,75 +1,75 @@ +# frozen_string_literal: true + # Custom extensions namespace -module Puppet_X +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Elastic helpers module Elastic # Utility extension for consistent to_yaml behavior. module SortedHash # Upon extension, modify the hash appropriately to render # sorted yaml dependent upon whichever way is supported for # this version of Puppet/Ruby's yaml implementation. - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def self.extended(base) if RUBY_VERSION >= '1.9' # We can sort the hash in Ruby >= 1.9 by recursively # re-inserting key/values in sorted order. Native to_yaml will # call .each and get sorted pairs back. tmp = base.to_a.sort base.clear tmp.each do |key, val| - if val.is_a? base.class + case val + when base.class val.extend Puppet_X::Elastic::SortedHash - elsif val.is_a? Array + when Array val.map do |elem| if elem.is_a? base.class elem.extend(Puppet_X::Elastic::SortedHash) else elem end end end base[key] = val end else # Otherwise, recurse into the hash to extend all nested # hashes with the sorted each_pair method. # # Ruby < 1.9 doesn't support any notion of sorted hashes, # so we have to expressly monkey patch each_pair, which is # called by ZAML (the yaml library used in Puppet < 4; Puppet # >= 4 deprecates Ruby 1.8) # # Note that respond_to? is used here as there were weird # problems with .class/.is_a? base.merge! base do |_, ov, _| if ov.respond_to? :each_pair ov.extend Puppet_X::Elastic::SortedHash elsif ov.is_a? Array ov.map do |elem| if elem.respond_to? 
:each_pair elem.extend Puppet_X::Elastic::SortedHash else elem end end else ov end end end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity # Override each_pair with a method that yields key/values in # sorted order. def each_pair return to_enum(:each_pair) unless block_given? + keys.sort.each do |key| yield key, self[key] end self end end end end diff --git a/lib/puppet_x/elastic/plugin_parsing.rb b/lib/puppet_x/elastic/plugin_parsing.rb index 5e1f98c..ca5aa1a 100644 --- a/lib/puppet_x/elastic/plugin_parsing.rb +++ b/lib/puppet_x/elastic/plugin_parsing.rb @@ -1,33 +1,38 @@ +# frozen_string_literal: true + class ElasticPluginParseFailure < StandardError; end -module Puppet_X +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom functions for plugin string parsing. module Elastic def self.plugin_name(raw_name) plugin_split(raw_name, 1) end def self.plugin_version(raw_name) - v = plugin_split(raw_name, 2, false).gsub(/^[^0-9]*/, '') + v = plugin_split(raw_name, 2, false).gsub(%r{^[^0-9]*}, '') raise ElasticPluginParseFailure, "could not parse version, got '#{v}'" if v.empty? + v end # Attempt to guess at the plugin's final directory name - def self.plugin_split(original_string, position, soft_fail = true) + def self.plugin_split(original_string, position, soft_fail: true) # Try both colon (maven) and slash-delimited (github/elastic.co) names %w[/ :].each do |delimiter| parts = original_string.split(delimiter) # If the string successfully split, assume we found the right format - return parts[position].gsub(/(elasticsearch-|es-)/, '') unless parts[position].nil? + return parts[position].gsub(%r{(elasticsearch-|es-)}, '') unless parts[position].nil? 
end - raise( - ElasticPluginParseFailure, - "could not find element '#{position}' in #{original_string}" - ) unless soft_fail + unless soft_fail + raise( + ElasticPluginParseFailure, + "could not find element '#{position}' in #{original_string}" + ) + end original_string end - end # of Elastic -end # of Puppet_X + end +end diff --git a/manifests/config.pp b/manifests/config.pp index fdd699c..0f8cf5d 100644 --- a/manifests/config.pp +++ b/manifests/config.pp @@ -1,225 +1,219 @@ # This class exists to coordinate all configuration related actions, # functionality and logical units in a central place. # # It is not intended to be used directly by external resources like node # definitions or other modules. # # @example importing this class into other classes to use its functionality: # class { 'elasticsearch::config': } # # @author Richard Pijnenburg # @author Tyler Langlois # @author Gavin Williams # class elasticsearch::config { - #### Configuration Exec { - path => [ '/bin', '/usr/bin', '/usr/local/bin' ], + path => ['/bin', '/usr/bin', '/usr/local/bin'], cwd => '/', } - $init_defaults = merge( - { - 'MAX_OPEN_FILES' => '65535', - }, - $elasticsearch::init_defaults - ) - - if ( $elasticsearch::ensure == 'present' ) { + $init_defaults = { + 'MAX_OPEN_FILES' => '65535', + }.merge($elasticsearch::init_defaults) + if ($elasticsearch::ensure == 'present') { file { $elasticsearch::homedir: ensure => 'directory', group => $elasticsearch::elasticsearch_group, owner => $elasticsearch::elasticsearch_user; $elasticsearch::configdir: ensure => 'directory', group => $elasticsearch::elasticsearch_group, owner => $elasticsearch::elasticsearch_user, mode => '2750'; $elasticsearch::datadir: ensure => 'directory', group => $elasticsearch::elasticsearch_group, owner => $elasticsearch::elasticsearch_user, mode => '2750'; $elasticsearch::logdir: ensure => 'directory', group => $elasticsearch::elasticsearch_group, owner => $elasticsearch::elasticsearch_user, mode => '2750'; 
$elasticsearch::real_plugindir: ensure => 'directory', group => $elasticsearch::elasticsearch_group, owner => $elasticsearch::elasticsearch_user, mode => 'o+Xr'; "${elasticsearch::homedir}/lib": ensure => 'directory', group => '0', owner => 'root', recurse => true; } # Defaults file, either from file source or from hash to augeas commands if ($elasticsearch::init_defaults_file != undef) { file { "${elasticsearch::defaults_location}/elasticsearch": ensure => $elasticsearch::ensure, source => $elasticsearch::init_defaults_file, owner => 'root', group => $elasticsearch::elasticsearch_group, mode => '0660', before => Service['elasticsearch'], notify => $elasticsearch::_notify_service, } } else { augeas { "${elasticsearch::defaults_location}/elasticsearch": incl => "${elasticsearch::defaults_location}/elasticsearch", lens => 'Shellvars.lns', changes => template("${module_name}/etc/sysconfig/defaults.erb"), before => Service['elasticsearch'], notify => $elasticsearch::_notify_service, } } # Generate config file $_config = deep_implode($elasticsearch::config) # Generate SSL config if $elasticsearch::ssl { if ($elasticsearch::keystore_password == undef) { fail('keystore_password required') } if ($elasticsearch::keystore_path == undef) { $_keystore_path = "${elasticsearch::configdir}/elasticsearch.ks" } else { $_keystore_path = $elasticsearch::keystore_path } # Set the correct xpack. 
settings based on ES version if (versioncmp($elasticsearch::version, '7') >= 0) { $_tls_config = { 'xpack.security.http.ssl.enabled' => true, 'xpack.security.http.ssl.keystore.path' => $_keystore_path, 'xpack.security.http.ssl.keystore.password' => $elasticsearch::keystore_password, 'xpack.security.transport.ssl.enabled' => true, 'xpack.security.transport.ssl.keystore.path' => $_keystore_path, 'xpack.security.transport.ssl.keystore.password' => $elasticsearch::keystore_password, } } else { $_tls_config = { 'xpack.security.transport.ssl.enabled' => true, 'xpack.security.http.ssl.enabled' => true, 'xpack.ssl.keystore.path' => $_keystore_path, 'xpack.ssl.keystore.password' => $elasticsearch::keystore_password, } } # Trust CA Certificate java_ks { 'elasticsearch_ca': ensure => 'latest', certificate => $elasticsearch::ca_certificate, target => $_keystore_path, password => $elasticsearch::keystore_password, trustcacerts => true, } # Load node certificate and private key java_ks { 'elasticsearch_node': ensure => 'latest', certificate => $elasticsearch::certificate, private_key => $elasticsearch::private_key, target => $_keystore_path, password => $elasticsearch::keystore_password, } } else { $_tls_config = {} } # # Logging file or hash # if ($elasticsearch::logging_file != undef) { # $_log4j_content = undef # } else { # if ($elasticsearch::logging_template != undef ) { # $_log4j_content = template($elasticsearch::logging_template) # } else { # $_log4j_content = template("${module_name}/etc/elasticsearch/log4j2.properties.erb") # } # $_logging_source = undef # } # file { # "${elasticsearch::configdir}/log4j2.properties": # ensure => file, # content => $_log4j_content, # source => $_logging_source, # mode => '0644', # notify => $elasticsearch::_notify_service, # require => Class['elasticsearch::package'], # before => Class['elasticsearch::service'], # } # Generate Elasticsearch config $_es_config = merge( $elasticsearch::config, { 'path.data' => $elasticsearch::datadir }, { 
'path.logs' => $elasticsearch::logdir }, $_tls_config ) datacat_fragment { 'main_config': target => "${elasticsearch::configdir}/elasticsearch.yml", data => $_es_config, } datacat { "${elasticsearch::configdir}/elasticsearch.yml": template => "${module_name}/etc/elasticsearch/elasticsearch.yml.erb", notify => $elasticsearch::_notify_service, require => Class['elasticsearch::package'], owner => $elasticsearch::elasticsearch_user, group => $elasticsearch::elasticsearch_group, mode => '0440', } # Add any additional JVM options $elasticsearch::jvm_options.each |String $jvm_option| { file_line { "jvm_option_${jvm_option}": ensure => present, path => "${elasticsearch::configdir}/jvm.options", line => $jvm_option, notify => $elasticsearch::_notify_service, } } if $elasticsearch::system_key != undef { file { "${elasticsearch::configdir}/system_key": ensure => 'file', source => $elasticsearch::system_key, mode => '0400', } } # Add secrets to keystore if $elasticsearch::secrets != undef { elasticsearch_keystore { 'elasticsearch_secrets': configdir => $elasticsearch::configdir, purge => $elasticsearch::purge_secrets, settings => $elasticsearch::secrets, - notify => $::elasticsearch::_notify_service, + notify => $elasticsearch::_notify_service, } } - } elsif ( $elasticsearch::ensure == 'absent' ) { file { $elasticsearch::real_plugindir: ensure => 'absent', force => true, backup => false, } file { "${elasticsearch::defaults_location}/elasticsearch": ensure => 'absent', subscribe => Service['elasticsearch'], } } } diff --git a/manifests/index.pp b/manifests/index.pp index 1f58ad5..1d8b07e 100644 --- a/manifests/index.pp +++ b/manifests/index.pp @@ -1,77 +1,76 @@ # A defined type to control Elasticsearch index-level settings. # # @param ensure # Controls whether the named pipeline should be present or absent in # the cluster. # # @param api_basic_auth_password # HTTP basic auth password to use when communicating over the Elasticsearch # API. 
# # @param api_basic_auth_username # HTTP basic auth username to use when communicating over the Elasticsearch # API. # # @param api_ca_file # Path to a CA file which will be used to validate server certs when # communicating with the Elasticsearch API over HTTPS. # # @param api_ca_path # Path to a directory with CA files which will be used to validate server # certs when communicating with the Elasticsearch API over HTTPS. # # @param api_host # Host name or IP address of the ES instance to connect to. # # @param api_port # Port number of the ES instance to connect to # # @param api_protocol # Protocol that should be used to connect to the Elasticsearch API. # # @param api_timeout # Timeout period (in seconds) for the Elasticsearch API. # # @param settings # Index settings in hash form (typically nested). # # @param validate_tls # Determines whether the validity of SSL/TLS certificates received from the # Elasticsearch API should be verified or ignored. # # @author Richard Pijnenburg # @author Tyler Langlois # define elasticsearch::index ( Enum['absent', 'present'] $ensure = 'present', Optional[String] $api_basic_auth_password = $elasticsearch::api_basic_auth_password, Optional[String] $api_basic_auth_username = $elasticsearch::api_basic_auth_username, Optional[Stdlib::Absolutepath] $api_ca_file = $elasticsearch::api_ca_file, Optional[Stdlib::Absolutepath] $api_ca_path = $elasticsearch::api_ca_path, String $api_host = $elasticsearch::api_host, Integer[0, 65535] $api_port = $elasticsearch::api_port, Enum['http', 'https'] $api_protocol = $elasticsearch::api_protocol, Integer $api_timeout = $elasticsearch::api_timeout, Hash $settings = {}, Boolean $validate_tls = $elasticsearch::validate_tls, ) { - es_instance_conn_validator { "${name}-index-conn-validator": server => $api_host, port => $api_port, timeout => $api_timeout, } -> elasticsearch_index { $name: ensure => $ensure, settings => $settings, protocol => $api_protocol, host => $api_host, port => $api_port, timeout 
=> $api_timeout, username => $api_basic_auth_username, password => $api_basic_auth_password, ca_file => $api_ca_file, ca_path => $api_ca_path, validate_tls => $validate_tls, } } diff --git a/manifests/init.pp b/manifests/init.pp index 035cf1b..55fc4e0 100644 --- a/manifests/init.pp +++ b/manifests/init.pp @@ -1,606 +1,600 @@ # Top-level Elasticsearch class which may manage installation of the # Elasticsearch package, package repository, and other # global options and parameters. # # @summary Manages the installation of Elasticsearch and related options. # # @example install Elasticsearch # class { 'elasticsearch': } # # @example removal and decommissioning # class { 'elasticsearch': # ensure => 'absent', # } # # @example install everything but disable service(s) afterwards # class { 'elasticsearch': # status => 'disabled', # } # # @param ensure # Controls if the managed resources shall be `present` or `absent`. # If set to `absent`, the managed software packages will be uninstalled, and # any traces of the packages will be purged as well as possible, possibly # including existing configuration files. # System modifications (if any) will be reverted as well as possible (e.g. # removal of created users, services, changed log settings, and so on). # This is a destructive parameter and should be used with care. # # @param api_basic_auth_password # Defines the default REST basic auth password for API authentication. # # @param api_basic_auth_username # Defines the default REST basic auth username for API authentication. # # @param api_ca_file # Path to a CA file which will be used to validate server certs when # communicating with the Elasticsearch API over HTTPS. # # @param api_ca_path # Path to a directory with CA files which will be used to validate server # certs when communicating with the Elasticsearch API over HTTPS. # # @param api_host # Default host to use when accessing Elasticsearch APIs. 
# # @param api_port # Default port to use when accessing Elasticsearch APIs. # # @param api_protocol # Default protocol to use when accessing Elasticsearch APIs. # # @param api_timeout # Default timeout (in seconds) to use when accessing Elasticsearch APIs. # # @param autoupgrade # If set to `true`, any managed package will be upgraded on each Puppet run # when the package provider is able to find a newer version than the present # one. The exact behavior is provider dependent (see # {package, "upgradeable"}[http://j.mp/xbxmNP] in the Puppet documentation). # # @param ca_certificate # Path to the trusted CA certificate to add to this node's Java keystore. # # @param certificate # Path to the certificate for this node signed by the CA listed in # ca_certificate. # # @param config # Elasticsearch configuration hash. # # @param configdir # Directory containing the elasticsearch configuration. # Use this setting if your packages deviate from the norm (`/etc/elasticsearch`) # # @param configdir_recurselimit # Dictates how deeply the file copy recursion logic should descend when # copying files from the `configdir` to instance `configdir`s. # # @param daily_rolling_date_pattern # File pattern for the file appender log when file_rolling_type is 'dailyRollingFile'. # # @param datadir # Allows you to set the data directory of Elasticsearch. # # @param default_logging_level # Default logging level for Elasticsearch. # # @param defaults_location # Absolute path to directory containing init defaults file. # # @param deprecation_logging # Whether to enable deprecation logging. If enabled, deprecation logs will be # saved to ${cluster.name}_deprecation.log in the Elasticsearch log folder. # # @param deprecation_logging_level # Default deprecation logging level for Elasticsearch. # # @param download_tool # Command-line invocation with which to retrieve an optional package_url. 
# # @param download_tool_insecure # Command-line invocation with which to retrieve an optional package_url when # certificate verification should be ignored. # # @param download_tool_verify_certificates # Whether or not to verify SSL/TLS certificates when retrieving package files # using a download tool instead of a package management provider. # # @param elasticsearch_group # The group Elasticsearch should run as. This also sets file group # permissions. # # @param elasticsearch_user # The user Elasticsearch should run as. This also sets file ownership. # # @param file_rolling_type # Configuration for the file appender rotation. It can be 'dailyRollingFile', # 'rollingFile' or 'file'. The first rotates by name, the second one by size # or third don't rotate automatically. # # @param homedir # Directory where the elasticsearch installation's files are kept (plugins, etc.) # # @param indices # Define indices via a hash. This is mainly used with Hiera's auto binding. # # @param init_defaults # Defaults file content in hash representation. # # @param init_defaults_file # Defaults file as puppet resource. # # @param init_template # Service file as a template. # # @param jvm_options # Array of options to set in jvm_options. # # @param keystore_password # Password to encrypt this node's Java keystore. # # @param keystore_path # Custom path to the Java keystore file. This parameter is optional. # # @param license # Optional Elasticsearch license in hash or string form. # # @param logdir # Directory that will be used for Elasticsearch logging. # # @param logging_config # Representation of information to be included in the log4j.properties file. # # @param logging_file # Instead of a hash, you may supply a `puppet://` file source for the # log4j.properties file. # # @param logging_level # Default logging level for Elasticsearch. # # @param logging_template # Use a custom logging template - just supply the relative path, i.e. 
# `$module/elasticsearch/logging.yml.erb` # # @param manage_repo # Enable repo management by enabling official Elastic repositories. # # @param oss # Whether to use the purely open source Elasticsearch package distribution. # # @param package_dir # Directory where packages are downloaded to. # # @param package_dl_timeout # For http, https, and ftp downloads, you may set how long the exec resource # may take. # # @param package_name # Name Of the package to install. # # @param package_provider # Method to install the packages, currently only `package` is supported. # # @param package_url # URL of the package to download. # This can be an http, https, or ftp resource for remote packages, or a # `puppet://` resource or `file:/` for local packages # # @param pid_dir # Directory where the elasticsearch process should write out its PID. # # @param pipelines # Define pipelines via a hash. This is mainly used with Hiera's auto binding. # # @param plugindir # Directory containing elasticsearch plugins. # Use this setting if your packages deviate from the norm (/usr/share/elasticsearch/plugins) # # @param plugins # Define plugins via a hash. This is mainly used with Hiera's auto binding. # # @param private_key # Path to the key associated with this node's certificate. # # @param proxy_url # For http and https downloads, you may set a proxy server to use. By default, # no proxy is used. # Format: `proto://[user:pass@]server[:port]/` # # @param purge_configdir # Purge the config directory of any unmanaged files. # # @param purge_package_dir # Purge package directory on removal # # @param purge_secrets # Whether or not keys present in the keystore will be removed if they are not # present in the specified secrets hash. # # @param repo_stage # Use stdlib stage setup for managing the repo instead of relationship # ordering. # # @param restart_on_change # Determines if the application should be automatically restarted # whenever the configuration, package, or plugins change. 
Enabling this # setting will cause Elasticsearch to restart whenever there is cause to # re-read configuration files, load new plugins, or start the service using an # updated/changed executable. This may be undesireable in highly available # environments. If all other restart_* parameters are left unset, the value of # `restart_on_change` is used for all other restart_*_change defaults. # # @param restart_config_change # Determines if the application should be automatically restarted # whenever the configuration changes. This includes the Elasticsearch # configuration file, any service files, and defaults files. # Disabling automatic restarts on config changes may be desired in an # environment where you need to ensure restarts occur in a controlled/rolling # manner rather than during a Puppet run. # # @param restart_package_change # Determines if the application should be automatically restarted # whenever the package (or package version) for Elasticsearch changes. # Disabling automatic restarts on package changes may be desired in an # environment where you need to ensure restarts occur in a controlled/rolling # manner rather than during a Puppet run. # # @param restart_plugin_change # Determines if the application should be automatically restarted whenever # plugins are installed or removed. # Disabling automatic restarts on plugin changes may be desired in an # environment where you need to ensure restarts occur in a controlled/rolling # manner rather than during a Puppet run. # # @param roles # Define roles via a hash. This is mainly used with Hiera's auto binding. # # @param rolling_file_max_backup_index # Max number of logs to store whern file_rolling_type is 'rollingFile' # # @param rolling_file_max_file_size # Max log file size when file_rolling_type is 'rollingFile' # # @param scripts # Define scripts via a hash. This is mainly used with Hiera's auto binding. 
# # @param secrets # Optional default configuration hash of key/value pairs to store in the # Elasticsearch keystore file. If unset, the keystore is left unmanaged. # # @param security_logging_content # File content for x-pack logging configuration file (will be placed # into log4j2.properties file). # # @param security_logging_source # File source for x-pack logging configuration file (will be placed # into log4j2.properties). # # @param service_name # Elasticsearch service name # # @param service_provider # The service resource type provider to use when managing elasticsearch instances. # # @param snapshot_repositories # Define snapshot repositories via a hash. This is mainly used with Hiera's auto binding. # # @param ssl # Whether to manage TLS certificates. Requires the ca_certificate, # certificate, private_key and keystore_password parameters to be set. # # @param status # To define the status of the service. If set to `enabled`, the service will # be run and will be started at boot time. If set to `disabled`, the service # is stopped and will not be started at boot time. If set to `running`, the # service will be run but will not be started at boot time. You may use this # to start a service on the first Puppet run instead of the system startup. # If set to `unmanaged`, the service will not be started at boot time and Puppet # does not care whether the service is running or not. For example, this may # be useful if a cluster management software is used to decide when to start # the service plus assuring it is running on the desired node. # # @param system_key # Source for the x-pack system key. Valid values are any that are # supported for the file resource `source` parameter. # # @param systemd_service_path # Path to the directory in which to install systemd service units. # # @param templates # Define templates via a hash. This is mainly used with Hiera's auto binding. # # @param users # Define templates via a hash. 
This is mainly used with Hiera's auto binding. # # @param validate_tls # Enable TLS/SSL validation on API calls. # # @param version # To set the specific version you want to install. # # @author Richard Pijnenburg # @author Tyler Langlois # @author Gavin Williams # class elasticsearch ( Enum['absent', 'present'] $ensure, Optional[String] $api_basic_auth_password, Optional[String] $api_basic_auth_username, Optional[String] $api_ca_file, Optional[String] $api_ca_path, String $api_host, Integer[0, 65535] $api_port, Enum['http', 'https'] $api_protocol, Integer $api_timeout, Boolean $autoupgrade, Hash $config, Stdlib::Absolutepath $configdir, Integer $configdir_recurselimit, String $daily_rolling_date_pattern, Elasticsearch::Multipath $datadir, Optional[Stdlib::Absolutepath] $defaults_location, Boolean $deprecation_logging, String $deprecation_logging_level, Optional[String] $download_tool, Optional[String] $download_tool_insecure, Boolean $download_tool_verify_certificates, String $elasticsearch_group, String $elasticsearch_user, Enum['dailyRollingFile', 'rollingFile', 'file'] $file_rolling_type, Stdlib::Absolutepath $homedir, Hash $indices, Hash $init_defaults, Optional[String] $init_defaults_file, String $init_template, Array[String] $jvm_options, Optional[Variant[String, Hash]] $license, Stdlib::Absolutepath $logdir, Hash $logging_config, Optional[String] $logging_file, String $logging_level, Optional[String] $logging_template, Boolean $manage_repo, Boolean $oss, Stdlib::Absolutepath $package_dir, Integer $package_dl_timeout, String $package_name, Enum['package'] $package_provider, Optional[String] $package_url, Optional[Stdlib::Absolutepath] $pid_dir, Hash $pipelines, Optional[Stdlib::Absolutepath] $plugindir, Hash $plugins, Optional[Stdlib::HTTPUrl] $proxy_url, Boolean $purge_configdir, Boolean $purge_package_dir, Boolean $purge_secrets, Variant[Boolean, String] $repo_stage, Boolean $restart_on_change, Hash $roles, Integer $rolling_file_max_backup_index, String 
$rolling_file_max_file_size, Hash $scripts, Optional[Hash] $secrets, Optional[String] $security_logging_content, Optional[String] $security_logging_source, String $service_name, Enum['init', 'openbsd', 'openrc', 'systemd'] $service_provider, Hash $snapshot_repositories, Boolean $ssl, Elasticsearch::Status $status, Optional[String] $system_key, Stdlib::Absolutepath $systemd_service_path, Hash $templates, Hash $users, Boolean $validate_tls, Variant[String, Boolean] $version, Optional[Stdlib::Absolutepath] $ca_certificate = undef, Optional[Stdlib::Absolutepath] $certificate = undef, String $default_logging_level = $logging_level, Optional[String] $keystore_password = undef, Optional[Stdlib::Absolutepath] $keystore_path = undef, Optional[Stdlib::Absolutepath] $private_key = undef, Boolean $restart_config_change = $restart_on_change, Boolean $restart_package_change = $restart_on_change, Boolean $restart_plugin_change = $restart_on_change, ) { - #### Validate parameters if ($package_url != undef and $version != false) { fail('Unable to set the version number when using package_url option.') } if ($version != false) { case $facts['os']['family'] { 'RedHat', 'Linux', 'Suse': { if ($version =~ /.+-\d/) { $pkg_version = $version } else { $pkg_version = "${version}-1" } } default: { $pkg_version = $version } } } # This value serves as an unchanging default for platforms as a default for # init scripts to fallback on. $_datadir_default = $facts['kernel'] ? { 'Linux' => '/var/lib/elasticsearch', 'OpenBSD' => '/var/elasticsearch/data', default => undef, } # The OSS package distribution's package appends `-oss` to the end of the # canonical package name. $_package_name = $oss ? { true => "${package_name}-oss", default => $package_name, } # Set the plugin path variable for use later in the module. if $plugindir == undef { $real_plugindir = "${homedir}/plugins" } else { $real_plugindir = $plugindir } # Should we restart Elasticsearch on config change? 
$_notify_service = $elasticsearch::restart_config_change ? { true => Service[$elasticsearch::service_name], false => undef, } #### Manage actions contain elasticsearch::package contain elasticsearch::config contain elasticsearch::service create_resources('elasticsearch::index', $elasticsearch::indices) create_resources('elasticsearch::pipeline', $elasticsearch::pipelines) create_resources('elasticsearch::plugin', $elasticsearch::plugins) create_resources('elasticsearch::role', $elasticsearch::roles) create_resources('elasticsearch::script', $elasticsearch::scripts) create_resources('elasticsearch::snapshot_repository', $elasticsearch::snapshot_repositories) create_resources('elasticsearch::template', $elasticsearch::templates) create_resources('elasticsearch::user', $elasticsearch::users) if ($manage_repo == true) { if ($repo_stage == false) { # Use normal relationship ordering contain elastic_stack::repo Class['elastic_stack::repo'] -> Class['elasticsearch::package'] - } else { # Use staging for ordering if !(defined(Stage[$repo_stage])) { stage { $repo_stage: before => Stage['main'] } } include elastic_stack::repo - Class<|title == 'elastic_stack::repo'|>{ + Class<|title == 'elastic_stack::repo'|> { stage => $repo_stage, } } } if ($license != undef) { contain elasticsearch::license } #### Manage relationships # # Note that many of these overly verbose declarations work around # https://tickets.puppetlabs.com/browse/PUP-1410 # which means clean arrow order chaining won't work if someone, say, # doesn't declare any plugins. 
# # forgive me for what you're about to see if defined(Class['java']) { Class['java'] -> Class['elasticsearch::config'] } if $ensure == 'present' { - # Installation, configuration and service Class['elasticsearch::package'] -> Class['elasticsearch::config'] if $restart_config_change { Class['elasticsearch::config'] ~> Class['elasticsearch::service'] } else { Class['elasticsearch::config'] -> Class['elasticsearch::service'] } # Top-level ordering bindings for resources. Class['elasticsearch::config'] -> Elasticsearch::Plugin <| ensure == 'present' or ensure == 'installed' |> Elasticsearch::Plugin <| ensure == 'absent' |> -> Class['elasticsearch::config'] Class['elasticsearch::config'] -> Elasticsearch::User <| ensure == 'present' |> # Elasticsearch::User <| ensure == 'absent' |> # -> Class['elasticsearch::config'] # Class['elasticsearch::config'] # -> Elasticsearch::Role <| |> Class['elasticsearch::config'] -> Elasticsearch::Template <| |> Class['elasticsearch::config'] -> Elasticsearch::Pipeline <| |> Class['elasticsearch::config'] -> Elasticsearch::Index <| |> Class['elasticsearch::config'] -> Elasticsearch::Snapshot_repository <| |> - } else { - # Absent; remove configuration before the package. Class['elasticsearch::config'] -> Class['elasticsearch::package'] # Top-level ordering bindings for resources. 
Elasticsearch::Plugin <| |> -> Class['elasticsearch::config'] Elasticsearch::User <| |> -> Class['elasticsearch::config'] Elasticsearch::Role <| |> -> Class['elasticsearch::config'] Elasticsearch::Template <| |> -> Class['elasticsearch::config'] Elasticsearch::Pipeline <| |> -> Class['elasticsearch::config'] Elasticsearch::Index <| |> -> Class['elasticsearch::config'] Elasticsearch::Snapshot_repository <| |> -> Class['elasticsearch::config'] - } # Install plugins before managing users/roles Elasticsearch::Plugin <| ensure == 'present' or ensure == 'installed' |> -> Elasticsearch::User <| |> Elasticsearch::Plugin <| ensure == 'present' or ensure == 'installed' |> -> Elasticsearch::Role <| |> # Remove plugins after managing users/roles Elasticsearch::User <| |> -> Elasticsearch::Plugin <| ensure == 'absent' |> Elasticsearch::Role <| |> -> Elasticsearch::Plugin <| ensure == 'absent' |> # Ensure roles are defined before managing users that reference roles Elasticsearch::Role <| |> -> Elasticsearch::User <| ensure == 'present' |> # Ensure users are removed before referenced roles are managed Elasticsearch::User <| ensure == 'absent' |> -> Elasticsearch::Role <| |> # Ensure users and roles are managed before calling out to REST resources Elasticsearch::Role <| |> -> Elasticsearch::Template <| |> Elasticsearch::User <| |> -> Elasticsearch::Template <| |> Elasticsearch::Role <| |> -> Elasticsearch::Pipeline <| |> Elasticsearch::User <| |> -> Elasticsearch::Pipeline <| |> Elasticsearch::Role <| |> -> Elasticsearch::Index <| |> Elasticsearch::User <| |> -> Elasticsearch::Index <| |> Elasticsearch::Role <| |> -> Elasticsearch::Snapshot_repository <| |> Elasticsearch::User <| |> -> Elasticsearch::Snapshot_repository <| |> # Ensure that any command-line based user changes are performed before the # file is modified Elasticsearch_user <| |> -> Elasticsearch_user_file <| |> } diff --git a/manifests/package.pp b/manifests/package.pp index 2b2d4b8..3b956a9 100644 --- 
a/manifests/package.pp +++ b/manifests/package.pp @@ -1,192 +1,168 @@ # This class exists to coordinate all software package management related # actions, functionality and logical units in a central place. # # It is not intended to be used directly by external resources like node # definitions or other modules. # # @example importing this class by other classes to use its functionality: # class { 'elasticsearch::package': } # # @author Richard Pijnenburg # @author Tyler Langlois # class elasticsearch::package { - Exec { - path => [ '/bin', '/usr/bin', '/usr/local/bin' ], + path => ['/bin', '/usr/bin', '/usr/local/bin'], cwd => '/', tries => 3, try_sleep => 10, } if $elasticsearch::ensure == 'present' { - if $elasticsearch::restart_package_change { Package['elasticsearch'] ~> Class['elasticsearch::service'] } Package['elasticsearch'] ~> Exec['remove_plugin_dir'] # Create directory to place the package file $package_dir = $elasticsearch::package_dir exec { 'create_package_dir_elasticsearch': cwd => '/', path => ['/usr/bin', '/bin'], command => "mkdir -p ${package_dir}", creates => $package_dir, } file { $package_dir: ensure => 'directory', purge => $elasticsearch::purge_package_dir, force => $elasticsearch::purge_package_dir, backup => false, require => Exec['create_package_dir_elasticsearch'], } # Check if we want to install a specific version or not if $elasticsearch::version == false { - $package_ensure = $elasticsearch::autoupgrade ? 
{ true => 'latest', false => 'present', } - } else { - # install specific version $package_ensure = $elasticsearch::pkg_version - } # action if ($elasticsearch::package_url != undef) { - case $elasticsearch::package_provider { - 'package': { $before = Package['elasticsearch'] } + 'package': { $before = Package['elasticsearch'] } default: { fail("software provider \"${elasticsearch::package_provider}\".") } } $filename_array = split($elasticsearch::package_url, '/') $basefilename = $filename_array[-1] $source_array = split($elasticsearch::package_url, ':') $protocol_type = $source_array[0] $ext_array = split($basefilename, '\.') $ext = $ext_array[-1] $pkg_source = "${package_dir}/${basefilename}" case $protocol_type { - 'puppet': { - file { $pkg_source: ensure => file, source => $elasticsearch::package_url, require => File[$package_dir], backup => false, before => $before, } - } 'ftp', 'https', 'http': { - if $elasticsearch::proxy_url != undef { $exec_environment = [ 'use_proxy=yes', "http_proxy=${elasticsearch::proxy_url}", "https_proxy=${elasticsearch::proxy_url}", ] } else { $exec_environment = [] } case $elasticsearch::download_tool { String: { $_download_command = if $elasticsearch::download_tool_verify_certificates { $elasticsearch::download_tool } else { $elasticsearch::download_tool_insecure } exec { 'download_package_elasticsearch': command => "${_download_command} ${pkg_source} ${elasticsearch::package_url} 2> /dev/null", creates => $pkg_source, environment => $exec_environment, timeout => $elasticsearch::package_dl_timeout, require => File[$package_dir], before => $before, } } default: { fail("no \$elasticsearch::download_tool defined for ${facts['os']['family']}") } } - } 'file': { - $source_path = $source_array[1] file { $pkg_source: ensure => file, source => $source_path, require => File[$package_dir], backup => false, before => $before, } - } default: { fail("Protocol must be puppet, file, http, https, or ftp. 
You have given \"${protocol_type}\"") } } if ($elasticsearch::package_provider == 'package') { - case $ext { 'deb': { Package { provider => 'dpkg', source => $pkg_source } } 'rpm': { Package { provider => 'rpm', source => $pkg_source } } default: { fail("Unknown file extention \"${ext}\".") } } - } - } else { if ($elasticsearch::manage_repo and $facts['os']['family'] == 'Debian') { Class['apt::update'] -> Package['elasticsearch'] } } - - # Package removal } else { - + # Package removal if ($facts['os']['family'] == 'Suse') { Package { provider => 'rpm', } $package_ensure = 'absent' } else { $package_ensure = 'purged' } - } if ($elasticsearch::package_provider == 'package') { - package { 'elasticsearch': ensure => $package_ensure, name => $elasticsearch::_package_name, } exec { 'remove_plugin_dir': refreshonly => true, command => "rm -rf ${elasticsearch::real_plugindir}", } - - } else { fail("\"${elasticsearch::package_provider}\" is not supported") } - } diff --git a/manifests/pipeline.pp b/manifests/pipeline.pp index 4571ba8..64a3c72 100644 --- a/manifests/pipeline.pp +++ b/manifests/pipeline.pp @@ -1,79 +1,78 @@ # This define allows you to insert, update or delete Elasticsearch index # ingestion pipelines. # # Pipeline content should be defined through the `content` parameter. # # @param ensure # Controls whether the named pipeline should be present or absent in # the cluster. # # @param content # Contents of the pipeline in hash form. # # @param api_basic_auth_password # HTTP basic auth password to use when communicating over the Elasticsearch # API. # # @param api_basic_auth_username # HTTP basic auth username to use when communicating over the Elasticsearch # API. # # @param api_ca_file # Path to a CA file which will be used to validate server certs when # communicating with the Elasticsearch API over HTTPS. 
# # @param api_ca_path # Path to a directory with CA files which will be used to validate server # certs when communicating with the Elasticsearch API over HTTPS. # # @param api_host # Host name or IP address of the ES instance to connect to. # # @param api_port # Port number of the ES instance to connect to # # @param api_protocol # Protocol that should be used to connect to the Elasticsearch API. # # @param api_timeout # Timeout period (in seconds) for the Elasticsearch API. # # @param validate_tls # Determines whether the validity of SSL/TLS certificates received from the # Elasticsearch API should be verified or ignored. # # @author Tyler Langlois # define elasticsearch::pipeline ( Enum['absent', 'present'] $ensure = 'present', Optional[String] $api_basic_auth_password = $elasticsearch::api_basic_auth_password, Optional[String] $api_basic_auth_username = $elasticsearch::api_basic_auth_username, Optional[Stdlib::Absolutepath] $api_ca_file = $elasticsearch::api_ca_file, Optional[Stdlib::Absolutepath] $api_ca_path = $elasticsearch::api_ca_path, String $api_host = $elasticsearch::api_host, Integer[0, 65535] $api_port = $elasticsearch::api_port, Enum['http', 'https'] $api_protocol = $elasticsearch::api_protocol, Integer $api_timeout = $elasticsearch::api_timeout, Hash $content = {}, Boolean $validate_tls = $elasticsearch::validate_tls, ) { - es_instance_conn_validator { "${name}-ingest-pipeline": server => $api_host, port => $api_port, timeout => $api_timeout, } -> elasticsearch_pipeline { $name: ensure => $ensure, content => $content, protocol => $api_protocol, host => $api_host, port => $api_port, timeout => $api_timeout, username => $api_basic_auth_username, password => $api_basic_auth_password, ca_file => $api_ca_file, ca_path => $api_ca_path, validate_tls => $validate_tls, } } diff --git a/manifests/plugin.pp b/manifests/plugin.pp index 7f219c8..6527f59 100644 --- a/manifests/plugin.pp +++ b/manifests/plugin.pp @@ -1,146 +1,144 @@ # This define allows you to 
install arbitrary Elasticsearch plugins # either by using the default repositories or by specifying an URL # # @example install from official repository # elasticsearch::plugin {'mobz/elasticsearch-head': module_dir => 'head'} # # @example installation using a custom URL # elasticsearch::plugin { 'elasticsearch-jetty': # module_dir => 'elasticsearch-jetty', # url => 'https://oss-es-plugins.s3.amazonaws.com/elasticsearch-jetty/elasticsearch-jetty-0.90.0.zip', # } # # @param ensure # Whether the plugin will be installed or removed. # Set to 'absent' to ensure a plugin is not installed # # @param configdir # Path to the elasticsearch configuration directory (ES_PATH_CONF) # to which the plugin should be installed. # # @param java_opts # Array of Java options to be passed to `ES_JAVA_OPTS` # # @param java_home # Path to JAVA_HOME, if Java is installed in a non-standard location. # # @param module_dir # Directory name where the module has been installed # This is automatically generated based on the module name # Specify a value here to override the auto generated value # # @param proxy_host # Proxy host to use when installing the plugin # # @param proxy_password # Proxy auth password to use when installing the plugin # # @param proxy_port # Proxy port to use when installing the plugin # # @param proxy_username # Proxy auth username to use when installing the plugin # # @param source # Specify the source of the plugin. # This will copy over the plugin to the node and use it for installation. # Useful for offline installation # # @param url # Specify an URL where to download the plugin from. 
# # @author Richard Pijnenburg # @author Matteo Sessa # @author Dennis Konert # @author Tyler Langlois # @author Gavin Williams # define elasticsearch::plugin ( Enum['absent', 'present'] $ensure = 'present', Stdlib::Absolutepath $configdir = $elasticsearch::configdir, Array[String] $java_opts = [], Optional[Stdlib::Absolutepath] $java_home = undef, Optional[String] $module_dir = undef, Optional[String] $proxy_host = undef, Optional[String] $proxy_password = undef, Optional[Integer[0, 65535]] $proxy_port = undef, Optional[String] $proxy_username = undef, Optional[String] $source = undef, Optional[Stdlib::HTTPUrl] $url = undef, ) { - include elasticsearch case $ensure { 'present': { $_file_ensure = 'directory' $_file_before = [] } 'absent': { $_file_ensure = $ensure $_file_before = File[$elasticsearch::real_plugindir] } - default: { } + default: { + } } # set proxy by override or parse and use proxy_url from # elasticsearch::proxy_url or use no proxy at all if ($proxy_host != undef and $proxy_port != undef) { if ($proxy_username != undef and $proxy_password != undef) { $_proxy_auth = "${proxy_username}:${proxy_password}@" } else { $_proxy_auth = undef } $_proxy = "http://${_proxy_auth}${proxy_host}:${proxy_port}" } elsif ($elasticsearch::proxy_url != undef) { $_proxy = $elasticsearch::proxy_url } else { $_proxy = undef } if ($source != undef) { - $filename_array = split($source, '/') $basefilename = $filename_array[-1] $file_source = "${elasticsearch::package_dir}/${basefilename}" file { $file_source: ensure => 'file', source => $source, before => Elasticsearch_plugin[$name], } - } else { $file_source = undef } $_module_dir = es_plugin_name($module_dir, $name) elasticsearch_plugin { $name: ensure => $ensure, configdir => $configdir, elasticsearch_package_name => 'elasticsearch', java_opts => $java_opts, java_home => $java_home, source => $file_source, url => $url, proxy => $_proxy, plugin_dir => $elasticsearch::real_plugindir, plugin_path => $module_dir, before => 
Service['elasticsearch'], } -> file { "${elasticsearch::real_plugindir}/${_module_dir}": ensure => $_file_ensure, mode => 'o+Xr', recurse => true, before => $_file_before, } if $elasticsearch::restart_plugin_change { Elasticsearch_plugin[$name] { notify +> Service['elasticsearch'], } } } diff --git a/manifests/service.pp b/manifests/service.pp index 6e46270..ec37fe0 100644 --- a/manifests/service.pp +++ b/manifests/service.pp @@ -1,53 +1,52 @@ # This class exists to coordinate all service management related actions, # functionality and logical units in a central place. # # *Note*: "service" is the Puppet term and type for background processes # in general and is used in a platform-independent way. E.g. "service" means # "daemon" in relation to Unix-like systems. # # @author Richard Pijnenburg # @author Tyler Langlois # @author Gavin Williams # class elasticsearch::service { - #### Service management if $elasticsearch::ensure == 'present' { - case $elasticsearch::status { # make sure service is currently running, start it on boot 'enabled': { $_service_ensure = 'running' $_service_enable = true } # make sure service is currently stopped, do not start it on boot 'disabled': { $_service_ensure = 'stopped' $_service_enable = false } # make sure service is currently running, do not start it on boot 'running': { $_service_ensure = 'running' $_service_enable = false } # do not start service on boot, do not care whether currently running # or not 'unmanaged': { $_service_ensure = undef $_service_enable = false } - default: { } + default: { + } } } else { # make sure the service is stopped and disabled (the removal itself will be # done by package.pp) $_service_ensure = 'stopped' $_service_enable = false } service { $elasticsearch::service_name: ensure => $_service_ensure, enable => $_service_enable, } } diff --git a/manifests/snapshot_repository.pp b/manifests/snapshot_repository.pp index 1906194..cf0e2e0 100644 --- a/manifests/snapshot_repository.pp +++ 
b/manifests/snapshot_repository.pp @@ -1,104 +1,103 @@ # This define allows you to insert, update or delete Elasticsearch snapshot # repositories. # # @param ensure # Controls whether the named index template should be present or absent in # the cluster. # # @param api_basic_auth_password # HTTP basic auth password to use when communicating over the Elasticsearch # API. # # @param api_basic_auth_username # HTTP basic auth username to use when communicating over the Elasticsearch # API. # # @param api_ca_file # Path to a CA file which will be used to validate server certs when # communicating with the Elasticsearch API over HTTPS. # # @param api_ca_path # Path to a directory with CA files which will be used to validate server # certs when communicating with the Elasticsearch API over HTTPS. # # @param api_host # Host name or IP address of the ES instance to connect to. # # @param api_port # Port number of the ES instance to connect to # # @param api_protocol # Protocol that should be used to connect to the Elasticsearch API. # # @param api_timeout # Timeout period (in seconds) for the Elasticsearch API. # # @param repository_type # Snapshot repository type. # # @param location # Location of snapshots. Mandatory # # @param compress # Compress the snapshot metadata files? # # @param chunk_size # Chunk size to break big files down into. # # @param max_restore_rate # Throttle value for node restore rate. # # @param max_snapshot_rate # Throttle value for node snapshot rate. # # @param validate_tls # Determines whether the validity of SSL/TLS certificates received from the # Elasticsearch API should be verified or ignored. 
# # @author Gavin Williams # @author Richard Pijnenburg # @author Tyler Langlois # define elasticsearch::snapshot_repository ( String $location, Enum['absent', 'present'] $ensure = 'present', Optional[String] $api_basic_auth_password = $elasticsearch::api_basic_auth_password, Optional[String] $api_basic_auth_username = $elasticsearch::api_basic_auth_username, Optional[Stdlib::Absolutepath] $api_ca_file = $elasticsearch::api_ca_file, Optional[Stdlib::Absolutepath] $api_ca_path = $elasticsearch::api_ca_path, String $api_host = $elasticsearch::api_host, Integer[0, 65535] $api_port = $elasticsearch::api_port, Enum['http', 'https'] $api_protocol = $elasticsearch::api_protocol, Integer $api_timeout = $elasticsearch::api_timeout, Boolean $compress = true, Optional[String] $chunk_size = undef, Optional[String] $max_restore_rate = undef, Optional[String] $max_snapshot_rate = undef, Optional[String] $repository_type = undef, Boolean $validate_tls = $elasticsearch::validate_tls, ) { - es_instance_conn_validator { "${name}-snapshot": server => $api_host, port => $api_port, timeout => $api_timeout, } -> elasticsearch_snapshot_repository { $name: ensure => $ensure, chunk_size => $chunk_size, compress => $compress, location => $location, max_restore_rate => $max_restore_rate, max_snapshot_rate => $max_snapshot_rate, type => $repository_type, protocol => $api_protocol, host => $api_host, port => $api_port, timeout => $api_timeout, username => $api_basic_auth_username, password => $api_basic_auth_password, ca_file => $api_ca_file, ca_path => $api_ca_path, validate_tls => $validate_tls, } } diff --git a/metadata.json b/metadata.json index 628da01..5cab20d 100644 --- a/metadata.json +++ b/metadata.json @@ -1,79 +1,83 @@ { "name": "elastic-elasticsearch", "version": "7.0.0", "source": "https://github.com/elastic/puppet-elasticsearch", "author": "elastic", "license": "Apache-2.0", "summary": "Module for managing and configuring Elasticsearch nodes", "project_page": 
"https://github.com/elastic/puppet-elasticsearch", "issues_url": "https://github.com/elastic/puppet-elasticsearch/issues", "dependencies": [ { "name": "elastic/elastic_stack", "version_requirement": ">= 6.1.0 < 8.0.0" }, { "name": "richardc/datacat", "version_requirement": ">= 0.6.2 < 1.0.0" }, + { + "name": "puppetlabs/java", + "version_requirement": ">= 1.0.0 < 8.0.0" + }, { "name": "puppetlabs/stdlib", "version_requirement": ">= 4.13.0 < 7.0.0" } ], "operatingsystem_support": [ { "operatingsystem": "RedHat", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "CentOS", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "OracleLinux", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "Scientific", "operatingsystemrelease": [ "7", "8" ] }, { "operatingsystem": "Debian", "operatingsystemrelease": [ "10" ] }, { "operatingsystem": "Ubuntu", "operatingsystemrelease": [ "18.04", "20.04" ] }, { "operatingsystem": "SLES", "operatingsystemrelease": [ "12" ] } ], "requirements": [ { "name": "puppet", "version_requirement": ">= 6.1.0 < 8.0.0" } ] } diff --git a/spec/acceptance/.beaker-foss.cfg b/spec/acceptance/.beaker-foss.cfg deleted file mode 100644 index 2fcc581..0000000 --- a/spec/acceptance/.beaker-foss.cfg +++ /dev/null @@ -1,5 +0,0 @@ -{ - :ssh => { - :user_known_hosts_file => '/dev/null' - } -} diff --git a/spec/acceptance/nodesets/amazonlinux-1-x64.yml b/spec/acceptance/nodesets/amazonlinux-1-x64.yml deleted file mode 100644 index 21f449a..0000000 --- a/spec/acceptance/nodesets/amazonlinux-1-x64.yml +++ /dev/null @@ -1,14 +0,0 @@ -HOSTS: - amazonlinux-1-x64: - roles: - - agent - - master - platform: el-6-x86_64 - image: amazonlinux:1 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_container_name: amazonlinux-1-x64 - docker_preserve_image: true - docker_image_commands: - - rm /etc/init/tty.conf - - yum install -y java-1.8.0-openjdk-headless rubygems20 tar wget which diff --git 
a/spec/acceptance/nodesets/amazonlinux-2-x64.yml b/spec/acceptance/nodesets/amazonlinux-2-x64.yml deleted file mode 100644 index 2aceb47..0000000 --- a/spec/acceptance/nodesets/amazonlinux-2-x64.yml +++ /dev/null @@ -1,18 +0,0 @@ -HOSTS: - amazonlinux-2-x64: - roles: - - agent - - master - platform: el-7-x86_64 - image: amazonlinux:2 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - yum install -y java-1.8.0-openjdk-headless net-tools wget which cronie iproute - - mkdir -p /etc/selinux/targeted/contexts/ - - echo '' > /etc/selinux/targeted/contexts/dbus_contexts - - rm /lib/systemd/system/systemd*udev* - - rm /lib/systemd/system/getty.target -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/centos-6-x64.yml b/spec/acceptance/nodesets/centos-6-x64.yml deleted file mode 100644 index 40b294b..0000000 --- a/spec/acceptance/nodesets/centos-6-x64.yml +++ /dev/null @@ -1,19 +0,0 @@ -HOSTS: - centos-6-x64: - roles: - - master - - agent - - database - - dashboard - platform: el-6-x86_64 - image: centos:6.9 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - yum install -y wget tar which java-1.8.0-openjdk-headless - - rm /etc/init/tty.conf - - echo -e "elasticsearch hard nproc 4096\nelasticsearch soft nproc 4096" >> /etc/security/limits.conf - - echo -e "esuser hard nproc 4096\nesuser soft nproc 4096" >> /etc/security/limits.conf -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/centos-7-x64.yml b/spec/acceptance/nodesets/centos-7-x64.yml deleted file mode 100644 index dc95ee4..0000000 --- a/spec/acceptance/nodesets/centos-7-x64.yml +++ /dev/null @@ -1,20 +0,0 @@ -HOSTS: - centos-7-x64: - roles: - - agent - - master - - database - - dashboard - platform: el-7-x86_64 - image: centos:7 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - yum install -y wget which cronie iproute 
- - mkdir -p /etc/selinux/targeted/contexts/ - - echo '' > /etc/selinux/targeted/contexts/dbus_contexts - - rm /lib/systemd/system/systemd*udev* - - rm /lib/systemd/system/getty.target -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/centos-8-x64.yml b/spec/acceptance/nodesets/centos-8-x64.yml deleted file mode 100644 index 03033d4..0000000 --- a/spec/acceptance/nodesets/centos-8-x64.yml +++ /dev/null @@ -1,20 +0,0 @@ -HOSTS: - centos-8-x64: - roles: - - agent - - master - - database - - dashboard - platform: el-8-x86_64 - image: centos:8 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - yum install -y wget which cronie iproute - # - mkdir -p /etc/selinux/targeted/contexts/ - # - echo '' > /etc/selinux/targeted/contexts/dbus_contexts - # - rm /lib/systemd/system/systemd*udev* - # - rm /lib/systemd/system/getty.target -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/debian-10-x64.yml b/spec/acceptance/nodesets/debian-10-x64.yml deleted file mode 100644 index 20faebd..0000000 --- a/spec/acceptance/nodesets/debian-10-x64.yml +++ /dev/null @@ -1,17 +0,0 @@ -HOSTS: - debian-10: - roles: - - agent - platform: debian-10-amd64 - image: debian:10 - hypervisor: docker - docker_cmd: ["/bin/systemd"] - docker_preserve_image: true - docker_image_commands: - - apt-get install -yq apt-transport-https wget net-tools gpg ruby-augeas software-properties-common - - wget -qO - https://adoptopenjdk.jfrog.io/adoptopenjdk/api/gpg/key/public | apt-key add - - - add-apt-repository --yes https://adoptopenjdk.jfrog.io/adoptopenjdk/deb/ - - apt update && apt-get install -yq adoptopenjdk-8-hotspot -CONFIG: - log_level: warn - diff --git a/spec/acceptance/nodesets/debian-8-x64.yml b/spec/acceptance/nodesets/debian-8-x64.yml deleted file mode 100644 index 766a414..0000000 --- a/spec/acceptance/nodesets/debian-8-x64.yml +++ /dev/null @@ -1,23 +0,0 @@ -HOSTS: - debian-8: - roles: - - agent - - master - - 
database - - dashboard - platform: debian-8-amd64 - image: debian:8.11 - hypervisor: docker - docker_cmd: ["/bin/systemd"] - docker_preserve_image: true - docker_image_commands: - - echo 'deb [check-valid-until=no] http://archive.debian.org/debian jessie-backports main' >> /etc/apt/sources.list - - echo 'Acquire::Check-Valid-Until "false";' >> /etc/apt/apt.conf - - sed -i '/jessie.updates/d' /etc/apt/sources.list - - apt-get update - - apt-get install -yq -t jessie-backports openjdk-8-jre-headless - - apt-get install -yq wget net-tools apt-transport-https - - rm /lib/systemd/system/systemd*udev* - - rm /lib/systemd/system/getty.target -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/debian-9-x64.yml b/spec/acceptance/nodesets/debian-9-x64.yml deleted file mode 100644 index 238d589..0000000 --- a/spec/acceptance/nodesets/debian-9-x64.yml +++ /dev/null @@ -1,14 +0,0 @@ -HOSTS: - debian-9: - roles: - - agent - platform: debian-9-amd64 - image: debian:9 - hypervisor: docker - docker_cmd: ["/bin/systemd"] - docker_preserve_image: true - docker_image_commands: - - apt-get install -yq apt-transport-https openjdk-8-jre-headless wget net-tools gpg ruby-augeas -CONFIG: - log_level: warn - diff --git a/spec/acceptance/nodesets/default.yml b/spec/acceptance/nodesets/default.yml deleted file mode 120000 index c1489c8..0000000 --- a/spec/acceptance/nodesets/default.yml +++ /dev/null @@ -1 +0,0 @@ -ubuntu-server-1604-x64.yml \ No newline at end of file diff --git a/spec/acceptance/nodesets/oracle-6-x64.yml b/spec/acceptance/nodesets/oracle-6-x64.yml deleted file mode 100644 index f492d67..0000000 --- a/spec/acceptance/nodesets/oracle-6-x64.yml +++ /dev/null @@ -1,19 +0,0 @@ -HOSTS: - centos-6-x64: - roles: - - agent - - master - - database - - dashboard - platform: el-6-x86_64 - image: oraclelinux:6 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - yum install -y tar wget which java-1.8.0-openjdk-headless 
- - rm /etc/init/tty.conf - - echo -e "elasticsearch hard nproc 4096\nelasticsearch soft nproc 4096" >> /etc/security/limits.conf - - echo -e "esuser hard nproc 4096\nesuser soft nproc 4096" >> /etc/security/limits.conf -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/oracle-7-x64.yml b/spec/acceptance/nodesets/oracle-7-x64.yml deleted file mode 100644 index 43c777f..0000000 --- a/spec/acceptance/nodesets/oracle-7-x64.yml +++ /dev/null @@ -1,20 +0,0 @@ -HOSTS: - oracle-7-x64: - roles: - - agent - - master - - database - - dashboard - platform: el-7-x86_64 - image: oraclelinux:7 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - yum install -y wget which cronie - - mkdir -p /etc/selinux/targeted/contexts/ - - echo '' > /etc/selinux/targeted/contexts/dbus_contexts - - rm /lib/systemd/system/systemd*udev* - - rm /lib/systemd/system/getty.target -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/sles-11-x64.yml b/spec/acceptance/nodesets/sles-11-x64.yml deleted file mode 100644 index 80f8309..0000000 --- a/spec/acceptance/nodesets/sles-11-x64.yml +++ /dev/null @@ -1,19 +0,0 @@ -HOSTS: - sles-11-x64: - roles: - - agent - - master - - database - - dashboard - platform: sles-11-x64 - image: dliappis/sles:11sp4 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - gem uninstall puppet hiera - - zypper install -y augeas augeas-lenses pkgconfig - - mkdir -p /etc/puppetlabs/code /etc/puppet/modules - - ln -sf /etc/puppet/modules /etc/puppetlabs/code/modules -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/sles-12-x64.yml b/spec/acceptance/nodesets/sles-12-x64.yml deleted file mode 100644 index a2c5cc0..0000000 --- a/spec/acceptance/nodesets/sles-12-x64.yml +++ /dev/null @@ -1,20 +0,0 @@ -HOSTS: - sles-12-x64: - roles: - - agent - - master - - database - - dashboard - platform: sles-12-x86_64 - image: dliappis/sles:12 - 
hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - rm /etc/zypp/repos.d/devel_languages_python.repo - - gem uninstall -x puppet hiera - - zypper clean -a - - zypper install --force-resolution -y augeas which - - ln -s /usr/lib/systemd/system/sshd.service /etc/systemd/system/multi-user.target.wants/sshd.service -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/ubuntu-server-1404-x64.yml b/spec/acceptance/nodesets/ubuntu-server-1404-x64.yml deleted file mode 100644 index 6dea31b..0000000 --- a/spec/acceptance/nodesets/ubuntu-server-1404-x64.yml +++ /dev/null @@ -1,24 +0,0 @@ -HOSTS: - ubuntu-14-04: - roles: - - agent - - master - - database - - dashboard - platform: ubuntu-14.04-amd64 - image: ubuntu:14.04.5 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - apt-get update - - apt-get install -yq apt-transport-https libssl-dev software-properties-common - - add-apt-repository -y ppa:openjdk-r/ppa - - apt-get update - - apt-get install -y openjdk-8-jre-headless - - update-ca-certificates -f - - ln -sf /sbin/initctl.distrib /sbin/initctl - - locale-gen en_US en_US.UTF-8 - - dpkg-reconfigure locales -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/ubuntu-server-1604-x64.yml b/spec/acceptance/nodesets/ubuntu-server-1604-x64.yml deleted file mode 100644 index 6a665dc..0000000 --- a/spec/acceptance/nodesets/ubuntu-server-1604-x64.yml +++ /dev/null @@ -1,17 +0,0 @@ -HOSTS: - ubuntu-16-04: - roles: - - agent - - master - - database - - dashboard - platform: ubuntu-16.04-amd64 - image: ubuntu:16.04 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - apt-get update - - apt-get install -yq libssl-dev apt-transport-https -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/ubuntu-server-1804-x64.yml b/spec/acceptance/nodesets/ubuntu-server-1804-x64.yml deleted file 
mode 100644 index 16b3bcc..0000000 --- a/spec/acceptance/nodesets/ubuntu-server-1804-x64.yml +++ /dev/null @@ -1,17 +0,0 @@ -HOSTS: - ubuntu-18-04: - roles: - - agent - - master - - database - - dashboard - platform: ubuntu-18.04-amd64 - image: ubuntu:18.04 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - apt-get update - - apt-get install -yq libssl-dev apt-transport-https openjdk-8-jdk iproute2 -CONFIG: - log_level: warn diff --git a/spec/acceptance/nodesets/ubuntu-server-2004-x64.yml b/spec/acceptance/nodesets/ubuntu-server-2004-x64.yml deleted file mode 100644 index c0b13f5..0000000 --- a/spec/acceptance/nodesets/ubuntu-server-2004-x64.yml +++ /dev/null @@ -1,17 +0,0 @@ -HOSTS: - ubuntu-20-04: - roles: - - agent - - master - - database - - dashboard - platform: ubuntu-20.04-amd64 - image: ubuntu:20.04 - hypervisor: docker - docker_cmd: ["/sbin/init"] - docker_preserve_image: true - docker_image_commands: - - apt-get update - - apt-get install -yq libssl-dev apt-transport-https openjdk-8-jdk iproute2 -CONFIG: - log_level: warn diff --git a/spec/acceptance/tests/acceptance_spec.rb b/spec/acceptance/tests/acceptance_spec.rb index 8cafa20..a016806 100644 --- a/spec/acceptance/tests/acceptance_spec.rb +++ b/spec/acceptance/tests/acceptance_spec.rb @@ -1,83 +1,87 @@ +# frozen_string_literal: true + require 'spec_helper_acceptance' -require 'helpers/acceptance/tests/basic_shared_examples.rb' -require 'helpers/acceptance/tests/template_shared_examples.rb' -require 'helpers/acceptance/tests/removal_shared_examples.rb' -require 'helpers/acceptance/tests/pipeline_shared_examples.rb' -require 'helpers/acceptance/tests/plugin_shared_examples.rb' -require 'helpers/acceptance/tests/plugin_upgrade_shared_examples.rb' -require 'helpers/acceptance/tests/snapshot_repository_shared_examples.rb' -require 'helpers/acceptance/tests/datadir_shared_examples.rb' -require 'helpers/acceptance/tests/package_url_shared_examples.rb' 
-require 'helpers/acceptance/tests/hiera_shared_examples.rb' -require 'helpers/acceptance/tests/usergroup_shared_examples.rb' -require 'helpers/acceptance/tests/security_shared_examples.rb' +require 'helpers/acceptance/tests/basic_shared_examples' +require 'helpers/acceptance/tests/template_shared_examples' +require 'helpers/acceptance/tests/removal_shared_examples' +require 'helpers/acceptance/tests/pipeline_shared_examples' +require 'helpers/acceptance/tests/plugin_shared_examples' +require 'helpers/acceptance/tests/plugin_upgrade_shared_examples' +require 'helpers/acceptance/tests/snapshot_repository_shared_examples' +require 'helpers/acceptance/tests/datadir_shared_examples' +require 'helpers/acceptance/tests/package_url_shared_examples' +require 'helpers/acceptance/tests/hiera_shared_examples' +require 'helpers/acceptance/tests/usergroup_shared_examples' +require 'helpers/acceptance/tests/security_shared_examples' describe "elasticsearch v#{v[:elasticsearch_full_version]} class" do es_config = { - 'cluster.name' => v[:cluster_name], + 'cluster.name' => v[:cluster_name], 'http.bind_host' => '0.0.0.0', - 'http.port' => 9200, - 'node.name' => 'elasticsearch01' + 'http.port' => 9200, + 'node.name' => 'elasticsearch01' } - let(:elastic_repo) { not v[:is_snapshot] } + let(:elastic_repo) { !v[:is_snapshot] } let(:manifest) do - package = if not v[:is_snapshot] + package = if v[:is_snapshot] <<-MANIFEST - # Hard version set here due to plugin incompatibilities. - version => '#{v[:elasticsearch_full_version]}', + manage_repo => false, + package_url => '#{v[:snapshot_package]}', MANIFEST else <<-MANIFEST - manage_repo => false, - package_url => '#{v[:snapshot_package]}', + # Hard version set here due to plugin incompatibilities. 
+ version => '#{v[:elasticsearch_full_version]}', MANIFEST end - <<-MANIFEST - api_timeout => 60, - config => { -#{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} - }, - jvm_options => [ - '-Xms128m', - '-Xmx128m', - ], - oss => #{v[:oss]}, - #{package} + <<~MANIFEST + api_timeout => 60, + config => { + #{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} + }, + jvm_options => [ + '-Xms128m', + '-Xmx128m', + ], + oss => #{v[:oss]}, + #{package} MANIFEST end context 'testing with' do describe 'simple config' do include_examples('basic acceptance tests', es_config) end include_examples('module removal', es_config) end include_examples('template operations', es_config, v[:template]) include_examples('pipeline operations', es_config, v[:pipeline]) - include_examples( - 'plugin acceptance tests', - es_config, - v[:elasticsearch_plugins] - ) unless v[:elasticsearch_plugins].empty? + unless v[:elasticsearch_plugins].empty? + include_examples( + 'plugin acceptance tests', + es_config, + v[:elasticsearch_plugins] + ) + end include_examples('snapshot repository acceptance tests') include_examples('datadir acceptance tests', es_config) # Skip this for snapshot testing, as we only have package files anyway. include_examples('package_url acceptance tests', es_config) unless v[:is_snapshot] include_examples('hiera acceptance tests', es_config, v[:elasticsearch_plugins]) # Security-related tests (shield/x-pack). # # Skip OSS-only distributions since they do not bundle x-pack, and skip # snapshots since we they don't recognize prod licenses. 
- include_examples('security acceptance tests', es_config) unless v[:oss] or v[:is_snapshot] + include_examples('security acceptance tests', es_config) unless v[:oss] || v[:is_snapshot] end diff --git a/spec/classes/000_elasticsearch_init_spec.rb b/spec/classes/000_elasticsearch_init_spec.rb index 1bd73cc..34b8ad5 100644 --- a/spec/classes/000_elasticsearch_init_spec.rb +++ b/spec/classes/000_elasticsearch_init_spec.rb @@ -1,440 +1,525 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch', :type => 'class' do +describe 'elasticsearch', type: 'class' do default_params = { - :config => { 'node.name' => 'foo' } + config: { 'node.name' => 'foo' } } + # rubocop:disable RSpec/MultipleMemoizedHelpers on_supported_os.each do |os, facts| context "on #{os}" do case facts[:os]['family'] when 'Debian' let(:defaults_path) { '/etc/default' } let(:system_service_folder) { '/lib/systemd/system' } let(:pkg_ext) { 'deb' } let(:pkg_prov) { 'dpkg' } let(:version_add) { '' } - if (facts[:os]['name'] == 'Debian' and \ - facts[:os]['release']['major'].to_i >= 8) or \ - (facts[:os]['name'] == 'Ubuntu' and \ + + if (facts[:os]['name'] == 'Debian' && \ + facts[:os]['release']['major'].to_i >= 8) || \ + (facts[:os]['name'] == 'Ubuntu' && \ facts[:os]['release']['major'].to_i >= 15) let(:systemd_service_path) { '/lib/systemd/system' } + test_pid = true else test_pid = false end when 'RedHat' let(:defaults_path) { '/etc/sysconfig' } let(:system_service_folder) { '/lib/systemd/system' } let(:pkg_ext) { 'rpm' } let(:pkg_prov) { 'rpm' } let(:version_add) { '-1' } + if facts[:os]['release']['major'].to_i >= 7 let(:systemd_service_path) { '/lib/systemd/system' } + test_pid = true else test_pid = false end when 'Suse' let(:defaults_path) { '/etc/sysconfig' } let(:pkg_ext) { 'rpm' } let(:pkg_prov) { 'rpm' } let(:version_add) { '-1' } - if facts[:os]['name'] == 'OpenSuSE' and + + if facts[:os]['name'] == 'OpenSuSE' && facts[:os]['release']['major'].to_i <= 12 
let(:systemd_service_path) { '/lib/systemd/system' } else let(:systemd_service_path) { '/usr/lib/systemd/system' } end end let(:facts) do facts.merge('scenario' => '', 'common' => '', 'elasticsearch' => {}) end let(:params) do default_params.merge({}) end - it { should compile.with_all_deps } + it { is_expected.to compile.with_all_deps } # Varies depending on distro - it { should contain_augeas("#{defaults_path}/elasticsearch") } + it { is_expected.to contain_augeas("#{defaults_path}/elasticsearch") } # Systemd-specific files if test_pid == true - it { should contain_service('elasticsearch').with( - :ensure => 'running', - :enable => true - ) } + it { + expect(subject).to contain_service('elasticsearch').with( + ensure: 'running', + enable: true + ) + } end context 'java installation' do let(:pre_condition) do <<-MANIFEST include ::java MANIFEST end - it { should contain_class('elasticsearch::config') - .that_requires('Class[java]') } + it { + expect(subject).to contain_class('elasticsearch::config'). + that_requires('Class[java]') + } end context 'package installation' do context 'via repository' do context 'with specified version' do let(:params) do default_params.merge( - :version => '1.0' + version: '1.0' ) end - it { should contain_package('elasticsearch') - .with(:ensure => "1.0#{version_add}") } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: "1.0#{version_add}") + } end if facts[:os]['family'] == 'RedHat' context 'Handle special CentOS/RHEL package versioning' do let(:params) do default_params.merge( - :version => '1.1-2' + version: '1.1-2' ) end - it { should contain_package('elasticsearch') - .with(:ensure => '1.1-2') } + it { + expect(subject).to contain_package('elasticsearch'). 
+ with(ensure: '1.1-2') + } end end end context 'when setting package version and package_url' do let(:params) do default_params.merge( - :version => '0.90.10', - :package_url => "puppet:///path/to/some/es-0.90.10.#{pkg_ext}" + version: '0.90.10', + package_url: "puppet:///path/to/some/es-0.90.10.#{pkg_ext}" ) end - it { expect { should raise_error(Puppet::Error) } } + it { is_expected.to raise_error(Puppet::Error) } end context 'via package_url setting' do ['file:/', 'ftp://', 'http://', 'https://', 'puppet:///'].each do |schema| context "using #{schema} schema" do let(:params) do default_params.merge( - :package_url => "#{schema}domain-or-path/pkg.#{pkg_ext}" + package_url: "#{schema}domain-or-path/pkg.#{pkg_ext}" ) end unless schema.start_with? 'puppet' - it { should contain_exec('create_package_dir_elasticsearch') - .with(:command => 'mkdir -p /opt/elasticsearch/swdl') } - it { should contain_file('/opt/elasticsearch/swdl') - .with( - :purge => false, - :force => false, - :require => 'Exec[create_package_dir_elasticsearch]' - ) } + it { + expect(subject).to contain_exec('create_package_dir_elasticsearch'). + with(command: 'mkdir -p /opt/elasticsearch/swdl') + } + + it { + expect(subject).to contain_file('/opt/elasticsearch/swdl'). 
+ with( + purge: false, + force: false, + require: 'Exec[create_package_dir_elasticsearch]' + ) + } end case schema when 'file:/' - it { should contain_file( - "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" - ).with( - :source => "/domain-or-path/pkg.#{pkg_ext}", - :backup => false - ) } + it { + expect(subject).to contain_file( + "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" + ).with( + source: "/domain-or-path/pkg.#{pkg_ext}", + backup: false + ) + } when 'puppet:///' - it { should contain_file( - "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" - ).with( - :source => "#{schema}domain-or-path/pkg.#{pkg_ext}", - :backup => false - ) } + it { + expect(subject).to contain_file( + "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" + ).with( + source: "#{schema}domain-or-path/pkg.#{pkg_ext}", + backup: false + ) + } else [true, false].each do |verify_certificates| context "with download_tool_verify_certificates '#{verify_certificates}'" do let(:params) do default_params.merge( - :package_url => "#{schema}domain-or-path/pkg.#{pkg_ext}", - :download_tool_verify_certificates => verify_certificates + package_url: "#{schema}domain-or-path/pkg.#{pkg_ext}", + download_tool_verify_certificates: verify_certificates ) end - flag = (not verify_certificates) ? ' --no-check-certificate' : '' + flag = verify_certificates ? '' : ' --no-check-certificate' - it { should contain_exec('download_package_elasticsearch') - .with( - :command => "wget#{flag} -O /opt/elasticsearch/swdl/pkg.#{pkg_ext} #{schema}domain-or-path/pkg.#{pkg_ext} 2> /dev/null", - :require => 'File[/opt/elasticsearch/swdl]' - ) } + it { + expect(subject).to contain_exec('download_package_elasticsearch'). 
+ with( + command: "wget#{flag} -O /opt/elasticsearch/swdl/pkg.#{pkg_ext} #{schema}domain-or-path/pkg.#{pkg_ext} 2> /dev/null", + require: 'File[/opt/elasticsearch/swdl]' + ) + } end end end - it { should contain_package('elasticsearch') - .with( - :ensure => 'present', - :source => "/opt/elasticsearch/swdl/pkg.#{pkg_ext}", - :provider => pkg_prov - ) } + it { + expect(subject).to contain_package('elasticsearch'). + with( + ensure: 'present', + source: "/opt/elasticsearch/swdl/pkg.#{pkg_ext}", + provider: pkg_prov + ) + } end end context 'using http:// schema with proxy_url' do let(:params) do default_params.merge( - :package_url => "http://www.domain.com/package.#{pkg_ext}", - :proxy_url => 'http://proxy.example.com:12345/' + package_url: "http://www.domain.com/package.#{pkg_ext}", + proxy_url: 'http://proxy.example.com:12345/' ) end - it { should contain_exec('download_package_elasticsearch') - .with( - :environment => [ - 'use_proxy=yes', - 'http_proxy=http://proxy.example.com:12345/', - 'https_proxy=http://proxy.example.com:12345/' - ] - ) } + it { + expect(subject).to contain_exec('download_package_elasticsearch'). + with( + environment: [ + 'use_proxy=yes', + 'http_proxy=http://proxy.example.com:12345/', + 'https_proxy=http://proxy.example.com:12345/' + ] + ) + } end end - end # package + end context 'when setting the module to absent' do let(:params) do default_params.merge( - :ensure => 'absent' + ensure: 'absent' ) end case facts[:os]['family'] when 'Suse' - it { should contain_package('elasticsearch') - .with(:ensure => 'absent') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'absent') + } else - it { should contain_package('elasticsearch') - .with(:ensure => 'purged') } + it { + expect(subject).to contain_package('elasticsearch'). 
+ with(ensure: 'purged') + } end - it { should contain_service('elasticsearch') - .with( - :ensure => 'stopped', - :enable => 'false' - ) } - it { should contain_file('/usr/share/elasticsearch/plugins') - .with(:ensure => 'absent') } - it { should contain_file("#{defaults_path}/elasticsearch") - .with(:ensure => 'absent') } + it { + expect(subject).to contain_service('elasticsearch'). + with( + ensure: 'stopped', + enable: 'false' + ) + } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch/plugins'). + with(ensure: 'absent') + } + + it { + expect(subject).to contain_file("#{defaults_path}/elasticsearch"). + with(ensure: 'absent') + } end context 'When managing the repository' do let(:params) do default_params.merge( - :manage_repo => true + manage_repo: true ) end - it { should contain_class('elastic_stack::repo') } + it { is_expected.to contain_class('elastic_stack::repo') } end context 'When not managing the repository' do let(:params) do default_params.merge( - :manage_repo => false + manage_repo: false ) end - it { should compile.with_all_deps } + it { is_expected.to compile.with_all_deps } end end end + # rubocop:enable RSpec/MultipleMemoizedHelpers on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'main class tests' do # init.pp - it { should compile.with_all_deps } - it { should contain_class('elasticsearch') } - it { should contain_class('elasticsearch::package') } - it { should contain_class('elasticsearch::config') - .that_requires('Class[elasticsearch::package]') } - it { should contain_class('elasticsearch::service') - .that_requires('Class[elasticsearch::config]') } + it { is_expected.to 
compile.with_all_deps } + it { is_expected.to contain_class('elasticsearch') } + it { is_expected.to contain_class('elasticsearch::package') } + + it { + expect(subject).to contain_class('elasticsearch::config'). + that_requires('Class[elasticsearch::package]') + } + + it { + expect(subject).to contain_class('elasticsearch::service'). + that_requires('Class[elasticsearch::config]') + } # Base directories - it { should contain_file('/etc/elasticsearch') } - it { should contain_file('/usr/share/elasticsearch') } - it { should contain_file('/usr/share/elasticsearch/lib') } - it { should contain_file('/var/lib/elasticsearch') } + it { is_expected.to contain_file('/etc/elasticsearch') } + it { is_expected.to contain_file('/usr/share/elasticsearch') } + it { is_expected.to contain_file('/usr/share/elasticsearch/lib') } + it { is_expected.to contain_file('/var/lib/elasticsearch') } - it { should contain_exec('remove_plugin_dir') } + it { is_expected.to contain_exec('remove_plugin_dir') } end context 'package installation' do describe 'with default package' do - it { should contain_package('elasticsearch') - .with(:ensure => 'present') } - it { should_not contain_package('my-elasticsearch') - .with(:ensure => 'present') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'present') + } + + it { + expect(subject).not_to contain_package('my-elasticsearch'). + with(ensure: 'present') + } end describe 'with specified package name' do let(:params) do default_params.merge( - :package_name => 'my-elasticsearch' + package_name: 'my-elasticsearch' ) end - it { should contain_package('elasticsearch') - .with(:ensure => 'present', :name => 'my-elasticsearch') } - it { should_not contain_package('elasticsearch') - .with(:ensure => 'present', :name => 'elasticsearch') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'present', name: 'my-elasticsearch') + } + + it { + expect(subject).not_to contain_package('elasticsearch'). 
+ with(ensure: 'present', name: 'elasticsearch') + } end describe 'with auto upgrade enabled' do let(:params) do default_params.merge( - :autoupgrade => true + autoupgrade: true ) end - it { should contain_package('elasticsearch') - .with(:ensure => 'latest') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'latest') + } end end describe 'running a a different user' do let(:params) do default_params.merge( - :elasticsearch_user => 'myesuser', - :elasticsearch_group => 'myesgroup' + elasticsearch_user: 'myesuser', + elasticsearch_group: 'myesgroup' ) end - it { should contain_file('/etc/elasticsearch') - .with(:owner => 'myesuser', :group => 'myesgroup') } - it { should contain_file('/var/log/elasticsearch') - .with(:owner => 'myesuser') } - it { should contain_file('/usr/share/elasticsearch') - .with(:owner => 'myesuser', :group => 'myesgroup') } - it { should contain_file('/var/lib/elasticsearch') - .with(:owner => 'myesuser', :group => 'myesgroup') } + it { + expect(subject).to contain_file('/etc/elasticsearch'). + with(owner: 'myesuser', group: 'myesgroup') + } + + it { + expect(subject).to contain_file('/var/log/elasticsearch'). + with(owner: 'myesuser') + } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch'). + with(owner: 'myesuser', group: 'myesgroup') + } + + it { + expect(subject).to contain_file('/var/lib/elasticsearch'). + with(owner: 'myesuser', group: 'myesgroup') + } end describe 'setting jvm_options' do jvm_options = [ '-Xms16g', '-Xmx16g' ] let(:params) do default_params.merge( - :jvm_options => jvm_options + jvm_options: jvm_options ) end jvm_options.each do |jvm_option| - it { should contain_file_line("jvm_option_#{jvm_option}") - .with( - :ensure => 'present', - :path => '/etc/elasticsearch/jvm.options', - :line => jvm_option - )} + it { + expect(subject).to contain_file_line("jvm_option_#{jvm_option}"). 
+ with( + ensure: 'present', + path: '/etc/elasticsearch/jvm.options', + line: jvm_option + ) + } end end context 'with restart_on_change => true' do let(:params) do default_params.merge( - :restart_on_change => true + restart_on_change: true ) end describe 'should restart elasticsearch' do - it { should contain_file('/etc/elasticsearch/elasticsearch.yml') - .that_notifies('Service[elasticsearch]')} + it { + expect(subject).to contain_file('/etc/elasticsearch/elasticsearch.yml'). + that_notifies('Service[elasticsearch]') + } end describe 'setting jvm_options triggers restart' do let(:params) do super().merge( - :jvm_options => ['-Xmx16g'] + jvm_options: ['-Xmx16g'] ) end - it { should contain_file_line('jvm_option_-Xmx16g') - .that_notifies('Service[elasticsearch]')} + it { + expect(subject).to contain_file_line('jvm_option_-Xmx16g'). + that_notifies('Service[elasticsearch]') + } end end # This check helps catch dependency cycles. context 'create_resource' do # Helper for these tests - def singular(s) - case s + def singular(string) + case string when 'indices' 'index' when 'snapshot_repositories' 'snapshot_repository' else - s[0..-2] + string[0..-2] end end { 'indices' => { 'test-index' => {} }, # 'instances' => { 'es-instance' => {} }, 'pipelines' => { 'testpipeline' => { 'content' => {} } }, 'plugins' => { 'head' => {} }, 'roles' => { 'elastic_role' => {} }, 'scripts' => { 'foo' => { 'source' => 'puppet:///path/to/foo.groovy' } }, 'snapshot_repositories' => { 'backup' => { 'location' => '/backups' } }, 'templates' => { 'foo' => { 'content' => {} } }, 'users' => { 'elastic' => { 'password' => 'foobar' } } }.each_pair do |deftype, params| describe deftype do let(:params) do default_params.merge( deftype => params ) end - it { should compile } - it { should send( - "contain_elasticsearch__#{singular(deftype)}", params.keys.first - ) } + + it { is_expected.to compile } + + it { + expect(subject).to send( + "contain_elasticsearch__#{singular(deftype)}", 
params.keys.first + ) + } end end end describe 'oss' do let(:params) do - default_params.merge(:oss => true) + default_params.merge(oss: true) end it do - should contain_package('elasticsearch').with( - :name => 'elasticsearch-oss' + expect(subject).to contain_package('elasticsearch').with( + name: 'elasticsearch-oss' ) end end end end end diff --git a/spec/classes/001_hiera_spec.rb b/spec/classes/001_hiera_spec.rb index e7ad80a..9946e15 100644 --- a/spec/classes/001_hiera_spec.rb +++ b/spec/classes/001_hiera_spec.rb @@ -1,213 +1,244 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch', :type => 'class' do +describe 'elasticsearch', type: 'class' do default_params = { - :config => { 'node.name' => 'foo' } + config: { 'node.name' => 'foo' } } let(:params) do default_params.merge({}) end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do context 'hiera' do describe 'indices' do context 'single indices' do - let(:facts) { facts.merge(:scenario => 'singleindex') } - - it { should contain_elasticsearch__index('baz') - .with( - :ensure => 'present', - :settings => { - 'index' => { - 'number_of_shards' => 1 + let(:facts) { facts.merge(scenario: 'singleindex') } + + it { + expect(subject).to contain_elasticsearch__index('baz'). 
+ with( + ensure: 'present', + settings: { + 'index' => { + 'number_of_shards' => 1 + } } - } - ) } - it { should contain_elasticsearch_index('baz') } - it { should contain_es_instance_conn_validator( - 'baz-index-conn-validator' - ) } + ) + } + + it { is_expected.to contain_elasticsearch_index('baz') } + + it { + expect(subject).to contain_es_instance_conn_validator( + 'baz-index-conn-validator' + ) + } end context 'no indices' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__index('baz') } + it { is_expected.not_to contain_elasticsearch__index('baz') } end end context 'config' do - let(:facts) { facts.merge(:scenario => 'singleinstance') } - - it { should contain_augeas('/etc/sysconfig/elasticsearch') } - it { should contain_file('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat_fragment('main_config') } - it { should contain_service('elasticsearch').with( - :ensure => 'running', - :enable => true - ) } - end # of config + let(:facts) { facts.merge(scenario: 'singleinstance') } + + it { is_expected.to contain_augeas('/etc/sysconfig/elasticsearch') } + it { is_expected.to contain_file('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat_fragment('main_config') } + + it { + expect(subject).to contain_service('elasticsearch').with( + ensure: 'running', + enable: true + ) + } + end describe 'pipelines' do context 'single pipeline' do - let(:facts) { facts.merge(:scenario => 'singlepipeline') } - - it { should contain_elasticsearch__pipeline('testpipeline') - .with( - :ensure => 'present', - :content => { - 'description' => 'Add the foo field', - 'processors' => [ - { - 'set' => { - 'field' => 'foo', - 'value' => 'bar' + let(:facts) { facts.merge(scenario: 'singlepipeline') } + + it { + 
expect(subject).to contain_elasticsearch__pipeline('testpipeline'). + with( + ensure: 'present', + content: { + 'description' => 'Add the foo field', + 'processors' => [ + { + 'set' => { + 'field' => 'foo', + 'value' => 'bar' + } } - } - ] - } - ) } - it { should contain_elasticsearch_pipeline('testpipeline') } + ] + } + ) + } + + it { is_expected.to contain_elasticsearch_pipeline('testpipeline') } end context 'no pipelines' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__pipeline('testpipeline') } + it { is_expected.not_to contain_elasticsearch__pipeline('testpipeline') } end end describe 'plugins' do context 'single plugin' do - let(:facts) { facts.merge(:scenario => 'singleplugin') } - - it { should contain_elasticsearch__plugin('mobz/elasticsearch-head') - .with( - :ensure => 'present', - :module_dir => 'head' - ) } - it { should contain_elasticsearch_plugin('mobz/elasticsearch-head') } + let(:facts) { facts.merge(scenario: 'singleplugin') } + + it { + expect(subject).to contain_elasticsearch__plugin('mobz/elasticsearch-head'). 
+ with( + ensure: 'present', + module_dir: 'head' + ) + } + + it { is_expected.to contain_elasticsearch_plugin('mobz/elasticsearch-head') } end context 'no plugins' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } + it { + expect(subject).not_to contain_elasticsearch__plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } end end describe 'roles' do context 'single roles' do - let(:facts) { facts.merge(:scenario => 'singlerole') } + let(:facts) { facts.merge(scenario: 'singlerole') } let(:params) do default_params end - it { should contain_elasticsearch__role('admin') - .with( - :ensure => 'present', - :privileges => { - 'cluster' => 'monitor', - 'indices' => { - '*' => 'all' - } - }, - :mappings => [ - 'cn=users,dc=example,dc=com' - ] - ) } - it { should contain_elasticsearch_role('admin') } - it { should contain_elasticsearch_role_mapping('admin') } + it { + expect(subject).to contain_elasticsearch__role('admin'). 
+ with( + ensure: 'present', + privileges: { + 'cluster' => 'monitor', + 'indices' => { + '*' => 'all' + } + }, + mappings: [ + 'cn=users,dc=example,dc=com' + ] + ) + } + + it { is_expected.to contain_elasticsearch_role('admin') } + it { is_expected.to contain_elasticsearch_role_mapping('admin') } end context 'no roles' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__role('admin') } + it { is_expected.not_to contain_elasticsearch__role('admin') } end end describe 'scripts' do context 'single scripts' do - let(:facts) { facts.merge(:scenario => 'singlescript') } - - it { should contain_elasticsearch__script('myscript') - .with( - :ensure => 'present', - :source => 'puppet:///file/here' - ) } - it { should contain_file('/usr/share/elasticsearch/scripts/here') } + let(:facts) { facts.merge(scenario: 'singlescript') } + + it { + expect(subject).to contain_elasticsearch__script('myscript'). + with( + ensure: 'present', + source: 'puppet:///file/here' + ) + } + + it { is_expected.to contain_file('/usr/share/elasticsearch/scripts/here') } end context 'no roles' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__script('myscript') } + it { is_expected.not_to contain_elasticsearch__script('myscript') } end end describe 'templates' do context 'single template' do - let(:facts) { facts.merge(:scenario => 'singletemplate') } - - it { should contain_elasticsearch__template('foo') - .with( - :ensure => 'present', - :content => { - 'template' => 'foo-*', - 'settings' => { - 'index' => { - 'number_of_replicas' => 0 + let(:facts) { facts.merge(scenario: 'singletemplate') } + + it { + expect(subject).to contain_elasticsearch__template('foo'). 
+ with( + ensure: 'present', + content: { + 'template' => 'foo-*', + 'settings' => { + 'index' => { + 'number_of_replicas' => 0 + } } } - } - ) } - it { should contain_elasticsearch_template('foo') } + ) + } + + it { is_expected.to contain_elasticsearch_template('foo') } end context 'no templates' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__template('foo') } + it { is_expected.not_to contain_elasticsearch__template('foo') } end end describe 'users' do context 'single users' do - let(:facts) { facts.merge(:scenario => 'singleuser') } + let(:facts) { facts.merge(scenario: 'singleuser') } let(:params) do default_params end - it { should contain_elasticsearch__user('elastic') - .with( - :ensure => 'present', - :roles => ['admin'], - :password => 'password' - ) } - it { should contain_elasticsearch_user('elastic') } + it { + expect(subject).to contain_elasticsearch__user('elastic'). + with( + ensure: 'present', + roles: ['admin'], + password: 'password' + ) + } + + it { is_expected.to contain_elasticsearch_user('elastic') } end context 'no users' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__user('elastic') } + it { is_expected.not_to contain_elasticsearch__user('elastic') } end end end end end end diff --git a/spec/classes/006_elasticsearch_license_spec.rb b/spec/classes/006_elasticsearch_license_spec.rb index 0c2579d..580bdad 100644 --- a/spec/classes/006_elasticsearch_license_spec.rb +++ b/spec/classes/006_elasticsearch_license_spec.rb @@ -1,85 +1,89 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::license', :type => 'class' do +describe 'elasticsearch::license', type: 'class' do # First, randomly select one of our supported OSes to run tests that apply # to any distro on_supported_os.to_a.sample(1).to_h.each do |os, facts| context "on #{os}" do 
let(:facts) do facts.merge('scenario' => '', 'common' => '') end context 'when managing x-pack license' do let(:params) do { - :content => { + content: { 'license' => { - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date_in_millis' => 1_519_341_125_550, + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date_in_millis' => 1_519_341_125_550, 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'signature' => 'secretvalue', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'signature' => 'secretvalue', + 'start_date_in_millis' => 1_513_814_400_000 } } } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_class('elasticsearch::license') + expect(subject).to contain_class('elasticsearch::license') end + it do - should contain_es_instance_conn_validator( + expect(subject).to contain_es_instance_conn_validator( 'license-conn-validator' ).that_comes_before('elasticsearch_license[xpack]') end + it do - should contain_elasticsearch_license('xpack').with( - :ensure => 'present', - :content => { + expect(subject).to contain_elasticsearch_license('xpack').with( + ensure: 'present', + content: { 'license' => { - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date_in_millis' => 1_519_341_125_550, + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date_in_millis' => 1_519_341_125_550, 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'signature' => 
'secretvalue', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'signature' => 'secretvalue', + 'start_date_in_millis' => 1_513_814_400_000 } }, - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end end end end diff --git a/spec/classes/010_elasticsearch_init_unkown_spec.rb b/spec/classes/010_elasticsearch_init_unkown_spec.rb index 6efed4a..95bad53 100644 --- a/spec/classes/010_elasticsearch_init_unkown_spec.rb +++ b/spec/classes/010_elasticsearch_init_unkown_spec.rb @@ -1,13 +1,15 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch', :type => 'class' do +describe 'elasticsearch', type: 'class' do context 'on an unknown OS' do context 'it should fail' do let(:facts) do - { :operatingsystem => 'Windows' } + { operatingsystem: 'Windows' } end - it { expect { should raise_error(Puppet::Error) } } + it { is_expected.to raise_error(Puppet::Error) } end end end diff --git a/spec/classes/099_coverage_spec.rb b/spec/classes/099_coverage_spec.rb index 03491b5..03c6441 100644 --- a/spec/classes/099_coverage_spec.rb +++ b/spec/classes/099_coverage_spec.rb @@ -1 +1,3 @@ +# frozen_string_literal: true + at_exit { RSpec::Puppet::Coverage.report! 
100 } diff --git a/spec/defines/003_elasticsearch_template_spec.rb b/spec/defines/003_elasticsearch_template_spec.rb index 6c316b7..e4f4d67 100644 --- a/spec/defines/003_elasticsearch_template_spec.rb +++ b/spec/defines/003_elasticsearch_template_spec.rb @@ -1,133 +1,138 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::template', :type => 'define' do +describe 'elasticsearch::template', type: 'define' do on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end let(:title) { 'foo' } let(:pre_condition) do 'class { "elasticsearch" : }' end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', :content => '{}', param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do - let(:pre_condition) {} - it { should_not compile } + it { is_expected.not_to compile } end end describe 'template from source' do let :params do { - :ensure => 'present', - :source => 'puppet:///path/to/foo.json', - :api_protocol => 'https', - :api_host => '127.0.0.1', - :api_port => 9201, - :api_timeout => 11, - :api_basic_auth_username => 'elastic', - :api_basic_auth_password => 'password', - :validate_tls => false + ensure: 'present', + source: 'puppet:///path/to/foo.json', + api_protocol: 'https', + api_host: '127.0.0.1', + api_port: 9201, + api_timeout: 11, + api_basic_auth_username: 'elastic', + api_basic_auth_password: 'password', + validate_tls: false } end - it { should 
contain_elasticsearch__template('foo') } + it { is_expected.to contain_elasticsearch__template('foo') } + it do - should contain_es_instance_conn_validator('foo-template') - .that_comes_before('Elasticsearch_template[foo]') + expect(subject).to contain_es_instance_conn_validator('foo-template'). + that_comes_before('Elasticsearch_template[foo]') end + it 'passes through parameters' do - should contain_elasticsearch_template('foo').with( - :ensure => 'present', - :source => 'puppet:///path/to/foo.json', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :validate_tls => false + expect(subject).to contain_elasticsearch_template('foo').with( + ensure: 'present', + source: 'puppet:///path/to/foo.json', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + validate_tls: false ) end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present', - :content => '{}' + ensure: 'present', + content: '{}' } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_elasticsearch_template('foo').with( - :ensure => 'present', - :content => '{}', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch_template('foo').with( + ensure: 'present', + content: '{}', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: 
false ) end end describe 'template deletion' do let :params do { - :ensure => 'absent' + ensure: 'absent' } end it 'removes templates' do - should contain_elasticsearch_template('foo').with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_template('foo').with(ensure: 'absent') end end end end end diff --git a/spec/defines/004_elasticsearch_plugin_spec.rb b/spec/defines/004_elasticsearch_plugin_spec.rb index f22e321..73cecc4 100644 --- a/spec/defines/004_elasticsearch_plugin_spec.rb +++ b/spec/defines/004_elasticsearch_plugin_spec.rb @@ -1,307 +1,372 @@ +# frozen_string_literal: true + require 'spec_helper' +require 'helpers/class_shared_examples' -describe 'elasticsearch::plugin', :type => 'define' do +describe 'elasticsearch::plugin', type: 'define' do let(:title) { 'mobz/elasticsearch-head/1.0.0' } on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |_os, facts| let(:facts) do facts.merge('scenario' => '', 'common' => '') end let(:pre_condition) do <<-EOS class { "elasticsearch": config => { "node" => { "name" => "test" } } } EOS end context 'default values' do context 'present' do - let(:params) do { - :ensure => 'present', - :configdir => '/etc/elasticsearch' - } end + let(:params) do + { + ensure: 'present', + configdir: '/etc/elasticsearch' + } + end it { is_expected.to compile } end context 'absent' do - let(:params) do { - :ensure => 'absent' - } end + let(:params) do + { + ensure: 'absent' + } + end it { is_expected.to compile } end context 'configdir' do - it { should contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ).with_configdir('/etc/elasticsearch') } + it { + expect(subject).to contain_elasticsearch__plugin( + 'mobz/elasticsearch-head/1.0.0' + ).with_configdir('/etc/elasticsearch') + } - it { should contain_elasticsearch_plugin( - 'mobz/elasticsearch-head/1.0.0' - 
).with_configdir('/etc/elasticsearch') } + it { + expect(subject).to contain_elasticsearch_plugin( + 'mobz/elasticsearch-head/1.0.0' + ).with_configdir('/etc/elasticsearch') + } end end context 'with module_dir' do context 'add a plugin' do - let(:params) do { - :ensure => 'present', - :module_dir => 'head' - } end - - it { should contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } - it { should contain_elasticsearch_plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } - it { should contain_file( - '/usr/share/elasticsearch/plugins/head' - ).that_requires( - 'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' - ) } + let(:params) do + { + ensure: 'present', + module_dir: 'head' + } + end + + it { + expect(subject).to contain_elasticsearch__plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } + + it { + expect(subject).to contain_elasticsearch_plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } + + it { + expect(subject).to contain_file( + '/usr/share/elasticsearch/plugins/head' + ).that_requires( + 'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' + ) + } end context 'remove a plugin' do - let(:params) do { - :ensure => 'absent', - :module_dir => 'head' - } end - - it { should contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } - it { should contain_elasticsearch_plugin( - 'mobz/elasticsearch-head/1.0.0' - ).with( - :ensure => 'absent' - ) } - it { should contain_file( - '/usr/share/elasticsearch/plugins/head' - ).that_requires( - 'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' - ) } + let(:params) do + { + ensure: 'absent', + module_dir: 'head' + } + end + + it { + expect(subject).to contain_elasticsearch__plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } + + it { + expect(subject).to contain_elasticsearch_plugin( + 'mobz/elasticsearch-head/1.0.0' + ).with( + ensure: 'absent' + ) + } + + it { + expect(subject).to contain_file( + '/usr/share/elasticsearch/plugins/head' + ).that_requires( + 
'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' + ) + } end end context 'with url' do context 'add a plugin with full name' do - let(:params) do { - :ensure => 'present', - :url => 'https://github.com/mobz/elasticsearch-head/archive/master.zip' - } end + let(:params) do + { + ensure: 'present', + url: 'https://github.com/mobz/elasticsearch-head/archive/master.zip' + } + end - it { should contain_elasticsearch__plugin('mobz/elasticsearch-head/1.0.0') } - it { should contain_elasticsearch_plugin('mobz/elasticsearch-head/1.0.0').with(:ensure => 'present', :url => 'https://github.com/mobz/elasticsearch-head/archive/master.zip') } + it { is_expected.to contain_elasticsearch__plugin('mobz/elasticsearch-head/1.0.0') } + it { is_expected.to contain_elasticsearch_plugin('mobz/elasticsearch-head/1.0.0').with(ensure: 'present', url: 'https://github.com/mobz/elasticsearch-head/archive/master.zip') } end end context 'offline plugin install' do let(:title) { 'head' } - let(:params) do { - :ensure => 'present', - :source => 'puppet:///path/to/my/plugin.zip' - } end - - it { should contain_elasticsearch__plugin('head') } - it { should contain_file('/opt/elasticsearch/swdl/plugin.zip').with(:source => 'puppet:///path/to/my/plugin.zip', :before => 'Elasticsearch_plugin[head]') } - it { should contain_elasticsearch_plugin('head').with(:ensure => 'present', :source => '/opt/elasticsearch/swdl/plugin.zip') } + let(:params) do + { + ensure: 'present', + source: 'puppet:///path/to/my/plugin.zip' + } + end + + it { is_expected.to contain_elasticsearch__plugin('head') } + it { is_expected.to contain_file('/opt/elasticsearch/swdl/plugin.zip').with(source: 'puppet:///path/to/my/plugin.zip', before: 'Elasticsearch_plugin[head]') } + it { is_expected.to contain_elasticsearch_plugin('head').with(ensure: 'present', source: '/opt/elasticsearch/swdl/plugin.zip') } end describe 'service restarts' do let(:title) { 'head' } - let(:params) do { - :ensure => 'present', - :module_dir => 'head' - } 
end + let(:params) do + { + ensure: 'present', + module_dir: 'head' + } + end context 'restart_on_change set to false (default)' do let(:pre_condition) do <<-EOS class { "elasticsearch": } EOS end - it { should_not contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).not_to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples 'class', :sysv end context 'restart_on_change set to true' do let(:pre_condition) do <<-EOS class { "elasticsearch": restart_on_change => true, } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples('class') end context 'restart_plugin_change set to false (default)' do let(:pre_condition) do <<-EOS class { "elasticsearch": restart_plugin_change => false, } EOS end - it { should_not contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).not_to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples('class') end context 'restart_plugin_change set to true' do let(:pre_condition) do <<-EOS class { "elasticsearch": restart_plugin_change => true, } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples('class') end end describe 'proxy arguments' do let(:title) { 'head' } context 'unauthenticated' do context 'on define' do - let(:params) do { - :ensure => 'present', - :proxy_host => 'es.local', - :proxy_port => 8080 - } end + let(:params) do + { + ensure: 'present', + proxy_host: 'es.local', + proxy_port: 8080 + } + end - it { should 
contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'http://es.local:8080' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'http://es.local:8080' + ) + } end context 'on main class' do - let(:params) do { - :ensure => 'present' - } end + let(:params) do + { + ensure: 'present' + } + end let(:pre_condition) do <<-EOS class { 'elasticsearch': proxy_url => 'https://es.local:8080', } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'https://es.local:8080' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'https://es.local:8080' + ) + } end end context 'authenticated' do context 'on define' do - let(:params) do { - :ensure => 'present', - :proxy_host => 'es.local', - :proxy_port => 8080, - :proxy_username => 'elastic', - :proxy_password => 'password' - } end - - it { should contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'http://elastic:password@es.local:8080' - )} + let(:params) do + { + ensure: 'present', + proxy_host: 'es.local', + proxy_port: 8080, + proxy_username: 'elastic', + proxy_password: 'password' + } + end + + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'http://elastic:password@es.local:8080' + ) + } end context 'on main class' do - let(:params) do { - :ensure => 'present' - } end + let(:params) do + { + ensure: 'present' + } + end let(:pre_condition) do <<-EOS class { 'elasticsearch': proxy_url => 'http://elastic:password@es.local:8080', } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'http://elastic:password@es.local:8080' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'http://elastic:password@es.local:8080' + ) + } end end end describe 'collector ordering' do describe 'present' do let(:title) { 'head' } let(:pre_condition) do <<-EOS class { 'elasticsearch': } EOS end - it { should contain_elasticsearch__plugin( - 
'head' - ).that_requires( - 'Class[elasticsearch::config]' - )} - - it { should contain_elasticsearch_plugin( - 'head' - ).that_comes_before( - 'Service[elasticsearch]' - )} + it { + expect(subject).to contain_elasticsearch__plugin( + 'head' + ).that_requires( + 'Class[elasticsearch::config]' + ) + } + + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).that_comes_before( + 'Service[elasticsearch]' + ) + } include_examples 'class' end end end end diff --git a/spec/defines/006_elasticsearch_script_spec.rb b/spec/defines/006_elasticsearch_script_spec.rb index 84414cd..bab1541 100644 --- a/spec/defines/006_elasticsearch_script_spec.rb +++ b/spec/defines/006_elasticsearch_script_spec.rb @@ -1,81 +1,99 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::script', :type => 'define' do +describe 'elasticsearch::script', type: 'define' do let(:title) { 'foo' } let(:pre_condition) do %( class { "elasticsearch": config => { "node" => {"name" => "test" } } } ) end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'missing parent class' do - let(:pre_condition) {} - it { should_not compile } + it { is_expected.not_to compile } end describe 'adding script files' do - let(:params) do { - :ensure => 'present', - :source => 'puppet:///path/to/foo.groovy' - } end + let(:params) do + { + ensure: 'present', + source: 'puppet:///path/to/foo.groovy' + } + end - it { should contain_elasticsearch__script('foo') } - it { should contain_file('/usr/share/elasticsearch/scripts/foo.groovy') - .with( - :source => 'puppet:///path/to/foo.groovy', - :ensure => 'present' - ) } + it { is_expected.to 
contain_elasticsearch__script('foo') } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch/scripts/foo.groovy'). + with( + source: 'puppet:///path/to/foo.groovy', + ensure: 'present' + ) + } end describe 'adding script directories' do - let(:params) do { - :ensure => 'directory', - :source => 'puppet:///path/to/my_scripts', - :recurse => 'remote' - } end + let(:params) do + { + ensure: 'directory', + source: 'puppet:///path/to/my_scripts', + recurse: 'remote' + } + end - it { should contain_elasticsearch__script('foo') } - it { should contain_file( - '/usr/share/elasticsearch/scripts/my_scripts' - ).with( - :ensure => 'directory', - :source => 'puppet:///path/to/my_scripts', - :recurse => 'remote' - ) } + it { is_expected.to contain_elasticsearch__script('foo') } + + it { + expect(subject).to contain_file( + '/usr/share/elasticsearch/scripts/my_scripts' + ).with( + ensure: 'directory', + source: 'puppet:///path/to/my_scripts', + recurse: 'remote' + ) + } end describe 'removing scripts' do - let(:params) do { - :ensure => 'absent', - :source => 'puppet:///path/to/foo.groovy' - } end + let(:params) do + { + ensure: 'absent', + source: 'puppet:///path/to/foo.groovy' + } + end - it { should contain_elasticsearch__script('foo') } - it { should contain_file('/usr/share/elasticsearch/scripts/foo.groovy') - .with( - :source => 'puppet:///path/to/foo.groovy', - :ensure => 'absent' - ) } + it { is_expected.to contain_elasticsearch__script('foo') } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch/scripts/foo.groovy'). 
+ with( + source: 'puppet:///path/to/foo.groovy', + ensure: 'absent' + ) + } end end end end diff --git a/spec/defines/007_elasticsearch_user_spec.rb b/spec/defines/007_elasticsearch_user_spec.rb index c8b3103..54b6278 100644 --- a/spec/defines/007_elasticsearch_user_spec.rb +++ b/spec/defines/007_elasticsearch_user_spec.rb @@ -1,120 +1,91 @@ +# frozen_string_literal: true + require 'spec_helper' +require 'helpers/class_shared_examples' describe 'elasticsearch::user' do let(:title) { 'elastic' } let(:pre_condition) do <<-EOS class { 'elasticsearch': } EOS end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end context 'with default parameters' do let(:params) do { - :password => 'foobar', - :roles => %w[monitor user] + password: 'foobar', + roles: %w[monitor user] } end - it { should contain_elasticsearch__user('elastic') } - it { should contain_elasticsearch_user('elastic') } + it { is_expected.to contain_elasticsearch__user('elastic') } + it { is_expected.to contain_elasticsearch_user('elastic') } + it do - should contain_elasticsearch_user_roles('elastic').with( + expect(subject).to contain_elasticsearch_user_roles('elastic').with( 'ensure' => 'present', - 'roles' => %w[monitor user] + 'roles' => %w[monitor user] ) end end describe 'collector ordering' do - describe 'when present' do - let(:pre_condition) do - <<-EOS - class { 'elasticsearch': } - elasticsearch::template { 'foo': content => {"foo" => "bar"} } - elasticsearch::role { 'test_role': - privileges => { - 'cluster' => 'monitor', - 'indices' => { - '*' => 'all', - }, + let(:pre_condition) do + <<-EOS + class { 'elasticsearch': } + elasticsearch::template { 'foo': content => {"foo" => 
"bar"} } + elasticsearch::role { 'test_role': + privileges => { + 'cluster' => 'monitor', + 'indices' => { + '*' => 'all', }, - } - EOS - end - - let(:params) do - { - :password => 'foobar', - :roles => %w[monitor user] + }, } - end - - it { should contain_elasticsearch__role('test_role') } - it { should contain_elasticsearch_role('test_role') } - it { should contain_elasticsearch_role_mapping('test_role') } - it { should contain_elasticsearch__user('elastic') - .that_comes_before([ - 'Elasticsearch::Template[foo]' - ]).that_requires([ - 'Elasticsearch::Role[test_role]' - ])} - - include_examples 'class', :systemd + EOS end - describe 'when absent' do - let(:pre_condition) do - <<-EOS - class { 'elasticsearch': } - elasticsearch::template { 'foo': content => {"foo" => "bar"} } - elasticsearch::role { 'test_role': - privileges => { - 'cluster' => 'monitor', - 'indices' => { - '*' => 'all', - }, - }, - } - EOS - end + let(:params) do + { + password: 'foobar', + roles: %w[monitor user] + } + end - let(:params) do - { - :password => 'foobar', - :roles => %w[monitor user] - } - end + it { is_expected.to contain_elasticsearch__role('test_role') } + it { is_expected.to contain_elasticsearch_role('test_role') } + it { is_expected.to contain_elasticsearch_role_mapping('test_role') } - it { should contain_elasticsearch__role('test_role') } - it { should contain_elasticsearch_role('test_role') } - it { should contain_elasticsearch_role_mapping('test_role') } - it { should contain_elasticsearch__user('elastic') - .that_comes_before([ - 'Elasticsearch::Template[foo]' - ]).that_requires([ - 'Elasticsearch::Role[test_role]' - ])} + it { + expect(subject).to contain_elasticsearch__user('elastic'). 
+ that_comes_before([ + 'Elasticsearch::Template[foo]' + ]).that_requires([ + 'Elasticsearch::Role[test_role]' + ]) + } - include_examples 'class', :systemd - end + include_examples 'class', :systemd end end end end diff --git a/spec/defines/008_elasticsearch_role_spec.rb b/spec/defines/008_elasticsearch_role_spec.rb index df1e6b3..eab1741 100644 --- a/spec/defines/008_elasticsearch_role_spec.rb +++ b/spec/defines/008_elasticsearch_role_spec.rb @@ -1,109 +1,118 @@ +# frozen_string_literal: true + require 'spec_helper' +require 'helpers/class_shared_examples' describe 'elasticsearch::role' do let(:title) { 'elastic_role' } let(:pre_condition) do <<-EOS class { 'elasticsearch': } EOS end let(:params) do { - :privileges => { + privileges: { 'cluster' => '*' }, - :mappings => [ + mappings: [ 'cn=users,dc=example,dc=com', 'cn=admins,dc=example,dc=com', 'cn=John Doe,cn=other users,dc=example,dc=com' ] } end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end context 'with an invalid role name' do context 'too long' do let(:title) { 'A' * 41 } - it { should raise_error(Puppet::Error, /expected length/i) } + + it { is_expected.to raise_error(Puppet::Error, %r{expected length}i) } end end context 'with default parameters' do - it { should contain_elasticsearch__role('elastic_role') } - it { should contain_elasticsearch_role('elastic_role') } + it { is_expected.to contain_elasticsearch__role('elastic_role') } + it { is_expected.to contain_elasticsearch_role('elastic_role') } + it do - should contain_elasticsearch_role_mapping('elastic_role').with( + expect(subject).to contain_elasticsearch_role_mapping('elastic_role').with( 'ensure' => 'present', 'mappings' 
=> [ 'cn=users,dc=example,dc=com', 'cn=admins,dc=example,dc=com', 'cn=John Doe,cn=other users,dc=example,dc=com' ] ) end end describe 'collector ordering' do describe 'when present' do let(:pre_condition) do <<-EOS class { 'elasticsearch': } elasticsearch::template { 'foo': content => {"foo" => "bar"} } elasticsearch::user { 'elastic': password => 'foobar', roles => ['elastic_role'], } EOS end - it { should contain_elasticsearch__role('elastic_role') - .that_comes_before([ - 'Elasticsearch::Template[foo]', - 'Elasticsearch::User[elastic]' - ])} + it { + expect(subject).to contain_elasticsearch__role('elastic_role'). + that_comes_before([ + 'Elasticsearch::Template[foo]', + 'Elasticsearch::User[elastic]' + ]) + } include_examples 'class', :systemd end describe 'when absent' do let(:pre_condition) do <<-EOS class { 'elasticsearch': } elasticsearch::template { 'foo': content => {"foo" => "bar"} } elasticsearch::user { 'elastic': password => 'foobar', roles => ['elastic_role'], } EOS end include_examples 'class', :systemd # TODO: Uncomment once upstream issue is fixed. 
# https://github.com/rodjek/rspec-puppet/issues/418 # it { should contain_elasticsearch__shield__role('elastic_role') # .that_comes_before([ # 'Elasticsearch::Template[foo]', # 'Elasticsearch::Plugin[shield]', # 'Elasticsearch::Shield::User[elastic]' # ])} end end end end end diff --git a/spec/defines/009_elasticsearch_pipeline_spec.rb b/spec/defines/009_elasticsearch_pipeline_spec.rb index e2456fe..9840b61 100644 --- a/spec/defines/009_elasticsearch_pipeline_spec.rb +++ b/spec/defines/009_elasticsearch_pipeline_spec.rb @@ -1,101 +1,104 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::pipeline', :type => 'define' do +describe 'elasticsearch::pipeline', type: 'define' do let(:title) { 'testpipeline' } let(:pre_condition) do 'class { "elasticsearch" : }' end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', :content => {}, param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do - let(:pre_condition) {} - it { should_not compile } + it { is_expected.not_to compile } end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present', - :content => {} + ensure: 'present', + content: {} } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', 
api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_elasticsearch__pipeline(title) - should contain_es_instance_conn_validator("#{title}-ingest-pipeline") - .that_comes_before("elasticsearch_pipeline[#{title}]") - should contain_elasticsearch_pipeline(title).with( - :ensure => 'present', - :content => {}, - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch__pipeline(title) + expect(subject).to contain_es_instance_conn_validator("#{title}-ingest-pipeline"). + that_comes_before("elasticsearch_pipeline[#{title}]") + expect(subject).to contain_elasticsearch_pipeline(title).with( + ensure: 'present', + content: {}, + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end describe 'pipeline deletion' do let :params do { - :ensure => 'absent' + ensure: 'absent' } end it 'removes pipelines' do - should contain_elasticsearch_pipeline(title).with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_pipeline(title).with(ensure: 'absent') end end end end end diff --git a/spec/defines/012_elasticsearch_index_spec.rb b/spec/defines/012_elasticsearch_index_spec.rb index 62596a2..d16fb90 100644 --- a/spec/defines/012_elasticsearch_index_spec.rb +++ b/spec/defines/012_elasticsearch_index_spec.rb @@ -1,100 +1,103 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::index', :type => 'define' do +describe 'elasticsearch::index', type: 'define' do let(:title) { 'test-index' } let(:pre_condition) do 'class { "elasticsearch" : }' end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + 
hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do - let(:pre_condition) {} - it { should_not compile } + it { is_expected.not_to compile } end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present' + ensure: 'present' } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_elasticsearch__index(title) - should contain_es_instance_conn_validator( + expect(subject).to contain_elasticsearch__index(title) + expect(subject).to contain_es_instance_conn_validator( "#{title}-index-conn-validator" ).that_comes_before("elasticsearch_index[#{title}]") - should contain_elasticsearch_index(title).with( - :ensure => 'present', - :settings => {}, - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch_index(title).with( + ensure: 'present', + settings: {}, + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + 
password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end describe 'index deletion' do let :params do { - :ensure => 'absent' + ensure: 'absent' } end it 'removes indices' do - should contain_elasticsearch_index(title).with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_index(title).with(ensure: 'absent') end end end end end diff --git a/spec/defines/013_elasticsearch_snapshot_repository_spec.rb b/spec/defines/013_elasticsearch_snapshot_repository_spec.rb index 694859c..a1233be 100644 --- a/spec/defines/013_elasticsearch_snapshot_repository_spec.rb +++ b/spec/defines/013_elasticsearch_snapshot_repository_spec.rb @@ -1,134 +1,139 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::snapshot_repository', :type => 'define' do +describe 'elasticsearch::snapshot_repository', type: 'define' do on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end let(:title) { 'backup' } let(:pre_condition) do 'class { "elasticsearch" : }' end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', :content => '{}', param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do - let(:pre_condition) {} - it { should_not compile } + it { is_expected.not_to compile } end end describe 'template from source' do let :params do { - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup', - :api_protocol => 'https', - 
:api_host => '127.0.0.1', - :api_port => 9201, - :api_timeout => 11, - :api_basic_auth_username => 'elastic', - :api_basic_auth_password => 'password', - :validate_tls => false + ensure: 'present', + location: '/var/lib/elasticsearch/backup', + api_protocol: 'https', + api_host: '127.0.0.1', + api_port: 9201, + api_timeout: 11, + api_basic_auth_username: 'elastic', + api_basic_auth_password: 'password', + validate_tls: false } end - it { should contain_elasticsearch__snapshot_repository('backup') } + it { is_expected.to contain_elasticsearch__snapshot_repository('backup') } + it do - should contain_es_instance_conn_validator('backup-snapshot') - .that_comes_before('Elasticsearch_snapshot_repository[backup]') + expect(subject).to contain_es_instance_conn_validator('backup-snapshot'). + that_comes_before('Elasticsearch_snapshot_repository[backup]') end + it 'passes through parameters' do - should contain_elasticsearch_snapshot_repository('backup').with( - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :validate_tls => false + expect(subject).to contain_elasticsearch_snapshot_repository('backup').with( + ensure: 'present', + location: '/var/lib/elasticsearch/backup', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + validate_tls: false ) end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup' + ensure: 'present', + location: '/var/lib/elasticsearch/backup' } end let(:pre_condition) do <<-MANIFEST class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } 
MANIFEST end it do - should contain_elasticsearch_snapshot_repository('backup').with( - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch_snapshot_repository('backup').with( + ensure: 'present', + location: '/var/lib/elasticsearch/backup', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end describe 'snapshot repository deletion' do let :params do { - :ensure => 'absent', - :location => '/var/lib/elasticsearch/backup' + ensure: 'absent', + location: '/var/lib/elasticsearch/backup' } end it 'removes snapshot repository' do - should contain_elasticsearch_snapshot_repository('backup').with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_snapshot_repository('backup').with(ensure: 'absent') end end end end end diff --git a/spec/functions/concat_merge_spec.rb b/spec/functions/concat_merge_spec.rb index 5b7421d..ebcc454 100644 --- a/spec/functions/concat_merge_spec.rb +++ b/spec/functions/concat_merge_spec.rb @@ -1,168 +1,199 @@ -require 'spec_helper' +# frozen_string_literal: true -# rubocop:disable Style/BracesAroundHashParameters -# rubocop:disable Style/IndentHash +require 'spec_helper' describe 'concat_merge' do describe 'exception handling' do - it { is_expected.to run.with_params.and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } - - it { is_expected.to run.with_params({}).and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } - - it { is_expected.to run.with_params('2', 2).and_raise_error( - Puppet::ParseError, /unexpected argument type/ - ) } - - it { is_expected.to run.with_params(2, 
'2').and_raise_error( - Puppet::ParseError, /unexpected argument type/ - ) } + it { + expect(subject).to run.with_params.and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } + + it { + expect(subject).to run.with_params({}).and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } + + it { + expect(subject).to run.with_params('2', 2).and_raise_error( + Puppet::ParseError, %r{unexpected argument type} + ) + } + + it { + expect(subject).to run.with_params(2, '2').and_raise_error( + Puppet::ParseError, %r{unexpected argument type} + ) + } end describe 'collisions' do context 'single keys' do - it { is_expected.to run.with_params({ - 'key1' => 'value1' - }, { - 'key1' => 'value2' - }).and_return({ - 'key1' => 'value2' - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1' - }, { - 'key1' => 'value2' - }, { - 'key1' => 'value3' - }).and_return({ - 'key1' => 'value3' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => 'value1' + }, { + 'key1' => 'value2' + }).and_return({ + 'key1' => 'value2' + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1' + }, { + 'key1' => 'value2' + }, { + 'key1' => 'value3' + }).and_return({ + 'key1' => 'value3' + }) + } end context 'multiple keys' do - it { is_expected.to run.with_params({ - 'key1' => 'value1', - 'key2' => 'value2' - }, { - 'key1' => 'value2' - }).and_return({ - 'key1' => 'value2', - 'key2' => 'value2' - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1', - 'key2' => 'value1' - }, { - 'key1' => 'value2' - }, { - 'key1' => 'value3', - 'key2' => 'value2' - }).and_return({ - 'key1' => 'value3', - 'key2' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => 'value1', + 'key2' => 'value2' + }, { + 'key1' => 'value2' + }).and_return({ + 'key1' => 'value2', + 'key2' => 'value2' + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1', + 'key2' => 'value1' + }, { + 'key1' => 
'value2' + }, { + 'key1' => 'value3', + 'key2' => 'value2' + }).and_return({ + 'key1' => 'value3', + 'key2' => 'value2' + }) + } end end describe 'concat merging' do context 'single keys' do - it { is_expected.to run.with_params({ - 'key1' => ['value1'] - }, { - 'key1' => ['value2'] - }).and_return({ - 'key1' => %w[value1 value2] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'] - }, { - 'key1' => ['value2'] - }, { - 'key1' => ['value3'] - }).and_return({ - 'key1' => %w[value1 value2 value3] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'] - }, { - 'key1' => 'value2' - }).and_return({ - 'key1' => 'value2' - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1' - }, { - 'key1' => ['value2'] - }).and_return({ - 'key1' => ['value2'] - }) } + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'] + }, { + 'key1' => ['value2'] + }).and_return({ + 'key1' => %w[value1 value2] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'] + }, { + 'key1' => ['value2'] + }, { + 'key1' => ['value3'] + }).and_return({ + 'key1' => %w[value1 value2 value3] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'] + }, { + 'key1' => 'value2' + }).and_return({ + 'key1' => 'value2' + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1' + }, { + 'key1' => ['value2'] + }).and_return({ + 'key1' => ['value2'] + }) + } end context 'multiple keys' do - it { is_expected.to run.with_params({ - 'key1' => ['value1'], - 'key2' => ['value3'] - }, { - 'key1' => ['value2'], - 'key2' => ['value4'] - }).and_return({ - 'key1' => %w[value1 value2], - 'key2' => %w[value3 value4] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'], - 'key2' => ['value1.1'] - }, { - 'key1' => ['value2'], - 'key2' => ['value2.1'] - }, { - 'key1' => ['value3'], - 'key2' => ['value3.1'] - }).and_return({ - 'key1' => %w[value1 value2 value3], - 'key2' => ['value1.1', 
'value2.1', 'value3.1'] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'], - 'key2' => 'value1' - }, { - 'key1' => 'value2', - 'key2' => ['value2'] - }).and_return({ - 'key1' => 'value2', - 'key2' => ['value2'] - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1', - 'key2' => ['value1'] - }, { - 'key1' => ['value2'], - 'key2' => 'value2' - }).and_return( - 'key1' => ['value2'], - 'key2' => 'value2' - ) } + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'], + 'key2' => ['value3'] + }, { + 'key1' => ['value2'], + 'key2' => ['value4'] + }).and_return({ + 'key1' => %w[value1 value2], + 'key2' => %w[value3 value4] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'], + 'key2' => ['value1.1'] + }, { + 'key1' => ['value2'], + 'key2' => ['value2.1'] + }, { + 'key1' => ['value3'], + 'key2' => ['value3.1'] + }).and_return({ + 'key1' => %w[value1 value2 value3], + 'key2' => ['value1.1', 'value2.1', 'value3.1'] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'], + 'key2' => 'value1' + }, { + 'key1' => 'value2', + 'key2' => ['value2'] + }).and_return({ + 'key1' => 'value2', + 'key2' => ['value2'] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1', + 'key2' => ['value1'] + }, { + 'key1' => ['value2'], + 'key2' => 'value2' + }).and_return( + 'key1' => ['value2'], + 'key2' => 'value2' + ) + } end end - it 'should not change the original hashes' do + it 'does not change the original hashes' do argument1 = { 'key1' => 'value1' } original1 = argument1.dup argument2 = { 'key2' => 'value2' } original2 = argument2.dup subject.execute(argument1, argument2) expect(argument1).to eq(original1) expect(argument2).to eq(original2) end end diff --git a/spec/functions/deep_implode_spec.rb b/spec/functions/deep_implode_spec.rb index 0570d77..580a88d 100644 --- a/spec/functions/deep_implode_spec.rb +++ b/spec/functions/deep_implode_spec.rb @@ -1,111 +1,134 @@ 
-require 'spec_helper' +# frozen_string_literal: true -# rubocop:disable Style/BracesAroundHashParameters -# rubocop:disable Style/IndentHash +require 'spec_helper' describe 'deep_implode' do describe 'exception handling' do - it { is_expected.to run.with_params.and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } + it { + expect(subject).to run.with_params.and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } - it { is_expected.to run.with_params({}, {}).and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } + it { + expect(subject).to run.with_params({}, {}).and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } - it { is_expected.to run.with_params('2').and_raise_error( - Puppet::ParseError, /unexpected argument type/ - ) } + it { + expect(subject).to run.with_params('2').and_raise_error( + Puppet::ParseError, %r{unexpected argument type} + ) + } end ['value', ['value'], 0, 10].each do |value| describe "qualifying #{value}" do it { is_expected.to run.with_params({}).and_return({}) } - it { is_expected.to run.with_params({ - 'key' => value - }).and_return({ - 'key' => value - }) } + it { + expect(subject).to run.with_params({ + 'key' => value + }).and_return({ + 'key' => value + }) + } - it { is_expected.to run.with_params({ - 'key' => { 'subkey' => value } - }).and_return({ - 'key.subkey' => value - }) } + it { + expect(subject).to run.with_params({ + 'key' => { 'subkey' => value } + }).and_return({ + 'key.subkey' => value + }) + } - it { is_expected.to run.with_params({ - 'key' => { 'subkey' => { 'subsubkey' => { 'bottom' => value } } } - }).and_return({ - 'key.subkey.subsubkey.bottom' => value - }) } + it { + expect(subject).to run.with_params({ + 'key' => { 'subkey' => { 'subsubkey' => { 'bottom' => value } } } + }).and_return({ + 'key.subkey.subsubkey.bottom' => value + }) + } end end # The preferred behavior is to favor fully-qualified keys describe 'key collisions' do - 
it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => 'value1' - }, - 'key1.subkey1' => 'value2' - }).and_return({ - 'key1.subkey1' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => 'value1' + }, + 'key1.subkey1' => 'value2' + }).and_return({ + 'key1.subkey1' => 'value2' + }) + } - it { is_expected.to run.with_params({ - 'key1.subkey1' => 'value2', - 'key1' => { - 'subkey1' => 'value1' - } - }).and_return({ - 'key1.subkey1' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1.subkey1' => 'value2', + 'key1' => { + 'subkey1' => 'value1' + } + }).and_return({ + 'key1.subkey1' => 'value2' + }) + } end describe 'deep merging' do - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => ['value1'] - }, - 'key1.subkey1' => ['value2'] - }).and_return({ - 'key1.subkey1' => %w[value2 value1] - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => ['value1'] + }, + 'key1.subkey1' => ['value2'] + }).and_return({ + 'key1.subkey1' => %w[value2 value1] + }) + } - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => { 'key2' => 'value1' } - }, - 'key1.subkey1' => { 'key3' => 'value2' } - }).and_return({ - 'key1.subkey1.key2' => 'value1', - 'key1.subkey1.key3' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => { 'key2' => 'value1' } + }, + 'key1.subkey1' => { 'key3' => 'value2' } + }).and_return({ + 'key1.subkey1.key2' => 'value1', + 'key1.subkey1.key3' => 'value2' + }) + } - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => { 'key2' => ['value1'] } - }, - 'key1.subkey1' => { 'key2' => ['value2'] } - }).and_return({ - 'key1.subkey1.key2' => %w[value2 value1] - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => { 'key2' => ['value1'] } + }, + 'key1.subkey1' => { 'key2' => ['value2'] } + }).and_return({ + 'key1.subkey1.key2' => %w[value2 value1] + }) + } - it { 
is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => { 'key2' => 'value1' }, - 'subkey1.key2' => 'value2' - } - }).and_return({ - 'key1.subkey1.key2' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => { 'key2' => 'value1' }, + 'subkey1.key2' => 'value2' + } + }).and_return({ + 'key1.subkey1.key2' => 'value2' + }) + } end - it 'should not change the original hashes' do + it 'does not change the original hashes' do argument1 = { 'key1' => 'value1' } original1 = argument1.dup subject.execute(argument1) expect(argument1).to eq(original1) end end diff --git a/spec/functions/es_plugin_name_spec.rb b/spec/functions/es_plugin_name_spec.rb index 0373611..b67e3b3 100644 --- a/spec/functions/es_plugin_name_spec.rb +++ b/spec/functions/es_plugin_name_spec.rb @@ -1,75 +1,105 @@ +# frozen_string_literal: true + require 'spec_helper' describe 'es_plugin_name' do describe 'exception handling' do - it { is_expected.to run.with_params.and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } + it { + expect(subject).to run.with_params.and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } end describe 'single arguments' do - it { is_expected.to run - .with_params('foo') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/foo') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/foo/1.0.0') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/es-foo/1.0.0') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/elasticsearch-foo/1.0.0') - .and_return('foo') } - - it { is_expected.to run - .with_params('com.foo:plugin_name:5.2.0') - .and_return('plugin_name')} - - it { is_expected.to run - .with_params('com:plugin_name:5.2.0-12') - .and_return('plugin_name')} - - it { is_expected.to run - .with_params('com.foo.bar:plugin_name:5') - .and_return('plugin_name')} + it { + expect(subject).to run. + with_params('foo'). 
+ and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/foo'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/foo/1.0.0'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/es-foo/1.0.0'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/elasticsearch-foo/1.0.0'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('com.foo:plugin_name:5.2.0'). + and_return('plugin_name') + } + + it { + expect(subject).to run. + with_params('com:plugin_name:5.2.0-12'). + and_return('plugin_name') + } + + it { + expect(subject).to run. + with_params('com.foo.bar:plugin_name:5'). + and_return('plugin_name') + } end describe 'multiple arguments' do - it { is_expected.to run - .with_params('foo', nil) - .and_return('foo') } - - it { is_expected.to run - .with_params(nil, 'foo') - .and_return('foo') } - - it { is_expected.to run - .with_params(nil, 0, 'foo', 'bar') - .and_return('foo') } + it { + expect(subject).to run. + with_params('foo', nil). + and_return('foo') + } + + it { + expect(subject).to run. + with_params(nil, 'foo'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params(nil, 0, 'foo', 'bar'). + and_return('foo') + } end describe 'undef parameters' do - it { is_expected.to run - .with_params('', 'foo') - .and_return('foo') } - - it { is_expected.to run - .with_params('') - .and_raise_error(Puppet::Error, /could not/) } + it { + expect(subject).to run. + with_params('', 'foo'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params(''). 
+ and_raise_error(Puppet::Error, %r{could not}) + } end - it 'should not change the original values' do + it 'does not change the original values' do argument1 = 'foo' original1 = argument1.dup subject.execute(argument1) expect(argument1).to eq(original1) end end diff --git a/spec/functions/plugin_dir_spec.rb b/spec/functions/plugin_dir_spec.rb index 4d3082c..fa12dbc 100644 --- a/spec/functions/plugin_dir_spec.rb +++ b/spec/functions/plugin_dir_spec.rb @@ -1,36 +1,44 @@ +# frozen_string_literal: true + require 'spec_helper' describe 'plugin_dir' do describe 'exception handling' do describe 'with no arguments' do - it { is_expected.to run.with_params - .and_raise_error(Puppet::ParseError) } + it { + expect(subject).to run.with_params. + and_raise_error(Puppet::ParseError) + } end describe 'more than two arguments' do - it { is_expected.to run.with_params('a', 'b', 'c') - .and_raise_error(Puppet::ParseError) } + it { + expect(subject).to run.with_params('a', 'b', 'c'). + and_raise_error(Puppet::ParseError) + } end describe 'non-string arguments' do - it { is_expected.to run.with_params([]) - .and_raise_error(Puppet::ParseError) } + it { + expect(subject).to run.with_params([]). 
+ and_raise_error(Puppet::ParseError) + } end end { 'mobz/elasticsearch-head' => 'head', 'lukas-vlcek/bigdesk/2.4.0' => 'bigdesk', 'elasticsearch/elasticsearch-cloud-aws/2.5.1' => 'cloud-aws', 'com.sksamuel.elasticsearch/elasticsearch-river-redis/1.1.0' => 'river-redis', 'com.github.lbroudoux.elasticsearch/amazon-s3-river/1.4.0' => 'amazon-s3-river', 'elasticsearch/elasticsearch-lang-groovy/2.0.0' => 'lang-groovy', 'royrusso/elasticsearch-hq' => 'hq', 'polyfractal/elasticsearch-inquisitor' => 'inquisitor', 'mycustomplugin' => 'mycustomplugin' }.each do |plugin, dir| describe "parsed dir for #{plugin}" do it { is_expected.to run.with_params(plugin).and_return(dir) } end end end diff --git a/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb b/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb index c6dc2de..38ba1a8 100644 --- a/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb +++ b/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb @@ -1,18 +1,20 @@ +# frozen_string_literal: true + shared_examples 'invalid manifest application' do context 'bad manifest' do let(:applied_manifest) do <<-MANIFEST class { 'elasticsearch' : #{manifest} #{defined?(manifest_class_parameters) && manifest_class_parameters} } #{defined?(extra_manifest) && extra_manifest} MANIFEST end it 'fails to apply' do - apply_manifest(applied_manifest, :expect_failures => true, :debug => v[:puppet_debug]) + apply_manifest(applied_manifest, expect_failures: true, debug: v[:puppet_debug]) end end end diff --git a/spec/helpers/acceptance/tests/basic_shared_examples.rb b/spec/helpers/acceptance/tests/basic_shared_examples.rb index e9a3e39..cda6f92 100644 --- a/spec/helpers/acceptance/tests/basic_shared_examples.rb +++ b/spec/helpers/acceptance/tests/basic_shared_examples.rb @@ -1,67 +1,71 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' shared_examples 'basic acceptance tests' do |es_config| 
include_examples('manifest application') describe package("elasticsearch#{v[:oss] ? '-oss' : ''}") do - it { should be_installed - .with_version(v[:elasticsearch_full_version]) } + it { + expect(subject).to be_installed. + with_version(v[:elasticsearch_full_version]) + } end %w[ /etc/elasticsearch /usr/share/elasticsearch /var/lib/elasticsearch ].each do |dir| describe file(dir) do - it { should be_directory } + it { is_expected.to be_directory } end end describe 'resources' do describe service('elasticsearch') do it { send(es_config.empty? ? :should_not : :should, be_enabled) } it { send(es_config.empty? ? :should_not : :should, be_running) } end unless es_config.empty? describe file(pid_file) do - it { should be_file } - its(:content) { should match(/[0-9]+/) } + it { is_expected.to be_file } + its(:content) { is_expected.to match(%r{[0-9]+}) } end describe file('/etc/elasticsearch/elasticsearch.yml') do - it { should be_file } - it { should contain "name: #{es_config['node.name']}" } + it { is_expected.to be_file } + it { is_expected.to contain "name: #{es_config['node.name']}" } end end unless es_config.empty? 
es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end - describe server :container do - describe http("http://localhost:#{es_port}/_nodes/_local") do - it 'serves requests', :with_retries do - expect(response.status).to eq(200) - end + describe "http://localhost:#{es_port}/_nodes/_local" do + subject { shell("curl http://localhost:#{es_port}/_nodes/_local") } + + it 'serves requests', :with_retries do + expect(subject.exit_code).to eq(0) + end - it 'uses the default data path', :with_retries do - json = JSON.parse(response.body)['nodes'].values.first - data_dir = ['/var/lib/elasticsearch'] - expect( - json['settings']['path'] - ).to include( - 'data' => data_dir - ) - end + it 'uses the default data path', :with_retries do + json = JSON.parse(subject.stdout)['nodes'].values.first + data_dir = ['/var/lib/elasticsearch'] + expect( + json['settings']['path'] + ).to include( + 'data' => data_dir + ) end end end end end diff --git a/spec/helpers/acceptance/tests/datadir_shared_examples.rb b/spec/helpers/acceptance/tests/datadir_shared_examples.rb index 0ec67db..dec5765 100644 --- a/spec/helpers/acceptance/tests/datadir_shared_examples.rb +++ b/spec/helpers/acceptance/tests/datadir_shared_examples.rb @@ -1,72 +1,75 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' shared_examples 'datadir directory validation' do |es_config, datapaths| include_examples('manifest application') describe file('/etc/elasticsearch/elasticsearch.yml') do - it { should be_file } + it { is_expected.to be_file } + datapaths.each do |datapath| - it { should contain datapath } + it { is_expected.to contain datapath } end end datapaths.each do |datapath| describe file(datapath) do - it { should be_directory } + it { is_expected.to be_directory } end end es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - 
should be_listening + expect(subject).to be_listening end end - describe server :container do - describe http( - "http://localhost:#{es_port}/_nodes/_local" - ) do - it 'uses a custom data path' do - json = JSON.parse(response.body)['nodes'].values.first - expect( - json['settings']['path']['data'] - ).to((datapaths.one? and v[:elasticsearch_major_version] <= 2) ? eq(datapaths.first) : contain_exactly(*datapaths)) - end + describe "http://localhost:#{es_port}/_nodes/_local" do + subject { shell("curl http://localhost:#{es_port}/_nodes/_local") } + + it 'uses a custom data path' do + json = JSON.parse(subject.stdout)['nodes'].values.first + expect( + json['settings']['path']['data'] + ).to(datapaths.one? && v[:elasticsearch_major_version] <= 2 ? eq(datapaths.first) : contain_exactly(*datapaths)) end end end shared_examples 'datadir acceptance tests' do |es_config| describe 'elasticsearch::datadir' do let(:manifest_class_parameters) { 'restart_on_change => true' } context 'single path', :with_cleanup do let(:manifest_class_parameters) do <<-MANIFEST datadir => '/var/lib/elasticsearch-data', restart_on_change => true, MANIFEST end + include_examples('datadir directory validation', es_config, ['/var/lib/elasticsearch-data']) end context 'multiple paths', :with_cleanup do let(:manifest_class_parameters) do <<-MANIFEST datadir => [ '/var/lib/elasticsearch-01', '/var/lib/elasticsearch-02' ], restart_on_change => true, MANIFEST end + include_examples('datadir directory validation', es_config, ['/var/lib/elasticsearch-01', '/var/lib/elasticsearch-02']) end end end diff --git a/spec/helpers/acceptance/tests/hiera_shared_examples.rb b/spec/helpers/acceptance/tests/hiera_shared_examples.rb index 609d1de..b06dfe9 100644 --- a/spec/helpers/acceptance/tests/hiera_shared_examples.rb +++ b/spec/helpers/acceptance/tests/hiera_shared_examples.rb @@ -1,87 +1,91 @@ +# frozen_string_literal: true + require 'tempfile' require 'helpers/acceptance/tests/basic_shared_examples' require 
'helpers/acceptance/tests/plugin_shared_examples' agents = only_host_with_role(hosts, 'agent') shared_examples 'hiera tests with' do |es_config, additional_yaml = {}| hieradata = { 'elasticsearch::config' => es_config }.merge(additional_yaml).to_yaml - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll write_hieradata_to(agents, hieradata) end include_examples('basic acceptance tests', es_config) end shared_examples 'hiera acceptance tests' do |es_config, plugins| describe 'hiera', :then_purge do let(:manifest) do - package = if not v[:is_snapshot] + package = if v[:is_snapshot] <<-MANIFEST - # Hard version set here due to plugin incompatibilities. - version => '#{v[:elasticsearch_full_version]}', + manage_repo => false, + package_url => '#{v[:snapshot_package]}', MANIFEST else <<-MANIFEST - manage_repo => false, - package_url => '#{v[:snapshot_package]}', + # Hard version set here due to plugin incompatibilities. + version => '#{v[:elasticsearch_full_version]}', MANIFEST end <<-MANIFEST api_timeout => 60, jvm_options => [ '-Xms128m', '-Xmx128m', ], oss => #{v[:oss]}, #{package} MANIFEST end let(:manifest_class_parameters) { 'restart_on_change => true' } + after :all do # rubocop:disable RSpec/BeforeAfterAll + write_hieradata_to(agents, {}) + + # Ensure that elasticsearch is cleaned up before any other tests + cleanup_manifest = <<-EOS + class { 'elasticsearch': ensure => 'absent', oss => #{v[:oss]} } + EOS + apply_manifest(cleanup_manifest, debug: v[:puppet_debug]) + end + describe 'with hieradata' do - nodename = SecureRandom.hex(10) + # Remove leading 0: 01234567 is valid octal, but 89abcdef is not and the + # serialisation will cause trouble for the test suite (quoting the value?). 
+ nodename = SecureRandom.hex(10).sub(%r{^0+}, '') include_examples( 'hiera tests with', es_config.merge('node.name' => nodename) ) end plugins.each_pair do |plugin, _meta| describe "with plugin #{plugin}" do nodename = SecureRandom.hex(10) include_examples( 'hiera tests with', es_config.merge('node.name' => nodename), 'elasticsearch::plugins' => { plugin => { 'ensure' => 'present' } } ) include_examples( 'plugin API response', es_config.merge('node.name' => nodename), 'reports the plugin as installed', 'name' => plugin ) end end - - after :all do - write_hieradata_to(agents, {}) - - # Ensure that elasticsearch is cleaned up before any other tests - cleanup_manifest = <<-EOS - class { 'elasticsearch': ensure => 'absent', oss => #{v[:oss]} } - EOS - apply_manifest(cleanup_manifest, :debug => v[:puppet_debug]) - end end end diff --git a/spec/helpers/acceptance/tests/manifest_shared_examples.rb b/spec/helpers/acceptance/tests/manifest_shared_examples.rb index eba3863..0ee4fa9 100644 --- a/spec/helpers/acceptance/tests/manifest_shared_examples.rb +++ b/spec/helpers/acceptance/tests/manifest_shared_examples.rb @@ -1,38 +1,40 @@ +# frozen_string_literal: true + shared_examples 'manifest application' do |idempotency_check = true| context 'manifest' do let(:applied_manifest) do repo = if elastic_repo <<-MANIFEST class { 'elastic_stack::repo': oss => #{v[:oss]}, version => #{v[:elasticsearch_major_version]}, } MANIFEST else '' end <<-MANIFEST #{repo} class { 'elasticsearch' : #{manifest} #{defined?(manifest_class_parameters) && manifest_class_parameters} } #{defined?(extra_manifest) && extra_manifest} MANIFEST end it 'applies cleanly' do - apply_manifest(applied_manifest, :catch_failures => true, :debug => v[:puppet_debug]) + apply_manifest(applied_manifest, catch_failures: true, debug: v[:puppet_debug]) end # binding.pry if idempotency_check it 'is idempotent', :logs_on_failure do - apply_manifest(applied_manifest, :catch_changes => true, :debug => v[:puppet_debug]) + 
apply_manifest(applied_manifest, catch_changes: true, debug: v[:puppet_debug]) end end end end diff --git a/spec/helpers/acceptance/tests/package_url_shared_examples.rb b/spec/helpers/acceptance/tests/package_url_shared_examples.rb index a250943..823e179 100644 --- a/spec/helpers/acceptance/tests/package_url_shared_examples.rb +++ b/spec/helpers/acceptance/tests/package_url_shared_examples.rb @@ -1,71 +1,73 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/basic_shared_examples' shared_examples 'package_url acceptance tests' do |es_config| describe 'elasticsearch::package_url' do # Override default manifest to remove `package` let(:manifest) do <<-MANIFEST api_timeout => 60, config => { 'cluster.name' => '#{v[:cluster_name]}', 'http.bind_host' => '0.0.0.0', #{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} }, jvm_options => [ '-Xms128m', '-Xmx128m', ], oss => #{v[:oss]}, MANIFEST end # context 'via http', :with_cleanup do context 'via http' do let(:manifest_class_parameters) do <<-MANIFEST manage_repo => false, package_url => '#{v[:elasticsearch_package][:url]}' MANIFEST end include_examples('basic acceptance tests', es_config) end context 'via local filesystem', :with_cleanup do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll scp_to default, v[:elasticsearch_package][:path], "/tmp/#{v[:elasticsearch_package][:filename]}" end let(:manifest_class_parameters) do <<-MANIFEST manage_repo => false, package_url => 'file:/tmp/#{v[:elasticsearch_package][:filename]}' MANIFEST end include_examples('basic acceptance tests', es_config) end context 'via puppet paths', :with_cleanup do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll shell "mkdir -p #{default['distmoduledir']}/another/files" scp_to default, v[:elasticsearch_package][:path], "#{default['distmoduledir']}/another/files/#{v[:elasticsearch_package][:filename]}" end let(:manifest_class_parameters) do <<-MANIFEST manage_repo 
=> false, package_url => 'puppet:///modules/another/#{v[:elasticsearch_package][:filename]}', MANIFEST end include_examples('basic acceptance tests', es_config) end end end diff --git a/spec/helpers/acceptance/tests/pipeline_shared_examples.rb b/spec/helpers/acceptance/tests/pipeline_shared_examples.rb index 181eaf0..a12ad10 100644 --- a/spec/helpers/acceptance/tests/pipeline_shared_examples.rb +++ b/spec/helpers/acceptance/tests/pipeline_shared_examples.rb @@ -1,56 +1,57 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/bad_manifest_shared_examples' shared_examples 'pipeline operations' do |es_config, pipeline| describe 'pipeline resources' do let(:pipeline_name) { 'foo' } + context 'present' do let(:extra_manifest) do <<-MANIFEST elasticsearch::pipeline { '#{pipeline_name}': ensure => 'present', content => #{pipeline} } MANIFEST end include_examples('manifest application') include_examples('pipeline content', es_config, pipeline) end context 'absent' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{pipeline_name}': ensure => absent, } MANIFEST end include_examples('manifest application') end end end # Verifies the content of a loaded index template. 
shared_examples 'pipeline content' do |es_config, pipeline| elasticsearch_port = es_config['http.port'] describe port(elasticsearch_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end - describe server :container do - describe http( - "http://localhost:#{elasticsearch_port}/_ingest/pipeline" - ) do - it 'returns the configured pipelines', :with_retries do - expect(JSON.parse(response.body).values) - .to include(include(pipeline)) - end + describe "http://localhost:#{elasticsearch_port}/_ingest/pipeline" do + subject { shell("curl http://localhost:#{elasticsearch_port}/_ingest/pipeline") } + + it 'returns the configured pipelines', :with_retries do + expect(JSON.parse(subject.stdout).values). + to include(include(pipeline)) end end end diff --git a/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb b/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb index d61cc12..1c020ed 100644 --- a/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb +++ b/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb @@ -1,21 +1,21 @@ +# frozen_string_literal: true + require 'json' shared_examples 'plugin API response' do |es_config, desc, val| describe port(es_config['http.port']) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end - describe server :container do - describe http( - "http://localhost:#{es_config['http.port']}/_cluster/stats" - ) do - it desc, :with_retries do - expect( - JSON.parse(response.body)['nodes']['plugins'] - ).to include(include(val)) - end + describe "http://localhost:#{es_config['http.port']}/_cluster/stats" do + subject { shell("curl http://localhost:#{es_config['http.port']}/_cluster/stats") } + + it desc, :with_retries do + expect( + JSON.parse(subject.stdout)['nodes']['plugins'] + ).to include(include(val)) end end end diff --git a/spec/helpers/acceptance/tests/plugin_shared_examples.rb 
b/spec/helpers/acceptance/tests/plugin_shared_examples.rb index 6628998..c81bf18 100644 --- a/spec/helpers/acceptance/tests/plugin_shared_examples.rb +++ b/spec/helpers/acceptance/tests/plugin_shared_examples.rb @@ -1,98 +1,100 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/bad_manifest_shared_examples' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/plugin_api_shared_examples' shared_examples 'plugin acceptance tests' do |es_config, plugins| describe 'elasticsearch::plugin' do + before :all do # rubocop:disable RSpec/BeforeAfterAll + shell "mkdir -p #{default['distmoduledir']}/another/files" + end + describe 'invalid plugins', :with_cleanup do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { 'elastic/non-existing': } MANIFEST end include_examples('invalid manifest application') end - before :all do - shell "mkdir -p #{default['distmoduledir']}/another/files" - end - plugins.each_pair do |plugin, meta| describe plugin do # Ensure that instances are restarted to include plugins let(:manifest_class_parameters) { 'restart_on_change => true' } describe 'installation' do describe 'using simple names', :with_cleanup do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin}': } MANIFEST end include_examples('manifest application', es_config) describe file("/usr/share/elasticsearch/plugins/#{plugin}/") do - it { should be_directory } + it { is_expected.to be_directory } end include_examples( 'plugin API response', es_config, 'reports the plugin as installed', 'name' => plugin ) end describe 'offline via puppet://', :with_cleanup do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll scp_to( default, meta[:path], "#{default['distmoduledir']}/another/files/#{plugin}.zip" ) end let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin}': source => 'puppet:///modules/another/#{plugin}.zip', } MANIFEST end include_examples('manifest 
application', es_config) include_examples( 'plugin API response', es_config, 'reports the plugin as installed', 'name' => plugin ) end describe 'via url', :with_cleanup do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin}': url => '#{meta[:url]}', } MANIFEST end include_examples('manifest application', es_config) include_examples( 'plugin API response', es_config, 'reports the plugin as installed', 'name' => plugin ) end end end end end end diff --git a/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb b/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb index ff10779..e7ccfbe 100644 --- a/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb +++ b/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb @@ -1,69 +1,71 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/plugin_api_shared_examples' shared_examples 'plugin upgrade acceptance tests' do |plugin| describe 'elasticsearch::plugin' do # Ensure that instances are restarted to include plugins let(:manifest_class_parameters) { 'restart_on_change => true' } instances = { 'es-01' => { 'config' => { 'http.port' => 9200, 'node.name' => 'elasticsearch001' } } } describe 'installation' do describe 'upgrades', :with_cleanup do context 'initial installation' do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin[:repository]}-#{plugin[:name]}/v#{plugin[:initial]}': instances => 'es-01', } MANIFEST end include_examples( 'manifest application', instances ) include_examples( 'plugin API response', instances, 'contains the initial plugin version', 'name' => plugin[:name], 'version' => plugin[:initial] ) end describe 'upgrading' do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin[:repository]}-#{plugin[:name]}/v#{plugin[:upgraded]}': instances => 'es-01', } MANIFEST end include_examples( 'manifest application', instances ) 
include_examples( 'plugin API response', instances, 'contains the upgraded plugin version', 'name' => plugin[:name], 'version' => plugin[:upgraded] ) end end end end end diff --git a/spec/helpers/acceptance/tests/removal_shared_examples.rb b/spec/helpers/acceptance/tests/removal_shared_examples.rb index 518d691..3c07c77 100644 --- a/spec/helpers/acceptance/tests/removal_shared_examples.rb +++ b/spec/helpers/acceptance/tests/removal_shared_examples.rb @@ -1,30 +1,32 @@ +# frozen_string_literal: true + shared_examples 'module removal' do |es_config| describe 'uninstalling' do let(:manifest) do <<-MANIFEST class { 'elasticsearch': ensure => 'absent', oss => #{v[:oss]} } MANIFEST end - it 'should run successfully' do - apply_manifest(manifest, :catch_failures => true, :debug => v[:puppet_debug]) + it 'runs successfully' do + apply_manifest(manifest, catch_failures: true, debug: v[:puppet_debug]) end describe package("elasticsearch#{v[:oss] ? '-oss' : ''}") do - it { should_not be_installed } + it { is_expected.not_to be_installed } end describe service('elasticsearch') do - it { should_not be_enabled } - it { should_not be_running } + it { is_expected.not_to be_enabled } + it { is_expected.not_to be_running } end unless es_config.empty? describe port(es_config['http.port']) do it 'closed' do - should_not be_listening + expect(subject).not_to be_listening end end end end end diff --git a/spec/helpers/acceptance/tests/security_shared_examples.rb b/spec/helpers/acceptance/tests/security_shared_examples.rb index 1c0f2ea..ba880c9 100644 --- a/spec/helpers/acceptance/tests/security_shared_examples.rb +++ b/spec/helpers/acceptance/tests/security_shared_examples.rb @@ -1,180 +1,181 @@ +# frozen_string_literal: true + require 'json' require 'spec_utilities' require 'helpers/acceptance/tests/manifest_shared_examples' shared_examples 'security plugin manifest' do |credentials| let(:extra_manifest) do users = credentials.map do |username, meta| <<-USER #{meta[:changed] ? 
"notify { 'password change for #{username}' : } ~>" : ''} elasticsearch::user { '#{username}': - password => '#{meta[:hash] ? meta[:hash] : meta[:plaintext]}', - roles => #{meta[:roles].reduce({}) { |a, e| a.merge(e) }.keys}, + password => '#{meta[:hash] || meta[:plaintext]}', + roles => #{meta[:roles].reduce({}) { |acc, elem| acc.merge(elem) }.keys}, } USER end.join("\n") roles = credentials.values.reduce({}) do |sum, user_metadata| # Collect all roles across users sum.merge user_metadata - end[:roles].reduce({}) do |all_roles, role| + end[:roles] + roles = roles.reduce({}) do |all_roles, role| all_roles.merge role - end.reject do |_role, permissions| + end + roles = roles.reject do |_role, permissions| permissions.empty? - end.map do |role, rights| + end + roles = roles.map do |role, rights| <<-ROLE elasticsearch::role { '#{role}': privileges => #{rights} } ROLE - end.join("\n") + end + roles = roles.join("\n") <<-MANIFEST #{users} #{roles} MANIFEST end include_examples( 'manifest application', - not(credentials.values.map { |p| p[:changed] }.any?) + credentials.values.map { |p| p[:changed] }.none? ) end shared_examples 'secured request' do |test_desc, es_config, path, http_test, expected, user = nil, pass = nil| es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end - describe server :container do - describe http( - "https://localhost:#{es_port}#{path}", - { - :ssl => { :verify => false } - }.merge((user and pass) ? 
{ :basic_auth => [user, pass] } : {}) - ) do - it test_desc, :with_retries do - expect(http_test.call(response)).to eq(expected) - end + describe "https://localhost:#{es_port}#{path}" do + subject { shell("curl -k -u #{user}:#{pass} https://localhost:#{es_port}#{path}") } + + it test_desc, :with_retries do + expect(http_test.call(subject.stdout)).to eq(expected) end end end shared_examples 'security acceptance tests' do |es_config| - describe 'security plugin operations', :if => vault_available?, :then_purge => true, :with_license => true, :with_certificates => true do - rand_string = lambda { [*('a'..'z')].sample(8).join } + describe 'security plugin operations', if: vault_available?, then_purge: true, with_license: true, with_certificates: true do + rand_string = -> { [*('a'..'z')].sample(8).join } admin_user = rand_string.call admin_password = rand_string.call - admin = { admin_user => { :plaintext => admin_password, :roles => [{ 'superuser' => [] }] } } + admin = { admin_user => { plaintext: admin_password, roles: [{ 'superuser' => [] }] } } let(:manifest_class_parameters) do <<-MANIFEST api_basic_auth_password => '#{admin_password}', api_basic_auth_username => '#{admin_user}', - api_ca_file => '#{@tls[:ca][:cert][:path]}', + api_ca_file => '#{tls[:ca][:cert][:path]}', api_protocol => 'https', - ca_certificate => '#{@tls[:ca][:cert][:path]}', - certificate => '#{@tls[:clients].first[:cert][:path]}', - keystore_password => '#{@keystore_password}', + ca_certificate => '#{tls[:ca][:cert][:path]}', + certificate => '#{tls[:clients].first[:cert][:path]}', + keystore_password => '#{keystore_password}', license => file('#{v[:elasticsearch_license_path]}'), - private_key => '#{@tls[:clients].first[:key][:path]}', + private_key => '#{tls[:clients].first[:key][:path]}', restart_on_change => true, ssl => true, validate_tls => true, MANIFEST end describe 'over tls' do user_one = rand_string.call user_two = rand_string.call user_one_pw = rand_string.call user_two_pw = 
rand_string.call describe 'user authentication' do username_passwords = { - user_one => { :plaintext => user_one_pw, :roles => [{ 'superuser' => [] }] }, - user_two => { :plaintext => user_two_pw, :roles => [{ 'superuser' => [] }] } + user_one => { plaintext: user_one_pw, roles: [{ 'superuser' => [] }] }, + user_two => { plaintext: user_two_pw, roles: [{ 'superuser' => [] }] } }.merge(admin) username_passwords[user_two][:hash] = bcrypt(username_passwords[user_two][:plaintext]) include_examples('security plugin manifest', username_passwords) include_examples( 'secured request', 'denies unauthorized access', es_config, '/_cluster/health', - lambda { |r| r.status }, 401 + ->(r) { r.status }, 401 ) include_examples( 'secured request', "permits user #{user_one} access", es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, user_one, user_one_pw ) include_examples( 'secured request', "permits user #{user_two} access", es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, user_two, user_two_pw ) end describe 'changing passwords' do new_password = rand_string.call username_passwords = { user_one => { - :plaintext => new_password, - :changed => true, - :roles => [{ 'superuser' => [] }] + plaintext: new_password, + changed: true, + roles: [{ 'superuser' => [] }] } } include_examples('security plugin manifest', username_passwords) include_examples( 'secured request', 'denies unauthorized access', es_config, '/_cluster/health', - lambda { |r| r.status }, 401 + ->(r) { r.status }, 401 ) include_examples( 'secured request', "permits user #{user_two} access with new password", es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, user_one, new_password ) end describe 'roles' do password = rand_string.call username = rand_string.call user = { username => { - :plaintext => password, - :roles => [{ + plaintext: password, + roles: [{ rand_string.call => { 'cluster' => [ 
'cluster:monitor/health' ] } }] } } include_examples('security plugin manifest', user) include_examples( 'secured request', 'denies unauthorized access', es_config, '/_snapshot', - lambda { |r| r.status }, 403, + ->(r) { r.status }, 403, username, password ) include_examples( 'secured request', 'permits authorized access', es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, username, password ) end end end end diff --git a/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb b/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb index abd329f..1dbbd73 100644 --- a/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb +++ b/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb @@ -1,81 +1,81 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' # Main entrypoint for snapshot tests shared_examples 'snapshot repository acceptance tests' do describe 'elasticsearch::snapshot_repository', :with_cleanup do es_config = { 'http.port' => 9200, 'node.name' => 'elasticsearchSnapshot01', 'path.repo' => '/var/lib/elasticsearch' } # Override the manifest in order to populate 'path.repo' let(:manifest) do - package = if not v[:is_snapshot] + package = if v[:is_snapshot] <<-MANIFEST - # Hard version set here due to plugin incompatibilities. - version => '#{v[:elasticsearch_full_version]}', + manage_repo => false, + package_url => '#{v[:snapshot_package]}', MANIFEST else <<-MANIFEST - manage_repo => false, - package_url => '#{v[:snapshot_package]}', + # Hard version set here due to plugin incompatibilities. 
+ version => '#{v[:elasticsearch_full_version]}', MANIFEST end <<-MANIFEST api_timeout => 60, config => { 'cluster.name' => '#{v[:cluster_name]}', 'http.bind_host' => '0.0.0.0', #{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} }, jvm_options => [ '-Xms128m', '-Xmx128m', ], oss => #{v[:oss]}, #{package} MANIFEST end let(:manifest_class_parameters) { 'restart_on_change => true' } let(:extra_manifest) do <<-MANIFEST elasticsearch::snapshot_repository { 'backup': ensure => 'present', api_timeout => 60, location => '/var/lib/elasticsearch/backup', max_restore_rate => '20mb', max_snapshot_rate => '80mb', } MANIFEST end include_examples('manifest application', es_config) es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end - describe server :container do - describe http( - "http://localhost:#{es_port}/_snapshot/backup" - ) do - it 'returns the snapshot repository', :with_retries do - expect(JSON.parse(response.body)['backup']) - .to include('settings' => a_hash_including( - 'location' => '/var/lib/elasticsearch/backup', - 'max_restore_rate' => '20mb', - 'max_snapshot_rate' => '80mb' - )) - end + describe "http://localhost:#{es_port}/_snapshot/backup" do + subject { shell("curl http://localhost:#{es_port}/_snapshot/backup") } + + it 'returns the snapshot repository', :with_retries do + expect(JSON.parse(subject.stdout)['backup']). 
+ to include('settings' => a_hash_including( + 'location' => '/var/lib/elasticsearch/backup', + 'max_restore_rate' => '20mb', + 'max_snapshot_rate' => '80mb' + )) end end end end diff --git a/spec/helpers/acceptance/tests/template_shared_examples.rb b/spec/helpers/acceptance/tests/template_shared_examples.rb index 11044ad..d6c5a52 100644 --- a/spec/helpers/acceptance/tests/template_shared_examples.rb +++ b/spec/helpers/acceptance/tests/template_shared_examples.rb @@ -1,111 +1,110 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/bad_manifest_shared_examples' # Describes how to apply a manifest with a template, verify it, and clean it up shared_examples 'template application' do |es_config, name, template, param| context 'present' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{name}': ensure => 'present', #{param} } MANIFEST end include_examples('manifest application') include_examples('template content', es_config, template) end context 'absent' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{name}': ensure => absent, } MANIFEST end include_examples('manifest application') end end # Verifies the content of a loaded index template. 
shared_examples 'template content' do |es_config, template| elasticsearch_port = es_config['http.port'] describe port(elasticsearch_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end - describe server :container do - describe http( - "http://localhost:#{elasticsearch_port}/_template", - :params => { 'flat_settings' => 'false' } - ) do - it 'returns the installed template', :with_retries do - expect(JSON.parse(response.body).values) - .to include(include(template)) - end + describe "http://localhost:#{elasticsearch_port}/_template" do + subject { shell("curl http://localhost:#{elasticsearch_port}/_template") } + + it 'returns the installed template', :with_retries do + expect(JSON.parse(subject.stdout).values). + to include(include(template)) end end end # Main entrypoint for template tests shared_examples 'template operations' do |es_config, template| describe 'template resources' do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll shell "mkdir -p #{default['distmoduledir']}/another/files" create_remote_file( default, "#{default['distmoduledir']}/another/files/good.json", JSON.dump(template) ) create_remote_file( default, "#{default['distmoduledir']}/another/files/bad.json", JSON.dump(template)[0..-5] ) end context 'configured through' do context '`source`' do include_examples( 'template application', es_config, SecureRandom.hex(8), template, "source => 'puppet:///modules/another/good.json'" ) end context '`content`' do include_examples( 'template application', es_config, SecureRandom.hex(8), template, "content => '#{JSON.dump(template)}'" ) end context 'bad json' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{SecureRandom.hex(8)}': ensure => 'present', file => 'puppet:///modules/another/bad.json' } MANIFEST end include_examples('invalid manifest application') end end end end diff --git a/spec/helpers/acceptance/tests/usergroup_shared_examples.rb 
b/spec/helpers/acceptance/tests/usergroup_shared_examples.rb index 1fbcbc5..11ac91b 100644 --- a/spec/helpers/acceptance/tests/usergroup_shared_examples.rb +++ b/spec/helpers/acceptance/tests/usergroup_shared_examples.rb @@ -1,49 +1,51 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/basic_shared_examples' shared_examples 'user/group acceptance tests' do - describe 'user/group parameters', :first_purge => true, :then_purge => true do + describe 'user/group parameters', first_purge: true, then_purge: true do describe 'with non-default values', :with_cleanup do let(:extra_manifest) do <<-MANIFEST group { 'esuser': ensure => 'present', } -> group { 'esgroup': ensure => 'present' } -> user { 'esuser': ensure => 'present', groups => ['esgroup', 'esuser'], before => Class['elasticsearch'], } MANIFEST end let(:manifest_class_parameters) do <<-MANIFEST elasticsearch_user => 'esuser', elasticsearch_group => 'esgroup', MANIFEST end include_examples( 'basic acceptance tests', 'es-01' => { 'config' => { 'http.port' => 9200, 'node.name' => 'elasticsearch001' } } ) %w[ /etc/elasticsearch/es-01/elasticsearch.yml /usr/share/elasticsearch /var/log/elasticsearch ].each do |path| describe file(path) do - it { should be_owned_by 'esuser' } + it { is_expected.to be_owned_by 'esuser' } end end end end end diff --git a/spec/helpers/class_shared_examples.rb b/spec/helpers/class_shared_examples.rb index c7d631a..3391da4 100644 --- a/spec/helpers/class_shared_examples.rb +++ b/spec/helpers/class_shared_examples.rb @@ -1,8 +1,10 @@ +# frozen_string_literal: true + shared_examples 'class' do - it { should compile.with_all_deps } - it { should contain_augeas('/etc/sysconfig/elasticsearch') } - it { should contain_file('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat_fragment('main_config') } - it { should contain_service('elasticsearch') } + it { is_expected.to 
compile.with_all_deps } + it { is_expected.to contain_augeas('/etc/sysconfig/elasticsearch') } + it { is_expected.to contain_file('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat_fragment('main_config') } + it { is_expected.to contain_service('elasticsearch') } end diff --git a/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb b/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb index fec5dfd..611f2c0 100644 --- a/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb +++ b/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb @@ -1,106 +1,108 @@ +# frozen_string_literal: true + require 'json' require 'spec_helper_rspec' require 'webmock/rspec' shared_examples 'REST API' do |resource_type, create_uri, singleton = false| unless singleton describe 'instances' do context "with no #{resource_type}s" do it 'returns an empty list' do - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return( - :status => 200, - :body => '{}' + stub_request(:get, "http://localhost:9200/_#{resource_type}"). + with(headers: { 'Accept' => 'application/json' }). + to_return( + status: 200, + body: '{}' ) expect(described_class.instances).to eq([]) end end end end describe "#{resource_type}s" do if singleton - let(:json) { json_1 } - let(:instance) { [example_1] } + let(:json) { json1 } + let(:instance) { [example1] } else - let(:json) { json_1.merge(json_2) } - let(:instance) { [example_1, example_2] } + let(:json) { json1.merge(json2) } + let(:instance) { [example1, example2] } end it "returns #{resource_type}s" do - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return( - :status => 200, - :body => JSON.dump(json) + stub_request(:get, "http://localhost:9200/_#{resource_type}"). 
+ with(headers: { 'Accept' => 'application/json' }). + to_return( + status: 200, + body: JSON.dump(json) ) expect(described_class.instances.map do |provider| provider.instance_variable_get(:@property_hash) end).to contain_exactly(*instance) end end describe 'basic authentication' do it 'authenticates' do - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with( - :basic_auth => %w[elastic password], - :headers => { 'Accept' => 'application/json' } - ) - .to_return( - :status => 200, - :body => JSON.dump(json_1) + stub_request(:get, "http://localhost:9200/_#{resource_type}"). + with( + basic_auth: %w[elastic password], + headers: { 'Accept' => 'application/json' } + ). + to_return( + status: 200, + body: JSON.dump(json1) ) expect(described_class.api_objects( - 'http', true, 'localhost', '9200', 10, 'elastic', 'password' + 'http', 'localhost', '9200', 10, 'elastic', 'password', validate_tls: true ).map do |provider| described_class.new( provider ).instance_variable_get(:@property_hash) - end).to contain_exactly(example_1) + end).to contain_exactly(example1) end end describe 'https' do it 'uses ssl' do - stub_request(:get, "https://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return( - :status => 200, - :body => JSON.dump(json_1) + stub_request(:get, "https://localhost:9200/_#{resource_type}"). + with(headers: { 'Accept' => 'application/json' }). 
+ to_return( + status: 200, + body: JSON.dump(json1) ) expect(described_class.api_objects( - 'https', true, 'localhost', '9200', 10 + 'https', 'localhost', '9200', 10, validate_tls: true ).map do |provider| described_class.new( provider ).instance_variable_get(:@property_hash) - end).to contain_exactly(example_1) + end).to contain_exactly(example1) end end unless singleton describe 'flush' do it "creates #{resource_type}s" do - stub_request(:put, "http://localhost:9200/#{create_uri}") - .with( - :headers => { + stub_request(:put, "http://localhost:9200/#{create_uri}"). + with( + headers: { 'Accept' => 'application/json', 'Content-Type' => 'application/json' }, - :body => bare_resource + body: bare_resource ) - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return(:status => 200, :body => '{}') + stub_request(:get, "http://localhost:9200/_#{resource_type}"). + with(headers: { 'Accept' => 'application/json' }). + to_return(status: 200, body: '{}') provider.flush end end end -end # of describe puppet type +end diff --git a/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb b/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb index 78d958b..d3e8a4a 100644 --- a/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb +++ b/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb @@ -1,213 +1,215 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' shared_examples 'REST API types' do |resource_type, meta_property| let(:default_params) do { meta_property => {} } end describe "attribute validation for #{resource_type}s" do - [ - :name, - :host, - :port, - :protocol, - :validate_tls, - :ca_file, - :ca_path, - :timeout, - :username, - :password + %i[ + name + host + port + protocol + validate_tls + ca_file + ca_path + timeout + username + password ].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do 
expect(described_class.attrtype(param)).to eq(:param) end end [ :ensure, meta_property ].each do |prop| - it "should have a #{prop} property" do + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end describe meta_property.to_s do - it 'should reject non-hash values' do + it 'rejects non-hash values' do expect do described_class.new( :name => resource_name, meta_property => '{"foo":}' ) - end.to raise_error(Puppet::Error, /hash expected/i) + end.to raise_error(Puppet::Error, %r{hash expected}i) expect do described_class.new( :name => resource_name, meta_property => 0 ) - end.to raise_error(Puppet::Error, /hash expected/i) + end.to raise_error(Puppet::Error, %r{hash expected}i) expect do described_class.new( default_params.merge( - :name => resource_name + name: resource_name ) ) end.not_to raise_error end - it 'should parse PSON-like values for certain types' do + it 'parses PSON-like values for certain types' do expect(described_class.new( :name => resource_name, meta_property => { 'key' => { 'value' => '0', 'other' => true } } )[meta_property]).to include( 'key' => { 'value' => 0, 'other' => true } ) end end describe 'ensure' do - it 'should support present as a value for ensure' do + it 'supports present as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :present + name: resource_name, + ensure: :present ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should support absent as a value for ensure' do + it 'supports absent as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :absent + name: resource_name, + ensure: :absent ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should not support 
other values' do + it 'does not support other values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :foo + name: resource_name, + ensure: :foo ) ) - end.to raise_error(Puppet::Error, /Invalid value/) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'host' do - it 'should accept IP addresses' do + it 'accepts IP addresses' do expect do described_class.new( default_params.merge( - :name => resource_name, - :host => '127.0.0.1' + name: resource_name, + host: '127.0.0.1' ) ) end.not_to raise_error end end describe 'port' do [-1, 0, 70_000, 'foo'].each do |value| - it "should reject invalid port value #{value}" do + it "rejects invalid port value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :port => value + name: resource_name, + port: value ) ) - end.to raise_error(Puppet::Error, /invalid port/i) + end.to raise_error(Puppet::Error, %r{invalid port}i) end end end describe 'validate_tls' do [-1, 0, {}, [], 'foo'].each do |value| - it "should reject invalid ssl_verify value #{value}" do + it "rejects invalid ssl_verify value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) - end.to raise_error(Puppet::Error, /invalid value/i) + end.to raise_error(Puppet::Error, %r{invalid value}i) end end [true, false, 'true', 'false', 'yes', 'no'].each do |value| - it "should accept validate_tls value #{value}" do + it "accepts validate_tls value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) end.not_to raise_error end end end describe 'timeout' do - it 'should reject string values' do + it 'rejects string values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 'foo' + name: resource_name, 
+ timeout: 'foo' ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, %r{must be a}) end - it 'should reject negative integers' do + it 'rejects negative integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => -10 + name: resource_name, + timeout: -10 ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, %r{must be a}) end - it 'should accept integers' do + it 'accepts integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 10 + name: resource_name, + timeout: 10 ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should accept quoted integers' do + it 'accepts quoted integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => '10' + name: resource_name, + timeout: '10' ) ) - end.to_not raise_error + end.not_to raise_error end end - end # of describing when validing values -end # of REST API type shared examples + end +end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index cd90602..6bc659f 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,21 +1,21 @@ -require_relative 'helpers/class_shared_examples' -require 'rspec-puppet-utils' -require 'rspec-puppet-facts' -include RspecPuppetFacts +# frozen_string_literal: true -def fixture_path - File.expand_path(File.join(__FILE__, '..', 'fixtures')) -end +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ + +# puppetlabs_spec_helper will set up coverage if the env variable is set. +# We want to do this if lib exists and it hasn't been explicitly set. 
+ENV['COVERAGE'] ||= 'yes' if Dir.exist?(File.expand_path('../lib', __dir__)) -$LOAD_PATH.unshift(File.expand_path(File.dirname(__FILE__) + '/../')) +require 'voxpupuli/test/spec_helper' RSpec.configure do |c| - c.mock_with :rspec + c.hiera_config = 'spec/fixtures/hiera/hiera.yaml' end -require 'puppetlabs_spec_helper/module_spec_helper' -RSpec.configure do |c| - c.add_setting :fixture_path, :default => fixture_path - # c.mock_with(:rspec) - c.hiera_config = File.join(fixture_path, '/hiera/hiera.yaml') +if File.exist?(File.join(__dir__, 'default_module_facts.yml')) + facts = YAML.safe_load(File.read(File.join(__dir__, 'default_module_facts.yml'))) + facts&.each do |name, value| + add_custom_fact name.to_sym, value + end end diff --git a/spec/spec_helper_acceptance.rb b/spec/spec_helper_acceptance.rb index 8431fcc..d3a6e23 100644 --- a/spec/spec_helper_acceptance.rb +++ b/spec/spec_helper_acceptance.rb @@ -1,260 +1,10 @@ -require 'beaker-rspec' -require 'beaker/puppet_install_helper' -require 'securerandom' -require 'thread' -require 'infrataster/rspec' -require 'rspec/retry' -require 'vault' +# frozen_string_literal: true -require 'simp/beaker_helpers' -include Simp::BeakerHelpers +# Managed by modulesync - DO NOT EDIT +# https://voxpupuli.org/docs/updating-files-managed-with-modulesync/ -require_relative 'spec_helper_tls' -require_relative 'spec_utilities' -require_relative '../lib/puppet_x/elastic/deep_to_i' -require_relative '../lib/puppet_x/elastic/deep_to_s' +require 'voxpupuli/acceptance/spec_helper_acceptance' -# def f -# RSpec.configuration.fact -# end +configure_beaker -run_puppet_install_helper('agent') unless ENV['BEAKER_provision'] == 'no' - -RSpec.configure do |c| - # General-purpose spec-global variables - c.add_setting :v, :default => {} - - # Puppet debug logging - v[:puppet_debug] = ENV['BEAKER_debug'] ? true : false - - unless ENV['snapshot_version'].nil? 
- v[:snapshot_version] = ENV['snapshot_version'] - v[:is_snapshot] = ENV['SNAPSHOT_TEST'] == 'true' - end - - unless ENV['ELASTICSEARCH_VERSION'].nil? and v[:snapshot_version].nil? - v[:elasticsearch_full_version] = ENV['ELASTICSEARCH_VERSION'] || v[:snapshot_version] - v[:elasticsearch_major_version] = v[:elasticsearch_full_version].split('.').first.to_i - v[:elasticsearch_package] = {} - v[:template] = if v[:elasticsearch_major_version] == 6 - JSON.load(File.new('spec/fixtures/templates/6.x.json')) - elsif v[:elasticsearch_major_version] >= 8 - JSON.load(File.new('spec/fixtures/templates/post_8.0.json')) - else - JSON.load(File.new('spec/fixtures/templates/7.x.json')) - end - v[:template] = Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(v[:template])) - v[:pipeline] = JSON.load(File.new('spec/fixtures/pipelines/example.json')) - - v[:elasticsearch_plugins] = Dir[ - artifact("*#{v[:elasticsearch_full_version]}.zip", ['plugins']) - ].map do |plugin| - plugin_filename = File.basename(plugin) - plugin_name = plugin_filename.match(/^(?.+)-#{v[:elasticsearch_full_version]}.zip/)[:name] - [ - plugin_name, - { - :path => plugin, - :url => derive_plugin_urls_for(v[:elasticsearch_full_version], [plugin_name]).keys.first - } - ] - end.to_h - end - - v[:oss] = (not ENV['OSS_PACKAGE'].nil?) 
and ENV['OSS_PACKAGE'] == 'true' - v[:cluster_name] = SecureRandom.hex(10) - - # rspec-retry - c.display_try_failure_messages = true - c.default_sleep_interval = 10 - # General-case retry keyword for unstable tests - c.around :each, :with_retries do |example| - example.run_with_retry retry: 10 - end - - # Helper hook for module cleanup - c.after :context, :with_cleanup do - apply_manifest <<-EOS - class { 'elasticsearch': - ensure => 'absent', - manage_repo => true, - oss => #{v[:oss]}, - } - - file { '/usr/share/elasticsearch/plugin': - ensure => 'absent', - force => true, - recurse => true, - require => Class['elasticsearch'], - } - EOS - end - - c.before :context, :with_certificates do - @keystore_password = SecureRandom.hex - @role = [*('a'..'z')].sample(8).join - - # Setup TLS cert placement - @tls = gen_certs(2, '/tmp') - - create_remote_file hosts, @tls[:ca][:cert][:path], @tls[:ca][:cert][:pem] - @tls[:clients].each do |node| - node.each do |_type, params| - create_remote_file hosts, params[:path], params[:pem] - end - end - end - - c.before :context, :with_license do - Vault.address = ENV['VAULT_ADDR'] - if ENV['CI'] - Vault.auth.approle(ENV['VAULT_APPROLE_ROLE_ID'], ENV['VAULT_APPROLE_SECRET_ID']) - else - Vault.auth.token(ENV['VAULT_TOKEN']) - end - licenses = Vault.with_retries(Vault::HTTPConnectionError) do - Vault.logical.read(ENV['VAULT_PATH']) - end.data - - raise 'No license found!' unless licenses - - # license = case v[:elasticsearch_major_version] - # when 6 - # licenses[:v5] - # else - # licenses[:v7] - # end - license = licenses[:v7] - create_remote_file hosts, '/tmp/license.json', license - v[:elasticsearch_license_path] = '/tmp/license.json' - end - - c.after :context, :then_purge do - shell 'rm -rf {/usr/share,/etc,/var/lib}/elasticsearch*' - end - - c.before :context, :first_purge do - shell 'rm -rf {/usr/share,/etc,/var/lib}/elasticsearch*' - end - - # Provide a hook filter to spit out some ES logs if the example fails. 
- c.after(:example, :logs_on_failure) do |example| - if example.exception - hosts.each do |host| - on host, "find / -name '#{v[:cluster_name]}.log' | xargs cat || true" do |result| - puts result.formatted_output - end - end - end - end -end - -files_dir = ENV['files_dir'] || './spec/fixtures/artifacts' - -# General bootstrapping steps for each host -hosts.each do |host| - # # Set the host to 'aio' in order to adopt the puppet-agent style of - # # installation, and configure paths/etc. - # host[:type] = 'aio' - # configure_defaults_on host, 'aio' - - if fact('os.family') == 'Suse' - install_package host, - '--force-resolution augeas-devel libxml2-devel ruby-devel' - on host, 'gem install ruby-augeas --no-ri --no-rdoc' - end - - v[:ext] = case fact('os.family') - when 'Debian' - 'deb' - else - 'rpm' - end - - if v[:elasticsearch_package] - v[:elasticsearch_package].merge!( - derive_full_package_url( - v[:elasticsearch_full_version], [v[:ext]] - ).flat_map do |url, filename| - [[:url, url], [:filename, filename], [:path, artifact(filename)]] - end.to_h - ) - end - - Infrataster::Server.define(:docker) do |server| - server.address = host[:ip] - server.ssh = host[:ssh].tap { |s| s.delete :forward_agent } - end - Infrataster::Server.define(:container) do |server| - server.address = host[:vm_ip] # this gets ignored anyway - server.from = :docker - end -end - -RSpec.configure do |c| - if v[:is_snapshot] - c.before :suite do - scp_to default, - "#{files_dir}/elasticsearch-snapshot.#{v[:ext]}", - "/tmp/elasticsearch-snapshot.#{v[:ext]}" - v[:snapshot_package] = "file:/tmp/elasticsearch-snapshot.#{v[:ext]}" - end - end - - c.before :suite do - # Install module and dependencies - install_dev_puppet_module :ignore_list => [ - 'junit' - ] + Beaker::DSL::InstallUtils::ModuleUtils::PUPPET_MODULE_INSTALL_IGNORE - - hosts.each do |host| - modules = %w[archive augeas_core datacat java java_ks stdlib elastic_stack] - - dist_module = { - 'Debian' => ['apt'], - 'Suse' => ['zypprepo'], - 
'RedHat' => %w[concat yumrepo_core] - }[fact('os.family')] - - modules += dist_module unless dist_module.nil? - - modules.each do |mod| - copy_module_to( - host, - :module_name => mod, - :source => "spec/fixtures/modules/#{mod}" - ) - end - - on(host, 'mkdir -p etc/puppet/modules/another/files/') - - # Apt doesn't update package caches sometimes, ensure we're caught up. - shell 'apt-get update' if fact('os.family') == 'Debian' - end - - # Use the Java class once before the suite of tests - unless shell('command -v java', :accept_all_exit_codes => true).exit_code.zero? - java = case fact('os.name') - when 'OpenSuSE' - 'package => "java-1_8_0-openjdk-headless",' - else - '' - end - - apply_manifest <<-MANIFEST - class { "java" : - distribution => "jdk", - #{java} - } - MANIFEST - end - end -end - -# # Java 8 is only easy to manage on recent distros -# def v5x_capable? -# (fact('os.family') == 'RedHat' and \ -# not (fact('os.name') == 'OracleLinux' and \ -# f['os']['release']['major'] == '6')) or \ -# f.dig 'os', 'distro', 'codename' == 'xenial' -# end +Dir['./spec/support/acceptance/**/*.rb'].sort.each { |f| require f } diff --git a/spec/spec_helper_rspec.rb b/spec/spec_helper_rspec.rb index 4944b5e..b0078e2 100644 --- a/spec/spec_helper_rspec.rb +++ b/spec/spec_helper_rspec.rb @@ -1 +1,3 @@ +# frozen_string_literal: true + require 'puppet' diff --git a/spec/spec_helper_tls.rb b/spec/spec_helper_tls.rb index c3a1250..c754e9e 100644 --- a/spec/spec_helper_tls.rb +++ b/spec/spec_helper_tls.rb @@ -1,102 +1,104 @@ +# frozen_string_literal: true + require 'openssl' def gen_certs(num_certs, path) - ret = { :clients => [] } + ret = { clients: [] } serial = 1_000_000 ca_key = OpenSSL::PKey::RSA.new 2048 # CA Cert ca_name = OpenSSL::X509::Name.parse 'CN=ca/DC=example/DC=com' ca_cert = OpenSSL::X509::Certificate.new ca_cert.serial = serial serial += 1 ca_cert.version = 2 ca_cert.not_before = Time.now ca_cert.not_after = Time.now + 86_400 ca_cert.public_key = ca_key.public_key 
ca_cert.subject = ca_name ca_cert.issuer = ca_name extension_factory = OpenSSL::X509::ExtensionFactory.new extension_factory.subject_certificate = ca_cert extension_factory.issuer_certificate = ca_cert # ca_cert.add_extension extension_factory.create_extension( # 'subjectAltName', ['localhost', '127.0.0.1'].map { |d| "DNS: #{d}" }.join(',') # ) ca_cert.add_extension extension_factory.create_extension( 'subjectKeyIdentifier', 'hash' ) ca_cert.add_extension extension_factory.create_extension( 'basicConstraints', 'CA:TRUE', true ) - ca_cert.sign ca_key, OpenSSL::Digest::SHA256.new + ca_cert.sign ca_key, OpenSSL::Digest.new('SHA256') ret[:ca] = { - :cert => { - :pem => ca_cert.to_pem, - :path => path + '/ca_cert.pem' + cert: { + pem: ca_cert.to_pem, + path: "#{path}/ca_cert.pem" } } num_certs.times do |i| key, cert, serial = gen_cert_pair serial, ca_cert - cert.sign ca_key, OpenSSL::Digest::SHA256.new + cert.sign ca_key, OpenSSL::Digest.new('SHA256') ret[:clients] << { - :key => { - :pem => key.to_pem, - :path => path + '/' + i.to_s + '_key.pem' + key: { + pem: key.to_pem, + path: "#{path}/#{i}_key.pem" }, - :cert => { - :pem => cert.to_pem, - :path => path + '/' + i.to_s + '_cert.pem' + cert: { + pem: cert.to_pem, + path: "#{path}/#{i}_cert.pem" } } end ret end def gen_cert_pair(serial, ca_cert) serial += 1 # Node Key key = OpenSSL::PKey::RSA.new 2048 node_name = OpenSSL::X509::Name.parse 'CN=localhost/DC=example/DC=com' # prepare SANS list sans = ['localhost.localdomain', 'localhost', 'localhost.example.com'] sans_list = sans.map { |domain| "DNS:#{domain}" } # Node Cert cert = OpenSSL::X509::Certificate.new cert.serial = serial cert.version = 2 cert.not_before = Time.now cert.not_after = Time.now + 6000 cert.subject = node_name cert.public_key = key.public_key cert.issuer = ca_cert.subject csr_extension_factory = OpenSSL::X509::ExtensionFactory.new csr_extension_factory.subject_certificate = cert csr_extension_factory.issuer_certificate = ca_cert cert.add_extension 
csr_extension_factory.create_extension( 'subjectAltName', sans_list.join(',') ) cert.add_extension csr_extension_factory.create_extension( 'basicConstraints', 'CA:FALSE' ) cert.add_extension csr_extension_factory.create_extension( 'keyUsage', 'keyEncipherment,dataEncipherment,digitalSignature' ) cert.add_extension csr_extension_factory.create_extension( 'extendedKeyUsage', 'serverAuth,clientAuth' ) cert.add_extension csr_extension_factory.create_extension( 'subjectKeyIdentifier', 'hash' ) [key, cert, serial] end diff --git a/spec/spec_utilities.rb b/spec/spec_utilities.rb index 489bd6a..daaeb76 100644 --- a/spec/spec_utilities.rb +++ b/spec/spec_utilities.rb @@ -1,134 +1,136 @@ +# frozen_string_literal: true + require 'bcrypt' require 'open-uri' def to_agent_version(puppet_version) # REF: https://docs.puppet.com/puppet/latest/reference/about_agent.html { # Puppet => Agent '4.10.4' => '1.10.4', '4.10.3' => '1.10.3', '4.10.2' => '1.10.2', '4.10.1' => '1.10.1', '4.10.0' => '1.10.0', '4.9.4' => '1.9.3', '4.8.2' => '1.8.3', '4.7.1' => '1.7.2', '4.7.0' => '1.7.1', '4.6.2' => '1.6.2', '4.5.3' => '1.5.3', '4.4.2' => '1.4.2', '4.4.1' => '1.4.1', '4.4.0' => '1.4.0', '4.3.2' => '1.3.6', '4.3.1' => '1.3.2', '4.3.0' => '1.3.0', '4.2.3' => '1.2.7', '4.2.2' => '1.2.6', '4.2.1' => '1.2.2', '4.2.0' => '1.2.1', '4.1.0' => '1.1.1', '4.0.0' => '1.0.1' }[puppet_version] end def derive_artifact_urls_for(full_version, plugins = ['analysis-icu']) derive_full_package_url(full_version).merge( derive_plugin_urls_for(full_version, plugins) ) end def derive_full_package_url(full_version, extensions = %w[deb rpm]) extensions.map do |ext| url = if full_version.start_with? 
'6' "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-#{full_version}.#{ext}" elsif ext == 'deb' "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-#{full_version}-amd64.#{ext}" else "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-#{full_version}-x86_64.#{ext}" end [url, File.basename(url)] end.to_h end def derive_plugin_urls_for(full_version, plugins = ['analysis-icu']) plugins.map do |plugin| url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/#{plugin}/#{plugin}-#{full_version}.zip" [url, File.join('plugins', File.basename(url))] end.to_h end def artifact(file, fixture_path = []) File.join(%w[spec fixtures artifacts] + fixture_path + [File.basename(file)]) end def get(url, file_path) puts "Fetching #{url}..." found = false until found uri = URI.parse(url) conn = Net::HTTP.new(uri.host, uri.port) conn.use_ssl = true res = conn.get(uri.path) if res.header['location'] url = res.header['location'] else found = true end end File.open(file_path, 'w+') { |fh| fh.write res.body } end def fetch_archives(archives) archives.each do |url, orig_fp| fp = "spec/fixtures/artifacts/#{orig_fp}" if File.exist? fp - if fp.end_with? 'tar.gz' and !system("tar -tzf #{fp} &>/dev/null") + if fp.end_with?('tar.gz') && !system("tar -tzf #{fp} &>/dev/null") puts "Archive #{fp} corrupt, re-fetching..." File.delete fp else puts "Already retrieved intact archive #{fp}..." next end end get url, fp end end def pid_file if fact('operatingsystem') == 'Debian' \ - and fact('lsbmajdistrelease').to_i <= 7 + && fact('lsbmajdistrelease').to_i <= 7 '/var/run/elasticsearch.pid' else '/var/run/elasticsearch/elasticsearch.pid' end end def vault_available? if ENV['CI'] %w[VAULT_ADDR VAULT_APPROLE_ROLE_ID VAULT_APPROLE_SECRET_ID VAULT_PATH].select do |var| ENV[var].nil? end.empty? 
else true end end def http_retry(url) retries ||= 0 - open(url).read -rescue + URI.parse(url).open.read +rescue StandardError retry if (retries += 1) < 3 end # Helper to store arbitrary testing setting values def v RSpec.configuration.v end def semver(version) Gem::Version.new version end def bcrypt(value) BCrypt::Password.create(value) end diff --git a/spec/spec_helper_acceptance.rb b/spec/support/acceptance/elastic.rb similarity index 63% copy from spec/spec_helper_acceptance.rb copy to spec/support/acceptance/elastic.rb index 8431fcc..bb46bfe 100644 --- a/spec/spec_helper_acceptance.rb +++ b/spec/support/acceptance/elastic.rb @@ -1,260 +1,220 @@ -require 'beaker-rspec' -require 'beaker/puppet_install_helper' +# frozen_string_literal: true + require 'securerandom' -require 'thread' -require 'infrataster/rspec' require 'rspec/retry' -require 'vault' require 'simp/beaker_helpers' -include Simp::BeakerHelpers +include Simp::BeakerHelpers # rubocop:disable Style/MixinUsage -require_relative 'spec_helper_tls' -require_relative 'spec_utilities' -require_relative '../lib/puppet_x/elastic/deep_to_i' -require_relative '../lib/puppet_x/elastic/deep_to_s' +require_relative '../../spec_helper_tls' +require_relative '../../spec_utilities' +require_relative '../../../lib/puppet_x/elastic/deep_to_i' +require_relative '../../../lib/puppet_x/elastic/deep_to_s' # def f # RSpec.configuration.fact # end +# FIXME: This value should better not be hardcoded +ENV['ELASTICSEARCH_VERSION'] = '7.10.1' +ENV.delete('BEAKER_debug') + run_puppet_install_helper('agent') unless ENV['BEAKER_provision'] == 'no' RSpec.configure do |c| # General-purpose spec-global variables - c.add_setting :v, :default => {} + c.add_setting :v, default: {} # Puppet debug logging v[:puppet_debug] = ENV['BEAKER_debug'] ? true : false unless ENV['snapshot_version'].nil? v[:snapshot_version] = ENV['snapshot_version'] v[:is_snapshot] = ENV['SNAPSHOT_TEST'] == 'true' end - unless ENV['ELASTICSEARCH_VERSION'].nil? 
and v[:snapshot_version].nil? + unless ENV['ELASTICSEARCH_VERSION'].nil? && v[:snapshot_version].nil? v[:elasticsearch_full_version] = ENV['ELASTICSEARCH_VERSION'] || v[:snapshot_version] v[:elasticsearch_major_version] = v[:elasticsearch_full_version].split('.').first.to_i v[:elasticsearch_package] = {} v[:template] = if v[:elasticsearch_major_version] == 6 - JSON.load(File.new('spec/fixtures/templates/6.x.json')) + JSON.parse(File.read('spec/fixtures/templates/6.x.json')) elsif v[:elasticsearch_major_version] >= 8 - JSON.load(File.new('spec/fixtures/templates/post_8.0.json')) + JSON.parse(File.read('spec/fixtures/templates/post_8.0.json')) else - JSON.load(File.new('spec/fixtures/templates/7.x.json')) + JSON.parse(File.read('spec/fixtures/templates/7.x.json')) end v[:template] = Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(v[:template])) - v[:pipeline] = JSON.load(File.new('spec/fixtures/pipelines/example.json')) - - v[:elasticsearch_plugins] = Dir[ - artifact("*#{v[:elasticsearch_full_version]}.zip", ['plugins']) - ].map do |plugin| - plugin_filename = File.basename(plugin) - plugin_name = plugin_filename.match(/^(?.+)-#{v[:elasticsearch_full_version]}.zip/)[:name] - [ - plugin_name, - { - :path => plugin, - :url => derive_plugin_urls_for(v[:elasticsearch_full_version], [plugin_name]).keys.first - } - ] - end.to_h + v[:pipeline] = JSON.parse(File.read('spec/fixtures/pipelines/example.json')) end - v[:oss] = (not ENV['OSS_PACKAGE'].nil?) and ENV['OSS_PACKAGE'] == 'true' + v[:elasticsearch_plugins] = Dir[ + artifact("*#{v[:elasticsearch_full_version]}.zip", ['plugins']) + ].map do |plugin| + plugin_filename = File.basename(plugin) + plugin_name = plugin_filename.match(%r{^(?.+)-#{v[:elasticsearch_full_version]}.zip})[:name] + [ + plugin_name, + { + path: plugin, + url: derive_plugin_urls_for(v[:elasticsearch_full_version], [plugin_name]).keys.first, + }, + ] + end.to_h + + v[:oss] = !ENV['OSS_PACKAGE'].nil? 
and ENV['OSS_PACKAGE'] == 'true' v[:cluster_name] = SecureRandom.hex(10) # rspec-retry c.display_try_failure_messages = true c.default_sleep_interval = 10 # General-case retry keyword for unstable tests c.around :each, :with_retries do |example| example.run_with_retry retry: 10 end # Helper hook for module cleanup c.after :context, :with_cleanup do - apply_manifest <<-EOS + apply_manifest <<-MANIFEST class { 'elasticsearch': ensure => 'absent', manage_repo => true, oss => #{v[:oss]}, } - file { '/usr/share/elasticsearch/plugin': ensure => 'absent', force => true, recurse => true, require => Class['elasticsearch'], } - EOS + MANIFEST end c.before :context, :with_certificates do @keystore_password = SecureRandom.hex @role = [*('a'..'z')].sample(8).join # Setup TLS cert placement @tls = gen_certs(2, '/tmp') create_remote_file hosts, @tls[:ca][:cert][:path], @tls[:ca][:cert][:pem] @tls[:clients].each do |node| node.each do |_type, params| create_remote_file hosts, params[:path], params[:pem] end end end c.before :context, :with_license do Vault.address = ENV['VAULT_ADDR'] if ENV['CI'] Vault.auth.approle(ENV['VAULT_APPROLE_ROLE_ID'], ENV['VAULT_APPROLE_SECRET_ID']) else Vault.auth.token(ENV['VAULT_TOKEN']) end licenses = Vault.with_retries(Vault::HTTPConnectionError) do Vault.logical.read(ENV['VAULT_PATH']) end.data raise 'No license found!' unless licenses # license = case v[:elasticsearch_major_version] # when 6 # licenses[:v5] # else # licenses[:v7] # end license = licenses[:v7] create_remote_file hosts, '/tmp/license.json', license v[:elasticsearch_license_path] = '/tmp/license.json' end c.after :context, :then_purge do shell 'rm -rf {/usr/share,/etc,/var/lib}/elasticsearch*' end c.before :context, :first_purge do shell 'rm -rf {/usr/share,/etc,/var/lib}/elasticsearch*' end # Provide a hook filter to spit out some ES logs if the example fails. 
c.after(:example, :logs_on_failure) do |example| if example.exception hosts.each do |host| on host, "find / -name '#{v[:cluster_name]}.log' | xargs cat || true" do |result| puts result.formatted_output end end end end end files_dir = ENV['files_dir'] || './spec/fixtures/artifacts' # General bootstrapping steps for each host hosts.each do |host| # # Set the host to 'aio' in order to adopt the puppet-agent style of # # installation, and configure paths/etc. # host[:type] = 'aio' # configure_defaults_on host, 'aio' if fact('os.family') == 'Suse' install_package host, '--force-resolution augeas-devel libxml2-devel ruby-devel' on host, 'gem install ruby-augeas --no-ri --no-rdoc' end v[:ext] = case fact('os.family') when 'Debian' 'deb' else 'rpm' end - if v[:elasticsearch_package] - v[:elasticsearch_package].merge!( - derive_full_package_url( - v[:elasticsearch_full_version], [v[:ext]] - ).flat_map do |url, filename| - [[:url, url], [:filename, filename], [:path, artifact(filename)]] - end.to_h - ) - end - - Infrataster::Server.define(:docker) do |server| - server.address = host[:ip] - server.ssh = host[:ssh].tap { |s| s.delete :forward_agent } - end - Infrataster::Server.define(:container) do |server| - server.address = host[:vm_ip] # this gets ignored anyway - server.from = :docker - end + v[:elasticsearch_package]&.merge!( + derive_full_package_url( + v[:elasticsearch_full_version], [v[:ext]] + ).flat_map do |url, filename| + [[:url, url], [:filename, filename], [:path, artifact(filename)]] + end.to_h + ) end RSpec.configure do |c| if v[:is_snapshot] c.before :suite do scp_to default, "#{files_dir}/elasticsearch-snapshot.#{v[:ext]}", "/tmp/elasticsearch-snapshot.#{v[:ext]}" v[:snapshot_package] = "file:/tmp/elasticsearch-snapshot.#{v[:ext]}" end end c.before :suite do - # Install module and dependencies - install_dev_puppet_module :ignore_list => [ - 'junit' - ] + Beaker::DSL::InstallUtils::ModuleUtils::PUPPET_MODULE_INSTALL_IGNORE - - hosts.each do |host| - modules = 
%w[archive augeas_core datacat java java_ks stdlib elastic_stack] - - dist_module = { - 'Debian' => ['apt'], - 'Suse' => ['zypprepo'], - 'RedHat' => %w[concat yumrepo_core] - }[fact('os.family')] - - modules += dist_module unless dist_module.nil? - - modules.each do |mod| - copy_module_to( - host, - :module_name => mod, - :source => "spec/fixtures/modules/#{mod}" - ) - end - - on(host, 'mkdir -p etc/puppet/modules/another/files/') - - # Apt doesn't update package caches sometimes, ensure we're caught up. - shell 'apt-get update' if fact('os.family') == 'Debian' - end + fetch_archives(derive_artifact_urls_for(ENV['ELASTICSEARCH_VERSION'])) # Use the Java class once before the suite of tests - unless shell('command -v java', :accept_all_exit_codes => true).exit_code.zero? + unless shell('command -v java', accept_all_exit_codes: true).exit_code.zero? java = case fact('os.name') when 'OpenSuSE' 'package => "java-1_8_0-openjdk-headless",' else '' end apply_manifest <<-MANIFEST class { "java" : distribution => "jdk", #{java} } MANIFEST end end end - # # Java 8 is only easy to manage on recent distros # def v5x_capable? 
# (fact('os.family') == 'RedHat' and \ # not (fact('os.name') == 'OracleLinux' and \ # f['os']['release']['major'] == '6')) or \ # f.dig 'os', 'distro', 'codename' == 'xenial' # end diff --git a/spec/templates/001_elasticsearch.yml.erb_spec.rb b/spec/templates/001_elasticsearch.yml.erb_spec.rb index 752c672..7e2f430 100644 --- a/spec/templates/001_elasticsearch.yml.erb_spec.rb +++ b/spec/templates/001_elasticsearch.yml.erb_spec.rb @@ -1,83 +1,85 @@ +# frozen_string_literal: true + require 'spec_helper' require 'yaml' class String def config "### MANAGED BY PUPPET ###\n---#{unindent}" end def unindent - gsub(/^#{scan(/^\s*/).min_by(&:length)}/, '') + gsub(%r{^#{scan(%r{^\s*}).min_by(&:length)}}, '') end end describe 'elasticsearch.yml.erb' do let :harness do TemplateHarness.new( 'templates/etc/elasticsearch/elasticsearch.yml.erb' ) end - it 'should render normal hashes' do + it 'renders normal hashes' do harness.set( '@data', 'node.name' => 'test', 'path.data' => '/mnt/test', 'discovery.zen.ping.unicast.hosts' => %w[ host1 host2 ] ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( discovery.zen.ping.unicast.hosts: - host1 - host2 node.name: test path.data: /mnt/test ).config)) end - it 'should render arrays of hashes correctly' do + it 'renders arrays of hashes correctly' do harness.set( '@data', 'data' => [ { 'key' => 'value0', 'other_key' => 'othervalue0' }, { 'key' => 'value1', 'other_key' => 'othervalue1' } ] ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( data: - key: value0 other_key: othervalue0 - key: value1 other_key: othervalue1 ).config)) end - it 'should quote IPv6 loopback addresses' do + it 'quotes IPv6 loopback addresses' do harness.set( '@data', 'network.host' => ['::', '[::]'] ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( network.host: - "::" - 
"[::]" ).config)) end - it 'should not quote numeric values' do + it 'does not quote numeric values' do harness.set( '@data', 'some.setting' => '10' ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( some.setting: 10 ).config)) end end diff --git a/spec/unit/facter/es_facts_spec.rb b/spec/unit/facter/es_facts_spec.rb index d6bfe9e..ce6f1a3 100644 --- a/spec/unit/facter/es_facts_spec.rb +++ b/spec/unit/facter/es_facts_spec.rb @@ -1,107 +1,115 @@ +# frozen_string_literal: true + require 'spec_helper' +require 'spec_utilities' + require 'webmock/rspec' +def fixture_path + File.expand_path(File.join(__dir__, '..', '..', 'fixtures')) +end + describe 'elasticsearch facts' do - before(:each) do - stub_request(:get, 'http://localhost:9200/') - .with(:headers => { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }) - .to_return( - :status => 200, - :body => File.read( + before do + stub_request(:get, 'http://localhost:9200/'). + with(headers: { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }). + to_return( + status: 200, + body: File.read( File.join( fixture_path, 'facts/Warlock-root.json' ) ) ) - stub_request(:get, 'http://localhost:9200/_nodes/Warlock') - .with(:headers => { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }) - .to_return( - :status => 200, - :body => File.read( + stub_request(:get, 'http://localhost:9200/_nodes/Warlock'). + with(headers: { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }). + to_return( + status: 200, + body: File.read( File.join( fixture_path, 'facts/Warlock-nodes.json' ) ) ) - allow(File) - .to receive(:directory?) - .and_return(true) + allow(File). + to receive(:directory?). + and_return(true) - allow(File) - .to receive(:readable?) - .and_return(true) + allow(File). + to receive(:readable?). + and_return(true) - allow(YAML) - .to receive(:load_file) - .with('/etc/elasticsearch/elasticsearch.yml', any_args) - .and_return({}) + allow(YAML). + to receive(:load_file). 
+ with('/etc/elasticsearch/elasticsearch.yml', any_args). + and_return({}) - require 'lib/facter/es_facts' + require_relative '../../../lib/facter/es_facts' end describe 'elasticsearch_port' do it 'finds listening port' do - expect(Facter.fact(:elasticsearch_port).value) - .to eq('9200') + expect(Facter.fact(:elasticsearch_port).value). + to eq('9200') end end describe 'instance' do it 'returns the node name' do expect(Facter.fact(:elasticsearch_name).value).to eq('Warlock') end it 'returns the node version' do expect(Facter.fact(:elasticsearch_version).value).to eq('1.4.2') end it 'returns the cluster name' do - expect(Facter.fact(:elasticsearch_cluster_name).value) - .to eq('elasticsearch') + expect(Facter.fact(:elasticsearch_cluster_name).value). + to eq('elasticsearch') end it 'returns the node ID' do - expect(Facter.fact(:elasticsearch_node_id).value) - .to eq('yQAWBO3FS8CupZnSvAVziQ') + expect(Facter.fact(:elasticsearch_node_id).value). + to eq('yQAWBO3FS8CupZnSvAVziQ') end it 'returns the mlockall boolean' do expect(Facter.fact(:elasticsearch_mlockall).value).to be_falsy end it 'returns installed plugins' do expect(Facter.fact(:elasticsearch_plugins).value).to eq('kopf') end describe 'kopf plugin' do it 'returns the correct version' do - expect(Facter.fact(:elasticsearch_plugin_kopf_version).value) - .to eq('1.4.3') + expect(Facter.fact(:elasticsearch_plugin_kopf_version).value). + to eq('1.4.3') end it 'returns the correct description' do - expect(Facter.fact(:elasticsearch_plugin_kopf_description).value) - .to eq('kopf - simple web administration tool for ElasticSearch') + expect(Facter.fact(:elasticsearch_plugin_kopf_description).value). + to eq('kopf - simple web administration tool for ElasticSearch') end it 'returns the plugin URL' do - expect(Facter.fact(:elasticsearch_plugin_kopf_url).value) - .to eq('/_plugin/kopf/') + expect(Facter.fact(:elasticsearch_plugin_kopf_url).value). 
+ to eq('/_plugin/kopf/') end it 'returns the plugin JVM boolean' do - expect(Facter.fact(:elasticsearch_plugin_kopf_jvm).value) - .to be_falsy + expect(Facter.fact(:elasticsearch_plugin_kopf_jvm).value). + to be_falsy end it 'returns the plugin _site boolean' do - expect(Facter.fact(:elasticsearch_plugin_kopf_site).value) - .to be_truthy + expect(Facter.fact(:elasticsearch_plugin_kopf_site).value). + to be_truthy end - end # of describe plugin - end # of describe instance -end # of describe elasticsearch facts + end + end +end diff --git a/spec/unit/provider/elastic_yaml_spec.rb b/spec/unit/provider/elastic_yaml_spec.rb index 15c18d0..d5dce6d 100644 --- a/spec/unit/provider/elastic_yaml_spec.rb +++ b/spec/unit/provider/elastic_yaml_spec.rb @@ -1,66 +1,68 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', 'lib')) require 'spec_helper_rspec' require 'puppet/provider/elastic_yaml' class String def flattened split("\n").reject(&:empty?).map(&:strip).join("\n").strip end end describe Puppet::Provider::ElasticYaml do subject do described_class.tap do |o| o.instance_eval { @metadata = :metadata } end end let :unsorted_hash do [{ - :name => 'role', - :metadata => { + name: 'role', + metadata: { 'zeta' => { - 'zeta' => 5, + 'zeta' => 5, 'gamma' => 4, 'delta' => 3, - 'beta' => 2, + 'beta' => 2, 'alpha' => 1 }, 'phi' => [{ - 'zeta' => 3, + 'zeta' => 3, 'gamma' => 2, 'alpha' => 1 }], - 'beta' => 'foobaz', + 'beta' => 'foobaz', 'gamma' => 1, 'alpha' => 'foobar' } }] end it { is_expected.to respond_to :to_file } describe 'to_file' do it 'returns sorted yaml' do expect(described_class.to_file(unsorted_hash).flattened).to( eq(%( role: alpha: foobar beta: foobaz gamma: 1 phi: - alpha: 1 gamma: 2 zeta: 3 zeta: alpha: 1 beta: 2 delta: 3 gamma: 4 zeta: 5 ).flattened) ) end end end diff --git a/spec/unit/provider/elasticsearch_index/ruby_spec.rb b/spec/unit/provider/elasticsearch_index/ruby_spec.rb index d774bdf..dfccc7a 100644 --- 
a/spec/unit/provider/elasticsearch_index/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_index/ruby_spec.rb @@ -1,126 +1,128 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' -describe Puppet::Type.type(:elasticsearch_index).provider(:ruby) do +describe Puppet::Type.type(:elasticsearch_index).provider(:ruby) do # rubocop:disable RSpec/MultipleMemoizedHelpers let(:name) { 'test-index' } - let(:example_1) do + let(:example1) do { - :name => 'index-one', - :ensure => :present, - :provider => :ruby, - :settings => { + name: 'index-one', + ensure: :present, + provider: :ruby, + settings: { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 1, 'number_of_shards' => 5, 'provided_name' => 'a', 'routing' => { 'allocation' => { 'include' => { 'size' => 'big' } } }, 'store' => { 'type' => 'niofs' }, 'uuid' => 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => 5_020_199 } } } } end - let(:json_1) do + let(:json1) do { 'index-one' => { 'settings' => { 'index' => { 'creation_date' => '1487354196301', 'number_of_replicas' => '1', 'number_of_shards' => '5', 'provided_name' => 'a', 'routing' => { 'allocation' => { 'include' => { 'size' => 'big' } } }, 'store' => { 'type' => 'niofs' }, 'uuid' => 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => '5020199' } } } } } end - let(:example_2) do + let(:example2) do { - :name => 'index-two', - :ensure => :present, - :provider => :ruby, - :settings => { + name: 'index-two', + ensure: :present, + provider: :ruby, + settings: { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 1, 'number_of_shards' => 5, 'provided_name' => 'a', 'uuid' => 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => 5_020_199 } } } } end - let(:json_2) do + let(:json2) do { 'index-two' => { 'settings' => { 'index' => { 'creation_date' => '1487354196301', 'number_of_replicas' => '1', 'number_of_shards' => '5', 'provided_name' => 'a', 'uuid' 
=> 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => '5020199' } } } } } end let(:bare_resource) do JSON.dump( 'index' => { 'number_of_replicas' => 0 } ) end let(:resource) { Puppet::Type::Elasticsearch_index.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => name, - :settings => { + name: name, + settings: { 'index' => { 'number_of_replicas' => '0' } } } end include_examples 'REST API', 'all/_settings', 'test-index/_settings' end diff --git a/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb b/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb index a679b50..355350b 100644 --- a/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb +++ b/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb @@ -1,161 +1,192 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' shared_examples 'keystore instance' do |instance| describe "instance #{instance}" do subject { described_class.instances.find { |x| x.name == instance } } - it { expect(subject.exists?).to be_truthy } + it { expect(subject).to be_exists } it { expect(subject.name).to eq(instance) } - it { expect(subject.settings) - .to eq(['node.name', 'cloud.aws.access_key']) } + + it { + expect(subject.settings). + to eq(['node.name', 'cloud.aws.access_key']) + } end end describe Puppet::Type.type(:elasticsearch_keystore).provider(:elasticsearch_keystore) do let(:executable) { '/usr/share/elasticsearch/bin/elasticsearch-keystore' } let(:instances) { [] } before do Facter.clear Facter.add('osfamily') { setcode { 'Debian' } } - allow(described_class) - .to receive(:command) - .with(:keystore) - .and_return(executable) + allow(described_class). + to receive(:command). + with(:keystore). + and_return(executable) - allow(File).to receive(:exist?) - .with('/etc/elasticsearch/scripts/elasticsearch.keystore') - .and_return(false) + allow(File).to receive(:exist?). 
+ with('/etc/elasticsearch/scripts/elasticsearch.keystore'). + and_return(false) end describe 'instances' do before do - allow(Dir).to receive(:[]) - .with('/etc/elasticsearch/*') - .and_return((['scripts'] + instances).map do |directory| + allow(Dir).to receive(:[]). + with('/etc/elasticsearch/*'). + and_return((['scripts'] + instances).map do |directory| "/etc/elasticsearch/#{directory}" end) instances.each do |instance| instance_dir = "/etc/elasticsearch/#{instance}" defaults_file = "/etc/default/elasticsearch-#{instance}" - allow(File).to receive(:exist?) - .with("#{instance_dir}/elasticsearch.keystore") - .and_return(true) + allow(File).to receive(:exist?). + with("#{instance_dir}/elasticsearch.keystore"). + and_return(true) - expect(described_class) - .to receive(:execute) - .with( + allow(described_class). + to receive(:execute). + with( [executable, 'list'], - :custom_environment => { + custom_environment: { 'ES_INCLUDE' => defaults_file, 'ES_PATH_CONF' => "/etc/elasticsearch/#{instance}" }, - :uid => 'elasticsearch', - :gid => 'elasticsearch', - :failonfail => true - ) - .and_return( + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true + ). 
+ and_return( Puppet::Util::Execution::ProcessOutput.new( "node.name\ncloud.aws.access_key\n", 0 ) ) end end - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to(:instances) end context 'without any keystores' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.instances.size).to eq(0) end end context 'with one instance' do let(:instances) { ['es-01'] } it { expect(described_class.instances.length).to eq(instances.length) } + include_examples 'keystore instance', 'es-01' end context 'with multiple instances' do - let(:instances) { ['es-01', 'es-02'] } + let(:instances) { %w[es-01 es-02] } it { expect(described_class.instances.length).to eq(instances.length) } + include_examples 'keystore instance', 'es-01' include_examples 'keystore instance', 'es-02' end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end describe 'flush' do - let(:provider) { described_class.new(:name => 'es-03') } + let(:provider) { described_class.new(name: 'es-03') } let(:resource) do Puppet::Type.type(:elasticsearch_keystore).new( - :name => 'es-03', - :provider => provider + name: 'es-03', + provider: provider ) end it 'creates the keystore' do - expect(described_class).to( - receive(:execute) - .with( + allow(described_class).to( + receive(:execute). + with( [executable, 'create'], - :custom_environment => { + custom_environment: { 'ES_INCLUDE' => '/etc/default/elasticsearch-es-03', 'ES_PATH_CONF' => '/etc/elasticsearch/es-03' }, - :uid => 'elasticsearch', - :gid => 'elasticsearch', - :failonfail => true - ) - .and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true + ). 
+ and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) ) resource[:ensure] = :present provider.create provider.flush + expect(described_class).to( + have_received(:execute). + with( + [executable, 'create'], + custom_environment: { + 'ES_INCLUDE' => '/etc/default/elasticsearch-es-03', + 'ES_PATH_CONF' => '/etc/elasticsearch/es-03' + }, + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true + ) + ) end it 'deletes the keystore' do - expect(File).to( - receive(:delete) - .with(File.join(%w[/ etc elasticsearch es-03 elasticsearch.keystore])) + allow(File).to( + receive(:delete). + with(File.join(%w[/ etc elasticsearch es-03 elasticsearch.keystore])) ) resource[:ensure] = :absent provider.destroy provider.flush + expect(File).to( + have_received(:delete). + with(File.join(%w[/ etc elasticsearch es-03 elasticsearch.keystore])) + ) end it 'updates settings' do settings = { 'cloud.aws.access_key' => 'AKIAFOOBARFOOBAR', 'cloud.aws.secret_key' => 'AKIAFOOBARFOOBAR' } settings.each do |setting, value| - expect(provider.class).to( - receive(:run_keystore) - .with(['add', '--force', '--stdin', setting], 'es-03', '/etc/elasticsearch', value) - .and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) + allow(provider.class).to( + receive(:run_keystore). + with(['add', '--force', '--stdin', setting], 'es-03', '/etc/elasticsearch', value). + and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) ) end # Note that the settings hash is passed in wrapped in an array to mimic # the behavior in real-world puppet runs. resource[:ensure] = :present resource[:settings] = [settings] provider.settings = [settings] provider.flush + + settings.each do |setting, value| + expect(provider.class).to( + have_received(:run_keystore). 
+ with(['add', '--force', '--stdin', setting], 'es-03', '/etc/elasticsearch', value) + ) + end end - end # of describe flush -end # of describe Puppet::Type elasticsearch_keystore + end +end diff --git a/spec/unit/provider/elasticsearch_license/xpack_spec.rb b/spec/unit/provider/elasticsearch_license/xpack_spec.rb index 978b3bd..00691ac 100644 --- a/spec/unit/provider/elasticsearch_license/xpack_spec.rb +++ b/spec/unit/provider/elasticsearch_license/xpack_spec.rb @@ -1,61 +1,63 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' -describe Puppet::Type.type(:elasticsearch_license).provider(:xpack) do +describe Puppet::Type.type(:elasticsearch_license).provider(:xpack) do # rubocop:disable RSpec/MultipleMemoizedHelpers let(:name) { 'xpack' } - let(:example_1) do + let(:example1) do { - :name => 'xpack', - :ensure => :present, - :provider => :xpack, - :content => { + name: 'xpack', + ensure: :present, + provider: :xpack, + content: { 'license' => { - 'status' => 'active', - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date' => '2018-02-22T23:12:05.550Z', - 'issue_date_in_millis' => 1_519_341_125_550, - 'expiry_date' => '2018-03-24T23:12:05.550Z', + 'status' => 'active', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date' => '2018-02-22T23:12:05.550Z', + 'issue_date_in_millis' => 1_519_341_125_550, + 'expiry_date' => '2018-03-24T23:12:05.550Z', 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1_000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1_000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'start_date_in_millis' => 1_513_814_400_000 } } } end - let(:json_1) do + let(:json1) do { 'license' => { - 'status' => 'active', - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date' => 
'2018-02-22T23:12:05.550Z', - 'issue_date_in_millis' => '1519341125550', - 'expiry_date' => '2018-03-24T23:12:05.550Z', + 'status' => 'active', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date' => '2018-02-22T23:12:05.550Z', + 'issue_date_in_millis' => '1519341125550', + 'expiry_date' => '2018-03-24T23:12:05.550Z', 'expiry_date_in_millis' => '1521933125550', - 'max_nodes' => '1000', - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'start_date_in_millis' => '1513814400000' + 'max_nodes' => '1000', + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'start_date_in_millis' => '1513814400000' } } end let(:resource) { Puppet::Type::Elasticsearch_index.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => name, - :settings => { + name: name, + settings: { 'index' => { 'number_of_replicas' => 0 } } } end include_examples 'REST API', 'xpack/license', nil, true end diff --git a/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb b/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb index c4f5362..9666f9a 100644 --- a/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb @@ -1,96 +1,98 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' -describe Puppet::Type.type(:elasticsearch_pipeline).provider(:ruby) do - let(:example_1) do +describe Puppet::Type.type(:elasticsearch_pipeline).provider(:ruby) do # rubocop:disable RSpec/MultipleMemoizedHelpers + let(:example1) do { - :name => 'foo', - :ensure => :present, - :provider => :ruby, - :content => { + name: 'foo', + ensure: :present, + provider: :ruby, + content: { 'description' => 'Sets the foo field to "bar"', 'processors' => [{ 'set' => { 'field' => 'foo', 'value' => 'bar' } }] } } end - let(:json_1) do + let(:json1) do { 'foo' => { 'description' => 'Sets the foo field to "bar"', 'processors' => [{ 'set' => 
{ 'field' => 'foo', 'value' => 'bar' } }] } } end - let(:example_2) do + let(:example2) do { - :name => 'baz', - :ensure => :present, - :provider => :ruby, - :content => { + name: 'baz', + ensure: :present, + provider: :ruby, + content: { 'description' => 'A pipeline that never gives you up', 'processors' => [{ 'set' => { 'field' => 'firstname', 'value' => 'rick' } }, { 'set' => { 'field' => 'lastname', 'value' => 'astley' } }] } } end - let(:json_2) do + let(:json2) do { 'baz' => { 'description' => 'A pipeline that never gives you up', 'processors' => [{ 'set' => { 'field' => 'firstname', 'value' => 'rick' } }, { 'set' => { 'field' => 'lastname', 'value' => 'astley' } }] } } end let(:bare_resource) do JSON.dump( 'description' => 'Empty pipeline', 'processors' => [] ) end let(:resource) { Puppet::Type::Elasticsearch_pipeline.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => 'foo', - :content => { + name: 'foo', + content: { 'description' => 'Empty pipeline', 'processors' => [] } } end include_examples 'REST API', 'ingest/pipeline', '_ingest/pipeline/foo' end diff --git a/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb b/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb index d3e3796..39771af 100644 --- a/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb @@ -1,23 +1,25 @@ +# frozen_string_literal: true + require_relative 'shared_examples' provider_class = Puppet::Type.type(:elasticsearch_plugin).provider(:elasticsearch_plugin) describe provider_class do let(:resource_name) { 'lmenezes/elasticsearch-kopf' } let(:resource) do Puppet::Type.type(:elasticsearch_plugin).new( - :name => resource_name, - :ensure => :present, - :provider => 'elasticsearch_plugin' + name: resource_name, + ensure: :present, + provider: 'elasticsearch_plugin' ) end let(:provider) do provider = provider_class.new provider.resource = resource provider end let(:shortname) { 
provider.plugin_name(resource_name) } let(:klass) { provider_class } include_examples 'plugin provider', '7.0.0' end diff --git a/spec/unit/provider/elasticsearch_plugin/shared_examples.rb b/spec/unit/provider/elasticsearch_plugin/shared_examples.rb index 094390a..6053860 100644 --- a/spec/unit/provider/elasticsearch_plugin/shared_examples.rb +++ b/spec/unit/provider/elasticsearch_plugin/shared_examples.rb @@ -1,147 +1,187 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' shared_examples 'plugin provider' do |version| describe "elasticsearch #{version}" do - before(:each) do + before do allow(File).to receive(:open) allow(provider).to receive(:es_version).and_return version end describe 'setup' do it 'installs with default parameters' do - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['install', resource_name].tap do |args| - if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 - args.insert 1, '--batch' - end + args.insert 1, '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 end ) provider.create + expect(provider).to have_received(:plugin).with( + ['install', resource_name].tap do |args| + args.insert 1, '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 + end + ) end it 'installs via URLs' do resource[:url] = 'http://url/to/my/plugin.zip' - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['install'] + ['http://url/to/my/plugin.zip'].tap do |args| args.unshift('kopf', '--url') if version.start_with? '1' - if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 - args.unshift '--batch' - end - - args + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 end ) provider.create + expect(provider).to have_received(:plugin).with( + ['install'] + ['http://url/to/my/plugin.zip'].tap do |args| + args.unshift('kopf', '--url') if version.start_with? 
'1' + + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 + end + ) end it 'installs with a local file' do resource[:source] = '/tmp/plugin.zip' - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['install'] + ['file:///tmp/plugin.zip'].tap do |args| args.unshift('kopf', '--url') if version.start_with? '1' - if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 - args.unshift '--batch' - end - - args + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 end ) provider.create + expect(provider).to have_received(:plugin).with( + ['install'] + ['file:///tmp/plugin.zip'].tap do |args| + args.unshift('kopf', '--url') if version.start_with? '1' + + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 + end + ) end describe 'proxying' do it 'installs behind a proxy' do resource[:proxy] = 'http://localhost:3128' - expect(provider) - .to receive(:plugin) - .with([ - '-Dhttp.proxyHost=localhost', - '-Dhttp.proxyPort=3128', - '-Dhttps.proxyHost=localhost', - '-Dhttps.proxyPort=3128', - 'install', - '--batch', - resource_name - ]) + allow(provider). + to receive(:plugin). + with([ + '-Dhttp.proxyHost=localhost', + '-Dhttp.proxyPort=3128', + '-Dhttps.proxyHost=localhost', + '-Dhttps.proxyPort=3128', + 'install', + '--batch', + resource_name + ]) provider.create + expect(provider). + to have_received(:plugin). 
+ with([ + '-Dhttp.proxyHost=localhost', + '-Dhttp.proxyPort=3128', + '-Dhttps.proxyHost=localhost', + '-Dhttps.proxyPort=3128', + 'install', + '--batch', + resource_name + ]) end it 'uses authentication credentials' do resource[:proxy] = 'http://elastic:password@es.local:8080' - expect(provider) - .to receive(:plugin) - .with([ - '-Dhttp.proxyHost=es.local', - '-Dhttp.proxyPort=8080', - '-Dhttp.proxyUser=elastic', - '-Dhttp.proxyPassword=password', - '-Dhttps.proxyHost=es.local', - '-Dhttps.proxyPort=8080', - '-Dhttps.proxyUser=elastic', - '-Dhttps.proxyPassword=password', - 'install', - '--batch', - resource_name - ]) + allow(provider). + to receive(:plugin). + with([ + '-Dhttp.proxyHost=es.local', + '-Dhttp.proxyPort=8080', + '-Dhttp.proxyUser=elastic', + '-Dhttp.proxyPassword=password', + '-Dhttps.proxyHost=es.local', + '-Dhttps.proxyPort=8080', + '-Dhttps.proxyUser=elastic', + '-Dhttps.proxyPassword=password', + 'install', + '--batch', + resource_name + ]) provider.create + expect(provider). + to have_received(:plugin). 
+ with([ + '-Dhttp.proxyHost=es.local', + '-Dhttp.proxyPort=8080', + '-Dhttp.proxyUser=elastic', + '-Dhttp.proxyPassword=password', + '-Dhttps.proxyHost=es.local', + '-Dhttps.proxyPort=8080', + '-Dhttps.proxyUser=elastic', + '-Dhttps.proxyPassword=password', + 'install', + '--batch', + resource_name + ]) end end describe 'configdir' do it 'sets the ES_PATH_CONF env var' do resource[:configdir] = '/etc/elasticsearch' expect(provider.with_environment do ENV['ES_PATH_CONF'] end).to eq('/etc/elasticsearch') end end - end # of setup + end describe 'java_opts' do it 'uses authentication credentials' do resource[:java_opts] = ['-Des.plugins.staging=4a2ffaf5'] expect(provider.with_environment do ENV['ES_JAVA_OPTS'] end).to eq('-Des.plugins.staging=4a2ffaf5') end end describe 'java_home' do it 'sets the JAVA_HOME env var' do resource[:java_home] = '/opt/foo' expect(provider.with_environment do ENV['JAVA_HOME'] end).to eq('/opt/foo') end end describe 'java_home unset' do elasticsearch_java_home = '/usr/share/elasticsearch/jdk' it 'defaults to the elasticsearch bundled JDK' do resource[:java_home] = '' expect(provider.with_environment do ENV['JAVA_HOME'] end).to eq(elasticsearch_java_home) end end describe 'plugin_name' do let(:resource_name) { 'appbaseio/dejaVu' } it 'maintains mixed-case names' do expect(provider.plugin_path).to include('dejaVu') end end describe 'removal' do it 'uninstalls the plugin' do - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['remove', resource_name.split('-').last] ) provider.destroy + expect(provider).to have_received(:plugin).with( + ['remove', resource_name.split('-').last] + ) end end end end diff --git a/spec/unit/provider/elasticsearch_role/ruby_spec.rb b/spec/unit/provider/elasticsearch_role/ruby_spec.rb index bbaafed..2489aeb 100644 --- a/spec/unit/provider/elasticsearch_role/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_role/ruby_spec.rb @@ -1,59 +1,61 @@ +# frozen_string_literal: true + 
require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_role).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'with no roles' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one role' do - it 'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( admin: cluster: all indices: '*': all ))[0]).to eq( - :ensure => :present, - :name => 'admin', - :privileges => { + ensure: :present, + name: 'admin', + privileges: { 'cluster' => 'all', 'indices' => { '*' => 'all' } } ) end end context 'with multiple roles' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin: cluster: all indices: '*': all user: indices: '*': read power_user: cluster: monitor indices: '*': all )).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb b/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb index f2af032..9dff8db 100644 --- a/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb @@ -1,51 +1,53 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_role_mapping).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'with no roles' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one role' do - it 
'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( admin: - "cn=users,dc=example,dc=com" ))[0]).to eq( - :ensure => :present, - :name => 'admin', - :mappings => [ + ensure: :present, + name: 'admin', + mappings: [ 'cn=users,dc=example,dc=com' ] ) end end context 'with multiple roles' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin: - "cn=users,dc=example,dc=com" user: - "cn=users,dc=example,dc=com" - "cn=admins,dc=example,dc=com" - "cn=John Doe,cn=other users,dc=example,dc=com" power_user: - "cn=admins,dc=example,dc=com" )).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb b/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb index 3a05f07..684a1e0 100644 --- a/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb @@ -1,72 +1,74 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' -describe Puppet::Type.type(:elasticsearch_snapshot_repository).provider(:ruby) do - let(:example_1) do +describe Puppet::Type.type(:elasticsearch_snapshot_repository).provider(:ruby) do # rubocop:disable RSpec/MultipleMemoizedHelpers + let(:example1) do { - :name => 'foobar1', - :ensure => :present, - :provider => :ruby, - :location => '/bak1', - :type => 'fs', - :compress => true + name: 'foobar1', + ensure: :present, + provider: :ruby, + location: '/bak1', + type: 'fs', + compress: true } end - let(:json_1) do + let(:json1) do { 'foobar1' => { 'type' => 'fs', 'settings' => { 'compress' => true, 'location' => '/bak1' } } } end - let(:example_2) do + let(:example2) do { - 
:name => 'foobar2', - :ensure => :present, - :provider => :ruby, - :location => '/bak2', - :type => 'fs', - :compress => true + name: 'foobar2', + ensure: :present, + provider: :ruby, + location: '/bak2', + type: 'fs', + compress: true } end - let(:json_2) do + let(:json2) do { 'foobar2' => { 'type' => 'fs', 'settings' => { 'compress' => true, 'location' => '/bak2' } } } end let(:bare_resource) do JSON.dump( 'type' => 'fs', 'settings' => { 'compress' => true, 'location' => '/backups' } ) end let(:resource) { Puppet::Type::Elasticsearch_snapshot_repository.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => 'backup', - :type => 'fs', - :compress => true, - :location => '/backups' + name: 'backup', + type: 'fs', + compress: true, + location: '/backups' } end include_examples 'REST API', 'snapshot', '_snapshot/backup' end diff --git a/spec/unit/provider/elasticsearch_template/ruby_spec.rb b/spec/unit/provider/elasticsearch_template/ruby_spec.rb index 2e7aff9..848f57f 100644 --- a/spec/unit/provider/elasticsearch_template/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_template/ruby_spec.rb @@ -1,79 +1,81 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' -describe Puppet::Type.type(:elasticsearch_template).provider(:ruby) do - let(:example_1) do +describe Puppet::Type.type(:elasticsearch_template).provider(:ruby) do # rubocop:disable RSpec/MultipleMemoizedHelpers + let(:example1) do { - :name => 'foobar1', - :ensure => :present, - :provider => :ruby, - :content => { + name: 'foobar1', + ensure: :present, + provider: :ruby, + content: { 'aliases' => {}, 'mappings' => {}, 'settings' => {}, 'template' => 'foobar1-*', 'order' => 1 } } end - let(:json_1) do + let(:json1) do { 'foobar1' => { 'aliases' => {}, 'mappings' => {}, 'order' => 1, 'settings' => {}, 'template' => 'foobar1-*' } } end - let(:example_2) do + let(:example2) do { - :name => 'foobar2', - :ensure 
=> :present, - :provider => :ruby, - :content => { + name: 'foobar2', + ensure: :present, + provider: :ruby, + content: { 'aliases' => {}, 'mappings' => {}, 'settings' => {}, 'template' => 'foobar2-*', 'order' => 2 } } end - let(:json_2) do + let(:json2) do { 'foobar2' => { 'aliases' => {}, 'mappings' => {}, 'order' => 2, 'settings' => {}, 'template' => 'foobar2-*' } } end let(:bare_resource) do JSON.dump( 'order' => 0, 'aliases' => {}, 'mappings' => {}, 'template' => 'fooindex-*' ) end let(:resource) { Puppet::Type::Elasticsearch_template.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => 'foo', - :content => { + name: 'foo', + content: { 'template' => 'fooindex-*' } } end include_examples 'REST API', 'template', '_template/foo' end diff --git a/spec/unit/provider/elasticsearch_user/ruby_spec.rb b/spec/unit/provider/elasticsearch_user/ruby_spec.rb index 627c854..d5c8a9a 100644 --- a/spec/unit/provider/elasticsearch_user/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_user/ruby_spec.rb @@ -1,63 +1,65 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_user).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'without users' do - before do - expect(described_class).to receive(:command_with_path).with('list').and_return( + it 'returns no resources' do + allow(described_class).to receive(:command_with_path).with('list').and_return( 'No users found' ) - end - it 'should return no resources' do expect(described_class.instances.size).to eq(0) + expect(described_class).to have_received(:command_with_path).with('list') end end context 'with one user' do - before do - expect(described_class).to receive(:command_with_path).with('list').and_return( + it 'returns one resource' do + allow(described_class).to receive(:command_with_path).with('list').and_return( 
'elastic : admin*,power_user' ) - end - it 'should return one resource' do expect(described_class.instances[0].instance_variable_get( - '@property_hash' - )).to eq( - :ensure => :present, - :name => 'elastic', - :provider => :ruby - ) + '@property_hash' + )).to eq( + ensure: :present, + name: 'elastic', + provider: :ruby + ) + expect(described_class).to have_received(:command_with_path).with('list') end end context 'with multiple users' do - before do - expect(described_class).to receive( + it 'returns three resources' do + allow(described_class).to receive( :command_with_path ).with('list').and_return( <<-EOL elastic : admin* logstash : user kibana : kibana EOL ) - end - it 'should return three resources' do expect(described_class.instances.length).to eq(3) + + expect(described_class).to have_received( + :command_with_path + ).with('list') end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb b/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb index 78dfc65..4adda7d 100644 --- a/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb @@ -1,44 +1,46 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_user_file).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'without users' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one user' do - it 'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( elastic:$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C - ).gsub(/^\s+/, 
''))[0]).to eq( - :name => 'elastic', - :hashed_password => '$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C', - :record_type => :ruby + ).gsub(%r{^\s+}, ''))[0]).to eq( + name: 'elastic', + hashed_password: '$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C', + record_type: :ruby ) end end context 'with multiple users' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin:$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C user:$2a$10$caYr8GhYeJ2Yo0yEhQhQvOjLSwt8Lm6MKQWx8WSnZ/L/IL5sGdQFu kibana:$2a$10$daYr8GhYeJ2Yo0yEhQhQvOjLSwt8Lm6MKQWx8WSnZ/L/IL5sGdQFu - ).gsub(/^\s+/, '')).length).to eq(3) + ).gsub(%r{^\s+}, '')).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb b/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb index 2effbd8..0fa6ed2 100644 --- a/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb @@ -1,44 +1,46 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' -describe Puppet::Type.type(:elasticsearch_user_roles) - .provider(:ruby) do +describe Puppet::Type.type(:elasticsearch_user_roles). 
+ provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'without roles' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one user' do - it 'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( admin:elastic power_user:elastic ))[0]).to eq( - :name => 'elastic', - :roles => %w[admin power_user] + name: 'elastic', + roles: %w[admin power_user] ) end end context 'with multiple users' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin:elastic logstash:user kibana:kibana )).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/puppet_x/elastic/hash_spec.rb b/spec/unit/puppet_x/elastic/hash_spec.rb index 1baae97..3e3f132 100644 --- a/spec/unit/puppet_x/elastic/hash_spec.rb +++ b/spec/unit/puppet_x/elastic/hash_spec.rb @@ -1,25 +1,26 @@ +# frozen_string_literal: true $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', 'lib')) require 'spec_helper_rspec' require 'puppet_x/elastic/hash' describe Puppet_X::Elastic::SortedHash do subject { { 'foo' => 1, 'bar' => 2 } } describe 'each_pair' do it { is_expected.to respond_to :each_pair } it 'yields values' do expect { |b| subject.each_pair(&b) }.to yield_control.exactly(2).times end it 'returns an Enumerator if not passed a block' do expect(subject.each_pair).to be_an_instance_of(Enumerator) end it 'returns values' do subject.each_pair.map { |k, v| [k, v] }.should == subject.to_a end end end diff --git a/spec/unit/type/elasticsearch_index_spec.rb b/spec/unit/type/elasticsearch_index_spec.rb index 70cb1e3..b96d2ab 100644 
--- a/spec/unit/type/elasticsearch_index_spec.rb +++ b/spec/unit/type/elasticsearch_index_spec.rb @@ -1,67 +1,69 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_index) do let(:resource_name) { 'test-index' } include_examples 'REST API types', 'index', :settings describe 'settings' do let(:resource) do described_class.new( - :name => resource_name, - :ensure => 'present', - :settings => { + name: resource_name, + ensure: 'present', + settings: { 'index' => { 'number_of_replicas' => '0' } } ) end let(:settings) { resource.property(:settings) } describe 'insync?' do describe 'synced properties' do let(:is_settings) do { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 0, 'number_of_shards' => 5, 'provided_name' => 'a', 'uuid' => 'vtjrcgyerviqllrakslrsw', 'version' => { 'created' => 5_020_199 } } } end it 'only enforces defined settings' do - expect(settings.insync?(is_settings)).to be_truthy + expect(settings).to be_insync(is_settings) end end describe 'out-of-sync properties' do let(:is_settings) do { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 1, 'number_of_shards' => 5, 'provided_name' => 'a', 'uuid' => 'vtjrcgyerviqllrakslrsw', 'version' => { 'created' => 5_020_199 } } } end it 'detects out-of-sync nested values' do - expect(settings.insync?(is_settings)).to be_falsy + expect(settings).not_to be_insync(is_settings) end end end end end diff --git a/spec/unit/type/elasticsearch_keystore_spec.rb b/spec/unit/type/elasticsearch_keystore_spec.rb index 79bfa98..b844937 100644 --- a/spec/unit/type/elasticsearch_keystore_spec.rb +++ b/spec/unit/type/elasticsearch_keystore_spec.rb @@ -1,93 +1,103 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_keystore) do let(:resource_name) { 'es-01' } describe 'validating attributes' do - [:configdir, :instance, 
:purge].each do |param| - it "should have a `#{param}` parameter" do + %i[configdir instance purge].each do |param| + it "has a `#{param}` parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :settings].each do |prop| - it "should have a #{prop} property" do + %i[ensure settings].each do |prop| + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :instance as its namevar' do + it 'has :instance as its namevar' do expect(described_class.key_attributes).to eq([:instance]) end end - end # of describe validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'settings' do [{ 'node.name' => 'foo' }, ['node.name', 'node.data']].each do |setting| it "accepts #{setting.class}s" do - expect { described_class.new( - :name => resource_name, - :settings => setting - ) }.to_not raise_error + expect do + described_class.new( + name: resource_name, + 
settings: setting + ) + end.not_to raise_error end end describe 'insync' do it 'only checks lists or hash key membership' do expect(described_class.new( - :name => resource_name, - :settings => { 'node.name' => 'foo', 'node.data' => true } + name: resource_name, + settings: { 'node.name' => 'foo', 'node.data' => true } ).property(:settings).insync?( %w[node.name node.data] )).to be true end context 'purge' do it 'defaults to not purge values' do expect(described_class.new( - :name => resource_name, - :settings => { 'node.name' => 'foo', 'node.data' => true } + name: resource_name, + settings: { 'node.name' => 'foo', 'node.data' => true } ).property(:settings).insync?( %w[node.name node.data node.attr.rack] )).to be true end it 'respects the purge parameter' do expect(described_class.new( - :name => resource_name, - :settings => { 'node.name' => 'foo', 'node.data' => true }, - :purge => true + name: resource_name, + settings: { 'node.name' => 'foo', 'node.data' => true }, + purge: true ).property(:settings).insync?( %w[node.name node.data node.attr.rack] )).to be false end end end end - end # of describing when validating values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_license_spec.rb b/spec/unit/type/elasticsearch_license_spec.rb index 2ddbdd1..4b69ad2 100644 --- a/spec/unit/type/elasticsearch_license_spec.rb +++ b/spec/unit/type/elasticsearch_license_spec.rb @@ -1,75 +1,77 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_license) do let(:resource_name) { 'license' } include_examples 'REST API types', 'license', :content describe 'license' do let(:resource) do described_class.new( - :name => resource_name, - :ensure => 'present', - :content => { + name: resource_name, + ensure: 'present', + content: { 'license' => { - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date_in_millis' => 
'1519341125550', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date_in_millis' => '1519341125550', 'expiry_date_in_millis' => '1521933125550', - 'max_nodes' => '1000', - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'signature' => 'secretvalue', - 'start_date_in_millis' => '1513814400000' + 'max_nodes' => '1000', + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'signature' => 'secretvalue', + 'start_date_in_millis' => '1513814400000' } } ) end let(:content) { resource.property(:content) } describe 'insync?' do let(:is_content) do { 'license' => { - 'status' => 'active', - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date' => '2018-02-22T23:12:05.550Z', - 'issue_date_in_millis' => 1_519_341_125_550, - 'expiry_date' => '2018-03-24T23:12:05.550Z', + 'status' => 'active', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date' => '2018-02-22T23:12:05.550Z', + 'issue_date_in_millis' => 1_519_341_125_550, + 'expiry_date' => '2018-03-24T23:12:05.550Z', 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1_000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1_000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'start_date_in_millis' => 1_513_814_400_000 } } end describe 'synced properties' do it 'only enforces defined content' do - expect(content.insync?(is_content)).to be_truthy + expect(content).to be_insync(is_content) end end describe 'out-of-sync property' do { - 'uid' => 'cbff45e7-c553-41f7-ae4f-xxxxxxxxxxxx', - 'issue_date_in_millis' => '1513814400000', + 'uid' => 'cbff45e7-c553-41f7-ae4f-xxxxxxxxxxxx', + 'issue_date_in_millis' => '1513814400000', 'expiry_date_in_millis' => '1533167999999', - 'start_date_in_millis' => '-1' + 'start_date_in_millis' => '-1' }.each_pair do |field, value| let(:changed_content) do is_content['license'][field] = value is_content 
end it "detection for #{field}" do - expect(content.insync?(changed_content)).to be_falsy + expect(content).not_to be_insync(changed_content) end end end end end end diff --git a/spec/unit/type/elasticsearch_pipeline_spec.rb b/spec/unit/type/elasticsearch_pipeline_spec.rb index 337d706..1b890dc 100644 --- a/spec/unit/type/elasticsearch_pipeline_spec.rb +++ b/spec/unit/type/elasticsearch_pipeline_spec.rb @@ -1,7 +1,9 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_pipeline) do let(:resource_name) { 'test_pipeline' } include_examples 'REST API types', 'pipeline', :content end diff --git a/spec/unit/type/elasticsearch_plugin_spec.rb b/spec/unit/type/elasticsearch_plugin_spec.rb index 4523db7..02258d8 100644 --- a/spec/unit/type/elasticsearch_plugin_spec.rb +++ b/spec/unit/type/elasticsearch_plugin_spec.rb @@ -1,19 +1,21 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_plugin) do let(:resource_name) { 'lmenezes/elasticsearch-kopf' } describe 'input validation' do describe 'when validating attributes' do - [:configdir, :java_opts, :java_home, :name, :source, :url, :proxy].each do |param| - it "should have a #{param} parameter" do + %i[configdir java_opts java_home name source url proxy].each do |param| + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - it 'should have an ensure property' do + it 'has an ensure property' do expect(described_class.attrtype(:ensure)).to eq(:property) end end end end diff --git a/spec/unit/type/elasticsearch_role_mapping_spec.rb b/spec/unit/type/elasticsearch_role_mapping_spec.rb index 4f5394b..294b7a2 100644 --- a/spec/unit/type/elasticsearch_role_mapping_spec.rb +++ b/spec/unit/type/elasticsearch_role_mapping_spec.rb @@ -1,66 +1,78 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe 
Puppet::Type.type(:elasticsearch_role_mapping) do let(:resource_name) { 'elastic_role' } describe 'when validating attributes' do [:name].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :mappings].each do |prop| - it "should have a #{prop} property" do + %i[ensure mappings].each do |prop| + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end - end # of describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'name' do - it 'should reject long role names' do - expect { described_class.new( - :name => 'a' * 41 - ) }.to raise_error( + it 'rejects long role names' do + expect do + described_class.new( + name: 'a' * 41 + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end - it 'should reject invalid role characters' do + it 'rejects invalid role 
characters' do ['@foobar', '0foobar'].each do |role| - expect { described_class.new( - :name => role - ) }.to raise_error( + expect do + described_class.new( + name: role + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end end end - end # of describing when validing values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_role_spec.rb b/spec/unit/type/elasticsearch_role_spec.rb index f007d52..f0ce7a9 100644 --- a/spec/unit/type/elasticsearch_role_spec.rb +++ b/spec/unit/type/elasticsearch_role_spec.rb @@ -1,66 +1,78 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_role) do let(:resource_name) { 'elastic_role' } describe 'when validating attributes' do [:name].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :privileges].each do |prop| - it "should have a #{prop} property" do + %i[ensure privileges].each do |prop| + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end - end # of describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - 
:name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'name' do - it 'should reject long role names' do - expect { described_class.new( - :name => 'a' * 41 - ) }.to raise_error( + it 'rejects long role names' do + expect do + described_class.new( + name: 'a' * 41 + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end - it 'should reject invalid role characters' do + it 'rejects invalid role characters' do ['@foobar', '0foobar'].each do |role| - expect { described_class.new( - :name => role - ) }.to raise_error( + expect do + described_class.new( + name: role + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end end end - end # of describing when validing values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_snapshot_repository_spec.rb b/spec/unit/type/elasticsearch_snapshot_repository_spec.rb index d02492a..8baa914 100644 --- a/spec/unit/type/elasticsearch_snapshot_repository_spec.rb +++ b/spec/unit/type/elasticsearch_snapshot_repository_spec.rb @@ -1,197 +1,199 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_snapshot_repository) do let(:resource_name) { 'test_repository' } let(:default_params) do { - :location => '/backup' + location: '/backup' } end describe 'attribute validation for elasticsearch_snapshot_repository' do - [ - :name, - :host, - :port, - :protocol, - :validate_tls, - :ca_file, - :ca_path, - :timeout, - :username, - :password, - :type + %i[ + name + host + port + protocol + validate_tls + ca_file + ca_path + timeout + username + password + type ].each do |param| - it "should have a #{param} parameter" do 
+ it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [ - :ensure, - :compress, - :location, - :chunk_size, - :max_restore_rate, - :max_snapshot_rate + %i[ + ensure + compress + location + chunk_size + max_restore_rate + max_snapshot_rate ].each do |prop| - it "should have a #{prop} property" do + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end describe 'ensure' do - it 'should support present as a value for ensure' do + it 'supports present as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :present + name: resource_name, + ensure: :present ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should support absent as a value for ensure' do + it 'supports absent as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :absent + name: resource_name, + ensure: :absent ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should not support other values' do + it 'does not support other values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :foo + name: resource_name, + ensure: :foo ) ) - end.to raise_error(Puppet::Error, /Invalid value/) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'location' do - it 'should be required' do + it 'is required' do expect do described_class.new( - :name => resource_name + name: resource_name ) - end.to raise_error(Puppet::Error, /Location is required./) + end.to raise_error(Puppet::Error, %r{Location is required.}) end end describe 'host' do - it 'should accept IP addresses' do + it 'accepts IP addresses' do expect do described_class.new( 
default_params.merge( - :name => resource_name, - :host => '127.0.0.1' + name: resource_name, + host: '127.0.0.1' ) ) end.not_to raise_error end end describe 'port' do [-1, 0, 70_000, 'foo'].each do |value| - it "should reject invalid port value #{value}" do + it "rejects invalid port value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :port => value + name: resource_name, + port: value ) ) - end.to raise_error(Puppet::Error, /invalid port/i) + end.to raise_error(Puppet::Error, %r{invalid port}i) end end end describe 'validate_tls' do [-1, 0, {}, [], 'foo'].each do |value| - it "should reject invalid ssl_verify value #{value}" do + it "rejects invalid ssl_verify value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) - end.to raise_error(Puppet::Error, /invalid value/i) + end.to raise_error(Puppet::Error, %r{invalid value}i) end end [true, false, 'true', 'false', 'yes', 'no'].each do |value| - it "should accept validate_tls value #{value}" do + it "accepts validate_tls value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) end.not_to raise_error end end end describe 'timeout' do - it 'should reject string values' do + it 'rejects string values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 'foo' + name: resource_name, + timeout: 'foo' ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, %r{must be a}) end - it 'should reject negative integers' do + it 'rejects negative integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => -10 + name: resource_name, + timeout: -10 ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, 
%r{must be a}) end - it 'should accept integers' do + it 'accepts integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 10 + name: resource_name, + timeout: 10 ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should accept quoted integers' do + it 'accepts quoted integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => '10' + name: resource_name, + timeout: '10' ) ) - end.to_not raise_error + end.not_to raise_error end end - end # of describing when validing values include_examples 'REST API types', 'snapshot_repository' -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_template_spec.rb b/spec/unit/type/elasticsearch_template_spec.rb index abeabf0..7f30cf1 100644 --- a/spec/unit/type/elasticsearch_template_spec.rb +++ b/spec/unit/type/elasticsearch_template_spec.rb @@ -1,134 +1,136 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_template) do let(:resource_name) { 'test_template' } include_examples 'REST API types', 'template', :content describe 'template attribute validation' do - it 'should have a source parameter' do + it 'has a source parameter' do expect(described_class.attrtype(:source)).to eq(:param) end describe 'content and source validation' do - it 'should require either "content" or "source"' do + it 'requires either "content" or "source"' do expect do described_class.new( - :name => resource_name, - :ensure => :present + name: resource_name, + ensure: :present ) - end.to raise_error(Puppet::Error, /content.*or.*source.*required/) + end.to raise_error(Puppet::Error, %r{content.*or.*source.*required}) end - it 'should fail with both defined' do + it 'fails with both defined' do expect do described_class.new( - :name => resource_name, - :content => {}, - :source => 'puppet:///example.json' + name: 
resource_name, + content: {}, + source: 'puppet:///example.json' ) - end.to raise_error(Puppet::Error, /simultaneous/) + end.to raise_error(Puppet::Error, %r{simultaneous}) end - it 'should parse source paths into the content property' do - file_stub = 'foo' + it 'parses source paths into the content property' do + file_stub = 'foo'.dup [ Puppet::FileServing::Metadata, Puppet::FileServing::Content ].each do |klass| - allow(klass).to receive(:indirection) - .and_return(Object) + allow(klass).to receive(:indirection). + and_return(Object) end - allow(Object).to receive(:find) - .and_return(file_stub) - allow(file_stub).to receive(:content) - .and_return('{"template":"foobar-*", "order": 1}') + allow(Object).to receive(:find). + and_return(file_stub) + allow(file_stub).to receive(:content). + and_return('{"template":"foobar-*", "order": 1}') expect(described_class.new( - :name => resource_name, - :source => '/example.json' + name: resource_name, + source: '/example.json' )[:content]).to include( 'template' => 'foobar-*', 'order' => 1 ) end - it 'should qualify settings' do + it 'qualifies settings' do expect(described_class.new( - :name => resource_name, - :content => { 'settings' => { + name: resource_name, + content: { 'settings' => { 'number_of_replicas' => '2', 'index' => { 'number_of_shards' => '3' } } } )[:content]).to eq( 'order' => 0, 'aliases' => {}, 'mappings' => {}, 'settings' => { 'index' => { 'number_of_replicas' => 2, 'number_of_shards' => 3 } } ) end it 'detects flat qualified index settings' do expect(described_class.new( - :name => resource_name, - :content => { + name: resource_name, + content: { 'settings' => { 'number_of_replicas' => '2', 'index.number_of_shards' => '3' } } )[:content]).to eq( 'order' => 0, 'aliases' => {}, 'mappings' => {}, 'settings' => { 'index' => { 'number_of_replicas' => 2, 'number_of_shards' => 3 } } ) end end - end # of describing when validing values + end describe 'insync?' 
do # Although users can pass the type a hash structure with any sort of values # - string, integer, or other native datatype - the Elasticsearch API # normalizes all values to strings. In order to verify that the type does # not incorrectly detect changes when values may be in string form, we take # an example template and force all values to strings to mimic what # Elasticsearch does. it 'is idempotent' do def deep_stringify(obj) if obj.is_a? Array obj.map { |element| deep_stringify(element) } elsif obj.is_a? Hash obj.merge(obj) { |_key, val| deep_stringify(val) } elsif [true, false].include? obj obj else obj.to_s end end json = JSON.parse(File.read('spec/fixtures/templates/6.x.json')) is_template = described_class.new( - :name => resource_name, - :ensure => 'present', - :content => json + name: resource_name, + ensure: 'present', + content: json ).property(:content) should_template = described_class.new( - :name => resource_name, - :ensure => 'present', - :content => deep_stringify(json) + name: resource_name, + ensure: 'present', + content: deep_stringify(json) ).property(:content).should - expect(is_template.insync?(should_template)).to be_truthy + expect(is_template).to be_insync(should_template) end end -end # of describe Puppet::Type +end diff --git a/spec/unit/type/elasticsearch_user_roles_spec.rb b/spec/unit/type/elasticsearch_user_roles_spec.rb index ac8e9cf..155b6db 100644 --- a/spec/unit/type/elasticsearch_user_roles_spec.rb +++ b/spec/unit/type/elasticsearch_user_roles_spec.rb @@ -1,50 +1,58 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_user_roles) do let(:resource_name) { 'elastic' } describe 'when validating attributes' do [:name].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :roles].each do |prop| - it "should have a #{prop} property" do + %i[ensure roles].each do |prop| 
+ it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end - end # of describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end - end # of describing when validing values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_user_spec.rb b/spec/unit/type/elasticsearch_user_spec.rb index 7d200fc..83dcd11 100644 --- a/spec/unit/type/elasticsearch_user_spec.rb +++ b/spec/unit/type/elasticsearch_user_spec.rb @@ -1,76 +1,87 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' -[ - :elasticsearch_user, - :elasticsearch_user_file +%i[ + elasticsearch_user + elasticsearch_user_file ].each do |described_type| describe Puppet::Type.type(described_type) do let(:resource_name) { 'elastic' } describe 'when validating 
attributes' do - [:name, :configdir].each do |param| - it "should have a #{param} parameter" do + %i[name configdir].each do |param| + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end [:ensure].each do |prop| - it "should have a #{prop} property" do + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end - end # of describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end { - :hashed_password => :property, - :password => :param + hashed_password: :property, + password: :param }.each_pair do |attribute, type| next unless described_class.respond_to? 
attribute describe attribute.to_s do - it "should have a #{attrtype} #{type}" do + it "has a #{attrtype} #{type}" do expect(described_class.attrtype(attribute)).to eq(type) end end next unless attribute == :password - it 'should reject short passwords' do - expect { described_class.new( - :name => resource_name, - :password => 'foo' - ) }.to raise_error(Puppet::Error, /must be at least/) + + it 'rejects short passwords' do + expect do + described_class.new( + name: resource_name, + password: 'foo' + ) + end.to raise_error(Puppet::Error, %r{must be at least}) end end - end # of describing when validing values - end # of describe Puppet::Type + end + end end diff --git a/spec/unit/type/es_instance_conn_validator_spec.rb b/spec/unit/type/es_instance_conn_validator_spec.rb index 8ef7849..861bb17 100644 --- a/spec/unit/type/es_instance_conn_validator_spec.rb +++ b/spec/unit/type/es_instance_conn_validator_spec.rb @@ -1,88 +1,96 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:es_instance_conn_validator) do let(:resource_name) { 'conn-validator' } let(:conn_validator) do - Puppet::Type.type(:es_instance_conn_validator) - .new(name: resource_name) + Puppet::Type.type(:es_instance_conn_validator). 
+ new(name: resource_name) end describe 'when validating attributes' do - [:name, :server, :port, :timeout, :sleep_interval].each do |param| - it 'should have a #{param} parameter' do + %i[name server port timeout sleep_interval].each do |param| + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end [:ensure].each do |prop| - it 'should have a #{prop} property' do + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end - end # describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end - end # describe 'ensure' + end describe 'timeout' do - it 'should support a numerical value' do + it 'supports a numerical value' do conn_validator[:timeout] = 120 expect(conn_validator[:timeout]).to eq(120) end - it 'should have 
a default value of 60' do + it 'has a default value of 60' do expect(conn_validator[:timeout]).to eq(60) end - it 'should not support a non-numeric value' do + it 'does not support a non-numeric value' do expect do conn_validator[:timeout] = 'string' - end.to raise_error(Puppet::Error, /invalid value/) + end.to raise_error(Puppet::Error, %r{invalid value}) end - end # describe 'timeout' + end describe 'sleep_interval' do - it 'should support a numerical value' do + it 'supports a numerical value' do conn_validator[:sleep_interval] = 120 expect(conn_validator[:sleep_interval]).to eq(120) end - it 'should have a default value of 10' do + it 'has a default value of 10' do expect(conn_validator[:sleep_interval]).to eq(10) end - it 'should not support a non-numeric value' do + it 'does not support a non-numeric value' do expect do conn_validator[:sleep_interval] = 'string' - end.to raise_error(Puppet::Error, /invalid value/) + end.to raise_error(Puppet::Error, %r{invalid value}) end - end # describe 'sleep_interval - end # describe 'when valdating values' -end # of describe Puppet::Type + end + end +end