diff --git a/lib/facter/es_facts.rb b/lib/facter/es_facts.rb index eb587f4..e487775 100644 --- a/lib/facter/es_facts.rb +++ b/lib/facter/es_facts.rb @@ -1,137 +1,135 @@ +# frozen_string_literal: true + require 'net/http' require 'json' require 'yaml' # Helper module to encapsulate custom fact injection module EsFacts # Add a fact to the catalog of host facts def self.add_fact(prefix, key, value) key = "#{prefix}_#{key}".to_sym ::Facter.add(key) do setcode { value } end end def self.ssl?(config) tls_keys = [ 'xpack.security.http.ssl.enabled' ] tls_keys.any? { |key| (config.key? key) && (config[key] == true) } end # Helper to determine the instance http.port number def self.get_httpport(config) enabled = 'http.enabled' httpport = 'http.port' return false, false if !config[enabled].nil? && config[enabled] == 'false' return config[httpport], ssl?(config) unless config[httpport].nil? + ['9200', ssl?(config)] end # Entrypoint for custom fact populator # # This is a super old function but works; disable a bunch of checks. - # rubocop:disable Lint/HandleExceptions - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def self.run dir_prefix = '/etc/elasticsearch' # httpports is a hash of port_number => ssl? transportports = [] http_bound_addresses = [] transport_bound_addresses = [] transport_publish_addresses = [] nodes = {} # only when the directory exists we need to process the stuff return unless File.directory?(dir_prefix) if File.readable?("#{dir_prefix}/elasticsearch.yml") config_data = YAML.load_file("#{dir_prefix}/elasticsearch.yml") httpport, ssl = get_httpport(config_data) end begin add_fact('elasticsearch', 'port', httpport) unless ssl key_prefix = 'elasticsearch' # key_prefix = "elasticsearch_#{httpport}" uri = URI("http://localhost:#{httpport}") http = Net::HTTP.new(uri.host, uri.port) http.read_timeout = 10 http.open_timeout = 2 response = http.get('/') json_data = JSON.parse(response.body) if json_data['status'] && json_data['status'] == 200 add_fact(key_prefix, 'name', json_data['name']) add_fact(key_prefix, 'version', json_data['version']['number']) uri2 = URI("http://localhost:#{httpport}/_nodes/#{json_data['name']}") http2 = Net::HTTP.new(uri2.host, uri2.port) http2.read_timeout = 10 http2.open_timeout = 2 response2 = http2.get(uri2.path) json_data_node = JSON.parse(response2.body) add_fact(key_prefix, 'cluster_name', json_data_node['cluster_name']) node_data = json_data_node['nodes'].first add_fact(key_prefix, 'node_id', node_data[0]) nodes_data = json_data_node['nodes'][node_data[0]] process = nodes_data['process'] add_fact(key_prefix, 'mlockall', process['mlockall']) plugins = nodes_data['plugins'] plugin_names = [] plugins.each do |plugin| plugin_names << plugin['name'] plugin.each do |key, value| prefix = "#{key_prefix}_plugin_#{plugin['name']}" add_fact(prefix, key, value) unless key == 'name' end end add_fact(key_prefix, 'plugins', plugin_names.join(',')) nodes_data['http']['bound_address'].each { |i| http_bound_addresses << i } nodes_data['transport']['bound_address'].each { |i| transport_bound_addresses << i } transport_publish_addresses << nodes_data['transport']['publish_address'] unless nodes_data['transport']['publish_address'].nil? - transportports << nodes_data['settings']['transport']['tcp']['port'] unless nodes_data['settings']['transport']['tcp'].nil? or nodes_data['settings']['transport']['tcp']['port'].nil? 
+ transportports << nodes_data['settings']['transport']['tcp']['port'] unless nodes_data['settings']['transport']['tcp'].nil? || nodes_data['settings']['transport']['tcp']['port'].nil? node = { - 'http_ports' => httpports.keys, - 'transport_ports' => transportports, - 'http_bound_addresses' => http_bound_addresses, - 'transport_bound_addresses' => transport_bound_addresses, + 'http_ports' => httpports.keys, + 'transport_ports' => transportports, + 'http_bound_addresses' => http_bound_addresses, + 'transport_bound_addresses' => transport_bound_addresses, 'transport_publish_addresses' => transport_publish_addresses, - json_data['name'] => { - 'settings' => nodes_data['settings'], - 'http' => nodes_data['http'], + json_data['name'] => { + 'settings' => nodes_data['settings'], + 'http' => nodes_data['http'], 'transport' => nodes_data['transport'] } } nodes.merge! node end end - rescue + rescue StandardError end Facter.add(:elasticsearch) do setcode do nodes end nodes unless nodes.empty? end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity end EsFacts.run diff --git a/lib/puppet/feature/elasticsearch_shield_users_native.rb b/lib/puppet/feature/elasticsearch_shield_users_native.rb index d79e364..26d7f04 100644 --- a/lib/puppet/feature/elasticsearch_shield_users_native.rb +++ b/lib/puppet/feature/elasticsearch_shield_users_native.rb @@ -1,16 +1,22 @@ +# frozen_string_literal: true + require 'puppet/util/feature' require 'puppet/util/package' shield_plugin_dir = '/usr/share/elasticsearch/plugins/shield' -Puppet.features.add(:elasticsearch_shield_users_native) { - File.exist? shield_plugin_dir and - Dir[shield_plugin_dir + '/*.jar'].map do |file| - File.basename(file, '.jar').split('-') - end.select do |parts| - parts.include? 'shield' - end.any? do |parts| - parts.last =~ /^[\d.]+$/ and - Puppet::Util::Package.versioncmp(parts.last, '2.3') >= 0 - end -} +Puppet.features.add(:elasticsearch_shield_users_native) do + return false unless File.exist?(shield_plugin_dir) + + jars = Dir["#{shield_plugin_dir}/*.jar"] + jar_parts = jars.map do |file| + File.basename(file, '.jar').split('-') + end + shield_components = jar_parts.select do |parts| + parts.include? 'shield' + end + shield_components.any? do |parts| + parts.last =~ %r{^[\d.]+$} && + Puppet::Util::Package.versioncmp(parts.last, '2.3') >= 0 + end +end diff --git a/lib/puppet/parser/functions/array_suffix.rb b/lib/puppet/parser/functions/array_suffix.rb index 0e4ce3b..3afad3d 100644 --- a/lib/puppet/parser/functions/array_suffix.rb +++ b/lib/puppet/parser/functions/array_suffix.rb @@ -1,46 +1,44 @@ +# frozen_string_literal: true + # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :array_suffix, - :type => :rvalue, - :doc => <<-EOS -This function applies a suffix to all elements in an array. + type: :rvalue, + doc: <<~EOS + This function applies a suffix to all elements in an array. -*Examples:* + *Examples:* - array_suffix(['a','b','c'], 'p') + array_suffix(['a','b','c'], 'p') -Will return: ['ap','bp','cp'] + Will return: ['ap','bp','cp'] -@return Array + @return Array EOS ) do |arguments| # Technically we support two arguments but only first is mandatory ... - raise(Puppet::ParseError, 'array_suffix(): Wrong number of arguments ' \ - "given (#{arguments.size} for 1)") if arguments.empty? + if arguments.empty? 
+ raise(Puppet::ParseError, 'array_suffix(): Wrong number of arguments ' \ + "given (#{arguments.size} for 1)") + end array = arguments[0] - unless array.is_a?(Array) - raise Puppet::ParseError, "array_suffix(): expected first argument to be an Array, got #{array.inspect}" - end + raise Puppet::ParseError, "array_suffix(): expected first argument to be an Array, got #{array.inspect}" unless array.is_a?(Array) suffix = arguments[1] if arguments[1] - if suffix - unless suffix.is_a? String - raise Puppet::ParseError, "array_suffix(): expected second argument to be a String, got #{suffix.inspect}" - end - end + raise Puppet::ParseError, "array_suffix(): expected second argument to be a String, got #{suffix.inspect}" if suffix && !(suffix.is_a? String) # Turn everything into string same as join would do ... - result = array.collect do |i| + result = array.map do |i| i = i.to_s suffix ? i + suffix : i end return result end end # vim: set ts=2 sw=2 et : diff --git a/lib/puppet/parser/functions/concat_merge.rb b/lib/puppet/parser/functions/concat_merge.rb index cddb7e2..27b20c7 100644 --- a/lib/puppet/parser/functions/concat_merge.rb +++ b/lib/puppet/parser/functions/concat_merge.rb @@ -1,50 +1,48 @@ +# frozen_string_literal: true + # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :concat_merge, - :type => :rvalue, - :doc => <<-'ENDHEREDOC') do |args| + type: :rvalue, + doc: <<-'ENDHEREDOC') do |args| Merges two or more hashes together concatenating duplicate keys with array values and returns the resulting hash. For example: $hash1 = {'a' => [1]} $hash2 = {'a' => [2]} concat_merge($hash1, $hash2) # The resulting hash is equivalent to: # { 'a' => [1, 2] } When there is a duplicate key that is not an array, the key in the rightmost hash will "win." @return String ENDHEREDOC - if args.length < 2 - raise Puppet::ParseError, "concat_merge(): wrong number of arguments (#{args.length}; must be at least 2)" - end + raise Puppet::ParseError, "concat_merge(): wrong number of arguments (#{args.length}; must be at least 2)" if args.length < 2 concat_merge = proc do |hash1, hash2| hash1.merge(hash2) do |_key, old_value, new_value| if old_value.is_a?(Array) && new_value.is_a?(Array) old_value + new_value else new_value end end end result = {} args.each do |arg| - next if arg.is_a? String and arg.empty? # empty string is synonym for puppet's undef + next if arg.is_a?(String) && arg.empty? # empty string is synonym for puppet's undef # If the argument was not a hash, skip it. 
- unless arg.is_a?(Hash) - raise Puppet::ParseError, "concat_merge: unexpected argument type #{arg.class}, only expects hash arguments" - end + raise Puppet::ParseError, "concat_merge: unexpected argument type #{arg.class}, only expects hash arguments" unless arg.is_a?(Hash) result = concat_merge.call(result, arg) end result end end diff --git a/lib/puppet/parser/functions/deep_implode.rb b/lib/puppet/parser/functions/deep_implode.rb index 0cba5d6..92dce02 100644 --- a/lib/puppet/parser/functions/deep_implode.rb +++ b/lib/puppet/parser/functions/deep_implode.rb @@ -1,46 +1,44 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet_x/elastic/deep_implode' # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :deep_implode, - :type => :rvalue, - :doc => <<-'ENDHEREDOC') do |args| + type: :rvalue, + doc: <<-'ENDHEREDOC') do |args| Recursively flattens all keys of a hash into a dot-notated hash, deeply merging duplicate key values by natively combining them and returns the resulting hash. That is confusing, look at the examples for more clarity. For example: $hash = {'top' => {'sub' => [1]}, 'top.sub' => [2] } $flattened_hash = deep_implode($hash) # The resulting hash is equivalent to: # { 'top.sub' => [1, 2] } When the function encounters array or hash values, they are concatenated or merged, respectively. When duplace paths for a key are generated, the function will prefer to retain keys with the longest root key. @return Hash ENDHEREDOC - if args.length != 1 - raise Puppet::ParseError, "deep_implode(): wrong number of arguments (#{args.length}; must be 1)" - end + raise Puppet::ParseError, "deep_implode(): wrong number of arguments (#{args.length}; must be 1)" if args.length != 1 arg = args[0] - unless arg.is_a? Hash - raise Puppet::ParseError, 'deep_implode: unexpected argument type, only expects hashes' - end + raise Puppet::ParseError, 'deep_implode: unexpected argument type, only expects hashes' unless arg.is_a? Hash return {} if arg.empty? Puppet_X::Elastic.deep_implode arg end end diff --git a/lib/puppet/parser/functions/es_plugin_name.rb b/lib/puppet/parser/functions/es_plugin_name.rb index 680b943..33813e2 100644 --- a/lib/puppet/parser/functions/es_plugin_name.rb +++ b/lib/puppet/parser/functions/es_plugin_name.rb @@ -1,42 +1,44 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet_x/elastic/plugin_parsing' # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :es_plugin_name, - :type => :rvalue, - :doc => <<-'ENDHEREDOC') do |args| + type: :rvalue, + doc: <<-'ENDHEREDOC') do |args| Given a string, return the best guess at what the directory name will be for the given plugin. Any arguments past the first will be fallbacks (using the same logic) should the first fail. For example, all the following return values are "plug": es_plugin_name('plug') es_plugin_name('foo/plug') es_plugin_name('foo/plug/1.0.0') es_plugin_name('foo/elasticsearch-plug') es_plugin_name('foo/es-plug/1.3.2') @return String ENDHEREDOC if args.empty? raise Puppet::ParseError, 'wrong number of arguments, at least one value required' end ret = args.select do |arg| - arg.is_a? String and not arg.empty? + arg.is_a?(String) && !arg.empty? 
end.first if ret Puppet_X::Elastic.plugin_name ret else raise Puppet::Error, 'could not determine plugin name' end end end diff --git a/lib/puppet/parser/functions/plugin_dir.rb b/lib/puppet/parser/functions/plugin_dir.rb index aee8174..fadba1c 100644 --- a/lib/puppet/parser/functions/plugin_dir.rb +++ b/lib/puppet/parser/functions/plugin_dir.rb @@ -1,43 +1,38 @@ +# frozen_string_literal: true + # Top-level Puppet functions module Puppet::Parser::Functions newfunction( :plugin_dir, - :type => :rvalue, - :doc => <<-EOS + type: :rvalue, + doc: <<-EOS Extracts the end plugin directory of the name @return String EOS ) do |arguments| - if arguments.empty? - raise(Puppet::ParseError, 'plugin_dir(): No arguments given') - elsif arguments.size > 2 - raise(Puppet::ParseError, "plugin_dir(): Too many arguments given (#{arguments.size})") - else - - unless arguments[0].is_a?(String) - raise(Puppet::ParseError, 'plugin_dir(): Requires string as first argument') - end + raise(Puppet::ParseError, 'plugin_dir(): No arguments given') if arguments.empty? + raise(Puppet::ParseError, "plugin_dir(): Too many arguments given (#{arguments.size})") if arguments.size > 2 + raise(Puppet::ParseError, 'plugin_dir(): Requires string as first argument') unless arguments[0].is_a?(String) - plugin_name = arguments[0] - items = plugin_name.split('/') + plugin_name = arguments[0] + items = plugin_name.split('/') - return items[0] if items.count == 1 + return items[0] if items.count == 1 - plugin = items[1] - endname = if plugin.include?('-') # example elasticsearch-head - if plugin.start_with?('elasticsearch-') - plugin.gsub('elasticsearch-', '') - elsif plugin.start_with?('es-') - plugin.gsub('es-', '') - else - plugin - end + plugin = items[1] + endname = if plugin.include?('-') # example elasticsearch-head + if plugin.start_with?('elasticsearch-') + plugin.gsub('elasticsearch-', '') + elsif plugin.start_with?('es-') + plugin.gsub('es-', '') else plugin end + else + plugin + end - return endname - end + return endname end end diff --git a/lib/puppet/provider/elastic_parsedfile.rb b/lib/puppet/provider/elastic_parsedfile.rb index cc2a6b8..b5a3dc8 100644 --- a/lib/puppet/provider/elastic_parsedfile.rb +++ b/lib/puppet/provider/elastic_parsedfile.rb @@ -1,12 +1,14 @@ +# frozen_string_literal: true + require 'puppet/provider/parsedfile' # Parent class for Elasticsearch-based providers that need to access # specific configuration directories. class Puppet::Provider::ElasticParsedFile < Puppet::Provider::ParsedFile # Find/set an x-pack configuration file. # # @return String def self.xpack_config(val) @default_target ||= "/etc/elasticsearch/#{val}" end end diff --git a/lib/puppet/provider/elastic_plugin.rb b/lib/puppet/provider/elastic_plugin.rb index 97e4d6c..2ddf9f2 100644 --- a/lib/puppet/provider/elastic_plugin.rb +++ b/lib/puppet/provider/elastic_plugin.rb @@ -1,161 +1,166 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'uri' require 'puppet_x/elastic/es_versioning' require 'puppet_x/elastic/plugin_parsing' # Generalized parent class for providers that behave like Elasticsearch's plugin # command line tool. -# rubocop:disable Metrics/ClassLength class Puppet::Provider::ElasticPlugin < Puppet::Provider # Elasticsearch's home directory. # # @return String def homedir case Facter.value('osfamily') when 'OpenBSD' '/usr/local/elasticsearch' else '/usr/share/elasticsearch' end end def exists? 
# First, attempt to list whether the named plugin exists by finding a # plugin descriptor file, which each plugin should have. We must wildcard # the name to match meta plugins, see upstream issue for this change: # https://github.com/elastic/elasticsearch/pull/28022 properties_files = Dir[File.join(@resource[:plugin_dir], plugin_path, '**', '*plugin-descriptor.properties')] return false if properties_files.empty? begin # Use the basic name format that the plugin tool supports in order to # determine the version from the resource name. plugin_version = Puppet_X::Elastic.plugin_version(@resource[:name]) # Naively parse the Java .properties file to check version equality. # Because we don't have the luxury of installing arbitrary gems, perform # simple parse with a degree of safety checking in the call chain # # Note that x-pack installs "meta" plugins which bundle multiple plugins # in one. Therefore, we need to find the first "sub" plugin that # indicates which version of x-pack this is. properties = properties_files.sort.map do |prop_file| - IO.readlines(prop_file).map(&:strip).reject do |line| - line.start_with?('#') or line.empty? - end.map do |property| + lines = File.readlines(prop_file).map(&:strip).reject do |line| + line.start_with?('#') || line.empty? + end + lines = lines.map do |property| property.split('=') - end.reject do |pairs| - pairs.length != 2 - end.to_h - end.find { |prop| prop.key? 'version' } + end + lines = lines.select do |pairs| + pairs.length == 2 + end + lines.to_h + end + properties = properties.find { |prop| prop.key? 'version' } - if properties and properties['version'] != plugin_version + if properties && properties['version'] != plugin_version debug "Elasticsearch plugin #{@resource[:name]} not version #{plugin_version}, reinstalling" destroy return false end rescue ElasticPluginParseFailure debug "Failed to parse plugin version for #{@resource[:name]}" end # If there is no version string, we do not check version equality debug "No version found in #{@resource[:name]}, not enforcing any version" true end def plugin_path @resource[:plugin_path] || Puppet_X::Elastic.plugin_name(@resource[:name]) end # Intelligently returns the correct installation arguments for Elasticsearch. # # @return [Array] # arguments to pass to the plugin installation utility def install_args if !@resource[:url].nil? [@resource[:url]] elsif !@resource[:source].nil? ["file://#{@resource[:source]}"] else [@resource[:name]] end end # Format proxy arguments for consumption by the elasticsearch plugin # management tool (i.e., Java properties). # # @return Array # of flags for command-line tools def proxy_args(url) parsed = URI(url) %w[http https].map do |schema| - [:host, :port, :user, :password].map do |param| + %i[host port user password].map do |param| option = parsed.send(param) "-D#{schema}.proxy#{param.to_s.capitalize}=#{option}" unless option.nil? end end.flatten.compact end # Install this plugin on the host. def create commands = [] commands += proxy_args(@resource[:proxy]) if @resource[:proxy] commands << 'install' commands << '--batch' commands += install_args debug("Commands: #{commands.inspect}") retry_count = 3 retry_times = 0 begin with_environment do plugin(commands) end rescue Puppet::ExecutionFailure => e retry_times += 1 debug("Failed to install plugin. Retrying... #{retry_times} of #{retry_count}") sleep 2 retry if retry_times < retry_count raise "Failed to install plugin. Received error: #{e.inspect}" end end # Remove this plugin from the host. 
def destroy with_environment do plugin(['remove', Puppet_X::Elastic.plugin_name(@resource[:name])]) end end # Run a command wrapped in necessary env vars def with_environment(&block) env_vars = { 'ES_JAVA_OPTS' => @resource[:java_opts], 'ES_PATH_CONF' => @resource[:configdir] } saved_vars = {} # Use 'java_home' param if supplied, otherwise default to Elasticsearch shipped JDK - env_vars['JAVA_HOME'] = if @resource[:java_home].nil? or @resource[:java_home] == '' + env_vars['JAVA_HOME'] = if @resource[:java_home].nil? || @resource[:java_home] == '' "#{homedir}/jdk" else @resource[:java_home] end env_vars['ES_JAVA_OPTS'] = env_vars['ES_JAVA_OPTS'].join(' ') env_vars.each do |env_var, value| saved_vars[env_var] = ENV[env_var] ENV[env_var] = value end ret = block.yield saved_vars.each do |env_var, value| ENV[env_var] = value end ret end end diff --git a/lib/puppet/provider/elastic_rest.rb b/lib/puppet/provider/elastic_rest.rb index 0b993fb..09e7c79 100644 --- a/lib/puppet/provider/elastic_rest.rb +++ b/lib/puppet/provider/elastic_rest.rb @@ -1,303 +1,286 @@ +# frozen_string_literal: true + require 'json' require 'net/http' require 'openssl' # Parent class encapsulating general-use functions for children REST-based # providers. -# rubocop:disable Metrics/ClassLength class Puppet::Provider::ElasticREST < Puppet::Provider class << self - attr_accessor :api_discovery_uri - attr_accessor :api_resource_style - attr_accessor :api_uri - attr_accessor :discrete_resource_creation - attr_accessor :metadata - attr_accessor :metadata_pipeline - attr_accessor :query_string + attr_accessor :api_discovery_uri, :api_resource_style, :api_uri, :discrete_resource_creation, :metadata, :metadata_pipeline, :query_string end # Fetch arbitrary metadata for the class from an instance object. # # @return String def metadata self.class.metadata end # Retrieve the class query_string variable # # @return String def query_string self.class.query_string end # Perform a REST API request against the indicated endpoint. # # @return Net::HTTPResponse - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def self.rest(http, \ req, \ validate_tls = true, \ timeout = 10, \ username = nil, \ password = nil) - if username and password + if username && password req.basic_auth username, password - elsif username or password + elsif username || password Puppet.warning( 'username and password must both be defined, skipping basic auth' ) end req['Accept'] = 'application/json' http.read_timeout = timeout http.open_timeout = timeout http.verify_mode = OpenSSL::SSL::VERIFY_NONE unless validate_tls begin http.request req rescue EOFError => e # Because the provider attempts a best guess at API access, we # only fail when HTTP operations fail for mutating methods. unless %w[GET OPTIONS HEAD].include? req.method raise Puppet::Error, "Received '#{e}' from the Elasticsearch API. Are your API settings correct?" end end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity # Helper to format a remote URL request for Elasticsearch which takes into # account path ordering, et cetera. def self.format_uri(resource_path, property_flush = {}) - return api_uri if resource_path.nil? or api_resource_style == :bare - if discrete_resource_creation and not property_flush[:ensure].nil? + return api_uri if resource_path.nil? || api_resource_style == :bare + + if discrete_resource_creation && !property_flush[:ensure].nil? 
resource_path else case api_resource_style when :prefix - resource_path + '/' + api_uri + "#{resource_path}/#{api_uri}" else - api_uri + '/' + resource_path + "#{api_uri}/#{resource_path}" end end end # Fetch Elasticsearch API objects. Accepts a variety of argument functions # dictating how to connect to the Elasticsearch API. # # @return Array # an array of Hashes representing the found API objects, whether they be # templates, pipelines, et cetera. def self.api_objects(protocol = 'http', \ validate_tls = true, \ host = 'localhost', \ port = 9200, \ timeout = 10, \ username = nil, \ password = nil, \ ca_file = nil, \ ca_path = nil) uri = URI("#{protocol}://#{host}:#{port}/#{format_uri(api_discovery_uri)}") http = Net::HTTP.new uri.host, uri.port req = Net::HTTP::Get.new uri.request_uri http.use_ssl = uri.scheme == 'https' [[ca_file, :ca_file=], [ca_path, :ca_path=]].each do |arg, method| - http.send method, arg if arg and http.respond_to? method + http.send method, arg if arg && http.respond_to?(method) end response = rest http, req, validate_tls, timeout, username, password results = [] - if response.respond_to? :code and response.code.to_i == 200 - results = process_body(response.body) - end + results = process_body(response.body) if response.respond_to?(:code) && response.code.to_i == 200 results end # Process the JSON response body def self.process_body(body) - results = JSON.parse(body).map do |object_name, api_object| + JSON.parse(body).map do |object_name, api_object| { - :name => object_name, - :ensure => :present, - metadata => process_metadata(api_object), + :name => object_name, + :ensure => :present, + metadata => process_metadata(api_object), :provider => name } end - - results end # Passes API objects through arbitrary Procs/lambdas in order to postprocess # API responses. def self.process_metadata(raw_metadata) - if metadata_pipeline.is_a? Array and !metadata_pipeline.empty? + if metadata_pipeline.is_a?(Array) && !metadata_pipeline.empty? metadata_pipeline.reduce(raw_metadata) do |md, processor| processor.call md end else raw_metadata end end # Fetch an array of provider objects from the Elasticsearch API. def self.instances api_objects.map { |resource| new resource } end # Unlike a typical #prefetch, which just ties discovered #instances to the # correct resources, we need to quantify all the ways the resources in the # catalog know about Elasticsearch API access and use those settings to # fetch any templates we can before associating resources and providers. def self.prefetch(resources) # Get all relevant API access methods from the resources we know about - resources.map do |_, resource| + res = resources.map do |_, resource| p = resource.parameters [ p[:protocol].value, p[:validate_tls].value, p[:host].value, p[:port].value, p[:timeout].value, (p.key?(:username) ? p[:username].value : nil), (p.key?(:password) ? p[:password].value : nil), (p.key?(:ca_file) ? p[:ca_file].value : nil), (p.key?(:ca_path) ? 
p[:ca_path].value : nil) ] # Deduplicate identical settings, and fetch templates - end.uniq.map do |api| + end.uniq + res = res.map do |api| api_objects(*api) # Flatten and deduplicate the array, instantiate providers, and do the # typical association dance - end.flatten.uniq.map { |resource| new resource }.each do |prov| + end + res.flatten.uniq.map { |resource| new resource }.each do |prov| if (resource = resources[prov.name]) resource.provider = prov end end end def initialize(value = {}) super(value) @property_flush = {} end # Generate a request body def generate_body JSON.generate( - if metadata != :content and @property_flush[:ensure] == :present + if metadata != :content && @property_flush[:ensure] == :present { metadata.to_s => resource[metadata] } else resource[metadata] end ) end # Call Elasticsearch's REST API to appropriately PUT/DELETE/or otherwise # update any managed API objects. - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def flush Puppet.debug('Got to flush') uri = URI( format( '%s://%s:%d/%s', resource[:protocol], resource[:host], resource[:port], self.class.format_uri(resource[:name], @property_flush) ) ) uri.query = URI.encode_www_form query_string if query_string Puppet.debug("Generated URI = #{uri.inspect}") case @property_flush[:ensure] when :absent req = Net::HTTP::Delete.new uri.request_uri else req = Net::HTTP::Put.new uri.request_uri req.body = generate_body Puppet.debug("Generated body looks like: #{req.body.inspect}") # As of Elasticsearch 6.x, required when requesting with a payload (so we # set it always to be safe) req['Content-Type'] = 'application/json' if req['Content-Type'].nil? end http = Net::HTTP.new uri.host, uri.port http.use_ssl = uri.scheme == 'https' - [:ca_file, :ca_path].each do |arg| - if !resource[arg].nil? and http.respond_to? arg - http.send "#{arg}=".to_sym, resource[arg] - end + %i[ca_file ca_path].each do |arg| + http.send "#{arg}=".to_sym, resource[arg] if !resource[arg].nil? && http.respond_to?(arg) end response = self.class.rest( http, req, resource[:validate_tls], resource[:timeout], resource[:username], resource[:password] ) # Attempt to return useful error output unless response.code.to_i == 200 Puppet.debug("Non-OK reponse: Body = #{response.body.inspect}") json = JSON.parse(response.body) err_msg = if json.key? 'error' - if json['error'].is_a? Hash \ - and json['error'].key? 'root_cause' + if json['error'].is_a?(Hash) \ + && json['error'].key?('root_cause') # Newer versions have useful output json['error']['root_cause'].first['reason'] else # Otherwise fallback to old-style error messages json['error'] end else # As a last resort, return the response error code "HTTP #{response.code}" end raise Puppet::Error, "Elasticsearch API responded with: #{err_msg}" end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity - @property_hash = self.class.api_objects( resource[:protocol], resource[:validate_tls], resource[:host], resource[:port], resource[:timeout], resource[:username], resource[:password], resource[:ca_file], resource[:ca_path] - ).detect do |t| + ).find do |t| t[:name] == resource[:name] end end # Set this provider's `:ensure` property to `:present`. def create @property_flush[:ensure] = :present end def exists? @property_hash[:ensure] == :present end # Set this provider's `:ensure` property to `:absent`. 
def destroy @property_flush[:ensure] = :absent end -end # of class +end diff --git a/lib/puppet/provider/elastic_user_command.rb b/lib/puppet/provider/elastic_user_command.rb index b6ca8e7..6c76e8c 100644 --- a/lib/puppet/provider/elastic_user_command.rb +++ b/lib/puppet/provider/elastic_user_command.rb @@ -1,125 +1,131 @@ +# frozen_string_literal: true + # Parent provider for Elasticsearch Shield/X-Pack file-based user management # tools. class Puppet::Provider::ElasticUserCommand < Puppet::Provider attr_accessor :homedir # Elasticsearch's home directory. # # @return String def self.homedir @homedir ||= case Facter.value('osfamily') when 'OpenBSD' '/usr/local/elasticsearch' else '/usr/share/elasticsearch' end end # Run the user management command with specified tool arguments. def self.command_with_path(args, configdir = nil) options = { - :combine => true, - :custom_environment => { + combine: true, + custom_environment: { 'ES_PATH_CONF' => configdir || '/etc/elasticsearch' }, - :failonfail => true + failonfail: true } execute( [command(:users_cli)] + (args.is_a?(Array) ? args : [args]), options ) end # Gather local file-based users into an array of Hash objects. def self.fetch_users begin output = command_with_path('list') rescue Puppet::ExecutionFailure => e debug("#fetch_users had an error: #{e.inspect}") return nil end debug("Raw command output: #{output}") - output.split("\n").select { |u| + matching_lines = output.split("\n").select do |u| # Keep only expected "user : role1,role2" formatted lines - u[/^[^:]+:\s+\S+$/] - }.map { |u| + u[%r{^[^:]+:\s+\S+$}] + end + + users = matching_lines.map do |u| # Break into ["user ", " role1,role2"] u.split(':').first.strip - }.map do |user| + end + + users.map do |user| { - :name => user, - :ensure => :present, - :provider => name + name: user, + ensure: :present, + provider: name } end end # Fetch an array of provider objects from the the list of local users. def self.instances fetch_users.map do |user| new user end end # Generic prefetch boilerplate. def self.prefetch(resources) instances.each do |prov| if (resource = resources[prov.name]) resource.provider = prov end end end def initialize(value = {}) super(value) @property_flush = {} end # Enforce the desired state for this user on-disk. def flush arguments = [] case @property_flush[:ensure] when :absent arguments << 'userdel' arguments << resource[:name] else arguments << 'useradd' arguments << resource[:name] arguments << '-p' << resource[:password] end self.class.command_with_path(arguments, resource[:configdir]) - @property_hash = self.class.fetch_users.detect do |u| + @property_hash = self.class.fetch_users.find do |u| u[:name] == resource[:name] end end # Set this provider's `:ensure` property to `:present`. def create @property_flush[:ensure] = :present end def exists? @property_hash[:ensure] == :present end # Set this provider's `:ensure` property to `:absent`. def destroy @property_flush[:ensure] = :absent end # Manually set this user's password. def passwd self.class.command_with_path( [ 'passwd', resource[:name], '-p', resource[:password] ], resource[:configdir] ) end end diff --git a/lib/puppet/provider/elastic_user_roles.rb b/lib/puppet/provider/elastic_user_roles.rb index ee86a4c..9ce660a 100644 --- a/lib/puppet/provider/elastic_user_roles.rb +++ b/lib/puppet/provider/elastic_user_roles.rb @@ -1,49 +1,59 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_yaml' # Provider to help manage file-based X-Pack user/role configuration # files. 
class Puppet::Provider::ElasticUserRoles < Puppet::Provider::ElasticYaml # Override the ancestor `parse` method to process a users/roles file # managed by the Elasticsearch user tools. def self.parse(text) - text.split("\n").map(&:strip).select do |line| + lines = text.split("\n").map(&:strip).select do |line| # Strip comments - not line.start_with? '#' and not line.empty? - end.map do |line| + (!line.start_with? '#') && !line.empty? + end + lines = lines.map do |line| # Turn array of roles into array of users that have the role role, users = line.split(':') users.split(',').map do |user| { user => [role] } end - end.flatten.inject({}) do |hash, user| + end + lines = lines.flatten.reduce({}) do |hash, user| # Gather up user => role hashes by append-merging role lists hash.merge(user) { |_, o, n| o + n } - end.map do |user, roles| + end + lines = lines.map do |user, roles| # Map those hashes into what the provider expects { - :name => user, - :roles => roles + name: user, + roles: roles } - end.to_a + end + lines.to_a end # Represent this user/role record as a correctly-formatted config file. def self.to_file(records) debug "Flushing: #{records.inspect}" - records.map do |record| + records = records.map do |record| record[:roles].map do |r| { [record[:name]] => r } end - end.flatten.map(&:invert).inject({}) do |acc, role| + end + records = records.flatten.map(&:invert).reduce({}) do |acc, role| acc.merge(role) { |_, o, n| o + n } - end.delete_if do |_, users| + end + records = records.delete_if do |_, users| users.empty? - end.map do |role, users| + end + records = records.map do |role, users| "#{role}:#{users.join(',')}" - end.join("\n") + "\n" + end + "#{records.join("\n")}\n" end def self.skip_record?(_record) false end end diff --git a/lib/puppet/provider/elastic_yaml.rb b/lib/puppet/provider/elastic_yaml.rb index 0d855fb..57d6681 100644 --- a/lib/puppet/provider/elastic_yaml.rb +++ b/lib/puppet/provider/elastic_yaml.rb @@ -1,58 +1,61 @@ -# -*- coding: utf-8 -*- +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet/provider/elastic_parsedfile' require 'puppet/util/package' require 'puppet_x/elastic/hash' # Provider for yaml-based Elasticsearch configuration files. class Puppet::Provider::ElasticYaml < Puppet::Provider::ElasticParsedFile class << self attr_accessor :metadata end # Transform a given string into a Hash-based representation of the # provider. def self.parse(text) - yaml = YAML.load text + yaml = YAML.safe_load text if yaml yaml.map do |key, metadata| { :name => key, :ensure => :present, @metadata => metadata } end else [] end end # Transform a given list of provider records into yaml-based # representation. def self.to_file(records) yaml = records.map do |record| # Convert top-level symbols to strings - Hash[record.map { |k, v| [k.to_s, v] }] - end.inject({}) do |hash, record| + record.transform_keys(&:to_s) + end + yaml = yaml.reduce({}) do |hash, record| # Flatten array of hashes into single hash hash.merge(record['name'] => record.delete(@metadata.to_s)) - end.extend(Puppet_X::Elastic::SortedHash).to_yaml.split("\n") + end + yaml = yaml.extend(Puppet_X::Elastic::SortedHash).to_yaml.split("\n") - yaml.shift if yaml.first =~ /---/ + yaml.shift if yaml.first =~ %r{---} yaml = yaml.join("\n") yaml << "\n" end def self.skip_record?(_record) false end # This is ugly, but it's overridden in ParsedFile with abstract # functionality we don't need for our simple provider class. # This has been observed to break in Puppet version 3/4 switches.
def self.valid_attr?(klass, attr_name) klass.is_a? Class ? klass.parameters.include?(attr_name) : true end end diff --git a/lib/puppet/provider/elasticsearch_index/ruby.rb b/lib/puppet/provider/elasticsearch_index/ruby.rb index 0fa171b..e97b58e 100644 --- a/lib/puppet/provider/elasticsearch_index/ruby.rb +++ b/lib/puppet/provider/elasticsearch_index/ruby.rb @@ -1,25 +1,27 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/provider/elastic_rest' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' Puppet::Type.type(:elasticsearch_index).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :metadata => :settings, - :metadata_pipeline => [ - lambda { |data| data['settings'] }, - lambda { |data| Puppet_X::Elastic.deep_to_s data }, - lambda { |data| Puppet_X::Elastic.deep_to_i data } + parent: Puppet::Provider::ElasticREST, + metadata: :settings, + metadata_pipeline: [ + ->(data) { data['settings'] }, + ->(data) { Puppet_X::Elastic.deep_to_s data }, + ->(data) { Puppet_X::Elastic.deep_to_i data } ], - :api_uri => '_settings', - :api_discovery_uri => '_all', - :api_resource_style => :prefix, - :discrete_resource_creation => true + api_uri: '_settings', + api_discovery_uri: '_all', + api_resource_style: :prefix, + discrete_resource_creation: true ) do desc 'A REST API based provider to manage Elasticsearch index settings.' mk_resource_methods end diff --git a/lib/puppet/provider/elasticsearch_keystore/ruby.rb b/lib/puppet/provider/elasticsearch_keystore/ruby.rb index 6233564..9c39eaf 100644 --- a/lib/puppet/provider/elasticsearch_keystore/ruby.rb +++ b/lib/puppet/provider/elasticsearch_keystore/ruby.rb @@ -1,167 +1,168 @@ +# frozen_string_literal: true + Puppet::Type.type(:elasticsearch_keystore).provide( :elasticsearch_keystore ) do desc 'Provider for `elasticsearch-keystore` based secret management.' def self.defaults_dir @defaults_dir ||= case Facter.value('osfamily') when 'RedHat' '/etc/sysconfig' else '/etc/default' end end def self.home_dir @home_dir ||= case Facter.value('osfamily') when 'OpenBSD' '/usr/local/elasticsearch' else '/usr/share/elasticsearch' end end attr_accessor :defaults_dir, :home_dir - commands :keystore => "#{home_dir}/bin/elasticsearch-keystore" + commands keystore: "#{home_dir}/bin/elasticsearch-keystore" def self.run_keystore(args, instance, configdir = '/etc/elasticsearch', stdin = nil) options = { - :custom_environment => { - 'ES_INCLUDE' => File.join(defaults_dir, "elasticsearch-#{instance}"), + custom_environment: { + 'ES_INCLUDE' => File.join(defaults_dir, "elasticsearch-#{instance}"), 'ES_PATH_CONF' => "#{configdir}/#{instance}" }, - :uid => 'elasticsearch', - :gid => 'elasticsearch', - :failonfail => true + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true } unless stdin.nil? stdinfile = Tempfile.new('elasticsearch-keystore') stdinfile << stdin stdinfile.flush options[:stdinfile] = stdinfile.path end begin stdout = execute([command(:keystore)] + args, options) ensure unless stdin.nil? stdinfile.close stdinfile.unlink end end stdout.exitstatus.zero? ? stdout : raise(Puppet::Error, stdout) end def self.present_keystores - Dir[File.join(%w[/ etc elasticsearch *])].select do |directory| + files = Dir[File.join(%w[/ etc elasticsearch *])].select do |directory| File.exist? 
File.join(directory, 'elasticsearch.keystore') - end.map do |instance| + end + + files.map do |instance| settings = run_keystore(['list'], File.basename(instance)).split("\n") { - :name => File.basename(instance), - :ensure => :present, - :provider => name, - :settings => settings + name: File.basename(instance), + ensure: :present, + provider: name, + settings: settings } end end def self.instances present_keystores.map do |keystore| new keystore end end def self.prefetch(resources) instances.each do |prov| if (resource = resources[prov.name]) resource.provider = prov end end end def initialize(value = {}) super(value) @property_flush = {} end - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def flush case @property_flush[:ensure] when :present debug(self.class.run_keystore(['create'], resource[:name], resource[:configdir])) @property_flush[:settings] = resource[:settings] when :absent File.delete(File.join([ - '/', 'etc', 'elasticsearch', resource[:instance], 'elasticsearch.keystore' - ])) + '/', 'etc', 'elasticsearch', resource[:instance], 'elasticsearch.keystore' + ])) end # Note that since the property is :array_matching => :all, we have to # expect that the hash is wrapped in an array. - if @property_flush[:settings] and not @property_flush[:settings].first.empty? + if @property_flush[:settings] && !@property_flush[:settings].first.empty? # Flush properties that _should_ be present @property_flush[:settings].first.each_pair do |setting, value| next unless @property_hash[:settings].nil? \ - or not @property_hash[:settings].include? setting + || (!@property_hash[:settings].include? setting) + debug(self.class.run_keystore( - ['add', '--force', '--stdin', setting], resource[:name], resource[:configdir], value - )) + ['add', '--force', '--stdin', setting], resource[:name], resource[:configdir], value + )) end # Remove properties that are no longer present - if resource[:purge] and not (@property_hash.nil? or @property_hash[:settings].nil?) + if resource[:purge] && !(@property_hash.nil? || @property_hash[:settings].nil?) (@property_hash[:settings] - @property_flush[:settings].first.keys).each do |setting| debug(self.class.run_keystore( - ['remove', setting], resource[:name], resource[:configdir] - )) + ['remove', setting], resource[:name], resource[:configdir] + )) end end end - @property_hash = self.class.present_keystores.detect do |u| + @property_hash = self.class.present_keystores.find do |u| u[:name] == resource[:name] end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity # settings property setter # # @return [Hash] settings def settings=(new_settings) @property_flush[:settings] = new_settings end # settings property getter # # @return [Hash] settings def settings @property_hash[:settings] end # Sets the ensure property in the @property_flush hash. # # @return [Symbol] :present def create @property_flush[:ensure] = :present end # Determine whether this resource is present on the system. # # @return [Boolean] def exists? @property_hash[:ensure] == :present end # Set flushed ensure property to absent. 
# # @return [Symbol] :absent def destroy @property_flush[:ensure] = :absent end end diff --git a/lib/puppet/provider/elasticsearch_license/xpack.rb b/lib/puppet/provider/elasticsearch_license/xpack.rb index d7dda2c..4bae734 100644 --- a/lib/puppet/provider/elasticsearch_license/xpack.rb +++ b/lib/puppet/provider/elasticsearch_license/xpack.rb @@ -1,31 +1,33 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_rest' Puppet::Type.type(:elasticsearch_license).provide( :xpack, - :api_resource_style => :bare, - :parent => Puppet::Provider::ElasticREST, - :metadata => :content, - :metadata_pipeline => [ - lambda { |data| Puppet_X::Elastic.deep_to_s data }, - lambda { |data| Puppet_X::Elastic.deep_to_i data } + api_resource_style: :bare, + parent: Puppet::Provider::ElasticREST, + metadata: :content, + metadata_pipeline: [ + ->(data) { Puppet_X::Elastic.deep_to_s data }, + ->(data) { Puppet_X::Elastic.deep_to_i data } ], - :api_uri => '_xpack/license', - :query_string => { + api_uri: '_xpack/license', + query_string: { 'acknowledge' => 'true' } ) do desc 'A REST API based provider to manage Elasticsearch X-Pack licenses.' mk_resource_methods def self.process_body(body) JSON.parse(body).map do |_object_name, api_object| { - :name => name.to_s, - :ensure => :present, - metadata => { 'license' => process_metadata(api_object) }, + :name => name.to_s, + :ensure => :present, + metadata => { 'license' => process_metadata(api_object) }, :provider => name } end end end diff --git a/lib/puppet/provider/elasticsearch_pipeline/ruby.rb b/lib/puppet/provider/elasticsearch_pipeline/ruby.rb index c277dc8..ed0bd50 100644 --- a/lib/puppet/provider/elasticsearch_pipeline/ruby.rb +++ b/lib/puppet/provider/elasticsearch_pipeline/ruby.rb @@ -1,12 +1,14 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_rest' Puppet::Type.type(:elasticsearch_pipeline).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :metadata => :content, - :api_uri => '_ingest/pipeline' + parent: Puppet::Provider::ElasticREST, + metadata: :content, + api_uri: '_ingest/pipeline' ) do desc 'A REST API based provider to manage Elasticsearch ingest pipelines.' mk_resource_methods end diff --git a/lib/puppet/provider/elasticsearch_plugin/ruby.rb b/lib/puppet/provider/elasticsearch_plugin/ruby.rb index adf5a73..f57d512 100644 --- a/lib/puppet/provider/elasticsearch_plugin/ruby.rb +++ b/lib/puppet/provider/elasticsearch_plugin/ruby.rb @@ -1,21 +1,23 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_plugin' Puppet::Type.type(:elasticsearch_plugin).provide( :elasticsearch_plugin, - :parent => Puppet::Provider::ElasticPlugin + parent: Puppet::Provider::ElasticPlugin ) do desc <<-END Post-5.x provider for Elasticsearch bin/elasticsearch-plugin command operations.' 
END case Facter.value('osfamily') when 'OpenBSD' - commands :plugin => '/usr/local/elasticsearch/bin/elasticsearch-plugin' - commands :es => '/usr/local/elasticsearch/bin/elasticsearch' - commands :javapathhelper => '/usr/local/bin/javaPathHelper' + commands plugin: '/usr/local/elasticsearch/bin/elasticsearch-plugin' + commands es: '/usr/local/elasticsearch/bin/elasticsearch' + commands javapathhelper: '/usr/local/bin/javaPathHelper' else - commands :plugin => '/usr/share/elasticsearch/bin/elasticsearch-plugin' - commands :es => '/usr/share/elasticsearch/bin/elasticsearch' + commands plugin: '/usr/share/elasticsearch/bin/elasticsearch-plugin' + commands es: '/usr/share/elasticsearch/bin/elasticsearch' end end diff --git a/lib/puppet/provider/elasticsearch_role/ruby.rb b/lib/puppet/provider/elasticsearch_role/ruby.rb index d05e87e..19f5bdf 100644 --- a/lib/puppet/provider/elasticsearch_role/ruby.rb +++ b/lib/puppet/provider/elasticsearch_role/ruby.rb @@ -1,11 +1,13 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_yaml' Puppet::Type.type(:elasticsearch_role).provide( :ruby, - :parent => Puppet::Provider::ElasticYaml, - :metadata => :privileges + parent: Puppet::Provider::ElasticYaml, + metadata: :privileges ) do desc 'Provider for X-Pack role resources.' xpack_config 'roles.yml' end diff --git a/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb b/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb index c73ea00..2e3a796 100644 --- a/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb +++ b/lib/puppet/provider/elasticsearch_role_mapping/ruby.rb @@ -1,11 +1,13 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_yaml' Puppet::Type.type(:elasticsearch_role_mapping).provide( :ruby, - :parent => Puppet::Provider::ElasticYaml, - :metadata => :mappings + parent: Puppet::Provider::ElasticYaml, + metadata: :mappings ) do desc 'Provider for X-Pack role mappings.' xpack_config 'role_mapping.yml' end diff --git a/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb b/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb index 9b5e6e3..f866d65 100644 --- a/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb +++ b/lib/puppet/provider/elasticsearch_snapshot_repository/ruby.rb @@ -1,52 +1,53 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/provider/elastic_rest' Puppet::Type.type(:elasticsearch_snapshot_repository).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :api_uri => '_snapshot' + parent: Puppet::Provider::ElasticREST, + api_uri: '_snapshot' ) do desc 'A REST API based provider to manage Elasticsearch snapshot repositories.' mk_resource_methods def self.process_body(body) Puppet.debug('Got to snapshot_repository.process_body') - results = JSON.parse(body).map do |object_name, api_object| + JSON.parse(body).map do |object_name, api_object| { - :name => object_name, - :ensure => :present, - :type => api_object['type'], - :compress => api_object['settings']['compress'], - :location => api_object['settings']['location'], - :chunk_size => api_object['settings']['chunk_size'], - :max_restore_rate => api_object['settings']['max_restore_rate'], - :max_snapshot_rate => api_object['settings']['max_snapshot_rate'], - :provider => name - }.reject { |_k, v| v.nil? 
} + name: object_name, + ensure: :present, + type: api_object['type'], + compress: api_object['settings']['compress'], + location: api_object['settings']['location'], + chunk_size: api_object['settings']['chunk_size'], + max_restore_rate: api_object['settings']['max_restore_rate'], + max_snapshot_rate: api_object['settings']['max_snapshot_rate'], + provider: name + }.compact end - results end def generate_body Puppet.debug('Got to snapshot_repository.generate_body') # Build core request body body = { - 'type' => resource[:type], + 'type' => resource[:type], 'settings' => { 'compress' => resource[:compress], 'location' => resource[:location] } } # Add optional values body['settings']['chunk_size'] = resource[:chunk_size] unless resource[:chunk_size].nil? body['settings']['max_restore_rate'] = resource[:max_restore_rate] unless resource[:max_restore_rate].nil? body['settings']['max_snapshot_rate'] = resource[:max_snapshot_rate] unless resource[:max_snapshot_rate].nil? # Convert to JSON and return JSON.generate(body) end end diff --git a/lib/puppet/provider/elasticsearch_template/ruby.rb b/lib/puppet/provider/elasticsearch_template/ruby.rb index 2512992..7bc6d8e 100644 --- a/lib/puppet/provider/elasticsearch_template/ruby.rb +++ b/lib/puppet/provider/elasticsearch_template/ruby.rb @@ -1,21 +1,23 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/provider/elastic_rest' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' Puppet::Type.type(:elasticsearch_template).provide( :ruby, - :parent => Puppet::Provider::ElasticREST, - :api_uri => '_template', - :metadata => :content, - :metadata_pipeline => [ - lambda { |data| Puppet_X::Elastic.deep_to_s data }, - lambda { |data| Puppet_X::Elastic.deep_to_i data } + parent: Puppet::Provider::ElasticREST, + api_uri: '_template', + metadata: :content, + metadata_pipeline: [ + ->(data) { Puppet_X::Elastic.deep_to_s data }, + ->(data) { Puppet_X::Elastic.deep_to_i data } ] ) do desc 'A REST API based provider to manage Elasticsearch templates.' mk_resource_methods end diff --git a/lib/puppet/provider/elasticsearch_user/ruby.rb b/lib/puppet/provider/elasticsearch_user/ruby.rb index d12c4f6..3a9fd10 100644 --- a/lib/puppet/provider/elasticsearch_user/ruby.rb +++ b/lib/puppet/provider/elasticsearch_user/ruby.rb @@ -1,15 +1,17 @@ +# frozen_string_literal: true + require File.join(File.dirname(__FILE__), '..', '..', '..', 'puppet/provider/elastic_user_command') Puppet::Type.type(:elasticsearch_user).provide( :ruby, - :parent => Puppet::Provider::ElasticUserCommand + parent: Puppet::Provider::ElasticUserCommand ) do desc 'Provider for X-Pack user resources.' 
has_feature :manages_plaintext_passwords mk_resource_methods - commands :users_cli => "#{homedir}/bin/elasticsearch-users" - commands :es => "#{homedir}/bin/elasticsearch" + commands users_cli: "#{homedir}/bin/elasticsearch-users" + commands es: "#{homedir}/bin/elasticsearch" end diff --git a/lib/puppet/provider/elasticsearch_user_file/ruby.rb b/lib/puppet/provider/elasticsearch_user_file/ruby.rb index 7438be0..bc35132 100644 --- a/lib/puppet/provider/elasticsearch_user_file/ruby.rb +++ b/lib/puppet/provider/elasticsearch_user_file/ruby.rb @@ -1,28 +1,30 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_parsedfile' Puppet::Type.type(:elasticsearch_user_file).provide( :ruby, - :parent => Puppet::Provider::ElasticParsedFile + parent: Puppet::Provider::ElasticParsedFile ) do desc 'Provider for X-Pack elasticsearch users using plain files.' xpack_config 'users' has_feature :manages_encrypted_passwords text_line :comment, - :match => /^\s*#/ + match: %r{^\s*#} record_line :ruby, - :fields => %w[name hashed_password], - :separator => ':', - :joiner => ':' + fields: %w[name hashed_password], + separator: ':', + joiner: ':' def self.valid_attr?(klass, attr_name) if klass.respond_to? :parameters klass.parameters.include?(attr_name) else true end end end diff --git a/lib/puppet/provider/elasticsearch_user_roles/ruby.rb b/lib/puppet/provider/elasticsearch_user_roles/ruby.rb index a5da043..f130454 100644 --- a/lib/puppet/provider/elasticsearch_user_roles/ruby.rb +++ b/lib/puppet/provider/elasticsearch_user_roles/ruby.rb @@ -1,10 +1,12 @@ +# frozen_string_literal: true + require 'puppet/provider/elastic_user_roles' Puppet::Type.type(:elasticsearch_user_roles).provide( :ruby, - :parent => Puppet::Provider::ElasticUserRoles + parent: Puppet::Provider::ElasticUserRoles ) do desc 'Provider for X-Pack user roles (parsed file.)' xpack_config 'users_roles' end diff --git a/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb b/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb index e492712..93e72d5 100644 --- a/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb +++ b/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb @@ -1,51 +1,53 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', '..')) require 'puppet/util/es_instance_validator' # This file contains a provider for the resource type `es_instance_conn_validator`, # which validates the Elasticsearch connection by attempting a tcp connection. Puppet::Type.type(:es_instance_conn_validator).provide(:tcp_port) do desc "A provider for the resource type `es_instance_conn_validator`, which validates the connection by attempting an https connection to Elasticsearch." def exists? start_time = Time.now timeout = resource[:timeout] sleep_interval = resource[:sleep_interval] success = validator.attempt_connection while success == false && ((Time.now - start_time) < timeout) # It can take several seconds for the Elasticsearch to start up; # especially on the first install. Therefore, our first connection attempt # may fail. Here we have somewhat arbitrarily chosen to retry every 10 # seconds until the configurable timeout has expired. 
Puppet.debug("Failed to connect to Elasticsearch; sleeping #{sleep_interval} seconds before retry") sleep sleep_interval success = validator.attempt_connection end if success Puppet.debug("Connected to the Elasticsearch in #{Time.now - start_time} seconds.") else Puppet.notice("Failed to connect to the Elasticsearch within timeout window of #{timeout} seconds; giving up.") end success end def create # If `#create` is called, that means that `#exists?` returned false, which # means that the connection could not be established... so we need to # cause a failure here. raise Puppet::Error, "Unable to connect to Elasticsearch! (#{@validator.instance_server}:#{@validator.instance_port})" end private # @api private def validator @validator ||= Puppet::Util::EsInstanceValidator.new(resource[:server], resource[:port]) end end diff --git a/lib/puppet/type/elasticsearch_index.rb b/lib/puppet/type/elasticsearch_index.rb index 942ee6a..fef6803 100644 --- a/lib/puppet/type/elasticsearch_index.rb +++ b/lib/puppet/type/elasticsearch_index.rb @@ -1,34 +1,36 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/asymmetric_compare' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_index) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch index settings.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Index name.' end newproperty(:settings) do desc 'Structured settings for the index in hash form.' def insync?(is) Puppet_X::Elastic.asymmetric_compare(should, is) end munge do |value| Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(value)) end validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a? Hash end end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_keystore.rb b/lib/puppet/type/elasticsearch_keystore.rb index 46f5d7d..7bae2a0 100644 --- a/lib/puppet/type/elasticsearch_keystore.rb +++ b/lib/puppet/type/elasticsearch_keystore.rb @@ -1,64 +1,66 @@ +# frozen_string_literal: true + require 'puppet/parameter/boolean' Puppet::Type.newtype(:elasticsearch_keystore) do desc 'Manages an Elasticsearch keystore settings file.' ensurable - newparam(:instance, :namevar => true) do + newparam(:instance, namevar: true) do desc 'Elasticsearch instance this keystore belongs to.' end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' defaultto '/etc/elasticsearch' end - newparam(:purge, :boolean => true, :parent => Puppet::Parameter::Boolean) do + newparam(:purge, boolean: true, parent: Puppet::Parameter::Boolean) do desc <<-EOS Whether to proactively remove settings that exist in the keystore but are not present in this resource's settings. EOS defaultto false end - newproperty(:settings, :array_matching => :all) do + newproperty(:settings, array_matching: :all) do desc 'A key/value hash of settings names and values.' # The keystore utility can only retrieve a list of stored settings, # so here we only compare the existing settings (sorted) with the # desired settings' keys def insync?(is) if resource[:purge] is.sort == @should.first.keys.sort else (@should.first.keys - is).empty? end end def change_to_s(currentvalue, newvalue_raw) ret = '' newvalue = newvalue_raw.first.keys added_settings = newvalue - currentvalue ret << "added: #{added_settings.join(', ')} " unless added_settings.empty? 
removed_settings = currentvalue - newvalue unless removed_settings.empty? ret << if resource[:purge] "removed: #{removed_settings.join(', ')}" else "would have removed: #{removed_settings.join(', ')}, but purging is disabled" end end ret end end autorequire(:augeas) do "defaults_#{self[:name]}" end end diff --git a/lib/puppet/type/elasticsearch_license.rb b/lib/puppet/type/elasticsearch_license.rb index 6611836..62890d2 100644 --- a/lib/puppet/type/elasticsearch_license.rb +++ b/lib/puppet/type/elasticsearch_license.rb @@ -1,52 +1,51 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/asymmetric_compare' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_license) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch licenses.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Pipeline name.' end newproperty(:content) do desc 'Structured hash for license content data.' def insync?(is) Puppet_X::Elastic.asymmetric_compare( - should.map { |k, v| [k, v.is_a?(Hash) ? (v.reject { |s, _| s == 'signature' }) : v] }.to_h, + should.transform_values { |v| v.is_a?(Hash) ? (v.reject { |s, _| s == 'signature' }) : v }, is ) end def should_to_s(newvalue) - newvalue.map do |license, license_data| - [ - license, - if license_data.is_a? Hash - license_data.map do |field, value| - [field, field == 'signature' ? '[redacted]' : value] - end.to_h - else - v - end - ] - end.to_h.to_s + newvalue.transform_values do |license_data| + if license_data.is_a? Hash + license_data.map do |field, value| + [field, field == 'signature' ? '[redacted]' : value] + end.to_h + else + license_data + end + end.to_s end validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a? Hash end munge do |value| Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(value)) end end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_pipeline.rb b/lib/puppet/type/elasticsearch_pipeline.rb index 993f94f..8c51058 100644 --- a/lib/puppet/type/elasticsearch_pipeline.rb +++ b/lib/puppet/type/elasticsearch_pipeline.rb @@ -1,29 +1,31 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_pipeline) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch ingest pipelines.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Pipeline name.' end newproperty(:content) do desc 'Structured content of pipeline.' validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a? Hash end munge do |value| Puppet_X::Elastic.deep_to_i(Puppet_X::Elastic.deep_to_s(value)) end end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_plugin.rb b/lib/puppet/type/elasticsearch_plugin.rb index 1b52b50..cf22902 100644 --- a/lib/puppet/type/elasticsearch_plugin.rb +++ b/lib/puppet/type/elasticsearch_plugin.rb @@ -1,52 +1,54 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_plugin) do @doc = 'Plugin installation type' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'An arbitrary name used as the identity of the resource.'
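# Illustrative sketch (not part of this patch): the elasticsearch_license
# comparison above drops the 'signature' field from the desired content before
# handing it to asymmetric_compare, so a differing or redacted signature alone
# does not register as a change. The hash below is a made-up, truncated
# license body.
desired = { 'license' => { 'uid' => 'abc-123', 'signature' => 'secretvalue' } }
comparable = desired.transform_values do |v|
  v.is_a?(Hash) ? v.reject { |k, _| k == 'signature' } : v
end
# comparable == { 'license' => { 'uid' => 'abc-123' } }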
end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' defaultto '/etc/elasticsearch' validate do |value| raise Puppet::Error, 'path expected' if value.nil? end end newparam(:elasticsearch_package_name) do desc 'Name of the system Elasticsearch package.' end newparam(:java_opts) do desc 'Optional array of Java options for ES_JAVA_OPTS.' defaultto [] end newparam(:java_home) do desc 'Optional string to set the environment variable JAVA_HOME.' end newparam(:url) do desc 'Url of the package' end newparam(:source) do desc 'Source of the package. puppet:// or file:// resource' end newparam(:proxy) do desc 'Proxy Host' end newparam(:plugin_dir) do desc 'Path to the Plugins directory' defaultto '/usr/share/elasticsearch/plugins' end newparam(:plugin_path) do desc 'Override name of the directory created for the plugin' end end diff --git a/lib/puppet/type/elasticsearch_role.rb b/lib/puppet/type/elasticsearch_role.rb index 2248ab5..8cf2d68 100644 --- a/lib/puppet/type/elasticsearch_role.rb +++ b/lib/puppet/type/elasticsearch_role.rb @@ -1,15 +1,17 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_role) do desc 'Type to model Elasticsearch roles.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Role name.' - newvalues(/^[a-zA-Z_]{1}[-\w@.$]{0,39}$/) + newvalues(%r{^[a-zA-Z_]{1}[-\w@.$]{0,39}$}) end newproperty(:privileges) do desc 'Security privileges of the given role.' end end diff --git a/lib/puppet/type/elasticsearch_role_mapping.rb b/lib/puppet/type/elasticsearch_role_mapping.rb index 6bc669c..47b0f69 100644 --- a/lib/puppet/type/elasticsearch_role_mapping.rb +++ b/lib/puppet/type/elasticsearch_role_mapping.rb @@ -1,15 +1,17 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_role_mapping) do desc 'Type to model Elasticsearch role mappings.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Role name.' - newvalues(/^[a-zA-Z_]{1}[-\w@.$]{0,39}$/) + newvalues(%r{^[a-zA-Z_]{1}[-\w@.$]{0,39}$}) end - newproperty(:mappings, :array_matching => :all) do + newproperty(:mappings, array_matching: :all) do desc 'List of role mappings.' end end diff --git a/lib/puppet/type/elasticsearch_snapshot_repository.rb b/lib/puppet/type/elasticsearch_snapshot_repository.rb index 17357a9..d23368d 100644 --- a/lib/puppet/type/elasticsearch_snapshot_repository.rb +++ b/lib/puppet/type/elasticsearch_snapshot_repository.rb @@ -1,51 +1,53 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_snapshot_repository) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch snapshot repositories.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Repository name.' end newparam(:type) do desc 'Repository type' defaultto 'fs' validate do |value| raise Puppet::Error, 'string expected' unless value.is_a? 
String end end # newproperty(:compress, :boolean => true, :parent => Puppet::Property::Boolean) do - newproperty(:compress, :boolean => true) do + newproperty(:compress, boolean: true) do desc 'Compress the repository data' - defaultto :true + defaultto true end newproperty(:location) do desc 'Repository location' end newproperty(:chunk_size) do desc 'File chunk size' end newproperty(:max_restore_rate) do desc 'Maximum Restore rate' end newproperty(:max_snapshot_rate) do desc 'Maximum Snapshot rate' end validate do raise ArgumentError, 'Location is required.' if self[:location].nil? end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_template.rb b/lib/puppet/type/elasticsearch_template.rb index c44a5bd..8ebf0ca 100644 --- a/lib/puppet/type/elasticsearch_template.rb +++ b/lib/puppet/type/elasticsearch_template.rb @@ -1,114 +1,116 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..')) require 'puppet/file_serving/content' require 'puppet/file_serving/metadata' require 'puppet_x/elastic/deep_implode' require 'puppet_x/elastic/deep_to_i' require 'puppet_x/elastic/deep_to_s' require 'puppet_x/elastic/elasticsearch_rest_resource' Puppet::Type.newtype(:elasticsearch_template) do extend ElasticsearchRESTResource desc 'Manages Elasticsearch index templates.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'Template name.' end newproperty(:content) do desc 'Structured content of template.' validate do |value| raise Puppet::Error, 'hash expected' unless value.is_a? Hash end munge do |value| # The Elasticsearch API will return default empty values for # order, aliases, and mappings if they aren't defined in the # user mapping, so we need to set defaults here to keep the # `in` and `should` states consistent if the user hasn't # provided any. # # The value is first stringified, then integers are parse out as # necessary, since the Elasticsearch API enforces some fields to be # integers. # # We also need to fully qualify index settings, since users # can define those with the index json key absent, but the API # always fully qualifies them. { 'order' => 0, 'aliases' => {}, 'mappings' => {} }.merge( Puppet_X::Elastic.deep_to_i( Puppet_X::Elastic.deep_to_s( value.tap do |val| if val.key? 'settings' val['settings']['index'] = {} unless val['settings'].key? 'index' (val['settings'].keys - ['index']).each do |setting| new_key = if setting.start_with? 'index.' setting[6..-1] else setting end val['settings']['index'][new_key] = \ val['settings'].delete setting end end end ) ) ) end def insync?(is) Puppet_X::Elastic.deep_implode(is) == \ Puppet_X::Elastic.deep_implode(should) end end newparam(:source) do desc 'Puppet source to file containing template contents.' validate do |value| raise Puppet::Error, 'string expected' unless value.is_a? String end end # rubocop:disable Style/SignalException validate do # Ensure that at least one source of template content has been provided if self[:ensure] == :present fail Puppet::ParseError, '"content" or "source" required' \ - if self[:content].nil? and self[:source].nil? - if !self[:content].nil? and !self[:source].nil? + if self[:content].nil? && self[:source].nil? + + if !self[:content].nil? && !self[:source].nil? fail( Puppet::ParseError, "'content' and 'source' cannot be simultaneously defined" ) end end # If a source was passed, retrieve the source content from Puppet's # FileServing indirection and set the content property unless self[:source].nil? 
- unless Puppet::FileServing::Metadata.indirection.find(self[:source]) - fail(format('Could not retrieve source %s', self[:source])) - end + fail(format('Could not retrieve source %s', self[:source])) unless Puppet::FileServing::Metadata.indirection.find(self[:source]) tmp = if !catalog.nil? \ - and catalog.respond_to?(:environment_instance) + && catalog.respond_to?(:environment_instance) Puppet::FileServing::Content.indirection.find( self[:source], - :environment => catalog.environment_instance + environment: catalog.environment_instance ) else Puppet::FileServing::Content.indirection.find(self[:source]) end fail(format('Could not find any content at %s', self[:source])) unless tmp + self[:content] = PSON.load(tmp.content) end end -end # of newtype +end diff --git a/lib/puppet/type/elasticsearch_user.rb b/lib/puppet/type/elasticsearch_user.rb index 01b0a18..02b44d1 100644 --- a/lib/puppet/type/elasticsearch_user.rb +++ b/lib/puppet/type/elasticsearch_user.rb @@ -1,51 +1,51 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_user) do desc 'Type to model Elasticsearch users.' feature :manages_plaintext_passwords, 'The provider can control the password in plaintext form.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'User name.' end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' validate do |value| raise Puppet::Error, 'path expected' if value.nil? end end newparam( :password, - :required_features => :manages_plaintext_passwords + required_features: :manages_plaintext_passwords ) do desc 'Plaintext password for user.' validate do |value| - if value.length < 6 - raise ArgumentError, 'Password must be at least 6 characters long' - end + raise ArgumentError, 'Password must be at least 6 characters long' if value.length < 6 end # rubocop:disable Style/PredicateName def is_to_s(_currentvalue) '[old password hash redacted]' end # rubocop:enable Style/PredicateName def should_to_s(_newvalue) '[new password hash redacted]' end end def refresh if @parameters[:ensure] provider.passwd else debug 'skipping password set' end end end diff --git a/lib/puppet/type/elasticsearch_user_file.rb b/lib/puppet/type/elasticsearch_user_file.rb index 250563d..5fc31f8 100644 --- a/lib/puppet/type/elasticsearch_user_file.rb +++ b/lib/puppet/type/elasticsearch_user_file.rb @@ -1,30 +1,32 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_user_file) do desc 'Type to model Elasticsearch users.' feature :manages_encrypted_passwords, 'The provider can control the password hash without a need to explicitly refresh.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'User name.' end newparam(:configdir) do desc 'Path to the elasticsearch configuration directory (ES_PATH_CONF).' validate do |value| raise Puppet::Error, 'path expected' if value.nil? end end newproperty( :hashed_password, - :required_features => :manages_encrypted_passwords + required_features: :manages_encrypted_passwords ) do desc 'Hashed password for user.' 
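# Illustrative sketch (not part of this patch): the `newvalues` pattern just
# below accepts strings shaped like bcrypt hashes, i.e. the literal prefix
# "$2a$" followed by exactly 56 more characters (60 in total). The sample
# value is fabricated.
sample = "$2a$10$#{'a' * 53}"
sample.match?(%r{^[$]2a[$].{56}$}) # => true
sample.length                      # => 60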
- newvalues(/^[$]2a[$].{56}$/) + newvalues(%r{^[$]2a[$].{56}$}) end end diff --git a/lib/puppet/type/elasticsearch_user_roles.rb b/lib/puppet/type/elasticsearch_user_roles.rb index fb8a86a..ced8439 100644 --- a/lib/puppet/type/elasticsearch_user_roles.rb +++ b/lib/puppet/type/elasticsearch_user_roles.rb @@ -1,20 +1,22 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:elasticsearch_user_roles) do desc 'Type to model Elasticsearch user roles.' ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'User name.' end - newproperty(:roles, :array_matching => :all) do + newproperty(:roles, array_matching: :all) do desc 'Array of roles that the user should belong to.' def insync?(is) is.sort == should.sort end end autorequire(:elasticsearch_user) do self[:name] end end diff --git a/lib/puppet/type/es_instance_conn_validator.rb b/lib/puppet/type/es_instance_conn_validator.rb index 938c626..afd5f1b 100644 --- a/lib/puppet/type/es_instance_conn_validator.rb +++ b/lib/puppet/type/es_instance_conn_validator.rb @@ -1,45 +1,47 @@ +# frozen_string_literal: true + Puppet::Type.newtype(:es_instance_conn_validator) do @doc = "Verify that a connection can be successfully established between a node and Elasticsearch. It could potentially be used for other purposes such as monitoring." ensurable - newparam(:name, :namevar => true) do + newparam(:name, namevar: true) do desc 'An arbitrary name used as the identity of the resource.' end newparam(:server) do desc 'DNS name or IP address of the server where Elasticsearch should be running.' defaultto 'localhost' end newparam(:port) do desc 'The port that the Elasticsearch instance should be listening on.' defaultto 9200 end newparam(:timeout) do desc 'The max number of seconds that the validator should wait before giving up and deciding that Elasticsearch is not running; defaults to 60 seconds.' defaultto 60 validate do |value| # This will raise an error if the string is not convertible to an integer Integer(value) end munge do |value| Integer(value) end end newparam(:sleep_interval) do desc 'The number of seconds that the validator should wait before retrying the connection to Elasticsearch; defaults to 10 seconds.' defaultto 10 validate do |value| # This will raise an error if the string is not convertible to an integer Integer(value) end munge do |value| Integer(value) end end end diff --git a/lib/puppet/util/es_instance_validator.rb b/lib/puppet/util/es_instance_validator.rb index 77f0b3c..d9585d9 100644 --- a/lib/puppet/util/es_instance_validator.rb +++ b/lib/puppet/util/es_instance_validator.rb @@ -1,44 +1,43 @@ +# frozen_string_literal: true + require 'socket' require 'timeout' module Puppet # Namespace for miscellaneous tools module Util # Helper class to assist with talking to the Elasticsearch service ports. class EsInstanceValidator - attr_reader :instance_server - attr_reader :instance_port + attr_reader :instance_server, :instance_port def initialize(instance_server, instance_port) @instance_server = instance_server @instance_port = instance_port # Avoid deprecation warnings in Puppet versions < 4 @timeout = if Facter.value(:puppetversion).split('.').first.to_i < 4 Puppet[:configtimeout] else Puppet[:http_connect_timeout] end end # Utility method; attempts to make an https connection to the Elasticsearch instance. # This is abstracted out into a method so that it can be called multiple times # for retry attempts. # # @return true if the connection is successful, false otherwise. 
def attempt_connection Timeout.timeout(@timeout) do - begin - TCPSocket.new(@instance_server, @instance_port).close - true - rescue Errno::EADDRNOTAVAIL, Errno::ECONNREFUSED, Errno::EHOSTUNREACH => e - Puppet.debug "Unable to connect to Elasticsearch instance (#{@instance_server}:#{@instance_port}): #{e.message}" - false - end + TCPSocket.new(@instance_server, @instance_port).close + true + rescue Errno::EADDRNOTAVAIL, Errno::ECONNREFUSED, Errno::EHOSTUNREACH => e + Puppet.debug "Unable to connect to Elasticsearch instance (#{@instance_server}:#{@instance_port}): #{e.message}" + false end rescue Timeout::Error false end end end end diff --git a/lib/puppet_x/elastic/asymmetric_compare.rb b/lib/puppet_x/elastic/asymmetric_compare.rb index 850d885..8662550 100644 --- a/lib/puppet_x/elastic/asymmetric_compare.rb +++ b/lib/puppet_x/elastic/asymmetric_compare.rb @@ -1,24 +1,26 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom Elastic functions module Elastic # Certain Elasticsearch APIs return fields that are present in responses # but not present when sending API requests such as creation time, and so # on. When comparing desired settings and extant settings, only indicate # that a value differs from another when user-desired settings differ from # existing settings - we ignore keys that exist in the response that aren't # being explicitly controlled by Puppet. def self.asymmetric_compare(should_val, is_val) should_val.reduce(true) do |is_synced, (should_key, should_setting)| if is_val.key? should_key if is_val[should_key].is_a? Hash asymmetric_compare(should_setting, is_val[should_key]) else is_synced && is_val[should_key] == should_setting end else is_synced && true end end end - end # of Elastic -end # of Puppet_X + end +end diff --git a/lib/puppet_x/elastic/deep_implode.rb b/lib/puppet_x/elastic/deep_implode.rb index 3a51355..103967d 100644 --- a/lib/puppet_x/elastic/deep_implode.rb +++ b/lib/puppet_x/elastic/deep_implode.rb @@ -1,33 +1,35 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom ruby for some Elastic utilities. module Elastic # Recursively implode a hash into dot-delimited structure of Hash # keys/values. def self.deep_implode(hash) ret = {} implode ret, hash ret end # Recursively descend into hash values, flattening the key structure into # dot-delimited keyed Hash. def self.implode(new_hash, hash, path = []) hash.sort_by { |k, _v| k.length }.reverse.each do |key, value| new_path = path + [key] case value when Hash implode(new_hash, value, new_path) else new_key = new_path.join('.') - if value.is_a? Array \ - and new_hash.key? new_key \ - and new_hash[new_key].is_a? 
Array + if value.is_a?(Array) \ + && new_hash.key?(new_key) \ + && new_hash[new_key].is_a?(Array) new_hash[new_key] += value else new_hash[new_key] ||= value end end end - end # of deep_implode - end # of Elastic -end # of Puppet_X + end + end +end diff --git a/lib/puppet_x/elastic/deep_to_i.rb b/lib/puppet_x/elastic/deep_to_i.rb index 32f9a1f..8fc5d26 100644 --- a/lib/puppet_x/elastic/deep_to_i.rb +++ b/lib/puppet_x/elastic/deep_to_i.rb @@ -1,20 +1,22 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom Elastic functions module Elastic # This ugly hack is required due to the fact Puppet passes in the # puppet-native hash with stringified numerics, which causes the # decoded JSON from the Elasticsearch API to be seen as out-of-sync # when the parsed template hash is compared against the puppet hash. def self.deep_to_i(obj) - if obj.is_a? String and obj =~ /^-?[0-9]+$/ + if obj.is_a?(String) && obj =~ %r{^-?[0-9]+$} obj.to_i elsif obj.is_a? Array obj.map { |element| deep_to_i(element) } elsif obj.is_a? Hash obj.merge(obj) { |_key, val| deep_to_i(val) } else obj end end - end # of Elastic -end # of Puppet_X + end +end diff --git a/lib/puppet_x/elastic/deep_to_s.rb b/lib/puppet_x/elastic/deep_to_s.rb index 2d32f17..6ed8a28 100644 --- a/lib/puppet_x/elastic/deep_to_s.rb +++ b/lib/puppet_x/elastic/deep_to_s.rb @@ -1,20 +1,22 @@ -module Puppet_X +# frozen_string_literal: true + +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom Elastic functions module Elastic # When given a hash, this method recurses deeply into all values to convert # any that aren't data structures into strings. This is necessary when # comparing results from Elasticsearch API calls, because values like # integers and booleans are in string form. def self.deep_to_s(obj) if obj.is_a? Array obj.map { |element| deep_to_s(element) } elsif obj.is_a? Hash obj.merge(obj) { |_key, val| deep_to_s(val) } - elsif (not obj.is_a? String) and (not [true, false].include?(obj)) and obj.respond_to? :to_s + elsif (!obj.is_a? String) && ![true, false].include?(obj) && obj.respond_to?(:to_s) obj.to_s else obj end end - end # of Elastic -end # of Puppet_X + end +end diff --git a/lib/puppet_x/elastic/elasticsearch_rest_resource.rb b/lib/puppet_x/elastic/elasticsearch_rest_resource.rb index b00d5c2..d3a3a1d 100644 --- a/lib/puppet_x/elastic/elasticsearch_rest_resource.rb +++ b/lib/puppet_x/elastic/elasticsearch_rest_resource.rb @@ -1,93 +1,91 @@ +# frozen_string_literal: true + require 'puppet/parameter/boolean' # Provides common properties and parameters for REST-based Elasticsearch types module ElasticsearchRESTResource - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def self.extended(extender) extender.newparam(:ca_file) do desc 'Absolute path to a CA file to authenticate server certs against.' end extender.newparam(:ca_path) do desc 'Absolute path to a directory containing CA files.' end extender.newparam(:host) do desc 'Hostname or address of Elasticsearch instance.' defaultto 'localhost' validate do |value| - unless value.is_a? String - raise Puppet::Error, 'invalid parameter, expected string' - end + raise Puppet::Error, 'invalid parameter, expected string' unless value.is_a? String end end extender.newparam(:password) do desc 'Optional HTTP basic auth plaintext password for Elasticsearch.' end extender.newparam(:port) do desc 'Port to use for Elasticsearch HTTP API operations.' 
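# Illustrative sketch (not part of this patch): a minimal, standalone
# re-implementation of what Puppet_X::Elastic.deep_implode (earlier in this
# patch) does, so settings written as nested hashes and as dot-delimited keys
# compare equal; the real helper also handles key ordering and array merging.
def implode_sketch(hash, path = [], out = {})
  hash.each do |key, value|
    new_path = path + [key]
    if value.is_a?(Hash)
      implode_sketch(value, new_path, out)
    else
      out[new_path.join('.')] = value
    end
  end
  out
end
implode_sketch('index' => { 'number_of_replicas' => 0 })
# => { "index.number_of_replicas" => 0 }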
defaultto 9200 munge do |value| - if value.is_a? String + case value + when String value.to_i - elsif value.is_a? Integer + when Integer value else raise Puppet::Error, "unknown '#{value}' timeout type #{value.class}" end end validate do |value| raise Puppet::Error, "invalid port value '#{value}'" \ - unless value.to_s =~ /^([0-9]+)$/ + unless value.to_s =~ %r{^([0-9]+)$} raise Puppet::Error, "invalid port value '#{value}'" \ - unless (0 < Regexp.last_match[0].to_i) \ - and (Regexp.last_match[0].to_i < 65_535) + unless Regexp.last_match[0].to_i.positive? \ + && (Regexp.last_match[0].to_i < 65_535) end end extender.newparam(:protocol) do desc 'Protocol to use for communication with Elasticsearch.' defaultto 'http' end extender.newparam(:timeout) do desc 'HTTP timeout for reading/writing content to Elasticsearch.' defaultto 10 munge do |value| - if value.is_a? String + case value + when String value.to_i - elsif value.is_a? Integer + when Integer value else raise Puppet::Error, "unknown '#{value}' timeout type #{value.class}" end end validate do |value| - if value.to_s !~ /^\d+$/ - raise Puppet::Error, 'timeout must be a positive integer' - end + raise Puppet::Error, 'timeout must be a positive integer' if value.to_s !~ %r{^\d+$} end end extender.newparam(:username) do desc 'Optional HTTP basic auth username for Elasticsearch.' end extender.newparam( :validate_tls, - :boolean => true, - :parent => Puppet::Parameter::Boolean + boolean: true, + parent: Puppet::Parameter::Boolean ) do desc 'Whether to verify TLS/SSL certificates.' defaultto true end end -end # of newtype +end diff --git a/lib/puppet_x/elastic/es_versioning.rb b/lib/puppet_x/elastic/es_versioning.rb index f284170..bf01c02 100644 --- a/lib/puppet_x/elastic/es_versioning.rb +++ b/lib/puppet_x/elastic/es_versioning.rb @@ -1,68 +1,71 @@ +# frozen_string_literal: true + class ElasticsearchPackageNotFoundError < StandardError; end -module Puppet_X +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase module Elastic # Assists with discerning the locally installed version of Elasticsearch. # Implemented in a way to be called from native types and providers in order # to lazily fetch the package version from various arcane Puppet mechanisms. class EsVersioning # All of the default options we'll set for Elasticsearch's command # invocation. DEFAULT_OPTS = { 'home' => 'ES_HOME', 'logs' => 'LOG_DIR', 'data' => 'DATA_DIR', 'work' => 'WORK_DIR', 'conf' => 'CONF_DIR' }.freeze # Create an array of command-line flags to append to an `elasticsearch` # startup command. def self.opt_flags(package_name, catalog, opts = DEFAULT_OPTS.dup) opt_flag = opt_flag(min_version('5.0.0', package_name, catalog)) opts.delete 'work' if min_version '5.0.0', package_name, catalog opts.delete 'home' if min_version '5.4.0', package_name, catalog opt_args = if min_version '6.0.0', package_name, catalog [] else opts.map do |k, v| "-#{opt_flag}default.path.#{k}=${#{v}}" end.sort end opt_args << '--quiet' if min_version '5.0.0', package_name, catalog [opt_flag, opt_args] end # Get the correct option flag depending on whether Elasticsearch is post # version 5. def self.opt_flag(v5_or_later) v5_or_later ? 'E' : 'Des.' end # Predicate to determine whether a package is at least a certain version. def self.min_version(ver, package_name, catalog) Puppet::Util::Package.versioncmp( version(package_name, catalog), ver ) >= 0 end # Fetch the package version for a locally installed package. 
def self.version(package_name, catalog) es_pkg = catalog.resource("Package[#{package_name}]") raise Puppet::Error, "could not find `Package[#{package_name}]` resource" unless es_pkg + [ es_pkg.provider.properties[:version], es_pkg.provider.properties[:ensure] ].each do |property| return property if property.is_a? String end Puppet.warning("could not find valid version for `Package[#{package_name}]` resource") raise ElasticsearchPackageNotFoundError end end end end diff --git a/lib/puppet_x/elastic/hash.rb b/lib/puppet_x/elastic/hash.rb index 47f9685..44c2d32 100644 --- a/lib/puppet_x/elastic/hash.rb +++ b/lib/puppet_x/elastic/hash.rb @@ -1,75 +1,75 @@ +# frozen_string_literal: true + # Custom extensions namespace -module Puppet_X +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Elastic helpers module Elastic # Utility extension for consistent to_yaml behavior. module SortedHash # Upon extension, modify the hash appropriately to render # sorted yaml dependent upon whichever way is supported for # this version of Puppet/Ruby's yaml implementation. - # rubocop:disable Metrics/CyclomaticComplexity - # rubocop:disable Metrics/PerceivedComplexity def self.extended(base) if RUBY_VERSION >= '1.9' # We can sort the hash in Ruby >= 1.9 by recursively # re-inserting key/values in sorted order. Native to_yaml will # call .each and get sorted pairs back. tmp = base.to_a.sort base.clear tmp.each do |key, val| - if val.is_a? base.class + case val + when base.class val.extend Puppet_X::Elastic::SortedHash - elsif val.is_a? Array + when Array val.map do |elem| if elem.is_a? base.class elem.extend(Puppet_X::Elastic::SortedHash) else elem end end end base[key] = val end else # Otherwise, recurse into the hash to extend all nested # hashes with the sorted each_pair method. # # Ruby < 1.9 doesn't support any notion of sorted hashes, # so we have to expressly monkey patch each_pair, which is # called by ZAML (the yaml library used in Puppet < 4; Puppet # >= 4 deprecates Ruby 1.8) # # Note that respond_to? is used here as there were weird # problems with .class/.is_a? base.merge! base do |_, ov, _| if ov.respond_to? :each_pair ov.extend Puppet_X::Elastic::SortedHash elsif ov.is_a? Array ov.map do |elem| if elem.respond_to? :each_pair elem.extend Puppet_X::Elastic::SortedHash else elem end end else ov end end end end - # rubocop:enable Metrics/CyclomaticComplexity - # rubocop:enable Metrics/PerceivedComplexity # Override each_pair with a method that yields key/values in # sorted order. def each_pair return to_enum(:each_pair) unless block_given? + keys.sort.each do |key| yield key, self[key] end self end end end end diff --git a/lib/puppet_x/elastic/plugin_parsing.rb b/lib/puppet_x/elastic/plugin_parsing.rb index 5e1f98c..0d96e66 100644 --- a/lib/puppet_x/elastic/plugin_parsing.rb +++ b/lib/puppet_x/elastic/plugin_parsing.rb @@ -1,33 +1,38 @@ +# frozen_string_literal: true + class ElasticPluginParseFailure < StandardError; end -module Puppet_X +module Puppet_X # rubocop:disable Style/ClassAndModuleCamelCase # Custom functions for plugin string parsing. module Elastic def self.plugin_name(raw_name) plugin_split(raw_name, 1) end def self.plugin_version(raw_name) - v = plugin_split(raw_name, 2, false).gsub(/^[^0-9]*/, '') + v = plugin_split(raw_name, 2, false).gsub(%r{^[^0-9]*}, '') raise ElasticPluginParseFailure, "could not parse version, got '#{v}'" if v.empty? 
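# Illustrative sketch (not part of this patch): what the parsing above boils
# down to for a slash-delimited plugin identifier; plugin_split (defined just
# below) applies the same logic with ':' as the delimiter for Maven-style
# names. The plugin string is only an example.
raw     = 'mobz/elasticsearch-head/1.0.0'
parts   = raw.split('/')
name    = parts[1].gsub(%r{(elasticsearch-|es-)}, '') # => "head"
version = parts[2].gsub(%r{^[^0-9]*}, '')             # => "1.0.0"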
+ v end # Attempt to guess at the plugin's final directory name def self.plugin_split(original_string, position, soft_fail = true) # Try both colon (maven) and slash-delimited (github/elastic.co) names %w[/ :].each do |delimiter| parts = original_string.split(delimiter) # If the string successfully split, assume we found the right format - return parts[position].gsub(/(elasticsearch-|es-)/, '') unless parts[position].nil? + return parts[position].gsub(%r{(elasticsearch-|es-)}, '') unless parts[position].nil? end - raise( - ElasticPluginParseFailure, - "could not find element '#{position}' in #{original_string}" - ) unless soft_fail + unless soft_fail + raise( + ElasticPluginParseFailure, + "could not find element '#{position}' in #{original_string}" + ) + end original_string end - end # of Elastic -end # of Puppet_X + end +end diff --git a/spec/acceptance/tests/acceptance_spec.rb b/spec/acceptance/tests/acceptance_spec.rb index 8cafa20..a016806 100644 --- a/spec/acceptance/tests/acceptance_spec.rb +++ b/spec/acceptance/tests/acceptance_spec.rb @@ -1,83 +1,87 @@ +# frozen_string_literal: true + require 'spec_helper_acceptance' -require 'helpers/acceptance/tests/basic_shared_examples.rb' -require 'helpers/acceptance/tests/template_shared_examples.rb' -require 'helpers/acceptance/tests/removal_shared_examples.rb' -require 'helpers/acceptance/tests/pipeline_shared_examples.rb' -require 'helpers/acceptance/tests/plugin_shared_examples.rb' -require 'helpers/acceptance/tests/plugin_upgrade_shared_examples.rb' -require 'helpers/acceptance/tests/snapshot_repository_shared_examples.rb' -require 'helpers/acceptance/tests/datadir_shared_examples.rb' -require 'helpers/acceptance/tests/package_url_shared_examples.rb' -require 'helpers/acceptance/tests/hiera_shared_examples.rb' -require 'helpers/acceptance/tests/usergroup_shared_examples.rb' -require 'helpers/acceptance/tests/security_shared_examples.rb' +require 'helpers/acceptance/tests/basic_shared_examples' +require 'helpers/acceptance/tests/template_shared_examples' +require 'helpers/acceptance/tests/removal_shared_examples' +require 'helpers/acceptance/tests/pipeline_shared_examples' +require 'helpers/acceptance/tests/plugin_shared_examples' +require 'helpers/acceptance/tests/plugin_upgrade_shared_examples' +require 'helpers/acceptance/tests/snapshot_repository_shared_examples' +require 'helpers/acceptance/tests/datadir_shared_examples' +require 'helpers/acceptance/tests/package_url_shared_examples' +require 'helpers/acceptance/tests/hiera_shared_examples' +require 'helpers/acceptance/tests/usergroup_shared_examples' +require 'helpers/acceptance/tests/security_shared_examples' describe "elasticsearch v#{v[:elasticsearch_full_version]} class" do es_config = { - 'cluster.name' => v[:cluster_name], + 'cluster.name' => v[:cluster_name], 'http.bind_host' => '0.0.0.0', - 'http.port' => 9200, - 'node.name' => 'elasticsearch01' + 'http.port' => 9200, + 'node.name' => 'elasticsearch01' } - let(:elastic_repo) { not v[:is_snapshot] } + let(:elastic_repo) { !v[:is_snapshot] } let(:manifest) do - package = if not v[:is_snapshot] + package = if v[:is_snapshot] <<-MANIFEST - # Hard version set here due to plugin incompatibilities. - version => '#{v[:elasticsearch_full_version]}', + manage_repo => false, + package_url => '#{v[:snapshot_package]}', MANIFEST else <<-MANIFEST - manage_repo => false, - package_url => '#{v[:snapshot_package]}', + # Hard version set here due to plugin incompatibilities. 
+ version => '#{v[:elasticsearch_full_version]}', MANIFEST end - <<-MANIFEST - api_timeout => 60, - config => { -#{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} - }, - jvm_options => [ - '-Xms128m', - '-Xmx128m', - ], - oss => #{v[:oss]}, - #{package} + <<~MANIFEST + api_timeout => 60, + config => { + #{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} + }, + jvm_options => [ + '-Xms128m', + '-Xmx128m', + ], + oss => #{v[:oss]}, + #{package} MANIFEST end context 'testing with' do describe 'simple config' do include_examples('basic acceptance tests', es_config) end include_examples('module removal', es_config) end include_examples('template operations', es_config, v[:template]) include_examples('pipeline operations', es_config, v[:pipeline]) - include_examples( - 'plugin acceptance tests', - es_config, - v[:elasticsearch_plugins] - ) unless v[:elasticsearch_plugins].empty? + unless v[:elasticsearch_plugins].empty? + include_examples( + 'plugin acceptance tests', + es_config, + v[:elasticsearch_plugins] + ) + end include_examples('snapshot repository acceptance tests') include_examples('datadir acceptance tests', es_config) # Skip this for snapshot testing, as we only have package files anyway. include_examples('package_url acceptance tests', es_config) unless v[:is_snapshot] include_examples('hiera acceptance tests', es_config, v[:elasticsearch_plugins]) # Security-related tests (shield/x-pack). # # Skip OSS-only distributions since they do not bundle x-pack, and skip # snapshots since we they don't recognize prod licenses. - include_examples('security acceptance tests', es_config) unless v[:oss] or v[:is_snapshot] + include_examples('security acceptance tests', es_config) unless v[:oss] || v[:is_snapshot] end diff --git a/spec/classes/000_elasticsearch_init_spec.rb b/spec/classes/000_elasticsearch_init_spec.rb index 1bd73cc..fa9daa9 100644 --- a/spec/classes/000_elasticsearch_init_spec.rb +++ b/spec/classes/000_elasticsearch_init_spec.rb @@ -1,440 +1,523 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch', :type => 'class' do +describe 'elasticsearch', type: 'class' do default_params = { - :config => { 'node.name' => 'foo' } + config: { 'node.name' => 'foo' } } on_supported_os.each do |os, facts| context "on #{os}" do case facts[:os]['family'] when 'Debian' let(:defaults_path) { '/etc/default' } let(:system_service_folder) { '/lib/systemd/system' } let(:pkg_ext) { 'deb' } let(:pkg_prov) { 'dpkg' } let(:version_add) { '' } - if (facts[:os]['name'] == 'Debian' and \ - facts[:os]['release']['major'].to_i >= 8) or \ - (facts[:os]['name'] == 'Ubuntu' and \ + + if (facts[:os]['name'] == 'Debian' && \ + facts[:os]['release']['major'].to_i >= 8) || \ + (facts[:os]['name'] == 'Ubuntu' && \ facts[:os]['release']['major'].to_i >= 15) let(:systemd_service_path) { '/lib/systemd/system' } + test_pid = true else test_pid = false end when 'RedHat' let(:defaults_path) { '/etc/sysconfig' } let(:system_service_folder) { '/lib/systemd/system' } let(:pkg_ext) { 'rpm' } let(:pkg_prov) { 'rpm' } let(:version_add) { '-1' } + if facts[:os]['release']['major'].to_i >= 7 let(:systemd_service_path) { '/lib/systemd/system' } + test_pid = true else test_pid = false end when 'Suse' let(:defaults_path) { '/etc/sysconfig' } let(:pkg_ext) { 'rpm' } let(:pkg_prov) { 'rpm' } let(:version_add) { '-1' } - if facts[:os]['name'] == 'OpenSuSE' and + + if facts[:os]['name'] == 'OpenSuSE' && facts[:os]['release']['major'].to_i <= 12 let(:systemd_service_path) { 
'/lib/systemd/system' } else let(:systemd_service_path) { '/usr/lib/systemd/system' } end end let(:facts) do facts.merge('scenario' => '', 'common' => '', 'elasticsearch' => {}) end let(:params) do default_params.merge({}) end - it { should compile.with_all_deps } + it { is_expected.to compile.with_all_deps } # Varies depending on distro - it { should contain_augeas("#{defaults_path}/elasticsearch") } + it { is_expected.to contain_augeas("#{defaults_path}/elasticsearch") } # Systemd-specific files if test_pid == true - it { should contain_service('elasticsearch').with( - :ensure => 'running', - :enable => true - ) } + it { + expect(subject).to contain_service('elasticsearch').with( + ensure: 'running', + enable: true + ) + } end context 'java installation' do let(:pre_condition) do <<-MANIFEST include ::java MANIFEST end - it { should contain_class('elasticsearch::config') - .that_requires('Class[java]') } + it { + expect(subject).to contain_class('elasticsearch::config'). + that_requires('Class[java]') + } end context 'package installation' do context 'via repository' do context 'with specified version' do let(:params) do default_params.merge( - :version => '1.0' + version: '1.0' ) end - it { should contain_package('elasticsearch') - .with(:ensure => "1.0#{version_add}") } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: "1.0#{version_add}") + } end if facts[:os]['family'] == 'RedHat' context 'Handle special CentOS/RHEL package versioning' do let(:params) do default_params.merge( - :version => '1.1-2' + version: '1.1-2' ) end - it { should contain_package('elasticsearch') - .with(:ensure => '1.1-2') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: '1.1-2') + } end end end context 'when setting package version and package_url' do let(:params) do default_params.merge( - :version => '0.90.10', - :package_url => "puppet:///path/to/some/es-0.90.10.#{pkg_ext}" + version: '0.90.10', + package_url: "puppet:///path/to/some/es-0.90.10.#{pkg_ext}" ) end - it { expect { should raise_error(Puppet::Error) } } + it { is_expected.to raise_error(Puppet::Error) } end context 'via package_url setting' do ['file:/', 'ftp://', 'http://', 'https://', 'puppet:///'].each do |schema| context "using #{schema} schema" do let(:params) do default_params.merge( - :package_url => "#{schema}domain-or-path/pkg.#{pkg_ext}" + package_url: "#{schema}domain-or-path/pkg.#{pkg_ext}" ) end unless schema.start_with? 'puppet' - it { should contain_exec('create_package_dir_elasticsearch') - .with(:command => 'mkdir -p /opt/elasticsearch/swdl') } - it { should contain_file('/opt/elasticsearch/swdl') - .with( - :purge => false, - :force => false, - :require => 'Exec[create_package_dir_elasticsearch]' - ) } + it { + expect(subject).to contain_exec('create_package_dir_elasticsearch'). + with(command: 'mkdir -p /opt/elasticsearch/swdl') + } + + it { + expect(subject).to contain_file('/opt/elasticsearch/swdl'). 
+ with( + purge: false, + force: false, + require: 'Exec[create_package_dir_elasticsearch]' + ) + } end case schema when 'file:/' - it { should contain_file( - "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" - ).with( - :source => "/domain-or-path/pkg.#{pkg_ext}", - :backup => false - ) } + it { + expect(subject).to contain_file( + "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" + ).with( + source: "/domain-or-path/pkg.#{pkg_ext}", + backup: false + ) + } when 'puppet:///' - it { should contain_file( - "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" - ).with( - :source => "#{schema}domain-or-path/pkg.#{pkg_ext}", - :backup => false - ) } + it { + expect(subject).to contain_file( + "/opt/elasticsearch/swdl/pkg.#{pkg_ext}" + ).with( + source: "#{schema}domain-or-path/pkg.#{pkg_ext}", + backup: false + ) + } else [true, false].each do |verify_certificates| context "with download_tool_verify_certificates '#{verify_certificates}'" do let(:params) do default_params.merge( - :package_url => "#{schema}domain-or-path/pkg.#{pkg_ext}", - :download_tool_verify_certificates => verify_certificates + package_url: "#{schema}domain-or-path/pkg.#{pkg_ext}", + download_tool_verify_certificates: verify_certificates ) end - flag = (not verify_certificates) ? ' --no-check-certificate' : '' + flag = verify_certificates ? '' : ' --no-check-certificate' - it { should contain_exec('download_package_elasticsearch') - .with( - :command => "wget#{flag} -O /opt/elasticsearch/swdl/pkg.#{pkg_ext} #{schema}domain-or-path/pkg.#{pkg_ext} 2> /dev/null", - :require => 'File[/opt/elasticsearch/swdl]' - ) } + it { + expect(subject).to contain_exec('download_package_elasticsearch'). + with( + command: "wget#{flag} -O /opt/elasticsearch/swdl/pkg.#{pkg_ext} #{schema}domain-or-path/pkg.#{pkg_ext} 2> /dev/null", + require: 'File[/opt/elasticsearch/swdl]' + ) + } end end end - it { should contain_package('elasticsearch') - .with( - :ensure => 'present', - :source => "/opt/elasticsearch/swdl/pkg.#{pkg_ext}", - :provider => pkg_prov - ) } + it { + expect(subject).to contain_package('elasticsearch'). + with( + ensure: 'present', + source: "/opt/elasticsearch/swdl/pkg.#{pkg_ext}", + provider: pkg_prov + ) + } end end context 'using http:// schema with proxy_url' do let(:params) do default_params.merge( - :package_url => "http://www.domain.com/package.#{pkg_ext}", - :proxy_url => 'http://proxy.example.com:12345/' + package_url: "http://www.domain.com/package.#{pkg_ext}", + proxy_url: 'http://proxy.example.com:12345/' ) end - it { should contain_exec('download_package_elasticsearch') - .with( - :environment => [ - 'use_proxy=yes', - 'http_proxy=http://proxy.example.com:12345/', - 'https_proxy=http://proxy.example.com:12345/' - ] - ) } + it { + expect(subject).to contain_exec('download_package_elasticsearch'). + with( + environment: [ + 'use_proxy=yes', + 'http_proxy=http://proxy.example.com:12345/', + 'https_proxy=http://proxy.example.com:12345/' + ] + ) + } end end - end # package + end context 'when setting the module to absent' do let(:params) do default_params.merge( - :ensure => 'absent' + ensure: 'absent' ) end case facts[:os]['family'] when 'Suse' - it { should contain_package('elasticsearch') - .with(:ensure => 'absent') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'absent') + } else - it { should contain_package('elasticsearch') - .with(:ensure => 'purged') } + it { + expect(subject).to contain_package('elasticsearch'). 
+ with(ensure: 'purged') + } end - it { should contain_service('elasticsearch') - .with( - :ensure => 'stopped', - :enable => 'false' - ) } - it { should contain_file('/usr/share/elasticsearch/plugins') - .with(:ensure => 'absent') } - it { should contain_file("#{defaults_path}/elasticsearch") - .with(:ensure => 'absent') } + it { + expect(subject).to contain_service('elasticsearch'). + with( + ensure: 'stopped', + enable: 'false' + ) + } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch/plugins'). + with(ensure: 'absent') + } + + it { + expect(subject).to contain_file("#{defaults_path}/elasticsearch"). + with(ensure: 'absent') + } end context 'When managing the repository' do let(:params) do default_params.merge( - :manage_repo => true + manage_repo: true ) end - it { should contain_class('elastic_stack::repo') } + it { is_expected.to contain_class('elastic_stack::repo') } end context 'When not managing the repository' do let(:params) do default_params.merge( - :manage_repo => false + manage_repo: false ) end - it { should compile.with_all_deps } + it { is_expected.to compile.with_all_deps } end end end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'main class tests' do # init.pp - it { should compile.with_all_deps } - it { should contain_class('elasticsearch') } - it { should contain_class('elasticsearch::package') } - it { should contain_class('elasticsearch::config') - .that_requires('Class[elasticsearch::package]') } - it { should contain_class('elasticsearch::service') - .that_requires('Class[elasticsearch::config]') } + it { is_expected.to compile.with_all_deps } + it { is_expected.to contain_class('elasticsearch') } + it { is_expected.to contain_class('elasticsearch::package') } + + it { + expect(subject).to contain_class('elasticsearch::config'). + that_requires('Class[elasticsearch::package]') + } + + it { + expect(subject).to contain_class('elasticsearch::service'). + that_requires('Class[elasticsearch::config]') + } # Base directories - it { should contain_file('/etc/elasticsearch') } - it { should contain_file('/usr/share/elasticsearch') } - it { should contain_file('/usr/share/elasticsearch/lib') } - it { should contain_file('/var/lib/elasticsearch') } + it { is_expected.to contain_file('/etc/elasticsearch') } + it { is_expected.to contain_file('/usr/share/elasticsearch') } + it { is_expected.to contain_file('/usr/share/elasticsearch/lib') } + it { is_expected.to contain_file('/var/lib/elasticsearch') } - it { should contain_exec('remove_plugin_dir') } + it { is_expected.to contain_exec('remove_plugin_dir') } end context 'package installation' do describe 'with default package' do - it { should contain_package('elasticsearch') - .with(:ensure => 'present') } - it { should_not contain_package('my-elasticsearch') - .with(:ensure => 'present') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'present') + } + + it { + expect(subject).not_to contain_package('my-elasticsearch'). 
+ with(ensure: 'present') + } end describe 'with specified package name' do let(:params) do default_params.merge( - :package_name => 'my-elasticsearch' + package_name: 'my-elasticsearch' ) end - it { should contain_package('elasticsearch') - .with(:ensure => 'present', :name => 'my-elasticsearch') } - it { should_not contain_package('elasticsearch') - .with(:ensure => 'present', :name => 'elasticsearch') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'present', name: 'my-elasticsearch') + } + + it { + expect(subject).not_to contain_package('elasticsearch'). + with(ensure: 'present', name: 'elasticsearch') + } end describe 'with auto upgrade enabled' do let(:params) do default_params.merge( - :autoupgrade => true + autoupgrade: true ) end - it { should contain_package('elasticsearch') - .with(:ensure => 'latest') } + it { + expect(subject).to contain_package('elasticsearch'). + with(ensure: 'latest') + } end end describe 'running a a different user' do let(:params) do default_params.merge( - :elasticsearch_user => 'myesuser', - :elasticsearch_group => 'myesgroup' + elasticsearch_user: 'myesuser', + elasticsearch_group: 'myesgroup' ) end - it { should contain_file('/etc/elasticsearch') - .with(:owner => 'myesuser', :group => 'myesgroup') } - it { should contain_file('/var/log/elasticsearch') - .with(:owner => 'myesuser') } - it { should contain_file('/usr/share/elasticsearch') - .with(:owner => 'myesuser', :group => 'myesgroup') } - it { should contain_file('/var/lib/elasticsearch') - .with(:owner => 'myesuser', :group => 'myesgroup') } + it { + expect(subject).to contain_file('/etc/elasticsearch'). + with(owner: 'myesuser', group: 'myesgroup') + } + + it { + expect(subject).to contain_file('/var/log/elasticsearch'). + with(owner: 'myesuser') + } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch'). + with(owner: 'myesuser', group: 'myesgroup') + } + + it { + expect(subject).to contain_file('/var/lib/elasticsearch'). + with(owner: 'myesuser', group: 'myesgroup') + } end describe 'setting jvm_options' do jvm_options = [ '-Xms16g', '-Xmx16g' ] let(:params) do default_params.merge( - :jvm_options => jvm_options + jvm_options: jvm_options ) end jvm_options.each do |jvm_option| - it { should contain_file_line("jvm_option_#{jvm_option}") - .with( - :ensure => 'present', - :path => '/etc/elasticsearch/jvm.options', - :line => jvm_option - )} + it { + expect(subject).to contain_file_line("jvm_option_#{jvm_option}"). + with( + ensure: 'present', + path: '/etc/elasticsearch/jvm.options', + line: jvm_option + ) + } end end context 'with restart_on_change => true' do let(:params) do default_params.merge( - :restart_on_change => true + restart_on_change: true ) end describe 'should restart elasticsearch' do - it { should contain_file('/etc/elasticsearch/elasticsearch.yml') - .that_notifies('Service[elasticsearch]')} + it { + expect(subject).to contain_file('/etc/elasticsearch/elasticsearch.yml'). + that_notifies('Service[elasticsearch]') + } end describe 'setting jvm_options triggers restart' do let(:params) do super().merge( - :jvm_options => ['-Xmx16g'] + jvm_options: ['-Xmx16g'] ) end - it { should contain_file_line('jvm_option_-Xmx16g') - .that_notifies('Service[elasticsearch]')} + it { + expect(subject).to contain_file_line('jvm_option_-Xmx16g'). + that_notifies('Service[elasticsearch]') + } end end # This check helps catch dependency cycles. 
context 'create_resource' do # Helper for these tests def singular(s) case s when 'indices' 'index' when 'snapshot_repositories' 'snapshot_repository' else s[0..-2] end end { 'indices' => { 'test-index' => {} }, # 'instances' => { 'es-instance' => {} }, 'pipelines' => { 'testpipeline' => { 'content' => {} } }, 'plugins' => { 'head' => {} }, 'roles' => { 'elastic_role' => {} }, 'scripts' => { 'foo' => { 'source' => 'puppet:///path/to/foo.groovy' } }, 'snapshot_repositories' => { 'backup' => { 'location' => '/backups' } }, 'templates' => { 'foo' => { 'content' => {} } }, 'users' => { 'elastic' => { 'password' => 'foobar' } } }.each_pair do |deftype, params| describe deftype do let(:params) do default_params.merge( deftype => params ) end - it { should compile } - it { should send( - "contain_elasticsearch__#{singular(deftype)}", params.keys.first - ) } + + it { is_expected.to compile } + + it { + expect(subject).to send( + "contain_elasticsearch__#{singular(deftype)}", params.keys.first + ) + } end end end describe 'oss' do let(:params) do - default_params.merge(:oss => true) + default_params.merge(oss: true) end it do - should contain_package('elasticsearch').with( - :name => 'elasticsearch-oss' + expect(subject).to contain_package('elasticsearch').with( + name: 'elasticsearch-oss' ) end end end end end diff --git a/spec/classes/001_hiera_spec.rb b/spec/classes/001_hiera_spec.rb index e7ad80a..9946e15 100644 --- a/spec/classes/001_hiera_spec.rb +++ b/spec/classes/001_hiera_spec.rb @@ -1,213 +1,244 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch', :type => 'class' do +describe 'elasticsearch', type: 'class' do default_params = { - :config => { 'node.name' => 'foo' } + config: { 'node.name' => 'foo' } } let(:params) do default_params.merge({}) end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do context 'hiera' do describe 'indices' do context 'single indices' do - let(:facts) { facts.merge(:scenario => 'singleindex') } - - it { should contain_elasticsearch__index('baz') - .with( - :ensure => 'present', - :settings => { - 'index' => { - 'number_of_shards' => 1 + let(:facts) { facts.merge(scenario: 'singleindex') } + + it { + expect(subject).to contain_elasticsearch__index('baz'). 
+ with( + ensure: 'present', + settings: { + 'index' => { + 'number_of_shards' => 1 + } } - } - ) } - it { should contain_elasticsearch_index('baz') } - it { should contain_es_instance_conn_validator( - 'baz-index-conn-validator' - ) } + ) + } + + it { is_expected.to contain_elasticsearch_index('baz') } + + it { + expect(subject).to contain_es_instance_conn_validator( + 'baz-index-conn-validator' + ) + } end context 'no indices' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__index('baz') } + it { is_expected.not_to contain_elasticsearch__index('baz') } end end context 'config' do - let(:facts) { facts.merge(:scenario => 'singleinstance') } - - it { should contain_augeas('/etc/sysconfig/elasticsearch') } - it { should contain_file('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat_fragment('main_config') } - it { should contain_service('elasticsearch').with( - :ensure => 'running', - :enable => true - ) } - end # of config + let(:facts) { facts.merge(scenario: 'singleinstance') } + + it { is_expected.to contain_augeas('/etc/sysconfig/elasticsearch') } + it { is_expected.to contain_file('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat_fragment('main_config') } + + it { + expect(subject).to contain_service('elasticsearch').with( + ensure: 'running', + enable: true + ) + } + end describe 'pipelines' do context 'single pipeline' do - let(:facts) { facts.merge(:scenario => 'singlepipeline') } - - it { should contain_elasticsearch__pipeline('testpipeline') - .with( - :ensure => 'present', - :content => { - 'description' => 'Add the foo field', - 'processors' => [ - { - 'set' => { - 'field' => 'foo', - 'value' => 'bar' + let(:facts) { facts.merge(scenario: 'singlepipeline') } + + it { + expect(subject).to contain_elasticsearch__pipeline('testpipeline'). + with( + ensure: 'present', + content: { + 'description' => 'Add the foo field', + 'processors' => [ + { + 'set' => { + 'field' => 'foo', + 'value' => 'bar' + } } - } - ] - } - ) } - it { should contain_elasticsearch_pipeline('testpipeline') } + ] + } + ) + } + + it { is_expected.to contain_elasticsearch_pipeline('testpipeline') } end context 'no pipelines' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__pipeline('testpipeline') } + it { is_expected.not_to contain_elasticsearch__pipeline('testpipeline') } end end describe 'plugins' do context 'single plugin' do - let(:facts) { facts.merge(:scenario => 'singleplugin') } - - it { should contain_elasticsearch__plugin('mobz/elasticsearch-head') - .with( - :ensure => 'present', - :module_dir => 'head' - ) } - it { should contain_elasticsearch_plugin('mobz/elasticsearch-head') } + let(:facts) { facts.merge(scenario: 'singleplugin') } + + it { + expect(subject).to contain_elasticsearch__plugin('mobz/elasticsearch-head'). 
+ with( + ensure: 'present', + module_dir: 'head' + ) + } + + it { is_expected.to contain_elasticsearch_plugin('mobz/elasticsearch-head') } end context 'no plugins' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } + it { + expect(subject).not_to contain_elasticsearch__plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } end end describe 'roles' do context 'single roles' do - let(:facts) { facts.merge(:scenario => 'singlerole') } + let(:facts) { facts.merge(scenario: 'singlerole') } let(:params) do default_params end - it { should contain_elasticsearch__role('admin') - .with( - :ensure => 'present', - :privileges => { - 'cluster' => 'monitor', - 'indices' => { - '*' => 'all' - } - }, - :mappings => [ - 'cn=users,dc=example,dc=com' - ] - ) } - it { should contain_elasticsearch_role('admin') } - it { should contain_elasticsearch_role_mapping('admin') } + it { + expect(subject).to contain_elasticsearch__role('admin'). + with( + ensure: 'present', + privileges: { + 'cluster' => 'monitor', + 'indices' => { + '*' => 'all' + } + }, + mappings: [ + 'cn=users,dc=example,dc=com' + ] + ) + } + + it { is_expected.to contain_elasticsearch_role('admin') } + it { is_expected.to contain_elasticsearch_role_mapping('admin') } end context 'no roles' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__role('admin') } + it { is_expected.not_to contain_elasticsearch__role('admin') } end end describe 'scripts' do context 'single scripts' do - let(:facts) { facts.merge(:scenario => 'singlescript') } - - it { should contain_elasticsearch__script('myscript') - .with( - :ensure => 'present', - :source => 'puppet:///file/here' - ) } - it { should contain_file('/usr/share/elasticsearch/scripts/here') } + let(:facts) { facts.merge(scenario: 'singlescript') } + + it { + expect(subject).to contain_elasticsearch__script('myscript'). + with( + ensure: 'present', + source: 'puppet:///file/here' + ) + } + + it { is_expected.to contain_file('/usr/share/elasticsearch/scripts/here') } end context 'no roles' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__script('myscript') } + it { is_expected.not_to contain_elasticsearch__script('myscript') } end end describe 'templates' do context 'single template' do - let(:facts) { facts.merge(:scenario => 'singletemplate') } - - it { should contain_elasticsearch__template('foo') - .with( - :ensure => 'present', - :content => { - 'template' => 'foo-*', - 'settings' => { - 'index' => { - 'number_of_replicas' => 0 + let(:facts) { facts.merge(scenario: 'singletemplate') } + + it { + expect(subject).to contain_elasticsearch__template('foo'). 
+ with( + ensure: 'present', + content: { + 'template' => 'foo-*', + 'settings' => { + 'index' => { + 'number_of_replicas' => 0 + } } } - } - ) } - it { should contain_elasticsearch_template('foo') } + ) + } + + it { is_expected.to contain_elasticsearch_template('foo') } end context 'no templates' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__template('foo') } + it { is_expected.not_to contain_elasticsearch__template('foo') } end end describe 'users' do context 'single users' do - let(:facts) { facts.merge(:scenario => 'singleuser') } + let(:facts) { facts.merge(scenario: 'singleuser') } let(:params) do default_params end - it { should contain_elasticsearch__user('elastic') - .with( - :ensure => 'present', - :roles => ['admin'], - :password => 'password' - ) } - it { should contain_elasticsearch_user('elastic') } + it { + expect(subject).to contain_elasticsearch__user('elastic'). + with( + ensure: 'present', + roles: ['admin'], + password: 'password' + ) + } + + it { is_expected.to contain_elasticsearch_user('elastic') } end context 'no users' do - let(:facts) { facts.merge(:scenario => '') } + let(:facts) { facts.merge(scenario: '') } - it { should_not contain_elasticsearch__user('elastic') } + it { is_expected.not_to contain_elasticsearch__user('elastic') } end end end end end end diff --git a/spec/classes/006_elasticsearch_license_spec.rb b/spec/classes/006_elasticsearch_license_spec.rb index 0c2579d..580bdad 100644 --- a/spec/classes/006_elasticsearch_license_spec.rb +++ b/spec/classes/006_elasticsearch_license_spec.rb @@ -1,85 +1,89 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::license', :type => 'class' do +describe 'elasticsearch::license', type: 'class' do # First, randomly select one of our supported OSes to run tests that apply # to any distro on_supported_os.to_a.sample(1).to_h.each do |os, facts| context "on #{os}" do let(:facts) do facts.merge('scenario' => '', 'common' => '') end context 'when managing x-pack license' do let(:params) do { - :content => { + content: { 'license' => { - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date_in_millis' => 1_519_341_125_550, + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date_in_millis' => 1_519_341_125_550, 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'signature' => 'secretvalue', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'signature' => 'secretvalue', + 'start_date_in_millis' => 1_513_814_400_000 } } } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_class('elasticsearch::license') + expect(subject).to contain_class('elasticsearch::license') end + it do - should contain_es_instance_conn_validator( + expect(subject).to contain_es_instance_conn_validator( 'license-conn-validator' ).that_comes_before('elasticsearch_license[xpack]') end + it do - should contain_elasticsearch_license('xpack').with( - :ensure => 'present', - :content => { + expect(subject).to contain_elasticsearch_license('xpack').with( + 
ensure: 'present', + content: { 'license' => { - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date_in_millis' => 1_519_341_125_550, + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date_in_millis' => 1_519_341_125_550, 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'signature' => 'secretvalue', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'signature' => 'secretvalue', + 'start_date_in_millis' => 1_513_814_400_000 } }, - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end end end end diff --git a/spec/classes/010_elasticsearch_init_unkown_spec.rb b/spec/classes/010_elasticsearch_init_unkown_spec.rb index 6efed4a..95bad53 100644 --- a/spec/classes/010_elasticsearch_init_unkown_spec.rb +++ b/spec/classes/010_elasticsearch_init_unkown_spec.rb @@ -1,13 +1,15 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch', :type => 'class' do +describe 'elasticsearch', type: 'class' do context 'on an unknown OS' do context 'it should fail' do let(:facts) do - { :operatingsystem => 'Windows' } + { operatingsystem: 'Windows' } end - it { expect { should raise_error(Puppet::Error) } } + it { is_expected.to raise_error(Puppet::Error) } end end end diff --git a/spec/classes/099_coverage_spec.rb b/spec/classes/099_coverage_spec.rb index 03491b5..03c6441 100644 --- a/spec/classes/099_coverage_spec.rb +++ b/spec/classes/099_coverage_spec.rb @@ -1 +1,3 @@ +# frozen_string_literal: true + at_exit { RSpec::Puppet::Coverage.report! 
100 } diff --git a/spec/defines/003_elasticsearch_template_spec.rb b/spec/defines/003_elasticsearch_template_spec.rb index 6c316b7..27fd8e6 100644 --- a/spec/defines/003_elasticsearch_template_spec.rb +++ b/spec/defines/003_elasticsearch_template_spec.rb @@ -1,133 +1,140 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::template', :type => 'define' do +describe 'elasticsearch::template', type: 'define' do on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end let(:title) { 'foo' } let(:pre_condition) do 'class { "elasticsearch" : }' end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', :content => '{}', param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do let(:pre_condition) {} - it { should_not compile } + + it { is_expected.not_to compile } end end describe 'template from source' do let :params do { - :ensure => 'present', - :source => 'puppet:///path/to/foo.json', - :api_protocol => 'https', - :api_host => '127.0.0.1', - :api_port => 9201, - :api_timeout => 11, - :api_basic_auth_username => 'elastic', - :api_basic_auth_password => 'password', - :validate_tls => false + ensure: 'present', + source: 'puppet:///path/to/foo.json', + api_protocol: 'https', + api_host: '127.0.0.1', + api_port: 9201, + api_timeout: 11, + api_basic_auth_username: 'elastic', + api_basic_auth_password: 'password', + validate_tls: false } end - it { should contain_elasticsearch__template('foo') } + it { is_expected.to contain_elasticsearch__template('foo') } + it do - should contain_es_instance_conn_validator('foo-template') - .that_comes_before('Elasticsearch_template[foo]') + expect(subject).to contain_es_instance_conn_validator('foo-template'). 
+ that_comes_before('Elasticsearch_template[foo]') end + it 'passes through parameters' do - should contain_elasticsearch_template('foo').with( - :ensure => 'present', - :source => 'puppet:///path/to/foo.json', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :validate_tls => false + expect(subject).to contain_elasticsearch_template('foo').with( + ensure: 'present', + source: 'puppet:///path/to/foo.json', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + validate_tls: false ) end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present', - :content => '{}' + ensure: 'present', + content: '{}' } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_elasticsearch_template('foo').with( - :ensure => 'present', - :content => '{}', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch_template('foo').with( + ensure: 'present', + content: '{}', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end describe 'template deletion' do let :params do { - :ensure => 'absent' + ensure: 'absent' } end it 'removes templates' do - should contain_elasticsearch_template('foo').with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_template('foo').with(ensure: 'absent') end end end end end diff --git a/spec/defines/004_elasticsearch_plugin_spec.rb b/spec/defines/004_elasticsearch_plugin_spec.rb index 2b66c3d..73cecc4 100644 --- a/spec/defines/004_elasticsearch_plugin_spec.rb +++ b/spec/defines/004_elasticsearch_plugin_spec.rb @@ -1,308 +1,372 @@ +# frozen_string_literal: true + require 'spec_helper' require 'helpers/class_shared_examples' -describe 'elasticsearch::plugin', :type => 'define' do +describe 'elasticsearch::plugin', type: 'define' do let(:title) { 'mobz/elasticsearch-head/1.0.0' } on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |_os, facts| let(:facts) do facts.merge('scenario' => '', 'common' => '') end let(:pre_condition) do <<-EOS class { "elasticsearch": config => { "node" => { "name" => "test" } } } EOS end context 'default values' do context 'present' do - let(:params) do { - :ensure => 'present', - :configdir => '/etc/elasticsearch' - } end + let(:params) do + { + ensure: 'present', + configdir: '/etc/elasticsearch' + } + end it { is_expected.to compile } end context 'absent' do - let(:params) do { - :ensure => 'absent' - } end + let(:params) do + { + ensure: 'absent' + } + end it { is_expected.to compile } end context 'configdir' do - it { should contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ).with_configdir('/etc/elasticsearch') } + it { + expect(subject).to contain_elasticsearch__plugin( + 
'mobz/elasticsearch-head/1.0.0' + ).with_configdir('/etc/elasticsearch') + } - it { should contain_elasticsearch_plugin( - 'mobz/elasticsearch-head/1.0.0' - ).with_configdir('/etc/elasticsearch') } + it { + expect(subject).to contain_elasticsearch_plugin( + 'mobz/elasticsearch-head/1.0.0' + ).with_configdir('/etc/elasticsearch') + } end end context 'with module_dir' do context 'add a plugin' do - let(:params) do { - :ensure => 'present', - :module_dir => 'head' - } end - - it { should contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } - it { should contain_elasticsearch_plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } - it { should contain_file( - '/usr/share/elasticsearch/plugins/head' - ).that_requires( - 'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' - ) } + let(:params) do + { + ensure: 'present', + module_dir: 'head' + } + end + + it { + expect(subject).to contain_elasticsearch__plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } + + it { + expect(subject).to contain_elasticsearch_plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } + + it { + expect(subject).to contain_file( + '/usr/share/elasticsearch/plugins/head' + ).that_requires( + 'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' + ) + } end context 'remove a plugin' do - let(:params) do { - :ensure => 'absent', - :module_dir => 'head' - } end - - it { should contain_elasticsearch__plugin( - 'mobz/elasticsearch-head/1.0.0' - ) } - it { should contain_elasticsearch_plugin( - 'mobz/elasticsearch-head/1.0.0' - ).with( - :ensure => 'absent' - ) } - it { should contain_file( - '/usr/share/elasticsearch/plugins/head' - ).that_requires( - 'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' - ) } + let(:params) do + { + ensure: 'absent', + module_dir: 'head' + } + end + + it { + expect(subject).to contain_elasticsearch__plugin( + 'mobz/elasticsearch-head/1.0.0' + ) + } + + it { + expect(subject).to contain_elasticsearch_plugin( + 'mobz/elasticsearch-head/1.0.0' + ).with( + ensure: 'absent' + ) + } + + it { + expect(subject).to contain_file( + '/usr/share/elasticsearch/plugins/head' + ).that_requires( + 'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]' + ) + } end end context 'with url' do context 'add a plugin with full name' do - let(:params) do { - :ensure => 'present', - :url => 'https://github.com/mobz/elasticsearch-head/archive/master.zip' - } end + let(:params) do + { + ensure: 'present', + url: 'https://github.com/mobz/elasticsearch-head/archive/master.zip' + } + end - it { should contain_elasticsearch__plugin('mobz/elasticsearch-head/1.0.0') } - it { should contain_elasticsearch_plugin('mobz/elasticsearch-head/1.0.0').with(:ensure => 'present', :url => 'https://github.com/mobz/elasticsearch-head/archive/master.zip') } + it { is_expected.to contain_elasticsearch__plugin('mobz/elasticsearch-head/1.0.0') } + it { is_expected.to contain_elasticsearch_plugin('mobz/elasticsearch-head/1.0.0').with(ensure: 'present', url: 'https://github.com/mobz/elasticsearch-head/archive/master.zip') } end end context 'offline plugin install' do let(:title) { 'head' } - let(:params) do { - :ensure => 'present', - :source => 'puppet:///path/to/my/plugin.zip' - } end - - it { should contain_elasticsearch__plugin('head') } - it { should contain_file('/opt/elasticsearch/swdl/plugin.zip').with(:source => 'puppet:///path/to/my/plugin.zip', :before => 'Elasticsearch_plugin[head]') } - it { should contain_elasticsearch_plugin('head').with(:ensure => 'present', :source => '/opt/elasticsearch/swdl/plugin.zip') } + let(:params) do 
+ { + ensure: 'present', + source: 'puppet:///path/to/my/plugin.zip' + } + end + + it { is_expected.to contain_elasticsearch__plugin('head') } + it { is_expected.to contain_file('/opt/elasticsearch/swdl/plugin.zip').with(source: 'puppet:///path/to/my/plugin.zip', before: 'Elasticsearch_plugin[head]') } + it { is_expected.to contain_elasticsearch_plugin('head').with(ensure: 'present', source: '/opt/elasticsearch/swdl/plugin.zip') } end describe 'service restarts' do let(:title) { 'head' } - let(:params) do { - :ensure => 'present', - :module_dir => 'head' - } end + let(:params) do + { + ensure: 'present', + module_dir: 'head' + } + end context 'restart_on_change set to false (default)' do let(:pre_condition) do <<-EOS class { "elasticsearch": } EOS end - it { should_not contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).not_to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples 'class', :sysv end context 'restart_on_change set to true' do let(:pre_condition) do <<-EOS class { "elasticsearch": restart_on_change => true, } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples('class') end context 'restart_plugin_change set to false (default)' do let(:pre_condition) do <<-EOS class { "elasticsearch": restart_plugin_change => false, } EOS end - it { should_not contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).not_to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples('class') end context 'restart_plugin_change set to true' do let(:pre_condition) do <<-EOS class { "elasticsearch": restart_plugin_change => true, } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).that_notifies( - 'Service[elasticsearch]' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).that_notifies( + 'Service[elasticsearch]' + ) + } include_examples('class') end end describe 'proxy arguments' do let(:title) { 'head' } context 'unauthenticated' do context 'on define' do - let(:params) do { - :ensure => 'present', - :proxy_host => 'es.local', - :proxy_port => 8080 - } end + let(:params) do + { + ensure: 'present', + proxy_host: 'es.local', + proxy_port: 8080 + } + end - it { should contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'http://es.local:8080' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'http://es.local:8080' + ) + } end context 'on main class' do - let(:params) do { - :ensure => 'present' - } end + let(:params) do + { + ensure: 'present' + } + end let(:pre_condition) do <<-EOS class { 'elasticsearch': proxy_url => 'https://es.local:8080', } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'https://es.local:8080' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'https://es.local:8080' + ) + } end end context 'authenticated' do context 'on define' do - let(:params) do { - :ensure => 'present', - :proxy_host => 'es.local', - :proxy_port => 8080, - :proxy_username => 'elastic', - :proxy_password => 'password' - } end - - it { should contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'http://elastic:password@es.local:8080' 
- )} + let(:params) do + { + ensure: 'present', + proxy_host: 'es.local', + proxy_port: 8080, + proxy_username: 'elastic', + proxy_password: 'password' + } + end + + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'http://elastic:password@es.local:8080' + ) + } end context 'on main class' do - let(:params) do { - :ensure => 'present' - } end + let(:params) do + { + ensure: 'present' + } + end let(:pre_condition) do <<-EOS class { 'elasticsearch': proxy_url => 'http://elastic:password@es.local:8080', } EOS end - it { should contain_elasticsearch_plugin( - 'head' - ).with_proxy( - 'http://elastic:password@es.local:8080' - )} + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).with_proxy( + 'http://elastic:password@es.local:8080' + ) + } end end end describe 'collector ordering' do describe 'present' do let(:title) { 'head' } let(:pre_condition) do <<-EOS class { 'elasticsearch': } EOS end - it { should contain_elasticsearch__plugin( - 'head' - ).that_requires( - 'Class[elasticsearch::config]' - )} - - it { should contain_elasticsearch_plugin( - 'head' - ).that_comes_before( - 'Service[elasticsearch]' - )} + it { + expect(subject).to contain_elasticsearch__plugin( + 'head' + ).that_requires( + 'Class[elasticsearch::config]' + ) + } + + it { + expect(subject).to contain_elasticsearch_plugin( + 'head' + ).that_comes_before( + 'Service[elasticsearch]' + ) + } include_examples 'class' end end end end diff --git a/spec/defines/006_elasticsearch_script_spec.rb b/spec/defines/006_elasticsearch_script_spec.rb index 84414cd..aa5e9f0 100644 --- a/spec/defines/006_elasticsearch_script_spec.rb +++ b/spec/defines/006_elasticsearch_script_spec.rb @@ -1,81 +1,101 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::script', :type => 'define' do +describe 'elasticsearch::script', type: 'define' do let(:title) { 'foo' } let(:pre_condition) do %( class { "elasticsearch": config => { "node" => {"name" => "test" } } } ) end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'missing parent class' do let(:pre_condition) {} - it { should_not compile } + + it { is_expected.not_to compile } end describe 'adding script files' do - let(:params) do { - :ensure => 'present', - :source => 'puppet:///path/to/foo.groovy' - } end + let(:params) do + { + ensure: 'present', + source: 'puppet:///path/to/foo.groovy' + } + end - it { should contain_elasticsearch__script('foo') } - it { should contain_file('/usr/share/elasticsearch/scripts/foo.groovy') - .with( - :source => 'puppet:///path/to/foo.groovy', - :ensure => 'present' - ) } + it { is_expected.to contain_elasticsearch__script('foo') } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch/scripts/foo.groovy'). 
+ with( + source: 'puppet:///path/to/foo.groovy', + ensure: 'present' + ) + } end describe 'adding script directories' do - let(:params) do { - :ensure => 'directory', - :source => 'puppet:///path/to/my_scripts', - :recurse => 'remote' - } end + let(:params) do + { + ensure: 'directory', + source: 'puppet:///path/to/my_scripts', + recurse: 'remote' + } + end - it { should contain_elasticsearch__script('foo') } - it { should contain_file( - '/usr/share/elasticsearch/scripts/my_scripts' - ).with( - :ensure => 'directory', - :source => 'puppet:///path/to/my_scripts', - :recurse => 'remote' - ) } + it { is_expected.to contain_elasticsearch__script('foo') } + + it { + expect(subject).to contain_file( + '/usr/share/elasticsearch/scripts/my_scripts' + ).with( + ensure: 'directory', + source: 'puppet:///path/to/my_scripts', + recurse: 'remote' + ) + } end describe 'removing scripts' do - let(:params) do { - :ensure => 'absent', - :source => 'puppet:///path/to/foo.groovy' - } end + let(:params) do + { + ensure: 'absent', + source: 'puppet:///path/to/foo.groovy' + } + end - it { should contain_elasticsearch__script('foo') } - it { should contain_file('/usr/share/elasticsearch/scripts/foo.groovy') - .with( - :source => 'puppet:///path/to/foo.groovy', - :ensure => 'absent' - ) } + it { is_expected.to contain_elasticsearch__script('foo') } + + it { + expect(subject).to contain_file('/usr/share/elasticsearch/scripts/foo.groovy'). + with( + source: 'puppet:///path/to/foo.groovy', + ensure: 'absent' + ) + } end end end end diff --git a/spec/defines/007_elasticsearch_user_spec.rb b/spec/defines/007_elasticsearch_user_spec.rb index d96496b..74f80c7 100644 --- a/spec/defines/007_elasticsearch_user_spec.rb +++ b/spec/defines/007_elasticsearch_user_spec.rb @@ -1,121 +1,132 @@ +# frozen_string_literal: true + require 'spec_helper' require 'helpers/class_shared_examples' describe 'elasticsearch::user' do let(:title) { 'elastic' } let(:pre_condition) do <<-EOS class { 'elasticsearch': } EOS end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end context 'with default parameters' do let(:params) do { - :password => 'foobar', - :roles => %w[monitor user] + password: 'foobar', + roles: %w[monitor user] } end - it { should contain_elasticsearch__user('elastic') } - it { should contain_elasticsearch_user('elastic') } + it { is_expected.to contain_elasticsearch__user('elastic') } + it { is_expected.to contain_elasticsearch_user('elastic') } + it do - should contain_elasticsearch_user_roles('elastic').with( + expect(subject).to contain_elasticsearch_user_roles('elastic').with( 'ensure' => 'present', - 'roles' => %w[monitor user] + 'roles' => %w[monitor user] ) end end describe 'collector ordering' do describe 'when present' do let(:pre_condition) do <<-EOS class { 'elasticsearch': } elasticsearch::template { 'foo': content => {"foo" => "bar"} } elasticsearch::role { 'test_role': privileges => { 'cluster' => 'monitor', 'indices' => { '*' => 'all', }, }, } EOS end let(:params) do { - :password => 'foobar', - :roles => %w[monitor user] + password: 'foobar', + roles: %w[monitor user] } end - it { should contain_elasticsearch__role('test_role') } - it { should contain_elasticsearch_role('test_role') } - it { 
should contain_elasticsearch_role_mapping('test_role') } - it { should contain_elasticsearch__user('elastic') - .that_comes_before([ - 'Elasticsearch::Template[foo]' - ]).that_requires([ - 'Elasticsearch::Role[test_role]' - ])} + it { is_expected.to contain_elasticsearch__role('test_role') } + it { is_expected.to contain_elasticsearch_role('test_role') } + it { is_expected.to contain_elasticsearch_role_mapping('test_role') } + + it { + expect(subject).to contain_elasticsearch__user('elastic'). + that_comes_before([ + 'Elasticsearch::Template[foo]' + ]).that_requires([ + 'Elasticsearch::Role[test_role]' + ]) + } include_examples 'class', :systemd end describe 'when absent' do let(:pre_condition) do <<-EOS class { 'elasticsearch': } elasticsearch::template { 'foo': content => {"foo" => "bar"} } elasticsearch::role { 'test_role': privileges => { 'cluster' => 'monitor', 'indices' => { '*' => 'all', }, }, } EOS end let(:params) do { - :password => 'foobar', - :roles => %w[monitor user] + password: 'foobar', + roles: %w[monitor user] } end - it { should contain_elasticsearch__role('test_role') } - it { should contain_elasticsearch_role('test_role') } - it { should contain_elasticsearch_role_mapping('test_role') } - it { should contain_elasticsearch__user('elastic') - .that_comes_before([ - 'Elasticsearch::Template[foo]' - ]).that_requires([ - 'Elasticsearch::Role[test_role]' - ])} + it { is_expected.to contain_elasticsearch__role('test_role') } + it { is_expected.to contain_elasticsearch_role('test_role') } + it { is_expected.to contain_elasticsearch_role_mapping('test_role') } + + it { + expect(subject).to contain_elasticsearch__user('elastic'). + that_comes_before([ + 'Elasticsearch::Template[foo]' + ]).that_requires([ + 'Elasticsearch::Role[test_role]' + ]) + } include_examples 'class', :systemd end end end end end diff --git a/spec/defines/008_elasticsearch_role_spec.rb b/spec/defines/008_elasticsearch_role_spec.rb index 5804c50..eab1741 100644 --- a/spec/defines/008_elasticsearch_role_spec.rb +++ b/spec/defines/008_elasticsearch_role_spec.rb @@ -1,110 +1,118 @@ +# frozen_string_literal: true + require 'spec_helper' require 'helpers/class_shared_examples' describe 'elasticsearch::role' do let(:title) { 'elastic_role' } let(:pre_condition) do <<-EOS class { 'elasticsearch': } EOS end let(:params) do { - :privileges => { + privileges: { 'cluster' => '*' }, - :mappings => [ + mappings: [ 'cn=users,dc=example,dc=com', 'cn=admins,dc=example,dc=com', 'cn=John Doe,cn=other users,dc=example,dc=com' ] } end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['7'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end context 'with an invalid role name' do context 'too long' do let(:title) { 'A' * 41 } - it { should raise_error(Puppet::Error, /expected length/i) } + + it { is_expected.to raise_error(Puppet::Error, %r{expected length}i) } end end context 'with default parameters' do - it { should contain_elasticsearch__role('elastic_role') } - it { should contain_elasticsearch_role('elastic_role') } + it { is_expected.to contain_elasticsearch__role('elastic_role') } + it { is_expected.to contain_elasticsearch_role('elastic_role') } + it do - should contain_elasticsearch_role_mapping('elastic_role').with( + expect(subject).to 
contain_elasticsearch_role_mapping('elastic_role').with( 'ensure' => 'present', 'mappings' => [ 'cn=users,dc=example,dc=com', 'cn=admins,dc=example,dc=com', 'cn=John Doe,cn=other users,dc=example,dc=com' ] ) end end describe 'collector ordering' do describe 'when present' do let(:pre_condition) do <<-EOS class { 'elasticsearch': } elasticsearch::template { 'foo': content => {"foo" => "bar"} } elasticsearch::user { 'elastic': password => 'foobar', roles => ['elastic_role'], } EOS end - it { should contain_elasticsearch__role('elastic_role') - .that_comes_before([ - 'Elasticsearch::Template[foo]', - 'Elasticsearch::User[elastic]' - ])} + it { + expect(subject).to contain_elasticsearch__role('elastic_role'). + that_comes_before([ + 'Elasticsearch::Template[foo]', + 'Elasticsearch::User[elastic]' + ]) + } include_examples 'class', :systemd end describe 'when absent' do let(:pre_condition) do <<-EOS class { 'elasticsearch': } elasticsearch::template { 'foo': content => {"foo" => "bar"} } elasticsearch::user { 'elastic': password => 'foobar', roles => ['elastic_role'], } EOS end include_examples 'class', :systemd # TODO: Uncomment once upstream issue is fixed. # https://github.com/rodjek/rspec-puppet/issues/418 # it { should contain_elasticsearch__shield__role('elastic_role') # .that_comes_before([ # 'Elasticsearch::Template[foo]', # 'Elasticsearch::Plugin[shield]', # 'Elasticsearch::Shield::User[elastic]' # ])} end end end end end diff --git a/spec/defines/009_elasticsearch_pipeline_spec.rb b/spec/defines/009_elasticsearch_pipeline_spec.rb index e2456fe..bf9dbe4 100644 --- a/spec/defines/009_elasticsearch_pipeline_spec.rb +++ b/spec/defines/009_elasticsearch_pipeline_spec.rb @@ -1,101 +1,106 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::pipeline', :type => 'define' do +describe 'elasticsearch::pipeline', type: 'define' do let(:title) { 'testpipeline' } let(:pre_condition) do 'class { "elasticsearch" : }' end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', :content => {}, param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do let(:pre_condition) {} - it { should_not compile } + + it { is_expected.not_to compile } end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present', - :content => {} + ensure: 'present', + content: {} } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_elasticsearch__pipeline(title) - should contain_es_instance_conn_validator("#{title}-ingest-pipeline") - .that_comes_before("elasticsearch_pipeline[#{title}]") - should contain_elasticsearch_pipeline(title).with( - :ensure => 'present', - 
:content => {}, - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch__pipeline(title) + expect(subject).to contain_es_instance_conn_validator("#{title}-ingest-pipeline"). + that_comes_before("elasticsearch_pipeline[#{title}]") + expect(subject).to contain_elasticsearch_pipeline(title).with( + ensure: 'present', + content: {}, + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end describe 'pipeline deletion' do let :params do { - :ensure => 'absent' + ensure: 'absent' } end it 'removes pipelines' do - should contain_elasticsearch_pipeline(title).with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_pipeline(title).with(ensure: 'absent') end end end end end diff --git a/spec/defines/012_elasticsearch_index_spec.rb b/spec/defines/012_elasticsearch_index_spec.rb index 62596a2..5e9b38c 100644 --- a/spec/defines/012_elasticsearch_index_spec.rb +++ b/spec/defines/012_elasticsearch_index_spec.rb @@ -1,100 +1,105 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::index', :type => 'define' do +describe 'elasticsearch::index', type: 'define' do let(:title) { 'test-index' } let(:pre_condition) do 'class { "elasticsearch" : }' end on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do let(:pre_condition) {} - it { should_not compile } + + it { is_expected.not_to compile } end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present' + ensure: 'present' } end let(:pre_condition) do <<-EOS class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } EOS end it do - should contain_elasticsearch__index(title) - should contain_es_instance_conn_validator( + expect(subject).to contain_elasticsearch__index(title) + expect(subject).to contain_es_instance_conn_validator( "#{title}-index-conn-validator" ).that_comes_before("elasticsearch_index[#{title}]") - should contain_elasticsearch_index(title).with( - :ensure => 'present', - :settings => {}, - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch_index(title).with( + ensure: 'present', + settings: {}, + protocol: 'https', + host: '127.0.0.1', + port: 9201, 
+ timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end describe 'index deletion' do let :params do { - :ensure => 'absent' + ensure: 'absent' } end it 'removes indices' do - should contain_elasticsearch_index(title).with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_index(title).with(ensure: 'absent') end end end end end diff --git a/spec/defines/013_elasticsearch_snapshot_repository_spec.rb b/spec/defines/013_elasticsearch_snapshot_repository_spec.rb index 694859c..b9ffc74 100644 --- a/spec/defines/013_elasticsearch_snapshot_repository_spec.rb +++ b/spec/defines/013_elasticsearch_snapshot_repository_spec.rb @@ -1,134 +1,141 @@ +# frozen_string_literal: true + require 'spec_helper' -describe 'elasticsearch::snapshot_repository', :type => 'define' do +describe 'elasticsearch::snapshot_repository', type: 'define' do on_supported_os( - :hardwaremodels => ['x86_64'], - :supported_os => [ + hardwaremodels: ['x86_64'], + supported_os: [ { 'operatingsystem' => 'CentOS', 'operatingsystemrelease' => ['6'] } ] ).each do |os, facts| context "on #{os}" do - let(:facts) { facts.merge( - :scenario => '', - :common => '' - ) } + let(:facts) do + facts.merge( + scenario: '', + common: '' + ) + end let(:title) { 'backup' } let(:pre_condition) do 'class { "elasticsearch" : }' end describe 'parameter validation' do - [:api_ca_file, :api_ca_path].each do |param| + %i[api_ca_file api_ca_path].each do |param| let :params do { :ensure => 'present', :content => '{}', param => 'foo/cert' } end it 'validates cert paths' do - is_expected.to compile.and_raise_error(/expects a/) + expect(subject).to compile.and_raise_error(%r{expects a}) end end describe 'missing parent class' do let(:pre_condition) {} - it { should_not compile } + + it { is_expected.not_to compile } end end describe 'template from source' do let :params do { - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup', - :api_protocol => 'https', - :api_host => '127.0.0.1', - :api_port => 9201, - :api_timeout => 11, - :api_basic_auth_username => 'elastic', - :api_basic_auth_password => 'password', - :validate_tls => false + ensure: 'present', + location: '/var/lib/elasticsearch/backup', + api_protocol: 'https', + api_host: '127.0.0.1', + api_port: 9201, + api_timeout: 11, + api_basic_auth_username: 'elastic', + api_basic_auth_password: 'password', + validate_tls: false } end - it { should contain_elasticsearch__snapshot_repository('backup') } + it { is_expected.to contain_elasticsearch__snapshot_repository('backup') } + it do - should contain_es_instance_conn_validator('backup-snapshot') - .that_comes_before('Elasticsearch_snapshot_repository[backup]') + expect(subject).to contain_es_instance_conn_validator('backup-snapshot'). 
+ that_comes_before('Elasticsearch_snapshot_repository[backup]') end + it 'passes through parameters' do - should contain_elasticsearch_snapshot_repository('backup').with( - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :validate_tls => false + expect(subject).to contain_elasticsearch_snapshot_repository('backup').with( + ensure: 'present', + location: '/var/lib/elasticsearch/backup', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + validate_tls: false ) end end describe 'class parameter inheritance' do let :params do { - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup' + ensure: 'present', + location: '/var/lib/elasticsearch/backup' } end let(:pre_condition) do <<-MANIFEST class { 'elasticsearch' : api_protocol => 'https', api_host => '127.0.0.1', api_port => 9201, api_timeout => 11, api_basic_auth_username => 'elastic', api_basic_auth_password => 'password', api_ca_file => '/foo/bar.pem', api_ca_path => '/foo/', validate_tls => false, } MANIFEST end it do - should contain_elasticsearch_snapshot_repository('backup').with( - :ensure => 'present', - :location => '/var/lib/elasticsearch/backup', - :protocol => 'https', - :host => '127.0.0.1', - :port => 9201, - :timeout => 11, - :username => 'elastic', - :password => 'password', - :ca_file => '/foo/bar.pem', - :ca_path => '/foo/', - :validate_tls => false + expect(subject).to contain_elasticsearch_snapshot_repository('backup').with( + ensure: 'present', + location: '/var/lib/elasticsearch/backup', + protocol: 'https', + host: '127.0.0.1', + port: 9201, + timeout: 11, + username: 'elastic', + password: 'password', + ca_file: '/foo/bar.pem', + ca_path: '/foo/', + validate_tls: false ) end end describe 'snapshot repository deletion' do let :params do { - :ensure => 'absent', - :location => '/var/lib/elasticsearch/backup' + ensure: 'absent', + location: '/var/lib/elasticsearch/backup' } end it 'removes snapshot repository' do - should contain_elasticsearch_snapshot_repository('backup').with(:ensure => 'absent') + expect(subject).to contain_elasticsearch_snapshot_repository('backup').with(ensure: 'absent') end end end end end diff --git a/spec/functions/concat_merge_spec.rb b/spec/functions/concat_merge_spec.rb index 5b7421d..ebcc454 100644 --- a/spec/functions/concat_merge_spec.rb +++ b/spec/functions/concat_merge_spec.rb @@ -1,168 +1,199 @@ -require 'spec_helper' +# frozen_string_literal: true -# rubocop:disable Style/BracesAroundHashParameters -# rubocop:disable Style/IndentHash +require 'spec_helper' describe 'concat_merge' do describe 'exception handling' do - it { is_expected.to run.with_params.and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } - - it { is_expected.to run.with_params({}).and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } - - it { is_expected.to run.with_params('2', 2).and_raise_error( - Puppet::ParseError, /unexpected argument type/ - ) } - - it { is_expected.to run.with_params(2, '2').and_raise_error( - Puppet::ParseError, /unexpected argument type/ - ) } + it { + expect(subject).to run.with_params.and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } + + it { + expect(subject).to run.with_params({}).and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } + + it { + expect(subject).to 
run.with_params('2', 2).and_raise_error( + Puppet::ParseError, %r{unexpected argument type} + ) + } + + it { + expect(subject).to run.with_params(2, '2').and_raise_error( + Puppet::ParseError, %r{unexpected argument type} + ) + } end describe 'collisions' do context 'single keys' do - it { is_expected.to run.with_params({ - 'key1' => 'value1' - }, { - 'key1' => 'value2' - }).and_return({ - 'key1' => 'value2' - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1' - }, { - 'key1' => 'value2' - }, { - 'key1' => 'value3' - }).and_return({ - 'key1' => 'value3' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => 'value1' + }, { + 'key1' => 'value2' + }).and_return({ + 'key1' => 'value2' + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1' + }, { + 'key1' => 'value2' + }, { + 'key1' => 'value3' + }).and_return({ + 'key1' => 'value3' + }) + } end context 'multiple keys' do - it { is_expected.to run.with_params({ - 'key1' => 'value1', - 'key2' => 'value2' - }, { - 'key1' => 'value2' - }).and_return({ - 'key1' => 'value2', - 'key2' => 'value2' - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1', - 'key2' => 'value1' - }, { - 'key1' => 'value2' - }, { - 'key1' => 'value3', - 'key2' => 'value2' - }).and_return({ - 'key1' => 'value3', - 'key2' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => 'value1', + 'key2' => 'value2' + }, { + 'key1' => 'value2' + }).and_return({ + 'key1' => 'value2', + 'key2' => 'value2' + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1', + 'key2' => 'value1' + }, { + 'key1' => 'value2' + }, { + 'key1' => 'value3', + 'key2' => 'value2' + }).and_return({ + 'key1' => 'value3', + 'key2' => 'value2' + }) + } end end describe 'concat merging' do context 'single keys' do - it { is_expected.to run.with_params({ - 'key1' => ['value1'] - }, { - 'key1' => ['value2'] - }).and_return({ - 'key1' => %w[value1 value2] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'] - }, { - 'key1' => ['value2'] - }, { - 'key1' => ['value3'] - }).and_return({ - 'key1' => %w[value1 value2 value3] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'] - }, { - 'key1' => 'value2' - }).and_return({ - 'key1' => 'value2' - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1' - }, { - 'key1' => ['value2'] - }).and_return({ - 'key1' => ['value2'] - }) } + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'] + }, { + 'key1' => ['value2'] + }).and_return({ + 'key1' => %w[value1 value2] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'] + }, { + 'key1' => ['value2'] + }, { + 'key1' => ['value3'] + }).and_return({ + 'key1' => %w[value1 value2 value3] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'] + }, { + 'key1' => 'value2' + }).and_return({ + 'key1' => 'value2' + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1' + }, { + 'key1' => ['value2'] + }).and_return({ + 'key1' => ['value2'] + }) + } end context 'multiple keys' do - it { is_expected.to run.with_params({ - 'key1' => ['value1'], - 'key2' => ['value3'] - }, { - 'key1' => ['value2'], - 'key2' => ['value4'] - }).and_return({ - 'key1' => %w[value1 value2], - 'key2' => %w[value3 value4] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'], - 'key2' => ['value1.1'] - }, { - 'key1' => ['value2'], - 'key2' => ['value2.1'] - }, { - 'key1' => ['value3'], - 'key2' => ['value3.1'] - 
}).and_return({ - 'key1' => %w[value1 value2 value3], - 'key2' => ['value1.1', 'value2.1', 'value3.1'] - }) } - - it { is_expected.to run.with_params({ - 'key1' => ['value1'], - 'key2' => 'value1' - }, { - 'key1' => 'value2', - 'key2' => ['value2'] - }).and_return({ - 'key1' => 'value2', - 'key2' => ['value2'] - }) } - - it { is_expected.to run.with_params({ - 'key1' => 'value1', - 'key2' => ['value1'] - }, { - 'key1' => ['value2'], - 'key2' => 'value2' - }).and_return( - 'key1' => ['value2'], - 'key2' => 'value2' - ) } + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'], + 'key2' => ['value3'] + }, { + 'key1' => ['value2'], + 'key2' => ['value4'] + }).and_return({ + 'key1' => %w[value1 value2], + 'key2' => %w[value3 value4] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'], + 'key2' => ['value1.1'] + }, { + 'key1' => ['value2'], + 'key2' => ['value2.1'] + }, { + 'key1' => ['value3'], + 'key2' => ['value3.1'] + }).and_return({ + 'key1' => %w[value1 value2 value3], + 'key2' => ['value1.1', 'value2.1', 'value3.1'] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => ['value1'], + 'key2' => 'value1' + }, { + 'key1' => 'value2', + 'key2' => ['value2'] + }).and_return({ + 'key1' => 'value2', + 'key2' => ['value2'] + }) + } + + it { + expect(subject).to run.with_params({ + 'key1' => 'value1', + 'key2' => ['value1'] + }, { + 'key1' => ['value2'], + 'key2' => 'value2' + }).and_return( + 'key1' => ['value2'], + 'key2' => 'value2' + ) + } end end - it 'should not change the original hashes' do + it 'does not change the original hashes' do argument1 = { 'key1' => 'value1' } original1 = argument1.dup argument2 = { 'key2' => 'value2' } original2 = argument2.dup subject.execute(argument1, argument2) expect(argument1).to eq(original1) expect(argument2).to eq(original2) end end diff --git a/spec/functions/deep_implode_spec.rb b/spec/functions/deep_implode_spec.rb index 0570d77..580a88d 100644 --- a/spec/functions/deep_implode_spec.rb +++ b/spec/functions/deep_implode_spec.rb @@ -1,111 +1,134 @@ -require 'spec_helper' +# frozen_string_literal: true -# rubocop:disable Style/BracesAroundHashParameters -# rubocop:disable Style/IndentHash +require 'spec_helper' describe 'deep_implode' do describe 'exception handling' do - it { is_expected.to run.with_params.and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } + it { + expect(subject).to run.with_params.and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } - it { is_expected.to run.with_params({}, {}).and_raise_error( - Puppet::ParseError, /wrong number of arguments/i - ) } + it { + expect(subject).to run.with_params({}, {}).and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } - it { is_expected.to run.with_params('2').and_raise_error( - Puppet::ParseError, /unexpected argument type/ - ) } + it { + expect(subject).to run.with_params('2').and_raise_error( + Puppet::ParseError, %r{unexpected argument type} + ) + } end ['value', ['value'], 0, 10].each do |value| describe "qualifying #{value}" do it { is_expected.to run.with_params({}).and_return({}) } - it { is_expected.to run.with_params({ - 'key' => value - }).and_return({ - 'key' => value - }) } + it { + expect(subject).to run.with_params({ + 'key' => value + }).and_return({ + 'key' => value + }) + } - it { is_expected.to run.with_params({ - 'key' => { 'subkey' => value } - }).and_return({ - 'key.subkey' => value - }) } + it { + expect(subject).to run.with_params({ + 'key' => { 
'subkey' => value } + }).and_return({ + 'key.subkey' => value + }) + } - it { is_expected.to run.with_params({ - 'key' => { 'subkey' => { 'subsubkey' => { 'bottom' => value } } } - }).and_return({ - 'key.subkey.subsubkey.bottom' => value - }) } + it { + expect(subject).to run.with_params({ + 'key' => { 'subkey' => { 'subsubkey' => { 'bottom' => value } } } + }).and_return({ + 'key.subkey.subsubkey.bottom' => value + }) + } end end # The preferred behavior is to favor fully-qualified keys describe 'key collisions' do - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => 'value1' - }, - 'key1.subkey1' => 'value2' - }).and_return({ - 'key1.subkey1' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => 'value1' + }, + 'key1.subkey1' => 'value2' + }).and_return({ + 'key1.subkey1' => 'value2' + }) + } - it { is_expected.to run.with_params({ - 'key1.subkey1' => 'value2', - 'key1' => { - 'subkey1' => 'value1' - } - }).and_return({ - 'key1.subkey1' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1.subkey1' => 'value2', + 'key1' => { + 'subkey1' => 'value1' + } + }).and_return({ + 'key1.subkey1' => 'value2' + }) + } end describe 'deep merging' do - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => ['value1'] - }, - 'key1.subkey1' => ['value2'] - }).and_return({ - 'key1.subkey1' => %w[value2 value1] - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => ['value1'] + }, + 'key1.subkey1' => ['value2'] + }).and_return({ + 'key1.subkey1' => %w[value2 value1] + }) + } - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => { 'key2' => 'value1' } - }, - 'key1.subkey1' => { 'key3' => 'value2' } - }).and_return({ - 'key1.subkey1.key2' => 'value1', - 'key1.subkey1.key3' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => { 'key2' => 'value1' } + }, + 'key1.subkey1' => { 'key3' => 'value2' } + }).and_return({ + 'key1.subkey1.key2' => 'value1', + 'key1.subkey1.key3' => 'value2' + }) + } - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => { 'key2' => ['value1'] } - }, - 'key1.subkey1' => { 'key2' => ['value2'] } - }).and_return({ - 'key1.subkey1.key2' => %w[value2 value1] - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => { 'key2' => ['value1'] } + }, + 'key1.subkey1' => { 'key2' => ['value2'] } + }).and_return({ + 'key1.subkey1.key2' => %w[value2 value1] + }) + } - it { is_expected.to run.with_params({ - 'key1' => { - 'subkey1' => { 'key2' => 'value1' }, - 'subkey1.key2' => 'value2' - } - }).and_return({ - 'key1.subkey1.key2' => 'value2' - }) } + it { + expect(subject).to run.with_params({ + 'key1' => { + 'subkey1' => { 'key2' => 'value1' }, + 'subkey1.key2' => 'value2' + } + }).and_return({ + 'key1.subkey1.key2' => 'value2' + }) + } end - it 'should not change the original hashes' do + it 'does not change the original hashes' do argument1 = { 'key1' => 'value1' } original1 = argument1.dup subject.execute(argument1) expect(argument1).to eq(original1) end end diff --git a/spec/functions/es_plugin_name_spec.rb b/spec/functions/es_plugin_name_spec.rb index 0373611..b67e3b3 100644 --- a/spec/functions/es_plugin_name_spec.rb +++ b/spec/functions/es_plugin_name_spec.rb @@ -1,75 +1,105 @@ +# frozen_string_literal: true + require 'spec_helper' describe 'es_plugin_name' do describe 'exception handling' do - it { is_expected.to run.with_params.and_raise_error( - Puppet::ParseError, /wrong number of 
arguments/i - ) } + it { + expect(subject).to run.with_params.and_raise_error( + Puppet::ParseError, %r{wrong number of arguments}i + ) + } end describe 'single arguments' do - it { is_expected.to run - .with_params('foo') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/foo') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/foo/1.0.0') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/es-foo/1.0.0') - .and_return('foo') } - - it { is_expected.to run - .with_params('vendor/elasticsearch-foo/1.0.0') - .and_return('foo') } - - it { is_expected.to run - .with_params('com.foo:plugin_name:5.2.0') - .and_return('plugin_name')} - - it { is_expected.to run - .with_params('com:plugin_name:5.2.0-12') - .and_return('plugin_name')} - - it { is_expected.to run - .with_params('com.foo.bar:plugin_name:5') - .and_return('plugin_name')} + it { + expect(subject).to run. + with_params('foo'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/foo'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/foo/1.0.0'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/es-foo/1.0.0'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('vendor/elasticsearch-foo/1.0.0'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params('com.foo:plugin_name:5.2.0'). + and_return('plugin_name') + } + + it { + expect(subject).to run. + with_params('com:plugin_name:5.2.0-12'). + and_return('plugin_name') + } + + it { + expect(subject).to run. + with_params('com.foo.bar:plugin_name:5'). + and_return('plugin_name') + } end describe 'multiple arguments' do - it { is_expected.to run - .with_params('foo', nil) - .and_return('foo') } - - it { is_expected.to run - .with_params(nil, 'foo') - .and_return('foo') } - - it { is_expected.to run - .with_params(nil, 0, 'foo', 'bar') - .and_return('foo') } + it { + expect(subject).to run. + with_params('foo', nil). + and_return('foo') + } + + it { + expect(subject).to run. + with_params(nil, 'foo'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params(nil, 0, 'foo', 'bar'). + and_return('foo') + } end describe 'undef parameters' do - it { is_expected.to run - .with_params('', 'foo') - .and_return('foo') } - - it { is_expected.to run - .with_params('') - .and_raise_error(Puppet::Error, /could not/) } + it { + expect(subject).to run. + with_params('', 'foo'). + and_return('foo') + } + + it { + expect(subject).to run. + with_params(''). + and_raise_error(Puppet::Error, %r{could not}) + } end - it 'should not change the original values' do + it 'does not change the original values' do argument1 = 'foo' original1 = argument1.dup subject.execute(argument1) expect(argument1).to eq(original1) end end diff --git a/spec/functions/plugin_dir_spec.rb b/spec/functions/plugin_dir_spec.rb index 4d3082c..fa12dbc 100644 --- a/spec/functions/plugin_dir_spec.rb +++ b/spec/functions/plugin_dir_spec.rb @@ -1,36 +1,44 @@ +# frozen_string_literal: true + require 'spec_helper' describe 'plugin_dir' do describe 'exception handling' do describe 'with no arguments' do - it { is_expected.to run.with_params - .and_raise_error(Puppet::ParseError) } + it { + expect(subject).to run.with_params. 
+ and_raise_error(Puppet::ParseError) + } end describe 'more than two arguments' do - it { is_expected.to run.with_params('a', 'b', 'c') - .and_raise_error(Puppet::ParseError) } + it { + expect(subject).to run.with_params('a', 'b', 'c'). + and_raise_error(Puppet::ParseError) + } end describe 'non-string arguments' do - it { is_expected.to run.with_params([]) - .and_raise_error(Puppet::ParseError) } + it { + expect(subject).to run.with_params([]). + and_raise_error(Puppet::ParseError) + } end end { 'mobz/elasticsearch-head' => 'head', 'lukas-vlcek/bigdesk/2.4.0' => 'bigdesk', 'elasticsearch/elasticsearch-cloud-aws/2.5.1' => 'cloud-aws', 'com.sksamuel.elasticsearch/elasticsearch-river-redis/1.1.0' => 'river-redis', 'com.github.lbroudoux.elasticsearch/amazon-s3-river/1.4.0' => 'amazon-s3-river', 'elasticsearch/elasticsearch-lang-groovy/2.0.0' => 'lang-groovy', 'royrusso/elasticsearch-hq' => 'hq', 'polyfractal/elasticsearch-inquisitor' => 'inquisitor', 'mycustomplugin' => 'mycustomplugin' }.each do |plugin, dir| describe "parsed dir for #{plugin}" do it { is_expected.to run.with_params(plugin).and_return(dir) } end end end diff --git a/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb b/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb index c6dc2de..38ba1a8 100644 --- a/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb +++ b/spec/helpers/acceptance/tests/bad_manifest_shared_examples.rb @@ -1,18 +1,20 @@ +# frozen_string_literal: true + shared_examples 'invalid manifest application' do context 'bad manifest' do let(:applied_manifest) do <<-MANIFEST class { 'elasticsearch' : #{manifest} #{defined?(manifest_class_parameters) && manifest_class_parameters} } #{defined?(extra_manifest) && extra_manifest} MANIFEST end it 'fails to apply' do - apply_manifest(applied_manifest, :expect_failures => true, :debug => v[:puppet_debug]) + apply_manifest(applied_manifest, expect_failures: true, debug: v[:puppet_debug]) end end end diff --git a/spec/helpers/acceptance/tests/basic_shared_examples.rb b/spec/helpers/acceptance/tests/basic_shared_examples.rb index e9a3e39..5d8ea76 100644 --- a/spec/helpers/acceptance/tests/basic_shared_examples.rb +++ b/spec/helpers/acceptance/tests/basic_shared_examples.rb @@ -1,67 +1,71 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' shared_examples 'basic acceptance tests' do |es_config| include_examples('manifest application') describe package("elasticsearch#{v[:oss] ? '-oss' : ''}") do - it { should be_installed - .with_version(v[:elasticsearch_full_version]) } + it { + expect(subject).to be_installed. + with_version(v[:elasticsearch_full_version]) + } end %w[ /etc/elasticsearch /usr/share/elasticsearch /var/lib/elasticsearch ].each do |dir| describe file(dir) do - it { should be_directory } + it { is_expected.to be_directory } end end describe 'resources' do describe service('elasticsearch') do it { send(es_config.empty? ? :should_not : :should, be_enabled) } it { send(es_config.empty? ? :should_not : :should, be_running) } end unless es_config.empty? 
describe file(pid_file) do - it { should be_file } - its(:content) { should match(/[0-9]+/) } + it { is_expected.to be_file } + its(:content) { is_expected.to match(%r{[0-9]+}) } end describe file('/etc/elasticsearch/elasticsearch.yml') do - it { should be_file } - it { should contain "name: #{es_config['node.name']}" } + it { is_expected.to be_file } + it { is_expected.to contain "name: #{es_config['node.name']}" } end end unless es_config.empty? es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end describe server :container do describe http("http://localhost:#{es_port}/_nodes/_local") do it 'serves requests', :with_retries do expect(response.status).to eq(200) end it 'uses the default data path', :with_retries do json = JSON.parse(response.body)['nodes'].values.first data_dir = ['/var/lib/elasticsearch'] expect( json['settings']['path'] ).to include( 'data' => data_dir ) end end end end end end diff --git a/spec/helpers/acceptance/tests/datadir_shared_examples.rb b/spec/helpers/acceptance/tests/datadir_shared_examples.rb index 0ec67db..efe3914 100644 --- a/spec/helpers/acceptance/tests/datadir_shared_examples.rb +++ b/spec/helpers/acceptance/tests/datadir_shared_examples.rb @@ -1,72 +1,77 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' shared_examples 'datadir directory validation' do |es_config, datapaths| include_examples('manifest application') describe file('/etc/elasticsearch/elasticsearch.yml') do - it { should be_file } + it { is_expected.to be_file } + datapaths.each do |datapath| - it { should contain datapath } + it { is_expected.to contain datapath } end end datapaths.each do |datapath| describe file(datapath) do - it { should be_directory } + it { is_expected.to be_directory } end end es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end describe server :container do describe http( "http://localhost:#{es_port}/_nodes/_local" ) do it 'uses a custom data path' do json = JSON.parse(response.body)['nodes'].values.first expect( json['settings']['path']['data'] - ).to((datapaths.one? and v[:elasticsearch_major_version] <= 2) ? eq(datapaths.first) : contain_exactly(*datapaths)) + ).to(datapaths.one? && v[:elasticsearch_major_version] <= 2 ? 
eq(datapaths.first) : contain_exactly(*datapaths)) end end end end shared_examples 'datadir acceptance tests' do |es_config| describe 'elasticsearch::datadir' do let(:manifest_class_parameters) { 'restart_on_change => true' } context 'single path', :with_cleanup do let(:manifest_class_parameters) do <<-MANIFEST datadir => '/var/lib/elasticsearch-data', restart_on_change => true, MANIFEST end + include_examples('datadir directory validation', es_config, ['/var/lib/elasticsearch-data']) end context 'multiple paths', :with_cleanup do let(:manifest_class_parameters) do <<-MANIFEST datadir => [ '/var/lib/elasticsearch-01', '/var/lib/elasticsearch-02' ], restart_on_change => true, MANIFEST end + include_examples('datadir directory validation', es_config, ['/var/lib/elasticsearch-01', '/var/lib/elasticsearch-02']) end end end diff --git a/spec/helpers/acceptance/tests/hiera_shared_examples.rb b/spec/helpers/acceptance/tests/hiera_shared_examples.rb index 609d1de..93d9434 100644 --- a/spec/helpers/acceptance/tests/hiera_shared_examples.rb +++ b/spec/helpers/acceptance/tests/hiera_shared_examples.rb @@ -1,87 +1,89 @@ +# frozen_string_literal: true + require 'tempfile' require 'helpers/acceptance/tests/basic_shared_examples' require 'helpers/acceptance/tests/plugin_shared_examples' agents = only_host_with_role(hosts, 'agent') shared_examples 'hiera tests with' do |es_config, additional_yaml = {}| hieradata = { 'elasticsearch::config' => es_config }.merge(additional_yaml).to_yaml - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll write_hieradata_to(agents, hieradata) end include_examples('basic acceptance tests', es_config) end shared_examples 'hiera acceptance tests' do |es_config, plugins| describe 'hiera', :then_purge do let(:manifest) do - package = if not v[:is_snapshot] + package = if v[:is_snapshot] <<-MANIFEST - # Hard version set here due to plugin incompatibilities. - version => '#{v[:elasticsearch_full_version]}', + manage_repo => false, + package_url => '#{v[:snapshot_package]}', MANIFEST else <<-MANIFEST - manage_repo => false, - package_url => '#{v[:snapshot_package]}', + # Hard version set here due to plugin incompatibilities. 
+ version => '#{v[:elasticsearch_full_version]}', MANIFEST end <<-MANIFEST api_timeout => 60, jvm_options => [ '-Xms128m', '-Xmx128m', ], oss => #{v[:oss]}, #{package} MANIFEST end let(:manifest_class_parameters) { 'restart_on_change => true' } + after :all do # rubocop:disable RSpec/BeforeAfterAll + write_hieradata_to(agents, {}) + + # Ensure that elasticsearch is cleaned up before any other tests + cleanup_manifest = <<-EOS + class { 'elasticsearch': ensure => 'absent', oss => #{v[:oss]} } + EOS + apply_manifest(cleanup_manifest, debug: v[:puppet_debug]) + end + describe 'with hieradata' do nodename = SecureRandom.hex(10) include_examples( 'hiera tests with', es_config.merge('node.name' => nodename) ) end plugins.each_pair do |plugin, _meta| describe "with plugin #{plugin}" do nodename = SecureRandom.hex(10) include_examples( 'hiera tests with', es_config.merge('node.name' => nodename), 'elasticsearch::plugins' => { plugin => { 'ensure' => 'present' } } ) include_examples( 'plugin API response', es_config.merge('node.name' => nodename), 'reports the plugin as installed', 'name' => plugin ) end end - - after :all do - write_hieradata_to(agents, {}) - - # Ensure that elasticsearch is cleaned up before any other tests - cleanup_manifest = <<-EOS - class { 'elasticsearch': ensure => 'absent', oss => #{v[:oss]} } - EOS - apply_manifest(cleanup_manifest, :debug => v[:puppet_debug]) - end end end diff --git a/spec/helpers/acceptance/tests/manifest_shared_examples.rb b/spec/helpers/acceptance/tests/manifest_shared_examples.rb index eba3863..0ee4fa9 100644 --- a/spec/helpers/acceptance/tests/manifest_shared_examples.rb +++ b/spec/helpers/acceptance/tests/manifest_shared_examples.rb @@ -1,38 +1,40 @@ +# frozen_string_literal: true + shared_examples 'manifest application' do |idempotency_check = true| context 'manifest' do let(:applied_manifest) do repo = if elastic_repo <<-MANIFEST class { 'elastic_stack::repo': oss => #{v[:oss]}, version => #{v[:elasticsearch_major_version]}, } MANIFEST else '' end <<-MANIFEST #{repo} class { 'elasticsearch' : #{manifest} #{defined?(manifest_class_parameters) && manifest_class_parameters} } #{defined?(extra_manifest) && extra_manifest} MANIFEST end it 'applies cleanly' do - apply_manifest(applied_manifest, :catch_failures => true, :debug => v[:puppet_debug]) + apply_manifest(applied_manifest, catch_failures: true, debug: v[:puppet_debug]) end # binding.pry if idempotency_check it 'is idempotent', :logs_on_failure do - apply_manifest(applied_manifest, :catch_changes => true, :debug => v[:puppet_debug]) + apply_manifest(applied_manifest, catch_changes: true, debug: v[:puppet_debug]) end end end end diff --git a/spec/helpers/acceptance/tests/package_url_shared_examples.rb b/spec/helpers/acceptance/tests/package_url_shared_examples.rb index a250943..823e179 100644 --- a/spec/helpers/acceptance/tests/package_url_shared_examples.rb +++ b/spec/helpers/acceptance/tests/package_url_shared_examples.rb @@ -1,71 +1,73 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/basic_shared_examples' shared_examples 'package_url acceptance tests' do |es_config| describe 'elasticsearch::package_url' do # Override default manifest to remove `package` let(:manifest) do <<-MANIFEST api_timeout => 60, config => { 'cluster.name' => '#{v[:cluster_name]}', 'http.bind_host' => '0.0.0.0', #{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} }, jvm_options => [ '-Xms128m', '-Xmx128m', ], oss => #{v[:oss]}, MANIFEST end # context 'via http', :with_cleanup 
do context 'via http' do let(:manifest_class_parameters) do <<-MANIFEST manage_repo => false, package_url => '#{v[:elasticsearch_package][:url]}' MANIFEST end include_examples('basic acceptance tests', es_config) end context 'via local filesystem', :with_cleanup do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll scp_to default, v[:elasticsearch_package][:path], "/tmp/#{v[:elasticsearch_package][:filename]}" end let(:manifest_class_parameters) do <<-MANIFEST manage_repo => false, package_url => 'file:/tmp/#{v[:elasticsearch_package][:filename]}' MANIFEST end include_examples('basic acceptance tests', es_config) end context 'via puppet paths', :with_cleanup do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll shell "mkdir -p #{default['distmoduledir']}/another/files" scp_to default, v[:elasticsearch_package][:path], "#{default['distmoduledir']}/another/files/#{v[:elasticsearch_package][:filename]}" end let(:manifest_class_parameters) do <<-MANIFEST manage_repo => false, package_url => 'puppet:///modules/another/#{v[:elasticsearch_package][:filename]}', MANIFEST end include_examples('basic acceptance tests', es_config) end end end diff --git a/spec/helpers/acceptance/tests/pipeline_shared_examples.rb b/spec/helpers/acceptance/tests/pipeline_shared_examples.rb index 181eaf0..3256d7e 100644 --- a/spec/helpers/acceptance/tests/pipeline_shared_examples.rb +++ b/spec/helpers/acceptance/tests/pipeline_shared_examples.rb @@ -1,56 +1,59 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/bad_manifest_shared_examples' shared_examples 'pipeline operations' do |es_config, pipeline| describe 'pipeline resources' do let(:pipeline_name) { 'foo' } + context 'present' do let(:extra_manifest) do <<-MANIFEST elasticsearch::pipeline { '#{pipeline_name}': ensure => 'present', content => #{pipeline} } MANIFEST end include_examples('manifest application') include_examples('pipeline content', es_config, pipeline) end context 'absent' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{pipeline_name}': ensure => absent, } MANIFEST end include_examples('manifest application') end end end # Verifies the content of a loaded index template. shared_examples 'pipeline content' do |es_config, pipeline| elasticsearch_port = es_config['http.port'] describe port(elasticsearch_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end describe server :container do describe http( "http://localhost:#{elasticsearch_port}/_ingest/pipeline" ) do it 'returns the configured pipelines', :with_retries do - expect(JSON.parse(response.body).values) - .to include(include(pipeline)) + expect(JSON.parse(response.body).values). 
+ to include(include(pipeline)) end end end end diff --git a/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb b/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb index d61cc12..baf52fa 100644 --- a/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb +++ b/spec/helpers/acceptance/tests/plugin_api_shared_examples.rb @@ -1,21 +1,23 @@ +# frozen_string_literal: true + require 'json' shared_examples 'plugin API response' do |es_config, desc, val| describe port(es_config['http.port']) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end describe server :container do describe http( "http://localhost:#{es_config['http.port']}/_cluster/stats" ) do it desc, :with_retries do expect( JSON.parse(response.body)['nodes']['plugins'] ).to include(include(val)) end end end end diff --git a/spec/helpers/acceptance/tests/plugin_shared_examples.rb b/spec/helpers/acceptance/tests/plugin_shared_examples.rb index 6628998..c81bf18 100644 --- a/spec/helpers/acceptance/tests/plugin_shared_examples.rb +++ b/spec/helpers/acceptance/tests/plugin_shared_examples.rb @@ -1,98 +1,100 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/bad_manifest_shared_examples' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/plugin_api_shared_examples' shared_examples 'plugin acceptance tests' do |es_config, plugins| describe 'elasticsearch::plugin' do + before :all do # rubocop:disable RSpec/BeforeAfterAll + shell "mkdir -p #{default['distmoduledir']}/another/files" + end + describe 'invalid plugins', :with_cleanup do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { 'elastic/non-existing': } MANIFEST end include_examples('invalid manifest application') end - before :all do - shell "mkdir -p #{default['distmoduledir']}/another/files" - end - plugins.each_pair do |plugin, meta| describe plugin do # Ensure that instances are restarted to include plugins let(:manifest_class_parameters) { 'restart_on_change => true' } describe 'installation' do describe 'using simple names', :with_cleanup do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin}': } MANIFEST end include_examples('manifest application', es_config) describe file("/usr/share/elasticsearch/plugins/#{plugin}/") do - it { should be_directory } + it { is_expected.to be_directory } end include_examples( 'plugin API response', es_config, 'reports the plugin as installed', 'name' => plugin ) end describe 'offline via puppet://', :with_cleanup do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll scp_to( default, meta[:path], "#{default['distmoduledir']}/another/files/#{plugin}.zip" ) end let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin}': source => 'puppet:///modules/another/#{plugin}.zip', } MANIFEST end include_examples('manifest application', es_config) include_examples( 'plugin API response', es_config, 'reports the plugin as installed', 'name' => plugin ) end describe 'via url', :with_cleanup do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin}': url => '#{meta[:url]}', } MANIFEST end include_examples('manifest application', es_config) include_examples( 'plugin API response', es_config, 'reports the plugin as installed', 'name' => plugin ) end end end end end end diff --git a/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb b/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb index ff10779..e7ccfbe 100644 --- 
a/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb +++ b/spec/helpers/acceptance/tests/plugin_upgrade_shared_examples.rb @@ -1,69 +1,71 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/plugin_api_shared_examples' shared_examples 'plugin upgrade acceptance tests' do |plugin| describe 'elasticsearch::plugin' do # Ensure that instances are restarted to include plugins let(:manifest_class_parameters) { 'restart_on_change => true' } instances = { 'es-01' => { 'config' => { 'http.port' => 9200, 'node.name' => 'elasticsearch001' } } } describe 'installation' do describe 'upgrades', :with_cleanup do context 'initial installation' do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin[:repository]}-#{plugin[:name]}/v#{plugin[:initial]}': instances => 'es-01', } MANIFEST end include_examples( 'manifest application', instances ) include_examples( 'plugin API response', instances, 'contains the initial plugin version', 'name' => plugin[:name], 'version' => plugin[:initial] ) end describe 'upgrading' do let(:extra_manifest) do <<-MANIFEST elasticsearch::plugin { '#{plugin[:repository]}-#{plugin[:name]}/v#{plugin[:upgraded]}': instances => 'es-01', } MANIFEST end include_examples( 'manifest application', instances ) include_examples( 'plugin API response', instances, 'contains the upgraded plugin version', 'name' => plugin[:name], 'version' => plugin[:upgraded] ) end end end end end diff --git a/spec/helpers/acceptance/tests/removal_shared_examples.rb b/spec/helpers/acceptance/tests/removal_shared_examples.rb index 518d691..3c07c77 100644 --- a/spec/helpers/acceptance/tests/removal_shared_examples.rb +++ b/spec/helpers/acceptance/tests/removal_shared_examples.rb @@ -1,30 +1,32 @@ +# frozen_string_literal: true + shared_examples 'module removal' do |es_config| describe 'uninstalling' do let(:manifest) do <<-MANIFEST class { 'elasticsearch': ensure => 'absent', oss => #{v[:oss]} } MANIFEST end - it 'should run successfully' do - apply_manifest(manifest, :catch_failures => true, :debug => v[:puppet_debug]) + it 'runs successfully' do + apply_manifest(manifest, catch_failures: true, debug: v[:puppet_debug]) end describe package("elasticsearch#{v[:oss] ? '-oss' : ''}") do - it { should_not be_installed } + it { is_expected.not_to be_installed } end describe service('elasticsearch') do - it { should_not be_enabled } - it { should_not be_running } + it { is_expected.not_to be_enabled } + it { is_expected.not_to be_running } end unless es_config.empty? describe port(es_config['http.port']) do it 'closed' do - should_not be_listening + expect(subject).not_to be_listening end end end end end diff --git a/spec/helpers/acceptance/tests/security_shared_examples.rb b/spec/helpers/acceptance/tests/security_shared_examples.rb index 1c0f2ea..84c8217 100644 --- a/spec/helpers/acceptance/tests/security_shared_examples.rb +++ b/spec/helpers/acceptance/tests/security_shared_examples.rb @@ -1,180 +1,186 @@ +# frozen_string_literal: true + require 'json' require 'spec_utilities' require 'helpers/acceptance/tests/manifest_shared_examples' shared_examples 'security plugin manifest' do |credentials| let(:extra_manifest) do users = credentials.map do |username, meta| <<-USER #{meta[:changed] ? "notify { 'password change for #{username}' : } ~>" : ''} elasticsearch::user { '#{username}': - password => '#{meta[:hash] ? 
meta[:hash] : meta[:plaintext]}', - roles => #{meta[:roles].reduce({}) { |a, e| a.merge(e) }.keys}, + password => '#{meta[:hash] || meta[:plaintext]}', + roles => #{meta[:roles].reduce({}) { |acc, elem| acc.merge(elem) }.keys}, } USER end.join("\n") roles = credentials.values.reduce({}) do |sum, user_metadata| # Collect all roles across users sum.merge user_metadata - end[:roles].reduce({}) do |all_roles, role| + end[:roles] + roles = roles.reduce({}) do |all_roles, role| all_roles.merge role - end.reject do |_role, permissions| + end + roles = roles.reject do |_role, permissions| permissions.empty? - end.map do |role, rights| + end + roles = roles.map do |role, rights| <<-ROLE elasticsearch::role { '#{role}': privileges => #{rights} } ROLE - end.join("\n") + end + roles = roles.join("\n") <<-MANIFEST #{users} #{roles} MANIFEST end include_examples( 'manifest application', - not(credentials.values.map { |p| p[:changed] }.any?) + credentials.values.map { |p| p[:changed] }.none? ) end shared_examples 'secured request' do |test_desc, es_config, path, http_test, expected, user = nil, pass = nil| es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end describe server :container do describe http( "https://localhost:#{es_port}#{path}", { - :ssl => { :verify => false } - }.merge((user and pass) ? { :basic_auth => [user, pass] } : {}) + ssl: { verify: false } + }.merge(user && pass ? { basic_auth: [user, pass] } : {}) ) do it test_desc, :with_retries do expect(http_test.call(response)).to eq(expected) end end end end shared_examples 'security acceptance tests' do |es_config| - describe 'security plugin operations', :if => vault_available?, :then_purge => true, :with_license => true, :with_certificates => true do - rand_string = lambda { [*('a'..'z')].sample(8).join } + describe 'security plugin operations', if: vault_available?, then_purge: true, with_license: true, with_certificates: true do + rand_string = -> { [*('a'..'z')].sample(8).join } admin_user = rand_string.call admin_password = rand_string.call - admin = { admin_user => { :plaintext => admin_password, :roles => [{ 'superuser' => [] }] } } + admin = { admin_user => { plaintext: admin_password, roles: [{ 'superuser' => [] }] } } let(:manifest_class_parameters) do <<-MANIFEST api_basic_auth_password => '#{admin_password}', api_basic_auth_username => '#{admin_user}', - api_ca_file => '#{@tls[:ca][:cert][:path]}', + api_ca_file => '#{tls[:ca][:cert][:path]}', api_protocol => 'https', - ca_certificate => '#{@tls[:ca][:cert][:path]}', - certificate => '#{@tls[:clients].first[:cert][:path]}', - keystore_password => '#{@keystore_password}', + ca_certificate => '#{tls[:ca][:cert][:path]}', + certificate => '#{tls[:clients].first[:cert][:path]}', + keystore_password => '#{keystore_password}', license => file('#{v[:elasticsearch_license_path]}'), - private_key => '#{@tls[:clients].first[:key][:path]}', + private_key => '#{tls[:clients].first[:key][:path]}', restart_on_change => true, ssl => true, validate_tls => true, MANIFEST end describe 'over tls' do user_one = rand_string.call user_two = rand_string.call user_one_pw = rand_string.call user_two_pw = rand_string.call describe 'user authentication' do username_passwords = { - user_one => { :plaintext => user_one_pw, :roles => [{ 'superuser' => [] }] }, - user_two => { :plaintext => user_two_pw, :roles => [{ 'superuser' => [] }] } + user_one => { plaintext: user_one_pw, roles: [{ 'superuser' => [] }] }, + 
user_two => { plaintext: user_two_pw, roles: [{ 'superuser' => [] }] } }.merge(admin) username_passwords[user_two][:hash] = bcrypt(username_passwords[user_two][:plaintext]) include_examples('security plugin manifest', username_passwords) include_examples( 'secured request', 'denies unauthorized access', es_config, '/_cluster/health', - lambda { |r| r.status }, 401 + ->(r) { r.status }, 401 ) include_examples( 'secured request', "permits user #{user_one} access", es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, user_one, user_one_pw ) include_examples( 'secured request', "permits user #{user_two} access", es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, user_two, user_two_pw ) end describe 'changing passwords' do new_password = rand_string.call username_passwords = { user_one => { - :plaintext => new_password, - :changed => true, - :roles => [{ 'superuser' => [] }] + plaintext: new_password, + changed: true, + roles: [{ 'superuser' => [] }] } } include_examples('security plugin manifest', username_passwords) include_examples( 'secured request', 'denies unauthorized access', es_config, '/_cluster/health', - lambda { |r| r.status }, 401 + ->(r) { r.status }, 401 ) include_examples( 'secured request', "permits user #{user_two} access with new password", es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, user_one, new_password ) end describe 'roles' do password = rand_string.call username = rand_string.call user = { username => { - :plaintext => password, - :roles => [{ + plaintext: password, + roles: [{ rand_string.call => { 'cluster' => [ 'cluster:monitor/health' ] } }] } } include_examples('security plugin manifest', user) include_examples( 'secured request', 'denies unauthorized access', es_config, '/_snapshot', - lambda { |r| r.status }, 403, + ->(r) { r.status }, 403, username, password ) include_examples( 'secured request', 'permits authorized access', es_config, '/_cluster/health', - lambda { |r| r.status }, 200, + ->(r) { r.status }, 200, username, password ) end end end end diff --git a/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb b/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb index abd329f..b9af542 100644 --- a/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb +++ b/spec/helpers/acceptance/tests/snapshot_repository_shared_examples.rb @@ -1,81 +1,83 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' # Main entrypoint for snapshot tests shared_examples 'snapshot repository acceptance tests' do describe 'elasticsearch::snapshot_repository', :with_cleanup do es_config = { 'http.port' => 9200, 'node.name' => 'elasticsearchSnapshot01', 'path.repo' => '/var/lib/elasticsearch' } # Override the manifest in order to populate 'path.repo' let(:manifest) do - package = if not v[:is_snapshot] + package = if v[:is_snapshot] <<-MANIFEST - # Hard version set here due to plugin incompatibilities. - version => '#{v[:elasticsearch_full_version]}', + manage_repo => false, + package_url => '#{v[:snapshot_package]}', MANIFEST else <<-MANIFEST - manage_repo => false, - package_url => '#{v[:snapshot_package]}', + # Hard version set here due to plugin incompatibilities. 
+ version => '#{v[:elasticsearch_full_version]}', MANIFEST end <<-MANIFEST api_timeout => 60, config => { 'cluster.name' => '#{v[:cluster_name]}', 'http.bind_host' => '0.0.0.0', #{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")} }, jvm_options => [ '-Xms128m', '-Xmx128m', ], oss => #{v[:oss]}, #{package} MANIFEST end let(:manifest_class_parameters) { 'restart_on_change => true' } let(:extra_manifest) do <<-MANIFEST elasticsearch::snapshot_repository { 'backup': ensure => 'present', api_timeout => 60, location => '/var/lib/elasticsearch/backup', max_restore_rate => '20mb', max_snapshot_rate => '80mb', } MANIFEST end include_examples('manifest application', es_config) es_port = es_config['http.port'] describe port(es_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end describe server :container do describe http( "http://localhost:#{es_port}/_snapshot/backup" ) do it 'returns the snapshot repository', :with_retries do - expect(JSON.parse(response.body)['backup']) - .to include('settings' => a_hash_including( - 'location' => '/var/lib/elasticsearch/backup', - 'max_restore_rate' => '20mb', + expect(JSON.parse(response.body)['backup']). + to include('settings' => a_hash_including( + 'location' => '/var/lib/elasticsearch/backup', + 'max_restore_rate' => '20mb', 'max_snapshot_rate' => '80mb' )) end end end end end diff --git a/spec/helpers/acceptance/tests/template_shared_examples.rb b/spec/helpers/acceptance/tests/template_shared_examples.rb index 11044ad..e0ae9c6 100644 --- a/spec/helpers/acceptance/tests/template_shared_examples.rb +++ b/spec/helpers/acceptance/tests/template_shared_examples.rb @@ -1,111 +1,113 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/manifest_shared_examples' require 'helpers/acceptance/tests/bad_manifest_shared_examples' # Describes how to apply a manifest with a template, verify it, and clean it up shared_examples 'template application' do |es_config, name, template, param| context 'present' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{name}': ensure => 'present', #{param} } MANIFEST end include_examples('manifest application') include_examples('template content', es_config, template) end context 'absent' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{name}': ensure => absent, } MANIFEST end include_examples('manifest application') end end # Verifies the content of a loaded index template. shared_examples 'template content' do |es_config, template| elasticsearch_port = es_config['http.port'] describe port(elasticsearch_port) do it 'open', :with_retries do - should be_listening + expect(subject).to be_listening end end describe server :container do describe http( "http://localhost:#{elasticsearch_port}/_template", - :params => { 'flat_settings' => 'false' } + params: { 'flat_settings' => 'false' } ) do it 'returns the installed template', :with_retries do - expect(JSON.parse(response.body).values) - .to include(include(template)) + expect(JSON.parse(response.body).values). 
+ to include(include(template)) end end end end # Main entrypoint for template tests shared_examples 'template operations' do |es_config, template| describe 'template resources' do - before :all do + before :all do # rubocop:disable RSpec/BeforeAfterAll shell "mkdir -p #{default['distmoduledir']}/another/files" create_remote_file( default, "#{default['distmoduledir']}/another/files/good.json", JSON.dump(template) ) create_remote_file( default, "#{default['distmoduledir']}/another/files/bad.json", JSON.dump(template)[0..-5] ) end context 'configured through' do context '`source`' do include_examples( 'template application', es_config, SecureRandom.hex(8), template, "source => 'puppet:///modules/another/good.json'" ) end context '`content`' do include_examples( 'template application', es_config, SecureRandom.hex(8), template, "content => '#{JSON.dump(template)}'" ) end context 'bad json' do let(:extra_manifest) do <<-MANIFEST elasticsearch::template { '#{SecureRandom.hex(8)}': ensure => 'present', file => 'puppet:///modules/another/bad.json' } MANIFEST end include_examples('invalid manifest application') end end end end diff --git a/spec/helpers/acceptance/tests/usergroup_shared_examples.rb b/spec/helpers/acceptance/tests/usergroup_shared_examples.rb index 1fbcbc5..11ac91b 100644 --- a/spec/helpers/acceptance/tests/usergroup_shared_examples.rb +++ b/spec/helpers/acceptance/tests/usergroup_shared_examples.rb @@ -1,49 +1,51 @@ +# frozen_string_literal: true + require 'json' require 'helpers/acceptance/tests/basic_shared_examples' shared_examples 'user/group acceptance tests' do - describe 'user/group parameters', :first_purge => true, :then_purge => true do + describe 'user/group parameters', first_purge: true, then_purge: true do describe 'with non-default values', :with_cleanup do let(:extra_manifest) do <<-MANIFEST group { 'esuser': ensure => 'present', } -> group { 'esgroup': ensure => 'present' } -> user { 'esuser': ensure => 'present', groups => ['esgroup', 'esuser'], before => Class['elasticsearch'], } MANIFEST end let(:manifest_class_parameters) do <<-MANIFEST elasticsearch_user => 'esuser', elasticsearch_group => 'esgroup', MANIFEST end include_examples( 'basic acceptance tests', 'es-01' => { 'config' => { 'http.port' => 9200, 'node.name' => 'elasticsearch001' } } ) %w[ /etc/elasticsearch/es-01/elasticsearch.yml /usr/share/elasticsearch /var/log/elasticsearch ].each do |path| describe file(path) do - it { should be_owned_by 'esuser' } + it { is_expected.to be_owned_by 'esuser' } end end end end end diff --git a/spec/helpers/class_shared_examples.rb b/spec/helpers/class_shared_examples.rb index c7d631a..3391da4 100644 --- a/spec/helpers/class_shared_examples.rb +++ b/spec/helpers/class_shared_examples.rb @@ -1,8 +1,10 @@ +# frozen_string_literal: true + shared_examples 'class' do - it { should compile.with_all_deps } - it { should contain_augeas('/etc/sysconfig/elasticsearch') } - it { should contain_file('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat('/etc/elasticsearch/elasticsearch.yml') } - it { should contain_datacat_fragment('main_config') } - it { should contain_service('elasticsearch') } + it { is_expected.to compile.with_all_deps } + it { is_expected.to contain_augeas('/etc/sysconfig/elasticsearch') } + it { is_expected.to contain_file('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat('/etc/elasticsearch/elasticsearch.yml') } + it { is_expected.to contain_datacat_fragment('main_config') } + it { is_expected.to 
contain_service('elasticsearch') } end diff --git a/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb b/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb index fec5dfd..64f4a05 100644 --- a/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb +++ b/spec/helpers/unit/provider/elasticsearch_rest_shared_examples.rb @@ -1,106 +1,108 @@ +# frozen_string_literal: true + require 'json' require 'spec_helper_rspec' require 'webmock/rspec' shared_examples 'REST API' do |resource_type, create_uri, singleton = false| unless singleton describe 'instances' do context "with no #{resource_type}s" do it 'returns an empty list' do - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return( - :status => 200, - :body => '{}' + stub_request(:get, "http://localhost:9200/_#{resource_type}"). + with(headers: { 'Accept' => 'application/json' }). + to_return( + status: 200, + body: '{}' ) expect(described_class.instances).to eq([]) end end end end describe "#{resource_type}s" do if singleton let(:json) { json_1 } let(:instance) { [example_1] } else let(:json) { json_1.merge(json_2) } let(:instance) { [example_1, example_2] } end it "returns #{resource_type}s" do - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return( - :status => 200, - :body => JSON.dump(json) + stub_request(:get, "http://localhost:9200/_#{resource_type}"). + with(headers: { 'Accept' => 'application/json' }). + to_return( + status: 200, + body: JSON.dump(json) ) expect(described_class.instances.map do |provider| provider.instance_variable_get(:@property_hash) end).to contain_exactly(*instance) end end describe 'basic authentication' do it 'authenticates' do - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with( - :basic_auth => %w[elastic password], - :headers => { 'Accept' => 'application/json' } - ) - .to_return( - :status => 200, - :body => JSON.dump(json_1) + stub_request(:get, "http://localhost:9200/_#{resource_type}"). + with( + basic_auth: %w[elastic password], + headers: { 'Accept' => 'application/json' } + ). + to_return( + status: 200, + body: JSON.dump(json_1) ) expect(described_class.api_objects( 'http', true, 'localhost', '9200', 10, 'elastic', 'password' ).map do |provider| described_class.new( provider ).instance_variable_get(:@property_hash) end).to contain_exactly(example_1) end end describe 'https' do it 'uses ssl' do - stub_request(:get, "https://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return( - :status => 200, - :body => JSON.dump(json_1) + stub_request(:get, "https://localhost:9200/_#{resource_type}"). + with(headers: { 'Accept' => 'application/json' }). + to_return( + status: 200, + body: JSON.dump(json_1) ) expect(described_class.api_objects( 'https', true, 'localhost', '9200', 10 ).map do |provider| described_class.new( provider ).instance_variable_get(:@property_hash) end).to contain_exactly(example_1) end end unless singleton describe 'flush' do it "creates #{resource_type}s" do - stub_request(:put, "http://localhost:9200/#{create_uri}") - .with( - :headers => { + stub_request(:put, "http://localhost:9200/#{create_uri}"). 
+ with( + headers: { 'Accept' => 'application/json', 'Content-Type' => 'application/json' }, - :body => bare_resource + body: bare_resource ) - stub_request(:get, "http://localhost:9200/_#{resource_type}") - .with(:headers => { 'Accept' => 'application/json' }) - .to_return(:status => 200, :body => '{}') + stub_request(:get, "http://localhost:9200/_#{resource_type}"). + with(headers: { 'Accept' => 'application/json' }). + to_return(status: 200, body: '{}') provider.flush end end end -end # of describe puppet type +end diff --git a/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb b/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb index 78d958b..d3e8a4a 100644 --- a/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb +++ b/spec/helpers/unit/type/elasticsearch_rest_shared_examples.rb @@ -1,213 +1,215 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' shared_examples 'REST API types' do |resource_type, meta_property| let(:default_params) do { meta_property => {} } end describe "attribute validation for #{resource_type}s" do - [ - :name, - :host, - :port, - :protocol, - :validate_tls, - :ca_file, - :ca_path, - :timeout, - :username, - :password + %i[ + name + host + port + protocol + validate_tls + ca_file + ca_path + timeout + username + password ].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end [ :ensure, meta_property ].each do |prop| - it "should have a #{prop} property" do + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end describe meta_property.to_s do - it 'should reject non-hash values' do + it 'rejects non-hash values' do expect do described_class.new( :name => resource_name, meta_property => '{"foo":}' ) - end.to raise_error(Puppet::Error, /hash expected/i) + end.to raise_error(Puppet::Error, %r{hash expected}i) expect do described_class.new( :name => resource_name, meta_property => 0 ) - end.to raise_error(Puppet::Error, /hash expected/i) + end.to raise_error(Puppet::Error, %r{hash expected}i) expect do described_class.new( default_params.merge( - :name => resource_name + name: resource_name ) ) end.not_to raise_error end - it 'should parse PSON-like values for certain types' do + it 'parses PSON-like values for certain types' do expect(described_class.new( :name => resource_name, meta_property => { 'key' => { 'value' => '0', 'other' => true } } )[meta_property]).to include( 'key' => { 'value' => 0, 'other' => true } ) end end describe 'ensure' do - it 'should support present as a value for ensure' do + it 'supports present as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :present + name: resource_name, + ensure: :present ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should support absent as a value for ensure' do + it 'supports absent as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :absent + name: resource_name, + ensure: :absent ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should not support other values' do + it 'does not support other values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :foo 
+ name: resource_name, + ensure: :foo ) ) - end.to raise_error(Puppet::Error, /Invalid value/) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'host' do - it 'should accept IP addresses' do + it 'accepts IP addresses' do expect do described_class.new( default_params.merge( - :name => resource_name, - :host => '127.0.0.1' + name: resource_name, + host: '127.0.0.1' ) ) end.not_to raise_error end end describe 'port' do [-1, 0, 70_000, 'foo'].each do |value| - it "should reject invalid port value #{value}" do + it "rejects invalid port value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :port => value + name: resource_name, + port: value ) ) - end.to raise_error(Puppet::Error, /invalid port/i) + end.to raise_error(Puppet::Error, %r{invalid port}i) end end end describe 'validate_tls' do [-1, 0, {}, [], 'foo'].each do |value| - it "should reject invalid ssl_verify value #{value}" do + it "rejects invalid ssl_verify value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) - end.to raise_error(Puppet::Error, /invalid value/i) + end.to raise_error(Puppet::Error, %r{invalid value}i) end end [true, false, 'true', 'false', 'yes', 'no'].each do |value| - it "should accept validate_tls value #{value}" do + it "accepts validate_tls value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) end.not_to raise_error end end end describe 'timeout' do - it 'should reject string values' do + it 'rejects string values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 'foo' + name: resource_name, + timeout: 'foo' ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, %r{must be a}) end - it 'should reject negative integers' do + it 'rejects negative integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => -10 + name: resource_name, + timeout: -10 ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, %r{must be a}) end - it 'should accept integers' do + it 'accepts integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 10 + name: resource_name, + timeout: 10 ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should accept quoted integers' do + it 'accepts quoted integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => '10' + name: resource_name, + timeout: '10' ) ) - end.to_not raise_error + end.not_to raise_error end end - end # of describing when validing values -end # of REST API type shared examples + end +end diff --git a/spec/spec_helper_rspec.rb b/spec/spec_helper_rspec.rb index 4944b5e..b0078e2 100644 --- a/spec/spec_helper_rspec.rb +++ b/spec/spec_helper_rspec.rb @@ -1 +1,3 @@ +# frozen_string_literal: true + require 'puppet' diff --git a/spec/spec_helper_tls.rb b/spec/spec_helper_tls.rb index c3a1250..c754e9e 100644 --- a/spec/spec_helper_tls.rb +++ b/spec/spec_helper_tls.rb @@ -1,102 +1,104 @@ +# frozen_string_literal: true + require 'openssl' def gen_certs(num_certs, path) - ret = { :clients => [] } + ret = { clients: [] } serial = 1_000_000 ca_key = OpenSSL::PKey::RSA.new 2048 # CA Cert ca_name = OpenSSL::X509::Name.parse 
'CN=ca/DC=example/DC=com' ca_cert = OpenSSL::X509::Certificate.new ca_cert.serial = serial serial += 1 ca_cert.version = 2 ca_cert.not_before = Time.now ca_cert.not_after = Time.now + 86_400 ca_cert.public_key = ca_key.public_key ca_cert.subject = ca_name ca_cert.issuer = ca_name extension_factory = OpenSSL::X509::ExtensionFactory.new extension_factory.subject_certificate = ca_cert extension_factory.issuer_certificate = ca_cert # ca_cert.add_extension extension_factory.create_extension( # 'subjectAltName', ['localhost', '127.0.0.1'].map { |d| "DNS: #{d}" }.join(',') # ) ca_cert.add_extension extension_factory.create_extension( 'subjectKeyIdentifier', 'hash' ) ca_cert.add_extension extension_factory.create_extension( 'basicConstraints', 'CA:TRUE', true ) - ca_cert.sign ca_key, OpenSSL::Digest::SHA256.new + ca_cert.sign ca_key, OpenSSL::Digest.new('SHA256') ret[:ca] = { - :cert => { - :pem => ca_cert.to_pem, - :path => path + '/ca_cert.pem' + cert: { + pem: ca_cert.to_pem, + path: "#{path}/ca_cert.pem" } } num_certs.times do |i| key, cert, serial = gen_cert_pair serial, ca_cert - cert.sign ca_key, OpenSSL::Digest::SHA256.new + cert.sign ca_key, OpenSSL::Digest.new('SHA256') ret[:clients] << { - :key => { - :pem => key.to_pem, - :path => path + '/' + i.to_s + '_key.pem' + key: { + pem: key.to_pem, + path: "#{path}/#{i}_key.pem" }, - :cert => { - :pem => cert.to_pem, - :path => path + '/' + i.to_s + '_cert.pem' + cert: { + pem: cert.to_pem, + path: "#{path}/#{i}_cert.pem" } } end ret end def gen_cert_pair(serial, ca_cert) serial += 1 # Node Key key = OpenSSL::PKey::RSA.new 2048 node_name = OpenSSL::X509::Name.parse 'CN=localhost/DC=example/DC=com' # prepare SANS list sans = ['localhost.localdomain', 'localhost', 'localhost.example.com'] sans_list = sans.map { |domain| "DNS:#{domain}" } # Node Cert cert = OpenSSL::X509::Certificate.new cert.serial = serial cert.version = 2 cert.not_before = Time.now cert.not_after = Time.now + 6000 cert.subject = node_name cert.public_key = key.public_key cert.issuer = ca_cert.subject csr_extension_factory = OpenSSL::X509::ExtensionFactory.new csr_extension_factory.subject_certificate = cert csr_extension_factory.issuer_certificate = ca_cert cert.add_extension csr_extension_factory.create_extension( 'subjectAltName', sans_list.join(',') ) cert.add_extension csr_extension_factory.create_extension( 'basicConstraints', 'CA:FALSE' ) cert.add_extension csr_extension_factory.create_extension( 'keyUsage', 'keyEncipherment,dataEncipherment,digitalSignature' ) cert.add_extension csr_extension_factory.create_extension( 'extendedKeyUsage', 'serverAuth,clientAuth' ) cert.add_extension csr_extension_factory.create_extension( 'subjectKeyIdentifier', 'hash' ) [key, cert, serial] end diff --git a/spec/spec_utilities.rb b/spec/spec_utilities.rb index 489bd6a..2782028 100644 --- a/spec/spec_utilities.rb +++ b/spec/spec_utilities.rb @@ -1,134 +1,136 @@ +# frozen_string_literal: true + require 'bcrypt' require 'open-uri' def to_agent_version(puppet_version) # REF: https://docs.puppet.com/puppet/latest/reference/about_agent.html { # Puppet => Agent '4.10.4' => '1.10.4', '4.10.3' => '1.10.3', '4.10.2' => '1.10.2', '4.10.1' => '1.10.1', '4.10.0' => '1.10.0', '4.9.4' => '1.9.3', '4.8.2' => '1.8.3', '4.7.1' => '1.7.2', '4.7.0' => '1.7.1', '4.6.2' => '1.6.2', '4.5.3' => '1.5.3', '4.4.2' => '1.4.2', '4.4.1' => '1.4.1', '4.4.0' => '1.4.0', '4.3.2' => '1.3.6', '4.3.1' => '1.3.2', '4.3.0' => '1.3.0', '4.2.3' => '1.2.7', '4.2.2' => '1.2.6', '4.2.1' => '1.2.2', '4.2.0' => '1.2.1', '4.1.0' => 
'1.1.1', '4.0.0' => '1.0.1' }[puppet_version] end def derive_artifact_urls_for(full_version, plugins = ['analysis-icu']) derive_full_package_url(full_version).merge( derive_plugin_urls_for(full_version, plugins) ) end def derive_full_package_url(full_version, extensions = %w[deb rpm]) extensions.map do |ext| url = if full_version.start_with? '6' "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-#{full_version}.#{ext}" elsif ext == 'deb' "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-#{full_version}-amd64.#{ext}" else "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-#{full_version}-x86_64.#{ext}" end [url, File.basename(url)] end.to_h end def derive_plugin_urls_for(full_version, plugins = ['analysis-icu']) plugins.map do |plugin| url = "https://artifacts.elastic.co/downloads/elasticsearch-plugins/#{plugin}/#{plugin}-#{full_version}.zip" [url, File.join('plugins', File.basename(url))] end.to_h end def artifact(file, fixture_path = []) File.join(%w[spec fixtures artifacts] + fixture_path + [File.basename(file)]) end def get(url, file_path) puts "Fetching #{url}..." found = false until found uri = URI.parse(url) conn = Net::HTTP.new(uri.host, uri.port) conn.use_ssl = true res = conn.get(uri.path) if res.header['location'] url = res.header['location'] else found = true end end File.open(file_path, 'w+') { |fh| fh.write res.body } end def fetch_archives(archives) archives.each do |url, orig_fp| fp = "spec/fixtures/artifacts/#{orig_fp}" if File.exist? fp - if fp.end_with? 'tar.gz' and !system("tar -tzf #{fp} &>/dev/null") + if fp.end_with?('tar.gz') && !system("tar -tzf #{fp} &>/dev/null") puts "Archive #{fp} corrupt, re-fetching..." File.delete fp else puts "Already retrieved intact archive #{fp}..." next end end get url, fp end end def pid_file if fact('operatingsystem') == 'Debian' \ - and fact('lsbmajdistrelease').to_i <= 7 + && fact('lsbmajdistrelease').to_i <= 7 '/var/run/elasticsearch.pid' else '/var/run/elasticsearch/elasticsearch.pid' end end def vault_available? if ENV['CI'] %w[VAULT_ADDR VAULT_APPROLE_ROLE_ID VAULT_APPROLE_SECRET_ID VAULT_PATH].select do |var| ENV[var].nil? end.empty? 
else true end end def http_retry(url) retries ||= 0 open(url).read -rescue +rescue StandardError retry if (retries += 1) < 3 end # Helper to store arbitrary testing setting values def v RSpec.configuration.v end def semver(version) Gem::Version.new version end def bcrypt(value) BCrypt::Password.create(value) end diff --git a/spec/templates/001_elasticsearch.yml.erb_spec.rb b/spec/templates/001_elasticsearch.yml.erb_spec.rb index 752c672..7e2f430 100644 --- a/spec/templates/001_elasticsearch.yml.erb_spec.rb +++ b/spec/templates/001_elasticsearch.yml.erb_spec.rb @@ -1,83 +1,85 @@ +# frozen_string_literal: true + require 'spec_helper' require 'yaml' class String def config "### MANAGED BY PUPPET ###\n---#{unindent}" end def unindent - gsub(/^#{scan(/^\s*/).min_by(&:length)}/, '') + gsub(%r{^#{scan(%r{^\s*}).min_by(&:length)}}, '') end end describe 'elasticsearch.yml.erb' do let :harness do TemplateHarness.new( 'templates/etc/elasticsearch/elasticsearch.yml.erb' ) end - it 'should render normal hashes' do + it 'renders normal hashes' do harness.set( '@data', 'node.name' => 'test', 'path.data' => '/mnt/test', 'discovery.zen.ping.unicast.hosts' => %w[ host1 host2 ] ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( discovery.zen.ping.unicast.hosts: - host1 - host2 node.name: test path.data: /mnt/test ).config)) end - it 'should render arrays of hashes correctly' do + it 'renders arrays of hashes correctly' do harness.set( '@data', 'data' => [ { 'key' => 'value0', 'other_key' => 'othervalue0' }, { 'key' => 'value1', 'other_key' => 'othervalue1' } ] ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( data: - key: value0 other_key: othervalue0 - key: value1 other_key: othervalue1 ).config)) end - it 'should quote IPv6 loopback addresses' do + it 'quotes IPv6 loopback addresses' do harness.set( '@data', 'network.host' => ['::', '[::]'] ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( network.host: - "::" - "[::]" ).config)) end - it 'should not quote numeric values' do + it 'does not quote numeric values' do harness.set( '@data', 'some.setting' => '10' ) - expect(YAML.load(harness.run)).to eq(YAML.load(%( + expect(YAML.safe_load(harness.run)).to eq(YAML.safe_load(%( some.setting: 10 ).config)) end end diff --git a/spec/unit/facter/es_facts_spec.rb b/spec/unit/facter/es_facts_spec.rb index b5d208c..ce6f1a3 100644 --- a/spec/unit/facter/es_facts_spec.rb +++ b/spec/unit/facter/es_facts_spec.rb @@ -1,113 +1,115 @@ +# frozen_string_literal: true + require 'spec_helper' require 'spec_utilities' require 'webmock/rspec' def fixture_path File.expand_path(File.join(__dir__, '..', '..', 'fixtures')) end describe 'elasticsearch facts' do - before(:each) do - stub_request(:get, 'http://localhost:9200/') - .with(:headers => { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }) - .to_return( - :status => 200, - :body => File.read( + before do + stub_request(:get, 'http://localhost:9200/'). + with(headers: { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }). + to_return( + status: 200, + body: File.read( File.join( fixture_path, 'facts/Warlock-root.json' ) ) ) - stub_request(:get, 'http://localhost:9200/_nodes/Warlock') - .with(:headers => { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }) - .to_return( - :status => 200, - :body => File.read( + stub_request(:get, 'http://localhost:9200/_nodes/Warlock'). 
+ with(headers: { 'Accept' => '*/*', 'User-Agent' => 'Ruby' }). + to_return( + status: 200, + body: File.read( File.join( fixture_path, 'facts/Warlock-nodes.json' ) ) ) - allow(File) - .to receive(:directory?) - .and_return(true) + allow(File). + to receive(:directory?). + and_return(true) - allow(File) - .to receive(:readable?) - .and_return(true) + allow(File). + to receive(:readable?). + and_return(true) - allow(YAML) - .to receive(:load_file) - .with('/etc/elasticsearch/elasticsearch.yml', any_args) - .and_return({}) + allow(YAML). + to receive(:load_file). + with('/etc/elasticsearch/elasticsearch.yml', any_args). + and_return({}) require_relative '../../../lib/facter/es_facts' end describe 'elasticsearch_port' do it 'finds listening port' do - expect(Facter.fact(:elasticsearch_port).value) - .to eq('9200') + expect(Facter.fact(:elasticsearch_port).value). + to eq('9200') end end describe 'instance' do it 'returns the node name' do expect(Facter.fact(:elasticsearch_name).value).to eq('Warlock') end it 'returns the node version' do expect(Facter.fact(:elasticsearch_version).value).to eq('1.4.2') end it 'returns the cluster name' do - expect(Facter.fact(:elasticsearch_cluster_name).value) - .to eq('elasticsearch') + expect(Facter.fact(:elasticsearch_cluster_name).value). + to eq('elasticsearch') end it 'returns the node ID' do - expect(Facter.fact(:elasticsearch_node_id).value) - .to eq('yQAWBO3FS8CupZnSvAVziQ') + expect(Facter.fact(:elasticsearch_node_id).value). + to eq('yQAWBO3FS8CupZnSvAVziQ') end it 'returns the mlockall boolean' do expect(Facter.fact(:elasticsearch_mlockall).value).to be_falsy end it 'returns installed plugins' do expect(Facter.fact(:elasticsearch_plugins).value).to eq('kopf') end describe 'kopf plugin' do it 'returns the correct version' do - expect(Facter.fact(:elasticsearch_plugin_kopf_version).value) - .to eq('1.4.3') + expect(Facter.fact(:elasticsearch_plugin_kopf_version).value). + to eq('1.4.3') end it 'returns the correct description' do - expect(Facter.fact(:elasticsearch_plugin_kopf_description).value) - .to eq('kopf - simple web administration tool for ElasticSearch') + expect(Facter.fact(:elasticsearch_plugin_kopf_description).value). + to eq('kopf - simple web administration tool for ElasticSearch') end it 'returns the plugin URL' do - expect(Facter.fact(:elasticsearch_plugin_kopf_url).value) - .to eq('/_plugin/kopf/') + expect(Facter.fact(:elasticsearch_plugin_kopf_url).value). + to eq('/_plugin/kopf/') end it 'returns the plugin JVM boolean' do - expect(Facter.fact(:elasticsearch_plugin_kopf_jvm).value) - .to be_falsy + expect(Facter.fact(:elasticsearch_plugin_kopf_jvm).value). + to be_falsy end it 'returns the plugin _site boolean' do - expect(Facter.fact(:elasticsearch_plugin_kopf_site).value) - .to be_truthy + expect(Facter.fact(:elasticsearch_plugin_kopf_site).value). 
+ to be_truthy end - end # of describe plugin - end # of describe instance -end # of describe elasticsearch facts + end + end +end diff --git a/spec/unit/provider/elastic_yaml_spec.rb b/spec/unit/provider/elastic_yaml_spec.rb index 15c18d0..d5dce6d 100644 --- a/spec/unit/provider/elastic_yaml_spec.rb +++ b/spec/unit/provider/elastic_yaml_spec.rb @@ -1,66 +1,68 @@ +# frozen_string_literal: true + $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', 'lib')) require 'spec_helper_rspec' require 'puppet/provider/elastic_yaml' class String def flattened split("\n").reject(&:empty?).map(&:strip).join("\n").strip end end describe Puppet::Provider::ElasticYaml do subject do described_class.tap do |o| o.instance_eval { @metadata = :metadata } end end let :unsorted_hash do [{ - :name => 'role', - :metadata => { + name: 'role', + metadata: { 'zeta' => { - 'zeta' => 5, + 'zeta' => 5, 'gamma' => 4, 'delta' => 3, - 'beta' => 2, + 'beta' => 2, 'alpha' => 1 }, 'phi' => [{ - 'zeta' => 3, + 'zeta' => 3, 'gamma' => 2, 'alpha' => 1 }], - 'beta' => 'foobaz', + 'beta' => 'foobaz', 'gamma' => 1, 'alpha' => 'foobar' } }] end it { is_expected.to respond_to :to_file } describe 'to_file' do it 'returns sorted yaml' do expect(described_class.to_file(unsorted_hash).flattened).to( eq(%( role: alpha: foobar beta: foobaz gamma: 1 phi: - alpha: 1 gamma: 2 zeta: 3 zeta: alpha: 1 beta: 2 delta: 3 gamma: 4 zeta: 5 ).flattened) ) end end end diff --git a/spec/unit/provider/elasticsearch_index/ruby_spec.rb b/spec/unit/provider/elasticsearch_index/ruby_spec.rb index d774bdf..05ffa8a 100644 --- a/spec/unit/provider/elasticsearch_index/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_index/ruby_spec.rb @@ -1,126 +1,128 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_index).provider(:ruby) do let(:name) { 'test-index' } let(:example_1) do { - :name => 'index-one', - :ensure => :present, - :provider => :ruby, - :settings => { + name: 'index-one', + ensure: :present, + provider: :ruby, + settings: { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 1, 'number_of_shards' => 5, 'provided_name' => 'a', 'routing' => { 'allocation' => { 'include' => { 'size' => 'big' } } }, 'store' => { 'type' => 'niofs' }, 'uuid' => 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => 5_020_199 } } } } end let(:json_1) do { 'index-one' => { 'settings' => { 'index' => { 'creation_date' => '1487354196301', 'number_of_replicas' => '1', 'number_of_shards' => '5', 'provided_name' => 'a', 'routing' => { 'allocation' => { 'include' => { 'size' => 'big' } } }, 'store' => { 'type' => 'niofs' }, 'uuid' => 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => '5020199' } } } } } end let(:example_2) do { - :name => 'index-two', - :ensure => :present, - :provider => :ruby, - :settings => { + name: 'index-two', + ensure: :present, + provider: :ruby, + settings: { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 1, 'number_of_shards' => 5, 'provided_name' => 'a', 'uuid' => 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => 5_020_199 } } } } end let(:json_2) do { 'index-two' => { 'settings' => { 'index' => { 'creation_date' => '1487354196301', 'number_of_replicas' => '1', 'number_of_shards' => '5', 'provided_name' => 'a', 'uuid' => 'vtJrcgyeRviqllRakSlrSw', 'version' => { 'created' => '5020199' } } } } } end let(:bare_resource) do JSON.dump( 'index' => { 'number_of_replicas' => 0 } ) end 
let(:resource) { Puppet::Type::Elasticsearch_index.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => name, - :settings => { + name: name, + settings: { 'index' => { 'number_of_replicas' => '0' } } } end include_examples 'REST API', 'all/_settings', 'test-index/_settings' end diff --git a/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb b/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb index a679b50..355350b 100644 --- a/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb +++ b/spec/unit/provider/elasticsearch_keystore/elasticsearch_keystore_spec.rb @@ -1,161 +1,192 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' shared_examples 'keystore instance' do |instance| describe "instance #{instance}" do subject { described_class.instances.find { |x| x.name == instance } } - it { expect(subject.exists?).to be_truthy } + it { expect(subject).to be_exists } it { expect(subject.name).to eq(instance) } - it { expect(subject.settings) - .to eq(['node.name', 'cloud.aws.access_key']) } + + it { + expect(subject.settings). + to eq(['node.name', 'cloud.aws.access_key']) + } end end describe Puppet::Type.type(:elasticsearch_keystore).provider(:elasticsearch_keystore) do let(:executable) { '/usr/share/elasticsearch/bin/elasticsearch-keystore' } let(:instances) { [] } before do Facter.clear Facter.add('osfamily') { setcode { 'Debian' } } - allow(described_class) - .to receive(:command) - .with(:keystore) - .and_return(executable) + allow(described_class). + to receive(:command). + with(:keystore). + and_return(executable) - allow(File).to receive(:exist?) - .with('/etc/elasticsearch/scripts/elasticsearch.keystore') - .and_return(false) + allow(File).to receive(:exist?). + with('/etc/elasticsearch/scripts/elasticsearch.keystore'). + and_return(false) end describe 'instances' do before do - allow(Dir).to receive(:[]) - .with('/etc/elasticsearch/*') - .and_return((['scripts'] + instances).map do |directory| + allow(Dir).to receive(:[]). + with('/etc/elasticsearch/*'). + and_return((['scripts'] + instances).map do |directory| "/etc/elasticsearch/#{directory}" end) instances.each do |instance| instance_dir = "/etc/elasticsearch/#{instance}" defaults_file = "/etc/default/elasticsearch-#{instance}" - allow(File).to receive(:exist?) - .with("#{instance_dir}/elasticsearch.keystore") - .and_return(true) + allow(File).to receive(:exist?). + with("#{instance_dir}/elasticsearch.keystore"). + and_return(true) - expect(described_class) - .to receive(:execute) - .with( + allow(described_class). + to receive(:execute). + with( [executable, 'list'], - :custom_environment => { + custom_environment: { 'ES_INCLUDE' => defaults_file, 'ES_PATH_CONF' => "/etc/elasticsearch/#{instance}" }, - :uid => 'elasticsearch', - :gid => 'elasticsearch', - :failonfail => true - ) - .and_return( + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true + ). 
+ and_return( Puppet::Util::Execution::ProcessOutput.new( "node.name\ncloud.aws.access_key\n", 0 ) ) end end - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to(:instances) end context 'without any keystores' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.instances.size).to eq(0) end end context 'with one instance' do let(:instances) { ['es-01'] } it { expect(described_class.instances.length).to eq(instances.length) } + include_examples 'keystore instance', 'es-01' end context 'with multiple instances' do - let(:instances) { ['es-01', 'es-02'] } + let(:instances) { %w[es-01 es-02] } it { expect(described_class.instances.length).to eq(instances.length) } + include_examples 'keystore instance', 'es-01' include_examples 'keystore instance', 'es-02' end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end describe 'flush' do - let(:provider) { described_class.new(:name => 'es-03') } + let(:provider) { described_class.new(name: 'es-03') } let(:resource) do Puppet::Type.type(:elasticsearch_keystore).new( - :name => 'es-03', - :provider => provider + name: 'es-03', + provider: provider ) end it 'creates the keystore' do - expect(described_class).to( - receive(:execute) - .with( + allow(described_class).to( + receive(:execute). + with( [executable, 'create'], - :custom_environment => { + custom_environment: { 'ES_INCLUDE' => '/etc/default/elasticsearch-es-03', 'ES_PATH_CONF' => '/etc/elasticsearch/es-03' }, - :uid => 'elasticsearch', - :gid => 'elasticsearch', - :failonfail => true - ) - .and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true + ). + and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) ) resource[:ensure] = :present provider.create provider.flush + expect(described_class).to( + have_received(:execute). + with( + [executable, 'create'], + custom_environment: { + 'ES_INCLUDE' => '/etc/default/elasticsearch-es-03', + 'ES_PATH_CONF' => '/etc/elasticsearch/es-03' + }, + uid: 'elasticsearch', + gid: 'elasticsearch', + failonfail: true + ) + ) end it 'deletes the keystore' do - expect(File).to( - receive(:delete) - .with(File.join(%w[/ etc elasticsearch es-03 elasticsearch.keystore])) + allow(File).to( + receive(:delete). + with(File.join(%w[/ etc elasticsearch es-03 elasticsearch.keystore])) ) resource[:ensure] = :absent provider.destroy provider.flush + expect(File).to( + have_received(:delete). + with(File.join(%w[/ etc elasticsearch es-03 elasticsearch.keystore])) + ) end it 'updates settings' do settings = { 'cloud.aws.access_key' => 'AKIAFOOBARFOOBAR', 'cloud.aws.secret_key' => 'AKIAFOOBARFOOBAR' } settings.each do |setting, value| - expect(provider.class).to( - receive(:run_keystore) - .with(['add', '--force', '--stdin', setting], 'es-03', '/etc/elasticsearch', value) - .and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) + allow(provider.class).to( + receive(:run_keystore). + with(['add', '--force', '--stdin', setting], 'es-03', '/etc/elasticsearch', value). + and_return(Puppet::Util::Execution::ProcessOutput.new('', 0)) ) end # Note that the settings hash is passed in wrapped in an array to mimic # the behavior in real-world puppet runs. 
resource[:ensure] = :present resource[:settings] = [settings] provider.settings = [settings] provider.flush + + settings.each do |setting, value| + expect(provider.class).to( + have_received(:run_keystore). + with(['add', '--force', '--stdin', setting], 'es-03', '/etc/elasticsearch', value) + ) + end end - end # of describe flush -end # of describe Puppet::Type elasticsearch_keystore + end +end diff --git a/spec/unit/provider/elasticsearch_license/xpack_spec.rb b/spec/unit/provider/elasticsearch_license/xpack_spec.rb index 978b3bd..e94e32b 100644 --- a/spec/unit/provider/elasticsearch_license/xpack_spec.rb +++ b/spec/unit/provider/elasticsearch_license/xpack_spec.rb @@ -1,61 +1,63 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_license).provider(:xpack) do let(:name) { 'xpack' } let(:example_1) do { - :name => 'xpack', - :ensure => :present, - :provider => :xpack, - :content => { + name: 'xpack', + ensure: :present, + provider: :xpack, + content: { 'license' => { - 'status' => 'active', - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date' => '2018-02-22T23:12:05.550Z', - 'issue_date_in_millis' => 1_519_341_125_550, - 'expiry_date' => '2018-03-24T23:12:05.550Z', + 'status' => 'active', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date' => '2018-02-22T23:12:05.550Z', + 'issue_date_in_millis' => 1_519_341_125_550, + 'expiry_date' => '2018-03-24T23:12:05.550Z', 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1_000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1_000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'start_date_in_millis' => 1_513_814_400_000 } } } end let(:json_1) do { 'license' => { - 'status' => 'active', - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date' => '2018-02-22T23:12:05.550Z', - 'issue_date_in_millis' => '1519341125550', - 'expiry_date' => '2018-03-24T23:12:05.550Z', + 'status' => 'active', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date' => '2018-02-22T23:12:05.550Z', + 'issue_date_in_millis' => '1519341125550', + 'expiry_date' => '2018-03-24T23:12:05.550Z', 'expiry_date_in_millis' => '1521933125550', - 'max_nodes' => '1000', - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'start_date_in_millis' => '1513814400000' + 'max_nodes' => '1000', + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'start_date_in_millis' => '1513814400000' } } end let(:resource) { Puppet::Type::Elasticsearch_index.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => name, - :settings => { + name: name, + settings: { 'index' => { 'number_of_replicas' => 0 } } } end include_examples 'REST API', 'xpack/license', nil, true end diff --git a/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb b/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb index c4f5362..2619796 100644 --- a/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_pipeline/ruby_spec.rb @@ -1,96 +1,98 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_pipeline).provider(:ruby) do let(:example_1) do { - :name => 'foo', - :ensure => :present, - :provider => :ruby, - :content 
=> { + name: 'foo', + ensure: :present, + provider: :ruby, + content: { 'description' => 'Sets the foo field to "bar"', 'processors' => [{ 'set' => { 'field' => 'foo', 'value' => 'bar' } }] } } end let(:json_1) do { 'foo' => { 'description' => 'Sets the foo field to "bar"', 'processors' => [{ 'set' => { 'field' => 'foo', 'value' => 'bar' } }] } } end let(:example_2) do { - :name => 'baz', - :ensure => :present, - :provider => :ruby, - :content => { + name: 'baz', + ensure: :present, + provider: :ruby, + content: { 'description' => 'A pipeline that never gives you up', 'processors' => [{ 'set' => { 'field' => 'firstname', 'value' => 'rick' } }, { 'set' => { 'field' => 'lastname', 'value' => 'astley' } }] } } end let(:json_2) do { 'baz' => { 'description' => 'A pipeline that never gives you up', 'processors' => [{ 'set' => { 'field' => 'firstname', 'value' => 'rick' } }, { 'set' => { 'field' => 'lastname', 'value' => 'astley' } }] } } end let(:bare_resource) do JSON.dump( 'description' => 'Empty pipeline', 'processors' => [] ) end let(:resource) { Puppet::Type::Elasticsearch_pipeline.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => 'foo', - :content => { + name: 'foo', + content: { 'description' => 'Empty pipeline', 'processors' => [] } } end include_examples 'REST API', 'ingest/pipeline', '_ingest/pipeline/foo' end diff --git a/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb b/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb index d3e3796..39771af 100644 --- a/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_plugin/ruby_spec.rb @@ -1,23 +1,25 @@ +# frozen_string_literal: true + require_relative 'shared_examples' provider_class = Puppet::Type.type(:elasticsearch_plugin).provider(:elasticsearch_plugin) describe provider_class do let(:resource_name) { 'lmenezes/elasticsearch-kopf' } let(:resource) do Puppet::Type.type(:elasticsearch_plugin).new( - :name => resource_name, - :ensure => :present, - :provider => 'elasticsearch_plugin' + name: resource_name, + ensure: :present, + provider: 'elasticsearch_plugin' ) end let(:provider) do provider = provider_class.new provider.resource = resource provider end let(:shortname) { provider.plugin_name(resource_name) } let(:klass) { provider_class } include_examples 'plugin provider', '7.0.0' end diff --git a/spec/unit/provider/elasticsearch_plugin/shared_examples.rb b/spec/unit/provider/elasticsearch_plugin/shared_examples.rb index 094390a..eaebe4f 100644 --- a/spec/unit/provider/elasticsearch_plugin/shared_examples.rb +++ b/spec/unit/provider/elasticsearch_plugin/shared_examples.rb @@ -1,147 +1,195 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' shared_examples 'plugin provider' do |version| describe "elasticsearch #{version}" do - before(:each) do + before do allow(File).to receive(:open) allow(provider).to receive(:es_version).and_return version end describe 'setup' do it 'installs with default parameters' do - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['install', resource_name].tap do |args| - if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 - args.insert 1, '--batch' - end + args.insert 1, '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 end ) provider.create + expect(provider).to have_received(:plugin).with( + ['install', resource_name].tap do |args| + args.insert 1, '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 + end + ) end it 'installs via URLs' 
do resource[:url] = 'http://url/to/my/plugin.zip' - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['install'] + ['http://url/to/my/plugin.zip'].tap do |args| args.unshift('kopf', '--url') if version.start_with? '1' - if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 - args.unshift '--batch' - end + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 args end ) provider.create + expect(provider).to have_received(:plugin).with( + ['install'] + ['http://url/to/my/plugin.zip'].tap do |args| + args.unshift('kopf', '--url') if version.start_with? '1' + + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 + + args + end + ) end it 'installs with a local file' do resource[:source] = '/tmp/plugin.zip' - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['install'] + ['file:///tmp/plugin.zip'].tap do |args| args.unshift('kopf', '--url') if version.start_with? '1' - if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 - args.unshift '--batch' - end + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 args end ) provider.create + expect(provider).to have_received(:plugin).with( + ['install'] + ['file:///tmp/plugin.zip'].tap do |args| + args.unshift('kopf', '--url') if version.start_with? '1' + + args.unshift '--batch' if Puppet::Util::Package.versioncmp(version, '2.2.0') >= 0 + + args + end + ) end describe 'proxying' do it 'installs behind a proxy' do resource[:proxy] = 'http://localhost:3128' - expect(provider) - .to receive(:plugin) - .with([ - '-Dhttp.proxyHost=localhost', - '-Dhttp.proxyPort=3128', - '-Dhttps.proxyHost=localhost', - '-Dhttps.proxyPort=3128', - 'install', - '--batch', - resource_name - ]) + allow(provider). + to receive(:plugin). + with([ + '-Dhttp.proxyHost=localhost', + '-Dhttp.proxyPort=3128', + '-Dhttps.proxyHost=localhost', + '-Dhttps.proxyPort=3128', + 'install', + '--batch', + resource_name + ]) provider.create + expect(provider). + to have_received(:plugin). + with([ + '-Dhttp.proxyHost=localhost', + '-Dhttp.proxyPort=3128', + '-Dhttps.proxyHost=localhost', + '-Dhttps.proxyPort=3128', + 'install', + '--batch', + resource_name + ]) end it 'uses authentication credentials' do resource[:proxy] = 'http://elastic:password@es.local:8080' - expect(provider) - .to receive(:plugin) - .with([ - '-Dhttp.proxyHost=es.local', - '-Dhttp.proxyPort=8080', - '-Dhttp.proxyUser=elastic', - '-Dhttp.proxyPassword=password', - '-Dhttps.proxyHost=es.local', - '-Dhttps.proxyPort=8080', - '-Dhttps.proxyUser=elastic', - '-Dhttps.proxyPassword=password', - 'install', - '--batch', - resource_name - ]) + allow(provider). + to receive(:plugin). + with([ + '-Dhttp.proxyHost=es.local', + '-Dhttp.proxyPort=8080', + '-Dhttp.proxyUser=elastic', + '-Dhttp.proxyPassword=password', + '-Dhttps.proxyHost=es.local', + '-Dhttps.proxyPort=8080', + '-Dhttps.proxyUser=elastic', + '-Dhttps.proxyPassword=password', + 'install', + '--batch', + resource_name + ]) provider.create + expect(provider). + to have_received(:plugin). 
+ with([ + '-Dhttp.proxyHost=es.local', + '-Dhttp.proxyPort=8080', + '-Dhttp.proxyUser=elastic', + '-Dhttp.proxyPassword=password', + '-Dhttps.proxyHost=es.local', + '-Dhttps.proxyPort=8080', + '-Dhttps.proxyUser=elastic', + '-Dhttps.proxyPassword=password', + 'install', + '--batch', + resource_name + ]) end end describe 'configdir' do it 'sets the ES_PATH_CONF env var' do resource[:configdir] = '/etc/elasticsearch' expect(provider.with_environment do ENV['ES_PATH_CONF'] end).to eq('/etc/elasticsearch') end end - end # of setup + end describe 'java_opts' do it 'uses authentication credentials' do resource[:java_opts] = ['-Des.plugins.staging=4a2ffaf5'] expect(provider.with_environment do ENV['ES_JAVA_OPTS'] end).to eq('-Des.plugins.staging=4a2ffaf5') end end describe 'java_home' do it 'sets the JAVA_HOME env var' do resource[:java_home] = '/opt/foo' expect(provider.with_environment do ENV['JAVA_HOME'] end).to eq('/opt/foo') end end describe 'java_home unset' do elasticsearch_java_home = '/usr/share/elasticsearch/jdk' it 'defaults to the elasticsearch bundled JDK' do resource[:java_home] = '' expect(provider.with_environment do ENV['JAVA_HOME'] end).to eq(elasticsearch_java_home) end end describe 'plugin_name' do let(:resource_name) { 'appbaseio/dejaVu' } it 'maintains mixed-case names' do expect(provider.plugin_path).to include('dejaVu') end end describe 'removal' do it 'uninstalls the plugin' do - expect(provider).to receive(:plugin).with( + allow(provider).to receive(:plugin).with( ['remove', resource_name.split('-').last] ) provider.destroy + expect(provider).to have_received(:plugin).with( + ['remove', resource_name.split('-').last] + ) end end end end diff --git a/spec/unit/provider/elasticsearch_role/ruby_spec.rb b/spec/unit/provider/elasticsearch_role/ruby_spec.rb index bbaafed..2489aeb 100644 --- a/spec/unit/provider/elasticsearch_role/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_role/ruby_spec.rb @@ -1,59 +1,61 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_role).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'with no roles' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one role' do - it 'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( admin: cluster: all indices: '*': all ))[0]).to eq( - :ensure => :present, - :name => 'admin', - :privileges => { + ensure: :present, + name: 'admin', + privileges: { 'cluster' => 'all', 'indices' => { '*' => 'all' } } ) end end context 'with multiple roles' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin: cluster: all indices: '*': all user: indices: '*': read power_user: cluster: monitor indices: '*': all )).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb b/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb index f2af032..9dff8db 100644 --- a/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_role_mapping/ruby_spec.rb @@ -1,51 +1,53 @@ +# frozen_string_literal: 
true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_role_mapping).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'with no roles' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one role' do - it 'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( admin: - "cn=users,dc=example,dc=com" ))[0]).to eq( - :ensure => :present, - :name => 'admin', - :mappings => [ + ensure: :present, + name: 'admin', + mappings: [ 'cn=users,dc=example,dc=com' ] ) end end context 'with multiple roles' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin: - "cn=users,dc=example,dc=com" user: - "cn=users,dc=example,dc=com" - "cn=admins,dc=example,dc=com" - "cn=John Doe,cn=other users,dc=example,dc=com" power_user: - "cn=admins,dc=example,dc=com" )).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb b/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb index 3a05f07..4531da1 100644 --- a/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_snapshot_repository/ruby_spec.rb @@ -1,72 +1,74 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_snapshot_repository).provider(:ruby) do let(:example_1) do { - :name => 'foobar1', - :ensure => :present, - :provider => :ruby, - :location => '/bak1', - :type => 'fs', - :compress => true + name: 'foobar1', + ensure: :present, + provider: :ruby, + location: '/bak1', + type: 'fs', + compress: true } end let(:json_1) do { 'foobar1' => { 'type' => 'fs', 'settings' => { 'compress' => true, 'location' => '/bak1' } } } end let(:example_2) do { - :name => 'foobar2', - :ensure => :present, - :provider => :ruby, - :location => '/bak2', - :type => 'fs', - :compress => true + name: 'foobar2', + ensure: :present, + provider: :ruby, + location: '/bak2', + type: 'fs', + compress: true } end let(:json_2) do { 'foobar2' => { 'type' => 'fs', 'settings' => { 'compress' => true, 'location' => '/bak2' } } } end let(:bare_resource) do JSON.dump( 'type' => 'fs', 'settings' => { 'compress' => true, 'location' => '/backups' } ) end let(:resource) { Puppet::Type::Elasticsearch_snapshot_repository.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => 'backup', - :type => 'fs', - :compress => true, - :location => '/backups' + name: 'backup', + type: 'fs', + compress: true, + location: '/backups' } end include_examples 'REST API', 'snapshot', '_snapshot/backup' end diff --git a/spec/unit/provider/elasticsearch_template/ruby_spec.rb b/spec/unit/provider/elasticsearch_template/ruby_spec.rb index 2e7aff9..cc5f86d 100644 --- a/spec/unit/provider/elasticsearch_template/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_template/ruby_spec.rb @@ -1,79 +1,81 @@ +# frozen_string_literal: true + require_relative '../../../helpers/unit/provider/elasticsearch_rest_shared_examples' describe 
Puppet::Type.type(:elasticsearch_template).provider(:ruby) do let(:example_1) do { - :name => 'foobar1', - :ensure => :present, - :provider => :ruby, - :content => { + name: 'foobar1', + ensure: :present, + provider: :ruby, + content: { 'aliases' => {}, 'mappings' => {}, 'settings' => {}, 'template' => 'foobar1-*', 'order' => 1 } } end let(:json_1) do { 'foobar1' => { 'aliases' => {}, 'mappings' => {}, 'order' => 1, 'settings' => {}, 'template' => 'foobar1-*' } } end let(:example_2) do { - :name => 'foobar2', - :ensure => :present, - :provider => :ruby, - :content => { + name: 'foobar2', + ensure: :present, + provider: :ruby, + content: { 'aliases' => {}, 'mappings' => {}, 'settings' => {}, 'template' => 'foobar2-*', 'order' => 2 } } end let(:json_2) do { 'foobar2' => { 'aliases' => {}, 'mappings' => {}, 'order' => 2, 'settings' => {}, 'template' => 'foobar2-*' } } end let(:bare_resource) do JSON.dump( 'order' => 0, 'aliases' => {}, 'mappings' => {}, 'template' => 'fooindex-*' ) end let(:resource) { Puppet::Type::Elasticsearch_template.new props } let(:provider) { described_class.new resource } let(:props) do { - :name => 'foo', - :content => { + name: 'foo', + content: { 'template' => 'fooindex-*' } } end include_examples 'REST API', 'template', '_template/foo' end diff --git a/spec/unit/provider/elasticsearch_user/ruby_spec.rb b/spec/unit/provider/elasticsearch_user/ruby_spec.rb index 627c854..d5c8a9a 100644 --- a/spec/unit/provider/elasticsearch_user/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_user/ruby_spec.rb @@ -1,63 +1,65 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_user).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'without users' do - before do - expect(described_class).to receive(:command_with_path).with('list').and_return( + it 'returns no resources' do + allow(described_class).to receive(:command_with_path).with('list').and_return( 'No users found' ) - end - it 'should return no resources' do expect(described_class.instances.size).to eq(0) + expect(described_class).to have_received(:command_with_path).with('list') end end context 'with one user' do - before do - expect(described_class).to receive(:command_with_path).with('list').and_return( + it 'returns one resource' do + allow(described_class).to receive(:command_with_path).with('list').and_return( 'elastic : admin*,power_user' ) - end - it 'should return one resource' do expect(described_class.instances[0].instance_variable_get( - '@property_hash' - )).to eq( - :ensure => :present, - :name => 'elastic', - :provider => :ruby - ) + '@property_hash' + )).to eq( + ensure: :present, + name: 'elastic', + provider: :ruby + ) + expect(described_class).to have_received(:command_with_path).with('list') end end context 'with multiple users' do - before do - expect(described_class).to receive( + it 'returns three resources' do + allow(described_class).to receive( :command_with_path ).with('list').and_return( <<-EOL elastic : admin* logstash : user kibana : kibana EOL ) - end - it 'should return three resources' do expect(described_class.instances.length).to eq(3) + + expect(described_class).to have_received( + :command_with_path + ).with('list') end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end 
end diff --git a/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb b/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb index 78dfc65..4adda7d 100644 --- a/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_user_file/ruby_spec.rb @@ -1,44 +1,46 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_user_file).provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'without users' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one user' do - it 'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( elastic:$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C - ).gsub(/^\s+/, ''))[0]).to eq( - :name => 'elastic', - :hashed_password => '$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C', - :record_type => :ruby + ).gsub(%r{^\s+}, ''))[0]).to eq( + name: 'elastic', + hashed_password: '$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C', + record_type: :ruby ) end end context 'with multiple users' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin:$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C user:$2a$10$caYr8GhYeJ2Yo0yEhQhQvOjLSwt8Lm6MKQWx8WSnZ/L/IL5sGdQFu kibana:$2a$10$daYr8GhYeJ2Yo0yEhQhQvOjLSwt8Lm6MKQWx8WSnZ/L/IL5sGdQFu - ).gsub(/^\s+/, '')).length).to eq(3) + ).gsub(%r{^\s+}, '')).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb b/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb index 2effbd8..0fa6ed2 100644 --- a/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb +++ b/spec/unit/provider/elasticsearch_user_roles/ruby_spec.rb @@ -1,44 +1,46 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' -describe Puppet::Type.type(:elasticsearch_user_roles) - .provider(:ruby) do +describe Puppet::Type.type(:elasticsearch_user_roles). 
+ provider(:ruby) do describe 'instances' do - it 'should have an instance method' do + it 'has an instance method' do expect(described_class).to respond_to :instances end context 'without roles' do - it 'should return no resources' do + it 'returns no resources' do expect(described_class.parse("\n")).to eq([]) end end context 'with one user' do - it 'should return one resource' do + it 'returns one resource' do expect(described_class.parse(%( admin:elastic power_user:elastic ))[0]).to eq( - :name => 'elastic', - :roles => %w[admin power_user] + name: 'elastic', + roles: %w[admin power_user] ) end end context 'with multiple users' do - it 'should return three resources' do + it 'returns three resources' do expect(described_class.parse(%( admin:elastic logstash:user kibana:kibana )).length).to eq(3) end end - end # of describe instances + end describe 'prefetch' do - it 'should have a prefetch method' do + it 'has a prefetch method' do expect(described_class).to respond_to :prefetch end end end diff --git a/spec/unit/puppet_x/elastic/hash_spec.rb b/spec/unit/puppet_x/elastic/hash_spec.rb index 1baae97..3e3f132 100644 --- a/spec/unit/puppet_x/elastic/hash_spec.rb +++ b/spec/unit/puppet_x/elastic/hash_spec.rb @@ -1,25 +1,26 @@ +# frozen_string_literal: true $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', '..', 'lib')) require 'spec_helper_rspec' require 'puppet_x/elastic/hash' describe Puppet_X::Elastic::SortedHash do subject { { 'foo' => 1, 'bar' => 2 } } describe 'each_pair' do it { is_expected.to respond_to :each_pair } it 'yields values' do expect { |b| subject.each_pair(&b) }.to yield_control.exactly(2).times end it 'returns an Enumerator if not passed a block' do expect(subject.each_pair).to be_an_instance_of(Enumerator) end it 'returns values' do subject.each_pair.map { |k, v| [k, v] }.should == subject.to_a end end end diff --git a/spec/unit/type/elasticsearch_index_spec.rb b/spec/unit/type/elasticsearch_index_spec.rb index 70cb1e3..b96d2ab 100644 --- a/spec/unit/type/elasticsearch_index_spec.rb +++ b/spec/unit/type/elasticsearch_index_spec.rb @@ -1,67 +1,69 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_index) do let(:resource_name) { 'test-index' } include_examples 'REST API types', 'index', :settings describe 'settings' do let(:resource) do described_class.new( - :name => resource_name, - :ensure => 'present', - :settings => { + name: resource_name, + ensure: 'present', + settings: { 'index' => { 'number_of_replicas' => '0' } } ) end let(:settings) { resource.property(:settings) } describe 'insync?' 
do describe 'synced properties' do let(:is_settings) do { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 0, 'number_of_shards' => 5, 'provided_name' => 'a', 'uuid' => 'vtjrcgyerviqllrakslrsw', 'version' => { 'created' => 5_020_199 } } } end it 'only enforces defined settings' do - expect(settings.insync?(is_settings)).to be_truthy + expect(settings).to be_insync(is_settings) end end describe 'out-of-sync properties' do let(:is_settings) do { 'index' => { 'creation_date' => 1_487_354_196_301, 'number_of_replicas' => 1, 'number_of_shards' => 5, 'provided_name' => 'a', 'uuid' => 'vtjrcgyerviqllrakslrsw', 'version' => { 'created' => 5_020_199 } } } end it 'detects out-of-sync nested values' do - expect(settings.insync?(is_settings)).to be_falsy + expect(settings).not_to be_insync(is_settings) end end end end end diff --git a/spec/unit/type/elasticsearch_keystore_spec.rb b/spec/unit/type/elasticsearch_keystore_spec.rb index 79bfa98..b844937 100644 --- a/spec/unit/type/elasticsearch_keystore_spec.rb +++ b/spec/unit/type/elasticsearch_keystore_spec.rb @@ -1,93 +1,103 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_keystore) do let(:resource_name) { 'es-01' } describe 'validating attributes' do - [:configdir, :instance, :purge].each do |param| - it "should have a `#{param}` parameter" do + %i[configdir instance purge].each do |param| + it "has a `#{param}` parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :settings].each do |prop| - it "should have a #{prop} property" do + %i[ensure settings].each do |prop| + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :instance as its namevar' do + it 'has :instance as its namevar' do expect(described_class.key_attributes).to eq([:instance]) end end - end # of describe validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'settings' do [{ 'node.name' => 'foo' }, ['node.name', 'node.data']].each do |setting| it "accepts #{setting.class}s" do - expect { described_class.new( - :name => resource_name, - :settings => setting - ) }.to_not raise_error + expect do + described_class.new( + name: resource_name, + settings: setting + ) + end.not_to raise_error end end describe 'insync' do it 'only checks lists or hash key membership' do expect(described_class.new( - :name => resource_name, - :settings => { 'node.name' => 'foo', 'node.data' => 
true } + name: resource_name, + settings: { 'node.name' => 'foo', 'node.data' => true } ).property(:settings).insync?( %w[node.name node.data] )).to be true end context 'purge' do it 'defaults to not purge values' do expect(described_class.new( - :name => resource_name, - :settings => { 'node.name' => 'foo', 'node.data' => true } + name: resource_name, + settings: { 'node.name' => 'foo', 'node.data' => true } ).property(:settings).insync?( %w[node.name node.data node.attr.rack] )).to be true end it 'respects the purge parameter' do expect(described_class.new( - :name => resource_name, - :settings => { 'node.name' => 'foo', 'node.data' => true }, - :purge => true + name: resource_name, + settings: { 'node.name' => 'foo', 'node.data' => true }, + purge: true ).property(:settings).insync?( %w[node.name node.data node.attr.rack] )).to be false end end end end - end # of describing when validating values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_license_spec.rb b/spec/unit/type/elasticsearch_license_spec.rb index 2ddbdd1..4b69ad2 100644 --- a/spec/unit/type/elasticsearch_license_spec.rb +++ b/spec/unit/type/elasticsearch_license_spec.rb @@ -1,75 +1,77 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_license) do let(:resource_name) { 'license' } include_examples 'REST API types', 'license', :content describe 'license' do let(:resource) do described_class.new( - :name => resource_name, - :ensure => 'present', - :content => { + name: resource_name, + ensure: 'present', + content: { 'license' => { - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date_in_millis' => '1519341125550', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date_in_millis' => '1519341125550', 'expiry_date_in_millis' => '1521933125550', - 'max_nodes' => '1000', - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'signature' => 'secretvalue', - 'start_date_in_millis' => '1513814400000' + 'max_nodes' => '1000', + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'signature' => 'secretvalue', + 'start_date_in_millis' => '1513814400000' } } ) end let(:content) { resource.property(:content) } describe 'insync?' 
do let(:is_content) do { 'license' => { - 'status' => 'active', - 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', - 'type' => 'trial', - 'issue_date' => '2018-02-22T23:12:05.550Z', - 'issue_date_in_millis' => 1_519_341_125_550, - 'expiry_date' => '2018-03-24T23:12:05.550Z', + 'status' => 'active', + 'uid' => 'cbff45e7-c553-41f7-ae4f-9205eabd80xx', + 'type' => 'trial', + 'issue_date' => '2018-02-22T23:12:05.550Z', + 'issue_date_in_millis' => 1_519_341_125_550, + 'expiry_date' => '2018-03-24T23:12:05.550Z', 'expiry_date_in_millis' => 1_521_933_125_550, - 'max_nodes' => 1_000, - 'issued_to' => 'test', - 'issuer' => 'elasticsearch', - 'start_date_in_millis' => 1_513_814_400_000 + 'max_nodes' => 1_000, + 'issued_to' => 'test', + 'issuer' => 'elasticsearch', + 'start_date_in_millis' => 1_513_814_400_000 } } end describe 'synced properties' do it 'only enforces defined content' do - expect(content.insync?(is_content)).to be_truthy + expect(content).to be_insync(is_content) end end describe 'out-of-sync property' do { - 'uid' => 'cbff45e7-c553-41f7-ae4f-xxxxxxxxxxxx', - 'issue_date_in_millis' => '1513814400000', + 'uid' => 'cbff45e7-c553-41f7-ae4f-xxxxxxxxxxxx', + 'issue_date_in_millis' => '1513814400000', 'expiry_date_in_millis' => '1533167999999', - 'start_date_in_millis' => '-1' + 'start_date_in_millis' => '-1' }.each_pair do |field, value| let(:changed_content) do is_content['license'][field] = value is_content end it "detection for #{field}" do - expect(content.insync?(changed_content)).to be_falsy + expect(content).not_to be_insync(changed_content) end end end end end end diff --git a/spec/unit/type/elasticsearch_pipeline_spec.rb b/spec/unit/type/elasticsearch_pipeline_spec.rb index 337d706..1b890dc 100644 --- a/spec/unit/type/elasticsearch_pipeline_spec.rb +++ b/spec/unit/type/elasticsearch_pipeline_spec.rb @@ -1,7 +1,9 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_pipeline) do let(:resource_name) { 'test_pipeline' } include_examples 'REST API types', 'pipeline', :content end diff --git a/spec/unit/type/elasticsearch_plugin_spec.rb b/spec/unit/type/elasticsearch_plugin_spec.rb index 4523db7..02258d8 100644 --- a/spec/unit/type/elasticsearch_plugin_spec.rb +++ b/spec/unit/type/elasticsearch_plugin_spec.rb @@ -1,19 +1,21 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_plugin) do let(:resource_name) { 'lmenezes/elasticsearch-kopf' } describe 'input validation' do describe 'when validating attributes' do - [:configdir, :java_opts, :java_home, :name, :source, :url, :proxy].each do |param| - it "should have a #{param} parameter" do + %i[configdir java_opts java_home name source url proxy].each do |param| + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - it 'should have an ensure property' do + it 'has an ensure property' do expect(described_class.attrtype(:ensure)).to eq(:property) end end end end diff --git a/spec/unit/type/elasticsearch_role_mapping_spec.rb b/spec/unit/type/elasticsearch_role_mapping_spec.rb index 4f5394b..294b7a2 100644 --- a/spec/unit/type/elasticsearch_role_mapping_spec.rb +++ b/spec/unit/type/elasticsearch_role_mapping_spec.rb @@ -1,66 +1,78 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_role_mapping) do let(:resource_name) { 'elastic_role' } describe 'when validating attributes' do [:name].each do 
|param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :mappings].each do |prop| - it "should have a #{prop} property" do + %i[ensure mappings].each do |prop| + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end - end # of describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'name' do - it 'should reject long role names' do - expect { described_class.new( - :name => 'a' * 41 - ) }.to raise_error( + it 'rejects long role names' do + expect do + described_class.new( + name: 'a' * 41 + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end - it 'should reject invalid role characters' do + it 'rejects invalid role characters' do ['@foobar', '0foobar'].each do |role| - expect { described_class.new( - :name => role - ) }.to raise_error( + expect do + described_class.new( + name: role + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end end end - end # of describing when validing values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_role_spec.rb b/spec/unit/type/elasticsearch_role_spec.rb index f007d52..f0ce7a9 100644 --- a/spec/unit/type/elasticsearch_role_spec.rb +++ b/spec/unit/type/elasticsearch_role_spec.rb @@ -1,66 +1,78 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_role) do let(:resource_name) { 'elastic_role' } describe 'when validating attributes' do [:name].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :privileges].each do |prop| - it "should have a #{prop} property" do + %i[ensure privileges].each do |prop| + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end - end # of describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => 
resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'name' do - it 'should reject long role names' do - expect { described_class.new( - :name => 'a' * 41 - ) }.to raise_error( + it 'rejects long role names' do + expect do + described_class.new( + name: 'a' * 41 + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end - it 'should reject invalid role characters' do + it 'rejects invalid role characters' do ['@foobar', '0foobar'].each do |role| - expect { described_class.new( - :name => role - ) }.to raise_error( + expect do + described_class.new( + name: role + ) + end.to raise_error( Puppet::ResourceError, - /valid values/i + %r{valid values}i ) end end end - end # of describing when validing values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_snapshot_repository_spec.rb b/spec/unit/type/elasticsearch_snapshot_repository_spec.rb index d02492a..8baa914 100644 --- a/spec/unit/type/elasticsearch_snapshot_repository_spec.rb +++ b/spec/unit/type/elasticsearch_snapshot_repository_spec.rb @@ -1,197 +1,199 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_snapshot_repository) do let(:resource_name) { 'test_repository' } let(:default_params) do { - :location => '/backup' + location: '/backup' } end describe 'attribute validation for elasticsearch_snapshot_repository' do - [ - :name, - :host, - :port, - :protocol, - :validate_tls, - :ca_file, - :ca_path, - :timeout, - :username, - :password, - :type + %i[ + name + host + port + protocol + validate_tls + ca_file + ca_path + timeout + username + password + type ].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [ - :ensure, - :compress, - :location, - :chunk_size, - :max_restore_rate, - :max_snapshot_rate + %i[ + ensure + compress + location + chunk_size + max_restore_rate + max_snapshot_rate ].each do |prop| - it "should have a #{prop} property" do + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end describe 'ensure' do - it 'should support present as a value for ensure' do + it 'supports present as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :present + name: resource_name, + ensure: :present ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should support absent as a value for ensure' do + it 'supports absent as a value for ensure' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :absent + name: resource_name, + ensure: :absent ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should not 
support other values' do + it 'does not support other values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :ensure => :foo + name: resource_name, + ensure: :foo ) ) - end.to raise_error(Puppet::Error, /Invalid value/) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end describe 'location' do - it 'should be required' do + it 'is required' do expect do described_class.new( - :name => resource_name + name: resource_name ) - end.to raise_error(Puppet::Error, /Location is required./) + end.to raise_error(Puppet::Error, %r{Location is required.}) end end describe 'host' do - it 'should accept IP addresses' do + it 'accepts IP addresses' do expect do described_class.new( default_params.merge( - :name => resource_name, - :host => '127.0.0.1' + name: resource_name, + host: '127.0.0.1' ) ) end.not_to raise_error end end describe 'port' do [-1, 0, 70_000, 'foo'].each do |value| - it "should reject invalid port value #{value}" do + it "rejects invalid port value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :port => value + name: resource_name, + port: value ) ) - end.to raise_error(Puppet::Error, /invalid port/i) + end.to raise_error(Puppet::Error, %r{invalid port}i) end end end describe 'validate_tls' do [-1, 0, {}, [], 'foo'].each do |value| - it "should reject invalid ssl_verify value #{value}" do + it "rejects invalid ssl_verify value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) - end.to raise_error(Puppet::Error, /invalid value/i) + end.to raise_error(Puppet::Error, %r{invalid value}i) end end [true, false, 'true', 'false', 'yes', 'no'].each do |value| - it "should accept validate_tls value #{value}" do + it "accepts validate_tls value #{value}" do expect do described_class.new( default_params.merge( - :name => resource_name, - :validate_tls => value + name: resource_name, + validate_tls: value ) ) end.not_to raise_error end end end describe 'timeout' do - it 'should reject string values' do + it 'rejects string values' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 'foo' + name: resource_name, + timeout: 'foo' ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, %r{must be a}) end - it 'should reject negative integers' do + it 'rejects negative integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => -10 + name: resource_name, + timeout: -10 ) ) - end.to raise_error(Puppet::Error, /must be a/) + end.to raise_error(Puppet::Error, %r{must be a}) end - it 'should accept integers' do + it 'accepts integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => 10 + name: resource_name, + timeout: 10 ) ) - end.to_not raise_error + end.not_to raise_error end - it 'should accept quoted integers' do + it 'accepts quoted integers' do expect do described_class.new( default_params.merge( - :name => resource_name, - :timeout => '10' + name: resource_name, + timeout: '10' ) ) - end.to_not raise_error + end.not_to raise_error end end - end # of describing when validing values include_examples 'REST API types', 'snapshot_repository' -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_template_spec.rb b/spec/unit/type/elasticsearch_template_spec.rb index abeabf0..a4df913 100644 --- 
a/spec/unit/type/elasticsearch_template_spec.rb +++ b/spec/unit/type/elasticsearch_template_spec.rb @@ -1,134 +1,136 @@ +# frozen_string_literal: true + require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples' describe Puppet::Type.type(:elasticsearch_template) do let(:resource_name) { 'test_template' } include_examples 'REST API types', 'template', :content describe 'template attribute validation' do - it 'should have a source parameter' do + it 'has a source parameter' do expect(described_class.attrtype(:source)).to eq(:param) end describe 'content and source validation' do - it 'should require either "content" or "source"' do + it 'requires either "content" or "source"' do expect do described_class.new( - :name => resource_name, - :ensure => :present + name: resource_name, + ensure: :present ) - end.to raise_error(Puppet::Error, /content.*or.*source.*required/) + end.to raise_error(Puppet::Error, %r{content.*or.*source.*required}) end - it 'should fail with both defined' do + it 'fails with both defined' do expect do described_class.new( - :name => resource_name, - :content => {}, - :source => 'puppet:///example.json' + name: resource_name, + content: {}, + source: 'puppet:///example.json' ) - end.to raise_error(Puppet::Error, /simultaneous/) + end.to raise_error(Puppet::Error, %r{simultaneous}) end - it 'should parse source paths into the content property' do + it 'parses source paths into the content property' do file_stub = 'foo' [ Puppet::FileServing::Metadata, Puppet::FileServing::Content ].each do |klass| - allow(klass).to receive(:indirection) - .and_return(Object) + allow(klass).to receive(:indirection). + and_return(Object) end - allow(Object).to receive(:find) - .and_return(file_stub) - allow(file_stub).to receive(:content) - .and_return('{"template":"foobar-*", "order": 1}') + allow(Object).to receive(:find). + and_return(file_stub) + allow(file_stub).to receive(:content). + and_return('{"template":"foobar-*", "order": 1}') expect(described_class.new( - :name => resource_name, - :source => '/example.json' + name: resource_name, + source: '/example.json' )[:content]).to include( 'template' => 'foobar-*', 'order' => 1 ) end - it 'should qualify settings' do + it 'qualifies settings' do expect(described_class.new( - :name => resource_name, - :content => { 'settings' => { + name: resource_name, + content: { 'settings' => { 'number_of_replicas' => '2', 'index' => { 'number_of_shards' => '3' } } } )[:content]).to eq( 'order' => 0, 'aliases' => {}, 'mappings' => {}, 'settings' => { 'index' => { 'number_of_replicas' => 2, 'number_of_shards' => 3 } } ) end it 'detects flat qualified index settings' do expect(described_class.new( - :name => resource_name, - :content => { + name: resource_name, + content: { 'settings' => { 'number_of_replicas' => '2', 'index.number_of_shards' => '3' } } )[:content]).to eq( 'order' => 0, 'aliases' => {}, 'mappings' => {}, 'settings' => { 'index' => { 'number_of_replicas' => 2, 'number_of_shards' => 3 } } ) end end - end # of describing when validing values + end describe 'insync?' do # Although users can pass the type a hash structure with any sort of values # - string, integer, or other native datatype - the Elasticsearch API # normalizes all values to strings. In order to verify that the type does # not incorrectly detect changes when values may be in string form, we take # an example template and force all values to strings to mimic what # Elasticsearch does. it 'is idempotent' do def deep_stringify(obj) if obj.is_a? 
Array obj.map { |element| deep_stringify(element) } elsif obj.is_a? Hash obj.merge(obj) { |_key, val| deep_stringify(val) } elsif [true, false].include? obj obj else obj.to_s end end json = JSON.parse(File.read('spec/fixtures/templates/6.x.json')) is_template = described_class.new( - :name => resource_name, - :ensure => 'present', - :content => json + name: resource_name, + ensure: 'present', + content: json ).property(:content) should_template = described_class.new( - :name => resource_name, - :ensure => 'present', - :content => deep_stringify(json) + name: resource_name, + ensure: 'present', + content: deep_stringify(json) ).property(:content).should - expect(is_template.insync?(should_template)).to be_truthy + expect(is_template).to be_insync(should_template) end end -end # of describe Puppet::Type +end diff --git a/spec/unit/type/elasticsearch_user_roles_spec.rb b/spec/unit/type/elasticsearch_user_roles_spec.rb index ac8e9cf..155b6db 100644 --- a/spec/unit/type/elasticsearch_user_roles_spec.rb +++ b/spec/unit/type/elasticsearch_user_roles_spec.rb @@ -1,50 +1,58 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' describe Puppet::Type.type(:elasticsearch_user_roles) do let(:resource_name) { 'elastic' } describe 'when validating attributes' do [:name].each do |param| - it "should have a #{param} parameter" do + it "has a #{param} parameter" do expect(described_class.attrtype(param)).to eq(:param) end end - [:ensure, :roles].each do |prop| - it "should have a #{prop} property" do + %i[ensure roles].each do |prop| + it "has a #{prop} property" do expect(described_class.attrtype(prop)).to eq(:property) end end describe 'namevar validation' do - it 'should have :name as its namevar' do + it 'has :name as its namevar' do expect(described_class.key_attributes).to eq([:name]) end end - end # of describe when validating attributes + end describe 'when validating values' do describe 'ensure' do - it 'should support present as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :present - ) }.to_not raise_error + it 'supports present as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :present + ) + end.not_to raise_error end - it 'should support absent as a value for ensure' do - expect { described_class.new( - :name => resource_name, - :ensure => :absent - ) }.to_not raise_error + it 'supports absent as a value for ensure' do + expect do + described_class.new( + name: resource_name, + ensure: :absent + ) + end.not_to raise_error end - it 'should not support other values' do - expect { described_class.new( - :name => resource_name, - :ensure => :foo - ) }.to raise_error(Puppet::Error, /Invalid value/) + it 'does not support other values' do + expect do + described_class.new( + name: resource_name, + ensure: :foo + ) + end.to raise_error(Puppet::Error, %r{Invalid value}) end end - end # of describing when validing values -end # of describe Puppet::Type + end +end diff --git a/spec/unit/type/elasticsearch_user_spec.rb b/spec/unit/type/elasticsearch_user_spec.rb index 7d200fc..83dcd11 100644 --- a/spec/unit/type/elasticsearch_user_spec.rb +++ b/spec/unit/type/elasticsearch_user_spec.rb @@ -1,76 +1,87 @@ +# frozen_string_literal: true + require 'spec_helper_rspec' -[ - :elasticsearch_user, - :elasticsearch_user_file +%i[ + elasticsearch_user + elasticsearch_user_file ].each do |described_type| describe Puppet::Type.type(described_type) do let(:resource_name) { 'elastic' } describe 'when validating 
diff --git a/spec/unit/type/elasticsearch_user_spec.rb b/spec/unit/type/elasticsearch_user_spec.rb
index 7d200fc..83dcd11 100644
--- a/spec/unit/type/elasticsearch_user_spec.rb
+++ b/spec/unit/type/elasticsearch_user_spec.rb
@@ -1,76 +1,87 @@
+# frozen_string_literal: true
+
 require 'spec_helper_rspec'

-[
-  :elasticsearch_user,
-  :elasticsearch_user_file
+%i[
+  elasticsearch_user
+  elasticsearch_user_file
 ].each do |described_type|
   describe Puppet::Type.type(described_type) do
     let(:resource_name) { 'elastic' }

     describe 'when validating attributes' do
-      [:name, :configdir].each do |param|
-        it "should have a #{param} parameter" do
+      %i[name configdir].each do |param|
+        it "has a #{param} parameter" do
          expect(described_class.attrtype(param)).to eq(:param)
        end
      end

       [:ensure].each do |prop|
-        it "should have a #{prop} property" do
+        it "has a #{prop} property" do
          expect(described_class.attrtype(prop)).to eq(:property)
        end
      end

       describe 'namevar validation' do
-        it 'should have :name as its namevar' do
+        it 'has :name as its namevar' do
          expect(described_class.key_attributes).to eq([:name])
        end
      end
-    end # of describe when validating attributes
+    end

     describe 'when validating values' do
       describe 'ensure' do
-        it 'should support present as a value for ensure' do
-          expect { described_class.new(
-            :name => resource_name,
-            :ensure => :present
-          ) }.to_not raise_error
+        it 'supports present as a value for ensure' do
+          expect do
+            described_class.new(
+              name: resource_name,
+              ensure: :present
+            )
+          end.not_to raise_error
        end

-        it 'should support absent as a value for ensure' do
-          expect { described_class.new(
-            :name => resource_name,
-            :ensure => :absent
-          ) }.to_not raise_error
+        it 'supports absent as a value for ensure' do
+          expect do
+            described_class.new(
+              name: resource_name,
+              ensure: :absent
+            )
+          end.not_to raise_error
        end

-        it 'should not support other values' do
-          expect { described_class.new(
-            :name => resource_name,
-            :ensure => :foo
-          ) }.to raise_error(Puppet::Error, /Invalid value/)
+        it 'does not support other values' do
+          expect do
+            described_class.new(
+              name: resource_name,
+              ensure: :foo
+            )
+          end.to raise_error(Puppet::Error, %r{Invalid value})
        end
      end

       {
-        :hashed_password => :property,
-        :password => :param
+        hashed_password: :property,
+        password: :param
       }.each_pair do |attribute, type|
         next unless described_class.respond_to? attribute

         describe attribute.to_s do
-          it "should have a #{attrtype} #{type}" do
+          it "has a #{attrtype} #{type}" do
            expect(described_class.attrtype(attribute)).to eq(type)
          end
        end

         next unless attribute == :password
-        it 'should reject short passwords' do
-          expect { described_class.new(
-            :name => resource_name,
-            :password => 'foo'
-          ) }.to raise_error(Puppet::Error, /must be at least/)
+
+        it 'rejects short passwords' do
+          expect do
+            described_class.new(
+              name: resource_name,
+              password: 'foo'
+            )
+          end.to raise_error(Puppet::Error, %r{must be at least})
        end
      end
-    end # of describing when validing values
-  end # of describe Puppet::Type
+    end
+  end
 end
diff --git a/spec/unit/type/es_instance_conn_validator_spec.rb b/spec/unit/type/es_instance_conn_validator_spec.rb
index 8ef7849..861bb17 100644
--- a/spec/unit/type/es_instance_conn_validator_spec.rb
+++ b/spec/unit/type/es_instance_conn_validator_spec.rb
@@ -1,88 +1,96 @@
+# frozen_string_literal: true
+
 require_relative '../../helpers/unit/type/elasticsearch_rest_shared_examples'

 describe Puppet::Type.type(:es_instance_conn_validator) do
   let(:resource_name) { 'conn-validator' }
   let(:conn_validator) do
-    Puppet::Type.type(:es_instance_conn_validator)
-      .new(name: resource_name)
+    Puppet::Type.type(:es_instance_conn_validator).
+      new(name: resource_name)
   end

   describe 'when validating attributes' do
-    [:name, :server, :port, :timeout, :sleep_interval].each do |param|
-      it 'should have a #{param} parameter' do
+    %i[name server port timeout sleep_interval].each do |param|
+      it "has a #{param} parameter" do
         expect(described_class.attrtype(param)).to eq(:param)
       end
     end

     [:ensure].each do |prop|
-      it 'should have a #{prop} property' do
+      it "has a #{prop} property" do
         expect(described_class.attrtype(prop)).to eq(:property)
       end
     end

     describe 'namevar validation' do
-      it 'should have :name as its namevar' do
+      it 'has :name as its namevar' do
         expect(described_class.key_attributes).to eq([:name])
       end
     end
-  end # describe when validating attributes
+  end

   describe 'when validating values' do
     describe 'ensure' do
-      it 'should support present as a value for ensure' do
-        expect { described_class.new(
-          :name => resource_name,
-          :ensure => :present
-        ) }.to_not raise_error
+      it 'supports present as a value for ensure' do
+        expect do
+          described_class.new(
+            name: resource_name,
+            ensure: :present
+          )
+        end.not_to raise_error
      end

-      it 'should support absent as a value for ensure' do
-        expect { described_class.new(
-          :name => resource_name,
-          :ensure => :absent
-        ) }.to_not raise_error
+      it 'supports absent as a value for ensure' do
+        expect do
+          described_class.new(
+            name: resource_name,
+            ensure: :absent
+          )
+        end.not_to raise_error
      end

-      it 'should not support other values' do
-        expect { described_class.new(
-          :name => resource_name,
-          :ensure => :foo
-        ) }.to raise_error(Puppet::Error, /Invalid value/)
+      it 'does not support other values' do
+        expect do
+          described_class.new(
+            name: resource_name,
+            ensure: :foo
+          )
+        end.to raise_error(Puppet::Error, %r{Invalid value})
      end
-    end # describe 'ensure'
+    end

     describe 'timeout' do
-      it 'should support a numerical value' do
+      it 'supports a numerical value' do
         conn_validator[:timeout] = 120
         expect(conn_validator[:timeout]).to eq(120)
       end

-      it 'should have a default value of 60' do
+      it 'has a default value of 60' do
         expect(conn_validator[:timeout]).to eq(60)
       end

-      it 'should not support a non-numeric value' do
+      it 'does not support a non-numeric value' do
         expect do
           conn_validator[:timeout] = 'string'
-        end.to raise_error(Puppet::Error, /invalid value/)
+        end.to raise_error(Puppet::Error, %r{invalid value})
       end
-    end # describe 'timeout'
+    end

     describe 'sleep_interval' do
-      it 'should support a numerical value' do
+      it 'supports a numerical value' do
         conn_validator[:sleep_interval] = 120
         expect(conn_validator[:sleep_interval]).to eq(120)
       end

-      it 'should have a default value of 10' do
+      it 'has a default value of 10' do
         expect(conn_validator[:sleep_interval]).to eq(10)
       end

-      it 'should not support a non-numeric value' do
+      it 'does not support a non-numeric value' do
         expect do
           conn_validator[:sleep_interval] = 'string'
-        end.to raise_error(Puppet::Error, /invalid value/)
+        end.to raise_error(Puppet::Error, %r{invalid value})
       end
-    end # describe 'sleep_interval
-  end # describe 'when valdating values'
-end # of describe Puppet::Type
+    end
+  end
+end
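The es_instance_conn_validator examples pin the type's defaults (timeout 60, sleep_interval 10) and its rejection of non-numeric values. A rough sketch of the same behaviour driven directly from Ruby, assuming the module's lib directory is on the load path so the custom type can be resolved:

    require 'puppet'

    validator = Puppet::Type.type(:es_instance_conn_validator).new(name: 'conn-validator')
    validator[:timeout]          # => 60, the default asserted above
    validator[:sleep_interval]   # => 10, the default asserted above
    validator[:timeout] = 120    # numeric values are accepted
    validator[:timeout] = 'nope' # raises Puppet::Error ("invalid value"), per the spec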