diff --git a/.gemspec b/.gemspec deleted file mode 100644 index e44296d6cab..00000000000 --- a/.gemspec +++ /dev/null @@ -1,56 +0,0 @@ -# -*- encoding: utf-8 -*- -# -# PLEASE NOTE -# This gemspec is not intended to be used for building the Puppet gem. This -# gemspec is intended for use with bundler when Puppet is a dependency of -# another project. For example, the stdlib project is able to integrate with -# the master branch of Puppet by using a Gemfile path of -# git://github.com/puppetlabs/puppet.git -# -# Please see the [packaging -# repository](https://github.com/puppetlabs/packaging) for information on how -# to build the Puppet gem package. - -begin - require 'puppet/version' -rescue LoadError - $LOAD_PATH.unshift(File.expand_path("../lib", __FILE__)) - require 'puppet/version' -end - -Gem::Specification.new do |s| - s.name = "puppet" - version = Puppet.version - mdata = version.match(/(\d+\.\d+\.\d+)/) - s.version = mdata ? mdata[1] : version - - s.required_rubygems_version = Gem::Requirement.new("> 1.3.1") if s.respond_to? :required_rubygems_version= - s.required_ruby_version = Gem::Requirement.new(">= 1.9.3") if s.respond_to? :required_ruby_version= - s.authors = ["Puppet Labs"] - s.date = "2012-08-17" - s.description = "Puppet, an automated configuration management tool" - s.email = "puppet@puppetlabs.com" - s.executables = ["puppet"] - s.files = ["bin/puppet"] - s.homepage = "http://puppetlabs.com" - s.rdoc_options = ["--title", "Puppet - Configuration Management", "--main", "README", "--line-numbers"] - s.require_paths = ["lib"] - s.rubyforge_project = "puppet" - s.rubygems_version = "1.8.24" - s.summary = "Puppet, an automated configuration management tool" - - if s.respond_to? :specification_version then - s.specification_version = 3 - - if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then - s.add_runtime_dependency(%q, [">= 1.7", "< 3"]) - s.add_runtime_dependency(%q, [">= 2.0", "< 3"]) - else - s.add_dependency(%q, [">= 1.7", "< 3"]) - s.add_dependency(%q, [">= 2.0", "< 3"]) - end - else - s.add_dependency(%q, [">= 1.7", "< 3"]) - s.add_dependency(%q, [">= 2.0", "< 3"]) - end -end diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000000..b1e89515c09 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,6 @@ +# Disable NL -> CRNL translation on Windows. This is necessary because the files on disk must +# match the checksums in metadata.json. 
+spec/fixtures/integration/application/module/environments/direnv/modules/nginx/README -text +spec/fixtures/integration/application/module/environments/direnv/modules/nginx/metadata.json -text +spec/fixtures/integration/application/module/environments/direnv/modules/nginx/Modulefile -text +spec/fixtures/integration/application/module/environments/direnv/modules/nginx/manifests/init.pp -text diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 00000000000..f3a34428761 --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,8 @@ +changelog: + categories: + - title: Features & Enhancements + labels: + - enhancement + - title: Bug Fixes + labels: + - bug diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 00000000000..40086688592 --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,19 @@ +name: Backport merged pull request +on: + pull_request_target: + types: [labeled] +permissions: + contents: write # so it can comment + pull-requests: write # so it can create pull requests +jobs: + backport: + name: Backport merged pull request + runs-on: ubuntu-latest + # For security reasons, we don't want to checkout and run arbitrary code when + # using the pull_request_target trigger. So restrict this to cases where the + # backport label is applied to an already merged PR. + if: github.event.pull_request.merged && contains(github.event.label.name, 'backport') + steps: + - uses: actions/checkout@v4 + - name: Create backport pull requests + uses: korthout/backport-action@v1 diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml new file mode 100644 index 00000000000..6fad834b99f --- /dev/null +++ b/.github/workflows/checks.yaml @@ -0,0 +1,41 @@ +--- +name: Checks + +on: + push: + branches: [main] + pull_request: + branches: [main] + +permissions: + contents: read + +jobs: + checks: + name: ${{ matrix.cfg.check }} + strategy: + matrix: + cfg: + - {check: rubocop, os: ubuntu-latest, ruby: '3.1'} + - {check: warnings, os: ubuntu-latest, ruby: '3.1'} + + runs-on: ${{ matrix.cfg.os }} + steps: + - name: Checkout current PR + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install ruby version ${{ matrix.cfg.ruby }} + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.cfg.ruby }} + + - name: Update rubygems and install gems + run: | + gem update --system --silent --no-document + bundle config set without packaging documentation + bundle install --jobs 4 --retry 3 + + - name: Run ${{ matrix.cfg.check }} check + run: bundle exec rake ${{ matrix.cfg.check }} diff --git a/.github/workflows/jira.yml b/.github/workflows/jira.yml new file mode 100644 index 00000000000..0444c98d823 --- /dev/null +++ b/.github/workflows/jira.yml @@ -0,0 +1,19 @@ +--- +name: Export issue to Jira + +on: + issues: + types: [labeled] + +permissions: + issues: write + +jobs: + export: + uses: "puppetlabs/phoenix-github-actions/.github/workflows/jira.yml@main" + with: + jira-project: PUP + jira-base-url: ${{ vars.jira_base_url }} + jira-user-email: ${{ vars.jira_user_email }} + secrets: + jira-api-token: ${{ secrets.JIRA_ISSUES_ACTION }} diff --git a/.github/workflows/mend.yaml b/.github/workflows/mend.yaml new file mode 100644 index 00000000000..e77550532ec --- /dev/null +++ b/.github/workflows/mend.yaml @@ -0,0 +1,34 @@ +--- +name: Mend Monitor +on: + push: + branches: + - main +jobs: + mend_monitor: + if: ${{ github.repository_owner == 'puppetlabs' }} + runs-on: ubuntu-latest + name: Mend Monitor + steps: + - 
name: Checkout current PR + uses: actions/checkout@v4 + - name: Setup Ruby + uses: ruby/setup-ruby@v1 + with: + ruby-version: 3.1 + - name: Create lock + run: bundle lock + - uses: actions/setup-java@v3 + with: + distribution: 'temurin' + java-version: '17' + - name: Download Mend + run: curl -o wss-unified-agent.jar https://unified-agent.s3.amazonaws.com/wss-unified-agent.jar + - name: Run Mend + run: java -jar wss-unified-agent.jar + env: + WS_APIKEY: ${{ secrets.MEND_API_KEY }} + WS_WSS_URL: https://saas-eu.whitesourcesoftware.com/agent + WS_USERKEY: ${{ secrets.MEND_TOKEN }} + WS_PRODUCTNAME: Puppet Agent + WS_PROJECTNAME: ${{ github.event.repository.name }} diff --git a/.github/workflows/rspec_tests.yaml b/.github/workflows/rspec_tests.yaml new file mode 100644 index 00000000000..848cdad0704 --- /dev/null +++ b/.github/workflows/rspec_tests.yaml @@ -0,0 +1,84 @@ +--- +name: RSpec tests + +on: + push: + branches: [main] + pull_request: + branches: [main] + +permissions: + contents: read + +jobs: + rspec_tests: + name: ${{ matrix.cfg.os }}(ruby ${{ matrix.cfg.ruby }}) + strategy: + matrix: + cfg: + - {os: ubuntu-latest, ruby: '3.1'} + - {os: ubuntu-20.04, ruby: '3.2'} # openssl 1.1.1 + - {os: ubuntu-22.04, ruby: '3.2'} # openssl 3 + - {os: ubuntu-22.04, ruby: '3.3'} # openssl 3 / latest Ruby + - {os: ubuntu-latest, ruby: 'jruby-9.4.3.0'} + - {os: windows-2019, ruby: '3.1'} + - {os: windows-2019, ruby: '3.2.5'} # openssl 3 & temporarily pinned to ruby 3.2.5, see PA-7108 + - {os: windows-2019, ruby: '3.3'} # openssl 3 / latest Ruby + + runs-on: ${{ matrix.cfg.os }} + env: + BUNDLE_SET: "without packaging documentation" + steps: + - name: Checkout current PR + uses: actions/checkout@v4 + + - name: Install ruby version ${{ matrix.cfg.ruby }} + uses: ruby/setup-ruby@v1 + with: + ruby-version: ${{ matrix.cfg.ruby }} + bundler-cache: true + + - name: Run tests on Windows + if: runner.os == 'Windows' + run: | + # https://github.com/ruby/ruby/pull/2791/files#diff-ff5ff976e81bebd977f0834e60416abbR97-R100 + # Actions uses UTF8, causes test failures, similar to normal OS setup + $PSDefaultParameterValues['*:Encoding'] = 'utf8' + [Console]::OutputEncoding = [System.Text.Encoding]::GetEncoding("IBM437") + [Console]::InputEncoding = [System.Text.Encoding]::GetEncoding("IBM437") + $Env:LOG_SPEC_ORDER = 'true' + + # debug information + chcp + Get-WinSystemLocale + Get-ChildItem Env: | % { Write-Output "$($_.Key): $($_.Value)" } + # list current OpenSSL install + gem list openssl + ruby -ropenssl -e 'puts "OpenSSL Version - #{OpenSSL::OPENSSL_VERSION}"; puts "OpenSSL Library Version - #{OpenSSL::OPENSSL_LIBRARY_VERSION}"' + Get-Content Gemfile.lock + ruby -v + gem --version + bundle --version + + # Run tests + bundle exec rake parallel:spec[2] + + - name: Run tests on Linux + if: runner.os == 'Linux' + run: | + # debug information + gem list openssl + ruby -ropenssl -e 'puts "OpenSSL Version - #{OpenSSL::OPENSSL_VERSION}"; puts "OpenSSL Library Version - #{OpenSSL::OPENSSL_LIBRARY_VERSION}"' + cat Gemfile.lock + ruby -v + gem --version + bundle --version + + if [[ ${{ matrix.cfg.ruby }} =~ "jruby" ]]; then + export _JAVA_OPTIONS='-Xmx1024m -Xms512m' + + # workaround for PUP-10683 + sudo apt remove rpm + fi + + bundle exec rake parallel:spec[2] diff --git a/.gitignore b/.gitignore index 9c75fc11d4e..1013232d981 100644 --- a/.gitignore +++ b/.gitignore @@ -2,8 +2,6 @@ results tags .*.sw[op] -ext/packaging -pkg test.pp # YARD generated documentation .yardoc @@ -12,18 +10,25 @@ test.pp # to work around that. 
Which we don't want committed, so we can ignore it here. /.rvmrc .bundle/ -ext/packaging/ -pkg/ +.byebug_history +/ext/packaging/ +/pkg/ Gemfile.lock Gemfile.local -.bundle/ +Guardfile puppet-acceptance/ /.project -.idea/ .ruby-version .ruby-gemset -acceptance/junit -acceptance/log -acceptance/.bundle +/acceptance/junit +/acceptance/log +/acceptance/.beaker # emacs backup files *~ +/*.samples +coverage/ +# Files and directory added by RubyMine IDE +*.iml +.rakeTasks +.idea/ +spec_order.txt diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000000..eed731dca39 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "benchmarks/full_catalog/puppetlabs-puppetserver_perf_control"] + path = benchmarks/full_catalog/puppetlabs-puppetserver_perf_control + url = https://github.com/puppetlabs/puppetlabs-puppetserver_perf_control diff --git a/.rubocop.yml b/.rubocop.yml index 0813e2e4ca1..ab9fb23dda4 100644 --- a/.rubocop.yml +++ b/.rubocop.yml @@ -1,532 +1,388 @@ +inherit_from: .rubocop_todo.yml + +require: + - rubocop-i18n + - rubocop-performance + - rubocop-rake + - rubocop-rspec + AllCops: + TargetRubyVersion: 3.1 Include: - 'lib/**/*.rb' - 'ext/**/*.rb' Exclude: - '**/*.erb' - 'acceptance/**/*' - - 'autotest/**/*' - 'spec/**/*' - 'tasks/**/*' + - 'ext/suse/puppet.spec' - 'lib/puppet/vendor/**/*' - - 'lib/puppet/parser/parser.rb' + - 'lib/puppet/pops/model/ast.rb' - 'lib/puppet/pops/parser/eparser.rb' - - 'lib/puppet/external/nagios/parser.rb' -Lint/ConditionPosition: - Enabled: true +# The formatting of defaults is unusual, so let's skip layout cops. +Layout: + Exclude: + - 'lib/puppet/defaults.rb' -Lint/ElseLayout: - Enabled: true +# We don't mind when module and class keywords are aligned. +Layout/IndentationWidth: + AllowedPatterns: ['^\s*module'] -Lint/UnreachableCode: +Layout/LineEndStringConcatenationIndentation: Enabled: true -Lint/UselessComparison: - Enabled: true - -# MAYBE useful - no return inside ensure block. -Lint/EnsureReturn: - Enabled: false - -# MAYBE useful - errors when rescue {} happens. -Lint/HandleExceptions: - Enabled: false +# Enabling this cop will remove raising RuntimeErrors and cause spec test +# failures +Style/RedundantException: + Exclude: + - 'lib/puppet/file_bucket/dipper.rb' + - 'lib/puppet/forge.rb' + - 'lib/puppet/module_tool/applications/unpacker.rb' + - 'lib/puppet/module_tool/local_tarball.rb' + - 'lib/puppet/module_tool/tar.rb' + - 'lib/puppet/pops/types/class_loader.rb' + +# Enabling this cop causes a plethora of failed rspec tests, mostly +# Errno::ENAMETOOLONG and Puppet::Context::UndefinedBindingError: Unable to +# lookup 'environments' errors +Style/RedundantSelfAssignment: + Exclude: + - 'lib/puppet/context.rb' -# MAYBE useful - catches while 1 -Lint/LiteralInCondition: +# Enabling this cop causes failures in rb_tree_map_spec relating to important +# function slike being unable to delete nodes and returning nil when the key +# cannot be found +Style/PreferredHashMethods: Enabled: false -Lint/ShadowingOuterLocalVariable: +# Explicitly enables this cop new in 1.7 +Layout/SpaceBeforeBrackets: Enabled: true -# Can catch complicated strings. 
-Lint/LiteralInInterpolation: +# puppet uses symbol booleans in types and providers to work around long standing +# bugs when trying to manage falsey pararameters and properties +Lint/BooleanSymbol: Enabled: true + Exclude: + - 'lib/puppet/type.rb' + - 'lib/puppet/type/**/*.rb' + - 'lib/puppet/provider/**/*.rb' + - 'lib/puppet/reference/providers.rb' + - 'lib/puppet/parameter/value.rb' - -# DISABLED really useless. Detects return as last statement. -Style/RedundantReturn: - Enabled: false - -# DISABLED since the instances do not seem to indicate any specific errors. -Lint/AmbiguousOperator: - Enabled: false - -# DISABLED since for all the checked, we are basically checking nil -# TODO: Change the checking so that if the variable being assigned to has -# a value ALREADY, then raise an error. -Lint/AssignmentInCondition: +Metrics/AbcSize: Enabled: false -# DISABLED - not useful -Style/SpaceBeforeComment: +Metrics/BlockLength: Enabled: false -# DISABLED - not useful -Style/HashSyntax: +Metrics/BlockNesting: Enabled: false -# USES: as shortcut for non nil&valid checking a = x() and a.empty? -# DISABLED - not useful -Style/AndOr: +Metrics/ClassLength: Enabled: false -# DISABLED - not useful -Style/RedundantSelf: +Metrics/CyclomaticComplexity: Enabled: false -# DISABLED - not useful Metrics/MethodLength: Enabled: false -# DISABLED - not useful -Style/WhileUntilModifier: +Metrics/ModuleLength: Enabled: false -# DISABLED - the offender is just haskell envy -Lint/AmbiguousRegexpLiteral: +Metrics/ParameterLists: Enabled: false -# DISABLED -Lint/Eval: - Enabled: false -# DISABLED -Lint/BlockAlignment: +Metrics/PerceivedComplexity: Enabled: false -# DISABLED -Lint/DefEndAlignment: +Naming/AccessorMethodName: Enabled: false -# DISABLED -Lint/EndAlignment: +Naming/BinaryOperatorParameterName: Enabled: false -# DISABLED -Lint/DeprecatedClassMethods: - Enabled: false +Naming/BlockParameterName: + Exclude: + - 'lib/puppet/util/windows/daemon.rb' + - 'lib/puppet/util/windows/user.rb' -# DISABLED -Lint/Loop: - Enabled: false +Naming/ClassAndModuleCamelCase: + Exclude: + - 'lib/puppet/ffi/windows/structs.rb' + - 'lib/puppet/pops/validation/checker4_0.rb' + - 'lib/puppet/pops/validation/validator_factory_4_0.rb' + - 'lib/puppet/util/windows/root_certs.rb' + - 'lib/puppet/util/windows/security.rb' + - 'lib/puppet/util/windows/user.rb' + +Naming/ConstantName: + Exclude: + - 'lib/puppet/graph/relationship_graph.rb' + - 'lib/puppet/indirector/hiera.rb' + - 'lib/puppet/provider/package/sun.rb' + - 'lib/puppet/resource/type.rb' + - 'lib/puppet/type/schedule.rb' + - 'lib/puppet/type/tidy.rb' + - 'lib/puppet/util.rb' + - 'lib/puppet/util/colors.rb' + - 'lib/puppet/util/execution.rb' + - 'lib/puppet/util/symbolic_file_mode.rb' + - 'lib/puppet/util/tagging.rb' + - 'lib/puppet/util/windows/adsi.rb' + - 'lib/puppet/util/windows/sid.rb' + - 'lib/puppet/util/yaml.rb' + +Naming/HeredocDelimiterNaming: + Enabled: false + +# Exclude existing violations to avoid breaking changes +Naming/MemoizedInstanceVariableName: + Exclude: + - 'lib/puppet/module_tool/applications/installer.rb' + - 'lib/puppet/pops/types/type_factory.rb' + - 'lib/puppet/provider/package/portage.rb' + - 'lib/puppet/resource.rb' -# DISABLED -Lint/ParenthesesAsGroupedExpression: - Enabled: false +Naming/MethodName: + Exclude: + - 'lib/puppet/functions/**/*' + - 'lib/puppet/parser/ast/pops_bridge.rb' + - 'lib/puppet/pops/**/*' + - 'lib/puppet/util/windows/**/*' -Lint/RescueException: +Naming/MethodParameterName: Enabled: false 
-Lint/StringConversionInInterpolation: - Enabled: false +Naming/PredicateName: + ForbiddenPrefixes: [] -Lint/UnusedBlockArgument: +Naming/RescuedExceptionsVariableName: Enabled: false -Lint/UnusedMethodArgument: - Enabled: false +Naming/VariableName: + Exclude: + - 'ext/windows/service/daemon.rb' + - 'lib/puppet/agent.rb' + - 'lib/puppet/application/describe.rb' + - 'lib/puppet/pops/lookup/hiera_config.rb' + - 'lib/puppet/pops/validation/checker4_0.rb' + - 'lib/puppet/provider/package/pip.rb' + - 'lib/puppet/provider/package/windows/exe_package.rb' + - 'lib/puppet/provider/package/windows/msi_package.rb' + - 'lib/puppet/ssl/ssl_provider.rb' + - 'lib/puppet/util/windows/adsi.rb' + - 'lib/puppet/util/windows/daemon.rb' + - 'lib/puppet/util/windows/error.rb' + - 'lib/puppet/util/windows/user.rb' + +Naming/VariableNumber: + Enabled: false + +Performance/AncestorsInclude: # new in 1.7 + Enabled: true -# DISABLED - TODO -Lint/UselessAccessModifier: - Enabled: false +Performance/BigDecimalWithNumericArgument: # new in 1.7 + Enabled: true -# DISABLED - TODO -Lint/UselessAssignment: - Enabled: false +Performance/ConcurrentMonotonicTime: # new in 1.12 + Enabled: true -# DISABLED - TODO -Lint/Void: - Enabled: false +Performance/MapCompact: # new in 1.11 + Enabled: true -Style/AccessModifierIndentation: - Enabled: false +Performance/RedundantSortBlock: # new in 1.7 + Enabled: true -Style/AccessorMethodName: - Enabled: false +Performance/ReverseFirst: # new in 1.7 + Enabled: true -Style/Alias: - Enabled: false +RSpec/BeEq: # new in 2.9.0 + Enabled: true -Style/AlignArray: - Enabled: false +RSpec/BeNil: # new in 2.9.0 + Enabled: true -Style/AlignHash: - Enabled: false +RSpec/ExcessiveDocstringSpacing: # new in 2.5 + Enabled: true -Style/AlignParameters: - Enabled: false +RSpec/IdenticalEqualityAssertion: # new in 2.4 + Enabled: true -Metrics/BlockNesting: - Enabled: false +RSpec/SubjectDeclaration: # new in 2.5 + Enabled: true -Style/AsciiComments: - Enabled: false +RSpec/VerifiedDoubleReference: # new in 2.10.0 + Enabled: true -Style/Attr: - Enabled: false +RSpec/FactoryBot/SyntaxMethods: # new in 2.7 + Enabled: true -Style/Blocks: - Enabled: false +RSpec/Rails/AvoidSetupHook: # new in 2.4 + Enabled: true -Style/BracesAroundHashParameters: - Enabled: false +Style/AutoResourceCleanup: + Enabled: true Style/CaseEquality: - Enabled: false - -Style/CaseIndentation: - Enabled: false - -Style/CharacterLiteral: - Enabled: false - -Style/ClassAndModuleCamelCase: - Enabled: false + Enabled: true -Style/ClassAndModuleChildren: - Enabled: false +Style/CaseLikeIf: + Enabled: true Style/ClassCheck: - Enabled: false + Enabled: true -Metrics/ClassLength: - Enabled: false +Style/ClassEqualityComparison: + Enabled: true Style/ClassMethods: - Enabled: false - -Style/ClassVars: - Enabled: false - -Style/WhenThen: - Enabled: false - - -# DISABLED - not useful -Style/WordArray: - Enabled: false - -Style/UnneededPercentQ: - Enabled: false - -Style/Tab: - Enabled: false - -Style/SpaceBeforeSemicolon: - Enabled: false - -Style/TrailingBlankLines: - Enabled: false - -Style/SpaceInsideBlockBraces: - Enabled: false - -Style/SpaceInsideBrackets: - Enabled: false - -Style/SpaceInsideHashLiteralBraces: - Enabled: false - -Style/SpaceInsideParens: - Enabled: false - -Style/LeadingCommentSpace: - Enabled: false - -Style/SingleSpaceBeforeFirstArg: - Enabled: false - -Style/SpaceAfterColon: - Enabled: false - -Style/SpaceAfterComma: - Enabled: false - -Style/SpaceAfterControlKeyword: - Enabled: false - -Style/SpaceAfterMethodName: - 
Enabled: false - -Style/SpaceAfterNot: - Enabled: false - -Style/SpaceAfterSemicolon: - Enabled: false - -Style/SpaceAroundEqualsInParameterDefault: - Enabled: false - -Style/SpaceAroundOperators: - Enabled: false - -Style/SpaceBeforeBlockBraces: - Enabled: false - -Style/SpaceBeforeComma: - Enabled: false - - -Style/CollectionMethods: - Enabled: false + Enabled: true -Style/CommentIndentation: - Enabled: false +Style/CollectionCompact: + Enabled: true Style/ColonMethodCall: - Enabled: false - -Style/CommentAnnotation: - Enabled: false - -Metrics/CyclomaticComplexity: - Enabled: false + Enabled: true -Style/ConstantName: - Enabled: false +Style/CombinableLoops: + Enabled: true + Exclude: + - 'lib/puppet/graph/relationship_graph.rb' + - 'lib/puppet/pops/types/ruby_generator.rb' + - 'lib/puppet/provider/service/init.rb' + - 'lib/puppet/util/rdoc/generators/puppet_generator.rb' -Style/Documentation: - Enabled: false +Style/ColonMethodDefinition: + Enabled: true Style/DefWithParentheses: - Enabled: false - -Style/DeprecatedHashMethods: - Enabled: false - -Style/DotPosition: - Enabled: false - -# DISABLED - used for converting to bool -Style/DoubleNegation: - Enabled: false - -Style/EachWithObject: - Enabled: false + Enabled: true -Style/EmptyLineBetweenDefs: - Enabled: false +Style/Dir: + Enabled: true -Style/IndentArray: - Enabled: false +Style/DocumentDynamicEvalDefinition: + Enabled: true -Style/IndentHash: - Enabled: false +Style/DoubleCopDisableDirective: + Enabled: true -Style/IndentationConsistency: - Enabled: false +Style/EachForSimpleLoop: + Enabled: true -Style/IndentationWidth: - Enabled: false +Style/EachWithObject: + Enabled: true -Style/EmptyLines: - Enabled: false +Style/EmptyBlockParameter: + Enabled: true -Style/EmptyLinesAroundAccessModifier: - Enabled: false +Style/EmptyCaseCondition: + Enabled: true -Style/EmptyLinesAroundBody: - Enabled: false +Style/EmptyLambdaParameter: + Enabled: true Style/EmptyLiteral: - Enabled: false - -Metrics/LineLength: - Enabled: false - -Style/MethodCallParentheses: - Enabled: false - -Style/MethodDefParentheses: - Enabled: false - -Style/LineEndConcatenation: - Enabled: false - -Style/TrailingWhitespace: - Enabled: false - -Style/StringLiterals: - Enabled: false - -Style/TrailingComma: - Enabled: false - -Style/GlobalVars: - Enabled: false - -Style/GuardClause: - Enabled: false - -Style/IfUnlessModifier: - Enabled: false - -Style/MultilineIfThen: - Enabled: false - -Style/NegatedIf: - Enabled: false - -Style/NegatedWhile: - Enabled: false - -Style/Next: - Enabled: false - -Style/SingleLineBlockParams: - Enabled: false - -Style/SingleLineMethods: - Enabled: false - -Style/SpecialGlobalVars: - Enabled: false - - -Style/TrivialAccessors: - Enabled: false - -Style/UnlessElse: - Enabled: false - -Style/UnneededPercentX: - Enabled: false - -Style/VariableInterpolation: - Enabled: false - -Style/VariableName: - Enabled: false + Enabled: true -Style/WhileUntilDo: - Enabled: false +Style/EvalWithLocation: + Enabled: true Style/EvenOdd: - Enabled: false - -Style/FileName: - Enabled: false - -Style/For: - Enabled: false - -Style/Lambda: - Enabled: false - -Style/MethodName: - Enabled: false - -Style/MultilineTernaryOperator: - Enabled: false - -Style/NestedTernaryOperator: - Enabled: false - -Style/NilComparison: - Enabled: false - -Style/FormatString: - Enabled: false - -Style/MultilineBlockChain: - Enabled: false - -Style/Semicolon: - Enabled: false - -Style/SignalException: - Enabled: false - -Style/NonNilCheck: - Enabled: false - -Style/Not: - 
Enabled: false - -Style/NumericLiterals: - Enabled: false + Enabled: true -Style/OneLineConditional: - Enabled: false +Style/ExpandPathArguments: + Enabled: true -Style/OpMethod: - Enabled: false +Style/FetchEnvVar: + Enabled: true -Style/ParenthesesAroundCondition: - Enabled: false +Style/FileRead: + Enabled: true -Style/PercentLiteralDelimiters: - Enabled: false +Style/FileWrite: + Enabled: true -Style/PerlBackrefs: - Enabled: false +Style/FloatDivision: + Enabled: true -Style/PredicateName: - Enabled: false +Style/For: + Enabled: true -Style/RedundantException: - Enabled: false +Style/FrozenStringLiteralComment: + Enabled: true -Style/SelfAssignment: - Enabled: false +Style/GlobalStdStream: + Enabled: true -Style/Proc: - Enabled: false +Style/GlobalVars: + Enabled: true -Style/RaiseArgs: - Enabled: false +Style/HashAsLastArrayItem: + Enabled: true -Style/RedundantBegin: - Enabled: false +Style/HashConversion: + Enabled: true -Style/RescueModifier: +# HashEachMethods does not guarantee the receiver is a Hash, so +# this will not work since numerous puppet classes define #value +# not #each_value +Style/HashEachMethods: Enabled: false -Style/RegexpLiteral: - Enabled: false +Style/HashLikeCase: + Enabled: true + Exclude: + - 'lib/puppet/util/command_line/trollop.rb' -Lint/UnderscorePrefixedVariableName: - Enabled: false +Style/HashTransformKeys: + Enabled: true -Metrics/ParameterLists: - Enabled: false +Style/HashTransformValues: + Enabled: true -Lint/RequireParentheses: - Enabled: false +Style/IdenticalConditionalBranches: + Enabled: true -Lint/SpaceBeforeFirstArg: - Enabled: false +Style/IfInsideElse: + Enabled: true -Style/ModuleFunction: - Enabled: false +Style/IfUnlessModifierOfIfUnless: + Enabled: true -Lint/Debugger: - Enabled: false +Style/IfWithBooleanLiteralBranches: + Enabled: true Style/IfWithSemicolon: - Enabled: false + Enabled: true -Style/Encoding: - Enabled: false +Style/InfiniteLoop: + Enabled: true -Metrics/PerceivedComplexity: - Enabled: false +Style/InverseMethods: + Enabled: true -Style/SymbolProc: - Enabled: false +Style/KeywordParametersOrder: + Enabled: true -Style/SpaceInsideRangeLiteral: - Enabled: false +Style/Lambda: + Enabled: true -Style/InfiniteLoop: - Enabled: false +Style/LambdaCall: + Enabled: true -Style/BarePercentLiterals: - Enabled: false +Style/LineEndConcatenation: + Enabled: true -Style/PercentQLiterals: - Enabled: false +Style/MapCompactWithConditionalBlock: + Enabled: true -Style/MultilineBlockLayout: - Enabled: false +Style/MapToHash: + Enabled: true + +Style/MapToSet: + Enabled: true diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml new file mode 100644 index 00000000000..6d7d6eefe3a --- /dev/null +++ b/.rubocop_todo.yml @@ -0,0 +1,795 @@ +# This configuration was generated by +# `rubocop --auto-gen-config --no-offense-counts --no-auto-gen-timestamp` +# using RuboCop version 1.28.0. +# The point is for the user to remove these configuration records +# one by one as the offenses are removed from the code base. +# Note that changes in the inspected code, or installation of new +# versions of RuboCop, may require this file to be generated again. + +# This cop supports safe auto-correction (--auto-correct). +I18n/GetText/DecorateFunctionMessage: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). 
+I18n/GetText/DecorateString: + Enabled: false + +I18n/GetText/DecorateStringFormattingUsingPercent: + Exclude: + - 'lib/puppet/provider/user/windows_adsi.rb' + - 'lib/puppet/transaction/resource_harness.rb' + +I18n/RailsI18n/DecorateString: + Enabled: false + +Lint/AmbiguousAssignment: # new in 1.7 + Enabled: false + +Lint/AmbiguousOperatorPrecedence: # new in 1.21 + Enabled: false + +Lint/AmbiguousRange: # new in 1.19 + Enabled: false + +# Configuration parameters: AllowedMethods. +# AllowedMethods: enums +Lint/ConstantDefinitionInBlock: + Exclude: + - 'lib/puppet/face/config.rb' + - 'lib/puppet/face/help.rb' + - 'lib/puppet/face/node/clean.rb' + - 'lib/puppet/provider/package/aix.rb' + - 'lib/puppet/provider/package/apt.rb' + - 'lib/puppet/provider/package/gem.rb' + - 'lib/puppet/provider/package/pip.rb' + - 'lib/puppet/provider/package/yum.rb' + - 'lib/puppet/provider/service/upstart.rb' + - 'lib/puppet/provider/user/directoryservice.rb' + - 'lib/puppet/type/file.rb' + - 'lib/puppet/type/file/source.rb' + - 'lib/puppet/type/resources.rb' + - 'lib/puppet/type/schedule.rb' + - 'lib/puppet/type/tidy.rb' + +Lint/ConstantOverwrittenInRescue: # new in 1.31 + Enabled: false + +Lint/DeprecatedConstants: # new in 1.8 + Enabled: false + +Lint/DuplicateBranch: # new in 1.3 + Enabled: false + +Lint/DuplicateMagicComment: # new in 1.37 + Enabled: false + +Lint/DuplicateMatchPattern: # new in 1.50 + Enabled: false + +Lint/DuplicateRegexpCharacterClassElement: # new in 1.1 + Enabled: false + +Lint/EmptyBlock: # new in 1.1 + Enabled: false + +Lint/EmptyClass: # new in 1.3 + Enabled: false + +Lint/EmptyInPattern: # new in 1.16 + Enabled: false + +Lint/IncompatibleIoSelectWithFiberScheduler: # new in 1.21 + Enabled: false + +Lint/ItWithoutArgumentsInBlock: # new in 1.59 + Enabled: false + +Lint/LambdaWithoutLiteralBlock: # new in 1.8 + Enabled: false + +Lint/LiteralAssignmentInCondition: # new in 1.58 + Enabled: false + +Lint/MissingSuper: + Enabled: false + +Lint/MixedCaseRange: # new in 1.53 + Enabled: false + +Lint/NestedMethodDefinition: + Exclude: + - 'lib/puppet/pops/types/p_binary_type.rb' + - 'lib/puppet/pops/types/p_init_type.rb' + - 'lib/puppet/pops/types/p_object_type.rb' + - 'lib/puppet/pops/types/p_sem_ver_range_type.rb' + - 'lib/puppet/pops/types/p_sem_ver_type.rb' + - 'lib/puppet/pops/types/p_sensitive_type.rb' + - 'lib/puppet/pops/types/p_timespan_type.rb' + - 'lib/puppet/pops/types/p_timestamp_type.rb' + - 'lib/puppet/pops/types/p_uri_type.rb' + - 'lib/puppet/pops/types/types.rb' + - 'lib/puppet/type.rb' + +Lint/NonAtomicFileOperation: # new in 1.31 + Enabled: false + +Lint/NoReturnInBeginEndBlocks: # new in 1.2 + Enabled: false + +Lint/NumberedParameterAssignment: # new in 1.9 + Enabled: false + +Lint/OrAssignmentToConstant: # new in 1.9 + Enabled: false + +Lint/RedundantDirGlobSort: # new in 1.8 + Enabled: false + +Lint/RedundantRegexpQuantifiers: # new in 1.53 + Enabled: false + +# Unsure how the changes in portage.rb from Lint/RedundantSplatExpansion impact +# the code +Lint/RedundantSplatExpansion: + Exclude: + - 'lib/puppet/provider/package/portage.rb' + +Lint/RefinementImportMethods: # new in 1.27 + Enabled: false + +Lint/RequireRangeParentheses: # new in 1.32 + Enabled: false + +Lint/RequireRelativeSelfPath: # new in 1.22 + Enabled: false + +Lint/RescueException: + Exclude: + - 'ext/windows/service/daemon.rb' + - 'lib/puppet/configurer/fact_handler.rb' + - 'lib/puppet/generate/type.rb' + - 'lib/puppet/settings.rb' + - 'lib/puppet/transaction/resource_harness.rb' + - 
'lib/puppet/util.rb' + - 'lib/puppet/util/autoload.rb' + - 'lib/puppet/util/command_line/trollop.rb' + - 'util/rspec_grouper' + +# Configuration parameters: AllowComments, AllowNil. +Lint/SuppressedException: + Exclude: + - 'lib/puppet/application/face_base.rb' + - 'lib/puppet/ffi/windows/functions.rb' + - 'lib/puppet/forge/errors.rb' + - 'lib/puppet/functions/each.rb' + - 'lib/puppet/functions/filter.rb' + - 'lib/puppet/functions/map.rb' + - 'lib/puppet/functions/slice.rb' + - 'lib/puppet/pops/time/timespan.rb' + - 'lib/puppet/pops/types/iterable.rb' + - 'lib/puppet/pops/types/p_runtime_type.rb' + - 'lib/puppet/util/command_line.rb' + - 'lib/puppet/util/execution.rb' + - 'util/rspec_grouper' + +Lint/SymbolConversion: # new in 1.9 + Enabled: false + +Lint/ToEnumArguments: # new in 1.1 + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Lint/ToJSON: + Exclude: + - 'lib/puppet/module_tool/metadata.rb' + - 'lib/puppet/network/http/error.rb' + - 'lib/puppet/pops/serialization/json.rb' + +Lint/TripleQuotes: # new in 1.9 + Enabled: false + +Lint/UnexpectedBlockArity: # new in 1.5 + Enabled: false + +Lint/UnmodifiedReduceAccumulator: # new in 1.1 + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowUnusedKeywordArguments, IgnoreEmptyMethods, IgnoreNotImplementedMethods. +Lint/UnusedMethodArgument: + Enabled: false + +Lint/UselessRescue: # new in 1.43 + Enabled: false + +Lint/UselessRuby2Keywords: # new in 1.23 + Enabled: false + +Performance/BlockGivenWithExplicitBlock: # new in 1.9 + Enabled: false + +Performance/CollectionLiteralInLoop: # new in 1.8 + Enabled: false + +Performance/ConstantRegexp: # new in 1.9 + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Performance/Count: + Exclude: + - 'lib/puppet/confine/any.rb' + - 'lib/puppet/confine/false.rb' + - 'lib/puppet/confine/true.rb' + - 'lib/puppet/graph/relationship_graph.rb' + - 'lib/puppet/provider.rb' + +# This cop supports unsafe auto-correction (--auto-correct-all). +Performance/InefficientHashSearch: + Exclude: + - 'lib/puppet/face/node/clean.rb' + - 'lib/puppet/provider/nameservice/directoryservice.rb' + - 'lib/puppet/provider/user/directoryservice.rb' + - 'lib/puppet/resource.rb' + - 'lib/puppet/util/windows/adsi.rb' + +Performance/MethodObjectAsBlock: # new in 1.9 + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Performance/RedundantBlockCall: + Exclude: + - 'lib/puppet/application.rb' + - 'lib/puppet/context.rb' + - 'lib/puppet/file_bucket/file.rb' + - 'lib/puppet/functions/max.rb' + - 'lib/puppet/functions/min.rb' + - 'lib/puppet/gettext/stubs.rb' + - 'lib/puppet/network/http/api/server/v3.rb' + - 'lib/puppet/pal/pal_impl.rb' + - 'lib/puppet/pops/adaptable.rb' + - 'lib/puppet/pops/lookup/invocation.rb' + - 'lib/puppet/pops/model/factory.rb' + - 'lib/puppet/util.rb' + +Performance/RedundantEqualityComparisonBlock: # new in 1.10 + Enabled: false + +# This cop supports unsafe auto-correction (--auto-correct-all). +# Configuration parameters: MaxKeyValuePairs. +Performance/RedundantMerge: + Exclude: + - 'lib/puppet/x509/cert_provider.rb' + +Performance/RedundantSplitRegexpArgument: # new in 1.10 + Enabled: false + +Performance/RedundantStringChars: # new in 1.7 + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). 
+Performance/RegexpMatch: + Enabled: false + +Performance/SortReverse: # new in 1.7 + Enabled: false + +Performance/Squeeze: # new in 1.7 + Enabled: false + +Performance/StringIdentifierArgument: # new in 1.13 + Enabled: false + +Performance/StringInclude: # new in 1.7 + Enabled: false + +Performance/Sum: # new in 1.8 + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Performance/UnfreezeString: + Enabled: false + +# Configuration parameters: EnforcedStyle, AllowModifiersOnSymbols. +# SupportedStyles: inline, group +Style/AccessModifierDeclarations: + Exclude: + - 'lib/puppet/util/suidmanager.rb' + - 'lib/puppet/util/command_line/trollop.rb' + - 'lib/puppet/util/windows/monkey_patches/process.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: separated, grouped +Style/AccessorGrouping: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: prefer_alias, prefer_alias_method +Style/Alias: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: always, conditionals +Style/AndOr: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: percent_q, bare_percent +Style/BarePercentLiterals: + Exclude: + - 'lib/puppet/module_tool/metadata.rb' + - 'lib/puppet/node/environment.rb' + - 'lib/puppet/parameter/package_options.rb' + - 'lib/puppet/provider/package/dpkg.rb' + - 'lib/puppet/provider/package/gem.rb' + - 'lib/puppet/provider/package/rpm.rb' + - 'lib/puppet/provider/package/windows/package.rb' + - 'lib/puppet/settings.rb' + - 'lib/puppet/settings/base_setting.rb' + - 'lib/puppet/transaction/event.rb' + - 'lib/puppet/util/execution.rb' + +# This cop supports safe auto-correction (--auto-correct). +Style/BisectedAttrAccessor: + Exclude: + - 'lib/puppet/module.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle, ProceduralMethods, FunctionalMethods, IgnoredMethods, AllowBracesOnProceduralOneLiners, BracesRequiredMethods. +# SupportedStyles: line_count_based, semantic, braces_for_chaining, always_braces +# ProceduralMethods: benchmark, bm, bmbm, create, each_with_object, measure, new, realtime, tap, with_object +# FunctionalMethods: let, let!, subject, watch +# IgnoredMethods: lambda, proc, it +Style/BlockDelimiters: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowOnConstant. +Style/CaseEquality: + Exclude: + - 'lib/puppet/indirector/terminus.rb' + - 'lib/puppet/interface/face_collection.rb' + - 'lib/puppet/module_tool/installed_modules.rb' + - 'lib/puppet/module_tool/shared_behaviors.rb' + - 'lib/puppet/util/command_line/puppet_option_parser.rb' + - 'lib/puppet/util/log/destination.rb' + - 'lib/puppet/util/multi_match.rb' + - 'lib/puppet/util/rdoc/generators/puppet_generator.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: nested, compact +Style/ClassAndModuleChildren: + Enabled: false + +Style/ClassVars: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/ColonMethodCall: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). 
+# Configuration parameters: EnforcedStyle, AllowInnerBackticks. +# SupportedStyles: backticks, percent_x, mixed +Style/CommandLiteral: + Exclude: + - 'ext/windows/service/daemon.rb' + - 'lib/puppet/provider/nameservice/directoryservice.rb' + - 'lib/puppet/util/reference.rb' + - 'lib/puppet/util/terminal.rb' + - 'lib/puppet/util/windows/process.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: Keywords, RequireColon. +# Keywords: TODO, FIXME, OPTIMIZE, HACK, REVIEW, NOTE +Style/CommentAnnotation: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/CommentedKeyword: + Exclude: + - 'lib/puppet/util/command_line/trollop.rb' + - 'lib/puppet/util/rdoc/generators/puppet_generator.rb' + - 'lib/puppet/util/rdoc/generators/template/puppet/puppet.rb' + - 'lib/puppet/util/rpm_compare.rb' + - 'lib/puppet/util/windows/service.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle, SingleLineConditionsOnly, IncludeTernaryExpressions. +# SupportedStyles: assign_to_condition, assign_inside_condition +Style/ConditionalAssignment: + Enabled: false + +# Enabling this would require reworking Puppet's use of DateTime's #rfc2822, #httptime, and _strptime +Style/DateTime: + Enabled: false + +# Configuration parameters: AllowedConstants. +Style/Documentation: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: allowed_in_returns, forbidden +Style/DoubleNegation: + Exclude: + - 'lib/puppet/application/lookup.rb' + - 'lib/puppet/confine/boolean.rb' + - 'lib/puppet/http/service/compiler.rb' + - 'lib/puppet/parser/functions/fqdn_rand.rb' + - 'lib/puppet/pops/evaluator/evaluator_impl.rb' + - 'lib/puppet/pops/issue_reporter.rb' + - 'lib/puppet/pops/types/p_runtime_type.rb' + - 'lib/puppet/provider/package/apt.rb' + - 'lib/puppet/resource/status.rb' + - 'lib/puppet/type.rb' + - 'lib/puppet/util/feature.rb' + - 'lib/puppet/util/windows/adsi.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: empty, nil, both +Style/EmptyElse: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/EmptyLiteral: + Exclude: + - 'lib/puppet/parser/scope.rb' + - 'lib/puppet/pops/puppet_stack.rb' + - 'lib/puppet/pops/visitor.rb' + - 'lib/puppet/provider/package/portupgrade.rb' + - 'lib/puppet/provider/service/launchd.rb' + - 'lib/puppet/provider/user/directoryservice.rb' + - 'lib/puppet/type.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: compact, expanded +Style/EmptyMethod: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/Encoding: + Exclude: + - 'lib/puppet/face/module/install.rb' + - 'lib/puppet/face/module/list.rb' + - 'lib/puppet/face/module/upgrade.rb' + - 'lib/puppet/ffi/windows/structs.rb' + - 'lib/puppet/interface/action.rb' + - 'lib/puppet/module_tool.rb' + - 'lib/puppet/type.rb' + - 'lib/puppet/type/file.rb' + - 'lib/puppet/type/package.rb' + - 'lib/puppet/util/windows/service.rb' + +# This cop supports safe auto-correction (--auto-correct). +Style/ExplicitBlockArgument: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. 
+# SupportedStyles: format, sprintf, percent +Style/FormatString: + Enabled: false + +# Configuration parameters: EnforcedStyle, MaxUnannotatedPlaceholdersAllowed, IgnoredMethods. +# SupportedStyles: annotated, template, unannotated +Style/FormatStringToken: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Applying the safe auto-correct results in util_spec failures. +Style/GlobalStdStream: + Exclude: + - 'lib/puppet/application/apply.rb' + - 'lib/puppet/application/script.rb' + - 'lib/puppet/face/epp.rb' + - 'lib/puppet/face/parser.rb' + - 'lib/puppet/util.rb' + - 'lib/puppet/util/command_line.rb' + - 'lib/puppet/util/windows/daemon.rb' + +# Configuration parameters: AllowedVariables. +Style/GlobalVars: + Exclude: + - 'lib/puppet/external/dot.rb' + - 'lib/puppet/test/test_helper.rb' + - 'lib/puppet/util/logging.rb' + +# Configuration parameters: MinBodyLength, AllowConsecutiveConditionals. +Style/GuardClause: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle, EnforcedShorthandSyntax, UseHashRocketsWithSymbolValues, PreferHashRocketsForNonAlnumEndingSymbols. +# SupportedStyles: ruby19, hash_rockets, no_mixed_keys, ruby19_no_mixed_keys +# SupportedShorthandSyntax: always, never, either +Style/HashSyntax: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/IfUnlessModifier: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# The auto-correct function introduces testing errors in the evaluating_parser_spec +Style/IfWithSemicolon: + Exclude: + - 'lib/puppet/pops/parser/evaluating_parser.rb' + +# This cop requires significant changes to testing, will require its own effort +Style/ImplicitRuntimeError: + Enabled: false + +# This cop supports unsafe auto-correction (--auto-correct-all). +# Configuration parameters: InverseMethods, InverseBlocks. +Style/InverseMethods: + Exclude: + - 'lib/puppet/face/catalog/select.rb' + - 'lib/puppet/graph/relationship_graph.rb' + - 'lib/puppet/parser/compiler.rb' + - 'lib/puppet/pops/loader/loader_paths.rb' + - 'lib/puppet/pops/types/ruby_generator.rb' + - 'lib/puppet/pops/validation.rb' + - 'lib/puppet/pops/validation/checker4_0.rb' + - 'lib/puppet/provider/package/pkg.rb' + - 'lib/puppet/provider/user/user_role_add.rb' + - 'lib/puppet/reference/providers.rb' + - 'lib/puppet/type/file.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: IgnoredMethods. +Style/MethodCallWithoutArgsParentheses: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. 
+# SupportedStyles: require_parentheses, require_no_parentheses, require_no_parentheses_except_multiline +Style/MethodDefParentheses: + Exclude: + - 'lib/puppet/pops/evaluator/evaluator_impl.rb' + - 'lib/puppet/pops/evaluator/relationship_operator.rb' + - 'lib/puppet/pops/issues.rb' + - 'lib/puppet/pops/label_provider.rb' + - 'lib/puppet/pops/model/factory.rb' + - 'lib/puppet/pops/model/model_label_provider.rb' + - 'lib/puppet/pops/model/model_tree_dumper.rb' + - 'lib/puppet/pops/model/tree_dumper.rb' + - 'lib/puppet/pops/parser/interpolation_support.rb' + - 'lib/puppet/pops/parser/parser_support.rb' + - 'lib/puppet/pops/utils.rb' + - 'lib/puppet/pops/validation.rb' + - 'lib/puppet/pops/validation/validator_factory_4_0.rb' + - 'lib/puppet/util/command_line/trollop.rb' + +Style/MissingRespondToMissing: + Exclude: + - 'lib/puppet/module_tool/metadata.rb' + - 'lib/puppet/parser/scope.rb' + - 'lib/puppet/settings/alias_setting.rb' + - 'lib/puppet/util/command_line/trollop.rb' + - 'lib/puppet/util/feature.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: separated, grouped +Style/MixinGrouping: + Exclude: + - 'lib/puppet/util/rdoc/generators/puppet_generator.rb' + +Style/MultilineBlockChain: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/MultilineIfModifier: + Exclude: + - 'lib/puppet/face/config.rb' + - 'lib/puppet/module_tool/applications/installer.rb' + - 'lib/puppet/module_tool/shared_behaviors.rb' + - 'lib/puppet/network/http/api/indirected_routes.rb' + - 'lib/puppet/pops/evaluator/access_operator.rb' + - 'lib/puppet/pops/loader/task_instantiator.rb' + - 'lib/puppet/pops/model/model_tree_dumper.rb' + - 'lib/puppet/provider/package/windows.rb' + - 'lib/puppet/provider/service/upstart.rb' + - 'lib/puppet/resource/catalog.rb' + - 'lib/puppet/type/file/content.rb' + - 'lib/puppet/type/user.rb' + - 'lib/puppet/util/execution.rb' + - 'lib/puppet/util/windows/com.rb' + +# This cop supports safe auto-correction (--auto-correct). +Style/MultilineIfThen: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: keyword, braces +Style/MultilineMemoization: + Exclude: + - 'lib/puppet/application.rb' + - 'lib/puppet/pops/types/types.rb' + - 'lib/puppet/type.rb' + +# This cop supports safe auto-correction (--auto-correct). +Style/MultilineTernaryOperator: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/MultilineWhenThen: + Exclude: + - 'lib/puppet/graph/simple_graph.rb' + - 'lib/puppet/interface/documentation.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowMethodComparison. +Style/MultipleComparison: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: literals, strict +Style/MutableConstant: + Enabled: false + +# This cop supports unsafe auto-correction (--auto-correct-all). +# Configuration parameters: EnforcedStyle, IgnoredMethods. +# SupportedStyles: predicate, comparison +Style/NumericPredicate: + Enabled: false + +# Configuration parameters: AllowedMethods. +# AllowedMethods: respond_to_missing? +Style/OptionalBooleanParameter: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. 
+# SupportedStyles: implicit, explicit +Style/RescueStandardError: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: ConvertCodeThatCanStartToReturnNil, AllowedMethods, MaxChainLength. +# AllowedMethods: present?, blank?, presence, try, try! +Style/SafeNavigation: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowAsExpressionSeparator. +Style/Semicolon: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: only_raise, only_fail, semantic +Style/SignalException: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowIfMethodIsEmpty. +Style/SingleLineMethods: + Exclude: + - 'lib/puppet/ffi/windows/api_types.rb' + - 'lib/puppet/file_system/memory_file.rb' + - 'lib/puppet/graph/simple_graph.rb' + - 'lib/puppet/interface/action.rb' + - 'lib/puppet/parser/resource.rb' + - 'lib/puppet/pops/model/factory.rb' + - 'lib/puppet/pops/model/model_label_provider.rb' + - 'lib/puppet/pops/types/type_formatter.rb' + - 'lib/puppet/provider/nameservice/directoryservice.rb' + - 'lib/puppet/provider/service/freebsd.rb' + - 'lib/puppet/type.rb' + - 'lib/puppet/util/command_line/trollop.rb' + - 'lib/puppet/util/metaid.rb' + - 'lib/puppet/util/windows/com.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowModifier. +Style/SoleNestedConditional: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/StderrPuts: + Exclude: + - 'bin/puppet' + - 'lib/puppet/application/agent.rb' + - 'lib/puppet/application/apply.rb' + - 'lib/puppet/application/describe.rb' + - 'lib/puppet/application/device.rb' + - 'lib/puppet/application/face_base.rb' + - 'lib/puppet/application/filebucket.rb' + - 'lib/puppet/application/script.rb' + - 'lib/puppet/face/config.rb' + - 'lib/puppet/reference/type.rb' + - 'lib/puppet/util.rb' + - 'lib/puppet/util/command_line/trollop.rb' + - 'lib/puppet/util/rdoc/generators/puppet_generator.rb' + - 'lib/puppet/util/reference.rb' + +# This cop supports unsafe auto-correction (--auto-correct-all). +# Configuration parameters: Mode. +Style/StringConcatenation: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle, ConsistentQuotesInMultiline. +# SupportedStyles: single_quotes, double_quotes +Style/StringLiterals: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle, MinSize. +# SupportedStyles: percent, brackets +Style/SymbolArray: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyle, AllowSafeAssignment. +# SupportedStyles: require_parentheses, require_no_parentheses, require_parentheses_when_complex +Style/TernaryParentheses: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: EnforcedStyleForMultiline. +# SupportedStylesForMultiline: comma, consistent_comma, no_comma +Style/TrailingCommaInHashLiteral: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowNamedUnderscoreVariables. 
+Style/TrailingUnderscoreVariable: + Exclude: + - 'lib/puppet/indirector/file_server.rb' + - 'lib/puppet/module/plan.rb' + - 'lib/puppet/pops/evaluator/closure.rb' + - 'lib/puppet/pops/parser/parser_support.rb' + - 'lib/puppet/provider/group/windows_adsi.rb' + - 'lib/puppet/provider/package/zypper.rb' + - 'lib/puppet/provider/service/launchd.rb' + - 'lib/puppet/provider/user/pw.rb' + - 'lib/puppet/provider/user/windows_adsi.rb' + - 'lib/puppet/resource/type.rb' + - 'lib/puppet/util/windows/registry.rb' + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: ExactNameMatch, AllowPredicates, AllowDSLWriters, IgnoreClassMethods, AllowedMethods. +# AllowedMethods: to_ary, to_a, to_c, to_enum, to_h, to_hash, to_i, to_int, to_io, to_open, to_path, to_proc, to_r, to_regexp, to_str, to_s, to_sym +Style/TrivialAccessors: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/WhenThen: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +Style/WhileUntilModifier: + Exclude: + - 'lib/puppet/parser/scope.rb' + - 'lib/puppet/pops/parser/interpolation_support.rb' + - 'lib/puppet/pops/parser/locator.rb' + - 'lib/puppet/pops/parser/pn_parser.rb' + - 'lib/puppet/pops/types/p_object_type.rb' + - 'lib/puppet/util/windows/process.rb' + +# This cop supports unsafe auto-correction (--auto-correct-all). +# Configuration parameters: EnforcedStyle. +# SupportedStyles: forbid_for_all_comparison_operators, forbid_for_equality_operators_only, require_for_all_comparison_operators, require_for_equality_operators_only +Style/YodaCondition: + Enabled: false + +# This cop supports unsafe auto-correction (--auto-correct-all). +Style/ZeroLengthPredicate: + Enabled: false + +# This cop supports safe auto-correction (--auto-correct). +# Configuration parameters: AllowHeredoc, AllowURI, URISchemes, IgnoreCopDirectives, AllowedPatterns, IgnoredPatterns. +# URISchemes: http, https +Layout/LineLength: + Max: 582 diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 29373eb7a25..00000000000 --- a/.travis.yml +++ /dev/null @@ -1,25 +0,0 @@ -language: ruby -sudo: false -bundler_args: --without development -script: - - "bundle exec rake $CHECK" -notifications: - email: false -rvm: - - 2.2.0 - - 2.1.5 - - 2.0.0 - - 1.9.3 - -env: - - "CHECK=parallel:spec\\[2\\]" - - "CHECK=rubocop" - -matrix: - exclude: - - rvm: 2.2.0 - env: "CHECK=rubocop" - - rvm: 2.0.0 - env: "CHECK=rubocop" - - rvm: 1.9.3 - env: "CHECK=rubocop" diff --git a/.yardopts b/.yardopts index fab728163db..61524c2a2a1 100644 --- a/.yardopts +++ b/.yardopts @@ -14,4 +14,5 @@ --api public --api private --hide-void-return +--exclude lib/puppet/vendor/ lib/**/*.rb diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 00000000000..88d2b6c419f --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,11 @@ +# defaults +* @puppetlabs/phoenix + +# PAL +/lib/puppet/pal @puppetlabs/bolt + +# puppet module +/lib/puppet/application/module.rb @puppetlabs/modules +/lib/puppet/face/module @puppetlabs/modules +/lib/puppet/forge @puppetlabs/modules +/lib/puppet/module_tool @puppetlabs/modules diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000000..f947ee4f376 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,70 @@ +# Community Guidelines and Code of Conduct + +We want to keep the Puppet communities awesome, and we need your help to keep it +that way. 
While we have specific guidelines for various tools (see links below), +in general, you should: + +* **Be nice**: Be courteous, respectful and polite to fellow community members. No + offensive comments related to gender, gender identity or expression, sexual + orientation, disability, physical appearance, body size, race, religion; no + sexual images in public spaces, real or implied violence, intimidation, + oppression, stalking, following, harassing photography or recording, sustained + disruption of talks or other events, inappropriate physical contact, doxxing, or + unwelcome sexual attention will be tolerated. We like nice people way better + than mean ones! +* **Encourage diversity and participation**: Make everyone in our community feel + welcome, regardless of their background, and do everything possible to encourage + participation in our community. +* **Focus on constructive criticisms**: When offering suggestions, whether in online + discussions or as comments on a pull request, you should always use welcoming + and inclusive language. Be respectful of differing viewpoints and the fact that + others may not have the same experiences you do. Offer suggestions for + improvement, rather than focusing on mistakes. When others critique your work or + ideas, gracefully accept the criticisms and default to assuming good intentions. +* **Keep it legal**: Basically, don't get us in trouble. Share only content that you + own, do not share private or sensitive information, and don't break the law. +* **Stay on topic**: Keep conversation in a thread on topic, whether that's a pull + request or a Slack conversation or anything else. Make sure that you are posting + to the correct channel and remember that nobody likes spam. + +## Guideline violations --- 3 strikes method + +The point of this section is not to find opportunities to punish people, but we +do need a fair way to deal with people who do harm to our community. Extreme +violations of a threatening, abusive, destructive, or illegal nature will be +addressed immediately and are not subject to 3 strikes. + +* First occurrence: We'll give you a friendly, but public, reminder that the + behavior is inappropriate according to our guidelines. +* Second occurrence: We'll send you a private message with a warning that any + additional violations will result in removal from the community. +* Third occurrence: Depending on the violation, we might need to delete or ban + your account. + +Notes: + +* Obvious spammers are banned on first occurrence. If we don’t do this, we’ll + have spam all over the place. +* Violations are forgiven after 6 months of good behavior, and we won’t hold a grudge. +* People who are committing minor formatting / style infractions will get some + education, rather than hammering them in the 3 strikes process. + +Contact conduct@puppet.com to report abuse or appeal violations. This email list +goes to Kara Sowles (kara at puppet.com) and Katie Abbott (katie dot abbott at +puppet.com). In the case of appeals, we know that mistakes happen, and we’ll +work with you to come up with a fair solution if there has been a +misunderstanding. + +## Full text + +See our [full community guidelines](https://puppet.com/community/community-guidelines), +covering Slack, IRC, events and other forms of community participation. 
+ +## Credits + +Credit to [01.org](https://01.org/community/participation-guidelines) and +[meego.com](http://wiki.meego.com/Community_guidelines), since they formed the +starting point for many of these guidelines. + +The Event Code of Conduct is based on the [example policy from the Geek Feminism wiki](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment), +created by the Ada Initiative and other volunteers. The [PyCon Code of Conduct](https://github.com/python/pycon-code-of-conduct) also served as inspiration. diff --git a/COMMITTERS.md b/COMMITTERS.md deleted file mode 100644 index a6288c4c0f8..00000000000 --- a/COMMITTERS.md +++ /dev/null @@ -1,244 +0,0 @@ -Committing changes to Puppet -==== - -We would like to make it easier for community members to contribute to Puppet -using pull requests, even if it makes the task of reviewing and committing -these changes a little harder. Pull requests are only ever based on a single -branch, however, we maintain more than one active branch. As a result -contributors should target their changes at the master branch. This makes the -process of contributing a little easier for the contributor since they don't -need to concern themselves with the question, "What branch do I base my changes -on?" This is already called out in the [CONTRIBUTING.md](http://goo.gl/XRH2J). - -Therefore, it is the responsibility of the committer to re-base the change set -on the appropriate branch which should receive the contribution. - -It is also the responsibility of the committer to review the change set in an -effort to make sure the end users must opt-in to new behavior that is -incompatible with previous behavior. We employ the use of [feature -flags](http://stackoverflow.com/questions/7707383/what-is-a-feature-flag) as -the primary way to achieve this user opt-in behavior. Finally, it is the -responsibility of the committer to make sure the `master` and `stable` branches -are both clean and working at all times. Clean means that dead code is not -allowed, everything needs to be usable in some manner at all points in time. -Stable is not an indication of the build status, but rather an expression of -our intent that the `stable` branch does not receive new functionality. - -The rest of this document addresses the concerns of the committer. This -document will help guide the committer decide which branch to base, or re-base -a contribution on top of. This document also describes our branch management -strategy, which is closely related to the decision of what branch to commit -changes into. - -Terminology -==== - -Many of these terms have more than one meaning. For the purposes of this -document, the following terms refer to specific things. - -**contributor** - A person who makes a change to Puppet and submits a change -set in the form of a pull request. - -**change set** - A set of discrete patches which combined together form a -contribution. A change set takes the form of Git commits and is submitted to -Puppet in the form of a pull request. - -**committer** - A person responsible for reviewing a pull request and then -making the decision what base branch to merge the change set into. - -**base branch** - A branch in Git that contains an active history of changes -and will eventually be released using semantic version guidelines. The branch -named `master` will always exist as a base branch. The other base branches are -`stable`, and `security` described below. 
- -**master branch** - The branch where new functionality that are not bug fixes -is merged. - -**stable branch** - The branch where bug fixes against the latest release or -release candidate are merged. - -**security** - Where critical security fixes are merged. These change sets -will then be merged into release branches independently from one another. (i.e. -no merging up). Please do not submit pull requests against the security branch -and instead report all security related issues to security@puppetlabs.com as -per our security policy published at -[https://puppetlabs.com/security/](https://puppetlabs.com/security/). - -Committer Guide -==== - -This section provides a guide to follow while committing change sets to Puppet -base branches. - -How to decide what release(s) should be patched ---- - -This section provides a guide to help a committer decide the specific base -branch that a change set should be merged into. - -The latest minor release of a major release is the only base branch that should -be patched. These patches will be merged into `master` if they contain new -functionality. They will be merged into `stable` and `master` if they fix a -critical bug. Older minor releases in a major release do not get patched. - -Before the switch to [semantic versions](http://semver.org/) committers did not -have to think about the difference between minor and major releases. -Committing to the latest minor release of a major release is a policy intended -to limit the number of active base branches that must be managed. - -Security patches are handled as a special case. Security patches may be -applied to earlier minor releases of a major release, but the patches should -first be merged into the `security` branch. Security patches should be merged -by Puppet Labs staff members. Pull requests should not be submitted with the -security branch as the base branch. Please send all security related -information or patches to security@puppetlabs.com as per our [Security -Policy](https://puppetlabs.com/security/). - -The CI systems are configured to run against `master` and `stable`. Over time, -these branches will refer to different versions, but their name will remain -fixed to avoid having to update CI jobs and tasks as new versions are released. - -How to commit a change set to multiple base branches ---- - -A change set may apply to multiple branches, for example a bug fix should be -applied to the stable release and the development branch. In this situation -the change set needs to be committed to multiple base branches. This section -provides a guide for how to merge patches into these branches, e.g. -`stable` is patched, how should the changes be applied to `master`? - -First, rebase the change set onto the `stable` branch. Next, merge the change -set into the `stable` branch using a merge commit. Once merged into `stable`, -merge the same change set into `master` without doing a rebase as to preserve -the commit identifiers. This merge strategy follows the [git -flow](http://nvie.com/posts/a-successful-git-branching-model/) model. Both of -these change set merges should have a merge commit which makes it much easier -to track a set of commits as a logical change set through the history of a -branch. Merge commits should be created using the `--no-ff --log` git merge -options. - -Any merge conflicts should be resolved using the merge commit in order to -preserve the commit identifiers for each individual change. 
This ensures `git -branch --contains` will accurately report all of the base branches which -contain a specific patch. - -Using this strategy, the stable branch need not be reset. Both `master` and -`stable` have infinite lifetimes. Patch versions, also known as bug fix -releases, will be tagged and released directly from the `stable` branch. Major -and minor versions, also known as feature releases, will be tagged and released -directly from the `master` branch. Upon release of a new major or minor -version all of the changes in the `master` branch will be merged into the -`stable` branch. - -Code review checklist ---- - -This section aims to provide a checklist of things to look for when reviewing a -pull request and determining if the change set should be merged into a base -branch: - - * All tests pass - * Are there any platform gotchas? (Does a change make an assumption about - platform specific behavior that is incompatible with other platforms? e.g. - Windows paths vs. POSIX paths.) - * Is the change backwards compatible? (It should be) - * Are there YARD docs for API changes? - * Does the change set also require documentation changes? If so is the - documentation being kept up to date? - * Does the change set include clean code? (software code that is formatted - correctly and in an organized manner so that another coder can easily read - or modify it.) HINT: `git diff master --check` - * Does the change set conform to the contributing guide? - -Commit citizen guidelines: ---- - -This section aims to provide guidelines for being a good commit citizen by -paying attention to our automated build tools. - - * Don’t push on a broken build. (A broken build is defined as a failing job - in the [Puppet FOSS](https://jenkins.puppetlabs.com/view/Puppet%20FOSS/) - page.) - * Watch the build until your changes have gone through green - * Update the ticket status and target version. The target version field in - our issue tracker should be updated to be the next release of Puppet. For - example, if the most recent release of Puppet is 3.1.1 and you merge a - backwards compatible change set into master, then the target version should - be 3.2.0 in the issue tracker.) - * Ensure the pull request is closed (Hint: amend your merge commit to contain - the string `closes #123` where 123 is the pull request number and github - will automatically close the pull request when the branch is pushed.) - -Example Procedure -==== - -This section helps a committer rebase a contribution onto an earlier base -branch, then merge into the base branch and up through all active base -branches. - -Suppose a contributor submits a pull request based on master. The change set -fixes a bug reported against Puppet 3.1.1 which is the most recently released -version of Puppet. - -In this example the committer should rebase the change set onto the `stable` -branch since this is a bug rather than new functionality. - -First, the committer pulls down the branch using the `hub` gem. This tool -automates the process of adding the remote repository and creating a local -branch to track the remote branch. - - $ hub checkout https://github.com/puppetlabs/puppet/pull/1234 - Branch jeffmccune-fix_foo_error set up to track remote branch fix_foo_error from jeffmccune. - Switched to a new branch 'jeffmccune-fix_foo_error' - -At this point the topic branch is a descendant of master, but we want it to -descend from `stable`. The committer rebases the change set onto `stable`. 
- - $ git branch bug/stable/fix_foo_error - $ git rebase --onto stable master bug/stable/fix_foo_error - First, rewinding head to replay your work on top of it... - Applying: (#23456) Fix FooError that always bites users in 3.1.1 - -The `git rebase` command may be interpreted as, "First, check out the branch -named `bug/stable/fix_foo_error`, then take the changes that were previously -based on `master` and re-base them onto `stable`. - -Now that we have a topic branch containing the change set based on the `stable` -release branch, the committer merges in: - - $ git checkout stable - Switched to branch 'stable' - $ git merge --no-ff --log bug/stable/fix_foo_error - Merge made by the 'recursive' strategy. - foo | 0 - 1 file changed, 0 insertions(+), 0 deletions(-) - create mode 100644 foo - -Once merged into the first base branch, the committer merges the `stable` -branch into `master`, being careful to preserve the same commit identifiers. - - $ git checkout master - Switched to branch 'master' - $ git merge --no-ff --log stable - Merge made by the 'recursive' strategy. - foo | 0 - 1 file changed, 0 insertions(+), 0 deletions(-) - create mode 100644 foo - -Once the change set has been merged into one base branch, the change set should -not be modified in order to keep the history clean, avoid "double" commits, and -preserve the usefulness of `git branch --contains`. If there are any merge -conflicts, they are to be resolved in the merge commit itself and not by -re-writing (rebasing) the patches for one base branch, but not another. - -Once the change set has been merged into `stable` and into `master`, the -committer pushes. Please note, the checklist should be complete at this point. -It's helpful to make sure your local branches are up to date to avoid one of -the branches failing to fast forward while the other succeeds. Both the -`stable` and `master` branches are being pushed at the same time. - - $ git push puppetlabs master:master stable:stable - -That's it! The committer then updates the pull request, updates the issue in -our issue tracker, and keeps an eye on the [build -status](http://jenkins.puppetlabs.com). diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 633fdee38d9..00000000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,116 +0,0 @@ -# How to contribute - -Third-party patches are essential for keeping puppet great. We simply can't -access the huge number of platforms and myriad configurations for running -puppet. We want to keep it as easy as possible to contribute changes that -get things working in your environment. There are a few guidelines that we -need contributors to follow so that we can have a chance of keeping on -top of things. - -## Puppet Core vs Modules - -New functionality is typically directed toward modules to provide a slimmer -Puppet Core, reducing its surface area, and to allow greater freedom for -module maintainers to ship releases at their own cadence, rather than -being held to the cadence of Puppet releases. With Puppet 4's "all in one" -packaging, a list of modules at specific versions will be packaged with the -core so that popular types and providers will still be available as part of -the "out of the box" experience. - -Generally, new types and new OS-specific providers for existing types should -be added in modules. Exceptions would be things like new cross-OS providers -and updates to existing core types. 
- -If you are unsure of whether your contribution should be implemented as a -module or part of Puppet Core, you may visit -[#puppet-dev on Freenode IRC](http://freenode.net) or ask on the -[puppet-dev mailing list](https://groups.google.com/forum/#!forum/puppet-dev) -for advice. - -## Getting Started - -* Make sure you have a [Jira account](http://tickets.puppetlabs.com) -* Make sure you have a [GitHub account](https://github.com/signup/free) -* Submit a ticket for your issue, assuming one does not already exist. - * Clearly describe the issue including steps to reproduce when it is a bug. - * Make sure you fill in the earliest version that you know has the issue. -* Fork the repository on GitHub - -## Making Changes - -* Create a topic branch from where you want to base your work. - * This is usually the master branch. - * Only target release branches if you are certain your fix must be on that - branch. - * To quickly create a topic branch based on master; `git checkout -b - fix/master/my_contribution master`. Please avoid working directly on the - `master` branch. -* Make commits of logical units. -* Check for unnecessary whitespace with `git diff --check` before committing. -* Make sure your commit messages are in the proper format. - -```` - (PUP-1234) Make the example in CONTRIBUTING imperative and concrete - - Without this patch applied the example commit message in the CONTRIBUTING - document is not a concrete example. This is a problem because the - contributor is left to imagine what the commit message should look like - based on a description rather than an example. This patch fixes the - problem by making the example concrete and imperative. - - The first line is a real life imperative statement with a ticket number - from our issue tracker. The body describes the behavior without the patch, - why this is a problem, and how the patch fixes the problem when applied. -```` - -* Make sure you have added the necessary tests for your changes. -* Run _all_ the tests to assure nothing else was accidentally broken. - -## Making Trivial Changes - -### Documentation - -For changes of a trivial nature to comments and documentation, it is not -always necessary to create a new ticket in Jira. In this case, it is -appropriate to start the first line of a commit with '(doc)' instead of -a ticket number. - -```` - (doc) Add documentation commit example to CONTRIBUTING - - There is no example for contributing a documentation commit - to the Puppet repository. This is a problem because the contributor - is left to assume how a commit of this nature may appear. - - The first line is a real life imperative statement with '(doc)' in - place of what would have been the ticket number in a - non-documentation related commit. The body describes the nature of - the new documentation or comments added. -```` - -## Submitting Changes - -* Sign the [Contributor License Agreement](http://links.puppetlabs.com/cla). -* Push your changes to a topic branch in your fork of the repository. -* Submit a pull request to the repository in the puppetlabs organization. -* Update your Jira ticket to mark that you have submitted code and are ready for it to be reviewed (Status: Ready for Merge). - * Include a link to the pull request in the ticket. -* The core team looks at Pull Requests on a regular basis in a weekly triage - meeting that we hold in a public Google Hangout. The hangout is announced in - the weekly status updates that are sent to the puppet-dev list. 
Notes are - posted to the [Puppet Community community-triage - repo](https://github.com/puppet-community/community-triage/tree/master/core/notes) - and include a link to a YouTube recording of the hangout. -* After feedback has been given we expect responses within two weeks. After two - weeks we may close the pull request if it isn't showing any activity. - -# Additional Resources - -* [Puppet Labs community guildelines](http://docs.puppetlabs.com/community/community_guidelines.html) -* [Bug tracker (Jira)](http://tickets.puppetlabs.com) -* [Contributor License Agreement](http://links.puppetlabs.com/cla) -* [General GitHub documentation](http://help.github.com/) -* [GitHub pull request documentation](http://help.github.com/send-pull-requests/) -* #puppet-dev IRC channel on freenode.org ([Archive](https://botbot.me/freenode/puppet-dev/)) -* [puppet-dev mailing list](https://groups.google.com/forum/#!forum/puppet-dev) -* [Community PR Triage notes](https://github.com/puppet-community/community-triage/tree/master/core/notes) diff --git a/Gemfile b/Gemfile index 55c0f23941c..366b326c390 100644 --- a/Gemfile +++ b/Gemfile @@ -1,93 +1,85 @@ source ENV['GEM_SOURCE'] || "https://rubygems.org" +gemspec + def location_for(place, fake_version = nil) - if place =~ /^(git[:@][^#]*)#(.*)/ - [fake_version, { :git => $1, :branch => $2, :require => false }].compact - elsif place =~ /^file:\/\/(.*)/ - ['>= 0', { :path => File.expand_path($1), :require => false }] + if place.is_a?(String) && place =~ /^((?:git[:@]|https:)[^#]*)#(.*)/ + [fake_version, { git: $1, branch: $2, require: false }].compact + elsif place.is_a?(String) && place =~ /^file:\/\/(.*)/ + ['>= 0', { path: File.expand_path($1), require: false }] else - [place, { :require => false }] + [place, { require: false }] end end -# C Ruby (MRI) or Rubinius, but NOT Windows -platforms :ruby do - gem 'pry', :group => :development - gem 'yard', :group => :development - gem 'redcarpet', '~> 2.0', :group => :development - gem "racc", "1.4.9", :group => :development - - # To enable the augeas feature, use this gem. - # Note that it is a native gem, so the augeas headers/libs - # are neeed. - #gem 'ruby-augeas', :group => :development -end - -gem "puppet", :path => File.dirname(__FILE__), :require => false -gem "facter", *location_for(ENV['FACTER_LOCATION'] || ['> 2.0', '< 4']) -gem "hiera", *location_for(ENV['HIERA_LOCATION'] || ['>= 2.0', '< 3']) -gem "rake", "10.1.1", :require => false - -group(:development, :test) do - gem "rspec", "~> 3.1", :require => false - gem "rspec-its", "~> 1.1", :require => false - gem "rspec-collection_matchers", "~> 1.1", :require => false - gem "rspec-legacy_formatters", "~> 1.0", :require => false +# Make sure these gem requirements are in sync with the gempspec. Specifically, +# the runtime_dependencies in puppet.gemspec match the runtime dependencies here +# (like facter, semantic_puppet, and puppet-resource_api) - # Mocha is not compatible across minor version changes; because of this only - # versions matching ~> 0.10.5 are supported. All other versions are unsupported - # and can be expected to fail. 
- gem "mocha", "~> 0.10.5", :require => false +gem "facter", *location_for(ENV['FACTER_LOCATION'] || ["~> 4.3"]) +gem "semantic_puppet", *location_for(ENV['SEMANTIC_PUPPET_LOCATION'] || ["~> 1.0"]) +gem "puppet-resource_api", *location_for(ENV['RESOURCE_API_LOCATION'] || ["~> 1.5"]) - gem "yarjuf", "~> 2.0" - - # json-schema does not support windows, so omit it from the platforms list - # json-schema uses multi_json, but chokes with multi_json 1.7.9, so prefer 1.7.7 - gem "multi_json", "1.7.7", :require => false, :platforms => [:ruby, :jruby] - gem "json-schema", "2.1.1", :require => false, :platforms => [:ruby, :jruby] +group(:features) do + gem 'diff-lcs', '~> 1.3', require: false + gem "hiera", *location_for(ENV['HIERA_LOCATION']) if ENV.has_key?('HIERA_LOCATION') + gem 'hiera-eyaml', *location_for(ENV['HIERA_EYAML_LOCATION']) + gem 'hocon', '~> 1.0', require: false + # requires native libshadow headers/libs + #gem 'ruby-shadow', '~> 2.5', require: false, platforms: [:ruby] + gem 'minitar', '~> 0.9', require: false + gem 'msgpack', '~> 1.2', require: false + gem 'rdoc', ['~> 6.0', '< 6.4.0'], require: false, platforms: [:ruby] + # requires native augeas headers/libs + # gem 'ruby-augeas', require: false, platforms: [:ruby] + # requires native ldap headers/libs + # gem 'ruby-ldap', '~> 0.9', require: false, platforms: [:ruby] + gem 'puppetserver-ca', '~> 2.0', require: false + gem 'syslog', '~> 0.1.1', require: false, platforms: [:ruby] + gem 'CFPropertyList', ['>= 3.0.6', '< 4'], require: false +end - gem "rubocop", "~> 0.26.1", :platforms => [:ruby] +group(:test) do + # 1.16.0 - 1.16.2 are broken on Windows + gem 'ffi', '>= 1.15.5', '< 1.17.0', '!= 1.16.0', '!= 1.16.1', '!= 1.16.2', require: false + gem "json-schema", "~> 2.0", require: false + gem "racc", "1.5.2", require: false + gem "rake", *location_for(ENV['RAKE_LOCATION'] || '~> 13.0') + gem "rspec", "~> 3.1", require: false + gem "rspec-expectations", ["~> 3.9", "!= 3.9.3"] + gem "rspec-its", "~> 1.1", require: false + gem 'vcr', '~> 6.1', require: false + gem 'webmock', '~> 3.0', require: false + gem 'webrick', '~> 1.7', require: false + gem 'yard', require: false - gem 'rdoc', "~> 4.1", :platforms => [:ruby] + gem 'rubocop', '~> 1.0', require: false, platforms: [:ruby] + gem 'rubocop-i18n', '~> 3.0', require: false, platforms: [:ruby] + gem 'rubocop-performance', '~> 1.0', require: false, platforms: [:ruby] + gem 'rubocop-rake', '~> 0.6', require: false, platforms: [:ruby] + gem 'rubocop-rspec', '~> 2.0', require: false, platforms: [:ruby] end -group(:development) do +group(:development, optional: true) do + gem 'memory_profiler', require: false, platforms: [:mri] + gem 'pry', require: false, platforms: [:ruby] if RUBY_PLATFORM != 'java' - gem 'ruby-prof', :require => false + gem 'ruby-prof', '>= 0.16.0', require: false end end -group(:extra) do - gem "rack", "~> 1.4", :require => false - gem "net-ssh", '~> 2.1', :require => false - gem "puppetlabs_spec_helper", :require => false - gem "tzinfo", :require => false - case RUBY_PLATFORM - when 'java' - gem "msgpack-jruby", :require => false - else - gem "msgpack", :require => false - end +group(:packaging) do + gem 'packaging', *location_for(ENV['PACKAGING_LOCATION'] || '~> 0.99') end -require 'yaml' -data = YAML.load_file(File.join(File.dirname(__FILE__), 'ext', 'project_data.yaml')) -bundle_platforms = data['bundle_platforms'] -x64_platform = Gem::Platform.local.cpu == 'x64' -data['gem_platform_dependencies'].each_pair do |gem_platform, info| - next if gem_platform == 
'x86-mingw32' && x64_platform - next if gem_platform == 'x64-mingw32' && !x64_platform - if bundle_deps = info['gem_runtime_dependencies'] - bundle_platform = bundle_platforms[gem_platform] or raise "Missing bundle_platform" - platform(bundle_platform.intern) do - bundle_deps.each_pair do |name, version| - gem(name, version, :require => false) - end - end - end +group(:documentation, optional: true) do + gem 'gettext-setup', '~> 1.0', require: false, platforms: [:ruby] + gem 'ronn', '~> 0.7.3', require: false, platforms: [:ruby] + gem 'puppet-strings', require: false, platforms: [:ruby] + gem 'pandoc-ruby', require: false, platforms: [:ruby] end -if File.exists? "#{__FILE__}.local" +if File.exist? "#{__FILE__}.local" eval(File.read("#{__FILE__}.local"), binding) end diff --git a/Guardfile.example b/Guardfile.example new file mode 100644 index 00000000000..0936f7dd5ce --- /dev/null +++ b/Guardfile.example @@ -0,0 +1,76 @@ +# More info at https://github.com/guard/guard#readme + +# You'll need to make sure Guard, and any of its plugins are in your Gemfile.local +# +# Example: +# # Automatically run tests on file changes +# gem 'guard', require: false +# gem 'guard-rspec', require: false +# gem 'guard-bundler', require: false +# gem 'terminal-notifier-guard', require: false +# +# After running `bundle install`, you can run Guard via `bundle exec guard` +# from the top of the repository checkout. + +notification(:terminal_notifier, app_name: "Puppet ::", group: `pwd`.chomp) if `uname` =~ /Darwin/ + +## Uncomment and set this to only include directories you want to watch +# directories %w(app lib config test spec features) \ +# .select{|d| Dir.exist?(d) ? d : UI.warning("Directory #{d} does not exist")} + +## Note: if you are using the `directories` clause above and you are not +## watching the project directory ('.'), then you will want to move +## the Guardfile to a watched dir and symlink it back, e.g. +# +# $ mkdir config +# $ mv Guardfile config/ +# $ ln -s config/Guardfile . +# +# and, you'll have to watch "config/Guardfile" instead of "Guardfile" + +guard :bundler do + require 'guard/bundler' + require 'guard/bundler/verify' + helper = Guard::Bundler::Verify.new + + files = ['Gemfile', 'Gemfile.local'] + files += Dir['*.gemspec'] if files.any? { |f| helper.uses_gemspec?(f) } + + # Assume files are symlinked from somewhere + files.each { |file| watch(helper.real_path(file)) } +end + +def file2specs(match) + file = match[0] + puts "Lib file changed: #{file.inspect}" + %w{spec/unit spec/integration}.collect { |d| + file.sub('lib/puppet', d).sub(".rb", "_spec.rb") + }.find_all { |f| + File.exist?(f) + } +end + +rspec_options = { + cmd: "bundle exec rspec", + run_all: { + cmd: "bundle exec parallel_rspec -o '--format progress ", + cmd_additional_args: "'" + }, + all_after_pass: false +} +guard :rspec, rspec_options do + require "guard/rspec/dsl" + dsl = Guard::RSpec::Dsl.new(self) + + # Feel free to open issues for suggestions and improvements + + # RSpec files + rspec = dsl.rspec + watch(rspec.spec_helper) { rspec.spec_dir } + watch(rspec.spec_support) { rspec.spec_dir } + watch(rspec.spec_files) + + # Ruby files + ruby = dsl.ruby + watch(ruby.lib_files) { |f| file2specs(f) } +end diff --git a/LICENSE b/LICENSE index e5ecbd0014f..427417b60d5 100644 --- a/LICENSE +++ b/LICENSE @@ -1,8 +1,193 @@ - Puppet - Automating Configuration Management. 
- Copyright (C) 2005-2015 Puppet Labs Inc + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ - Puppet Labs can be contacted at: info@puppetlabs.com + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
diff --git a/README.md b/README.md index a0747d1dfd9..547a738d582 100644 --- a/README.md +++ b/README.md @@ -1,76 +1,68 @@ -Puppet -====== +# Puppet -[![Build Status](https://travis-ci.org/puppetlabs/puppet.png?branch=master)](https://travis-ci.org/puppetlabs/puppet) -[![Inline docs](http://inch-ci.org/github/puppetlabs/puppet.png)](http://inch-ci.org/github/puppetlabs/puppet) +![RSpec tests](https://github.com/puppetlabs/puppet/workflows/RSpec%20tests/badge.svg) +[![Gem Version](https://badge.fury.io/rb/puppet.svg)](https://badge.fury.io/rb/puppet) +[![Inline docs](https://inch-ci.org/github/puppetlabs/puppet.svg)](https://inch-ci.org/github/puppetlabs/puppet) Puppet, an automated administrative engine for your Linux, Unix, and Windows systems, performs administrative tasks (such as adding users, installing packages, and updating server configurations) based on a centralized specification. -Documentation -------------- +## Documentation Documentation for Puppet and related projects can be found online at the -[Puppet Docs site](http://docs.puppetlabs.com). +[Puppet Docs site](https://puppet.com/docs). -HTTP API --------- -[HTTP API Index](api/docs/http_api_index.md) +### HTTP API -Installation ------------- +[HTTP API Index](https://puppet.com/docs/puppet/latest/http_api/http_api_index.html) -The best way to run Puppet is with [Puppet Enterprise](http://puppetlabs.com/puppet/puppet-enterprise), +## Installation + +The best way to run Puppet is with [Puppet Enterprise (PE)](https://puppet.com/products/puppet-enterprise/), which also includes orchestration features, a web console, and professional support. -[The PE documentation is available here.](http://docs.puppetlabs.com/pe/latest) +The PE documentation is [available here.](https://puppet.com/docs/pe/latest) To install an open source release of Puppet, -[see the installation guide on the docs site.](http://docs.puppetlabs.com/guides/installation.html) +[see the installation guide on the docs site.](https://puppet.com/docs/puppet/latest/installing_and_upgrading.html) If you need to run Puppet from source as a tester or developer, -[see the running from source guide on the docs site.](http://docs.puppetlabs.com/guides/from_source.html) +see the [Quick Start to Developing on Puppet](docs/quickstart.md) guide. -Developing and Contributing ------- +## Developing and Contributing We'd love to get contributions from you! For a quick guide to getting your -system setup for developing take a look at our [Quickstart -Guide](docs/quickstart.md). Once you are up and running, take a look at the -[Contribution Documents](CONTRIBUTING.md) to see how to get your changes merged +system setup for developing, take a look at our [Quickstart +Guide](https://github.com/puppetlabs/puppet/blob/main/docs/quickstart.md). Once you are up and running, take a look at the +[Contribution Documents](https://github.com/puppetlabs/.github/blob/main/CONTRIBUTING.md) to see how to get your changes merged in. -For more complete docs on developing with puppet you can take a look at the -rest of the [developer documents](docs/index.md). +For more complete docs on developing with Puppet, take a look at the +rest of the [developer documents](https://github.com/puppetlabs/puppet/blob/main/docs/index.md). -License -------- +## Licensing -See [LICENSE](LICENSE) file. +See [LICENSE](https://github.com/puppetlabs/puppet/blob/main/LICENSE) file. Puppet is licensed by Puppet, Inc. under the Apache license. Puppet, Inc. 
can be contacted at: info@puppet.com -Support -------- +## Support -Please log tickets and issues at our [JIRA tracker](http://tickets.puppetlabs.com). A [mailing +Please log issues in this project's [GitHub Issues](https://github.com/puppetlabs/puppet/issues). A [mailing list](https://groups.google.com/forum/?fromgroups#!forum/puppet-users) is -available for asking questions and getting help from others. In addition there -is an active #puppet channel on Freenode. +available for asking questions and getting help from others, or if you prefer chat, we also have a [Puppet Community slack.](https://puppetcommunity.slack.com/) -We use semantic version numbers for our releases, and recommend that users stay +We use semantic version numbers for our releases and recommend that users stay as up-to-date as possible by upgrading to patch releases and minor releases as they become available. -Bugfixes and ongoing development will occur in minor releases for the current +Bug fixes and ongoing development will occur in minor releases for the current major version. Security fixes will be backported to a previous major version on a best-effort basis, until the previous major version is no longer maintained. - -For example: If a security vulnerability is discovered in Puppet 4.1.1, we -would fix it in the 4 series, most likely as 4.1.2. Maintainers would then make -a best effort to backport that fix onto the latest Puppet 3 release. +For example: If a security vulnerability is discovered in Puppet 8.1.1, we +would fix it in the 8 series, most likely as 8.1.2. Maintainers would then make +a best effort to backport that fix onto the latest Puppet 7 release. Long-term support, including security patches and bug fixes, is available for commercial customers. Please see the following page for more details: -[Puppet Enterprise Support Lifecycle](http://puppetlabs.com/misc/puppet-enterprise-lifecycle) - +[Puppet Enterprise Support Lifecycle](https://puppet.com/docs/puppet-enterprise/product-support-lifecycle/) diff --git a/Rakefile b/Rakefile index 618b0aa477b..4c515da9ecf 100644 --- a/Rakefile +++ b/Rakefile @@ -1,69 +1,75 @@ -# Rakefile for Puppet -*- ruby -*- -RAKE_ROOT = File.dirname(__FILE__) +# frozen_string_literal: true -# We need access to the Puppet.version method -$LOAD_PATH.unshift(File.expand_path("lib")) -require 'puppet/version' - -$LOAD_PATH << File.join(RAKE_ROOT, 'tasks') +require 'open3' +require 'rake' +require 'rubygems' +require 'rubygems/package_task' -begin - require 'rubygems' - require 'rubygems/package_task' -rescue LoadError - # Users of older versions of Rake (0.8.7 for example) will not necessarily - # have rubygems installed, or the newer rubygems package_task for that - # matter. - require 'rake/packagetask' - require 'rake/gempackagetask' +if Rake.application.top_level_tasks.grep(/^(pl:|package:)/).any? 
+ begin + require 'packaging' + Pkg::Util::RakeUtils.load_packaging_tasks + rescue LoadError => e + puts "Error loading packaging rake tasks: #{e}" + end end -require 'rake' - -Dir['tasks/**/*.rake'].each { |t| load t } +namespace :package do + task :bootstrap do + puts 'Bootstrap is no longer needed, using packaging-as-a-gem' + end + task :implode do + puts 'Implode is no longer needed, using packaging-as-a-gem' + end +end -begin - load File.join(RAKE_ROOT, 'ext', 'packaging', 'packaging.rake') -rescue LoadError +task :default do + sh %{rake -T} end -build_defs_file = 'ext/build_defaults.yaml' -if File.exist?(build_defs_file) - begin - require 'yaml' - @build_defaults ||= YAML.load_file(build_defs_file) - rescue Exception => e - STDERR.puts "Unable to load yaml from #{build_defs_file}:" - STDERR.puts e +namespace :pl_ci do + desc 'Build puppet gems' + task :gem_build, [:gemspec] do |t, args| + args.with_defaults(gemspec: 'puppet.gemspec') + stdout, stderr, status = Open3.capture3(<<~END) + gem build #{args.gemspec} --platform x86-mingw32 && \ + gem build #{args.gemspec} --platform x64-mingw32 && \ + gem build #{args.gemspec} --platform universal-darwin && \ + gem build #{args.gemspec} + END + if !status.exitstatus.zero? + puts "Error building #{args.gemspec}\n#{stdout} \n#{stderr}" + exit(1) + else + puts stdout + end end - @packaging_url = @build_defaults['packaging_url'] - @packaging_repo = @build_defaults['packaging_repo'] - raise "Could not find packaging url in #{build_defs_file}" if @packaging_url.nil? - raise "Could not find packaging repo in #{build_defs_file}" if @packaging_repo.nil? - namespace :package do - desc "Bootstrap packaging automation, e.g. clone into packaging repo" - task :bootstrap do - if File.exist?("ext/#{@packaging_repo}") - puts "It looks like you already have ext/#{@packaging_repo}. If you don't like it, blow it away with package:implode." - else - cd 'ext' do - %x{git clone #{@packaging_url}} + desc 'build the nightly puppet gems' + task :nightly_gem_build do + # this is taken from `rake package:nightly_gem` + extended_dot_version = %x{git describe --tags --dirty --abbrev=7}.chomp.tr('-', '.') + + # we must create tempfile in the same directory as puppetg.gemspec, since + # it uses __dir__ to determine which files to include + require 'tempfile' + Tempfile.create('gemspec', __dir__) do |dst| + File.open('puppet.gemspec', 'r') do |src| + src.readlines.each do |line| + if line.match?(/version\s*=\s*['"][0-9.]+['"]/) + line = "spec.version = '#{extended_dot_version}'" + end + dst.puts line end end - end - desc "Remove all cloned packaging automation" - task :implode do - rm_rf "ext/#{@packaging_repo}" + dst.flush + Rake::Task['pl_ci:gem_build'].invoke(dst.path) end end end -task :default do - sh %{rake -T} -end - task :spec do + ENV["LOG_SPEC_ORDER"] = "true" sh %{rspec #{ENV['TEST'] || ENV['TESTS'] || 'spec'}} end @@ -75,3 +81,35 @@ task(:rubocop) do raise "RuboCop detected offenses" if exit_code != 0 end +desc "verify that changed files are clean of Ruby warnings" +task(:warnings) do + # This rake task looks at all files modified in this branch. + commit_range = 'HEAD^..HEAD' + ruby_files_ok = true + puts "Checking modified files #{commit_range}" + %x{git diff --diff-filter=ACM --name-only #{commit_range}}.each_line do |modified_file| + modified_file.chomp! 
+    # Skip racc generated file as it can have many warnings that cannot be manually fixed +    next if modified_file.end_with?("pops/parser/eparser.rb") +    next if modified_file.start_with?('spec/fixtures/', 'acceptance/fixtures/') || File.extname(modified_file) != '.rb' +    puts modified_file + +    stdout, stderr, _ = Open3.capture3("ruby -wc \"#{modified_file}\"") +    unless stderr.empty? +      ruby_files_ok = false +      puts stderr +    end +    puts stdout +  end +  raise "One or more ruby files contain warnings." unless ruby_files_ok +end + +if Rake.application.top_level_tasks.grep(/^gettext:/).any? +  begin +    spec = Gem::Specification.find_by_name 'gettext-setup' +    load "#{spec.gem_dir}/lib/tasks/gettext.rake" +    GettextSetup.initialize(File.absolute_path('locales', File.dirname(__FILE__))) +  rescue LoadError +    abort("Run `bundle install --with documentation` to install the `gettext-setup` gem.") +  end +end diff --git a/acceptance/.beaker.yml b/acceptance/.beaker.yml new file mode 100644 index 00000000000..5b26c2a4e87 --- /dev/null +++ b/acceptance/.beaker.yml @@ -0,0 +1,14 @@ +--- +ssh: + keys: + - id_rsa_acceptance + - ~/.ssh/id_rsa-acceptance +xml: true +timesync: false +repo_proxy: true +add_el_extras: false +'master-start-curl-retries': 30 +log_level: debug +preserve_hosts: onfail +helper: ./lib/helper.rb +options_file: ./config/aio/options.rb diff --git a/acceptance/.gitignore b/acceptance/.gitignore index 553270032d5..fe65d4f0ebc 100644 --- a/acceptance/.gitignore +++ b/acceptance/.gitignore @@ -9,3 +9,4 @@ id_rsa-acceptance id_rsa-acceptance.pub preserved_config.yaml merged_options.rb +tmp diff --git a/acceptance/Gemfile b/acceptance/Gemfile index 5459b86cd9a..2e7d52c8a0c 100644 --- a/acceptance/Gemfile +++ b/acceptance/Gemfile @@ -3,17 +3,25 @@ source ENV['GEM_SOURCE'] || 'https://rubygems.org' def location_for(place, fake_version = nil) - if place =~ /^(git:[^#]*)#(.*)/ + if place.is_a?(String) && place =~ /^((?:git[:@]|https:)[^#]*)#(.*)/ [fake_version, { :git => $1, :branch => $2, :require => false }].compact - elsif place =~ /^file:\/\/(.*)/ + elsif place.is_a?(String) && place =~ /^file:\/\/(.*)/ ['>= 0', { :path => File.expand_path($1), :require => false }] else [place, { :require => false }] end end -gem "beaker", *location_for(ENV['BEAKER_VERSION'] || '~> 2.8') -gem "rake", "~> 10.1" +gem "beaker", *location_for(ENV['BEAKER_VERSION'] || '~> 6.0') +gem "beaker-puppet", *location_for(ENV['BEAKER_PUPPET_VERSION'] || "~> 4.0") +gem "beaker-hostgenerator", *location_for(ENV['BEAKER_HOSTGENERATOR_VERSION'] || "~> 2") +gem "beaker-abs", *location_for(ENV['BEAKER_ABS_VERSION'] || "~> 1.0") +gem "beaker-vagrant", *location_for(ENV['BEAKER_VAGRANT_VERSION'] || "~> 0") +gem "beaker-vmpooler", *location_for(ENV['BEAKER_VMPOOLER_VERSION'] || "~> 1.3") +gem "beaker-vcloud", *location_for(ENV['BEAKER_VCLOUD_VERSION'] || "~> 1.0") +gem "beaker-docker", *location_for(ENV['BEAKER_DOCKER_VERSION'] || "~> 0.5") +gem "beaker-gke", *location_for(ENV['BEAKER_GKE_VERSION'] || "~> 0.0.3") +gem "rake", ">= 12.3.3" gem "httparty", :require => false gem 'uuidtools', :require => false @@ -22,6 +30,6 @@ group(:test) do gem "mocha", "~> 0.10.5", :require => false end -if File.exists? "#{__FILE__}.local" +if File.exist? 
"#{__FILE__}.local" eval(File.read("#{__FILE__}.local"), binding) end diff --git a/acceptance/README.md b/acceptance/README.md new file mode 100644 index 00000000000..9d16b82fdea --- /dev/null +++ b/acceptance/README.md @@ -0,0 +1,469 @@ +# Running Puppet Acceptance Tests + + +## Table of Contents +* [Setup](#setup) +* [Quick Start](#quick-start) +* [Configuration](#configuration) +* [Running Tests](#running-tests) +* [Writing Tests](#writing-tests) +* [Getting Help](#getting-help) + +------------- +An important aside: Currently running acceptance tests that contain a specific +change is challenging unless you have access to infrastructure internal to the +Puppet, Inc. network. This is a known issue, and we are working to make this a +better experience for our community. + +------------- + +## Setup +### Prerequisites +* git +* ruby +* [bundler][] +* a local clone of the puppet repo + +All command examples in this readme assume you are working in the same directory +this README is in, `puppet/acceptance`. + +### Installation +All of the dependencies you need to run and develop tests are defined in +`Gemfile`. To install them, run `bundle install --path .bundle/gems`. This +command, as well all the command examples in this README, assume you are working +in the acceptance directory. If you ever have issues with your runtime +dependencies, you can update them with `bundle update` or start over fresh with +`rm -rf .bundle/gems; bundle install --path .bundle/gems`. + +To ensure installation was successful, you can run `bundle exec rake -T`. This +should return something along these lines: +``` +$ bundle exec rake -T +rake ci:help # Print usage information +rake ci:test:aio # Run the acceptance tests using puppet-agent (AI... +rake ci:test:gem # Run the acceptance tests against puppet gem on ... +rake ci:test:git # Run the acceptance tests against a git checkout +rake ci:test:quick # Run a limited but representative subset of acce... +rake clean # Remove any temporary products +rake clobber # Remove any generated files +``` +To get a detailed description of all of these tasks, run `bundle exec rake -D`. + +------------- +## Quick Start +### For community members +Currently, there isn't a good way for community members to run acceptance tests. +This is a known problem. We currently have multiple avenues we are exploring to +make running puppet acceptance tests easier for our community. In the meantime, +we apologize for the inconvenience. + +### For Puppet, Inc. employees +If you have access to infrastructure internal to the Puppet, Inc. network, then +the quickest way to get acceptance tests running is with vmpooler. + +To test changes that are available on a branch on github.com: +``` +bundle exec rake ci:test:git OPTIONS='--preserve-hosts=always' SHA=ticket/6.0.x/ticketed-work-description RUNTIME_BRANCH=6.0.x FORK=melissa TESTS='tests/path/to/test.rb,tests/other/test.rb' +``` +Where `SHA` is the branch name, `RUNTIME_BRANCH` is the agent version stream, +and `FORK` is the github fork where the branch lives. 
+ +To test changes that are available in a puppet-agent package on builds.delivery.puppetlabs.net: +``` +bundle exec rake ci:test:aio OPTIONS='--preserve-hosts=always' SHA=9124b4e81ec0ac6394d3edc67d4ab71866869fd7 TESTS='tests/path/to/test.rb,tests/other/test.rb' +``` +`SHA` is a sha or tag that exists on builds.delivery.puppetlabs.net/puppet-agent + +To rerun a test on the hosts that have already been provisioned, use beaker subcommands: +``` +bundle exec beaker exec tests/path/to/test.rb,tests/other/test.rb +``` + +Always clean up after yourself when you are done: +``` +bundle exec beaker destroy +``` +This will remove any provisioned hosts. Only run this once you are done with the +hosts that have been checked out and provisioned for a given run. + +------------- + +## Configuration +### Environment Variables +A detailed description of the available environment variables can be found by +running `bundle exec rake ci:help`. This will print a list of both required and +optional environment variables with short descriptions on how they are used. +Please review all of these options as they will impact how your test servers +are provisioned. This rake task is the most up to date source for this +information. Please read through the available variables, their defaults, and +what they do. They may impact your acceptance run in ways you do not expect. + +### Customizing Test Targets +If you are using the vmpooler hypervisor internal to Puppet, Inc. infrastructure, +you can customize the platforms to test on using the `HOSTS` environment variable. +You'll set the `HOSTS` environment variable to the host string you want to test, +such as `HOSTS=redhat7-64ma-windows2012r2-64a`. + +For a list of available `HOSTS` platforms and their exact naming structures, +check the keys listed in [beaker hostgenerator](https://github.com/puppetlabs/beaker-hostgenerator/blob/master/lib/beaker-hostgenerator/data.rb). Generally, this string will be in the format +`{platform}{version}-{architecture}{role/s}`. You will most often use either the +agent (a) or master (m) role, but you can find a list of available roles in +[beaker hostgenerator](https://github.com/puppetlabs/beaker-hostgenerator/blob/master/lib/beaker-hostgenerator/roles.rb). +Multiple hosts in the string are separated with a dash(`-`). You must have at +least one agent and at least one master. + +Be careful not to confuse the different host string formats. We have different +tools that expect the host string to be in different forms. For example, +`packaging_platform` is specific to how [Vanagon](https://github.com/puppetlabs/vanagon) +parses that string. + +### The Hosts File +The rake tasks that run acceptance will by default create a hosts file and +populate it using [beaker-hostgenerator][] using either the `HOSTS` environment +variable or the default host string (currently `redhat7-64ma-windows2012r2-64a`). +The automation assumes you are using the vmpooler hypervisor and a vmpooler +instance that is only available to Puppet, Inc. employees. If you want to +customize the hypervisor or the vmpooler instance, you'll need to generate your own +hosts file. You must pass in a valid host string to the `beaker-hostgenerator` +command. See [Customizing Test Targets](#customizing-test-targets) for more +information on how to construct a valid host string. + +To customize the hypervisor, pass in `--hypervisor {hypervisor name}`. To set +the vmpooler instance, use `--global-config pooling_api={vmpooler uri}`. 
Only the +vmpooler hypervisor uses the pooling_api key. + +The host string that is passed in is the same one that you would use with the +`HOSTS` environment variable. See [Customizing Test Targets](#customizing-test-targets) +on how to format this string. + +To have the automation recognize and use your custom hosts file, you'll need to +set the `HOSTS` environment variable to the hosts file. In the above example, we +called this file `hosts.yaml`, so we will set `HOSTS` to `hosts.yaml` when running +all future beaker commands or rake tasks to run acceptance tests. + +For example, if you were to run this command: +``` +bundle exec beaker-hostgenerator redhat7-64ma-windows2012r2-64a --disable-default-role --osinfo-version 1 --hypervisor vmpooler --global-config pooling_api=http://customvmpooler/ > hosts.yaml +``` +You would generate a file called `hosts.yaml` that contains something like this: +``` +--- +HOSTS: + redhat7-64-1: + platform: el-7-x86_64 + packaging_platform: el-7-x86_64 + template: redhat-7-x86_64 + hypervisor: vmpooler + roles: + - master + - agent + windows2012r2-64-1: + platform: windows-2012r2-64 + packaging_platform: windows-2012-x64 + ruby_arch: x64 + template: win-2012r2-x86_64 + hypervisor: vmpooler + roles: + - agent +CONFIG: + nfs_server: none + consoleport: 443 + pooling_api: http://customvmpooler/ +``` +We can then run the acceptance tests with: +`bundle exec rake ci:test:aio HOSTS=hosts.yaml SHA={sha}` + +### Hypervisor Options +The hypervisor dictates where you will be running the acceptance tests. The beaker +hypervisors take care of basic host setup so that you will have a consistent +host environment across every test run. You can find more details on the different +hypervisor options in [the beaker repo](https://github.com/puppetlabs/beaker/blob/master/docs/how_to/hypervisors/README.md). + +Here, we will focus on vmpooler and docker, as those are the two we use most +often internally. If you use a hypervisor other than abs, vagrant, vmpooler, or +docker, you'll have to add the gem for that hypervisor to `Gemfile.local` and run +`bundle update` to install the new gems. You also have the ability to run tests +on a static host. + +#### VMPooler +[VMPooler](https://github.com/puppetlabs/vmpooler) is the default hypervisor we +use. This is only available to Puppet, Inc. employees as it uses internal +infrastructure. If you have access to a similar setup, then you are welcome to +use this option with a few values changed. If you are using the Puppet internal +vmpooler, then you can simply run the acceptance rake tasks. See +[Customizing Test Targets](#customizing-test-targets) for how to use the +`HOSTS` environment variable to customize the platforms you are running tests on. + +To use a different vmpooler instance, use +`--global-config pooling_api=http://customvmpooler/` when you use +`beaker-hostgenerator` to generate `hosts.yaml`. Make sure you set `HOSTS` to +the hosts file you just generated so the automation can find that file. See +[The Hosts File](#the-hosts-file) for more detail on the hosts file. + +#### Docker +To test with [the docker hypervisor](https://github.com/puppetlabs/beaker-docker), +you will want to generate a custom hosts file. You will also most likely need +to manually edit the file. See [The Hosts File](#the-hosts-file) for more +detail on the hosts file.
+ +To create a hosts file with a centos 7 master and a centos 7 agent, we can use +the following beaker-hostgenerator command: +`bundle exec beaker-hostgenerator centos7-64m-centos7-64a --disable-default-role --osinfo-version 1 --hypervisor docker > hosts.yaml` +This will produce a file called `hosts.yaml` that contains the following: +``` +--- +HOSTS: + centos7-64-1: + docker_cmd: + - "/sbin/init" + image: centos:7 + platform: centos-7-x86_64 + packaging_platform: el-7-x86_64 + docker_image_commands: + - cp /bin/true /sbin/agetty + - yum install -y crontabs initscripts iproute openssl sysvinit-tools tar wget + which ss + hypervisor: docker + roles: + - master + centos7-64-2: + docker_cmd: + - "/sbin/init" + image: centos:7 + platform: centos-7-x86_64 + packaging_platform: el-7-x86_64 + docker_image_commands: + - cp /bin/true /sbin/agetty + - yum install -y crontabs initscripts iproute openssl sysvinit-tools tar wget + which ss + hypervisor: docker + roles: + - agent +CONFIG: + nfs_server: none + consoleport: 443 + +``` +Run acceptance tests against pre-built puppet-agent packages with +`bundle exec rake ci:test:aio SHA={sha|tag} TESTS=path/to/test.rb HOSTS=hosts.yaml` + +Note that if you are not running tests against the master branch and you are +installing the latest puppetserver package, you will likely need to set `RELEASE_STREAM` +to pick up the correct server version. Please see the section on [environment variables](#environment-variables) +for more information. + +When you generate your [hosts file](#the-hosts-file), [beaker-hostgenerator][] does +its best to populate the values as logically as possible. You will likely want +to update or modify them to suit your needs. + +With `image`, [beaker-hostgenerator][] does its best to guess the most logical +image string based on the platform you are building. For the most part, this +should work without interference, but if you are using a custom docker image or +do not want the default, then you will have to manually update this string. Not +every string beaker-hostgenerator uses to populate this variable will be valid. + +`docker_image_commands` is automatically populated when generating the hosts +file with [beaker-hostgenerator][]. This has already been set for a handful of +host types, but may not be set for all. + +* TODO I only tried this once using a docker image that already had puppetserver + installed as the master host. The image I used took forever to provision, + so I gave up. If we want to continue down this route, we need to make sure + the setup steps can check if puppetserver has already been installed so that + we don't try to install it again. +* TODO There's something odd with `docker_image_entrypoint` versus `docker_cmd`. + We should clarify the difference between these two values. I don't quite + understand what the difference is between them. +* TODO These docker containers have to run in privileged mode (or systemd, + among possibly other things, won't function as we need them to). This is + not ideal if you're testing code that affects your OS (i.e. running docker on + linux without a docker machine in between the container and your laptop). + BE CAREFUL + +#### Static Hosts +This is not recommended unless you are familiar with how beaker and +beaker-puppet provision hosts. + +To test on a server that's already been spun up or doesn't require a hypervisor, +you should set the name of the host to the FQDN of the server you want to use, +then remove the hypervisor and template settings.
This is not recommended, and +you may run into issues with failures or overwritten configuration due to either +beaker provision steps or test provisioning steps. +``` +--- +HOSTS: + azeqdqmk14mvu3g.delivery.puppetlabs.net: + platform: el-7-x86_64 + packaging_platform: el-7-x86_64 + roles: + - master +``` + +------------- + +## Running Tests +### Testing with pre-built packages +``` +bundle exec rake ci:test: SHA={sha|tag} +``` + +This is the primary method that we use to run puppet acceptance tests. It +requires puppet-agent packages that have been built with the version of the +puppet code that you want to test. As building packages usually takes quite a +bit of time, this method requires some patience. You are required to set `SHA` +when running acceptance tests against pre-built packages. + +#### Testing a specific version +If you are testing a specific version, `SHA` must be set to a value that exists +on the path `#{ENV['DEV_BUILDS_URL']}/puppet-agent/#{ENV['SHA']}`. Note that +this value corresponds to the puppet-agent package, not to puppet. +`DEV_BUILDS_URL` defaults to the internal build server that is only accessible +to Puppet, Inc. employees. The method called here depends on information written +to a yaml file in that directory. Though you can override DEV_BUILDS_URL, the +automation here is very specific and likely will not work as you are expecting +it to. + +``` +bundle exec rake ci:test:aio SHA=3cfbac6857c10efc5b1e02262cfd7b849bb9c4b2 +``` +``` +bundle exec rake ci:test:aio SHA=6.0.5 +``` + +#### Testing Nightlies +If you do not have access to internal infrastructure, you can test against +packages that have been made available on nightlies.puppet.com. Currently, you +cannot specify a specific version. Instead, you have to use the latest shipped +package for the release stream you are interested in. To do this, `SHA` must be +set to `latest`. If you want to modify the release stream you are testing, +`RELEASE_STREAM` can be modified. It defaults to `puppet` which should +correspond to the latest stream available. If you want to modify +`RELEASE_STREAM`, set it to an available repo, such as `puppet5`. +``` +bundle exec rake ci:test:aio SHA=latest RELEASE_STREAM=puppet5 +``` + +### Testing with Git +``` +bundle exec rake ci:test:git SHA={sha|tag|branch} +``` + +#### From a repo on a git server +Though we primarily run acceptance tests against a built package, it is possible +to run these tests with a git checkout. This is most useful when testing locally +to speed up the feedback cycle. + +When testing from a github repo we need to unpack the appropriate +[runtime archive](https://github.com/puppetlabs/puppet-runtime) +for the platform we are testing on. These pre-built archives are stored on an +internal server, and are currently only available to Puppet, Inc. employees. +With these archives, we get all of the runtime dependencies that are usually +provided as a part of the puppet agent package. This allows us to replicate +the runtime environment produced via a package install for the purpose of +running acceptance tests. + +When testing with git, `SHA` can be set to any git artifact: a long sha, a short +sha, a tag, a branch name, etc. What happens is that we write a gemfile with the +details of the puppet repo, pointing to the artifact referenced with `SHA`. Then +when we run `bundle install` on the testing host, bundler grabs puppet from +wherever the gemfile points. 
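+
+You don't normally need to look at that generated gemfile, but as a rough
+sketch of what it does (the exact git source and ref are derived from `SHA`,
+`FORK`, and `SERVER`, so treat this as an illustration rather than the literal
+file), it points bundler at a git checkout of puppet:
+```
+# Illustration only -- the real gemfile is generated by the ci:test:git tooling.
+gem 'puppet', git: 'https://github.com/puppetlabs/puppet.git', ref: '<SHA>'
+```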
If the git artifact referenced is not from the +puppetlabs repo, you can use `FORK` to point to a different github namespace. +Likewise, if the artifact you want to access is not available on `github.com` +but a custom git server, you can set `SERVER` to customize the git uri bundler +pulls from. For more details on these environment variables, run +`bundle exec rake ci:help`. + +As an example, if I have a development branch +(`developent/master/major-feature`) that I'm working on and it only exists in my +fork of puppet (`github.com/joeschmoe/puppet`), then I will run +``` +bundle exec rake ci:test:git SHA=developent/master/major-feature FORK=joeschmoe +``` + +Please note that any changes you want to test must be pushed up to your github +server. This is how we access the code to be tested. + +#### From a local repo +If you are testing with git and using the docker hypervisor, you can run tests +against the puppet checkout on your local system. You need to update your hosts +file to add `mount_folders` to the docker host where you want the checkout of +puppet to be available. Here, `host_path` is the path to puppet on your local +machine. The `container_path` is where puppet will end up on the docker image, +so you can leave it as `/build/puppet`. Note that although `SHA` is required, it +is never used in this workflow. For consistency, I would recommend setting `SHA` +to your working branch name. + +We still need access to our runtime dependencies when testing against a local +git checkout. When we are testing with the docker hypervisor, we assume that the +docker image you are using will have this. As of this writing (Jan. 2019), the +docker image you'll want to use for these tests is not public. The image is +called `agent-runtime-{branch}`, where `{branch}` is the branch of puppet you +are testing. This image includes everything we build as a part of [the runtime +archive](https://github.com/puppetlabs/puppet-runtime). These components are +normally provided as a part of the puppet agent package. +``` +--- +HOSTS: + debian8-64-1: + hypervisor: docker + docker_image_entrypoint: "/sbin/init" + image: pcr-internal.puppet.net/pe-and-platform/agent-runtime-master:201810110.17.gb5afc66 + platform: debian-8-amd64 + packaging_platform: debian-8-amd64 + docker_image_commands: + - rm -f /usr/sbin/policy-rc.d + - systemctl mask getty@tty1.service getty-static.service + - apt-get update && apt-get install -y cron locales-all net-tools wget + mount_folders: + puppet: + host_path: ~/puppet + container_path: /build/puppet + roles: + - agent +``` + +For more details on testing with docker, see [the docker section](#docker). +Remember that `HOSTS` must be set to your hosts file for the automation to honor +it. + +### Testing with Gems +Currently, running acceptance tests with gems is not working. +``` +bundle exec rake ci:test:gem +``` + +### Rerunning Failed Tests +The rake tasks we use here take advantage of a newer feature in beaker that gives us quite a bit of flexibility. We take advantage of beaker subcommands. Subcommands are individual beaker invocations that are used to run the different stages of running tests: provisioning, pre-suite setup, tests, etc. We do this by writing state to the file `.beaker/subcommand_options.yaml`. With each new invocation of a subcommand, beaker will check for this file and load the contents if the file exists. The important thing about this feature is that you can rerun tests without going through the entire provisioning process every time. 
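+
+If you ever want to drive those stages by hand, the flow underneath the rake
+tasks looks roughly like the following (a sketch only -- the rake tasks normally
+handle this for you, and the exact arguments depend on your hosts file and
+options):
+```
+bundle exec beaker init --hosts hosts.yaml   # record options in .beaker/subcommand_options.yaml
+bundle exec beaker provision                 # check out and provision the hosts
+bundle exec beaker exec pre-suite            # run the pre-suite setup steps
+bundle exec beaker exec tests/path/to/test.rb
+bundle exec beaker destroy                   # release the hosts when you are done
+```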
+ +To ensure your hosts aren't cleaned up after a run, set `OPTIONS='--preserve-hosts=always'`. With this set, we can rerun a failed test using the infrastructure beaker has already provisioned. +``` +bundle exec rake ci:test:aio OPTIONS='--preserve-hosts=always' SHA=6.0.5 +``` +If this run fails because a small handful of tests fail, you can rerun only those tests that failed. For example, assume that `tests/resource/package/yum.rb` and `tests/node/check_woy_cache_works.rb` both had failing tests. you can run +``` +bundle exec beaker exec tests/resource/package/yum.rb,tests/node/check_woy_cache_works.rb +``` + +This should work regardless of which hypervisor or testing method you are using. + +------------- + +## Writing Tests +Read more about writing beaker tests in beaker. Check out the [tutorials section](https://github.com/puppetlabs/beaker/tree/master/docs/tutorials) +and [how to write a quick test](https://github.com/puppetlabs/beaker/blob/master/docs/tutorials/lets_write_a_test.md) + +------------- + +## Getting Help +### On the web +* [Puppet help messageboard](http://puppet.com/community/get-help) +* [General GitHub documentation](http://help.github.com/) +### On chat +* Slack (slack.puppet.com) #testing, #puppet-dev, #windows + +[bundler]: https://rubygems.org/gems/bundler +[rspec-puppet]: http://rspec-puppet.com/ +[rspec-puppet_docs]: http://rspec-puppet.com/documentation/ +[beaker]: https://github.com/puppetlabs/beaker +[beaker-puppet]: https://github.com/puppetlabs/beaker-puppet +[beaker-hostgenerator]: https://github.com/puppetlabs/beaker-hostgenerator diff --git a/acceptance/Rakefile b/acceptance/Rakefile index 7fd7db88cfd..62811cd2fc3 100644 --- a/acceptance/Rakefile +++ b/acceptance/Rakefile @@ -1,372 +1,133 @@ -require 'rake/clean' -require 'pp' -require 'yaml' -$LOAD_PATH << File.expand_path(File.join(File.dirname(__FILE__), 'lib')) -require 'puppet/acceptance/git_utils' -extend Puppet::Acceptance::GitUtils +require 'beaker-puppet' -ONE_DAY_IN_SECS = 24 * 60 * 60 -REPO_CONFIGS_DIR = "repo-configs" -CLEAN.include('*.tar', REPO_CONFIGS_DIR, 'merged_options.rb') +Beaker::DSL::Helpers::RakeHelpers.load_tasks -module HarnessOptions - - DEFAULTS = { - :type => 'git', - :helper => ['lib/helper.rb'], - :tests => ['tests'], - :log_level => 'debug', - :color => false, - :root_keys => true, - :ssh => { - :keys => ["id_rsa_acceptance", "#{ENV['HOME']}/.ssh/id_rsa-acceptance"], - }, - :xml => true, - :timesync => false, - :repo_proxy => true, - :add_el_extras => true, - :preserve_hosts => 'onfail', - :forge_host => 'forge-aio01-petest.puppetlabs.com', - :'master-start-curl-retries' => 30, - } - - class Aggregator - attr_reader :mode - - def initialize(mode) - @mode = mode - end - - def get_options(file_path) - puts file_path - if File.exists? file_path - options = eval(File.read(file_path), binding) - else - puts "No options file found at #{File.expand_path(file_path)}" - end - options || {} - end +namespace :ci do + namespace :test do + desc <<-EOS +Run a limited but representative subset of acceptance tests against puppet-agent +(AIO) packages. This task is intended to reduce testing time on a per-commit +basis. - def get_mode_options - get_options("./config/#{mode}/options.rb") - end + $ SHA= bundle exec rake ci:test:quick - def get_local_options - get_options("./local_options.rb") +SHA should be the full SHA for the puppet-agent package. 
+EOS + task :quick => ['ci:check_env', 'ci:gen_hosts'] do + ENV['TESTS'] = get_test_sample.join(",") + Rake::Task["ci:test:aio"].invoke end - def final_options(intermediary_options = {}) - mode_options = get_mode_options - local_overrides = get_local_options - final_options = DEFAULTS.merge(mode_options) - final_options.merge!(intermediary_options) - final_options.merge!(local_overrides) - return final_options - end - end + desc <<-EOS +Run tests on docker quickly and easily. The docker container is set up to mount +your puppet directory in the container. This means you can edit code or test +files and rerun tests without reconfiguring your test environment. - def self.options(mode, options) - final_options = Aggregator.new(mode).final_options(options) - final_options - end -end +Defaults to running all tests unless TESTS is set. TESTS is a comma seperated +list of test files to run. -def beaker_test(mode = :packages, options = {}) - delete_options = options.delete(:__delete_options__) || [] - final_options = HarnessOptions.options(mode, options) - preserve_config = final_options.delete(:__preserve_config__) + $ bundle exec rake ci:test:docker TESTS='path/to/test.rb,path/to/another/test.rb' - if mode == :git - # Build up project git urls based on git server and fork env variables or defaults - final_options[:install].map! do |install| - raise(ArgumentError, "Missing Git URL within options hash. Install URL is nil.") if install.nil? - if md = /^(\w+)#(\w+)$/.match(install) - project, project_sha = md.captures - "#{build_giturl(project)}##{project_sha}" - elsif md = /^(\w+)$/.match(install) - project = md[1] - "#{build_giturl(project)}##{sha}" - else - install - end - end - end +By default, tests are run on a centos 7 host. To change the host, set HOSTS to +a valid host string according to beaker-hostgenerator requirements. - delete_options.each do |delete_me| - final_options.delete(delete_me) - end +All tests marked with a server tag will be skipped. - options_file = 'merged_options.rb' - File.open(options_file, 'w') do |merged| - merged.puts <<-EOS -# Copy this file to local_options.rb and adjust as needed if you wish to run -# with some local overrides. +This task skips all cleanup. Please be sure to run `bundle exec beaker destroy` +to clean up docker containers used for testing. EOS - merged.puts(final_options.pretty_inspect) - end - - tests = ENV['TESTS'] || ENV['TEST'] - tests_opt = "--tests=#{tests}" if tests - - config_opt = "--hosts=#{config}" if config - - overriding_options = ENV['OPTIONS'] - - args = ["--options-file", options_file, config_opt, tests_opt, overriding_options].compact - - begin - sh("beaker", *args) - ensure - preserve_configuration(final_options, options_file) if preserve_config - end -end - -def preserve_configuration(final_options, options_file) - if (hosts_file = config || final_options[:hosts_file]) && hosts_file !~ /preserved_config/ - cp(hosts_file, "log/latest/config.yml") - generate_config_for_latest_hosts - end - mv(options_file, "log/latest") -end - -def generate_config_for_latest_hosts - preserved_config_hash = { 'HOSTS' => {} } - - puts "\nPreserving configuration so that any preserved nodes can be tested again locally..." 
- - config_hash = YAML.load_file('log/latest/config.yml') - if !config_hash || !config_hash.include?('HOSTS') - puts "Warning: No HOSTS configuration found in log/latest/config.yml" - return - else - nodes = config_hash['HOSTS'].map do |node_label,hash| - { - :node_label => node_label, - :roles => hash['roles'], - :platform => hash['platform'] - } - end - - pre_suite_log = File.read('log/latest/pre_suite-run.log') - nodes.each do |node_info| - host_regex = /^([\w.]+) \(#{node_info[:node_label]}\)/ - if matched = host_regex.match(pre_suite_log) - hostname = matched[1] - fqdn = (hostname =~ /\./) ? - hostname : - "#{hostname}.delivery.puppetlabs.net" - elsif /^#{node_info[:node_label]} /.match(pre_suite_log) - fqdn = "#{node_info[:node_label]}" - puts "* Couldn't find any log lines for #{host_regex}, assuming #{fqdn} is the fqdn" - end - if fqdn - preserved_config_hash['HOSTS'][fqdn] = { - 'roles' => node_info[:roles], - 'platform' => node_info[:platform], - } - else - puts "* Couldn't match #{node_info[:node_label]} in pre_suite-run.log" - end - end - pp preserved_config_hash - - File.open('log/latest/preserved_config.yaml', 'w') do |config_file| - YAML.dump(preserved_config_hash, config_file) - end - end -rescue Errno::ENOENT => e - puts "Warning: Couldn't generate preserved_config.yaml #{e}" -end - -def list_preserved_configurations(secs_ago = ONE_DAY_IN_SECS) - preserved = {} - Dir.glob('log/*_*').each do |dir| - preserved_config_path = "#{dir}/preserved_config.yaml" - yesterday = Time.now - secs_ago.to_i - if preserved_config = File.exists?(preserved_config_path) - directory = File.new(dir) - if directory.ctime > yesterday - hosts = [] - preserved_config = YAML.load_file(preserved_config_path).to_hash - preserved_config['HOSTS'].each do |hostname,values| - hosts << "#{hostname}: #{values['platform']}, #{values['roles']}" + task :docker do + begin + ENV['HOSTS'] ||= 'centos7-64a' + ENV['SHA'] ||= `git rev-parse HEAD`.chomp + ENV['OPTIONS'] ||= '--preserve-hosts=always' + ENV['OPTIONS'] += ' --test-tag-exclude=server' + Rake::Task["ci:gen_hosts"].invoke('docker') + hosts_file_content = YAML.load_file ENV['HOSTS'] + hosts_file_content['HOSTS'].each do |host| + host[1]['mount_folders'] = { + 'puppet' => { + 'host_path' => "#{File.dirname(__dir__)}" , + 'container_path' => '/build/puppet' + } + } + host[1]['tag'] = 'acceptance_test_host' end - preserved[hosts] = directory.to_path - end - end - end - preserved.map { |k,v| [v,k] }.sort { |a,b| a[0] <=> b[0] }.reverse -end + File.open(ENV['HOSTS'], "w") { |f| f.write(YAML.dump(hosts_file_content)) } + Rake::Task["ci:test:git"].invoke -def list_preserved_hosts(secs_ago = ONE_DAY_IN_SECS) - hosts = Set.new - Dir.glob('log/**/pre*suite*run.log').each do |log| - yesterday = Time.now - secs_ago.to_i - File.open(log, 'r') do |file| - if file.ctime > yesterday - file.each_line do |line| - matchdata = /^(\w+)(?:\.[\w.]+)? \(.*?\) \d\d:\d\d:\d\d\$/.match(line.encode!('UTF-8', 'UTF-8', :invalid => :replace)) - hosts.add(matchdata[1]) if matchdata - end - end - end - end - hosts -end + ensure + puts <<-EOF -def release_hosts(hosts = nil, secs_ago = ONE_DAY_IN_SECS) - secs_ago ||= ONE_DAY_IN_SECS - hosts ||= list_preserved_hosts(secs_ago) - hosts.each do |h| - hostname = h.split('.').first - puts "Releaseing '#{hostname}'" - puts `curl -X DELETE --url http://vcloud.delivery.puppetlabs.net/vm/#{hostname}` - end -end +************************ +You can modify puppet code or tests and rerun tests without modifying your test +environment. 
-def print_preserved(preserved) - preserved.each_with_index do |entry,i| - puts "##{i}: #{entry[0]}" - entry[1].each { |h| puts " #{h}" } - end -end +To rerun a test or set of tests, pass a comma seperated list of tests to: -def beaker_run_type - type = ENV['TYPE'] || :packages - type = type.to_sym -end + $ bundle exec beaker exec path/to/test.rb -def sha - ENV['SHA'] -end - -def config - ENV['CONFIG'] -end + or -namespace :ci do + $ bundle exec beaker exec path/to/test.rb,path/to/another/test.rb - task :check_env do - raise(USAGE) unless sha - end +************************ +This task skips all clean up so you can rerun tests. Don't forget to clean up +after yourself! - namespace :test do +To clean up the docker containers used to run tests, run: - USAGE = <<-EOS -Requires commit SHA to be put under test as environment variable: SHA=''. -Also must set CONFIG=config/nodes/foo.yaml or include it in an options.rb for Beaker. -You may set TESTS=path/to/test,and/more/tests. -You may set additional Beaker OPTIONS='--more --options' -If testing from git checkouts, you may optionally set the github fork to checkout from using PUPPET_FORK='some-other-puppet-fork' (you may change the HIERA_FORK and FACTER_FORK as well if you wish). -You may also optionally set the git server to checkout repos from using GIT_SERVER='some.git.mirror'. -Or you may set PUPPET_GIT_SERVER='my.host.with.git.daemon', specifically, if you have set up a `git daemon` to pull local commits from. (You will need to allow the git daemon to serve the repo (see `git help daemon` and the docs/acceptance_tests.md for more details)). -If there is a Beaker options hash in a ./local_options.rb, it will be included. Commandline options set through the above environment variables will override settings in this file. -EOS - - desc <<-EOS -Run the acceptance tests through Beaker and install packages on the configuration targets. -#{USAGE} -EOS - task :packages => 'ci:check_env' do - beaker_test - end + $ bundle exec beaker destroy - desc <<-EOS -Run the acceptance tests through Beaker and install packages as part of the AIO puppet-agent installation. -#{USAGE} -EOS - task :aio => 'ci:check_env' do - beaker_test(:aio) - end +************************ - desc <<-EOS -Run the acceptance tests through Beaker and install packages as part of the AIO puppet-agent installation, testing against puppet-master-passenger. -#{USAGE} -EOS - task :passenger => 'ci:check_env' do - beaker_test(:passenger) - end - desc <<-EOS -Run the acceptance tests through Beaker and install from git on the configuration targets. -#{USAGE} -EOS - task :git => 'ci:check_env' do - beaker_test(:git) + EOF + end end end - desc "Capture the master and agent hostname from the latest log and construct a preserved_config.yaml for re-running against preserved hosts without provisioning." - task :extract_preserved_config do - generate_config_for_latest_hosts - end - - desc <<-EOS -Run an acceptance test for a given node configuration and preserve the hosts. -Defaults to a packages run, but you can set it to 'git' with TYPE='git'. -#{USAGE} - EOS - task :test_and_preserve_hosts => 'ci:check_env' do - beaker_test(beaker_run_type, :preserve_hosts => 'always', :__preserve_config__ => true) - end - - desc "List acceptance runs from the past day which had hosts preserved." - task :list_preserved do - preserved = list_preserved_configurations - print_preserved(preserved) - end - - desc <<-EOS -Shutdown and destroy any hosts that we have preserved for testing. 
These should be reaped daily by scripts, but this will free up resources immediately. -Specify a list of comma separated HOST_NAMES if you have a set of dynamic vcloud host names you want to purge outside of what can be grepped from the logs. -You can go back through the last SECS_AGO logs. Default is one day ago in secs. - EOS - task :release_hosts do - host_names = ENV['HOST_NAMES'].split(',') if ENV['HOST_NAMES'] - secs_ago = ENV['SECS_AGO'] - release_hosts(host_names, secs_ago) - end - - task :destroy_preserved_hosts => 'ci:release_hosts' do - puts "Note: we are now releasing hosts back to the vcloud pooling api rather than destroying them directly. The rake task for this is ci:release_hosts" - end - - desc <<-EOS -Rerun an acceptance test using the last captured preserved_config.yaml to skip provisioning. -Or specify a CONFIG_NUMBER from `rake ci:list_preserved`. -Defaults to a packages run, but you can set it to 'git' with TYPE='git'. - EOS - task :test_against_preserved_hosts do - config_number = (ENV['CONFIG_NUMBER'] || 0).to_i - preserved = list_preserved_configurations - print_preserved(preserved) - config_path = preserved[config_number][0] - - puts "Using ##{config_number}: #{config_path}" - - options = { - :hosts_file => "#{config_path}/preserved_config.yaml", - :no_provision => true, - :preserve_hosts => 'always', - } - run_type = beaker_run_type - if run_type == :packages - options.merge!(:pre_suite => [ - 'setup/packages/pre-suite/015_PackageHostsPresets.rb', - 'setup/packages/pre-suite/045_EnsureMasterStartedOnPassenger.rb', - ]) - else - options.merge!(:__delete_options__ => [:pre_suite]) + namespace :sync do + task :windows do + raise 'WIN_MACHINE environment variable is required' unless ENV['WIN_MACHINE'] + win_machine = ENV['WIN_MACHINE'] + '.delivery.puppetlabs.net' + path = ENV['LIB_DIR'] || 'type' # 'lib/puppet' prefix is implicit. + dest_path = path.split('/')[0...-1].join + system("scp -r #{File.dirname(__FILE__)}/../lib/puppet/#{path} Administrator@#{win_machine}:'C:/Program\\ Files/Puppet\\ Labs/Puppet/puppet/lib/ruby/vendor_ruby/puppet/#{dest_path}'") end - beaker_test(beaker_run_type, options) end end -task :default do - sh('rake -T') -end - -task :spec do - sh('rspec lib') +def get_test_sample + # This set represents a reasonable sample of puppet acceptance tests, + # covering a wide range of features and code susceptible to regressions. + tests = [ 'tests/direct_puppet/cached_catalog_remediate_local_drift.rb', + 'tests/resource/file/content_attribute.rb', + 'tests/face/loadable_from_modules.rb', + 'tests/language/functions_in_puppet_language.rb', + 'tests/parser_functions/calling_all_functions.rb', + 'tests/ticket_4622_filebucket_diff_test.rb', + 'tests/pluginsync/4420_pluginfacts_should_be_resolvable_on_agent.rb', + 'tests/ssl/puppet_cert_generate_and_autosign.rb', + 'tests/resource/package/yum.rb', + 'tests/resource/service/ticket_5024_systemd_enabling_masked_service.rb', + 'tests/resource/service/puppet_service_management.rb' + ] + + # Add any tests modified within the last two weeks to the list, excluding + # deleted ones. We can't rely on --diff-filter, because an acceptance + # test may be modified and then deleted in the same time range. + modified = `git log --name-only --pretty="format:" --since 2.weeks ./tests` + tests += modified.split("\n").reject do |s| + s.empty? 
+ end.collect do |s| + s.sub('acceptance/', '') + end.select do |s| + s =~ /\.rb$/ + end.find_all do |s| + File.exist?(s) + end + + tests.uniq.sort end diff --git a/acceptance/bin/ci-bootstrap-from-artifacts.sh b/acceptance/bin/ci-bootstrap-from-artifacts.sh deleted file mode 100755 index 2340831c225..00000000000 --- a/acceptance/bin/ci-bootstrap-from-artifacts.sh +++ /dev/null @@ -1,54 +0,0 @@ -#! /usr/bin/env bash - -############################################################################### -# Initial preparation for a ci acceptance job in Jenkins. Crucially, it -# handles the untarring of the build artifact and bundle install, getting us to -# a state where we can then bundle exec rake the particular ci:test we want to -# run. -# -# Having this checked in in a script makes it much easier to have multiple -# acceptance jobs. It must be kept agnostic between Linux/Solaris/Windows -# builds, however. - -set -x - -# If $GEM_SOURCE is not set, fall back to rubygems.org -if [ -z $GEM_SOURCE ]; then - export GEM_SOURCE='https://rubygems.org' -fi - -echo "SHA: ${SHA}" -echo "FORK: ${FORK}" -echo "BUILD_SELECTOR: ${BUILD_SELECTOR}" -echo "PACKAGE_BUILD_STATUS: ${PACKAGE_BUILD_STATUS}" - -rm -rf acceptance -mkdir acceptance -cd acceptance -tar -xzf ../acceptance-artifacts.tar.gz - -echo "===== This artifact is from =====" -cat creator.txt - -bundle install --without=development --path=.bundle/gems - -if [[ "${platform}" =~ 'solaris' ]]; then - repo_proxy=" :repo_proxy => false," -fi - -# If the platform is Windows and $ruby_arch is set, append it -if [[ "${platform}" =~ 'win' && ! -z $ruby_arch ]]; then - platform="${platform}-${ruby_arch}" -fi - -cat > local_options.rb <<-EOF -{ - :hosts_file => 'config/nodes/${platform}.yaml', - :ssh => { - :keys => ["${HOME}/.ssh/id_rsa-acceptance"], - }, -${repo_proxy} -} -EOF - -[[ (-z "${PACKAGE_BUILD_STATUS}") || ("${PACKAGE_BUILD_STATUS}" = "success") ]] || exit 1 diff --git a/acceptance/bin/ci-package.sh b/acceptance/bin/ci-package.sh deleted file mode 100755 index 44a2c24a541..00000000000 --- a/acceptance/bin/ci-package.sh +++ /dev/null @@ -1,16 +0,0 @@ -#! 
/usr/bin/env bash - -set -e -set -x - -JOB_NAME=$1 -[[ (-z "$JOB_NAME") ]] && echo "No job name passed in" && exit 1 - -rake --trace package:implode -rake --trace package:bootstrap - -# This obtains either the sha or tag if the commit is tagged -REF=`rake pl:print_build_params |grep "^ref: " |cut -d ":" -f 2 | tr -d ' '` -rake --trace pl:jenkins:uber_build DOWNSTREAM_JOB="http://jenkins-foss.delivery.puppetlabs.net/job/$JOB_NAME/buildWithParameters?token=iheartjenkins&SHA=$REF&BUILD_SELECTOR=$BUILD_NUMBER&FORK=$GIT_FORK" - -rake ci:acceptance_artifacts SHA=$REF diff --git a/acceptance/bin/ci-pe-puppet.sh b/acceptance/bin/ci-pe-puppet.sh deleted file mode 100755 index 26729a07af5..00000000000 --- a/acceptance/bin/ci-pe-puppet.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -##!/bin/bash +e +x - -#source /usr/local/rvm/scripts/rvm -#rvm use ruby-1.9.3-p392 - -umask 0002 - -if [[ -z "$BEAKER_GEM" || -z "$tests" || -z "$platform" || -z "$layout" || -z "$pe_dist_dir" ]]; then - echo " - Usage: env bin/ci-pe-puppet.sh - The following environment variables need to be set: - 'pe_dist_dir' (to http://enterprise.delivery.puppetlabs.net/3.3/ci-ready/ for PE 3.3.x for example) - 'platform' (to one of the http://vcloud.delivery.puppetlabs.net/ platform names...'curl --url http://vcloud.delivery.puppetlabs.net/vm' for more info) - 'layout' (to '64mcda' or '32mcda' or '32m-32d-32c-32a' or '64mdc-32a' for various cpu & master/database/console node combinations) - 'tests' (to the comma separated list of tests or directory of tests to execute) - 'BEAKER_GEM' (to either 'beaker' or 'pe-beaker' which is holding some temporary puppetserver related changes) - The gem 'sqa-utils' is also required, but not part of the Gemfile as it's internal to Puppet Labs. - The script will add sqa-utils to Gemfile.local. -" - exit 1 -fi - -cd acceptance - -rm -f Gemfile.lock - -if ! grep -qs sqa-utils Gemfile.local; then - echo "gem 'sqa-utils'" >> Gemfile.local -fi - -bundle install --path=./.bundle/gems - -#export pe_version=${pe_version_override:-$pe_version} -#export pe_family=3.4 -if ! bundle exec genconfig ${platform}-${layout} > hosts.cfg; then - echo "Usage: ensure Gemfile.local exists requiring sqa-utils" -fi - -export forge_host=api-forge-aio01-petest.puppetlabs.com - -# export PRE_SUITE=./config/el6/setup/pe/pre-suite/ -export PRE_SUITE=./setup/pe/pre-suite/ - -bundle exec beaker \ - --xml \ - --debug \ - --repo-proxy \ - --config hosts.cfg \ - --pre-suite ${PRE_SUITE} \ - --tests=${tests} \ - --keyfile ${HOME}/.ssh/id_rsa-acceptance \ - --root-keys \ - --helper lib/helper.rb \ - --preserve-hosts always \ - --no-color - -RESULT=$? 
- -exit $RESULT diff --git a/acceptance/bin/ci-rerun-pe-puppet.sh b/acceptance/bin/ci-rerun-pe-puppet.sh deleted file mode 100755 index ba9311b3f68..00000000000 --- a/acceptance/bin/ci-rerun-pe-puppet.sh +++ /dev/null @@ -1,126 +0,0 @@ -#!/bin/bash -##!/bin/bash +e +x - -#source /usr/local/rvm/scripts/rvm -#rvm use ruby-1.9.3-p392 - -umask 0002 - -cd acceptance - -if [ -z "$tests" ]; then -echo "Must provide tests to run in the environment variable 'tests': got '$tests'" -exit 1 -fi - -if [ -z "$platform" ]; then - echo "'platform' not set: should be 'el-6-x86_64' or 'ubuntu-1204-i386' or some other platform string acceptable to Beaker" - exit 1 -fi - -if [ -z "$1" ]; then -echo "Must provide the hostname: got '$1'" -exit 1 -fi - -domain=${domain:-delivery.puppetlabs.net} - -if [ -z "$2" ]; then - - cat > hosts-immediate.cfg << EOHOSTS ---- -HOSTS: - ${1}.${domain}: - roles: - - agent - - master - - dashboard - - database - platform: ${platform} -CONFIG: - nfs_server: none - consoleport: 443 - datastore: instance0 - folder: Delivery/Quality Assurance/Enterprise/Dynamic - resourcepool: delivery/Quality Assurance/Enterprise/Dynamic - pooling_api: http://vcloud.delivery.puppetlabs.net/ -EOHOSTS - -else - - if [ -z "$3" ]; then - - cat > hosts-immediate.cfg << EOHOSTS ---- -HOSTS: - ${1}.${domain}: - roles: - - agent - - dashboard - - database - - master - platform: ${platform} - ${2}.${domain}: - roles: - - agent - platform: ${platform} -CONFIG: - nfs_server: none - consoleport: 443 - datastore: instance0 - folder: Delivery/Quality Assurance/Enterprise/Dynamic - resourcepool: delivery/Quality Assurance/Enterprise/Dynamic - pooling_api: http://vcloud.delivery.puppetlabs.net/ -EOHOSTS - - else - - cat > hosts-immediate.cfg << EOHOSTS ---- -HOSTS: - ${1}.${domain}: - roles: - - master - - dashboard - - database - platform: ${platform} - ${2}.${domain}: - roles: - - agent - platform: ${platform} - ${3}.${domain}: - roles: - - agent - platform: ${platform} -CONFIG: - nfs_server: none - consoleport: 443 - datastore: instance0 - folder: Delivery/Quality Assurance/Enterprise/Dynamic - resourcepool: delivery/Quality Assurance/Enterprise/Dynamic - pooling_api: http://vcloud.delivery.puppetlabs.net/ -EOHOSTS - - fi - -fi - -export forge_host=api-forge-aio01-petest.puppetlabs.com - -bundle exec beaker \ - --xml \ - --debug \ - --repo-proxy \ - --config hosts-immediate.cfg \ - --pre-suite setup/common/pre-suite/110_SetPEPuppetService.rb \ - --tests=${tests} \ - --keyfile ${HOME}/.ssh/id_rsa-acceptance \ - --root-keys \ - --helper lib/helper.rb \ - --preserve-hosts onfail \ - --no-color \ - --no-validate - -RESULT=$? 
- -exit $RESULT diff --git a/acceptance/config/aio/options.rb b/acceptance/config/aio/options.rb index fbcd3a69989..d2d69edfd05 100644 --- a/acceptance/config/aio/options.rb +++ b/acceptance/config/aio/options.rb @@ -1,14 +1,11 @@ { - :type => 'aio', - :is_puppetserver => true, - :puppetservice => 'puppetserver', - :'puppetserver-confdir' => '/etc/puppetlabs/puppetserver/conf.d', - :pre_suite => [ - 'setup/aio/pre-suite/010_Install.rb', - 'setup/aio/pre-suite/015_PackageHostsPresets.rb', - 'setup/common/pre-suite/025_StopFirewall.rb', - 'setup/common/pre-suite/040_ValidateSignCert.rb', - 'setup/aio/pre-suite/045_EnsureMasterStartedOnPassenger.rb', - 'setup/common/pre-suite/070_InstallCACerts.rb', + :type => 'aio', + 'is_puppetserver' => true, + 'use-service' => true, # use service scripts to start/stop stuff + 'puppetservice' => 'puppetserver', + 'puppetserver-confdir' => '/etc/puppetlabs/puppetserver/conf.d', + 'puppetserver-config' => '/etc/puppetlabs/puppetserver/conf.d/puppetserver.conf', + :post_suite => [ + 'teardown/common/099_Archive_Logs.rb', ], } diff --git a/acceptance/config/gem/options.rb b/acceptance/config/gem/options.rb new file mode 100644 index 00000000000..baf64c55319 --- /dev/null +++ b/acceptance/config/gem/options.rb @@ -0,0 +1,4 @@ +{ + # Use `git` so that we have a sane ruby environment + :type => 'git', +} diff --git a/acceptance/config/git/options.rb b/acceptance/config/git/options.rb index f841b593e0a..7c925b7c771 100644 --- a/acceptance/config/git/options.rb +++ b/acceptance/config/git/options.rb @@ -1,17 +1,11 @@ { - :install => [ - 'facter#stable', - 'hiera#stable', + :type => 'git', + :install => [ 'puppet', ], - :pre_suite => [ - 'setup/git/pre-suite/000_EnvSetup.rb', - 'setup/git/pre-suite/010_TestSetup.rb', - 'setup/git/pre-suite/020_PuppetUserAndGroup.rb', - 'setup/common/pre-suite/025_StopFirewall.rb', - 'setup/git/pre-suite/030_PuppetMasterSanity.rb', - 'setup/common/pre-suite/040_ValidateSignCert.rb', - 'setup/git/pre-suite/060_InstallModules.rb', - 'setup/common/pre-suite/070_InstallCACerts.rb', - ], + 'is_puppetserver' => false, + 'use-service' => true, # use service scripts to start/stop stuff + 'puppetservice' => 'puppetserver', + 'puppetserver-confdir' => '/etc/puppetlabs/puppetserver/conf.d', + 'puppetserver-config' => '/etc/puppetlabs/puppetserver/conf.d/puppetserver.conf' } diff --git a/acceptance/config/nodes/aix-53-power.yaml b/acceptance/config/nodes/aix-53-power.yaml new file mode 100644 index 00000000000..c5b731893db --- /dev/null +++ b/acceptance/config/nodes/aix-53-power.yaml @@ -0,0 +1,16 @@ +--- +HOSTS: + master: + roles: + - master + platform: el-7-x86_64 + hypervisor: vmpooler + template: redhat-7-x86_64 + pe-aix-53-acceptance: + roles: + - agent + platform: aix-5.3-power + hypervisor: none + vmhostname: pe-aix-53-acceptance.delivery.puppetlabs.net +CONFIG: + pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/aix-61-power.yaml b/acceptance/config/nodes/aix-61-power.yaml new file mode 100644 index 00000000000..7fed705a78a --- /dev/null +++ b/acceptance/config/nodes/aix-61-power.yaml @@ -0,0 +1,16 @@ +--- +HOSTS: + master: + roles: + - master + platform: el-7-x86_64 + hypervisor: vmpooler + template: redhat-7-x86_64 + pe-aix-61-acceptance: + roles: + - agent + platform: aix-6.1-power + hypervisor: none + vmhostname: pe-aix-61-acceptance.delivery.puppetlabs.net +CONFIG: + pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/aix-71-power.yaml 
b/acceptance/config/nodes/aix-71-power.yaml new file mode 100644 index 00000000000..b1b73d3a6cc --- /dev/null +++ b/acceptance/config/nodes/aix-71-power.yaml @@ -0,0 +1,16 @@ +--- +HOSTS: + master: + roles: + - master + platform: el-7-x86_64 + hypervisor: vmpooler + template: redhat-7-x86_64 + pe-aix-71-acceptance: + roles: + - agent + platform: aix-7.1-power + hypervisor: none + vmhostname: pe-aix-71-acceptance.delivery.puppetlabs.net +CONFIG: + pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/centos-5-i386.yaml b/acceptance/config/nodes/centos-5-i386.yaml deleted file mode 100644 index b83cd77bfc7..00000000000 --- a/acceptance/config/nodes/centos-5-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: centos-5-i386 - hypervisor: vcloud - template: centos-5-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/centos-5-x86_64.yaml b/acceptance/config/nodes/centos-5-x86_64.yaml deleted file mode 100644 index df6820885b3..00000000000 --- a/acceptance/config/nodes/centos-5-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: centos-5-x86_64 - hypervisor: vcloud - template: centos-5-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/centos-6-i386.yaml b/acceptance/config/nodes/centos-6-i386.yaml deleted file mode 100644 index 006b6319843..00000000000 --- a/acceptance/config/nodes/centos-6-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: centos-6-i386 - hypervisor: vcloud - template: centos-6-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/centos-6-x86_64.yaml b/acceptance/config/nodes/centos-6-x86_64.yaml deleted file mode 100644 index f5ba6cb900a..00000000000 --- a/acceptance/config/nodes/centos-6-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: centos-6-x86_64 - hypervisor: vcloud - template: centos-6-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/debian-6-i386.yaml b/acceptance/config/nodes/debian-6-i386.yaml deleted file mode 100644 index 8adf7c4d188..00000000000 --- a/acceptance/config/nodes/debian-6-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: debian-squeeze-i386 - hypervisor: vcloud - 
template: debian-6-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/debian-6-x86_64.yaml b/acceptance/config/nodes/debian-6-x86_64.yaml deleted file mode 100644 index 59ac928fa2a..00000000000 --- a/acceptance/config/nodes/debian-6-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: debian-squeeze-x86_64 - hypervisor: vcloud - template: debian-6-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/debian-7-i386.yaml b/acceptance/config/nodes/debian-7-i386.yaml deleted file mode 100644 index 1a51d43d130..00000000000 --- a/acceptance/config/nodes/debian-7-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: debian-wheezy-i386 - hypervisor: vcloud - template: debian-7-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/debian-7-passenger.yaml b/acceptance/config/nodes/debian-7-passenger.yaml deleted file mode 100644 index c8cd1ee1864..00000000000 --- a/acceptance/config/nodes/debian-7-passenger.yaml +++ /dev/null @@ -1,18 +0,0 @@ -HOSTS: - master: - roles: - - master - platform: debian-wheezy-x86_64 - hypervisor: vcloud - template: debian-7-x86_64 - agent: - roles: - - agent - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/debian-7-x86_64.yaml b/acceptance/config/nodes/debian-7-x86_64.yaml deleted file mode 100644 index 9cc1f310044..00000000000 --- a/acceptance/config/nodes/debian-7-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: debian-wheezy-x86_64 - hypervisor: vcloud - template: debian-7-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/debian-8-i386.yaml b/acceptance/config/nodes/debian-8-i386.yaml deleted file mode 100644 index ee0849efbd0..00000000000 --- a/acceptance/config/nodes/debian-8-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: debian-jessie-i386 - hypervisor: vcloud - template: debian-8-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git 
a/acceptance/config/nodes/debian-8-x86_64.yaml b/acceptance/config/nodes/debian-8-x86_64.yaml deleted file mode 100644 index af86a5d6bc4..00000000000 --- a/acceptance/config/nodes/debian-8-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: debian-jessie-x86_64 - hypervisor: vcloud - template: debian-8-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/fedora-20-i386.yaml b/acceptance/config/nodes/fedora-20-i386.yaml deleted file mode 100644 index cc64c4d6510..00000000000 --- a/acceptance/config/nodes/fedora-20-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: fedora-20-i386 - hypervisor: vcloud - template: fedora-20-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/fedora-20-x86_64.yaml b/acceptance/config/nodes/fedora-20-x86_64.yaml deleted file mode 100644 index 18a94a26987..00000000000 --- a/acceptance/config/nodes/fedora-20-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: fedora-20-x86_64 - hypervisor: vcloud - template: fedora-20-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/fedora-21-i386.yaml b/acceptance/config/nodes/fedora-21-i386.yaml deleted file mode 100644 index d462c9f03e0..00000000000 --- a/acceptance/config/nodes/fedora-21-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: fedora-21-i386 - hypervisor: vcloud - template: fedora-21-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/fedora-21-x86_64.yaml b/acceptance/config/nodes/fedora-21-x86_64.yaml deleted file mode 100644 index ad9630adb74..00000000000 --- a/acceptance/config/nodes/fedora-21-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: fedora-21-x86_64 - hypervisor: vcloud - template: fedora-21-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/fedora21.yaml b/acceptance/config/nodes/fedora21.yaml deleted file mode 100644 index 254af1dc280..00000000000 --- a/acceptance/config/nodes/fedora21.yaml +++ /dev/null @@ -1,19 +0,0 @@ -HOSTS: - master: - roles: - - master - - agent 
- platform: fedora-21-x86_64 - hypervisor: vcloud - template: fedora-21-x86_64 - agent: - roles: - - agent - platform: fedora-21-i386 - hypervisor: vcloud - template: fedora-21-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/gem.yaml b/acceptance/config/nodes/gem.yaml new file mode 100644 index 00000000000..477d99f230c --- /dev/null +++ b/acceptance/config/nodes/gem.yaml @@ -0,0 +1,30 @@ +--- +HOSTS: + win-2012r2-rubyx86: + roles: + - agent + platform: windows-2012r2-64 + ruby_arch: x86 + hypervisor: vmpooler + template: win-2012r2-x86_64 + win-2012r2-rubyx64: + roles: + - agent + platform: windows-2012r2-64 + ruby_arch: x64 + hypervisor: vmpooler + template: win-2012r2-x86_64 + osx-1010: + roles: + - agent + platform: osx-10.10-x86_64 + hypervisor: vmpooler + template: osx-1010-x86_64 + redhat-7: + roles: + - agent + platform: el-7-x86_64 + hypervisor: vmpooler + template: redhat-7-x86_64 +CONFIG: + pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/huaweios-6-powerpc.yaml b/acceptance/config/nodes/huaweios-6-powerpc.yaml new file mode 100644 index 00000000000..62a88e3e4b4 --- /dev/null +++ b/acceptance/config/nodes/huaweios-6-powerpc.yaml @@ -0,0 +1,15 @@ +--- +HOSTS: + master: + roles: + - master + platform: el-7-x86_64 + hypervisor: vmpooler + template: redhat-7-x86_64 + huawei-ce6850-2-debian-vm-eth0.ops.puppetlabs.net: + roles: + - agent + platform: huaweios-6-powerpc + hypervisor: none +CONFIG: + pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/redhat-5-i386.yaml b/acceptance/config/nodes/redhat-5-i386.yaml deleted file mode 100644 index 6e69736be48..00000000000 --- a/acceptance/config/nodes/redhat-5-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: el-5-i386 - hypervisor: vcloud - template: redhat-5-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/redhat-5-x86_64.yaml b/acceptance/config/nodes/redhat-5-x86_64.yaml deleted file mode 100644 index cca4d9982f9..00000000000 --- a/acceptance/config/nodes/redhat-5-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: el-5-x86_64 - hypervisor: vcloud - template: redhat-5-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/redhat-6-i386.yaml b/acceptance/config/nodes/redhat-6-i386.yaml deleted file mode 100644 index 83478703ef1..00000000000 --- a/acceptance/config/nodes/redhat-6-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: el-6-i386 - hypervisor: vcloud - template: redhat-6-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: 
Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/redhat-6-passenger.yaml b/acceptance/config/nodes/redhat-6-passenger.yaml deleted file mode 100644 index 6ee6e8de0f7..00000000000 --- a/acceptance/config/nodes/redhat-6-passenger.yaml +++ /dev/null @@ -1,18 +0,0 @@ -HOSTS: - master: - roles: - - master - platform: el-6-x86_64 - hypervisor: vcloud - template: redhat-6-x86_64 - agent: - roles: - - agent - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/redhat-6-x86_64.yaml b/acceptance/config/nodes/redhat-6-x86_64.yaml deleted file mode 100644 index 36f95c32e9a..00000000000 --- a/acceptance/config/nodes/redhat-6-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: el-6-x86_64 - hypervisor: vcloud - template: redhat-6-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/redhat-7-passenger.yaml b/acceptance/config/nodes/redhat-7-passenger.yaml deleted file mode 100644 index 507927dc9d6..00000000000 --- a/acceptance/config/nodes/redhat-7-passenger.yaml +++ /dev/null @@ -1,18 +0,0 @@ -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/redhat-7-x86_64.yaml b/acceptance/config/nodes/redhat-7-x86_64.yaml deleted file mode 100644 index 9593c6e8a63..00000000000 --- a/acceptance/config/nodes/redhat-7-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/solaris-10-sparc.yaml b/acceptance/config/nodes/solaris-10-sparc.yaml new file mode 100644 index 00000000000..d41d858307a --- /dev/null +++ b/acceptance/config/nodes/solaris-10-sparc.yaml @@ -0,0 +1,17 @@ +--- +HOSTS: + master: + roles: + - master + platform: el-7-x86_64 + hypervisor: vmpooler + template: redhat-7-x86_64 + solaris-10-sparc: + roles: + - agent + platform: solaris-10-sparc + hypervisor: none + ip: 10.32.121.124 + vmhostname: sol10-1.delivery.puppetlabs.net +CONFIG: + pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/solaris-11-sparc.yaml b/acceptance/config/nodes/solaris-11-sparc.yaml new file mode 100644 index 00000000000..54895dd96af --- /dev/null +++ b/acceptance/config/nodes/solaris-11-sparc.yaml @@ -0,0 +1,17 @@ +--- +HOSTS: + 
master: + roles: + - master + platform: el-7-x86_64 + hypervisor: vmpooler + template: redhat-7-x86_64 + solaris-11-sparc: + roles: + - agent + platform: solaris-11-sparc + hypervisor: none + ip: 10.32.114.245 + vmhostname: sol11-1.delivery.puppetlabs.net +CONFIG: + pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/trusty_and_precise.yaml b/acceptance/config/nodes/trusty_and_precise.yaml deleted file mode 100644 index e56b437d101..00000000000 --- a/acceptance/config/nodes/trusty_and_precise.yaml +++ /dev/null @@ -1,19 +0,0 @@ -HOSTS: - master: - roles: - - master - - agent - platform: ubuntu-precise-x86_64 - hypervisor: vcloud - template: ubuntu-1204-x86_64 - agent: - roles: - - agent - platform: ubuntu-trusty-i386 - hypervisor: vcloud - template: ubuntu-1404-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1204-i386.yaml b/acceptance/config/nodes/ubuntu-1204-i386.yaml deleted file mode 100644 index 23d2ced01ef..00000000000 --- a/acceptance/config/nodes/ubuntu-1204-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: ubuntu-precise-i386 - hypervisor: vcloud - template: ubuntu-1204-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1204-passenger.yaml b/acceptance/config/nodes/ubuntu-1204-passenger.yaml deleted file mode 100644 index aa1fbdc0e06..00000000000 --- a/acceptance/config/nodes/ubuntu-1204-passenger.yaml +++ /dev/null @@ -1,18 +0,0 @@ -HOSTS: - master: - roles: - - master - platform: ubuntu-precise-x86_64 - hypervisor: vcloud - template: ubuntu-1204-x86_64 - agent: - roles: - - agent - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1204-x86_64.yaml b/acceptance/config/nodes/ubuntu-1204-x86_64.yaml deleted file mode 100644 index e200dffe953..00000000000 --- a/acceptance/config/nodes/ubuntu-1204-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: ubuntu-precise-x86_64 - hypervisor: vcloud - template: ubuntu-1204-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1404-i386.yaml b/acceptance/config/nodes/ubuntu-1404-i386.yaml deleted file mode 100644 index 16931ad7b6f..00000000000 --- a/acceptance/config/nodes/ubuntu-1404-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: ubuntu-trusty-i386 - hypervisor: vcloud - template: ubuntu-1404-i386 -CONFIG: - datastore: instance0 - resourcepool: 
delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1404-passenger.yaml b/acceptance/config/nodes/ubuntu-1404-passenger.yaml deleted file mode 100644 index a6188b9e310..00000000000 --- a/acceptance/config/nodes/ubuntu-1404-passenger.yaml +++ /dev/null @@ -1,18 +0,0 @@ -HOSTS: - master: - roles: - - master - platform: ubuntu-trusty-x86_64 - hypervisor: vcloud - template: ubuntu-1404-x86_64 - agent: - roles: - - agent - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1404-x86_64.yaml b/acceptance/config/nodes/ubuntu-1404-x86_64.yaml deleted file mode 100644 index c953b50cbfd..00000000000 --- a/acceptance/config/nodes/ubuntu-1404-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: ubuntu-trusty-x86_64 - hypervisor: vcloud - template: ubuntu-1404-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1410-i386.yaml b/acceptance/config/nodes/ubuntu-1410-i386.yaml deleted file mode 100644 index 4f5aeab73b3..00000000000 --- a/acceptance/config/nodes/ubuntu-1410-i386.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: ubuntu-utopic-i386 - hypervisor: vcloud - template: ubuntu-1410-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/ubuntu-1410-x86_64.yaml b/acceptance/config/nodes/ubuntu-1410-x86_64.yaml deleted file mode 100644 index d13126e3e97..00000000000 --- a/acceptance/config/nodes/ubuntu-1410-x86_64.yaml +++ /dev/null @@ -1,19 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent: - roles: - - agent - platform: ubuntu-utopic-x86_64 - hypervisor: vcloud - template: ubuntu-1410-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2003r2x64-rubyx64.yaml b/acceptance/config/nodes/win2003r2x64-rubyx64.yaml deleted file mode 100644 index 9ded92d853d..00000000000 --- a/acceptance/config/nodes/win2003r2x64-rubyx64.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2003r2-x86_64-rubyx64: - roles: - - agent - platform: windows-2003r2-64 - ruby_arch: x64 - hypervisor: vcloud - template: win-2003r2-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: 
http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2003r2x64-rubyx86.yaml b/acceptance/config/nodes/win2003r2x64-rubyx86.yaml deleted file mode 100644 index 788dc1f3a49..00000000000 --- a/acceptance/config/nodes/win2003r2x64-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2003r2-x86_64-rubyx86: - roles: - - agent - platform: windows-2003r2-64 - ruby_arch: x86 - hypervisor: vcloud - template: win-2003r2-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2003r2x86-rubyx86.yaml b/acceptance/config/nodes/win2003r2x86-rubyx86.yaml deleted file mode 100644 index 6c8a45f0ed2..00000000000 --- a/acceptance/config/nodes/win2003r2x86-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2003r2-i386: - roles: - - agent - platform: windows-2003r2-32 - ruby_arch: x86 - hypervisor: vcloud - template: win-2003r2-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2003x64-rubyx64.yaml b/acceptance/config/nodes/win2003x64-rubyx64.yaml deleted file mode 100644 index cbd338c6204..00000000000 --- a/acceptance/config/nodes/win2003x64-rubyx64.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2003-x86_64-rubyx64: - roles: - - agent - platform: windows-2003-64 - ruby_arch: x64 - hypervisor: vcloud - template: win-2003-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2003x64-rubyx86.yaml b/acceptance/config/nodes/win2003x64-rubyx86.yaml deleted file mode 100644 index cdff843be9b..00000000000 --- a/acceptance/config/nodes/win2003x64-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2003-x86_64-rubyx86: - roles: - - agent - platform: windows-2003-64 - ruby_arch: x86 - hypervisor: vcloud - template: win-2003-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2003x86-rubyx86.yaml b/acceptance/config/nodes/win2003x86-rubyx86.yaml deleted file mode 100644 index ab22574304f..00000000000 --- a/acceptance/config/nodes/win2003x86-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2003-i386: - roles: - - agent - platform: windows-2003-32 - ruby_arch: x86 - hypervisor: vcloud - template: win-2003-i386 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: 
http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2008-rubyx64.yaml b/acceptance/config/nodes/win2008-rubyx64.yaml deleted file mode 100644 index c7c183724ff..00000000000 --- a/acceptance/config/nodes/win2008-rubyx64.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2008-x86_64-rubyx64: - roles: - - agent - platform: windows-2008-64 - ruby_arch: x64 - hypervisor: vcloud - template: win-2008-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2008-rubyx86.yaml b/acceptance/config/nodes/win2008-rubyx86.yaml deleted file mode 100644 index a063dfe4637..00000000000 --- a/acceptance/config/nodes/win2008-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2008-x86_64-rubyx86: - roles: - - agent - platform: windows-2008-64 - ruby_arch: x86 - hypervisor: vcloud - template: win-2008-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2008r2-rubyx64.yaml b/acceptance/config/nodes/win2008r2-rubyx64.yaml deleted file mode 100644 index f6026ef08a9..00000000000 --- a/acceptance/config/nodes/win2008r2-rubyx64.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2008r2-x86_64-rubyx64: - roles: - - agent - platform: windows-2008r2-64 - ruby_arch: x64 - hypervisor: vcloud - template: win-2008r2-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2008r2-rubyx86.yaml b/acceptance/config/nodes/win2008r2-rubyx86.yaml deleted file mode 100644 index aabb656384f..00000000000 --- a/acceptance/config/nodes/win2008r2-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2008r2-x86_64-rubyx86: - roles: - - agent - platform: windows-2008r2-64 - ruby_arch: x86 - hypervisor: vcloud - template: win-2008r2-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2012-rubyx64.yaml b/acceptance/config/nodes/win2012-rubyx64.yaml deleted file mode 100644 index c5db8b36257..00000000000 --- a/acceptance/config/nodes/win2012-rubyx64.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2012-x86_64-rubyx64: - roles: - - agent - platform: windows-2012-64 - ruby_arch: x64 - hypervisor: vcloud - template: win-2012-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git 
a/acceptance/config/nodes/win2012-rubyx86.yaml b/acceptance/config/nodes/win2012-rubyx86.yaml deleted file mode 100644 index 04190822859..00000000000 --- a/acceptance/config/nodes/win2012-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2012-x86_64-rubyx86: - roles: - - agent - platform: windows-2012-64 - ruby_arch: x86 - hypervisor: vcloud - template: win-2012-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2012r2-rubyx64.yaml b/acceptance/config/nodes/win2012r2-rubyx64.yaml deleted file mode 100644 index 1d6ba8b65e2..00000000000 --- a/acceptance/config/nodes/win2012r2-rubyx64.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2012r2-x86_64-rubyx64: - roles: - - agent - platform: windows-2012r2-64 - ruby_arch: x64 - hypervisor: vcloud - template: win-2012r2-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/nodes/win2012r2-rubyx86.yaml b/acceptance/config/nodes/win2012r2-rubyx86.yaml deleted file mode 100644 index 5db29077738..00000000000 --- a/acceptance/config/nodes/win2012r2-rubyx86.yaml +++ /dev/null @@ -1,20 +0,0 @@ ---- -HOSTS: - master: - roles: - - master - platform: el-7-x86_64 - hypervisor: vcloud - template: redhat-7-x86_64 - agent-2012r2-x86_64-rubyx86: - roles: - - agent - platform: windows-2012r2-64 - ruby_arch: x86 - hypervisor: vcloud - template: win-2012r2-x86_64 -CONFIG: - datastore: instance0 - resourcepool: delivery/Quality Assurance/FOSS/Dynamic - folder: Delivery/Quality Assurance/FOSS/Dynamic - pooling_api: http://vmpooler.delivery.puppetlabs.net/ diff --git a/acceptance/config/packages/options.rb b/acceptance/config/packages/options.rb deleted file mode 100644 index 4b563435edb..00000000000 --- a/acceptance/config/packages/options.rb +++ /dev/null @@ -1,11 +0,0 @@ -{ - :type => 'foss-packages', - :pre_suite => [ - 'setup/packages/pre-suite/010_Install.rb', - 'setup/packages/pre-suite/015_PackageHostsPresets.rb', - 'setup/common/pre-suite/025_StopFirewall.rb', - 'setup/common/pre-suite/040_ValidateSignCert.rb', - 'setup/packages/pre-suite/045_EnsureMasterStartedOnPassenger.rb', - 'setup/common/pre-suite/070_InstallCACerts.rb', - ], -} diff --git a/acceptance/config/passenger/options.rb b/acceptance/config/passenger/options.rb deleted file mode 100644 index 5a2112a77dd..00000000000 --- a/acceptance/config/passenger/options.rb +++ /dev/null @@ -1,13 +0,0 @@ -{ - :type => 'aio', - :passenger => true, - :pre_suite => [ - 'setup/aio/pre-suite/010_Install.rb', - 'setup/passenger/pre-suite/015_PackageHostsPresets.rb', - 'setup/common/pre-suite/025_StopFirewall.rb', - 'setup/passenger/pre-suite/030_ConfigurePassenger.rb', - 'setup/common/pre-suite/040_ValidateSignCert.rb', - 'setup/aio/pre-suite/045_EnsureMasterStartedOnPassenger.rb', - 'setup/common/pre-suite/070_InstallCACerts.rb', - ], -} diff --git a/acceptance/fixtures/MockInstaller.cs b/acceptance/fixtures/MockInstaller.cs new file mode 100644 index 00000000000..b6c0a873463 --- /dev/null +++ 
b/acceptance/fixtures/MockInstaller.cs @@ -0,0 +1,42 @@ +/* + +The MockInstaller is a C# class representing a stubbed exe installer. We will +compile this class into an installable .exe file. + +A MockInstaller _MUST_ come alongside a MockUninstaller, so we can uninstall the +fake package from the system + +*/ +using System; + +public class MockInstaller +{ public static void Main() + { + try + { + %{install_commands} + } + catch { + Environment.Exit(1003); + } + string keyName = "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall"; + Microsoft.Win32.RegistryKey key; + key = Microsoft.Win32.Registry.LocalMachine.CreateSubKey(keyName + "\\%{package_display_name}"); + /* + Puppet deems an exe package 'installable' by identifying whether or not the following registry + values exist in the Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\PackageName key: + + * DisplayName + * DisplayVersion + * UninstallString + + So we must set those values in the registry for this to be an 'installable package' manageable by + puppet. + */ + key.SetValue("DisplayName", "%{package_display_name}"); + key.SetValue("DisplayVersion", "1.0.0"); + key.SetValue("UninstallString", @"%{uninstaller_location}"); + key.Close(); + Console.WriteLine("Installing..."); + } +} diff --git a/acceptance/fixtures/MockService.cs b/acceptance/fixtures/MockService.cs new file mode 100644 index 00000000000..e6a756e8f86 --- /dev/null +++ b/acceptance/fixtures/MockService.cs @@ -0,0 +1,77 @@ +/* + +The MockService is a C# class representing a stubbed service. We will +compile this class into the service's .exe file. + +Here, we implement four methods: + * OnStart -- called when SCM starts the service + * OnPause -- called when SCM pauses the service + * OnContinue -- called when SCM resumes a paused service + * OnStop -- called when SCM stops a service + +Before calling one of these 'On' methods, the ServiceBase class sets +the service state to the corresponding PENDING state. The service state +is in this PENDING state until the 'On' method is finished, whereby it is +then transitioned into the corresponding final state. Thus if we sleep for a +few seconds in the 'On' method, then note that SCM will report our service +state as being in the PENDING state while we're asleep. For example, if the +'On' method is 'OnStart', the service state is set to START_PENDING before +calling 'OnStart', is START_PENDING while executing 'OnStart', and then is set +to RUNNING after exiting 'OnStart'. + +When testing the Windows service provider, we really want to test to ensure +that it handles the state transitions correctly. For example, we want to +check that: + * It waits for the appropriate PENDING state to finish + * It sets the service state to the appropriate final state + +The reason we want to do this is because our service provider is communicating +with SCM directly, which does not care how the service implements these +transitions so long as it implements them. C#'s ServiceBase class implements +these state transitions for us. Thus by going to sleep in all of our 'On' methods, +we simulate transitioning to the corresponding PENDING state. When we wake-up +and exit the 'On' method, we will transition to the appropriate final state. + +NOTE: Normally, you're supposed to have the service thread in a separate process. +The 'On' methods in this class would send signals to the service thread and then wait +for those signals to be processed. 
Sending and waiting for these signals is quite +hard and unnecessary for our use-case, which is why our MockService does not have +the service thread. + +*/ + +using System; +using System.ServiceProcess; + +public class MockService : ServiceBase { + public static void Main() { + System.ServiceProcess.ServiceBase.Run(new MockService()); + } + + public MockService() { + ServiceName = "%{service_name}"; + CanStop = true; + CanPauseAndContinue = true; + } + + private void StubPendingTransition(int seconds) { + RequestAdditionalTime(2000); + System.Threading.Thread.Sleep(seconds * 1000); + } + + protected override void OnStart(string [] args) { + StubPendingTransition(%{start_sleep}); + } + + protected override void OnPause() { + StubPendingTransition(%{pause_sleep}); + } + + protected override void OnContinue() { + StubPendingTransition(%{continue_sleep}); + } + + protected override void OnStop() { + StubPendingTransition(%{stop_sleep}); + } +} diff --git a/acceptance/fixtures/MockUninstaller.cs b/acceptance/fixtures/MockUninstaller.cs new file mode 100644 index 00000000000..99d144e202e --- /dev/null +++ b/acceptance/fixtures/MockUninstaller.cs @@ -0,0 +1,32 @@ +/* + +The MockUninstaller is a C# class representing a stubbed exe uninstaller. We will +compile this class into an usable .exe file. + +A MockInstaller _MUST_ come alongside a MockUninstaller, so we can uninstall the +fake package from the system + +*/ +using System; + +public class MockInstaller +{ public static void Main() + { + try + { + %{uninstall_commands} + } + catch { + Environment.Exit(1003); + } + string keyName = "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall"; + Console.WriteLine("Uninstalling..."); + /* + Remove the entire registry key created by the installer exe + */ + using (Microsoft.Win32.RegistryKey _key = Microsoft.Win32.Registry.LocalMachine.OpenSubKey(keyName, true)) + { + _key.DeleteSubKeyTree("%{package_display_name}"); + } + } +} diff --git a/acceptance/fixtures/debian-repo/Packages.gz b/acceptance/fixtures/debian-repo/Packages.gz new file mode 100644 index 00000000000..697a14af9bf Binary files /dev/null and b/acceptance/fixtures/debian-repo/Packages.gz differ diff --git a/acceptance/fixtures/debian-repo/Release b/acceptance/fixtures/debian-repo/Release new file mode 100644 index 00000000000..1bde0c11c64 --- /dev/null +++ b/acceptance/fixtures/debian-repo/Release @@ -0,0 +1,5 @@ +Archive: stable +Component: contrib +Origin: Puppet +Label: Puppet +Architecture: i386 \ No newline at end of file diff --git a/acceptance/fixtures/debian-repo/helloworld_1.0-1.deb b/acceptance/fixtures/debian-repo/helloworld_1.0-1.deb new file mode 100644 index 00000000000..549c5442ac2 Binary files /dev/null and b/acceptance/fixtures/debian-repo/helloworld_1.0-1.deb differ diff --git a/acceptance/fixtures/debian-repo/helloworld_1.19-1.deb b/acceptance/fixtures/debian-repo/helloworld_1.19-1.deb new file mode 100644 index 00000000000..3ff2b8267db Binary files /dev/null and b/acceptance/fixtures/debian-repo/helloworld_1.19-1.deb differ diff --git a/acceptance/fixtures/debian-repo/helloworld_2.0-1.deb b/acceptance/fixtures/debian-repo/helloworld_2.0-1.deb new file mode 100644 index 00000000000..affd21a376e Binary files /dev/null and b/acceptance/fixtures/debian-repo/helloworld_2.0-1.deb differ diff --git a/acceptance/fixtures/el-repo/RPMS/kernel-devel-puppet-3.10.0-1062.1.1.noarch.rpm b/acceptance/fixtures/el-repo/RPMS/kernel-devel-puppet-3.10.0-1062.1.1.noarch.rpm new file mode 100644 index 00000000000..a961ce8a678 Binary 
files /dev/null and b/acceptance/fixtures/el-repo/RPMS/kernel-devel-puppet-3.10.0-1062.1.1.noarch.rpm differ diff --git a/acceptance/fixtures/el-repo/RPMS/kernel-devel-puppet-3.10.0-1062.4.3.noarch.rpm b/acceptance/fixtures/el-repo/RPMS/kernel-devel-puppet-3.10.0-1062.4.3.noarch.rpm new file mode 100644 index 00000000000..2a0171133ce Binary files /dev/null and b/acceptance/fixtures/el-repo/RPMS/kernel-devel-puppet-3.10.0-1062.4.3.noarch.rpm differ diff --git a/acceptance/fixtures/el-repo/repodata/0427a2b1b650922e9e7359c09be8820caa3b1ae72efef4998fd7a50fbd3a858c-primary.xml.gz b/acceptance/fixtures/el-repo/repodata/0427a2b1b650922e9e7359c09be8820caa3b1ae72efef4998fd7a50fbd3a858c-primary.xml.gz new file mode 100644 index 00000000000..ae0c4ef25bd Binary files /dev/null and b/acceptance/fixtures/el-repo/repodata/0427a2b1b650922e9e7359c09be8820caa3b1ae72efef4998fd7a50fbd3a858c-primary.xml.gz differ diff --git a/acceptance/fixtures/el-repo/repodata/12382dd1ca2ce49561d698430501e038a8694b64a5d69bdb7133bff1be5bd4ab-filelists.xml.gz b/acceptance/fixtures/el-repo/repodata/12382dd1ca2ce49561d698430501e038a8694b64a5d69bdb7133bff1be5bd4ab-filelists.xml.gz new file mode 100644 index 00000000000..78056d1215e Binary files /dev/null and b/acceptance/fixtures/el-repo/repodata/12382dd1ca2ce49561d698430501e038a8694b64a5d69bdb7133bff1be5bd4ab-filelists.xml.gz differ diff --git a/acceptance/fixtures/el-repo/repodata/4427b13c52edea24fc19776198a99611464b3c67f7828aeed8c5d20f3d8b1c02-filelists.sqlite.bz2 b/acceptance/fixtures/el-repo/repodata/4427b13c52edea24fc19776198a99611464b3c67f7828aeed8c5d20f3d8b1c02-filelists.sqlite.bz2 new file mode 100644 index 00000000000..5bdf1915924 Binary files /dev/null and b/acceptance/fixtures/el-repo/repodata/4427b13c52edea24fc19776198a99611464b3c67f7828aeed8c5d20f3d8b1c02-filelists.sqlite.bz2 differ diff --git a/acceptance/fixtures/el-repo/repodata/653202d291344674c0e6c2547647d09c2b0044ec96986b9c62f74dc49f15a3db-other.sqlite.bz2 b/acceptance/fixtures/el-repo/repodata/653202d291344674c0e6c2547647d09c2b0044ec96986b9c62f74dc49f15a3db-other.sqlite.bz2 new file mode 100644 index 00000000000..96ab1db612e Binary files /dev/null and b/acceptance/fixtures/el-repo/repodata/653202d291344674c0e6c2547647d09c2b0044ec96986b9c62f74dc49f15a3db-other.sqlite.bz2 differ diff --git a/acceptance/fixtures/el-repo/repodata/68861daea8ff469f3418abd08697b408df11c8079b0b24178a4e2b4bd8a7102e-primary.sqlite.bz2 b/acceptance/fixtures/el-repo/repodata/68861daea8ff469f3418abd08697b408df11c8079b0b24178a4e2b4bd8a7102e-primary.sqlite.bz2 new file mode 100644 index 00000000000..180d15cf6ee Binary files /dev/null and b/acceptance/fixtures/el-repo/repodata/68861daea8ff469f3418abd08697b408df11c8079b0b24178a4e2b4bd8a7102e-primary.sqlite.bz2 differ diff --git a/acceptance/fixtures/el-repo/repodata/c274906bddc4277eb4a9f54ad0bfb833ae2c34209d2c8059ee187aa409886ead-other.xml.gz b/acceptance/fixtures/el-repo/repodata/c274906bddc4277eb4a9f54ad0bfb833ae2c34209d2c8059ee187aa409886ead-other.xml.gz new file mode 100644 index 00000000000..0cde0bc9099 Binary files /dev/null and b/acceptance/fixtures/el-repo/repodata/c274906bddc4277eb4a9f54ad0bfb833ae2c34209d2c8059ee187aa409886ead-other.xml.gz differ diff --git a/acceptance/fixtures/el-repo/repodata/repomd.xml b/acceptance/fixtures/el-repo/repodata/repomd.xml new file mode 100644 index 00000000000..a5b5ef1db02 --- /dev/null +++ b/acceptance/fixtures/el-repo/repodata/repomd.xml @@ -0,0 +1,55 @@ + + + 1631716408 + + 0427a2b1b650922e9e7359c09be8820caa3b1ae72efef4998fd7a50fbd3a858c + 
c838f308b3673ee45b64f5b4f594b1428ec76c7a09045ddbcafd37cddf6c9806 + + 1631716408 + 853 + 2885 + + + 12382dd1ca2ce49561d698430501e038a8694b64a5d69bdb7133bff1be5bd4ab + ee99cce50b52fdbe9986c16a1896b3f621fd678fa3e5731f153458a9b2770aa0 + + 1631716408 + 362 + 799 + + + c274906bddc4277eb4a9f54ad0bfb833ae2c34209d2c8059ee187aa409886ead + 537bf4c94bce5d5bf7a7ecca3e612028ba87b7bf9c2cf7c952ec902d6e3259ee + + 1631716408 + 297 + 493 + + + 68861daea8ff469f3418abd08697b408df11c8079b0b24178a4e2b4bd8a7102e + f50da72da990487e2a53e9414b4b5fe0b47357e7ddebb8ded8450ea268d1a1f2 + + 1631716408 + 2176 + 106496 + 10 + + + 4427b13c52edea24fc19776198a99611464b3c67f7828aeed8c5d20f3d8b1c02 + 6aa60e9b3cb91ce9240c648f2f72eeec9a9f29fa809e4fdb4d05de334e9d2f0d + + 1631716408 + 973 + 28672 + 10 + + + 653202d291344674c0e6c2547647d09c2b0044ec96986b9c62f74dc49f15a3db + 44c6e7b3d018f1c291708a368223ada428e13d5dd2ecc08c56321e074006a655 + + 1631716408 + 746 + 24576 + 10 + + diff --git a/acceptance/fixtures/manifest_large_exported_classes_node.pp b/acceptance/fixtures/manifest_large_exported_classes_node.pp new file mode 100644 index 00000000000..e782520328e --- /dev/null +++ b/acceptance/fixtures/manifest_large_exported_classes_node.pp @@ -0,0 +1,65 @@ +class foo ($bar) { + @@notify { 'foo': } +} +@@file { "somedir/${name}_${munin_port_real}": + ensure => present, + content => template("munin/defaultclient.erb"), +} +# Collect all exported files +File <<||>> + +# Compile the munin.conf with a local header +concatenated_file { "/etc/munin/munin.conf": + dir => somedir, + header => "/etc/munin/munin.conf.header", +} +hosting_vserver_configuration { + "erics": + domain => "orange.co", + type => "friend", + context => 13, + ip => "255.255.255.254", prefix => 27, + admin_user => "erict", admin_user_name => "hello, its me", + admin_user_email => "erict@orange.co", + customer => "hello? 
is it me?", + admin_password => file("/etc/puppet/secrets/hosting/erict_passwd"), +} +class davids_black_co_at { + ## Create users for my parents and my grandmother + hosting::user { + rztt: realname => "some other rztt", + uid => 2001, admin => true; + same: realname => "could be same", + uid => 2002; + imapersontoodamnit: realname => "some one else", + uid => 2003; + } + + # Install git.black.co.at + include git::daemon + include git::web + git::web::export { [manifests, "puppet-trunk"]: } + + # Provision an additional mysql database on the database server + hosting::database { "fogbugz": type => mysql } + # Create another VirtualHost + apache2::site { "local-fogbugz": + source => "puppet://$servername/files/hosting/erict/sites/local-fogbugz" + } +} +node backuppc { + # only use the smarthost + $mta = ssmtp + # this is a vserver on this host, so register correctly in nagios + $nagios_parent = "orange.co" + # I'm sharing an IP here, so those things have to have their own ports + $apache2_port = 8080 + $munin_port = 5008 + $munin_stats_port = 8667 + + # default configuration + include dbp + + # configure the backuppc server + include backuppc::server +} diff --git a/acceptance/fixtures/sles-repo/noarch/helloworld-1.0-2.noarch.rpm b/acceptance/fixtures/sles-repo/noarch/helloworld-1.0-2.noarch.rpm new file mode 100644 index 00000000000..c2aaeceeb5c Binary files /dev/null and b/acceptance/fixtures/sles-repo/noarch/helloworld-1.0-2.noarch.rpm differ diff --git a/acceptance/fixtures/sles-repo/noarch/helloworld-1.19-2.noarch.rpm b/acceptance/fixtures/sles-repo/noarch/helloworld-1.19-2.noarch.rpm new file mode 100644 index 00000000000..b733ebfab02 Binary files /dev/null and b/acceptance/fixtures/sles-repo/noarch/helloworld-1.19-2.noarch.rpm differ diff --git a/acceptance/fixtures/sles-repo/noarch/helloworld-2.0-2.noarch.rpm b/acceptance/fixtures/sles-repo/noarch/helloworld-2.0-2.noarch.rpm new file mode 100644 index 00000000000..1fbd22fbfee Binary files /dev/null and b/acceptance/fixtures/sles-repo/noarch/helloworld-2.0-2.noarch.rpm differ diff --git a/acceptance/fixtures/sles-repo/repodata/filelists.xml.gz b/acceptance/fixtures/sles-repo/repodata/filelists.xml.gz new file mode 100644 index 00000000000..a3ebe3ed9a5 Binary files /dev/null and b/acceptance/fixtures/sles-repo/repodata/filelists.xml.gz differ diff --git a/acceptance/fixtures/sles-repo/repodata/other.xml.gz b/acceptance/fixtures/sles-repo/repodata/other.xml.gz new file mode 100644 index 00000000000..174117d4ed3 Binary files /dev/null and b/acceptance/fixtures/sles-repo/repodata/other.xml.gz differ diff --git a/acceptance/fixtures/sles-repo/repodata/primary.xml.gz b/acceptance/fixtures/sles-repo/repodata/primary.xml.gz new file mode 100644 index 00000000000..f4bdc08c999 Binary files /dev/null and b/acceptance/fixtures/sles-repo/repodata/primary.xml.gz differ diff --git a/acceptance/fixtures/sles-repo/repodata/repomd.xml b/acceptance/fixtures/sles-repo/repodata/repomd.xml new file mode 100644 index 00000000000..64b981b371d --- /dev/null +++ b/acceptance/fixtures/sles-repo/repodata/repomd.xml @@ -0,0 +1,28 @@ + + + 1586872174 + + 57a44da7ea9c26d4f438d6ca5da3c561acfaabe1 + 4de6823d4c547a4d0126368662a25086ebba0b48 + + 1586872175 + 346 + 1143 + + + e8a80b739fad3deba689620da7947ea0e4bb8ec6 + 438d8c0a6507319e488dabcb95a37e6e3a984086 + + 1586872175 + 289 + 563 + + + 76eab89dc047884727aa27a4523e21728f356eb1 + 803b916a92d58ee369063232de96c62125e1e483 + + 1586872175 + 817 + 3654 + + diff --git a/acceptance/lib/helper.rb b/acceptance/lib/helper.rb 
index c2b5df87066..b1ee1225719 100644 --- a/acceptance/lib/helper.rb +++ b/acceptance/lib/helper.rb @@ -1 +1,3 @@ $LOAD_PATH << File.expand_path(File.dirname(__FILE__)) + +require 'beaker-puppet' diff --git a/acceptance/lib/puppet/acceptance/agent_fqdn_utils.rb b/acceptance/lib/puppet/acceptance/agent_fqdn_utils.rb new file mode 100644 index 00000000000..0934ed79957 --- /dev/null +++ b/acceptance/lib/puppet/acceptance/agent_fqdn_utils.rb @@ -0,0 +1,16 @@ +module Puppet + module Acceptance + module AgentFqdnUtils + + @@hostname_to_fqdn = {} + + # convert from an Beaker::Host (agent) to the systems fqdn as returned by facter + def agent_to_fqdn(agent) + unless @@hostname_to_fqdn.has_key?(agent.hostname) + @@hostname_to_fqdn[agent.hostname] = on(agent, facter('networking.fqdn')).stdout.chomp + end + @@hostname_to_fqdn[agent.hostname] + end + end + end +end diff --git a/acceptance/lib/puppet/acceptance/aix_util.rb b/acceptance/lib/puppet/acceptance/aix_util.rb new file mode 100644 index 00000000000..79050b534cd --- /dev/null +++ b/acceptance/lib/puppet/acceptance/aix_util.rb @@ -0,0 +1,172 @@ +module Puppet + module Acceptance + module AixUtil + def to_kv_array(attributes) + attributes.map { |attribute, value| "#{attribute}=#{value}" } + end + + def assert_object_attributes_on(host, object_get, object, expected_attributes) + host.send(object_get, object) do |result| + actual_attrs_kv_pairs = result.stdout.chomp.split(' ')[(1..-1)] + actual_attrs = actual_attrs_kv_pairs.map do |kv_pair| + attribute, value = kv_pair.split('=') + next nil unless value + [attribute, value] + end.compact.to_h + + expected_attributes.each do |attribute, value| + attribute_str = "attributes[#{object}][#{attribute}]" + actual_value = actual_attrs[attribute] + assert_match( + /\A#{value}\z/, + actual_value, + "EXPECTED: #{attribute_str} = \"#{value}\", ACTUAL: #{attribute_str} = \"#{actual_value}\"" + ) + end + end + end + + def assert_puppet_changed_object_attributes(result, object_resource, object, changed_attributes) + stdout = result.stdout.chomp + changed_attributes.each do |attribute, value| + prefix = /#{object_resource}\[#{object}\].*attributes changed.*/ + attribute_str = "attributes[#{object}][#{attribute}]" + + assert_match( + /#{prefix}#{attribute}=#{value}/, + stdout, + "Puppet did not indicate that #{attribute_str} changed to #{value}" + ) + end + end + + def object_resource_manifest(object_resource, object, params) + params_str = params.map do |param, value| + value_str = value.to_s + value_str = "\"#{value_str}\"" if value.is_a?(String) + + " #{param} => #{value_str}" + end.join(",\n") + + <<-MANIFEST +#{object_resource} { '#{object}': + #{params_str} +} +MANIFEST + end + + def run_attribute_management_tests(object_resource, id_property, initial_attributes, changed_attributes) + object_get = "#{object_resource}_get".to_sym + object_absent = "#{object_resource}_absent".to_sym + + name = "obj" + teardown do + agents.each { |agent| agent.send(object_absent, name) } + end + + current_attributes = initial_attributes.dup + + agents.each do |agent| + agent.send(object_absent, name) + + # We extract the code for this step as a lambda because we will be checking + # for this case (1) Before the object has been created and (2) After the + # object has been created (towards the end). We do this because in (1), Puppet + # does not trigger the property setters after creating the object, while in (2) + # it does. These are two different scenarios that we want to check. 
+ step_run_errors_when_property_is_passed_as_attribute = lambda do + manifest = object_resource_manifest( + object_resource, + name, + attributes: current_attributes.merge({ 'id' => '15' }) + ) + + apply_manifest_on(agent, manifest) do |result| + assert_match(/Error:.*'#{id_property}'.*'id'/, result.stderr, "specifying a Puppet property as part of an AIX attribute should have errored, but received #{result.stderr}") + end + end + + + step "Ensure that Puppet errors if a Puppet property is passed in as an AIX attribute when creating the #{object_resource}" do + step_run_errors_when_property_is_passed_as_attribute.call + end + + step "Ensure that the #{object_resource} can be created with the specified attributes" do + manifest = object_resource_manifest( + object_resource, + name, + ensure: :present, + attributes: to_kv_array(current_attributes) + ) + + apply_manifest_on(agent, manifest) + assert_object_attributes_on(agent, object_get, name, current_attributes) + end + + step "Ensure that Puppet noops when the specified attributes are already set" do + manifest = object_resource_manifest( + object_resource, + name, + attributes: to_kv_array(current_attributes) + ) + + apply_manifest_on(agent, manifest, catch_changes: true) + end + + # Remember the changed attribute's old values + old_attributes = current_attributes.select { |k, _| changed_attributes.keys.include?(k) } + + step "Ensure that Puppet updates only the specified attributes and nothing else" do + current_attributes = current_attributes.merge(changed_attributes) + + manifest = object_resource_manifest( + object_resource, + name, + attributes: to_kv_array(current_attributes) + ) + + apply_manifest_on(agent, manifest) do |result| + assert_puppet_changed_object_attributes( + result, + object_resource.capitalize, + name, + changed_attributes + ) + end + assert_object_attributes_on(agent, object_get, name, current_attributes) + end + + step "Ensure that Puppet accepts a hash for the attributes property" do + # We want to see if Puppet will do something with the attributes property + # when we pass it in as a hash so that it does not just pass validation + # and end up noop-ing. Let's set one of our attributes back to its old + # value in order to simulate an actual change. 
+ attribute = old_attributes.keys.first + old_value = old_attributes.delete(attribute) + current_attributes[attribute] = old_value + + manifest = object_resource_manifest( + object_resource, + name, + attributes: current_attributes + ) + + apply_manifest_on(agent, manifest) + assert_object_attributes_on(agent, object_get, name, current_attributes) + end + + step "Ensure that `puppet resource #{object_resource}` outputs valid Puppet code" do + on(agent, puppet("resource #{object_resource} #{name}")) do |result| + manifest = result.stdout.chomp + apply_manifest_on(agent, manifest) + end + end + + step "Ensure that Puppet errors if a Puppet property is passed in as an AIX attribute after #{object_resource} has been created" do + step_run_errors_when_property_is_passed_as_attribute.call + end + end + end + end + end +end diff --git a/acceptance/lib/puppet/acceptance/classifier_utils.rb b/acceptance/lib/puppet/acceptance/classifier_utils.rb index 15e56ee88a7..343991d2d35 100644 --- a/acceptance/lib/puppet/acceptance/classifier_utils.rb +++ b/acceptance/lib/puppet/acceptance/classifier_utils.rb @@ -17,6 +17,24 @@ def self.tmpdirs @classifier_utils_tmpdirs ||= [] end + # PE creates a "Production environment" group during installation which + # all nodes are a member of by default. This method just looks up this + # group and returns its uuid so that other methods may reference it. + def get_production_environment_group_uuid + step "Get classifier groups so we can locate the 'Production environment' group" + response = classifier_handle.get("/v1/groups") + assert_equal(200, response.code, "Unable to get classifer groups: #{response.body}") + + groups_json = response.body + groups = JSON.parse(groups_json) + + if production_environment = groups.find { |g| g['name'] == 'Production environment' } + production_environment['id'] + else + nil + end + end + # Create a Classifier Group which by default will apply to all of the passed # nodes. The Group will merge in the passed group_hash which will be converted # into the json body for a Classifier PUT /v1/groups/:id request. @@ -47,9 +65,13 @@ def create_group_for_nodes(nodes, group_hash) r << ["~", "name", name] r end + # In order to override the environment for test nodes, we need the + # groups we create to be a child of this "Production environment" group, + # otherwise we get a classification error from the conflicting groups. + parent = get_production_environment_group_uuid || Puppet::Acceptance::ClassifierUtils::DEFAULT_GROUP_ID body = { "description" => "A classification group for the following acceptance test nodes: (#{hostnames.join(", ")})", - "parent" => "#{Puppet::Acceptance::ClassifierUtils::DEFAULT_GROUP_ID}", + "parent" => parent, "rule" => rule, "classes" => {} }.merge group_hash @@ -60,48 +82,6 @@ def create_group_for_nodes(nodes, group_hash) return group_uuid end - # Locates and then updates the 'PE MCollective' group to disable it for - # all agent nodes. A teardown is registered to restore the 'PE - # MCollective' group at the end of the test. - def disable_pe_enterprise_mcollective_agent_classes - return if !master.is_pe? 
- - step "Get classifier groups so we can locate the PE MCollective group" - response = classifier_handle.get("/v1/groups") - assert_equal(200, response.code, "Unable to get classifer groups: #{response.body}") - - groups_json = response.body - groups = JSON.parse(groups_json) - pe_mcollective = groups.find { |g| g['name'] == 'PE MCollective' } - assert_not_nil pe_mcollective, "Unable to find the 'PE MCollective' group in: #{groups.pretty_inspect}" - select_properties_we_can_put = lambda do |group| - group.select { |k,v| ['id','name','environment','environment_trumps','rule','description','classes','variables','parent'].include?(k) } - end - pe_mcollective = select_properties_we_can_put.call(pe_mcollective) - - teardown do - step "Restore original PE MCollective group" do - groups = JSON.parse(groups_json) - original_pe_mcollective = groups.find { |g| g['name'] == 'PE MCollective' } - original_pe_mcollective = select_properties_we_can_put.call(original_pe_mcollective) - response = classifier_handle.put("/v1/groups/#{original_pe_mcollective['id']}", :body => original_pe_mcollective.to_json) - assert_equal(201, response.code, "Unable to restore 'PE MCollective' group: #{response.code}:#{response.body}") - end if response.code == 201 - end - - hostnames = agents.map { |n| n.hostname } - step "Adjust PE MCollective not to match for #{hostnames.join(", ")}" - host_matching_rule = hostnames.inject(["or"]) do |r,name| - r << ["~", "name", name] - r - end - pe_mcollective['rule'] = ['and', pe_mcollective['rule'], ['not', host_matching_rule]] - - response = classifier_handle.put("/v1/groups/#{pe_mcollective['id']}", :body => pe_mcollective.to_json) - - assert_equal(201, response.code, "Unexpected response code: #{response.code}, #{response.body}") - end - # Creates a group which allows the given nodes to specify their own environments. # Will be torn down at the end of the test. def classify_nodes_as_agent_specified(nodes) @@ -148,7 +128,8 @@ def master_ca_cert_file Puppet::Acceptance::ClassifierUtils.tmpdirs << cert_dir @ca_cert_file = File.join(cert_dir, "cacert.pem") - File.open(@ca_cert_file, "w") do |f| + # RFC 1421 states PEM is 7-bit ASCII https://tools.ietf.org/html/rfc1421 + File.open(@ca_cert_file, "w:ASCII") do |f| f.write(ca_cert) end end diff --git a/acceptance/lib/puppet/acceptance/common_utils.rb b/acceptance/lib/puppet/acceptance/common_utils.rb index be437c2394a..fdd487a4df8 100644 --- a/acceptance/lib/puppet/acceptance/common_utils.rb +++ b/acceptance/lib/puppet/acceptance/common_utils.rb @@ -1,127 +1,19 @@ module Puppet module Acceptance - module CronUtils - def clean(agent, o={}) - o = {:user => 'tstuser'}.merge(o) - run_cron_on(agent, :remove, o[:user]) - apply_manifest_on(agent, %[user { '%s': ensure => absent, managehome => false }] % o[:user]) - end - - def setup(agent, o={}) - o = {:user => 'tstuser'}.merge(o) - apply_manifest_on(agent, %[user { '%s': ensure => present, managehome => false }] % o[:user]) - apply_manifest_on(agent, %[case $operatingsystem { - centos, redhat: {$cron = 'cronie'} - solaris: { $cron = 'core-os' } - default: {$cron ='cron'} } - package {'cron': name=> $cron, ensure=>present, }]) + module BeakerUtils + # TODO: This should be added to Beaker + def assert_matching_arrays(expected, actual, message = "") + assert_equal(expected.sort, actual.sort, message) end end - module CAUtils - - def initialize_ssl - hostname = on(master, 'facter hostname').stdout.strip - fqdn = on(master, 'facter fqdn').stdout.strip - - if master.use_service_scripts? 
- step "Ensure puppet is stopped" - # Passenger, in particular, must be shutdown for the cert setup steps to work, - # but any running puppet master will interfere with webrick starting up and - # potentially ignore the puppet.conf changes. - on(master, puppet('resource', 'service', master['puppetservice'], "ensure=stopped")) - end - - step "Clear SSL on all hosts" - hosts.each do |host| - ssldir = on(host, puppet('agent --configprint ssldir')).stdout.chomp - on(host, "rm -rf '#{ssldir}'") - end - - step "Master: Start Puppet Master" do - master_opts = { - :main => { - :dns_alt_names => "puppet,#{hostname},#{fqdn}", - }, - :__service_args__ => { - # apache2 service scripts can't restart if we've removed the ssl dir - :bypass_service_script => true, - }, - } - with_puppet_running_on(master, master_opts) do - - hosts.each do |host| - next if host['roles'].include? 'master' - - step "Agents: Run agent --test first time to gen CSR" - on host, puppet("agent --test --server #{master}"), :acceptable_exit_codes => [1] - end - - # Sign all waiting certs - step "Master: sign all certs" - on master, puppet("cert --sign --all"), :acceptable_exit_codes => [0,24] - - step "Agents: Run agent --test second time to obtain signed cert" - on agents, puppet("agent --test --server #{master}"), :acceptable_exit_codes => [0,2] - end - end - end - - def clean_cert(host, cn, check = true) - if host == master && master[:is_puppetserver] - on master, puppet_resource("service", master['puppetservice'], "ensure=stopped") - end - - on(host, puppet('cert', 'clean', cn), :acceptable_exit_codes => check ? [0] : [0, 24]) - if check - assert_match(/remov.*Certificate.*#{cn}/i, stdout, "Should see a log message that certificate request was removed.") - on(host, puppet('cert', 'list', '--all')) - assert_no_match(/#{cn}/, stdout, "Should not see certificate in list anymore.") - end - end - - def clear_agent_ssl - return if master.is_pe? - step "All: Clear agent only ssl settings (do not clear master)" - hosts.each do |host| - next if host == master - ssldir = on(host, puppet('agent --configprint ssldir')).stdout.chomp - on( host, host_command("rm -rf '#{ssldir}'") ) - end + module PackageUtils + def package_present(host, package, version = nil) + host.install_package(package, '', version) end - def reset_agent_ssl(resign = true) - return if master.is_pe? - clear_agent_ssl - - hostname = master.execute('facter hostname') - fqdn = master.execute('facter fqdn') - - step "Clear old agent certificates from master" do - agents.each do |agent| - next if agent == master && agent.is_using_passenger? - agent_cn = on(agent, puppet('agent --configprint certname')).stdout.chomp - clean_cert(master, agent_cn, false) if agent_cn - end - end - - if resign - step "Master: Ensure the master is listening and autosigning" - with_puppet_running_on(master, - :master => { - :dns_alt_names => "puppet,#{hostname},#{fqdn}", - :autosign => true, - } - ) do - - agents.each do |agent| - next if agent == master && agent.is_using_passenger? 
- step "Agents: Run agent --test once to obtain auto-signed cert" do - on agent, puppet('agent', "--test --server #{master}"), :acceptable_exit_codes => [0,2] - end - end - end - end + def package_absent(host, package, cmdline_args = '', opts = {}) + host.uninstall_package(package, cmdline_args, opts) end end @@ -131,14 +23,45 @@ def ruby_command(host) end module_function :ruby_command - def gem_command(host) - if host['platform'] =~ /windows/ - "env PATH=\"#{host['privatebindir']}:${PATH}\" cmd /c gem" + def gem_command(host, type='aio') + if type == 'aio' + if host['platform'] =~ /windows/ + "env PATH=\"#{host['privatebindir']}:${PATH}\" cmd /c gem" + else + "env PATH=\"#{host['privatebindir']}:${PATH}\" gem" + end else - "env PATH=\"#{host['privatebindir']}:${PATH}\" gem" + on(host, 'which gem').stdout.chomp end end module_function :gem_command end + + module ManifestUtils + def resource_manifest(resource, title, params = {}) + params_str = params.map do |param, value| + # This is not quite correct for all parameter values, + # but it is good enough for most purposes. + value_str = value.to_s + value_str = "\"#{value_str}\"" if value.is_a?(String) + + " #{param} => #{value_str}" + end.join(",\n") + + <<-MANIFEST +#{resource} { '#{title}': + #{params_str} +} +MANIFEST + end + + def file_manifest(path, params = {}) + resource_manifest('file', path, params) + end + + def user_manifest(username, params = {}) + resource_manifest('user', username, params) + end + end end end diff --git a/acceptance/lib/puppet/acceptance/environment_utils.rb b/acceptance/lib/puppet/acceptance/environment_utils.rb index 45b8d4c18b0..21282c611eb 100644 --- a/acceptance/lib/puppet/acceptance/environment_utils.rb +++ b/acceptance/lib/puppet/acceptance/environment_utils.rb @@ -19,7 +19,7 @@ def generate_environment(options) manifestpath = options[:manifestpath] env_name = options[:env_name] - environment = <<-MANIFEST_SNIPPET + <<-MANIFEST_SNIPPET file { ################################################### # #{env_name} @@ -40,7 +40,7 @@ def generate_environment(options) # Generate one module's manifest code. def generate_module(module_name, env_name, modulepath) - module_pp = <<-MANIFEST_SNIPPET + <<-MANIFEST_SNIPPET "#{modulepath}":; "#{modulepath}/#{module_name}":; "#{modulepath}/#{module_name}/manifests":; @@ -62,7 +62,7 @@ def generate_module(module_name, env_name, modulepath) # the environments live in # @return [String] Puppet manifest to generate all of the environment files. 
def environment_manifest(testdir) - manifest = <<-MANIFEST + <<-MANIFEST File { ensure => directory, owner => #{master.puppet['user']}, @@ -227,8 +227,7 @@ def use_an_environment(environment, description, master_opts, envdir, confdir, o master_puppet_conf = master_opts.dup # shallow clone results = {} - safely_shadow_directory_contents_and_yield(master, master.puppet('master')['codedir'], envdir) do - + safely_shadow_directory_contents_and_yield(master, puppet_config(master, 'codedir', section: 'master'), envdir) do config_print = options[:config_print] directory_environments = options[:directory_environments] @@ -242,35 +241,21 @@ def use_an_environment(environment, description, master_opts, envdir, confdir, o # Test agents configured to use directory environments (affects environment # loading on the agent, especially with regards to requests/node environment) args << "--environmentpath='$confdir/environments'" if directory_environments && agent != master - on(agent, puppet("agent", *args), :acceptable_exit_codes => (0..255)) do + on(agent, puppet("agent", *args), :acceptable_exit_codes => (0..255)) do |result| agent_results[:puppet_agent] = result end - if agent == master - args = ["--trace"] - args << ["--environment", environment] if environment - - step "print puppet config for #{description} environment" - on(agent, puppet(*(["config", "print", "basemodulepath", "modulepath", "manifest", "config_version", config_print] + args)), :acceptable_exit_codes => (0..255)) do - agent_results[:puppet_config] = result - end - - step "puppet apply using #{description} environment" - on(agent, puppet(*(["apply", '-e', '"include testing_mod"'] + args)), :acceptable_exit_codes => (0..255)) do - agent_results[:puppet_apply] = result - end - - # Be aware that Puppet Module Tool will create the module directory path if it - # does not exist. So these tests should be run last... - step "install a module into environment" - on(agent, puppet(*(["module", "install", "pmtacceptance-nginx"] + args)), :acceptable_exit_codes => (0..255)) do - agent_results[:puppet_module_install] = result - end - - step "uninstall a module from #{description} environment" - on(agent, puppet(*(["module", "uninstall", "pmtacceptance-nginx"] + args)), :acceptable_exit_codes => (0..255)) do - agent_results[:puppet_module_uninstall] = result - end + args = ["--trace"] + args << ["--environment", environment] if environment + + step "print puppet config for #{description} environment" + on(master, puppet(*(["config", "print", "basemodulepath", "modulepath", "manifest", "config_version", config_print] + args)), :acceptable_exit_codes => (0..255)) do |result| + agent_results[:puppet_config] = result + end + + step "puppet apply using #{description} environment" + on(master, puppet(*(["apply", '-e', '"include testing_mod"'] + args)), :acceptable_exit_codes => (0..255)) do |result| + agent_results[:puppet_apply] = result end end end @@ -351,15 +336,62 @@ def review_results(results, expectations) def assert_review(review) failures = [] - review.each do |scenario, failed| + review.each do |failed| if !failed.empty? - problems = "Problems in the '#{scenario}' output reported above:\n #{failed.join("\n ")}" + problems = "Problems in the output reported above:\n #{failed}" logger.warn(problems) failures << problems end end assert failures.empty?, "Failed Review:\n\n#{failures.join("\n")}\n" end + + # generate a random string of 6 letters and numbers. 
NOT secure + def random_string + [*('a'..'z'),*('0'..'9')].shuffle[0,8].join + end + private :random_string + + # if the first test to call this has changed the environmentpath, this will cause trouble + # maybe not the best idea to memoize this? + def environmentpath + @@memoized_environmentpath ||= master.puppet['environmentpath'] + end + module_function :environmentpath + + # create a tmpdir to hold a temporary environment bound by puppet environment naming restrictions + # symbolically link environment into environmentpath + # we can't use the temp_file utils in our own lib because host.tmpdir violates puppet's naming requirements + # in rare cases we want to do this on agents when testing things that use the default manifest + def mk_tmp_environment_with_teardown(host, environment) + # add the tmp_environment to a set to ensure no collisions + @@tmp_environment_set ||= Set.new + deadman = 100; loop_num = 0 + while @@tmp_environment_set.include?(tmp_environment = environment.downcase + '_' + random_string) do + break if (loop_num = loop_num + 1) > deadman + end + @@tmp_environment_set << tmp_environment + tmpdir = File.join('','tmp',tmp_environment) + on host, "mkdir -p #{tmpdir}/manifests #{tmpdir}/modules; chmod -R 755 #{tmpdir}" + + # register teardown to remove the link below + teardown do + on host, "rm -rf #{File.join(environmentpath,tmp_environment)}" + end + + # WARNING: this won't work with filesync (symlinked environments are not supported) + on host, "mkdir -p #{environmentpath}; ln -sf #{tmpdir} #{File.join(environmentpath,tmp_environment)}" + return tmp_environment + end + module_function :mk_tmp_environment_with_teardown + + # create sitepp in a tmp_environment as created by mk_tmp_environment_with_teardown + def create_sitepp(host, tmp_environment, file_content) + file_path = File.join('','tmp',tmp_environment,'manifests','site.pp') + create_remote_file(host, file_path, file_content) + on host, "chmod -R 755 #{file_path}" + end + end end end diff --git a/acceptance/lib/puppet/acceptance/git_utils.rb b/acceptance/lib/puppet/acceptance/git_utils.rb deleted file mode 100644 index 6f27f6b6b8b..00000000000 --- a/acceptance/lib/puppet/acceptance/git_utils.rb +++ /dev/null @@ -1,19 +0,0 @@ -module Puppet - module Acceptance - module GitUtils - def lookup_in_env(env_variable_name, project_name, default) - project_specific_name = "#{project_name.upcase.gsub("-","_")}_#{env_variable_name}" - ENV[project_specific_name] || ENV[env_variable_name] || default - end - - def build_giturl(project_name, git_fork = nil, git_server = nil) - git_fork ||= lookup_in_env('FORK', project_name, 'puppetlabs') - git_server ||= lookup_in_env('GIT_SERVER', project_name, 'github.com') - repo = (git_server == 'github.com') ? - "#{git_fork}/#{project_name}.git" : - "#{git_fork}-#{project_name}.git" - "git://#{git_server}/#{repo}" - end - end - end -end diff --git a/acceptance/lib/puppet/acceptance/git_utils_spec.rb b/acceptance/lib/puppet/acceptance/git_utils_spec.rb deleted file mode 100644 index 39a72a95df4..00000000000 --- a/acceptance/lib/puppet/acceptance/git_utils_spec.rb +++ /dev/null @@ -1,62 +0,0 @@ -require File.join(File.dirname(__FILE__),'../../acceptance_spec_helper.rb') -require 'puppet/acceptance/git_utils' - -describe 'GitUtils' do - include Puppet::Acceptance::GitUtils - - def with_env(vars) - saved = {} - vars.each do |k,v| - saved[k] = ENV[k] if ENV[k] - ENV[k] = v - end - yield - ensure - vars.keys.each do |k| - saved.include?(k) ? 
- ENV[k] = saved[k] : - ENV.delete(k) - end - end - - it "looks up an env variable" do - with_env('VAR' => 'from-var') do - expect(lookup_in_env('VAR', 'foo', 'default')).to eq('from-var') - end - end - - it "looks up an env variable and submits default if none found" do - expect(lookup_in_env('VAR', 'foo', 'default')).to eq('default') - end - - it "prefers a project prefixed env variable" do - with_env('VAR' => 'from-var', - 'FOO_BAR_VAR' => 'from-foo-bar-var') do - expect(lookup_in_env('VAR', 'foo-bar', 'default')).to eq('from-foo-bar-var') - end - end - - it "builds a default git url for a project" do - expect(build_giturl('foo')).to eq('git://github.com/puppetlabs/foo.git') - end - - it "builds a git url from passed parameters" do - expect(build_giturl('foo', 'somefork', 'someserver')).to eq('git://someserver/somefork-foo.git') - end - - it "builds a git url based on env variables" do - with_env('GIT_SERVER' => 'gitmirror', - 'FORK' => 'fork') do - expect(build_giturl('foo')).to eq('git://gitmirror/fork-foo.git') - end - end - - it "builds a git url based on project specific env variables" do - with_env('GIT_SERVER' => 'gitmirror', - 'FORK' => 'fork', - 'FOO_GIT_SERVER' => 'project.gitmirror', - 'FOO_FORK' => 'project-fork') do - expect(build_giturl('foo')).to eq('git://project.gitmirror/project-fork-foo.git') - end - end -end diff --git a/acceptance/lib/puppet/acceptance/i18n_utils.rb b/acceptance/lib/puppet/acceptance/i18n_utils.rb new file mode 100644 index 00000000000..973289f2871 --- /dev/null +++ b/acceptance/lib/puppet/acceptance/i18n_utils.rb @@ -0,0 +1,40 @@ +module Puppet + module Acceptance + module I18nUtils + + # try to enable the locale's for a given language on the agent and return the preferred language name + # + # @param agent [string] the agent to check the locale configuration on + # @param language [string] the language attempt to configure if needed + # @return language [string] the language string to use on the agent node, will return nil if not available + def enable_locale_language(agent, language) + if agent['platform'] =~ /ubuntu/ + on(agent, 'locale -a') do |locale_result| + if locale_result.stdout !~ /#{language}/ + on(agent, "locale-gen --lang #{language}") + end + end + elsif agent['platform'] =~ /debian/ + on(agent, 'locale -a') do |locale_result| + if locale_result.stdout !~ /#{language}/ + on(agent, "cp /etc/locale.gen /etc/locale.gen.orig ; sed -e 's/# #{language}/#{language}/' /etc/locale.gen.orig > /etc/locale.gen") + on(agent, 'locale-gen') + end + end + end + return language_name(agent, language) + end + + # figure out the preferred language string for the requested language if the language is configured on the system + def language_name(agent, language) + step "PLATFORM #{agent['platform']}" + on(agent, 'locale -a') do |locale_result| + ["#{language}.utf8", "#{language}.UTF-8", language].each do |lang| + return lang if locale_result.stdout =~ /#{lang}/ + end + end + return nil + end + end + end +end diff --git a/acceptance/lib/puppet/acceptance/i18ndemo_utils.rb b/acceptance/lib/puppet/acceptance/i18ndemo_utils.rb new file mode 100644 index 00000000000..f7cabddfeca --- /dev/null +++ b/acceptance/lib/puppet/acceptance/i18ndemo_utils.rb @@ -0,0 +1,40 @@ +module Puppet +module Acceptance + module I18nDemoUtils + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + I18NDEMO_NAME = "i18ndemo" + I18NDEMO_MODULE_NAME = "eputnam-#{I18NDEMO_NAME}" + + def configure_master_system_locale(language) + language = 
enable_locale_language(master, language) + fail_test("puppet server machine is missing #{language} locale. help...") if language.nil? + + on(master, "localectl set-locale LANG=#{language}") + on(master, "service #{master['puppetservice']} restart") + end + + def reset_master_system_locale + language = language_name(master, 'en_US') || 'en_US' + on(master, "localectl set-locale LANG=#{language}") + on(master, "service #{master['puppetservice']} restart") + end + + def install_i18n_demo_module(node, environment=nil) + env_options = environment.nil? ? '' : "--environment #{environment}" + on(node, puppet("module install #{I18NDEMO_MODULE_NAME} #{env_options}")) + end + + def uninstall_i18n_demo_module(node, environment=nil) + env_options = environment.nil? ? '' : "--environment #{environment}" + [I18NDEMO_MODULE_NAME, 'puppetlabs-stdlib', 'puppetlabs-translate'].each do |module_name| + on(node, puppet("module uninstall #{module_name} #{env_options}"), :acceptable_exit_codes => [0,1]) + end + var_dir = on(node, puppet('config print vardir')).stdout.chomp + on(node, "rm -rf '#{File.join(var_dir, 'locales', 'ja')}' '#{File.join(var_dir, 'locales', 'fi')}'") + end + end +end +end diff --git a/acceptance/lib/puppet/acceptance/install_utils.rb b/acceptance/lib/puppet/acceptance/install_utils.rb deleted file mode 100644 index e51668c5352..00000000000 --- a/acceptance/lib/puppet/acceptance/install_utils.rb +++ /dev/null @@ -1,195 +0,0 @@ -require 'open-uri' -require 'open3' -require 'uri' -require 'puppet/acceptance/common_utils' - -module Puppet - module Acceptance - module InstallUtils - PLATFORM_PATTERNS = { - :redhat => /fedora|el|centos/, - :debian => /debian|ubuntu/, - :debian_ruby18 => /debian|ubuntu-lucid|ubuntu-precise/, - :solaris_10 => /solaris-10/, - :solaris_11 => /solaris-11/, - :windows => /windows/, - }.freeze - - # Installs packages on the hosts. - # - # @param hosts [Array] Array of hosts to install packages to. - # @param package_hash [Hash{Symbol=>Array>}] - # Keys should be a symbol for a platform in PLATFORM_PATTERNS. Values - # should be an array of package names to install, or of two element - # arrays where a[0] is the command we expect to find on the platform - # and a[1] is the package name (when they are different). - # @param options [Hash{Symbol=>Boolean}] - # @option options [Boolean] :check_if_exists First check to see if - # command is present before installing package. 
(Default false) - # @return true - def install_packages_on(hosts, package_hash, options = {}) - check_if_exists = options[:check_if_exists] - hosts = [hosts] unless hosts.kind_of?(Array) - hosts.each do |host| - package_hash.each do |platform_key,package_list| - if pattern = PLATFORM_PATTERNS[platform_key] - if pattern.match(host['platform']) - package_list.each do |cmd_pkg| - if cmd_pkg.kind_of?(Array) - command, package = cmd_pkg - else - command = package = cmd_pkg - end - if !check_if_exists || !host.check_for_package(command) - host.logger.notify("Installing #{package}") - additional_switches = '--allow-unauthenticated' if platform_key == :debian - host.install_package(package, additional_switches) - end - end - end - else - raise("Unknown platform '#{platform_key}' in package_hash") - end - end - end - return true - end - - def fetch(base_url, file_name, dst_dir) - FileUtils.makedirs(dst_dir) - src = "#{base_url}/#{file_name}" - dst = File.join(dst_dir, file_name) - if File.exists?(dst) - logger.notify "Already fetched #{dst}" - else - logger.notify "Fetching: #{src}" - logger.notify " and saving to #{dst}" - open(src) do |remote| - File.open(dst, "w") do |file| - FileUtils.copy_stream(remote, file) - end - end - end - return dst - end - - def fetch_remote_dir(url, dst_dir) - logger.notify "fetch_remote_dir (url: #{url}, dst_dir #{dst_dir})" - if url[-1, 1] !~ /\// - url += '/' - end - url = URI.parse(url) - chunks = url.path.split('/') - dst = File.join(dst_dir, chunks.last) - #determine directory structure to cut - #only want to keep the last directory, thus cut total number of dirs - 2 (hostname + last dir name) - cut = chunks.length - 2 - wget_command = "wget -nv -P #{dst_dir} --reject \"index.html*\",\"*.gif\" --cut-dirs=#{cut} -np -nH --no-check-certificate -r #{url}" - - logger.notify "Fetching remote directory: #{url}" - logger.notify " and saving to #{dst}" - logger.notify " using command: #{wget_command}" - - #in ruby 1.9+ we can upgrade this to popen3 to gain access to the subprocess pid - result = `#{wget_command} 2>&1` - result.each_line do |line| - logger.debug(line) - end - if $?.to_i != 0 - raise "Failed to fetch_remote_dir '#{url}' (exit code #{$?}" - end - dst - end - - def stop_firewall_on(host) - case host['platform'] - when /debian/ - on host, 'iptables -F' - when /fedora|el-7/ - on host, puppet('resource', 'service', 'firewalld', 'ensure=stopped') - when /el|centos/ - on host, puppet('resource', 'service', 'iptables', 'ensure=stopped') - when /ubuntu/ - on host, puppet('resource', 'service', 'ufw', 'ensure=stopped') - else - logger.notify("Not sure how to clear firewall on #{host['platform']}") - end - end - - def install_repos_on(host, project, sha, repo_configs_dir) - platform = host['platform'].with_version_codename - platform_configs_dir = File.join(repo_configs_dir,platform) - tld = sha == 'nightly' ? 'nightlies.puppetlabs.com' : 'builds.puppetlabs.lan' - project = sha == 'nightly' ? project + '-latest' : project - sha = sha == 'nightly' ? nil : sha - - case platform - when /^(fedora|el|centos)-(\d+)-(.+)$/ - variant = (($1 == 'centos') ? 'el' : $1) - fedora_prefix = ((variant == 'fedora') ? 'f' : '') - version = $2 - arch = $3 - - repo_filename = "pl-%s%s-%s-%s%s-%s.repo" % [ - project, - sha ? 
'-' + sha : '', - variant, - fedora_prefix, - version, - arch - ] - repo_url = "http://%s/%s/%s/repo_configs/rpm/%s" % [tld, project, sha, repo_filename] - - on host, "curl -o /etc/yum.repos.d/#{repo_filename} #{repo_url}" - when /^(debian|ubuntu)-([^-]+)-(.+)$/ - variant = $1 - version = $2 - arch = $3 - - list_filename = "pl-%s%s-%s.list" % [ - project, - sha ? '-' + sha : '', - version - ] - list_url = "http://%s/%s/%s/repo_configs/deb/%s" % [tld, project, sha, list_filename] - - on host, "curl -o /etc/apt/sources.list.d/#{list_filename} #{list_url}" - on host, "apt-get update" - else - host.logger.notify("No repository installation step for #{platform} yet...") - end - end - - # Configures gem sources on hosts to use a mirror, if specified - # This is a duplicate of the Gemfile logic. - def configure_gem_mirror(hosts) - hosts = [hosts] unless hosts.kind_of?(Array) - gem_source = ENV['GEM_SOURCE'] || 'https://rubygems.org' - - hosts.each do |host| - gem = Puppet::Acceptance::CommandUtils.gem_command(host) - on host, "#{gem} source --clear-all" - on host, "#{gem} source --add #{gem_source}" - end - end - - def install_puppet_from_msi( host, opts ) - if not link_exists?(opts[:url]) - raise "Puppet does not exist at #{opts[:url]}!" - end - - # `start /w` blocks until installation is complete, but needs to be wrapped in `cmd.exe /c` - on host, "cmd.exe /c start /w msiexec /qn /i #{opts[:url]} /L*V C:\\\\Windows\\\\Temp\\\\Puppet-Install.log" - - # make sure the background service isn't running while the test executes - on host, "net stop puppet" - - # make sure install is sane, beaker has already added puppet and ruby - # to PATH in ~/.ssh/environment - on host, puppet('--version') - ruby = Puppet::Acceptance::CommandUtils.ruby_command(host) - on host, "#{ruby} --version" - end - end - end -end diff --git a/acceptance/lib/puppet/acceptance/install_utils_spec.rb b/acceptance/lib/puppet/acceptance/install_utils_spec.rb deleted file mode 100644 index 3c077f1d257..00000000000 --- a/acceptance/lib/puppet/acceptance/install_utils_spec.rb +++ /dev/null @@ -1,263 +0,0 @@ -require File.join(File.dirname(__FILE__),'../../acceptance_spec_helper.rb') -require 'puppet/acceptance/install_utils' - -module InstallUtilsSpec -describe 'InstallUtils' do - - class ATestCase - include Puppet::Acceptance::InstallUtils - end - - class Platform < String - - def with_version_codename - self - end - end - - class TestHost - attr_accessor :config - def initialize(config = {}) - self.config = config - end - - def [](key) - config[key] - end - end - - let(:host) { TestHost.new } - let(:testcase) { ATestCase.new } - - describe "install_packages_on" do - it "raises an error if package_hash has unknown platform keys" do - expect do - testcase.install_packages_on(host, { :foo => 'bar'}) - end.to raise_error(RuntimeError, /Unknown platform 'foo' in package_hash/) - end - - shared_examples_for(:install_packages_on) do |platform,command,package| - - let(:package_hash) do - { - :redhat => ['rh_package'], - :debian => [['db_command', 'db_package']], - } - end - let(:additional_switches) { platform == 'debian' ? 
'--allow-unauthenticated' : nil } - - before do - logger = mock('logger', :notify => nil) - host.stubs(:logger).returns(logger) - host.config['platform'] = Platform.new(platform) - end - - it "installs packages on a host" do - host.expects(:check_for_package).never - host.expects(:install_package).with(package, additional_switches).once - testcase.install_packages_on(host, package_hash) - end - - it "checks and installs packages on a host" do - host.expects(:check_for_package).with(command).once - host.expects(:install_package).with(package, additional_switches).once - testcase.install_packages_on(host, package_hash, :check_if_exists => true) - end - end - - it_should_behave_like(:install_packages_on, 'fedora', 'rh_package', 'rh_package') - it_should_behave_like(:install_packages_on, 'debian', 'db_command', 'db_package') - end - - describe "fetch" do - before do - logger = stub('logger', :notify => nil) - testcase.stubs(:logger).returns(logger) - FileUtils.expects(:makedirs).with('dir') - end - - it "does not fetch if destination file already exists" do - File.expects(:exists?).with('dir/file').returns(true) - testcase.expects(:open).never - testcase.fetch('http://foo', 'file', 'dir') - end - - it "fetches file from url and stores in destination directory as filename" do - stream = mock('stream') - file = mock('file') - testcase.expects(:open).with('http://foo/file').yields(stream) - File.expects(:open).with('dir/file', 'w').yields(file) - FileUtils.expects(:copy_stream).with(stream, file) - testcase.fetch('http://foo', 'file', 'dir') - end - - it "returns path to destination file" do - testcase.expects(:open).with('http://foo/file') - expect(testcase.fetch('http://foo', 'file', 'dir')).to eql('dir/file') - end - end - - describe "fetch_remote_dir" do - before do - logger = stub('logger', {:notify => nil, :debug => nil}) - testcase.stubs(:logger).returns(logger) - end - - it "calls wget with the right amount of cut dirs for url that ends in '/'" do - url = 'http://builds.puppetlabs.lan/puppet/7807591405af849da2ad6534c66bd2d4efff604f/repos/el/6/devel/x86_64/' - testcase.expects(:`).with("wget -nv -P dir --reject \"index.html*\",\"*.gif\" --cut-dirs=6 -np -nH --no-check-certificate -r #{url} 2>&1").returns("log") - - expect( testcase.fetch_remote_dir(url, 'dir')).to eql('dir/x86_64') - end - - it "calls wget with the right amount of cut dirs for url that doesn't end in '/'" do - url = 'http://builds.puppetlabs.lan/puppet/7807591405af849da2ad6534c66bd2d4efff604f/repos/apt/wheezy' - testcase.expects(:`).with("wget -nv -P dir --reject \"index.html*\",\"*.gif\" --cut-dirs=4 -np -nH --no-check-certificate -r #{url}/ 2>&1").returns("log") - - expect( testcase.fetch_remote_dir(url, 'dir')).to eql('dir/wheezy') - end - - end - - shared_examples_for :redhat_platforms do |platform,sha,files| - before do - host.config['platform'] = Platform.new(platform) - end - - it "fetches and installs repo configurations for #{platform}" do - platform_configs_dir = "repo-configs/#{platform}" - - rpm_url = files[:rpm][0] - rpm_file = files[:rpm][1] - testcase.expects(:fetch).with( - "http://yum.puppetlabs.com", - rpm_file, - platform_configs_dir - ).returns("#{platform_configs_dir}/#{rpm_file}") - - repo_url = files[:repo][0] - repo_file = files[:repo][1] - testcase.expects(:fetch).with( - repo_url, - repo_file, - platform_configs_dir - ).returns("#{platform_configs_dir}/#{repo_file}") - - repo_dir_url = files[:repo_dir][0] - repo_dir = files[:repo_dir][1] - testcase.expects(:link_exists?).returns( true ) - 
testcase.expects(:fetch_remote_dir).with( - repo_dir_url, - platform_configs_dir - ).returns("#{platform_configs_dir}/#{repo_dir}") - testcase.expects(:link_exists?).returns( true ) - - testcase.expects(:on).with(host, regexp_matches(/rm.*repo; rm.*rpm; rm.*#{repo_dir}/)) - testcase.expects(:scp_to).with(host, "#{platform_configs_dir}/#{rpm_file}", '/root') - testcase.expects(:scp_to).with(host, "#{platform_configs_dir}/#{repo_file}", '/root') - testcase.expects(:scp_to).with(host, "#{platform_configs_dir}/#{repo_dir}", '/root') - testcase.expects(:on).with(host, regexp_matches(%r{mv.*repo /etc/yum.repos.d})) - testcase.expects(:on).with(host, regexp_matches(%r{find /etc/yum.repos.d/ -name .*})) - testcase.expects(:on).with(host, regexp_matches(%r{rpm.*/root/.*rpm})) - - testcase.install_repos_on(host, sha, 'repo-configs') - end - end - - describe "install_repos_on" do - let(:sha) { "abcdef10" } - - it_should_behave_like(:redhat_platforms, - 'el-6-i386', - 'abcdef10', - { - :rpm => [ - "http://yum.puppetlabs.com", - "puppetlabs-release-el-6.noarch.rpm", - ], - :repo => [ - "http://builds.puppetlabs.lan/puppet/abcdef10/repo_configs/rpm/", - "pl-puppet-abcdef10-el-6-i386.repo", - ], - :repo_dir => [ - "http://builds.puppetlabs.lan/puppet/abcdef10/repos/el/6/products/i386/", - "i386", - ], - }, - ) - - it_should_behave_like(:redhat_platforms, - 'fedora-20-x86_64', - 'abcdef10', - { - :rpm => [ - "http://yum.puppetlabs.com", - "puppetlabs-release-fedora-20.noarch.rpm", - ], - :repo => [ - "http://builds.puppetlabs.lan/puppet/abcdef10/repo_configs/rpm/", - "pl-puppet-abcdef10-fedora-f20-x86_64.repo", - ], - :repo_dir => [ - "http://builds.puppetlabs.lan/puppet/abcdef10/repos/fedora/f20/products/x86_64/", - "x86_64", - ], - }, - ) - - it_should_behave_like(:redhat_platforms, - 'centos-5-x86_64', - 'abcdef10', - { - :rpm => [ - "http://yum.puppetlabs.com", - "puppetlabs-release-el-5.noarch.rpm", - ], - :repo => [ - "http://builds.puppetlabs.lan/puppet/abcdef10/repo_configs/rpm/", - "pl-puppet-abcdef10-el-5-x86_64.repo", - ], - :repo_dir => [ - "http://builds.puppetlabs.lan/puppet/abcdef10/repos/el/5/products/x86_64/", - "x86_64", - ], - }, - ) - - it "installs on a debian host" do - host.config['platform'] = platform = Platform.new('ubuntu-precise-x86_64') - platform_configs_dir = "repo-configs/#{platform}" - - deb = "puppetlabs-release-precise.deb" - testcase.expects(:fetch).with( - "http://apt.puppetlabs.com/", - deb, - platform_configs_dir - ).returns("#{platform_configs_dir}/#{deb}") - - list = "pl-puppet-#{sha}-precise.list" - testcase.expects(:fetch).with( - "http://builds.puppetlabs.lan/puppet/#{sha}/repo_configs/deb/", - list, - platform_configs_dir - ).returns("#{platform_configs_dir}/#{list}") - - testcase.expects(:fetch_remote_dir).with( - "http://builds.puppetlabs.lan/puppet/#{sha}/repos/apt/precise", - platform_configs_dir - ).returns("#{platform_configs_dir}/precise") - - testcase.expects(:on).with(host, regexp_matches(/rm.*list; rm.*deb; rm.*/)) - testcase.expects(:scp_to).with(host, "#{platform_configs_dir}/#{deb}", '/root') - testcase.expects(:scp_to).with(host, "#{platform_configs_dir}/#{list}", '/root') - testcase.expects(:scp_to).with(host, "#{platform_configs_dir}/precise", '/root') - testcase.expects(:on).with(host, regexp_matches(%r{mv.*list /etc/apt/sources.list.d})) - testcase.expects(:on).with(host, regexp_matches(%r{find /etc/apt/sources.list.d/ -name .*})) - testcase.expects(:on).with(host, regexp_matches(%r{dpkg -i.*/root/.*deb})) - testcase.expects(:on).with(host, 
regexp_matches(%r{apt-get update})) - - testcase.install_repos_on(host, sha, 'repo-configs') - end - end -end -end diff --git a/acceptance/lib/puppet/acceptance/module_utils.rb b/acceptance/lib/puppet/acceptance/module_utils.rb index 3338e750ef7..3c840b39fea 100644 --- a/acceptance/lib/puppet/acceptance/module_utils.rb +++ b/acceptance/lib/puppet/acceptance/module_utils.rb @@ -14,7 +14,7 @@ module ModuleUtils # # @param host [String] hostname # @return [Array] paths for found modulepath - def get_modulepaths_for_host (host) + def get_modulepaths_for_host(host) environment = on(host, puppet("config print environment")).stdout.chomp on(host, puppet("config print modulepath --environment #{environment}")).stdout.chomp.split(host['pathseparator']) end @@ -27,7 +27,7 @@ def get_modulepaths_for_host (host) # # @param host [String] hostname # @return [String] first path for found modulepath - def get_default_modulepath_for_host (host) + def get_default_modulepath_for_host(host) get_modulepaths_for_host(host)[0] end @@ -43,12 +43,13 @@ def get_default_modulepath_for_host (host) # # @param host [String] hostname # @return [Array] paths for found modules - def get_installed_modules_for_host (host) - on host, puppet("module list --render-as pson") - str = stdout.lines.to_a.last - pat = /\(([^()]+)\)/ - mods = str.scan(pat).flatten - return mods + def get_installed_modules_for_host(host) + on(host, puppet('module list --render-as json')) do |result| + str = result.stdout.lines.to_a.last + pat = /\(([^()]+)\)/ + mods = str.scan(pat).flatten + return mods + end end # Return a hash of array of paths to installed modules for a hosts. @@ -74,7 +75,7 @@ def get_installed_modules_for_host (host) # # @param hosts [Array] hostnames # @return [Hash] paths for found modules indexed by hostname - def get_installed_modules_for_hosts (hosts) + def get_installed_modules_for_hosts(hosts) mods = {} hosts.each do |host| mods[host] = get_installed_modules_for_host host @@ -102,7 +103,7 @@ def get_installed_modules_for_hosts (hosts) # by hostname. Taken in the setup stage of a test. # @param ending_hash [Hash] paths for found modules indexed # by hostname. Taken in the teardown stage of a test. - def rm_installed_modules_from_hosts (beginning_hash, ending_hash) + def rm_installed_modules_from_hosts(beginning_hash, ending_hash) ending_hash.each do |host, mod_array| mod_array.each do |mod| if ! beginning_hash[host].include? mod @@ -119,7 +120,7 @@ def rm_installed_modules_from_hosts (beginning_hash, ending_hash) # 10242 # # @param semver [String] semantic version number - def semver_to_i ( semver ) + def semver_to_i( semver ) # semver assumed to be in format .. 
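# (Editorial aside, not part of the original diff.) As the next comment notes, each
# version segment is assumed to be < 100, so the encoding is positional in base 100:
# semver_to_i('1.2.42') evaluates to 1 * 10_000 + 2 * 100 + 42 == 10_242 (the example
# value shown above), and semver_cmp therefore orders versions numerically rather
# than lexically:
#
#   semver_cmp('1.10.0', '1.2.42')   #=> 11_000 - 10_242 = 758    (positive: 1.10.0 is newer)
#   semver_cmp('0.9.9',  '1.0.0')    #=> 909 - 10_000  = -9_091   (negative: 0.9.9 is older)
#
# A plain string comparison would get the first case wrong ('1.10.0' < '1.2.42').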
# calculation assumes that each segment is < 100 tmp = semver.split('.') @@ -133,7 +134,7 @@ def semver_to_i ( semver ) # a value greater than 0 indicates that the semver1 is greater than semver2 # a value less than 0 indicates that the semver1 is less than semver2 # - def semver_cmp ( semver1, semver2 ) + def semver_cmp( semver1, semver2 ) semver_to_i(semver1) - semver_to_i(semver2) end @@ -151,7 +152,7 @@ def semver_cmp ( semver1, semver2 ) # installed version # @param compare_op [String] the operator for comparing the verions of # the installed module - def assert_module_installed_ui ( stdout, module_author, module_name, module_version = nil, compare_op = nil ) + def assert_module_installed_ui( stdout, module_author, module_name, module_version = nil, compare_op = nil ) valid_compare_ops = {'==' => 'equal to', '>' => 'greater than', '<' => 'less than'} assert_match(/#{module_author}-#{module_name}/, stdout, "Notice that module '#{module_author}-#{module_name}' was installed was not displayed") @@ -171,7 +172,7 @@ def assert_module_installed_ui ( stdout, module_author, module_name, module_vers # @param module_name [String] the name portion of a module name # @param optional moduledir [String, Array] the path where the module should be, will # iterate over components of the modulepath by default. - def assert_module_installed_on_disk (host, module_name, moduledir=nil) + def assert_module_installed_on_disk(host, module_name, moduledir=nil) moduledir ||= get_modulepaths_for_host(host) modulepath = moduledir.is_a?(Array) ? moduledir : [moduledir] moduledir= nil @@ -219,7 +220,7 @@ def assert_module_installed_on_disk (host, module_name, moduledir=nil) end end - LS_REGEX = %r[(.)(...)(...)(...).?\s+\d+\s+(\w+)\s+(\w+).*(\S+)$] + LS_REGEX = %r[(.)(...)(...)(...).?[[:space:]]+\d+[[:space:]]+([[:word:]]+)[[:space:]]+([[:word:]]+).*[[:space:]]+([[:graph:]]+)$] def parse_ls(line) match = line.match(LS_REGEX) @@ -249,7 +250,7 @@ def parse_ls(line) # @param module_name [String] the name portion of a module name # @param optional moduledir [String, Array] the path where the module should be, will # iterate over components of the modulepath by default. - def assert_module_not_installed_on_disk (host, module_name, moduledir=nil) + def assert_module_not_installed_on_disk(host, module_name, moduledir=nil) moduledir ||= get_modulepaths_for_host(host) modulepath = moduledir.is_a?(Array) ? 
moduledir : [moduledir] moduledir= nil diff --git a/acceptance/lib/puppet/acceptance/puppet_type_test_tools.rb b/acceptance/lib/puppet/acceptance/puppet_type_test_tools.rb new file mode 100644 index 00000000000..b4f67b09c5f --- /dev/null +++ b/acceptance/lib/puppet/acceptance/puppet_type_test_tools.rb @@ -0,0 +1,103 @@ +require 'puppet/acceptance/environment_utils' + +module Puppet + module Acceptance + module PuppetTypeTestTools + include Puppet::Acceptance::EnvironmentUtils # for now, just for #random_string + + # FIXME: yardocs + # TODO: create resource class which contains its manifest chunk, and assertions + # can be an array or singular, holds the manifest and the assertion_code + # has getter for the manifest + # has #run_assertions(BeakerResult or string) + def generate_manifest(test_resources) + manifest = '' + test_resources = [test_resources].flatten # ensure it's an array so we enumerate properly + test_resources.each do |resource| + manifest << resource[:pre_code] + "\n" if resource[:pre_code] + namevar = (resource[:parameters][:namevar] if resource[:parameters]) || "#{resource[:type]}_#{random_string}" + # ensure these are double quotes around the namevar incase users puppet-interpolate inside it + # FIXME: add test ^^ + manifest << resource[:type] + '{"' + namevar + '":' if resource[:type] + if resource[:parameters] + resource[:parameters].each do |key,value| + next if key == :namevar + manifest << "#{key} => #{value}," + end + end + manifest << "}\n" if resource[:type] + end + return manifest + end + + def generate_assertions(test_resources) + assertion_code = '' + test_resources = [test_resources].flatten # ensure it's an array so we enumerate properly + test_resources.each do |resource| + if resource[:assertions] + resource[:assertions] = [resource[:assertions]].flatten # ensure it's an array so we enumerate properly + resource[:assertions].each do |assertion_type| + expect_failure = false + if assertion_type[:expect_failure] + expect_failure = true + assertion_code << "expect_failure '#{assertion_type[:expect_failure][:message]}' do\n" + # delete the message + assertion_type[:expect_failure].delete(:message) + # promote the hash in expect_failure + assertion_type = assertion_type[:expect_failure] + assertion_type.delete(:expect_failure) + end + + # ensure all the values are arrays + assertion_values = [assertion_type.values].flatten + assertion_values.each do |assertion_value| + # TODO: non matching asserts? + # TODO: non stdout? (support stdout, stderr, exit_code) + # TODO: what about checking resource state on host (non agent/apply #on use)? + if assertion_type.keys.first =~ /assert_match/ + assert_msg = 'found ' + elsif assertion_type.keys.first =~ /refute_match/ + assert_msg = 'did not find ' + else + assert_msg = '' + end + if assertion_value.is_a?(String) + matcher = "\"#{assertion_value}\"" + elsif assertion_value.is_a?(Regexp) + matcher = assertion_value.inspect + else + matcher = assertion_value + end + assertion_code << "#{assertion_type.keys.first}(#{matcher}, result.stdout, '#{assert_msg}#{matcher}')\n" + end + + assertion_code << "end\n" if expect_failure + end + end + end + return assertion_code + end + + Result = Struct.new(:stdout) + def run_assertions(assertions = '', result) + result_struct = Result.new + if result.respond_to? 
:stdout + result_struct.stdout = result.stdout + else + # handle results sent in as string + result_struct.stdout = result + end + result = result_struct + + begin + eval(assertions) + rescue RuntimeError, SyntaxError => e + puts e + puts assertions + raise + end + end + + end + end +end diff --git a/acceptance/lib/puppet/acceptance/puppet_type_test_tools_spec.rb b/acceptance/lib/puppet/acceptance/puppet_type_test_tools_spec.rb new file mode 100644 index 00000000000..db53888b9c3 --- /dev/null +++ b/acceptance/lib/puppet/acceptance/puppet_type_test_tools_spec.rb @@ -0,0 +1,131 @@ +require File.join(File.dirname(__FILE__),'../../acceptance_spec_helper.rb') +require 'puppet/acceptance/puppet_type_test_tools.rb' +require 'beaker/dsl/assertions' +require 'beaker/result' + +module Puppet + module Acceptance + + describe 'PuppetTypeTestTools' do + include PuppetTypeTestTools + include Beaker::DSL::Assertions + include Beaker + + context '#generate_manifest' do + it 'takes a single hash' do + expect(generate_manifest({:type => 'fake'})).to match(/^fake{"fake_\w{8}":}$/) + end + it 'takes an array' do + expect(generate_manifest([{:type => 'fake'}])).to match(/^fake{"fake_\w{8}":}$/) + end + it 'generates empty puppet code (assertion-only instance)' do + expect(generate_manifest({:fake => 'fake'})).to eql('') + end + it 'puts a namevar in the right place' do + expect(generate_manifest({:type => 'fake', :parameters => + {:namevar => 'blah'}})).to match(/^fake{"blah":}$/) + end + it 'retains puppet code in a namevar' do + expect(generate_manifest({:type => 'fake', :parameters => + {:namevar => "half_${interpolated}_puppet_namevar"}})). + to match(/^fake{"half_\${interpolated}_puppet_namevar":}$/) + end + it 'places pre_code before the type' do + expect(generate_manifest({:type => 'fake', :pre_code => '$some = puppet_code'})). + to match(/^\$some = puppet_code\nfake{"fake_\w{8}":}$/m) + end + it 'places multiple, arbitrary parameters' do + expect(generate_manifest({:type => 'fake', :parameters => + {:someprop => "function(call)", :namevar => "blah", :someparam => 2}})). 
+ to match(/^fake{"blah":someprop => function\(call\),someparam => 2,}$/) + end + end + + context '#generate_assertions' do + it 'takes a single hash' do + expect(generate_assertions({:assertions => {:fake => 'matcher'}})) + .to match(/^fake\("matcher", result\.stdout, '"matcher"'\)$/) + end + it 'takes an array' do + expect(generate_assertions([{:assertions => {:fake => 'matcher'}}])) + .to match(/^fake\("matcher", result\.stdout, '"matcher"'\)$/) + end + it 'generates empty assertions (puppet-code only instance)' do + expect(generate_assertions({:type => 'no assertions'})).to eql('') + end + it 'generates arbitrary assertions' do + expect(generate_assertions({:assertions => [{:fake => 'matcher'}, + {:other => 'othermatch'}]})) + .to match(/^fake\("matcher", result\.stdout, '"matcher"'\)\nother\("othermatch", result.stdout, '"othermatch"'\)$/m) + end + it 'can give a regex to assertions' do + expect(generate_assertions({:assertions => {:fake => /matcher/}})) + .to match(/^fake\(\/matcher\/, result\.stdout, '\/matcher\/'\)$/) + end + it 'allows multiple of one assertion type' do + expect(generate_assertions({:assertions => {:fake => ['matcher','othermatch']}})) + .to match(/^fake\("matcher", result\.stdout, '"matcher"'\)\nfake\("othermatch", result.stdout, '"othermatch"'\)$/) + end + it 'allows multiple assertion_types with multiple values' do + expect(generate_assertions({:assertions => [{:fake => ['matcher','othermatch']}, + {:fake2 => ['matcher2','othermatch2']}]})) + .to match(/^fake\("matcher", result\.stdout, '"matcher"'\)\nfake\("othermatch", result.stdout, '"othermatch"'\)\nfake2\("matcher2", result.stdout, '"matcher2"'\)\nfake2\("othermatch2", result.stdout, '"othermatch2"'\)\n$/) + end + context 'expect_failure' do + it 'generates arbitrary assertion' do + expect(generate_assertions({:assertions => {:expect_failure => {:fake => 'matcher'}}})) + .to match(/^expect_failure '' do\nfake\(.*\)\nend$/) + end + it 'allows multiple of one assertion type' do + expect(generate_assertions({:assertions => {:expect_failure => {:fake => ['matcher','othermatch']}}})) + .to match(/^expect_failure '' do\nfake\(.*\)\nfake\(.*\)\nend$/) + end + it 'allows multiple assertion_types' do + pending 'ack! 
requires recursion :-(' + #expect(generate_assertions({:assertions => {:expect_failure => [{:fake => 'matcher'},{:fake2 => 'matcher2'}]}})) + #.to match(/^expect_failure '' do\nfake\(.*\)\nfake2\(.*\)\nend$/) + end + it 'allows multiple assertion_types with an expect_failure on one' do + expect(generate_assertions({:assertions => [{:expect_failure => {:fake => 'matcher'}}, {:fake2 => 'matcher2'}]})) + .to match(/^expect_failure '' do\nfake\(.*\)\nend\nfake2\(.*\)$/) + end + it 'allows custom expect_failure messages' do + expect(generate_assertions({:assertions => {:expect_failure => {:fake => 'matcher', :message => 'oh noes, this should fail but pass'}}})) + .to match(/^expect_failure 'oh noes, this should fail but pass' do\nfake\(.*\)\nend$/) + end + end + it 'allow custom assertion messages' + end + + context 'run_assertions' do + #def run_assertions(assertions = '', result) + it 'takes a string result' do + expect(run_assertions('assert_match("yes please", result.stdout)', 'yes please')).to be true + end + let(:result) {Beaker::Result.new('host','command')} + it 'takes a beaker "type" Result' do + result.stdout = 'yes please' + expect(run_assertions('assert_match("yes please", result.stdout)', result)).to be true + end + it 'runs a bunch of assertions' do + result.stdout = 'yes please' + expect(run_assertions("assert_match('yes please', result.stdout)\nrefute_match('blah', result.stdout)", result)).to be false + end + it 'fails assertions' do + pending 'why doesnt this work?' + result.stdout = 'yes please' + expect(run_assertions('assert_match("blah", result.stdout)', result)).to raise_error + end + context 'exceptions' do + #rescue RuntimeError, SyntaxError => e + it 'puts the assertion code, raises error' do + pending 'why doesnt this work?' + expect(run_assertions('assert_match("blah") }', result)).to raise_error + end + end + end + + end + + end +end diff --git a/acceptance/lib/puppet/acceptance/rpm_util.rb b/acceptance/lib/puppet/acceptance/rpm_util.rb index a8415e6a41a..d34e63555b8 100644 --- a/acceptance/lib/puppet/acceptance/rpm_util.rb +++ b/acceptance/lib/puppet/acceptance/rpm_util.rb @@ -3,20 +3,42 @@ module Acceptance module RpmUtils # Utilities for creating a basic rpm package and using it in tests @@defaults = {:repo => '/tmp/rpmrepo', :pkg => 'mypkg', :publisher => 'tstpub.lan', :version => '1.0'} + @@setup_packages = {} + + def rpm_provider(agent) + has_dnf = on(agent, 'which dnf', :acceptable_exit_codes => [0,1]).exit_code + has_dnf == 0 ? 
'dnf' : 'yum' + end def setup(agent) - required_packages = ['createrepo', 'rpm-build'] + @@setup_packages[agent] ||= {} + cmd = rpm_provider(agent) + required_packages = %w[createrepo curl rpm-build] required_packages.each do |pkg| - unless ((on agent, "yum list installed #{pkg}", :acceptable_exit_codes => (0..255)).exit_code == 0) then - on agent, "yum install -y #{pkg}" + pkg_installed = (on agent, "#{cmd} list installed #{pkg}", :acceptable_exit_codes => (0..255)).exit_code == 0 + # We need a newer OpenSSH for the newer OpenSSL that curl installs + # RE-16677 + on(agent, 'dnf upgrade -y openssh') if (agent.platform.start_with?('el-9') && pkg == 'curl') + # package not present, so perform a new install + if !pkg_installed + on agent, "#{cmd} install -y #{pkg}" + # package is present, but has not yet attempted an upgrade + # note that this may influence YUM cache behavior + elsif !@@setup_packages[agent].has_key?(pkg) + # first pass, always attempt an upgrade to latest version + # fixes Fedora 25 curl compat with python-pycurl for instance + on agent, "#{cmd} upgrade -y #{pkg}" end + + @@setup_packages[agent][pkg] = true end end def clean_rpm(agent, o={}) + cmd = rpm_provider(agent) o = @@defaults.merge(o) on agent, "rm -rf #{o[:repo]}", :acceptable_exit_codes => (0..255) - on agent, "yum remove -y #{o[:pkg]}", :acceptable_exit_codes => (0..255) + on agent, "#{cmd} remove -y #{o[:pkg]}", :acceptable_exit_codes => (0..255) on agent, "rm -f /etc/yum.repos.d/#{o[:publisher]}.repo", :acceptable_exit_codes => (0..255) end @@ -59,10 +81,12 @@ def send_rpm(agent, o={}) Name: #{o[:pkg]} Version: #{o[:version]} Release: 1 +Epoch: #{o[:epoch] || 0} +BuildArch: noarch License: GPL+ Group: Development/Tools SOURCE0 : %{name}-%{version}.tar.gz -URL: http://www.puppetlabs.com/ +URL: https://www.puppetlabs.com/ BuildRoot: %{_topdir}/BUILD/%{name}-%{version}-%{release}-root @@ -98,6 +122,12 @@ def send_rpm(agent, o={}) " on agent, "rpmbuild -ba #{o[:repo]}/SPECS/#{o[:pkg]}.spec" on agent, "createrepo --update #{o[:repo]}" + + cmd = rpm_provider(agent) + # DNF requires a cache reset to make local repositories accessible. + if cmd == 'dnf' + on agent, "dnf clean metadata" + end end end end diff --git a/acceptance/lib/puppet/acceptance/service_utils.rb b/acceptance/lib/puppet/acceptance/service_utils.rb new file mode 100644 index 00000000000..757ac24838d --- /dev/null +++ b/acceptance/lib/puppet/acceptance/service_utils.rb @@ -0,0 +1,152 @@ +require 'puppet/acceptance/common_utils' + +module Puppet + module Acceptance + module ServiceUtils + + # Return whether a host supports the systemd provider. + # @param host [String] hostname + # @return [Boolean] whether the systemd provider is supported. + def supports_systemd?(host) + # The Windows MSI doesn't put Puppet in the Ruby vendor or site dir, so loading it fails. + return false if host.platform.variant == 'windows' + ruby = Puppet::Acceptance::CommandUtils.ruby_command(host) + suitable = on(host, "#{ruby} -e \"require 'puppet'; puts Puppet::Type.type(:service).provider(:systemd).suitable?\"" ).stdout.chomp + suitable == "true" ? true : false + end + + # Construct manifest ensuring service status. + # @param service [String] name of the service + # @param status [Hash] properties to set - can include 'ensure' and 'enable' keys. 
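# For example (editorial aside, not part of the original diff; the service name is
# illustrative), service_manifest('sshd', :ensure => 'running', :enable => 'true')
# renders roughly:
#
#   service { 'sshd':
#     ensure => 'running',
#     enable => 'true',
#   }
#
# Omitted keys simply drop the corresponding attribute line from the manifest.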
+ # @return [String] a manifest + def service_manifest(service, status) + ensure_status = "ensure => '#{status[:ensure]}'," if status[:ensure] + enable_status = "enable => '#{status[:enable]}'," if status[:enable] + %Q{ + service { '#{service}': + #{ensure_status} + #{enable_status} + } + } + end + + # Alter the state of a service using puppet apply and assert that a change was logged. + # Assumes the starting state is not the desired state. + # @param host [String] hostname. + # @param service [String] name of the service. + # @param status [Hash] properties to set - can include 'ensure' and 'enable' keys. + # @return None + def ensure_service_change_on_host(host, service, status) + # the process of creating the service will also start it + # to avoid a flickering test from the race condition, this test will ensure + # that the exit code is either + # 2 => something changed, or + # 0 => no change needed + apply_manifest_on(host, service_manifest(service, status), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/Service\[#{service}\]\/ensure: ensure changed '\w+' to '#{status[:ensure]}'/, result.stdout, 'Service status change failed') if status[:ensure] + assert_match(/Service\[#{service}\]\/enable: enable changed '\w+' to '#{status[:enable]}'/, result.stdout, 'Service enable change failed') if status[:enable] + end + end + + # Ensure the state of a service using puppet apply and assert that no change was logged. + # Assumes the starting state is the ensured state. + # @param host [String] hostname. + # @param service [String] name of the service. + # @param status [Hash] properties to set - can include 'ensure' and 'enable' keys. + # @return None + def ensure_service_idempotent_on_host(host, service, status) + # ensure idempotency + apply_manifest_on(host, service_manifest(service, status)) do |result| + refute_match(/Service\[#{service}\]\/ensure/, result.stdout, 'Service status not idempotent') if status[:ensure] + refute_match(/Service\[#{service}\]\/enable/, result.stdout, 'Service enable not idempotent') if status[:enable] + end + end + + # Alter the state of a service using puppet apply, assert that it changed and change is idempotent. + # Can set 'ensure' and 'enable'. Assumes the starting state is not the desired state. + # @param host [String] hostname. + # @param service [String] name of the service. + # @param status [Hash] properties to set - can include 'ensure' and 'enable' keys. + # @param block [Proc] optional: block to verify service state + # @return None + def ensure_service_on_host(host, service, status, &block) + ensure_service_change_on_host(host, service, status) + assert_service_status_on_host(host, service, status, &block) + ensure_service_idempotent_on_host(host, service, status) + assert_service_status_on_host(host, service, status, &block) + end + + # Checks that the ensure and/or enable status of a service are as expected. + # @param host [String] hostname. + # @param service [String] name of the service. + # @param status [Hash] properties to set - can include 'ensure' and 'enable' keys. 
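# (Editorial aside, not part of the original diff.) A typical caller drives the whole
# change-then-idempotency cycle through ensure_service_on_host, defined above; the
# service name and the optional verification block here are illustrative assumptions:
#
#   ensure_service_on_host(agent, 'sshd', :ensure => 'running', :enable => 'true') do
#     on(agent, 'systemctl is-active sshd', :acceptable_exit_codes => [0])
#   end
#
# This applies the generated manifest twice: once expecting a logged change and once
# expecting a no-op run, asserting the puppet resource output after each apply.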
+ # @param block [Proc] optional: block to verify service state + # @return None + def assert_service_status_on_host(host, service, status, &block) + ensure_status = "ensure.+=> '#{status[:ensure]}'" if status[:ensure] + enable_status = "enable.+=> '#{status[:enable]}'" if status[:enable] + + on(host, puppet_resource('service', service)) do |result| + assert_match(/'#{service}'.+#{ensure_status}.+#{enable_status}/m, result.stdout, "Service status does not match expectation #{status}") + end + + # Verify service state on the system using a custom block + if block + yield block + end + end + + # Refreshes a service. + # @param host [String] hostname. + # @param service [String] name of the service to refresh. + # @return None + def refresh_service_on_host(host, service) + refresh_manifest = %Q{ + service { '#{service}': } + + notify { 'Refreshing #{service}': + notify => Service['#{service}'], + } + } + + apply_manifest_on(host, refresh_manifest) + end + + # Runs some common acceptance tests for nonexistent services. + # @param service [String] name of the service + # @return None + def run_nonexistent_service_tests(service) + step "Verify that a nonexistent service is considered stopped, disabled and no logonaccount is reported" do + on(agent, puppet_resource('service', service)) do |result| + { enable: false, ensure: :stopped }.each do |property, value| + assert_match(/#{property}.*#{value}.*$/, result.stdout, "Puppet does not report #{property}=#{value} for a non-existent service") + end + refute_match(/logonaccount\s+=>/, result.stdout, "Puppet reports logonaccount for a non-existent service") + end + end + + step "Verify that stopping and disabling a nonexistent service is a no-op" do + manifest = service_manifest(service, ensure: :stopped, enable: false) + apply_manifest_on(agent, manifest, catch_changes: true) + end + + [ + [ :enabling, [ :enable, true ]], + [ :starting, [ :ensure, :running ]] + ].each do |operation, (property, value)| + manifest = service_manifest(service, property => value) + + step "Verify #{operation} a non-existent service prints an error message but does not fail the run without detailed exit codes" do + apply_manifest_on(agent, manifest) do |result| + assert_match(/Error:.*#{service}.*$/, result.stderr, "Puppet does not error when #{operation} a non-existent service.") + end + end + + step "Verify #{operation} a non-existent service with detailed exit codes correctly returns an error code" do + apply_manifest_on(agent, manifest, :acceptable_exit_codes => [4]) + end + end + end + end + end +end diff --git a/acceptance/lib/puppet/acceptance/solaris_util.rb b/acceptance/lib/puppet/acceptance/solaris_util.rb index 5467fd88899..8d4fc64c761 100644 --- a/acceptance/lib/puppet/acceptance/solaris_util.rb +++ b/acceptance/lib/puppet/acceptance/solaris_util.rb @@ -1,35 +1,5 @@ module Puppet module Acceptance - module ZoneUtils - def clean(agent) - lst = on(agent, "zoneadm list -cip").stdout.lines.each do |l| - case l - when /tstzone:running/ - on agent,"zoneadm -z tstzone halt" - on agent,"zoneadm -z tstzone uninstall -F" - on agent,"zonecfg -z tstzone delete -F" - on agent,"rm -f /etc/zones/tstzone.xml" - when /tstzone:configured/ - on agent,"zonecfg -z tstzone delete -F" - on agent,"rm -f /etc/zones/tstzone.xml" - when /tstzone:*/ - on agent,"zonecfg -z tstzone delete -F" - on agent,"rm -f /etc/zones/tstzone.xml" - end - end - lst = on(agent, "zfs list").stdout.lines.each do |l| - case l - when /rpool.tstzones/ - on agent,"zfs destroy -r rpool/tstzones" - end - end - on 
agent, "rm -rf /tstzones" - end - - def setup(agent, o={}) - o = {:size => '64m'}.merge(o) - end - end module IPSUtils def clean(agent, o={}) o = {:repo => '/var/tstrepo', :pkg => 'mypkg', :publisher => 'tstpub.lan'}.merge(o) @@ -87,8 +57,15 @@ def set_publisher(agent, o={}) module SMFUtils def clean(agent, o={}) o = {:service => 'tstapp'}.merge(o) - on agent, "svcadm disable %s ||:" % o[:service] - on agent, "svccfg delete %s ||:" % o[:service] + on(agent, "svcs -l %s" % o[:service], acceptable_exit_codes: [0, 1]) do |result| + next if result.stdout =~ /doesn't match/ + lines = result.stdout.chomp.lines + instances = lines.select { |line| line =~ /^fmri/ }.map { |line| line.split(' ')[1].chomp } + instances.each do |instance| + on agent, "svcadm disable %s ||:" % instance + on agent, "svccfg delete %s ||:" % instance + end + end on agent, "rm -rf /var/svc/manifest/application/%s.xml ||:" % o[:service] on agent, "rm -f /opt/bin/%s ||:" % o[:service] end @@ -133,7 +110,6 @@ def setup_methodscript(agent, o={}) - @@ -153,35 +129,5 @@ def setup_methodscript(agent, o={}) return ("/var/smf-%s.xml" % o[:service]), ("/lib/svc/method/%s" % o[:service]) end end - module ZFSUtils - def clean(agent, o={}) - o = {:fs=>'tstfs', :pool=>'tstpool', :poolpath => '/ztstpool'}.merge(o) - on agent, "zfs destroy -r %s/%s ||:" % [o[:pool], o[:fs]] - on agent, "zpool destroy %s ||:" % o[:pool] - on agent, "rm -rf %s ||:" % o[:poolpath] - end - - def setup(agent, o={}) - o = {:poolpath=>'/ztstpool', :pool => 'tstpool'}.merge(o) - on agent, "mkdir -p %s/mnt" % o[:poolpath] - on agent, "mkdir -p %s/mnt2" % o[:poolpath] - on agent, "mkfile 64m %s/dsk" % o[:poolpath] - on agent, "zpool create %s %s/dsk" % [ o[:pool], o[:poolpath]] - end - end - module ZPoolUtils - def clean(agent, o={}) - o = {:pool=>'tstpool', :poolpath => '/ztstpool'}.merge(o) - on agent, "zpool destroy %s ||:" % o[:pool] - on agent, "rm -rf %s ||:" % o[:poolpath] - end - - def setup(agent, o={}) - o = {:poolpath => '/ztstpool'}.merge(o) - on agent, "mkdir -p %s/mnt||:" % o[:poolpath] - on agent, "mkfile 100m %s/dsk1 %s/dsk2 %s/dsk3 %s/dsk5 ||:" % ([o[:poolpath]] * 4) - on agent, "mkfile 50m %s/dsk4 ||:" % o[:poolpath] - end - end end end diff --git a/acceptance/lib/puppet/acceptance/static_catalog_utils.rb b/acceptance/lib/puppet/acceptance/static_catalog_utils.rb new file mode 100644 index 00000000000..4a3c8e2e0c8 --- /dev/null +++ b/acceptance/lib/puppet/acceptance/static_catalog_utils.rb @@ -0,0 +1,55 @@ +module Puppet + module Acceptance + module StaticCatalogUtils + + # Adds code-id-command and code-content-command scripts + # to the server and updates puppetserver.conf. This is + # necessary for testing static catalogs. + # @param master [String] the host running puppetserver. + # @param scriptdir [String] the path to the directory where the scripts should be placed. 
+ def setup_puppetserver_code_id_scripts(master, scriptdir) + code_id_command = < true) +file { '#{scriptdir}/code_id.sh': + ensure => file, + content => "#{code_id_command}", + mode => "0755", +} + +file { '#{scriptdir}/code_content.sh': + ensure => file, + content => "#{code_content_command}", + mode => "0755", +} +MANIFEST + + puppetserver_config = "#{master['puppetserver-confdir']}/puppetserver.conf" + on master, "cp #{puppetserver_config} #{scriptdir}/puppetserver.conf.bak" + versioned_code_settings = {"versioned-code" => {"code-id-command" => "#{scriptdir}/code_id.sh", "code-content-command" => "#{scriptdir}/code_content.sh"}} + modify_tk_config(master, puppetserver_config, versioned_code_settings) + end + + def cleanup_puppetserver_code_id_scripts(master, scriptdir) + # These are -f so we don't bail on the teardown if for some reason they didn't get laid down + on master, "rm -f #{scriptdir}/code_id.sh" + on master, "rm -f #{scriptdir}/code_content.sh" + puppetserver_config = "#{master['puppetserver-confdir']}/puppetserver.conf" + on master, "cp #{scriptdir}/puppetserver.conf.bak #{puppetserver_config}" + end + end + end +end diff --git a/acceptance/lib/puppet/acceptance/temp_file_utils.rb b/acceptance/lib/puppet/acceptance/temp_file_utils.rb index 6105000bf2a..6678552b363 100644 --- a/acceptance/lib/puppet/acceptance/temp_file_utils.rb +++ b/acceptance/lib/puppet/acceptance/temp_file_utils.rb @@ -1,6 +1,32 @@ module Puppet module Acceptance module TempFileUtils + RWXR_XR_X = '0755' + PUPPET_CODEDIR_PERMISSIONS = RWXR_XR_X + + # Return the name of the root user, as appropriate for the platform. + def root_user(host) + case host['platform'] + when /windows/ + 'Administrator' + else + 'root' + end + end + + # Return the name of the root group, as appropriate for the platform. + def root_group(host) + case host['platform'] + when /windows/ + 'Administrators' + when /aix/ + 'system' + when /osx|bsd/ + 'wheel' + else + 'root' + end + end # Create a file on the host. # Parameters: @@ -22,28 +48,14 @@ def create_test_file(host, file_rel_path, file_content, options = {}) if host['roles'].include?('master') then options[:owner] = host.puppet['user'] else - case host['platform'] - when /windows/ - options[:owner] = 'Administrator' - else - options[:owner] = 'root' - end + options[:owner] = root_user(host) end end unless options[:group] if host['roles'].include?('master') then options[:group] = host.puppet['group'] else - case host['platform'] - when /windows/ - options[:group] = 'Administrators' - when /aix/ - options[:group] = 'system' - when /osx|bsd/ - options[:group] = 'wheel' - else - options[:owner] = 'root' - end + options[:group] = root_group(host) end end @@ -67,6 +79,8 @@ def create_test_file(host, file_rel_path, file_content, options = {}) # Given a relative path, returns an absolute path for a test file. Basically, this just prepends the # a unique temp dir path (specific to the current test execution) to your relative path. def get_test_file_path(host, file_rel_path) + initialize_temp_dirs unless @host_test_tmp_dirs + File.join(@host_test_tmp_dirs[host.name], file_rel_path) end @@ -125,6 +139,19 @@ def chmod(host, mode, path) on(host, "chmod #{mode} #{path}") end + # Returns an array containing the owner, group and mode of + # the file specified by path. 
The returned mode is an integer + # value containing only the file mode, excluding the type, e.g + # S_IFDIR 0040000 + def stat(host, path) + require File.join(File.dirname(__FILE__),'common_utils.rb') + ruby = Puppet::Acceptance::CommandUtils.ruby_command(host) + owner = on(host, "#{ruby} -e 'require \"etc\"; puts (Etc.getpwuid(File.stat(\"#{path}\").uid).name)'").stdout.chomp + group = on(host, "#{ruby} -e 'require \"etc\"; puts (Etc.getgrgid(File.stat(\"#{path}\").gid).name)'").stdout.chomp + mode = on(host, "#{ruby} -e 'puts (File.stat(\"#{path}\").mode & 07777)'").stdout.chomp.to_i + + [owner, group, mode] + end def initialize_temp_dirs() # pluck this out of the test case environment; not sure if there is a better way diff --git a/acceptance/lib/puppet/acceptance/windows_utils.rb b/acceptance/lib/puppet/acceptance/windows_utils.rb index 712f5029c47..bb37105d572 100644 --- a/acceptance/lib/puppet/acceptance/windows_utils.rb +++ b/acceptance/lib/puppet/acceptance/windows_utils.rb @@ -3,13 +3,41 @@ module Puppet module Acceptance module WindowsUtils + require 'puppet/acceptance/windows_utils/service.rb' + require 'puppet/acceptance/windows_utils/package_installer.rb' + def profile_base(agent) ruby = Puppet::Acceptance::CommandUtils.ruby_command(agent) getbasedir = <<'END' -require 'win32/dir' -puts Dir::PROFILE.match(/(.*)\\\\[^\\\\]*/)[1] +puts ENV['USERPROFILE'].match(/(.*)\\\\[^\\\\]*/)[1] END - on(agent, "#{ruby} -rubygems -e \"#{getbasedir}\"").stdout.chomp + on(agent, "#{ruby} -e \"#{getbasedir}\"").stdout.chomp + end + + # Checks whether the account with the given username has the given password on a host + def assert_password_matches_on(host, username, password, msg = nil) + script = <<-PS1 + Add-Type -AssemblyName System.DirectoryServices.AccountManagement + $ctx = New-Object System.DirectoryServices.AccountManagement.PrincipalContext([System.DirectoryServices.AccountManagement.ContextType]::Machine, $env:COMPUTERNAME) + $ctx.ValidateCredentials("#{username}", "#{password}") + PS1 + result = execute_powershell_script_on(host, script) + assert_match(/True/, result.stdout.strip, msg) + end + + def deny_administrator_access_to(host, filepath) + # we need to create a fake directory in the user's tempdir with powershell because the ACL + # perms set down by cygwin when making tempdirs makes the ACL unusable. Thus we create a + # tempdir using powershell and pull its' ACL as a starting point for the new ACL. + script = <<-PS1 + mkdir -Force $env:TMP\\fake-dir-for-acl + $acl = Get-ACL $env:TMP\\fake-dir-for-acl + rm -Force $env:TMP\\fake-dir-for-acl + $ar = New-Object system.security.accesscontrol.filesystemaccessrule("Administrator","FullControl","Deny") + $acl.SetAccessRule($ar) + Set-ACL #{filepath} $acl + PS1 + execute_powershell_script_on(host, script) end end end diff --git a/acceptance/lib/puppet/acceptance/windows_utils/package_installer.rb b/acceptance/lib/puppet/acceptance/windows_utils/package_installer.rb new file mode 100644 index 00000000000..9f59ba3fd5e --- /dev/null +++ b/acceptance/lib/puppet/acceptance/windows_utils/package_installer.rb @@ -0,0 +1,69 @@ +module Puppet + module Acceptance + module WindowsUtils + # Sets up a mock installer on the host. 
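# (Editorial aside, not part of the original diff.) A hypothetical invocation, where
# the package name is illustrative and the command strings are C# statements spliced
# into the installer/uninstaller fixtures:
#
#   tmpdir = agent.tmpdir('mock_package')
#   source = create_mock_package(agent, tmpdir,
#     :name               => 'MockPackage',
#     :install_commands   => '',
#     :uninstall_commands => '')
#
#   apply_manifest_on(agent, "package { 'MockPackage': ensure => installed, source => '#{source}' }")
#   assert(package_installed?(agent, 'MockPackage'), 'mock package should be installed')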
+ + def create_mock_package(host, tmpdir, config = {}, installer_file = 'MockInstaller.cs', uninstaller_file = 'MockUninstaller.cs') + installer_exe_path = "#{tmpdir}/#{config[:name].gsub(/\s+/, '')}Installer.exe".gsub('/', '\\') + uninstaller_exe_path = "#{tmpdir}/#{config[:name].gsub(/\s+/, '')}Uninstaller.exe".gsub('/', '\\') + tranformations = { + package_display_name: config[:name], + uninstaller_location: uninstaller_exe_path, + install_commands: config[:install_commands], + uninstall_commands: config[:uninstall_commands] + } + + [ + { source: installer_file, destination: installer_exe_path }, + { source: uninstaller_file, destination: uninstaller_exe_path }, + ].each do |exe| + fixture_path = File.join( + File.dirname(__FILE__), + '..', + '..', + '..', + '..', + 'fixtures', + exe[:source] + ) + code = File.read(fixture_path) % tranformations + build_mock_exe(host, exe[:destination], code) + end + # If the registry key still exists from a previous package install, then delete it. + teardown do + if package_installed?(host, config[:name]) + on host, powershell("\"Remove-Item HKLM:\\Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\#{config[:name]}\"") + end + end + # return the installer path for tests to use as the source: attribute + installer_exe_path + end + + def build_mock_exe(host, destination, code) + # Make a source file containing the code on the SUT, the source file + # will be the same location/name as the destination exe but with the .cs + # extension + source_path_on_host = destination.gsub(/\.exe$/, '.cs') + create_remote_file(host, source_path_on_host.gsub('\\', '/'), code) + # Create the installer.exe file by compiling the copied over C# code + # with PowerShell + create_installer_exe = "\"Add-Type"\ + " -TypeDefinition (Get-Content #{source_path_on_host} | Out-String)"\ + " -Language CSharp"\ + " -OutputAssembly #{destination}"\ + " -OutputType ConsoleApplication\"" + on host, powershell(create_installer_exe) + end + + def package_installed?(host, name) + # A successfully installed mock package will have created a registry key under + # HKLM:\\Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall. Simply checking + # for that key should suffice as an indicator that the installer completed + test_key = "\"Test-Path HKLM:\\Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\#{name}\"" + on(host, powershell(test_key)) do |result| + return result.stdout.chomp == 'True' + end + end + end + end +end diff --git a/acceptance/lib/puppet/acceptance/windows_utils/service.rb b/acceptance/lib/puppet/acceptance/windows_utils/service.rb new file mode 100644 index 00000000000..b4f615561c4 --- /dev/null +++ b/acceptance/lib/puppet/acceptance/windows_utils/service.rb @@ -0,0 +1,111 @@ +module Puppet + module Acceptance + module WindowsUtils + # Sets up a mock service on the host. The methodology here is a simplified + # version of what's described in https://msdn.microsoft.com/en-us/magazine/mt703436.aspx + def setup_service(host, config = {}, service_file = 'MockService.cs') + config[:name] ||= "Mock Service" + config[:display_name] ||= "#{config[:name]} (Puppet Acceptance Tests)" + config[:description] ||= "Service created solely for acceptance testing the Puppet Windows Service provider" + + # Create a temporary directory to store the service's C# source code + + # its .exe file. 
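# (Editorial aside, not part of the original diff.) From a test's perspective the
# helper is usually called once per host and the resulting service is then managed
# through Puppet; the name and sleep values below are illustrative assumptions that
# get interpolated into the C# fixture, and ensure_service_on_host comes from the
# ServiceUtils module earlier in this diff:
#
#   setup_service(agent, {:name => 'mocksvc', :start_sleep => 0, :pause_sleep => 0,
#                         :continue_sleep => 0, :stop_sleep => 0}, 'MockService.cs')
#   ensure_service_on_host(agent, 'mocksvc', :ensure => 'running')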
+ tmpdir = host.tmpdir("mock_service") + + # Copy-over the C# code + code_fixture_path = File.join( + File.dirname(__FILE__), + '..', + '..', + '..', + '..', + 'fixtures', + service_file + ) + code = File.read(code_fixture_path) % { + service_name: config[:name], + start_sleep: config[:start_sleep], + pause_sleep: config[:pause_sleep], + continue_sleep: config[:continue_sleep], + stop_sleep: config[:stop_sleep] + } + code_path_unix = "#{tmpdir}/source.cs" + code_path_win = code_path_unix.gsub('/', '\\') + create_remote_file(host, code_path_unix, code) + + # Create the service.exe file by compiling the copied over C# code + # with PowerShell + service_exe_path_win = "#{tmpdir}/#{config[:name]}.exe".gsub('/', '\\') + create_service_exe = "\"Add-Type"\ " -TypeDefinition (Get-Content #{code_path_win} | Out-String)"\ " -Language CSharp"\ " -OutputAssembly #{service_exe_path_win}"\ " -OutputType ConsoleApplication"\ " -ReferencedAssemblies 'System.ServiceProcess'\"" + on host, powershell(create_service_exe) + + # Now register the service with SCM + register_service_with_scm = "\"New-Service"\ " #{config[:name]}"\ " #{service_exe_path_win}"\ " -DisplayName '#{config[:display_name]}'"\ " -Description '#{config[:description]}'"\ " -StartupType Automatic\"" + on host, powershell(register_service_with_scm) + + # Ensure that our service is deleted after the tests + teardown { delete_service(host, config[:name]) } + end + + def delete_service(host, name) + # Check if our service has already been deleted. If so, then we + # have nothing else to do. + begin + on host, powershell("Get-Service #{name}") + rescue Beaker::Host::CommandFailure + return + end + + # Ensure that our service process is killed. We cannot do a Stop-Service here + # b/c there's a chance that our service could be in a pending state (e.g. + # "PausePending", "ContinuePending"). If this is the case, then Stop-Service + # will fail. + on host, powershell("\"Get-Process #{name} -ErrorAction SilentlyContinue | Stop-Process -Force\" | exit 0") + + # Now remove our service. We use sc.exe because older versions of PowerShell + # may not have the Remove-Service cmdlet. + on host, "sc.exe delete #{name}" + end + + # Config should be a hash of property_name => expected_value + def assert_service_properties_on(host, name, properties = {}) + properties.each do |property, expected_value| + # We need to get the underlying WMI object for the service since that + # object contains all of our service properties. The one returned by + # Get-Service only has these properties for newer versions of PowerShell.
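# For illustration only (property names and values here are hypothetical, not from this changeset),
# a caller might assert against Win32_Service fields like:
#   assert_service_properties_on(host, 'MockService', State: 'Running', StartMode: 'Auto')
# where each key is a Win32_Service property and each expected value is checked with assert_match.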
+ get_property_value = "\"Get-WmiObject -Class Win32_Service"\ + " | Where-Object { \\$_.name -eq '#{name}' }"\ + " | ForEach-Object { \\$_.#{property} }\"" + + on(host, powershell(get_property_value)) do |result| + actual_value = result.stdout.chomp + + property_str = "#{name}[#{property}]" + assert_match(expected_value, actual_value, "EXPECTED: #{property_str} = #{expected_value}, ACTUAL: #{property_str} = #{actual_value}") + end + end + end + + def assert_service_startmode_delayed(host, name) + get_delayed_service = "\"Get-ChildItem HKLM:\\SYSTEM\\CurrentControlSet\\Services"\ + " | Where-Object { \\$_.Property -Contains 'DelayedAutoStart' -And \\$_.PsChildName -Like '#{name}' }"\ + " | Select-Object -ExpandProperty PSChildName\"" + + on(host, powershell(get_delayed_service)) do |result| + svc = result.stdout.chomp + assert(!svc.empty?, "Service #{name} does not exist or is not a delayed service") + end + end + end + end +end diff --git a/acceptance/lib/puppet_x/acceptance/external_cert_fixtures.rb b/acceptance/lib/puppet_x/acceptance/external_cert_fixtures.rb deleted file mode 100644 index 5a8a0610ae0..00000000000 --- a/acceptance/lib/puppet_x/acceptance/external_cert_fixtures.rb +++ /dev/null @@ -1,361 +0,0 @@ -module PuppetX -module Acceptance -class ExternalCertFixtures - attr_reader :fixture_dir - attr_reader :test_dir - attr_reader :master_name - attr_reader :agent_name - - ## - # ExternalCerts provides a utility class to fill in fixture data and other - # large blobs of text configuration for the acceptance testing of External CA - # behavior. - # - # @param [String] fixture_dir The fixture directory to read from. - # - # @param [String] test_dir The directory on the remote system, used for - # filling in templates. - # - # @param [String] master_name The common name the master should be reachable - # at. This name should match up with the certificate files in the fixture - # directory, e.g. master1.example.org. - # - # @param [String] agent_name The common name the agent is configured to use. - # This name should match up with the certificate files in the fixture - # directory, e.g. 
- def initialize(fixture_dir, test_dir, master_name = "master1.example.org", agent_name = "agent1.example.org") - @fixture_dir = fixture_dir - @test_dir = test_dir - @master_name = master_name - @agent_name = agent_name - end - - def master_short_name - @master_short_name ||= master_name.gsub(/\..*/, '') - end - - def host_entry - @host_entry ||= "127.0.0.3 #{master_name} #{master_short_name} puppet" - end - - def root_ca_cert - @root_ca_cert ||= File.read(File.join(fixture_dir, 'root', 'ca-root.crt')) - end - - def agent_ca_cert - @agent_ca_cert ||= File.read(File.join(fixture_dir, 'agent-ca', 'ca-agent-ca.crt')) - end - - def master_ca_cert - @master_ca_cert ||= File.read(File.join(fixture_dir, 'master-ca', 'ca-master-ca.crt')) - end - - def master_ca_crl - @master_ca_crl ||= File.read(File.join(fixture_dir, 'master-ca', 'ca-master-ca.crl')) - end - - def agent_cert - @agent_cert ||= File.read(File.join(fixture_dir, 'leaves', "#{agent_name}.issued_by.agent-ca.crt")) - end - - def agent_key - @agent_key ||= File.read(File.join(fixture_dir, 'leaves', "#{agent_name}.issued_by.agent-ca.key")) - end - - def agent_email_cert - @agent_email_cert ||= File.read(File.join(fixture_dir, 'leaves', "#{agent_name}.email.issued_by.agent-ca.crt")) - end - - def agent_email_key - @agent_email_cert ||= File.read(File.join(fixture_dir, 'leaves', "#{agent_name}.email.issued_by.agent-ca.key")) - end - - def master_cert - @master_cert ||= File.read(File.join(fixture_dir, 'leaves', "#{master_name}.issued_by.master-ca.crt")) - end - - def master_key - @master_key ||= File.read(File.join(fixture_dir, 'leaves', "#{master_name}.issued_by.master-ca.key")) - end - - def master_cert_rogue - @master_cert_rogue ||= File.read(File.join(fixture_dir, 'leaves', "#{master_name}.issued_by.agent-ca.crt")) - end - - def master_key_rogue - @master_key_rogue ||= File.read(File.join(fixture_dir, 'leaves', "#{master_name}.issued_by.agent-ca.key")) - end - - ## Configuration files - def agent_conf - @agent_conf ||= <<-EO_AGENT_CONF -[main] -color = false -certname = #{agent_name} -server = #{master_name} -certificate_revocation = false - -# localcacert must contain the Root CA certificate to complete the 2 level CA -# chain when an intermediate CA certificate is being used. Either the HTTP -# server must send the intermediate certificate during the handshake, or the -# agent must use the `ssl_client_ca_auth` setting to provide the client -# certificate. -localcacert = #{test_dir}/ca_root.crt -EO_AGENT_CONF - end - - def agent_conf_email - @agent_conf ||= <<-EO_AGENT_CONF -[main] -color = false -certname = #{agent_name} -server = #{master_name} -certificate_revocation = false -hostcert = #{test_dir}/agent_email.crt -hostkey = #{test_dir}/agent_email.key -localcacert = #{test_dir}/ca_root.crt -EO_AGENT_CONF - end - - def agent_conf_crl - @agent_conf_crl ||= <<-EO_AGENT_CONF -[main] -certname = #{agent_name} -server = #{master_name} - -# localcacert must contain the Root CA certificate to complete the 2 level CA -# chain when an intermediate CA certificate is being used. Either the HTTP -# server must send the intermediate certificate during the handshake, or the -# agent must use the `ssl_client_ca_auth` setting to provide the client -# certificate. 
-localcacert = #{test_dir}/ca_root.crt -EO_AGENT_CONF - end - - def master_conf - @master_conf ||= <<-EO_MASTER_CONF -[master] -ca = false -certname = #{master_name} -ssl_client_header = HTTP_X_CLIENT_DN -ssl_client_verify_header = HTTP_X_CLIENT_VERIFY -EO_MASTER_CONF - end - - ## - # Passenger Rack compliant config.ru which is responsible for starting the - # Puppet master. - def config_ru - @config_ru ||= <<-EO_CONFIG_RU -\$0 = "master" -ARGV << "--rack" -ARGV << "--confdir=#{test_dir}/etc/master" -ARGV << "--vardir=#{test_dir}/etc/master/var" -require 'puppet/util/command_line' -run Puppet::Util::CommandLine.new.execute -EO_CONFIG_RU - end - - ## - # auth_conf should return auth authorization file that allows *.example.org - # access to to the full REST API. - def auth_conf - @auth_conf_content ||= File.read(File.join(fixture_dir, 'auth.conf')) - end - - ## - # Apache configuration with Passenger - def httpd_conf - @httpd_conf ||= <<-EO_HTTPD_CONF -User apache -Group apache - -ServerRoot "/etc/httpd" -PidFile run/httpd.pid -Timeout 60 -KeepAlive Off -MaxKeepAliveRequests 100 -KeepAliveTimeout 15 - - -StartServers 8 -MinSpareServers 5 -MaxSpareServers 20 -ServerLimit 256 -MaxClients 256 -MaxRequestsPerChild 4000 - - - -StartServers 4 -MaxClients 300 -MinSpareThreads 25 -MaxSpareThreads 75 -ThreadsPerChild 25 -MaxRequestsPerChild 0 - - -LoadModule auth_basic_module modules/mod_auth_basic.so -LoadModule auth_digest_module modules/mod_auth_digest.so -LoadModule authn_file_module modules/mod_authn_file.so -LoadModule authn_alias_module modules/mod_authn_alias.so -LoadModule authn_anon_module modules/mod_authn_anon.so -LoadModule authn_dbm_module modules/mod_authn_dbm.so -LoadModule authn_default_module modules/mod_authn_default.so -LoadModule authz_host_module modules/mod_authz_host.so -LoadModule authz_user_module modules/mod_authz_user.so -LoadModule authz_owner_module modules/mod_authz_owner.so -LoadModule authz_groupfile_module modules/mod_authz_groupfile.so -LoadModule authz_dbm_module modules/mod_authz_dbm.so -LoadModule authz_default_module modules/mod_authz_default.so -LoadModule ldap_module modules/mod_ldap.so -LoadModule authnz_ldap_module modules/mod_authnz_ldap.so -LoadModule include_module modules/mod_include.so -LoadModule log_config_module modules/mod_log_config.so -LoadModule logio_module modules/mod_logio.so -LoadModule env_module modules/mod_env.so -LoadModule ext_filter_module modules/mod_ext_filter.so -LoadModule mime_magic_module modules/mod_mime_magic.so -LoadModule expires_module modules/mod_expires.so -LoadModule deflate_module modules/mod_deflate.so -LoadModule headers_module modules/mod_headers.so -LoadModule usertrack_module modules/mod_usertrack.so -LoadModule setenvif_module modules/mod_setenvif.so -LoadModule mime_module modules/mod_mime.so -LoadModule dav_module modules/mod_dav.so -LoadModule status_module modules/mod_status.so -LoadModule autoindex_module modules/mod_autoindex.so -LoadModule info_module modules/mod_info.so -LoadModule dav_fs_module modules/mod_dav_fs.so -LoadModule vhost_alias_module modules/mod_vhost_alias.so -LoadModule negotiation_module modules/mod_negotiation.so -LoadModule dir_module modules/mod_dir.so -LoadModule actions_module modules/mod_actions.so -LoadModule speling_module modules/mod_speling.so -LoadModule userdir_module modules/mod_userdir.so -LoadModule alias_module modules/mod_alias.so -LoadModule substitute_module modules/mod_substitute.so -LoadModule rewrite_module modules/mod_rewrite.so -LoadModule proxy_module 
modules/mod_proxy.so -LoadModule proxy_balancer_module modules/mod_proxy_balancer.so -LoadModule proxy_ftp_module modules/mod_proxy_ftp.so -LoadModule proxy_http_module modules/mod_proxy_http.so -LoadModule proxy_ajp_module modules/mod_proxy_ajp.so -LoadModule proxy_connect_module modules/mod_proxy_connect.so -LoadModule cache_module modules/mod_cache.so -LoadModule suexec_module modules/mod_suexec.so -LoadModule disk_cache_module modules/mod_disk_cache.so -LoadModule cgi_module modules/mod_cgi.so -LoadModule version_module modules/mod_version.so - -LoadModule ssl_module modules/mod_ssl.so -LoadModule passenger_module modules/mod_passenger.so - -ServerName #{master_name} -DocumentRoot "#{test_dir}/etc/master/public" - -DefaultType text/plain -TypesConfig /etc/mime.types - -# Same thing, just using a certificate issued by the Agent CA, which should not -# be trusted by the clients. - -Listen 8140 https -Listen 8141 https - - - SSLEngine on - SSLProtocol ALL -SSLv2 - SSLCipherSuite ALL:!ADH:RC4+RSA:+HIGH:+MEDIUM:-LOW:-SSLv2:-EXP - - SSLCertificateFile "#{test_dir}/master.crt" - SSLCertificateKeyFile "#{test_dir}/master.key" - - # The chain file is sent to the client during handshake. - SSLCertificateChainFile "#{test_dir}/ca_master_bundle.crt" - # The CA cert file is used to authenticate clients - SSLCACertificateFile "#{test_dir}/ca_agent_bundle.crt" - - SSLVerifyClient optional - SSLVerifyDepth 2 - SSLOptions +StdEnvVars - RequestHeader set X-SSL-Subject %{SSL_CLIENT_S_DN}e - RequestHeader set X-Client-DN %{SSL_CLIENT_S_DN}e - RequestHeader set X-Client-Verify %{SSL_CLIENT_VERIFY}e - - DocumentRoot "#{test_dir}/etc/master/public" - - PassengerRoot /usr/share/gems/gems/passenger-3.0.17 - PassengerRuby /usr/bin/ruby - - RackAutoDetect On - RackBaseURI / - - - - SSLEngine on - SSLProtocol ALL -SSLv2 - SSLCipherSuite ALL:!ADH:RC4+RSA:+HIGH:+MEDIUM:-LOW:-SSLv2:-EXP - SSLCertificateFile "#{test_dir}/master_rogue.crt" - SSLCertificateKeyFile "#{test_dir}/master_rogue.key" - - SSLCertificateChainFile "#{test_dir}/ca_agent_bundle.crt" - SSLCACertificateFile "#{test_dir}/ca_agent_bundle.crt" - - SSLVerifyClient optional - SSLVerifyDepth 2 - SSLOptions +StdEnvVars - RequestHeader set X-SSL-Subject %{SSL_CLIENT_S_DN}e - RequestHeader set X-Client-DN %{SSL_CLIENT_S_DN}e - RequestHeader set X-Client-Verify %{SSL_CLIENT_VERIFY}e - - DocumentRoot "#{test_dir}/etc/master/public" - - PassengerRoot /usr/share/gems/gems/passenger-3.0.17 - PassengerRuby /usr/bin/ruby - - RackAutoDetect On - RackBaseURI / - -EO_HTTPD_CONF - end - - ## - # webserver.conf for a trustworthy master for use with Jetty - def jetty_webserver_conf_for_trustworthy_master - @jetty_webserver_conf_for_trustworthy_master ||= <<-EO_WEBSERVER_CONF -webserver: { - client-auth: want - ssl-host: 0.0.0.0 - ssl-port: 8140 - - ssl-cert: "#{test_dir}/master.crt" - ssl-key: "#{test_dir}/master.key" - - ssl-cert-chain: "#{test_dir}/ca_master_bundle.crt" - ssl-ca-cert: "#{test_dir}/ca_agent_bundle.crt" -} - EO_WEBSERVER_CONF - end - - ## - # webserver.conf for a rogue master for use with Jetty - def jetty_webserver_conf_for_rogue_master - @jetty_webserver_conf_for_rogue_master ||= <<-EO_WEBSERVER_CONF -webserver: { - client-auth: want - ssl-host: 0.0.0.0 - ssl-port: 8140 - - ssl-cert: "#{test_dir}/master_rogue.crt" - ssl-key: "#{test_dir}/master_rogue.key" - - ssl-cert-chain: "#{test_dir}/ca_agent_bundle.crt" - ssl-ca-cert: "#{test_dir}/ca_agent_bundle.crt" -} - EO_WEBSERVER_CONF - end - -end -end -end diff --git 
a/acceptance/setup/aio/pre-suite/010_Install.rb b/acceptance/setup/aio/pre-suite/010_Install.rb deleted file mode 100644 index c038f6413ae..00000000000 --- a/acceptance/setup/aio/pre-suite/010_Install.rb +++ /dev/null @@ -1,79 +0,0 @@ -require 'puppet/acceptance/install_utils' - -extend Puppet::Acceptance::InstallUtils - -test_name "Install Packages" - -step "Install repositories on target machines..." do - - sha = ENV['SHA'] - repo_configs_dir = 'repo-configs' - - hosts.each do |host| - install_repos_on(host, 'puppet-agent', sha, repo_configs_dir) - end - - if master['passenger'] - passenger_version = ENV['PASSENGER_VERSION'] || '3518347c3480172fcef41406cad31b7ed34cd14f' - install_repos_on(master, 'puppet-master-passenger', passenger_version, repo_configs_dir) - else - server_version = ENV['SERVER_VERSION'] || 'nightly' - install_repos_on(master, 'puppetserver', server_version, repo_configs_dir) - end -end - -if master['passenger'] - MASTER_PACKAGES = { - :redhat => [ - 'puppet-master-passenger', - ], - :debian => [ - 'puppet-master-passenger', - ], - } -else - MASTER_PACKAGES = { - :redhat => [ - 'puppetserver', - ], - :debian => [ - 'puppetserver', - ], -# :solaris => [ -# 'puppet-server', -# ], -# :windows => [ -# 'puppet-server', -# ], - } -end - -AGENT_PACKAGES = { - :redhat => [ - 'puppet-agent', - ], - :debian => [ - 'puppet-agent', - ], -# :solaris => [ -# 'puppet', -# ], -# :windows => [ -# 'puppet', -# ], -} - -install_packages_on(master, MASTER_PACKAGES) -install_packages_on(agents, AGENT_PACKAGES) - -agents.each do |agent| - if agent['platform'] =~ /windows/ - arch = agent[:ruby_arch] || 'x86' - base_url = ENV['MSI_BASE_URL'] || "http://builds.puppetlabs.lan/puppet-agent/#{ENV['SHA']}/artifacts/windows" - filename = ENV['MSI_FILENAME'] || "puppet-agent-#{arch}.msi" - - install_puppet_from_msi(agent, :url => "#{base_url}/#{filename}") - end -end - -configure_gem_mirror(hosts) diff --git a/acceptance/setup/aio/pre-suite/015_PackageHostsPresets.rb b/acceptance/setup/aio/pre-suite/015_PackageHostsPresets.rb deleted file mode 100644 index 6476bbed202..00000000000 --- a/acceptance/setup/aio/pre-suite/015_PackageHostsPresets.rb +++ /dev/null @@ -1 +0,0 @@ -master['use-service'] = true diff --git a/acceptance/setup/aio/pre-suite/045_EnsureMasterStartedOnPassenger.rb b/acceptance/setup/aio/pre-suite/045_EnsureMasterStartedOnPassenger.rb deleted file mode 100644 index 20f4fdfb5a3..00000000000 --- a/acceptance/setup/aio/pre-suite/045_EnsureMasterStartedOnPassenger.rb +++ /dev/null @@ -1,3 +0,0 @@ -if master.graceful_restarts? 
- on(master, puppet('resource', 'service', master['puppetservice'], "ensure=running")) -end diff --git a/acceptance/setup/common/pre-suite/025_StopFirewall.rb b/acceptance/setup/common/pre-suite/025_StopFirewall.rb deleted file mode 100644 index 0d651ba0124..00000000000 --- a/acceptance/setup/common/pre-suite/025_StopFirewall.rb +++ /dev/null @@ -1,9 +0,0 @@ -require 'puppet/acceptance/install_utils' - -extend Puppet::Acceptance::InstallUtils - -test_name "Stop firewall" do - hosts.each do |host| - stop_firewall_on(host) - end -end diff --git a/acceptance/setup/common/pre-suite/040_ValidateSignCert.rb b/acceptance/setup/common/pre-suite/040_ValidateSignCert.rb deleted file mode 100644 index 549432427bd..00000000000 --- a/acceptance/setup/common/pre-suite/040_ValidateSignCert.rb +++ /dev/null @@ -1,6 +0,0 @@ -test_name "Validate Sign Cert" - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CAUtils - -initialize_ssl diff --git a/acceptance/setup/common/pre-suite/070_InstallCACerts.rb b/acceptance/setup/common/pre-suite/070_InstallCACerts.rb deleted file mode 100644 index 19d87751a69..00000000000 --- a/acceptance/setup/common/pre-suite/070_InstallCACerts.rb +++ /dev/null @@ -1,93 +0,0 @@ -test_name "Install CA Certs" -confine :to, :platform => 'windows' - -GEOTRUST_GLOBAL_CA = <<-EOM ------BEGIN CERTIFICATE----- -MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i -YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG -EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg -R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 -9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq -fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv -iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU -1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ -bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW -MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA -ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l -uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn -Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS -tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF -PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un -hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV -5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== ------END CERTIFICATE----- -EOM - -USERTRUST_NETWORK_CA = <<-EOM ------BEGIN CERTIFICATE----- -MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB -lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug -Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho -dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt -SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG -A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe -MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v -d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh -cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn -0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ -M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a -MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd -oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI -DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy 
-oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD -VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 -dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy -bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF -BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM -//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli -CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE -CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t -3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS -KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== ------END CERTIFICATE----- -EOM - -EQUIFAX_CA = <<-EOM ------BEGIN CERTIFICATE----- -MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV -UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy -dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 -MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx -dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B -AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f -BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A -cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC -AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ -MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm -aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw -ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj -IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF -MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA -A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y -7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh -1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 ------END CERTIFICATE----- -EOM - -hosts.each do |host| - step "Installing Geotrust CA cert" - create_remote_file(host, "geotrustglobal.pem", GEOTRUST_GLOBAL_CA) - on host, "chmod 644 geotrustglobal.pem" - on host, "cmd /c certutil -v -addstore Root `cygpath -w geotrustglobal.pem`" - - step "Installing Usertrust Network CA cert" - create_remote_file(host, "usertrust-network.pem", USERTRUST_NETWORK_CA) - on host, "chmod 644 usertrust-network.pem" - on host, "cmd /c certutil -v -addstore Root `cygpath -w usertrust-network.pem`" - - step "Installing Equifax CA cert" - create_remote_file(host, "equifax.pem", EQUIFAX_CA) - on host, "chmod 644 equifax.pem" - on host, "cmd /c certutil -v -addstore Root `cygpath -w equifax.pem`" -end diff --git a/acceptance/setup/common/pre-suite/110_SetPEPuppetService.rb b/acceptance/setup/common/pre-suite/110_SetPEPuppetService.rb deleted file mode 100644 index 49ee5a8b94a..00000000000 --- a/acceptance/setup/common/pre-suite/110_SetPEPuppetService.rb +++ /dev/null @@ -1 +0,0 @@ -master['puppetservice'] = 'pe-puppetserver' diff --git a/acceptance/setup/git/pre-suite/000_EnvSetup.rb b/acceptance/setup/git/pre-suite/000_EnvSetup.rb deleted file mode 100644 index edf1e1dca87..00000000000 --- a/acceptance/setup/git/pre-suite/000_EnvSetup.rb +++ /dev/null @@ -1,124 +0,0 @@ -test_name "Setup environment" - -step "Ensure Git and Ruby" - -require 'puppet/acceptance/install_utils' -extend Puppet::Acceptance::InstallUtils -require 'puppet/acceptance/git_utils' -extend Puppet::Acceptance::GitUtils -require 'beaker/dsl/install_utils' -extend Beaker::DSL::InstallUtils - -PACKAGES = { - :redhat => [ - 'git', - 'ruby', - 'rubygem-json', - ], - :debian => [ - ['git', 
'git-core'], - 'ruby', - ], - :debian_ruby18 => [ - 'libjson-ruby', - ], - :solaris_11 => [ - ['git', 'developer/versioning/git'], - ], - :solaris_10 => [ - 'coreutils', - 'curl', # update curl to fix "CURLOPT_SSL_VERIFYHOST no longer supports 1 as value!" issue - 'git', - 'ruby19', - 'ruby19_dev', - 'gcc4core', - ], - :windows => [ - 'git', - # there isn't a need for json on windows because it is bundled in ruby 1.9 - ], -} - -hosts.each do |host| - case host['platform'] - when /solaris-10/ - on host, 'mkdir -p /var/lib' - on host, 'ln -sf /opt/csw/bin/pkgutil /usr/bin/pkgutil' - on host, 'ln -sf /opt/csw/bin/gem19 /usr/bin/gem' - on host, 'ln -sf /opt/csw/bin/git /usr/bin/git' - on host, 'ln -sf /opt/csw/bin/ruby19 /usr/bin/ruby' - on host, 'ln -sf /opt/csw/bin/gstat /usr/bin/stat' - on host, 'ln -sf /opt/csw/bin/greadlink /usr/bin/readlink' - when /solaris-11/ - step "#{host} jump through hoops to install ruby19; switch back to runtime/ruby-19 after template upgrade to sol11.2" - create_remote_file host, "/root/shutupsolaris", < true) - -hosts.each do |host| - case host['platform'] - when /windows/ - arch = host[:ruby_arch] || 'x86' - step "#{host} Selected architecture #{arch}" - - revision = if arch == 'x64' - '2.1.x-x64' - else - '2.1.x-x86' - end - - step "#{host} Install ruby from git using revision #{revision}" - # TODO remove this step once we are installing puppet from msi packages - install_from_git(host, "/opt/puppet-git-repos", - :name => 'puppet-win32-ruby', - :path => build_giturl('puppet-win32-ruby'), - :rev => revision) - on host, 'cd /opt/puppet-git-repos/puppet-win32-ruby; cp -r ruby/* /' - on host, 'cd /lib; icacls ruby /grant "Everyone:(OI)(CI)(RX)"' - on host, 'cd /lib; icacls ruby /reset /T' - on host, 'cd /; icacls bin /grant "Everyone:(OI)(CI)(RX)"' - on host, 'cd /; icacls bin /reset /T' - on host, 'ruby --version' - on host, 'cmd /c gem list' - end -end - -# Only configure gem mirror after Ruby has been installed, but before any gems are installed. -configure_gem_mirror(hosts) - -hosts.each do |host| - case host['platform'] - when /solaris/ - step "#{host} Install json from rubygems" - on host, 'gem install json_pure' - end -end diff --git a/acceptance/setup/git/pre-suite/010_TestSetup.rb b/acceptance/setup/git/pre-suite/010_TestSetup.rb deleted file mode 100644 index e9be979675e..00000000000 --- a/acceptance/setup/git/pre-suite/010_TestSetup.rb +++ /dev/null @@ -1,60 +0,0 @@ -begin - require 'beaker/dsl/install_utils' -end - -test_name "Install packages and repositories on target machines..." do - extend Beaker::DSL::InstallUtils - - SourcePath = Beaker::DSL::InstallUtils::SourcePath - GitURI = Beaker::DSL::InstallUtils::GitURI - GitHubSig = Beaker::DSL::InstallUtils::GitHubSig - - tmp_repositories = [] - options[:install].each do |uri| - raise(ArgumentError, "Missing GitURI argument. URI is nil.") if uri.nil? - raise(ArgumentError, "#{uri} is not recognized.") unless(uri =~ GitURI) - tmp_repositories << extract_repo_info_from(uri) - end - - repositories = order_packages(tmp_repositories) - - versions = {} - hosts.each_with_index do |host, index| - on host, "echo #{GitHubSig} >> $HOME/.ssh/known_hosts" - - repositories.each do |repository| - step "Install #{repository[:name]}" - if repository[:path] =~ /^file:\/\/(.+)$/ - on host, "test -d #{SourcePath} || mkdir -p #{SourcePath}" - source_dir = $1 - checkout_dir = "#{SourcePath}/#{repository[:name]}" - on host, "rm -f #{checkout_dir}" # just the symlink, do not rm -rf ! 
- on host, "ln -s #{source_dir} #{checkout_dir}" - on host, "cd #{checkout_dir} && if [ -f install.rb ]; then ruby ./install.rb ; else true; fi" - else - install_from_git host, SourcePath, repository - end - - if index == 1 - versions[repository[:name]] = find_git_repo_versions(host, - SourcePath, - repository) - end - end - end - - step "Hosts: create basic puppet.conf" do - hosts.each do |host| - confdir = host.puppet['confdir'] - on host, "mkdir -p #{confdir}" - puppetconf = File.join(confdir, 'puppet.conf') - - if host['roles'].include?('agent') - on host, "echo '[agent]' > '#{puppetconf}' && " + - "echo server=#{master} >> '#{puppetconf}'" - else - on host, "touch '#{puppetconf}'" - end - end - end -end diff --git a/acceptance/setup/git/pre-suite/020_PuppetUserAndGroup.rb b/acceptance/setup/git/pre-suite/020_PuppetUserAndGroup.rb deleted file mode 100644 index 394f8d39bf7..00000000000 --- a/acceptance/setup/git/pre-suite/020_PuppetUserAndGroup.rb +++ /dev/null @@ -1,10 +0,0 @@ -test_name 'Puppet User and Group' do - hosts.each do |host| - - step "ensure puppet user and group added to all nodes because this is what the packages do" do - on host, puppet("resource user puppet ensure=present") - on host, puppet("resource group puppet ensure=present") - end - - end -end diff --git a/acceptance/setup/git/pre-suite/030_PuppetMasterSanity.rb b/acceptance/setup/git/pre-suite/030_PuppetMasterSanity.rb deleted file mode 100644 index b1af5ae8db5..00000000000 --- a/acceptance/setup/git/pre-suite/030_PuppetMasterSanity.rb +++ /dev/null @@ -1,18 +0,0 @@ -test_name "Puppet Master sanity checks: PID file and SSL dir creation" - -hostname = on(master, 'facter hostname').stdout.strip -fqdn = on(master, 'facter fqdn').stdout.strip - -with_puppet_running_on(master, :main => { :dns_alt_names => "puppet,#{hostname},#{fqdn}", :verbose => true, :noop => true }) do - # SSL dir created? - step "SSL dir created?" - on master, "[ -d #{master.puppet('master')['ssldir']} ]" - - # PID file exists? - step "PID file created?" - on master, "[ -f #{master.puppet('master')['pidfile']} ]" -end - -step "Create module directories normally handled via packaging" -on master, "mkdir -p #{master['distmoduledir']}" -on master, "mkdir -p #{master['sitemoduledir']}" diff --git a/acceptance/setup/git/pre-suite/060_InstallModules.rb b/acceptance/setup/git/pre-suite/060_InstallModules.rb deleted file mode 100644 index 00243010b22..00000000000 --- a/acceptance/setup/git/pre-suite/060_InstallModules.rb +++ /dev/null @@ -1,76 +0,0 @@ -require 'pathname' - -# Given an array of modules specified by the --modules command line option, -# Parse all of them into an array of usable hash structures. -class PuppetModules - attr_reader :modules - - def initialize(modules=[]) - @modules = modules - end - - def list - return [] unless modules - modules.collect do |uri| - git_url, git_ref = uri.split '#' - folder = Pathname.new(git_url).basename('.git') - name = folder.to_s.split('-', 2)[1] || folder.to_s - { - :name => name, - :url => git_url, - :folder => folder.to_s, - :ref => git_ref, - :protocol => git_url.split(':')[0].intern, - } - end - end -end - -def install_git_module(mod, hosts) - # The idea here is that each test can symlink the modules they want from a - # temporary directory to this location. This will preserve the global - # state of the system while allowing individual test cases to quickly run - # with a module "installed" in the module path. 
- moddir = "/opt/puppet-git-repos" - target = "#{moddir}/#{mod[:name]}" - - step "Clone #{mod[:url]} if needed" - on hosts, "test -d #{moddir} || mkdir -p #{moddir}" - on hosts, "test -d #{target} || git clone #{mod[:url]} #{target}" - step "Update #{mod[:name]} and check out revision #{mod[:ref]}" - - commands = ["cd #{target}", - "remote rm origin", - "remote add origin #{mod[:url]}", - "fetch origin", - "checkout -f #{mod[:ref]}", - "reset --hard refs/remotes/origin/#{mod[:ref]}", - "clean -fdx", - ] - - on hosts, commands.join(" && git ") -end - -def install_scp_module(mod, hosts) - moddir = "/opt/puppet-git-repos" - target = "#{moddir}/#{mod[:name]}" - - step "Purge #{target} if needed" - on hosts, "test -d #{target} && rm -rf #{target} || true" - - step "Copy #{mod[:name]} to hosts" - scp_to hosts, mod[:url].split(':', 2)[1], target -end - -modules = PuppetModules.new(options[:modules]).list - -step "Masters: Install Puppet Modules" -masters = hosts.select { |host| host['roles'].include? 'master' } - -modules.each do |mod| - if mod[:protocol] == :scp - install_scp_module(mod, masters) - else - install_git_module(mod, masters) - end -end diff --git a/acceptance/setup/packages/pre-suite/010_Install.rb b/acceptance/setup/packages/pre-suite/010_Install.rb deleted file mode 100644 index 72f93b825bf..00000000000 --- a/acceptance/setup/packages/pre-suite/010_Install.rb +++ /dev/null @@ -1,52 +0,0 @@ -require 'puppet/acceptance/install_utils' - -extend Puppet::Acceptance::InstallUtils - -test_name "Install Packages" - -step "Install repositories on target machines..." do - - sha = ENV['SHA'] - repo_configs_dir = 'repo-configs' - - hosts.each do |host| - install_repos_on(host, 'puppet', sha, repo_configs_dir) - end -end - - -MASTER_PACKAGES = { - :redhat => [ - 'puppet-server', - ], - :debian => [ - 'puppetmaster-passenger', - ], -# :solaris => [ -# 'puppet-server', -# ], -# :windows => [ -# 'puppet-server', -# ], -} - -AGENT_PACKAGES = { - :redhat => [ - 'puppet', - ], - :debian => [ - 'puppet', - ], -# :solaris => [ -# 'puppet', -# ], -# :windows => [ -# 'puppet', -# ], -} - -install_packages_on(master, MASTER_PACKAGES) -install_packages_on(agents, AGENT_PACKAGES) - -configure_gem_mirror(hosts) - diff --git a/acceptance/setup/packages/pre-suite/015_PackageHostsPresets.rb b/acceptance/setup/packages/pre-suite/015_PackageHostsPresets.rb deleted file mode 100644 index 026c6985b24..00000000000 --- a/acceptance/setup/packages/pre-suite/015_PackageHostsPresets.rb +++ /dev/null @@ -1,5 +0,0 @@ -if master['platform'] =~ /debian|ubuntu/ - master.uses_passenger! -elsif master['platform'] =~ /redhat|el|centos|scientific|fedora/ - master['use-service'] = true -end diff --git a/acceptance/setup/packages/pre-suite/045_EnsureMasterStartedOnPassenger.rb b/acceptance/setup/packages/pre-suite/045_EnsureMasterStartedOnPassenger.rb deleted file mode 100644 index 20f4fdfb5a3..00000000000 --- a/acceptance/setup/packages/pre-suite/045_EnsureMasterStartedOnPassenger.rb +++ /dev/null @@ -1,3 +0,0 @@ -if master.graceful_restarts? 
- on(master, puppet('resource', 'service', master['puppetservice'], "ensure=running")) -end diff --git a/acceptance/setup/passenger/pre-suite/015_PackageHostsPresets.rb b/acceptance/setup/passenger/pre-suite/015_PackageHostsPresets.rb deleted file mode 100644 index 4a402601c45..00000000000 --- a/acceptance/setup/passenger/pre-suite/015_PackageHostsPresets.rb +++ /dev/null @@ -1,6 +0,0 @@ -if master['platform'] =~ /^el-|^fedora-/ - master.uses_passenger!('httpd') - master['graceful-restarts'] = false # workaround BKR-221 -else - master.uses_passenger!('apache2') -end diff --git a/acceptance/setup/passenger/pre-suite/030_ConfigurePassenger.rb b/acceptance/setup/passenger/pre-suite/030_ConfigurePassenger.rb deleted file mode 100644 index 282635fecbb..00000000000 --- a/acceptance/setup/passenger/pre-suite/030_ConfigurePassenger.rb +++ /dev/null @@ -1,15 +0,0 @@ -certname = on(master, puppet('master --configprint certname')).stdout.chomp - -if master['platform'] =~ /^el-|^fedora-/ - on(master, "sed -i 's|localhost|#{certname}|g' /etc/httpd/conf.d/puppet-passenger.conf") -else - on(master, "sed -i 's|localhost|#{certname}|g' /etc/apache2/sites-available/puppet-passenger*") - on(master, 'a2enmod headers') - on(master, 'a2enmod ssl') - - # only enable version module if it's not loaded - modules = on(master, 'apache2ctl -M').stdout - on(master, 'a2enmod version') if modules !~ /version_module/ - - on(master, 'a2ensite puppet-passenger') -end diff --git a/acceptance/setup/pe/pre-suite/000_Install.rb b/acceptance/setup/pe/pre-suite/000_Install.rb deleted file mode 100644 index acb7f465951..00000000000 --- a/acceptance/setup/pe/pre-suite/000_Install.rb +++ /dev/null @@ -1,5 +0,0 @@ -test_name 'Install Puppet Enterprise' do - - # This installs the latest PE build, can be overridden, see API docs - install_pe -end diff --git a/acceptance/setup/pe/pre-suite/010_UpdatePkg.rb b/acceptance/setup/pe/pre-suite/010_UpdatePkg.rb deleted file mode 100644 index e105410c991..00000000000 --- a/acceptance/setup/pe/pre-suite/010_UpdatePkg.rb +++ /dev/null @@ -1,18 +0,0 @@ -test_name 'Update pe-puppet pkg' do - - repo_path = ENV['PUPPET_REPO_CONFIGS'] - version = ENV['PUPPET_REF'] - - unless repo_path && version - skip_test "The puppet version to install isn't specified, using what's in the tarball..." - end - - hosts.each do |host| - deploy_package_repo(host, repo_path, "pe-puppet", version) - host.upgrade_package("pe-puppet") - end - - with_puppet_running_on master, {} do - # this bounces the puppet master for us - end -end diff --git a/acceptance/teardown/common/099_Archive_Logs.rb b/acceptance/teardown/common/099_Archive_Logs.rb new file mode 100644 index 00000000000..d940e2bea2e --- /dev/null +++ b/acceptance/teardown/common/099_Archive_Logs.rb @@ -0,0 +1,107 @@ +require 'date' + +def file_glob(host, path) + result = on(host, "ls #{path}", :acceptable_exit_codes => [0, 2]) + return [] if result.exit_code != 0 + return result.stdout.strip.split("\n") +end + +# This test is prefixed with zzz so it will hopefully run last. 
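# For example (the BUILD_ID and date below are illustrative), a run without JOB_NAME set produces an
# archive named: unknown_jenkins_job__123__2024-01-01__sut-files.tgz (see archive_name below).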
+test_name 'Backup puppet logs and app data on all hosts' do + today = Date.today().to_s + # truncate the job name so it only has the name-y part and no parameters + job_name = (ENV['JOB_NAME'] || 'unknown_jenkins_job') + .sub(/[A-Z0-9_]+=.*$/, '') + .gsub(/[\/,.]/, '_')[0..200] + archive_name = "#{job_name}__#{ENV['BUILD_ID']}__#{today}__sut-files.tgz" + archive_root = "SUT_#{today}" + + hosts.each do |host| + step("Capturing log errors for #{host}") do + case host[:platform] + when /windows/ + # on Windows, all of the desired data (including logs) is in the data dir + puppetlabs_data = 'C:/ProgramData/PuppetLabs' + archive_file_from(host, puppetlabs_data, {}, archive_root, archive_name) + + # Note: Windows `ls` uses absolute paths for all matches when an absolute path is supplied. + tempdir = 'C:/Windows/TEMP' + file_glob(host, File.join(tempdir, 'install-puppet-*.log')).each do |install_log| + archive_file_from(host, install_log, {}, archive_root, archive_name) + end + file_glob(host, File.join(tempdir, 'puppet-*-installer.log')).each do |install_log| + archive_file_from(host, install_log, {}, archive_root, archive_name) + end + else + puppetlabs_logdir = '/var/log/puppetlabs' + grep_for_alerts = if host[:platform] =~ /solaris/ + "egrep -i 'warn|error|fatal'" + elsif host[:platform] =~ /aix/ + "grep -iE -B5 -A10 'warn|error|fatal'" + else + "grep -i -B5 -A10 'warn\\|error\\|fatal'" + end + + ## If there are any PL logs, try to echo all warning, error, and fatal + ## messages from all PL logs to the job's output + on(host, <<-GREP_FOR_ALERTS, :accept_all_exit_codes => true ) + if [ -d #{puppetlabs_logdir} ] && [ -n "$(find #{puppetlabs_logdir} -name '*.log*')" ]; then + for log in $(find #{puppetlabs_logdir} -name '*.log*'); do + # grep /dev/null only to get grep to print filenames, since -H is not in POSIX spec for grep + #{grep_for_alerts} $log /dev/null; + echo "" + done + fi + GREP_FOR_ALERTS + + step("Archiving logs for #{host} into #{archive_name} (muzzling everything but :warn or higher beaker logs...)") do + ## turn the logger off to avoid getting hundreds of lines of scp progress output + previous_level = @logger.log_level + @logger.log_level = :warn + + pxp_cache = '/opt/puppetlabs/pxp-agent/spool' + puppetlabs_data = '/etc/puppetlabs' + + version_lookup_result = on(host, "cat /opt/puppetlabs/puppet/VERSION", :accept_all_exit_codes => true) + + # If we can't find a VERSION file, chances are puppet wasn't + # installed and these paths aren't present. Beaker's + # archive_file_from() will fail if it can't find the file, and we + # want to proceed... 
+ if version_lookup_result.exit_code == 0 + agent_version = version_lookup_result.output.strip + archive_file_from(host, pxp_cache, {}, archive_root, archive_name) unless version_is_less(agent_version, "1.3.2") + archive_file_from(host, puppetlabs_data, {}, archive_root, archive_name) + archive_file_from(host, puppetlabs_logdir, {}, archive_root, archive_name) + end + + syslog_dir = '/var/log' + syslog_name = 'messages' + if host[:platform] =~ /ubuntu|debian/ + syslog_name = 'syslog' + elsif host[:platform] =~ /solaris/ + syslog_dir = '/var/adm' + # Next few lines are for debugging POOLER-200, once that is resolved this can be removed + @logger.log_level = previous_level + on(host, 'egrep -i \'reboot after panic\' /var/adm/messages', :acceptable_exit_codes => [0,1,2]) + @logger.log_level = :warn + elsif host[:platform] =~ /osx/ + syslog_name = "system.log" + elsif host[:platform] =~ /fedora/ + on(host, "journalctl --no-pager > /var/log/messages") + elsif host[:platform] =~ /aix/ + on(host, "alog -o -t console > /var/log/messages") + end + + syslog_path = File.join(syslog_dir, syslog_name) + if host.file_exist?(syslog_path) + archive_file_from(host, syslog_path, {}, archive_root, archive_name) + end + + ## turn the logger back on in case someone else wants to log things + @logger.log_level = previous_level + end + end + end + end +end diff --git a/acceptance/tests/agent/agent_disable_lockfile.rb b/acceptance/tests/agent/agent_disable_lockfile.rb index e93f18e05b6..68d4419668f 100644 --- a/acceptance/tests/agent/agent_disable_lockfile.rb +++ b/acceptance/tests/agent/agent_disable_lockfile.rb @@ -1,9 +1,14 @@ -test_name "the agent --disable/--enable functionality should manage the agent lockfile properly" +test_name "C4553 - agent --disable/--enable functionality should manage the agent lockfile properly" +tag 'audit:integration', # lockfile uses the standard `vardir` location to store/query lockfile. + # The validation of the `vardir` at the OS level + # should be accomplished in another test. + 'audit:high', + 'audit:refactor' # This test should not require a master. Remove the use of `with_puppet_running_on`. # # This test is intended to ensure that puppet agent --enable/--disable # work properly, both in terms of complying with our public "API" around -# lockfile semantics ( http://links.puppetlabs.com/agent_lockfiles ), and +# lockfile semantics ( http://links.puppet.com/agent_lockfiles ), and # in terms of actually restricting or allowing new agent runs to begin. # @@ -33,10 +38,8 @@ ["I'm busy; go away.'", true] ] -tuples.each do |expected_message, explicitly_specify_message| - - with_puppet_running_on(master, {}) do - +with_puppet_running_on(master, {}) do + tuples.each do |expected_message, explicitly_specify_message| step "disable the agent; specify message? 
'#{explicitly_specify_message}', message: '#{expected_message}'" do agents.each do |agent| if (explicitly_specify_message) @@ -63,11 +66,10 @@ step "attempt to run the agent (message: '#{expected_message}')" do agents.each do |agent| - on(agent, puppet('agent', "--test --server #{master}"), - :acceptable_exit_codes => [1]) do + on(agent, puppet('agent', "--test"), :acceptable_exit_codes => [1]) do |result| disabled_regex = /administratively disabled.*'#{expected_message}'/ unless result.stdout =~ disabled_regex - fail_test("Unexpected output from attempt to run agent disabled; expecting to match '#{disabled_regex}', got '#{result.stdout}' on agent '#{agent}'") + fail_test("Unexpected output from attempt to run agent disabled; expecting to match '#{disabled_regex}', got '#{result.stdout}' on agent '#{agent}'") unless agent['locale'] == 'ja' end end end @@ -86,12 +88,10 @@ step "verify that we can run the agent (message: '#{expected_message}')" do agents.each do |agent| - on(agent, puppet('agent', "--test --server #{master}")) + on(agent, puppet('agent', "--test")) end end - - end # with_puppet_running_on block - -end # tuples block + end # tuples block +end # with_puppet_running_on block @all_tests_passed = true diff --git a/acceptance/tests/agent/agent_fails_with_unknown_resource.rb b/acceptance/tests/agent/agent_fails_with_unknown_resource.rb new file mode 100644 index 00000000000..b5289167ed5 --- /dev/null +++ b/acceptance/tests/agent/agent_fails_with_unknown_resource.rb @@ -0,0 +1,79 @@ +test_name "agent run should fail if it finds an unknown resource type" do + tag 'audit:high', + 'audit:integration' + + require 'puppet/acceptance/common_utils' + + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils + + step "agent should fail when it can't find a resource" do + vendor_modules_path = master.tmpdir('vendor_modules') + tmp_environment = mk_tmp_environment_with_teardown(master, 'tmp') + + site_pp_content = <<-SITEPP + define foocreateresource($one) { + $msg = 'hello' + notify { $name: message => $msg } + } + class example($x) { + if $x == undef or $x == [] or $x == '' { + notice 'foo' + return() + } + notice 'bar' + } + node default { + class { example: x => [] } + create_resources('foocreateresource', {'blah'=>{'one'=>'two'}}) + mycustomtype{'foobar':} + } + SITEPP + manifests_path = "/tmp/#{tmp_environment}/manifests" + on(master, "mkdir -p '#{manifests_path}'") + create_remote_file(master, "#{manifests_path}/site.pp", site_pp_content) + + custom_type_content = <<-CUSTOMTYPE + Puppet::Type.newtype(:mycustomtype) do + @doc = "Create a new mycustomtype thing." 
+ + newparam(:name, :namevar => true) do + desc "Name of mycustomtype instance" + end + + def refresh + end + end + CUSTOMTYPE + type_path = "#{vendor_modules_path}/foo/lib/puppet/type" + on(master, "mkdir -p '#{type_path}'") + create_remote_file(master, "#{type_path}/mycustomtype.rb", custom_type_content) + + on(master, "chmod -R 750 '#{vendor_modules_path}' '/tmp/#{tmp_environment}'") + on(master, "chown -R #{master.puppet['user']}:#{master.puppet['group']} '#{vendor_modules_path}' '/tmp/#{tmp_environment}'") + + master_opts = { + 'main' => { + 'environment' => tmp_environment, + 'vendormoduledir' => vendor_modules_path + } + } + + with_puppet_running_on(master, master_opts) do + agents.each do |agent| + teardown do + agent.rm_rf(vendor_modules_path) + end + + # override vendormoduledir in case agent and server are on the same host + agent_dir = get_test_file_path(agent, 'vendormodulepath') + on(agent, puppet('agent', '-t', '--environment', tmp_environment, '--vendormoduledir', agent_dir), acceptable_exit_codes: [1]) do |result| + assert_match(/Error: Failed to apply catalog: Resource type 'Mycustomtype' was not found/, result.stderr) + end + end + end + end +end diff --git a/acceptance/tests/agent/agent_parses_json_catalog.rb b/acceptance/tests/agent/agent_parses_json_catalog.rb new file mode 100644 index 00000000000..f5719f73960 --- /dev/null +++ b/acceptance/tests/agent/agent_parses_json_catalog.rb @@ -0,0 +1,28 @@ +test_name "C99978: Agent parses a JSON catalog" + +tag 'risk:high', + 'audit:high', # tests defined catalog format + 'audit:integration', # There is no OS specific risk here. + 'server', + 'catalog:json' + +require 'puppet/acceptance/common_utils' +require 'json' + +step "Agent parses a JSON catalog" do + agents.each do |agent| + # Path to a ruby binary + ruby = Puppet::Acceptance::CommandUtils.ruby_command(agent) + + # Refresh the catalog + on(agent, puppet("agent --test")) + + # The catalog file should be parseable JSON + json_catalog = File.join(agent.puppet['client_datadir'], 'catalog', + "#{agent.puppet['certname']}.json") + on(agent, "cat #{json_catalog} | #{ruby} -rjson -e 'JSON.parse(STDIN.read)'") + + # Can the agent parse it as JSON? + on(agent, puppet("catalog find --terminus json > /dev/null")) + end +end diff --git a/acceptance/tests/agent/fallback_to_cached_catalog.rb b/acceptance/tests/agent/fallback_to_cached_catalog.rb index ae6b2024d43..ccf24ae3a47 100644 --- a/acceptance/tests/agent/fallback_to_cached_catalog.rb +++ b/acceptance/tests/agent/fallback_to_cached_catalog.rb @@ -1,8 +1,12 @@ test_name "fallback to the cached catalog" +tag 'audit:high', + 'audit:integration', # This test is not OS sensitive. + 'audit:refactor' # A catalog fixture can be used for this test. Remove the usage of `with_puppet_running_on`. + step "run agents once to cache the catalog" do with_puppet_running_on master, {} do - on(agents, puppet("agent -t --server #{master}")) + on(agents, puppet("agent -t")) end end @@ -12,7 +16,7 @@ # We use a server that the agent can't possibly talk to in order # to guarantee that no communication can take place. 
on(agent, puppet("agent --onetime --no-daemonize --server puppet.example.com --verbose")) do |result| - assert_match(/Using cached catalog/, result.stdout) + assert_match(/Using cached catalog/, result.stdout) unless agent['locale'] == 'ja' end end end diff --git a/acceptance/tests/agent/last_run_summary_report.rb b/acceptance/tests/agent/last_run_summary_report.rb new file mode 100644 index 00000000000..178b8771196 --- /dev/null +++ b/acceptance/tests/agent/last_run_summary_report.rb @@ -0,0 +1,116 @@ +test_name "The 'last_run_summary.yaml' report has the right location and permissions" do + tag 'audit:high' + + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils + + agents.each do |agent| + skip_test('This test does not work on Windows in japanese') if agent['platform'] =~ /windows/ && agent['locale'] == 'ja' + + custom_publicdir = agent.tmpdir('custom_public_dir') + + statedir = on(agent, puppet('config print statedir')).stdout.chomp + fail_test("The 'statedir' config is not set!") if statedir.empty? + + publicdir = on(agent, puppet('config print publicdir')).stdout.chomp + fail_test("The 'publicdir' config is not set!") if publicdir.empty? + + teardown do + agent.rm_rf(custom_publicdir) + agent.rm_rf("#{publicdir}/*") unless publicdir.empty? + on(agent, puppet("config set publicdir #{publicdir}")) + end + + step "Check if '#{publicdir}' was created during puppet installation" do + on(agent, "ls #{publicdir}", :acceptable_exit_codes => [0]) + end + + step "Check if '#{publicdir}' has '0755' permissions" do + if agent['platform'] =~ /windows/ + on(agent, "icacls #{publicdir}") do |result| + # Linux 'Owner' permissions class equivalent + assert_match(/BUILTIN\\Administrators:.*\(F\)/, result.stdout) + + # Known issue on Windows: 'C:\ProgramData\PuppetLabs\puppet' permissions are inherited + # by its subfolders and it does not have any permissions for 'Everyone' (see 'PuppetAppDir' + # in 'puppet-agent/resources/windows/wix/appdatafiles.wxs') + # Below line should be added when solution is found: + # assert_match(/Everyone:.*\(RX\)/, result.stdout) + end + else + on(agent, "ls -al #{publicdir}") do |result| + assert_match(/rwxr-xr-x.+\.$/, result.stdout) + end + end + end + + step "Create the 'last_run_summary.yaml' report file by applying catalog" do + on(agent, puppet('agent -t')) do |result| + assert_match('Applied catalog', result.stdout) + end + end + + step "Check if the 'last_run_summary.yaml' report file created has '0640' permissions" do + if agent['platform'] =~ /windows/ + on(agent, "icacls #{File.join(publicdir, 'last_run_summary.yaml')}") do |result| + # Linux 'Owner' premissions class equivalent + assert_match('Administrator:(R,W', result.stdout) + # Linux 'Group' permissions class equivalent + assert_match('None:(R)', result.stdout) + # Linux 'Public' permissions class equivalent + assert_match('Everyone:(Rc,S,RA)', result.stdout) + # According to icacls docs: + # Rc = Read control + # S = Synchronize + # RA = Read attributes + # More at https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/icacls + end + else + on(agent, "ls -al #{publicdir}") do |result| + assert_match(/rw-r-----.+last_run_summary\.yaml$/, result.stdout) + end + end + end + + step "Check that '#{statedir}' exists and has no 'last_run_summary.yaml' file" do + on(agent, "ls #{statedir}",:acceptable_exit_codes => [0]) do |result| + refute_match(/last_run_summary.yaml/, result.stdout) + end + end + + step "Check that 'publicdir' can be reconfigured" 
do + on(agent, puppet("config set publicdir #{custom_publicdir}")) + on(agent, puppet('config print publicdir')) do |result| + assert_match(custom_publicdir, result.stdout) + end + end + + step "Create a new 'last_run_summary.yaml' report file by applying catalog" do + on(agent, puppet('agent -t')) do |result| + assert_match('Applied catalog', result.stdout) + end + end + + step "Check if the 'last_run_summary.yaml' report file was created in the new location and still has '0640' permissions" do + if agent['platform'] =~ /windows/ + on(agent, "icacls #{File.join(custom_publicdir, 'last_run_summary.yaml')}") do |result| + # Linux 'Owner' premissions class equivalent + assert_match('Administrator:(R,W', result.stdout) + # Linux 'Group' permissions class equivalent + assert_match('None:(R)', result.stdout) + # Linux 'Public' permissions class equivalent + assert_match('Everyone:(Rc,S,RA)', result.stdout) + # According to icacls docs: + # Rc = Read control + # S = Synchronize + # RA = Read attributes + # More at https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/icacls + end + else + on(agent, "ls -al #{custom_publicdir}") do |result| + assert_match(/rw-r-----.+last_run_summary\.yaml$/, result.stdout) + end + end + end + end +end diff --git a/acceptance/tests/aix/aix_package_provider.rb b/acceptance/tests/aix/aix_package_provider.rb index 1d1d8d6dd03..bd03be55d4b 100644 --- a/acceptance/tests/aix/aix_package_provider.rb +++ b/acceptance/tests/aix/aix_package_provider.rb @@ -1,85 +1,97 @@ -test_name "aix package provider should work correctly" +test_name "aix package provider should work correctly" do -confine :to, :platform => /aix/ + tag 'audit:high', + 'audit:acceptance' # OS specific by definition. -dir = "/tmp/aix-packages-#{$$}" + confine :to, :platform => /aix/ -teardown do - on hosts, "rm -rf #{dir}" -end + dir = "/tmp/aix-packages-#{$$}" -def assert_package_version(package, expected_version) - # The output of lslpp is a colon-delimited list like: - # sudo:sudo.rte:1.8.6.4: : :C: :Configurable super-user privileges runtime: : : : : : :0:0:/: - # We want the version, so grab the third field - on hosts, "lslpp -qLc #{package} | cut -f3 -d:" do - actual_version = stdout.chomp - assert_equal(expected_version, actual_version, "Installed package version #{actual_version} does not match expected version #{expected_version}") + def assert_package_version(package, expected_version) + # The output of lslpp is a colon-delimited list like: + # sudo:sudo.rte:1.8.6.4: : :C: :Configurable super-user privileges runtime: : : : : : :0:0:/: + # We want the version, so grab the third field + on(hosts, "lslpp -qLc #{package} | cut -f3 -d:") do |result| + actual_version = result.stdout.chomp + assert_equal(expected_version, actual_version, "Installed package version #{actual_version} does not match expected version #{expected_version}") + end end -end -package = 'sudo.rte' -version1 = '1.7.10.4' -version2 = '1.8.6.4' + def get_package_manifest(package, version, sourcedir) + <<-MANIFEST + package { '#{package}': + ensure => '#{version}', + provider => aix, + source => '#{sourcedir}', + } + MANIFEST + end -step "download packages to use for test" + package = 'sudo.rte' + version1 = '1.7.10.4' + version2 = '1.8.6.4' -on hosts, "mkdir -p #{dir}" -on hosts, "curl neptune.puppetlabs.lan/misc/sudo.#{version1}.aix51.lam.bff > #{dir}/sudo.#{version1}.aix51.lam.bff" -on hosts, "curl neptune.puppetlabs.lan/misc/sudo.#{version2}.aix51.lam.bff > #{dir}/sudo.#{version2}.aix51.lam.bff" + 
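# For illustration, with the values used in this test, get_package_manifest('sudo.rte', '1.7.10.4', dir)
# expands to roughly (the pid suffix in dir will vary):
#   package { 'sudo.rte':
#     ensure   => '1.7.10.4',
#     provider => aix,
#     source   => '/tmp/aix-packages-<pid>',
#   }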
teardown do + on hosts, "rm -rf #{dir}" + on hosts, "installp -u #{package}" + end -step "setup manifests for testing" + step "download packages to use for test" do + on hosts, "mkdir -p #{dir}" + on hosts, "curl https://artifactory.delivery.puppetlabs.net/artifactory/generic_enterprise__local/misc/sudo.#{version1}.aix51.lam.bff > #{dir}/sudo.#{version1}.aix51.lam.bff" + on hosts, "curl https://artifactory.delivery.puppetlabs.net/artifactory/generic_enterprise__local/misc/sudo.#{version2}.aix51.lam.bff > #{dir}/sudo.#{version2}.aix51.lam.bff" + end -version1_manifest = <<-MANIFEST -package { '#{package}': - ensure => '#{version1}', - provider => aix, - source => '#{dir}', -} -MANIFEST + step "install the older version of package" do + apply_manifest_on(hosts, get_package_manifest(package, version1, dir), :catch_failures => true) + end -version2_manifest = <<-MANIFEST -package { '#{package}': - ensure => '#{version2}', - provider => aix, - source => '#{dir}', -} -MANIFEST + step "verify package is installed and at the correct version" do + assert_package_version package, version1 + end -absent_manifest = <<-MANIFEST -package { '#{package}': - ensure => absent, - provider => aix, - source => '#{dir}', -} -MANIFEST + step "install a newer version of the package" do + apply_manifest_on(hosts, get_package_manifest(package, version2, dir), :catch_failures => true) + end -step "install the package" + step "verify package is installed and at the newer version" do + assert_package_version package, version2 + end -apply_manifest_on hosts, version1_manifest + step "test that downgrading fails by trying to install an older version of the package" do + apply_manifest_on(hosts, get_package_manifest(package, version1, dir), :acceptable_exit_codes => [4,6]) do |res| + assert_match(/aix package provider is unable to downgrade packages/, res.stderr, "Didn't get an error about downgrading packages") + end + end -step "verify package is installed and at the correct version" + step "uninstall the package" do + apply_manifest_on(hosts, get_package_manifest(package, 'absent', dir), :catch_failures => true) + end -assert_package_version package, version1 + step "verify the package is gone" do + on hosts, "lslpp -qLc #{package}", :acceptable_exit_codes => [1] + end -step "install a newer version of the package" + step "install the older version of package" do + apply_manifest_on(hosts, get_package_manifest(package, version1, dir), :catch_failures => true) + end -apply_manifest_on hosts, version2_manifest + step "verify package is installed and at the correct version" do + assert_package_version package, version1 + end -step "verify package is installed and at the newer version" + step "install latest version of the package" do + apply_manifest_on(hosts, get_package_manifest(package, 'latest', dir), :catch_failures => true) + end -assert_package_version package, version2 + step "verify package is installed and at the correct version" do + assert_package_version package, version2 + end -step "test that downgrading fails by trying to install an older version of the package" + step "PUP-7818 remove a package without defining the source metaparameter" do + manifest = get_package_manifest(package, 'latest', dir) + manifest = manifest + "package { 'nonexistant_example_package.rte': ensure => absent, }" + apply_manifest_on(hosts, manifest, :catch_failures => true) + end -on hosts, puppet_apply("--verbose", "--detailed-exitcodes"), :stdin => version1_manifest, :acceptable_exit_codes => [4,6] do - assert_match(/aix package 
provider is unable to downgrade packages/, stdout, "Didn't get an error about downgrading packages") end - -step "uninstall the package" - -apply_manifest_on hosts, absent_manifest - -step "verify the package is gone" - -on hosts, "lslpp -qLc #{package}", :acceptable_exit_codes => [1] diff --git a/acceptance/tests/aix/nim_package_provider.rb b/acceptance/tests/aix/nim_package_provider.rb index 68feafe4914..0694ffc6e05 100644 --- a/acceptance/tests/aix/nim_package_provider.rb +++ b/acceptance/tests/aix/nim_package_provider.rb @@ -1,15 +1,27 @@ test_name "NIM package provider should work correctly" -confine :to, :platform => "aix" +tag 'audit:high', + 'audit:acceptance' # OS specific by definition + +# nim test is slow, confine to only aix 7.2 and recent puppet versions +confine :to, :platform => "aix" do |aix| + version = on(aix, 'puppet --version').stdout + version && + Gem::Version.new(version) > Gem::Version.new('6.4.0') && + on(aix, 'facter os.release.full').stdout == '7.2' +end -# NOTE: This test is duplicated in the pe_acceptance_tests repo +teardown do + test_apply('cdrecord', 'absent', '') + test_apply('puppet.test.rte', 'absent', '') +end def assert_package_version(package, expected_version) # The output of lslpp is a colon-delimited list like: # sudo:sudo.rte:1.8.6.4: : :C: :Configurable super-user privileges runtime: : : : : : :0:0:/: # We want the version, so grab the third field - on hosts, "lslpp -qLc #{package} | cut -f3 -d:" do - actual_version = stdout.chomp + on(hosts, "lslpp -qLc #{package} | cut -f3 -d:") do |result| + actual_version = result.stdout.chomp assert_equal(expected_version, actual_version, "Installed package version #{actual_version} does not match expected version #{expected_version}") end end @@ -43,6 +55,9 @@ def test_apply(package_name, ensure_value, expected_version) end end +# These two packages live in an LPP source on the NIM master. 
Details +# on our nim masters are available at +# https://confluence.puppetlabs.com/display/OPS/IBM+Power+LPARs package_types = { "RPM" => { :package_name => "cdrecord", @@ -50,12 +65,20 @@ def test_apply(package_name, ensure_value, expected_version) :new_version => '1.9-9' }, "BFF" => { - :package_name => "bos.atm.atmle", - :old_version => '6.1.7.0', - :new_version => '7.1.2.0' + :package_name => "puppet.test.rte", + :old_version => '1.0.0.0', + :new_version => '2.0.0.0' } } +step "Setup: ensure test packages are not installed" do + pkgs = ['cdrecord', 'puppet.test.rte'] + pkgs.each do |pkg| + on hosts, puppet_apply(["--detailed-exitcodes", "--verbose"]), + {:stdin => get_manifest(pkg, 'absent'), :acceptable_exit_codes => [0,2]} + end +end + package_types.each do |package_type, details| step "install a #{package_type} package via 'ensure=>present'" do package_name = details[:package_name] @@ -86,11 +109,11 @@ def test_apply(package_name, ensure_value, expected_version) version = details[:old_version] manifest = get_manifest(package_name, version) - on hosts, puppet_apply("--verbose", "--detailed-exitcodes"), + on(hosts, puppet_apply("--verbose", "--detailed-exitcodes"), { :stdin => manifest, - :acceptable_exit_codes => [4,6] } do + :acceptable_exit_codes => [4,6] }) do |result| - assert_match(/NIM package provider is unable to downgrade packages/, stdout, "Didn't get an error about downgrading packages") + assert_match(/NIM package provider is unable to downgrade packages/, result.stderr, "Didn't get an error about downgrading packages") end end diff --git a/acceptance/tests/allow_arbitrary_node_name_fact_for_agent.rb b/acceptance/tests/allow_arbitrary_node_name_fact_for_agent.rb index e19c476717a..8b1f3cdb601 100644 --- a/acceptance/tests/allow_arbitrary_node_name_fact_for_agent.rb +++ b/acceptance/tests/allow_arbitrary_node_name_fact_for_agent.rb @@ -1,88 +1,154 @@ test_name "node_name_fact should be used to determine the node name for puppet agent" +tag 'audit:high', + 'audit:integration', # Tests that the server properly overrides certname with node_name fact. + # Testing of passenger master is no longer needed. + 'server' + success_message = "node_name_fact setting was correctly used to determine the node name" testdir = master.tmpdir("nodenamefact") node_names = [] -on agents, facter('kernel') do - node_names << stdout.chomp +on agents, facter('kernel') do |result| + node_names << result.stdout.chomp end node_names.uniq! 
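A quick sketch of what the kernel-fact collection above produces (plain Ruby; the sample kernel values are assumed rather than taken from a live agent pool): each unique value returned by `facter kernel` is later granted its own catalog/node/report auth rule and a matching node block in site.pp, as the rules built below show.

    # Hypothetical sample standing in for the `facter kernel` output gathered from each agent.
    node_names = ['Linux', 'Linux', 'windows']
    node_names.uniq!                  # => ["Linux", "windows"]

    # Each unique name is later given its own catalog, node, and report endpoints
    # in the tk-auth rules constructed below.
    auth_paths = node_names.flat_map do |name|
      ["/puppet/v3/catalog/#{name}", "/puppet/v3/node/#{name}", "/puppet/v3/report/#{name}"]
    end
    puts auth_paths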
-authfile = "#{testdir}/auth.conf" -authconf = node_names.map do |node_name| - %Q[ -path /puppet/v3/catalog/#{node_name} -auth yes -allow * - -path /puppet/v3/node/#{node_name} -auth yes -allow * - -path /puppet/v3/report/#{node_name} -auth yes -allow * -] -end.join("\n") - -manifest_file = "#{testdir}/environments/production/manifests/manifest.pp" -manifest = %Q[ - Exec { path => "/usr/bin:/bin" } - node default { - notify { "false": } - } -] -manifest << node_names.map do |node_name| - %Q[ - node "#{node_name}" { - notify { "#{success_message}": } - } - ] -end.join("\n") +step "Prepare for custom tk-auth rules" do + on master, 'cp /etc/puppetlabs/puppetserver/conf.d/auth.conf /etc/puppetlabs/puppetserver/conf.d/auth.bak' + modify_tk_config(master, options['puppetserver-config'], {'jruby-puppet' => {'use-legacy-auth-conf' => false}}) +end -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) - File { - ensure => directory, - mode => '0777', - } +teardown do + modify_tk_config(master, options['puppetserver-config'], {'jruby-puppet' => {'use-legacy-auth-conf' => true}}) + on master, 'cp /etc/puppetlabs/puppetserver/conf.d/auth.bak /etc/puppetlabs/puppetserver/conf.d/auth.conf' + on master, "service #{master['puppetservice']} reload" +end - file { - '#{testdir}':; - '#{testdir}/environments':; - '#{testdir}/environments/production':; - '#{testdir}/environments/production/manifests':; - } +step "Setup tk-auth rules" do + tka_header = <<-HEADER +authorization: { + version: 1 + rules: [ + { + match-request: { + path: "/puppet/v3/file" + type: path + } + allow: "*" + sort-order: 500 + name: "puppetlabs file" + }, + HEADER + + tka_node_rules = node_names.map do |node_name| + <<-NODE_RULES + { + match-request: { + path: "/puppet/v3/catalog/#{node_name}" + type: path + method: [get, post] + } + allow: "*" + sort-order: 500 + name: "puppetlabs catalog #{node_name}" + }, + { + match-request: { + path: "/puppet/v3/node/#{node_name}" + type: path + method: get + } + allow: "*" + sort-order: 500 + name: "puppetlabs node #{node_name}" + }, + { + match-request: { + path: "/puppet/v3/report/#{node_name}" + type: path + method: put + } + allow: "*" + sort-order: 500 + name: "puppetlabs report #{node_name}" + }, + NODE_RULES + end - file { '#{manifest_file}': - ensure => file, - mode => '0644', - content => '#{manifest}', - } + tka_footer = <<-FOOTER + { + match-request: { + path: "/" + type: path + } + deny: "*" + sort-order: 999 + name: "puppetlabs deny all" + } + ] +} + FOOTER - file { '#{authfile}': - ensure => file, - mode => '0644', - content => '#{authconf}', - } -MANIFEST - -with_these_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - }, - 'master' => { - 'rest_authconfig' => "#{testdir}/auth.conf", - 'node_terminus' => 'plain', - }, + tk_auth = [tka_header, tka_node_rules, tka_footer].flatten.join("\n") + + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + file { '/etc/puppetlabs/puppetserver/conf.d/auth.conf': + ensure => file, + mode => '0644', + content => '#{tk_auth}', + } + MANIFEST +end + +step "Setup site.pp for node name based classification" do + + site_manifest = <<-SITE_MANIFEST +node default { + notify { "false": } } -with_puppet_running_on master, with_these_opts, testdir do +node #{node_names.map { |name| %Q["#{name}"] }.join(", ")} { + notify { "#{success_message}": } +} +SITE_MANIFEST + + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + $directories = [ + '#{testdir}', + '#{testdir}/environments', + 
'#{testdir}/environments/production', + '#{testdir}/environments/production/manifests', + ] + + file { $directories: + ensure => directory, + mode => '0755', + } - on(agents, puppet('agent', "--no-daemonize --verbose --onetime --node_name_fact kernel --server #{master}")) do - assert_match(/defined 'message'.*#{success_message}/, stdout) - end + file { '#{testdir}/environments/production/manifests/manifest.pp': + ensure => file, + mode => '0644', + content => '#{site_manifest}', + } + MANIFEST +end +step "Ensure nodes are classified based on the node name fact" do + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + }, + 'master' => { + 'node_terminus' => 'plain', + }, + } + + with_puppet_running_on(master, master_opts, testdir) do + on(agents, puppet('agent', "--no-daemonize --verbose --onetime --node_name_fact kernel")) do |result| + assert_match(/defined 'message'.*#{success_message}/, result.stdout) + end + end end diff --git a/acceptance/tests/allow_arbitrary_node_name_fact_for_apply.rb b/acceptance/tests/allow_arbitrary_node_name_fact_for_apply.rb deleted file mode 100644 index 4569bdaa107..00000000000 --- a/acceptance/tests/allow_arbitrary_node_name_fact_for_apply.rb +++ /dev/null @@ -1,30 +0,0 @@ -test_name "node_name_fact should be used to determine the node name for puppet apply" - -success_message = "node_name_fact setting was correctly used to determine the node name" - -node_names = [] - -on agents, facter('kernel') do - node_names << stdout.chomp -end - -node_names.uniq! - -manifest = %Q[ - Exec { path => "/usr/bin:/bin" } - node default { - notify { "false": } - } -] - -node_names.each do |node_name| - manifest << %Q[ - node "#{node_name}" { - notify { "echo #{success_message}": } - } - ] -end - -on agents, puppet_apply("--verbose --node_name_fact kernel"), :stdin => manifest do - assert_match(/defined 'message'.*#{success_message}/, stdout) -end diff --git a/acceptance/tests/allow_arbitrary_node_name_for_agent.rb b/acceptance/tests/allow_arbitrary_node_name_for_agent.rb index 49d8793242a..146c442b18c 100644 --- a/acceptance/tests/allow_arbitrary_node_name_for_agent.rb +++ b/acceptance/tests/allow_arbitrary_node_name_for_agent.rb @@ -1,74 +1,134 @@ test_name "node_name_value should be used as the node name for puppet agent" -success_message = "node_name_value setting was correctly used as the node name" -in_testdir = master.tmpdir('nodenamevalue') +tag 'audit:high', + 'audit:integration', # Tests that the server properly overrides certname with node_name fact. + # Testing of passenger master is no longer needed. 
+ 'server' -authfile = "#{in_testdir}/auth.conf" -authconf = <<-AUTHCONF -path /puppet/v3/catalog/specified_node_name -auth yes -allow * +success_message = "node_name_value setting was correctly used as the node name" +testdir = master.tmpdir('nodenamevalue') -path /puppet/v3/node/specified_node_name -auth yes -allow * +step "Prepare for custom tk-auth rules" do + on master, 'cp /etc/puppetlabs/puppetserver/conf.d/auth.conf /etc/puppetlabs/puppetserver/conf.d/auth.bak' + modify_tk_config(master, options['puppetserver-config'], {'jruby-puppet' => {'use-legacy-auth-conf' => false}}) +end -path /puppet/v3/report/specified_node_name -auth yes -allow * -AUTHCONF +teardown do + on master, 'cp /etc/puppetlabs/puppetserver/conf.d/auth.bak /etc/puppetlabs/puppetserver/conf.d/auth.conf' + modify_tk_config(master, options['puppetserver-config'], {'jruby-puppet' => {'use-legacy-auth-conf' => true}}) + on master, "service #{master['puppetservice']} reload" +end -manifest_file = "#{in_testdir}/environments/production/manifests/manifest.pp" -manifest = <<-MANIFEST - Exec { path => "/usr/bin:/bin" } - node default { - notify { "false": } - } - node specified_node_name { - notify { "#{success_message}": } - } -MANIFEST +step "Setup tk-auth rules" do + tk_auth = <<-TK_AUTH +authorization: { + version: 1 + rules: [ + { + match-request: { + path: "/puppet/v3/file" + type: path + } + allow: "*" + sort-order: 500 + name: "puppetlabs file" + }, + { + match-request: { + path: "/puppet/v3/catalog/specified_node_name" + type: path + method: [get, post] + } + allow: "*" + sort-order: 500 + name: "puppetlabs catalog" + }, + { + match-request: { + path: "/puppet/v3/node/specified_node_name" + type: path + method: get + } + allow: "*" + sort-order: 500 + name: "puppetlabs node" + }, + { + match-request: { + path: "/puppet/v3/report/specified_node_name" + type: path + method: put + } + allow: "*" + sort-order: 500 + name: "puppetlabs report" + }, + { + match-request: { + path: "/" + type: path + } + deny: "*" + sort-order: 999 + name: "puppetlabs deny all" + } + ] +} + TK_AUTH -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) - File { - ensure => directory, - mode => '0777', - } + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + file { '/etc/puppetlabs/puppetserver/conf.d/auth.conf': + ensure => file, + mode => '0644', + content => '#{tk_auth}', + } + MANIFEST +end - file { - '#{in_testdir}':; - '#{in_testdir}/environments':; - '#{in_testdir}/environments/production':; - '#{in_testdir}/environments/production/manifests':; - } +step "Setup site.pp for node name based classification" do - file { '#{manifest_file}': - ensure => file, - mode => '0644', - content => '#{manifest}', - } + site_manifest = <<-SITE_MANIFEST +node default { + notify { "false": } +} +node specified_node_name { + notify { "#{success_message}": } +} + SITE_MANIFEST - file { '#{authfile}': - ensure => file, - mode => '0644', - content => '#{authconf}', - } -MANIFEST + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + $directories = [ + '#{testdir}', + '#{testdir}/environments', + '#{testdir}/environments/production', + '#{testdir}/environments/production/manifests', + ] -with_these_opts = { - 'main' => { - 'environmentpath' => "#{in_testdir}/environments", - }, - 'master' => { - 'rest_authconfig' => "#{in_testdir}/auth.conf", - 'node_terminus' => 'plain', - }, -} + file { $directories: + ensure => directory, + mode => '0755', + } -with_puppet_running_on master, with_these_opts, in_testdir do + file { 
'#{testdir}/environments/production/manifests/manifest.pp': + ensure => file, + mode => '0644', + content => '#{site_manifest}', + } + MANIFEST +end - on(agents, puppet('agent', "-t --node_name_value specified_node_name --server #{master}"), :acceptable_exit_codes => [0,2]) do - assert_match(/defined 'message'.*#{success_message}/, stdout) +step "Ensure nodes are classified based on the node name fact" do + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + }, + 'master' => { + 'node_terminus' => 'plain', + }, + } + with_puppet_running_on(master, master_opts, testdir) do + on(agents, puppet('agent', "-t --node_name_value specified_node_name"), :acceptable_exit_codes => [0,2]) do |result| + assert_match(/defined 'message'.*#{success_message}/, result.stdout) + end end - end diff --git a/acceptance/tests/allow_arbitrary_node_name_for_apply.rb b/acceptance/tests/allow_arbitrary_node_name_for_apply.rb deleted file mode 100644 index d9bf1471798..00000000000 --- a/acceptance/tests/allow_arbitrary_node_name_for_apply.rb +++ /dev/null @@ -1,17 +0,0 @@ -test_name "node_name_value should be used as the node name for puppet apply" - -success_message = "node_name_value setting was correctly used as the node name" - -manifest = %Q[ - Exec { path => "/usr/bin:/bin" } - node default { - notify { "false": } - } - node a_different_node_name { - notify { "notify #{success_message}": } - } -] - -on agents, puppet_apply("--verbose --node_name_value a_different_node_name"), :stdin => manifest do - assert_match(/defined 'message'.*#{success_message}/, stdout) -end diff --git a/acceptance/tests/allow_symlinks_as_config_directories.rb b/acceptance/tests/allow_symlinks_as_config_directories.rb deleted file mode 100644 index 2d45d4fef95..00000000000 --- a/acceptance/tests/allow_symlinks_as_config_directories.rb +++ /dev/null @@ -1,27 +0,0 @@ -test_name "Should allow symlinks to directories as configuration directories" -confine :except, :platform => 'windows' - -agents.each do |agent| - step "Create the test confdir with a link to it" - confdir = agent.tmpdir('puppet_conf-directory') - conflink = agent.tmpfile('puppet_conf-symlink') - - on agent, "rm -rf #{conflink} #{confdir}" - - on agent, "mkdir #{confdir}" - on agent, "ln -s #{confdir} #{conflink}" - - on(agent, puppet('config', 'set', 'certname', 'awesome_certname', '--confdir', confdir)) - - manifest = 'notify{"My certname is $clientcert": }' - - step "Run Puppet and ensure it used the conf file in the confdir" - on agent, puppet_apply("--confdir #{conflink}"), :stdin => manifest do - assert_match(/My certname is awesome_certname[^\w]/, stdout) - end - - step "Check that the symlink and confdir are unchanged" - on agent, "[ -L #{conflink} ]" - on agent, "[ -d #{confdir} ]" - on agent, "[ $(readlink #{conflink}) = #{confdir} ]" -end diff --git a/acceptance/tests/apply/classes/parameterized_classes.rb b/acceptance/tests/apply/classes/parameterized_classes.rb index 9a70294255f..3f089a968ce 100755 --- a/acceptance/tests/apply/classes/parameterized_classes.rb +++ b/acceptance/tests/apply/classes/parameterized_classes.rb @@ -1,5 +1,8 @@ test_name "parametrized classes" +tag 'audit:high', + 'audit:unit' # This should be covered at the unit layer. 
+ ######################################################################## step "should allow param classes" manifest = %q{ @@ -9,8 +12,8 @@ class x($y, $z) { class {x: y => '1', z => '2'} } -apply_manifest_on(agents, manifest) do - fail_test "inclusion after parameterization failed" unless stdout.include? "1-2" +apply_manifest_on(agents, manifest) do |result| + fail_test "inclusion after parameterization failed" unless result.stdout.include? "1-2" end ######################################################################## @@ -25,8 +28,8 @@ class {x: y => '1', z => '2'} include x } -apply_manifest_on(agents, manifest) do - fail_test "inclusion after parameterization failed" unless stdout.include? "1-2" +apply_manifest_on(agents, manifest) do |result| + fail_test "inclusion after parameterization failed" unless result.stdout.include? "1-2" end ######################################################################## @@ -38,12 +41,12 @@ class x($y, $z='2') { class {x: y => '1'} } -apply_manifest_on(agents, manifest) do - fail_test "the default didn't apply as expected" unless stdout.include? "1-2" +apply_manifest_on(agents, manifest) do |result| + fail_test "the default didn't apply as expected" unless result.stdout.include? "1-2" end ######################################################################## -step "should allow param class defaults to be overriden" +step "should allow param class defaults to be overridden" manifest = %q{ class x($y, $z='2') { notice("${y}-${z}") @@ -51,6 +54,6 @@ class x($y, $z='2') { class {x: y => '1', z => '3'} } -apply_manifest_on(agents, manifest) do - fail_test "the override didn't happen as we expected" unless stdout.include? "1-3" +apply_manifest_on(agents, manifest) do |result| + fail_test "the override didn't happen as we expected" unless result.stdout.include? "1-3" end diff --git a/acceptance/tests/apply/classes/should_allow_param_override.rb b/acceptance/tests/apply/classes/should_allow_param_override.rb index 09592ec8bfe..c077e272010 100755 --- a/acceptance/tests/apply/classes/should_allow_param_override.rb +++ b/acceptance/tests/apply/classes/should_allow_param_override.rb @@ -1,5 +1,8 @@ test_name "should allow param override" +tag 'audit:high', + 'audit:unit' # This should be covered at the unit layer. + manifest = %q{ class parent { notify { 'msg': @@ -13,8 +16,8 @@ class child inherits parent { include child } -apply_manifest_on(agents, manifest) do +apply_manifest_on(agents, manifest) do |result| fail_test "parameter override didn't work" unless - stdout.include? "defined 'message' as 'child'" + result.stdout.include? "defined 'message' as 'child'" end diff --git a/acceptance/tests/apply/classes/should_allow_param_undef_override.rb b/acceptance/tests/apply/classes/should_allow_param_undef_override.rb index 39904dc0d03..052bdf270e9 100755 --- a/acceptance/tests/apply/classes/should_allow_param_undef_override.rb +++ b/acceptance/tests/apply/classes/should_allow_param_undef_override.rb @@ -1,5 +1,8 @@ test_name "should allow overriding a parameter to undef in inheritence" +tag 'audit:high', + 'audit:unit' # This should be covered at the unit layer. + agents.each do |agent| dir = agent.tmpdir('class_undef_override') out = File.join(dir, 'class_undef_override_out') @@ -27,9 +30,9 @@ class child inherits parent { step "apply the manifest" apply_manifest_on(agent, manifest) step "verify the file content" - on(agent, "cat #{out}") do - fail_test "the file was not touched" if stdout.include? "hello world!" 
- fail_test "the file was not updated" unless stdout.include? "hello new world" + on(agent, "cat #{out}") do |result| + fail_test "the file was not touched" if result.stdout.include? "hello world!" + fail_test "the file was not updated" unless result.stdout.include? "hello new world" end on(agent, "rm -rf #{dir}") diff --git a/acceptance/tests/apply/classes/should_include_resources_from_class.rb b/acceptance/tests/apply/classes/should_include_resources_from_class.rb index b78be6cecef..52f78f513f7 100755 --- a/acceptance/tests/apply/classes/should_include_resources_from_class.rb +++ b/acceptance/tests/apply/classes/should_include_resources_from_class.rb @@ -1,11 +1,14 @@ test_name "resources declared in a class can be applied with include" + +tag 'audit:high', + 'audit:unit' # This should be covered at the unit layer. + manifest = %q{ class x { notify{'a':} } include x } -apply_manifest_on(agents, manifest) do - fail_test "the resource did not apply" unless - stdout.include? "defined 'message' as 'a'" +apply_manifest_on(agents, manifest) do |result| + fail_test "the resource did not apply" unless result.stdout.include?("defined 'message' as 'a'") end diff --git a/acceptance/tests/apply/classes/should_not_auto_include_resources_from_class.rb b/acceptance/tests/apply/classes/should_not_auto_include_resources_from_class.rb index 25721eb4ca5..80aeea50fd2 100755 --- a/acceptance/tests/apply/classes/should_not_auto_include_resources_from_class.rb +++ b/acceptance/tests/apply/classes/should_not_auto_include_resources_from_class.rb @@ -1,6 +1,10 @@ test_name "resources declared in classes are not applied without include" + +tag 'audit:high', + 'audit:unit' # This should be covered at the unit layer. + manifest = %q{ class x { notify { 'test': message => 'never invoked' } } } -apply_manifest_on(agents, manifest) do +apply_manifest_on(agents, manifest) do |result| fail_test "found the notify despite not including it" if - stdout.include? "never invoked" + result.stdout.include? "never invoked" end diff --git a/acceptance/tests/apply/hashes/should_not_reassign.rb b/acceptance/tests/apply/hashes/should_not_reassign.rb deleted file mode 100755 index 885939ffe77..00000000000 --- a/acceptance/tests/apply/hashes/should_not_reassign.rb +++ /dev/null @@ -1,12 +0,0 @@ -test_name "hash reassignment should fail" -manifest = %q{ -$my_hash = {'one' => '1', 'two' => '2' } -$my_hash['one']='1.5' -} - -agents.each do |host| - apply_manifest_on(host, manifest, :acceptable_exit_codes => [1]) do - expected_error_message = "Illegal attempt to assign via [index/key]. 
Not an assignable reference" - fail_test("didn't find the failure") unless stderr.include?(expected_error_message) - end -end diff --git a/acceptance/tests/apply/puppet_apply_trace.rb b/acceptance/tests/apply/puppet_apply_trace.rb deleted file mode 100644 index fea35dc19e4..00000000000 --- a/acceptance/tests/apply/puppet_apply_trace.rb +++ /dev/null @@ -1,7 +0,0 @@ -test_name 'puppet apply --trace should provide a stack trace' - -agents.each do |agent| - on(agent, puppet('apply --trace -e "blue < 2"'), :acceptable_exit_codes => 1) do - assert_match(/\.rb:\d+:in `\w+'/m, stderr, "Did not print expected stack trace on stderr") - end -end diff --git a/acceptance/tests/catalog_with_binary_data.rb b/acceptance/tests/catalog_with_binary_data.rb new file mode 100644 index 00000000000..fcb35f69a61 --- /dev/null +++ b/acceptance/tests/catalog_with_binary_data.rb @@ -0,0 +1,83 @@ +test_name "C100300: Catalog containing binary data is applied correctly" do + require 'puppet/acceptance/common_utils' + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/agent_fqdn_utils' + extend Puppet::Acceptance::AgentFqdnUtils + + tag 'risk:high', + 'server' + + test_num = 'c100300' + tmp_environment = mk_tmp_environment_with_teardown(master, File.basename(__FILE__, '.*')) + agent_tmp_dirs = {} + agents.each do |agent| + agent_tmp_dirs[agent_to_fqdn(agent)] = agent.tmpdir(tmp_environment) + end + + teardown do + step 'remove all test files on agents' do + agents.each {|agent| on(agent, "rm -r '#{agent_tmp_dirs[agent_to_fqdn(agent)]}'", :accept_all_exit_codes => true)} + end + + # Remove all traces of the last used environment + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + + # note - master teardown is registered by #mk_tmp_environment_with_teardown + end + + step "Create module with binary data file on master" do + on(master, "mkdir -p '#{environmentpath}/#{tmp_environment}/modules/#{test_num}'/{manifests,files}") + master_module_manifest = "#{environmentpath}/#{tmp_environment}/modules/#{test_num}/manifests/init.pp" + master_module_binary_file = "#{environmentpath}/#{tmp_environment}/modules/#{test_num}/files/binary_data" + + create_remote_file(master, master_module_binary_file, "\xC0\xFF") + on(master, "chmod 644 '#{master_module_binary_file}'") + + manifest = <<-MANIFEST + class #{test_num}( + ) { + \$test_path = \$facts['networking']['fqdn'] ? 
#{agent_tmp_dirs} + file { '#{test_num}': + path => "\$test_path/#{test_num}", + content => binary_file('#{test_num}/binary_data'), + ensure => present, + } + } + MANIFEST + create_remote_file(master, master_module_manifest, manifest) + on(master, "chmod 644 '#{master_module_manifest}'") + end + + step "Create site.pp to classify nodes to include module" do + site_pp_file = "#{environmentpath}/#{tmp_environment}/manifests/site.pp" + site_pp = <<-SITE_PP + node default { + include #{test_num} + } + SITE_PP + create_remote_file(master, site_pp_file, site_pp) + on(master, "chmod 644 '#{site_pp_file}'") + end + + step "start the master" do + with_puppet_running_on(master, {}) do + + step "run puppet and ensure that binary data was correctly applied" do + agents.each do |agent| + on(agent, puppet('agent', '--test', "--environment '#{tmp_environment}'"), :acceptable_exit_codes => 2) + on(agent, "#{Puppet::Acceptance::CommandUtils::ruby_command(agent)} -e 'puts File.binread(\"#{agent_tmp_dirs[agent_to_fqdn(agent)]}/#{test_num}\").bytes.map {|b| b.to_s(16)}'") do |res| + assert_match(/c0\nff/, res.stdout, 'Binary file did not contain originally specified data') + end + end + end + + end + end + +end diff --git a/acceptance/tests/concurrency/ticket_2659_concurrent_catalog_requests.rb b/acceptance/tests/concurrency/ticket_2659_concurrent_catalog_requests.rb deleted file mode 100644 index 72e872a5c56..00000000000 --- a/acceptance/tests/concurrency/ticket_2659_concurrent_catalog_requests.rb +++ /dev/null @@ -1,110 +0,0 @@ -test_name "concurrent catalog requests (PUP-2659)" - -# we're only testing the effects of loading a master with concurrent requests -confine :except, :platform => 'windows' - -step "setup a manifest" - -testdir = master.tmpdir("concurrent") - -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) - File { - ensure => directory, - owner => #{master.puppet['user']}, - group => #{master.puppet['group']}, - mode => '750', - } - - file { '#{testdir}': } - file { '#{testdir}/busy': } - file { '#{testdir}/busy/one.txt': - ensure => file, - mode => '640', - content => "Something to read", - } - file { '#{testdir}/busy/two.txt': - ensure => file, - mode => '640', - content => "Something else to read", - } - file { '#{testdir}/busy/three.txt': - ensure => file, - mode => '640', - content => "Something more else to read", - } - - file { '#{testdir}/environments': } - file { '#{testdir}/environments/production': } - file { '#{testdir}/environments/production/manifests': } - file { '#{testdir}/environments/production/manifests/site.pp': - ensure => file, - content => ' - $foo = inline_template(" - <%- 1000.times do - Dir.glob(\\'#{testdir}/busy/*.txt\\').each do |f| - File.read(f) - end - end - %> - \\'touched the file system for a bit\\' - ") - notify { "end": - message => $foo, - } - ', - mode => '640', - } -MANIFEST - -step "start master" -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - } -} -with_puppet_running_on(master, master_opts, testdir) do - - step "concurrent catalog curls (with alliterative alacrity)" - agents.each do |agent| - cert_path = on(agent, puppet('config', 'print', 'hostcert')).stdout.chomp - key_path = on(agent, puppet('config', 'print', 'hostprivkey')).stdout.chomp - cacert_path = on(agent, puppet('config', 'print', 'localcacert')).stdout.chomp - agent_cert = on(agent, puppet('config', 'print', 'certname')).stdout.chomp - - run_count = 6 - agent_tmpdir = agent.tmpdir("concurrent-loop-script") - test_script = 
"#{agent_tmpdir}/loop.sh" - create_remote_file(agent, test_script, <<-EOF) - declare -a MYPIDS - loops=#{run_count} - - for (( i=0; i<$loops; i++ )); do - ( - sleep_for="0.$(( $RANDOM % 49 ))" - sleep $sleep_for - url='https://#{master}:8140/puppet/v3/catalog/#{agent_cert}?environment=production' - echo "Curling: $url" - curl --tlsv1 -v -# -H 'Accept: text/pson' --cert #{cert_path} --key #{key_path} --cacert #{cacert_path} $url - echo "$PPID Completed" - ) > "#{agent_tmpdir}/catalog-request-$i.out" 2>&1 & - echo "Launched $!" - MYPIDS[$i]=$! - done - - for (( i=0; i<$loops; i++ )); do - wait ${MYPIDS[$i]} - done - - echo "All requests are finished" - EOF - on(agent, "chmod +x #{test_script}") - on(agent, "#{test_script}") - run_count.times do |i| - step "Checking the results of catalog request ##{i}" - on(agent, "cat #{agent_tmpdir}/catalog-request-#{i}.out") do - assert_match(%r{< HTTP/1.* 200}, stdout) - assert_match(%r{touched the file system for a bit}, stdout) - end - end - end -end diff --git a/acceptance/tests/config/apply_file_metadata_specified_in_config.rb b/acceptance/tests/config/apply_file_metadata_specified_in_config.rb deleted file mode 100644 index 932ec74f383..00000000000 --- a/acceptance/tests/config/apply_file_metadata_specified_in_config.rb +++ /dev/null @@ -1,26 +0,0 @@ -test_name "#17371 file metadata specified in puppet.conf needs to be applied" - -# when owner/group works on windows for settings, this confine should be removed. -confine :except, :platform => 'windows' - -require 'puppet/acceptance/temp_file_utils' -extend Puppet::Acceptance::TempFileUtils -initialize_temp_dirs() - -agents.each do |agent| - logdir = get_test_file_path(agent, 'log') - - create_test_file(agent, 'site.pp', <<-SITE) - node default { - notify { puppet_run: } - } - SITE - - on(agent, puppet('config', 'set', 'logdir', "'#{logdir} { owner = root, group = root, mode = 0700 }'", '--confdir', get_test_file_path(agent, ''))) - - on(agent, puppet('apply', get_test_file_path(agent, 'site.pp'), '--confdir', get_test_file_path(agent, ''))) - - on(agent, "stat --format '%U:%G %a' #{logdir}") do - assert_match(/root:root 700/, stdout) - end -end diff --git a/acceptance/tests/config/puppet_manages_own_configuration_in_robust_manner.rb b/acceptance/tests/config/puppet_manages_own_configuration_in_robust_manner.rb deleted file mode 100644 index 0b116010062..00000000000 --- a/acceptance/tests/config/puppet_manages_own_configuration_in_robust_manner.rb +++ /dev/null @@ -1,88 +0,0 @@ -# User story: -# A new user has installed puppet either from source or from a gem, which does -# not put the "puppet" user or group on the system. They run the puppet master, -# which fails because of the missing user and then correct their actions. They -# expect that after correcting their actions, puppet will work correctly. -test_name "Puppet manages its own configuration in a robust manner" - -confine :except, :platform => 'fedora-19' - -skip_test "JVM Puppet cannot change its user while running." if @options[:is_puppetserver] - -# when owner/group works on windows for settings, this confine should be removed. -confine :except, :platform => 'windows' -# when managhome roundtrips for solaris, this confine should be removed -confine :except, :platform => 'solaris' -# pe setup includes ownership of external directories such as the passenger -# document root, which puppet itself knows nothing about -confine :except, :type => 'pe' -# same issue for a foss passenger run -if master.is_using_passenger? 
- skip_test 'Cannot test with passenger.' -end - -if master.use_service_scripts? - # Beaker defaults to leaving puppet running when using service scripts, - # Need to shut it down so we can modify user/group and test startup failure - on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=stopped')) -end - -step "Clear out yaml directory because of a bug in the indirector/yaml. (See #21145)" -on master, 'rm -rf $(puppet master --configprint yamldir)' - -original_state = {} -step "Record original state of system users" do - hosts.each do |host| - original_state[host] = {} - original_state[host][:user] = user = host.execute('puppet config print user') - original_state[host][:group] = group = host.execute('puppet config print group') - original_state[host][:ug_resources] = on(host, puppet('resource', 'user', user)).stdout - original_state[host][:ug_resources] += on(host, puppet('resource', 'group', group)).stdout - original_state[host][:ug_resources] += "Group['#{group}'] -> User['#{user}']\n" - end -end - -teardown do - # And cleaning up yaml dir again here because we are changing service - # user and group ids back to the original uid and gid - on master, 'rm -rf $(puppet master --configprint yamldir)' - - if master.use_service_scripts? - on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=stopped')) - end - - hosts.each do |host| - apply_manifest_on(host, <<-ORIG, :catch_failures => true) - #{original_state[host][:ug_resources]} - ORIG - end - - with_puppet_running_on(master, {}) do - agents.each do |agent| - on agent, puppet('agent', '-t', '--server', master) - end - end -end - -step "Remove system users" do - hosts.each do |host| - on host, puppet('resource', 'user', original_state[host][:user], 'ensure=absent') - on host, puppet('resource', 'group', original_state[host][:group], 'ensure=absent') - end -end - -step "Ensure master fails to start when missing system user" do - on master, puppet('master'), :acceptable_exit_codes => [74] do - assert_match(/could not change to group "#{original_state[master][:group]}"/, result.output) - assert_match(/Could not change to user #{original_state[master][:user]}/, result.output) - end -end - -step "Ensure master starts when making users after having previously failed startup" do - with_puppet_running_on(master, - :master => { :mkusers => true }) do - agents.each do |agent| - on agent, puppet('agent', '-t', '--server', master) - end - end -end diff --git a/acceptance/tests/cycle_detection.rb deleted file mode 100644 index e5432d96722..00000000000 --- a/acceptance/tests/cycle_detection.rb +++ /dev/null @@ -1,26 +0,0 @@ -test_name "cycle detection and reporting" - -step "check we report a simple cycle" -manifest = <<EOT -notify { "a1": require => Notify["a2"] } -notify { "a2": require => Notify["a1"] } -EOT - -apply_manifest_on(agents, manifest, :acceptable_exit_codes => [1]) do - assert_match(/Found 1 dependency cycle/, stderr, - "found and reported the cycle correctly") -end - -step "report multiple cycles in the same graph" -manifest = <<EOT -notify { "a1": require => Notify["a2"] } -notify { "a2": require => Notify["a1"] } - -notify { "b1": require => Notify["b2"] } -notify { "b2": require => Notify["b1"] } -EOT - -apply_manifest_on(agents, manifest, :acceptable_exit_codes => [1]) do - assert_match(/Found 2 dependency cycles/, stderr, - "found and reported the cycle correctly") -end diff --git a/acceptance/tests/direct_puppet/cached_catalog_remediate_local_drift.rb
b/acceptance/tests/direct_puppet/cached_catalog_remediate_local_drift.rb new file mode 100644 index 00000000000..60c9fb33e9c --- /dev/null +++ b/acceptance/tests/direct_puppet/cached_catalog_remediate_local_drift.rb @@ -0,0 +1,137 @@ +require 'puppet/acceptance/static_catalog_utils' +extend Puppet::Acceptance::StaticCatalogUtils + +test_name "PUP-5122: Puppet remediates local drift using code_id and content_uri" do + + tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # use mk_tmp_environment_with_teardown helper for environment construction + 'server' + + + skip_test 'requires puppetserver installation' if @options[:type] != 'aio' + + basedir = master.tmpdir(File.basename(__FILE__, '.*')) + module_dir = "#{basedir}/environments/production/modules" + + master_opts = { + 'main' => { + 'environmentpath' => "#{basedir}/environments" + } + } + + step "Add versioned-code parameters to puppetserver.conf and ensure the server is running" do + setup_puppetserver_code_id_scripts(master, basedir) + end + + teardown do + cleanup_puppetserver_code_id_scripts(master, basedir) + on master, "rm -rf #{basedir}" + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "Create a module and a file with content representing the first code_id version" do + apply_manifest_on(master, <<MANIFEST, :catch_failures => true) +File { + ensure => directory, + mode => "0750", + owner => #{master.puppet['user']}, + group => #{master.puppet['group']}, +} + +file { + '#{basedir}':; + '#{basedir}/environments':; + '#{basedir}/environments/production':; + '#{basedir}/environments/production/manifests':; + '#{module_dir}':; + '#{module_dir}/foo':; + '#{module_dir}/foo/files':; +} +MANIFEST + end + + with_puppet_running_on master, master_opts, basedir do + agents.each do |agent| + agent_test_file_path = agent.tmpfile('foo_file') + + step "Add test file resource to site.pp on master with agent-specific file path" do + apply_manifest_on(master, <<MANIFEST, :catch_failures => true) +File { + owner => #{master.puppet['user']}, + group => #{master.puppet['group']}, +} + +file { "#{basedir}/environments/production/manifests/site.pp" : + ensure => file, + mode => "0640", + content => "node default { + file { '#{agent_test_file_path}' : + ensure => file, + source => 'puppet:///modules/foo/foo.txt' + } +}", +} + +file { "#{module_dir}/foo/files/foo.txt" : + ensure => file, + content => "code_version_1", + mode => "0640", +} +MANIFEST + end + + step "agent: #{agent}: Initial run: create the file with code version 1 and cache the catalog" + on(agent, puppet("agent", "-t"), :acceptable_exit_codes => [0,2]) + + # When there is no drift, there should be no request made to the server + # for file metadata or file content. A puppet run depending on + # a non-server will fail if such a request is made. Verify the agent + # sends a report.
+ + step "Remove existing reports from server reports directory" + on(master, "rm -rf /opt/puppetlabs/server/data/puppetserver/reports/#{agent.node_name}/*") + r = on(master, "ls /opt/puppetlabs/server/data/puppetserver/reports/#{agent.node_name} | wc -l").stdout.chomp + assert_equal(r, '0', "reports directory should be empty!") + + step "Verify puppet run without drift does not make file request from server" + r = on(agent, puppet("agent", + "--use_cached_catalog", + "--server", "no_such_host", + "--report_server", master.hostname, + "--onetime", + "--no-daemonize", + "--detailed-exitcodes", + "--verbose" + )).stderr + assert_equal(r, "", "Fail: Did agent try to contact server?") + + step "Verify report was delivered to server" + r = on(master, "ls /opt/puppetlabs/server/data/puppetserver/reports/#{agent.node_name} | wc -l").stdout.chomp + assert_equal(r, '1', "Reports directory should have one file") + + step "agent: #{agent}: Remove the test file to simulate drift" + on(agent, "rm -rf #{agent_test_file_path}") + + step "Alter the source file on the master to simulate a code update" + apply_manifest_on(master, <<MANIFEST, :catch_failures => true) +file { "#{module_dir}/foo/files/foo.txt" : + ensure => file, + mode => "0640", + content => "code_version_2", +} +MANIFEST + + step "Run agent again using --use_cached_catalog and ensure content from the first code_id is used" + on(agent, puppet("agent", "-t", "--use_cached_catalog"), :acceptable_exit_codes => [0,2]) + on(agent, "cat #{agent_test_file_path}") do |result| + assert_equal('code_version_1', result.stdout) + end + end + end +end diff --git a/acceptance/tests/direct_puppet/catalog_uuid_correlates_catalogs_with_reports.rb b/acceptance/tests/direct_puppet/catalog_uuid_correlates_catalogs_with_reports.rb new file mode 100644 index 00000000000..45c862f7f5a --- /dev/null +++ b/acceptance/tests/direct_puppet/catalog_uuid_correlates_catalogs_with_reports.rb @@ -0,0 +1,56 @@ +test_name "PUP-5872: catalog_uuid correlates catalogs with reports" do + + tag 'audit:high', + 'audit:acceptance', + 'audit:refactor' # remove dependence on server by adding a + # catalog and report fixture to validate against.
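The catalog_uuid helpers that follow pull the value out of the agent's cached catalog (JSON) and the stored report (YAML) with regexes. For orientation, the same value can be read by parsing the JSON directly; a minimal sketch with a hypothetical catalog fragment (the certname and UUID below are made up):

    require 'json'

    # Hypothetical fragment of an agent's cached catalog at
    # <vardir>/client_data/catalog/<certname>.json; only the top-level catalog_uuid key matters here.
    cached_catalog = '{"name":"agent1.example.com","catalog_uuid":"0b3a4c1e-8d1f-4f6a-9d2e-1234567890ab","resources":[]}'
    catalog_uuid = JSON.parse(cached_catalog)['catalog_uuid']
    puts catalog_uuid
    # The test asserts that this value matches the catalog_uuid recorded in the report on the master.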
+ + master_reportdir = create_tmpdir_for_user(master, 'reportdir') + + def remove_reports_on_master(master_reportdir, agent_node_name) + on(master, "rm -rf #{master_reportdir}/#{agent_node_name}/*") + end + + def get_catalog_uuid_from_cached_catalog(host, agent_vardir, agent_node_name) + cache_catalog_uuid = nil + on(host, "cat #{agent_vardir}/client_data/catalog/#{agent_node_name}.json") do |result| + cache_catalog_uuid = result.stdout.match(/"catalog_uuid":"([a-z0-9\-]*)",/)[1] + end + cache_catalog_uuid + end + + def get_catalog_uuid_from_report(master_reportdir, agent_node_name) + report_catalog_uuid = nil + on(master, "cat #{master_reportdir}/#{agent_node_name}/*") do |result| + report_catalog_uuid = result.stdout.match(/catalog_uuid: '?([a-z0-9\-]*)'?/)[1] + end + report_catalog_uuid + end + + with_puppet_running_on(master, :master => { :reportdir => master_reportdir, :reports => 'store' }) do + agents.each do |agent| + agent_vardir = agent.tmpdir(File.basename(__FILE__, '.*')) + + step "agent: #{agent}: Initial run to retrieve a catalog and generate the first report" do + on(agent, puppet("agent", "-t", "--vardir #{agent_vardir}"), :acceptable_exit_codes => [0,2]) + end + + cache_catalog_uuid = get_catalog_uuid_from_cached_catalog(agent, agent_vardir, agent.node_name) + + step "agent: #{agent}: Ensure the catalog and report share the same catalog_uuid" do + report_catalog_uuid = get_catalog_uuid_from_report(master_reportdir, agent.node_name) + assert_equal(cache_catalog_uuid, report_catalog_uuid, "catalog_uuid found in cached catalog, #{cache_catalog_uuid} did not match report #{report_catalog_uuid}") + end + + step "cleanup reports on master" do + remove_reports_on_master(master_reportdir, agent.node_name) + end + + step "Run with --use_cached_catalog and ensure catalog_uuid in the new report matches the cached catalog" do + on(agent, puppet("agent", "--onetime", "--no-daemonize", "--use_cached_catalog", "--vardir #{agent_vardir}"), :acceptance_exit_codes => [0,2]) + report_catalog_uuid = get_catalog_uuid_from_report(master_reportdir, agent.node_name) + assert_equal(cache_catalog_uuid, report_catalog_uuid, "catalog_uuid found in cached catalog, #{cache_catalog_uuid} did not match report #{report_catalog_uuid}") + end + end + end +end diff --git a/acceptance/tests/direct_puppet/static_catalog_env_control.rb b/acceptance/tests/direct_puppet/static_catalog_env_control.rb new file mode 100644 index 00000000000..cb751bb92b0 --- /dev/null +++ b/acceptance/tests/direct_puppet/static_catalog_env_control.rb @@ -0,0 +1,344 @@ +test_name "Environment control of static catalogs" + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # use mk_tmp_environment_with_teardown helper for environment construction + 'server' + +skip_test 'requires puppetserver to test static catalogs' if @options[:type] != 'aio' + +require 'json' + +@testroot = master.tmpdir(File.basename(__FILE__, '/*')) +@coderoot = "#{@testroot}/code" +@confdir = master['puppetserver-confdir'] +@master_opts = { + 'main' => { + 'environmentpath' => "#{@coderoot}/environments", + }, +} +@production_files = {} +@canary_files = {} +@agent_manifests = {} +@catalog_files = {} +agents.each do |agent| + hn = agent.node_name + resdir = agent.tmpdir('results') + @production_files[hn] = "#{resdir}/prod_hello_from_puppet_uri" + @canary_files[hn] = "#{resdir}/can_hello_from_puppet_uri" + @catalog_files[hn] = "#{on(agent, puppet('config', 'print', 'client_datadir')).stdout.chomp}/catalog/#{hn}.json" + @agent_manifests[hn] = < file, + 
mode => "0644", + content => "class hello { + notice('hello from production-hello') + file { '#{resdir}' : + ensure => directory, + mode => '0755', + } + file { '#{resdir}/prod_hello_from_puppet_uri' : + ensure => file, + mode => '0644', + source => 'puppet:///modules/hello/hello_msg', + } +}", +} + +file { '#{@coderoot}/environments/canary/modules/can_hello/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class can_hello { + notice("hello from production-hello") + file { "#{resdir}": + ensure => directory, + mode => "0755", + } + file { "#{resdir}/can_hello_from_puppet_uri" : + ensure => file, + mode => "0644", + source => "puppet:///modules/can_hello/hello_msg", + } +}', +} +MANIFESTAGENT +end + +# The code_content script needs to return the correct content whose checksum +# matches the metadata contained in the static catalog. +PRODUCTION_CONTENT = "Hello message from production/hello module, content from source attribute.".freeze +CANARY_CONTENT = "Hello message from canary/can_hello module, content from source attribute.".freeze + +@manifest = < directory, + mode => "0755", +} + +file { + '#{@testroot}':; + '#{@coderoot}':; + '#{@coderoot}/environments':; + '#{@coderoot}/environments/production':; + '#{@coderoot}/environments/production/manifests':; + '#{@coderoot}/environments/production/modules':; + '#{@coderoot}/environments/production/modules/hello':; + '#{@coderoot}/environments/production/modules/hello/manifests':; + '#{@coderoot}/environments/production/modules/hello/files':; + + '#{@coderoot}/environments/canary':; + '#{@coderoot}/environments/canary/manifests':; + '#{@coderoot}/environments/canary/modules':; + '#{@coderoot}/environments/canary/modules/can_hello':; + '#{@coderoot}/environments/canary/modules/can_hello/manifests':; + '#{@coderoot}/environments/canary/modules/can_hello/files':; + +} + +file { '#{@coderoot}/code_id.sh' : + ensure => file, + mode => "0755", + content => '#! /bin/bash +echo "code_version_1" +', +} + +file { '#{@coderoot}/code_content.sh' : + ensure => file, + mode => "0755", + content => '#! 
/bin/bash +# script arguments: +# $1 environment +# $2 code_id +# $3 path relative to mount +# use echo -n to omit newline +if [ $1 == "production" ] ; then + echo -n "#{PRODUCTION_CONTENT}" +else + echo -n "#{CANARY_CONTENT}" +fi +', +} + +file { '#{@coderoot}/environments/production/environment.conf': + ensure => file, + mode => "0644", + content => 'environment_timeout = 0 +', +} + +file { '#{@coderoot}/environments/canary/environment.conf': + ensure => file, + mode => "0644", + content => 'environment_timeout = 0 +static_catalogs = false +', +} + +file { '#{@coderoot}/environments/production/manifests/site.pp': + ensure => file, + mode => "0644", + content => "node default { + include hello +} +", +} + +file { '#{@coderoot}/environments/canary/manifests/site.pp': + ensure => file, + mode => "0644", + content => "node default { + include can_hello +} +", +} + +file { '#{@coderoot}/environments/production/modules/hello/files/hello_msg': + ensure => file, + mode => "0644", + content => "#{PRODUCTION_CONTENT}", +} + +file { '#{@coderoot}/environments/canary/modules/can_hello/files/hello_msg': + ensure => file, + mode => "0644", + content => "#{CANARY_CONTENT}", +} +MANIFEST + +teardown do + on(master, "mv #{@confdir}/puppetserver.conf.bak #{@confdir}/puppetserver.conf") + on(master, "rm -rf #{@testroot}") + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end +end + +step 'apply main manifest, static_catalogs unspecified in global scope, unspecified in production environment, disabled in canary environment' +on( + master, + "cp #{@confdir}/puppetserver.conf #{@confdir}/puppetserver.conf.bak" +) +apply_manifest_on(master, @manifest, :catch_failures => true) + +step "Add versioned-code parameters to puppetserver.conf and ensure the server is running" +puppetserver_config = "#{master['puppetserver-confdir']}/puppetserver.conf" +on master, "cp #{puppetserver_config} #{@coderoot}/puppetserver.conf.bak" +versioned_code_settings = { + "jruby-puppet" => { + "master-code-dir" => @coderoot + }, + "versioned-code" => { + "code-id-command" => "#{@coderoot}/code_id.sh", + "code-content-command" => "#{@coderoot}/code_content.sh" + } +} +modify_tk_config(master, puppetserver_config, versioned_code_settings) + +step 'start puppet server' +with_puppet_running_on master, @master_opts, @coderoot do + agents.each do |agent| + hn = agent.node_name + + apply_manifest_on(master, @agent_manifests[hn], :catch_failures => true) + + step 'agent gets a production catalog, should be static catalog by default' + on( + agent, + puppet( + 'agent', + '-t', + '--environment', 'production' + ), + :acceptable_exit_codes => [0, 2] + ) + + step 'verify production environment' + r = on(agent, "cat #{@catalog_files[hn]}") + catalog_content = JSON.parse(r.stdout) + assert_equal( + catalog_content['environment'], + 'production', + 'catalog for unexpectected environment' + ) + + step 'verify static catalog by finding metadata section in catalog' + assert( + catalog_content['metadata'] && catalog_content['metadata'][@production_files[hn]], + 'metadata section of catalog not found' + ) + + step 'agent gets a canary catalog, static catalog should be disabled' + on( + agent, + puppet( + 'agent', + '-t', + '--environment', 'canary' + ), + :acceptable_exit_codes => [0, 2] + ) + + step 'verify canary environment' + r = on(agent, "cat #{@catalog_files[hn]}") + catalog_content = JSON.parse(r.stdout) + assert_equal( + catalog_content['environment'], + 
'canary', + 'catalog for unexpectected environment' + ) + + step 'verify not static catalog by absence of metadata section in catalog' + assert_nil( + catalog_content['metadata'], + 'unexpected metadata section found in catalog' + ) + + end +end + +step 'enable static catalog for canary environment' +@static_canary_manifest = < file, + mode => "0644", + content => 'environment_timeout = 0 +static_catalogs = true +', +} +MANIFEST2 +apply_manifest_on(master, @static_canary_manifest, :catch_failures => true) + +step 'disable global static catalog setting' +@master_opts = { + 'master' => { + 'static_catalogs' => false + }, + 'main' => { + 'environmentpath' => "#{@coderoot}/environments", + }, +} + +step 'bounce server for static catalog disable setting to take effect.' +with_puppet_running_on master, @master_opts, @coderoot do + agents.each do |agent| + hn = agent.node_name + + apply_manifest_on(master, @agent_manifests[hn], :catch_failures => true) + + step 'agent gets a production catalog, should not be a static catalog' + on( + agent, + puppet( + 'agent', + '-t', + '--environment', 'production' + ), + :acceptable_exit_codes => [0, 2] + ) + + step 'verify production environment' + r = on(agent, "cat #{@catalog_files[hn]}") + catalog_content = JSON.parse(r.stdout) + assert_equal( + catalog_content['environment'], + 'production', + 'catalog for unexpectected environment' + ) + + step 'verify production environment, not static catalog' + assert_nil( + catalog_content['metadata'], + 'unexpected metadata section found in catalog' + ) + + step 'agent gets a canary catalog, static catalog should be enabled' + on( + agent, + puppet( + 'agent', + '-t', + '--environment', 'canary' + ), + :acceptable_exit_codes => [0, 2] + ) + + step 'verify canary catalog' + r = on(agent, "cat #{@catalog_files[hn]}") + catalog_content = JSON.parse(r.stdout) + assert_equal( + catalog_content['environment'], + 'canary', + 'catalog for unexpectected environment' + ) + + step 'verify canary static catalog' + assert( + catalog_content['metadata'] && catalog_content['metadata'][@canary_files[hn]], + 'metadata section of catalog not found' + ) + + end +end diff --git a/acceptance/tests/direct_puppet/supports_utf8.rb b/acceptance/tests/direct_puppet/supports_utf8.rb new file mode 100644 index 00000000000..2e7bffac17f --- /dev/null +++ b/acceptance/tests/direct_puppet/supports_utf8.rb @@ -0,0 +1,83 @@ +test_name "C97172: static catalogs support utf8" do + + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/agent_fqdn_utils' + extend Puppet::Acceptance::AgentFqdnUtils + + tag 'audit:high', + 'audit:acceptance', + 'audit:refactor' # Review for agent side UTF validation. + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + + tmp_file = {} + agents.each do |agent| + tmp_file[agent_to_fqdn(agent)] = agent.tmpfile(tmp_environment) + end + + teardown do + # Remove all traces of the last used environment + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + + step 'clean out produced resources' do + agents.each do |agent| + if tmp_file.has_key?(agent_to_fqdn(agent)) && !tmp_file[agent_to_fqdn(agent)].empty? + on(agent, "rm -f '#{tmp_file[agent_to_fqdn(agent)]}'") + end + end + end + end + + file_contents = 'Mønti Pythøn ik den Hølie Gräilen, yër? 
€ ‰ ㄘ 万 竹 Ü Ö' + step 'create site.pp with utf8 chars' do + manifest = < file, + content => ' +\$test_path = \$facts["networking"]["fqdn"] ? #{tmp_file} +file { \$test_path: + content => @(UTF8) + #{file_contents} + | UTF8 +} + ', +} MANIFEST + apply_manifest_on(master, manifest, :catch_failures => true) + end + + step 'run agent(s)' do + with_puppet_running_on(master, {}) do + agents.each do |agent| + config_version = '' + config_version_matcher = /configuration version '(\d+)'/ + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), + :acceptable_exit_codes => 2).stdout do |result| + config_version = result.match(config_version_matcher)[1] + end + on(agent, "cat '#{tmp_file[agent_to_fqdn(agent)]}'").stdout do |result| + assert_equal(file_contents, result, 'file contents did not match accepted') + end + + on(agent, "rm -f '#{tmp_file[agent_to_fqdn(agent)]}'") + on(agent, puppet("agent -t --environment '#{tmp_environment}' --use_cached_catalog"), + :acceptable_exit_codes => 2).stdout do |result| + assert_match(config_version_matcher, result, 'agent did not use cached catalog') + second_config_version = result.match(config_version_matcher)[1] + assert_equal(config_version, second_config_version, 'config version should have been the same') + end + on(agent, "cat '#{tmp_file[agent_to_fqdn(agent)]}'").stdout do |result| + assert_equal(file_contents, result, 'file contents did not match accepted') + end + end + end + end + +end diff --git a/acceptance/tests/doc/should_print_function_reference.rb deleted file mode 100644 index 3ffcf8097ba..00000000000 --- a/acceptance/tests/doc/should_print_function_reference.rb +++ /dev/null @@ -1,5 +0,0 @@ -test_name "verify we can print the function reference" -on(agents, puppet_doc("-r", "function")) do - fail_test "didn't print function reference" unless - stdout.include? 'Function Reference' -end diff --git a/acceptance/tests/doc/ticket_4120_cannot_generate_type_reference.rb deleted file mode 100755 index 7f721c2724f..00000000000 --- a/acceptance/tests/doc/ticket_4120_cannot_generate_type_reference.rb +++ /dev/null @@ -1,5 +0,0 @@ -test_name "verify we can print the function reference" -on(agents, puppet_doc("-r", "type")) do - fail_test "didn't print type reference" unless - stdout.include? 'Type Reference' -end diff --git a/acceptance/tests/ensure_puppet-agent_paths.rb deleted file mode 100644 index d710de01375..00000000000 --- a/acceptance/tests/ensure_puppet-agent_paths.rb +++ /dev/null @@ -1,159 +0,0 @@ -# ensure installs and code honor new puppet-agent path spec: -# https://github.com/puppetlabs/puppet-specifications/blob/master/file_paths.md -test_name 'PUP-4033: Ensure aio path spec is honored' - -# include file_exists?
-require 'puppet/acceptance/temp_file_utils' -extend Puppet::Acceptance::TempFileUtils - -def config_options(platform) - case platform - when /windows/ - if platform =~ /2003/ - common_app_data = 'C:/Documents and Settings/All Users/Application Data' - else - common_app_data = 'C:/ProgramData' - end - puppetlabs_data = "#{common_app_data}/PuppetLabs" - - codedir = "#{puppetlabs_data}/code" - confdir = "#{puppetlabs_data}/puppet/etc" - vardir = "#{puppetlabs_data}/puppet/cache" - logdir = "#{puppetlabs_data}/puppet/var/log" - rundir = "#{puppetlabs_data}/puppet/var/run" - sep = ";" - else - codedir = '/etc/puppetlabs/code' - confdir = '/etc/puppetlabs/puppet' - vardir = '/opt/puppetlabs/puppet/cache' - logdir = '/var/log/puppetlabs/puppet' - rundir = '/var/run/puppetlabs' - sep = ":" - end - - [ - # code - {:name => :codedir, :expected => codedir, :installed => :dir}, - {:name => :environmentpath, :expected => "#{codedir}/environments"}, - {:name => :hiera_config, :expected => "#{codedir}/hiera.yaml"}, - - # confdir - {:name => :confdir, :expected => confdir, :installed => :dir}, - {:name => :rest_authconfig, :expected => "#{confdir}/auth.conf"}, - {:name => :autosign, :expected => "#{confdir}/autosign.conf"}, - {:name => :binder_config, :expected => ""}, - {:name => :csr_attributes, :expected => "#{confdir}/csr_attributes.yaml"}, - {:name => :trusted_oid_mapping_file, :expected => "#{confdir}/custom_trusted_oid_mapping.yaml"}, - {:name => :deviceconfig, :expected => "#{confdir}/device.conf"}, - {:name => :fileserverconfig, :expected => "#{confdir}/fileserver.conf"}, - {:name => :config, :expected => "#{confdir}/puppet.conf", :installed => :file}, - {:name => :route_file, :expected => "#{confdir}/routes.yaml"}, - {:name => :ssldir, :expected => "#{confdir}/ssl", :installed => :dir}, - - # vardir - {:name => :vardir, :expected => "#{vardir}", :installed => :dir}, - {:name => :bucketdir, :expected => "#{vardir}/bucket"}, - {:name => :clientyamldir, :expected => "#{vardir}/client_yaml", :installed => :dir}, - {:name => :client_datadir, :expected => "#{vardir}/client_data", :installed => :dir}, - {:name => :clientbucketdir, :expected => "#{vardir}/clientbucket", :installed => :dir}, - {:name => :devicedir, :expected => "#{vardir}/devices"}, - {:name => :pluginfactdest, :expected => "#{vardir}/facts.d", :installed => :dir}, - {:name => :libdir, :expected => "#{vardir}/lib", :installed => :dir}, - {:name => :factpath, :expected => "#{vardir}/lib/facter#{sep}#{vardir}/facts", :not_path => true}, - {:name => :module_working_dir, :expected => "#{vardir}/puppet-module"}, - {:name => :reportdir, :expected => "#{vardir}/reports"}, - {:name => :server_datadir, :expected => "#{vardir}/server_data"}, - {:name => :statedir, :expected => "#{vardir}/state", :installed => :dir}, - {:name => :yamldir, :expected => "#{vardir}/yaml"}, - - # logdir/rundir - {:name => :logdir, :expected => logdir, :installed => :dir}, - {:name => :rundir, :expected => rundir, :installed => :dir}, - {:name => :pidfile, :expected => "#{rundir}/agent.pid"}, - ] -end - -step 'test configprint outputs' -agents.each do |agent| - on(agent, puppet_agent('--configprint all')) do - config_options(agent[:platform]).each do |config_option| - assert_match("#{config_option[:name]} = #{config_option[:expected]}", stdout) - end - end -end - -step 'test puppet genconfig entries' -agents.each do |agent| - on(agent, puppet_agent('--genconfig')) do - config_options(agent[:platform]).each do |config_option| - assert_match("#{config_option[:name]} = 
#{config_option[:expected]}", stdout) - end - end -end - -step 'test puppet config paths exist' -agents.each do |agent| - config_options(agent[:platform]).select {|v| !v[:not_path] }.each do |config_option| - path = config_option[:expected] - case config_option[:installed] - when :dir - if !dir_exists?(agent, path) - fail_test("Failed to find expected directory '#{path}' on agent '#{agent}'") - end - when :file - if !file_exists?(agent, path) - fail_test("Failed to find expected file '#{path}' on agent '#{agent}'") - end - end - end -end - - -public_binaries = { - :posix => ['puppet', 'facter', 'hiera', 'mco'], - :win => ['puppet.bat', 'facter.bat', 'hiera.bat', 'mco.bat'] -} - -def locations(platform, ruby_arch, type) - if type != 'aio' - return '/usr/bin' - end - - case platform - when /windows/ - # If undefined, ruby_arch defaults to x86 - if ruby_arch == 'x64' - ruby_arch = /-64/ - else - ruby_arch = /-32/ - end - if platform =~ ruby_arch - return 'C:/Program Files/Puppet Labs/Puppet/bin' - else - return 'C:/Program Files (x86)/Puppet Labs/Puppet/bin' - end - else - return '/opt/puppetlabs/bin' - end -end - -step 'test puppet binaries exist' -agents.each do |agent| - dir = locations(agent[:platform], agent[:ruby_arch], @options[:type]) - os = agent['platform'] =~ /windows/ ? :win : :posix - - public_binaries[os].each do |binary| - path = File.join(dir, binary) - case os - when :win - if !file_exists?(agent, path) - fail_test("Failed to find expected binary '#{path}' on agent '#{agent}'") - end - when :posix - if !link_exists?(agent, path) - fail_test("Failed to find expected symbolic link '#{path}' on agent '#{agent}'") - end - end - end -end - diff --git a/acceptance/tests/environment/3x_forbidden_environment_names_allowed.rb b/acceptance/tests/environment/3x_forbidden_environment_names_allowed.rb deleted file mode 100644 index 0592472a60d..00000000000 --- a/acceptance/tests/environment/3x_forbidden_environment_names_allowed.rb +++ /dev/null @@ -1,66 +0,0 @@ -test_name 'PUP-4413 3x forbidden environment names should be allowed in 4x' - -step 'setup environments' - -testdir = create_tmpdir_for_user(master, 'forbidden_env') -manifest = <<-MANIFEST - File { - ensure => directory, - owner => #{master.puppet['user']}, - group => #{master.puppet['group']}, - mode => "0750", - } - - file { "#{testdir}":; - "#{testdir}/environments":; - "#{testdir}/environments/master":; - "#{testdir}/environments/master/manifests":; - "#{testdir}/environments/master/modules":; - "#{testdir}/environments/main":; - "#{testdir}/environments/main/manifests":; - "#{testdir}/environments/main/modules":; - "#{testdir}/environments/agent":; - "#{testdir}/environments/agent/manifests":; - "#{testdir}/environments/agent/modules":; - "#{testdir}/environments/user":; - "#{testdir}/environments/user/manifests":; - "#{testdir}/environments/user/modules":; - } - file { "#{testdir}/environments/master/manifests/site.pp": - ensure => file, - content => 'notify{"$::environment":}' - } - file { "#{testdir}/environments/main/manifests/site.pp": - ensure => file, - content => 'notify{"$::environment":}' - } - file { "#{testdir}/environments/agent/manifests/site.pp": - ensure => file, - content => 'notify{"$::environment":}' - } - file { "#{testdir}/environments/user/manifests/site.pp": - ensure => file, - content => 'notify{"$::environment":}' - } -MANIFEST - -apply_manifest_on(master, manifest, :catch_failures => true) - -step 'run agents, ensure no one complains about the environment' - -master_opts = { - 'main' => { - 
'environmentpath' => "#{testdir}/environments" - } -} - -environments = ['master','main','agent','user'] -with_puppet_running_on(master, master_opts, testdir) do - agents.each do |agent| - environments.each do |environment| - on(agent, puppet('agent', - "--test --server #{master} --environment #{environment}"), - :acceptable_exit_codes => 2) - end - end -end diff --git a/acceptance/tests/environment/agent_runs_pluginsync_with_proper_environment.rb b/acceptance/tests/environment/agent_runs_pluginsync_with_proper_environment.rb deleted file mode 100644 index 37cfc0133cb..00000000000 --- a/acceptance/tests/environment/agent_runs_pluginsync_with_proper_environment.rb +++ /dev/null @@ -1,63 +0,0 @@ -# We noticed some strange behavior if an environment was changed between the -# time where the node retrieved facts for itself and the catalog retrieved -# facts puppet could pluginsync with the incorrect environment. For more -# details see PUP-3591. -test_name "Agent should pluginsync with the environment the agent resolves to" - -testdir = create_tmpdir_for_user master, 'environment_resolve' - -create_remote_file master, "#{testdir}/enc.rb", < true) - File { - ensure => directory, - mode => "0770", - owner => #{master.puppet['user']}, - group => #{master.puppet['group']}, - } - file { - '#{testdir}/environments':; - '#{testdir}/environments/production':; - '#{testdir}/environments/correct/':; - '#{testdir}/environments/correct/modules':; - '#{testdir}/environments/correct/modules/amod':; - '#{testdir}/environments/correct/modules/amod/lib':; - '#{testdir}/environments/correct/modules/amod/lib/puppet':; - } - file { '#{testdir}/environments/correct/modules/amod/lib/puppet/foo.rb': - ensure => file, - mode => "0640", - content => "#correct_version", - } -MANIFEST - -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - }, - 'master' => { - 'node_terminus' => 'exec', - 'external_nodes' => "#{testdir}/enc.rb" - }, -} - -with_puppet_running_on master, master_opts, testdir do - agents.each do |agent| - on(agent, puppet("agent", "-t", "--server #{master}")) - on(agent, "cat \"#{agent.puppet['vardir']}/lib/puppet/foo.rb\"") - assert_match(/#correct_version/, stdout, "The plugin from environment 'correct' was not synced") - on(agent, "rm -rf \"#{agent.puppet['vardir']}/lib\"") - end -end diff --git a/acceptance/tests/environment/broken_unassigned_environment_handled_gracefully.rb b/acceptance/tests/environment/broken_unassigned_environment_handled_gracefully.rb index 92314de6618..23d32a62466 100644 --- a/acceptance/tests/environment/broken_unassigned_environment_handled_gracefully.rb +++ b/acceptance/tests/environment/broken_unassigned_environment_handled_gracefully.rb @@ -1,5 +1,18 @@ test_name 'PUP-3755 Test an un-assigned broken environment' +tag 'audit:high', + 'audit:integration', + 'audit:refactor', # Use mk_tmp_environment_with_teardown helper + 'server' + +teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end +end + step 'setup environments' testdir = create_tmpdir_for_user(master, 'confdir') @@ -45,10 +58,9 @@ with_puppet_running_on(master, master_opts, testdir) do agents.each do |agent| on(agent, puppet('agent', - "--test --server #{master} --environment #{environment}"), - :acceptable_exit_codes => (0..255)) do - assert_match(/you win/, stdout, - 'agent did not pickup newly classified environment.') + "--test --environment #{environment}"), + :acceptable_exit_codes => 
(0..255)) do |result| + assert_match(/you win/, result.stdout, 'agent did not pickup newly classified environment.') end end end diff --git a/acceptance/tests/environment/can_enumerate_environments.rb b/acceptance/tests/environment/can_enumerate_environments.rb index 28f92876096..3fb8a0dad98 100644 --- a/acceptance/tests/environment/can_enumerate_environments.rb +++ b/acceptance/tests/environment/can_enumerate_environments.rb @@ -1,7 +1,13 @@ test_name "Can enumerate environments via an HTTP endpoint" -def master_port(agent) - setting_on(agent, "agent", "masterport") +tag 'audit:high', + 'audit:integration', + 'server' + +confine :except, :platform => /osx/ # see PUP-4820 + +def server_port(agent) + setting_on(agent, "agent", "serverport") end def setting_on(host, section, name) @@ -17,7 +23,7 @@ def full_path(host, path) end def curl_master_from(agent, path, headers = '', &block) - url = "https://#{master}:#{master_port(agent)}#{path}" + url = "https://#{master}:#{server_port(agent)}#{path}" cert_path = full_path(agent, setting_on(agent, "agent", "hostcert")) key_path = full_path(agent, setting_on(agent, "agent", "hostprivkey")) curl_base = "curl --tlsv1 -sg --cert \"#{cert_path}\" --key \"#{key_path}\" -k -H '#{headers}'" @@ -25,7 +31,7 @@ def curl_master_from(agent, path, headers = '', &block) on agent, "#{curl_base} '#{url}'", &block end -master_user = on(master, puppet("master --configprint user")).stdout.strip +master_user = puppet_config(master, 'user', section: 'master') environments_dir = create_tmpdir_for_user master, "environments" apply_manifest_on(master, <<-MANIFEST) File { @@ -52,18 +58,16 @@ def curl_master_from(agent, path, headers = '', &block) end with_puppet_running_on(master, master_opts) do - agents.each do |agent| - step "Ensure that an unauthenticated client cannot access the environments list" do - on agent, "curl --tlsv1 -ksv https://#{master}:#{master_port(agent)}/puppet/v3/environments", :acceptable_exit_codes => [0,7] do - assert_match(/< HTTP\/1\.\d 403/, stderr) - end + step "Ensure that an unauthenticated client cannot access the environments list" do + on(master, "curl --tlsv1 -ksv https://#{master}:#{server_port(master)}/puppet/v3/environments", :acceptable_exit_codes => [0,7]) do |result| + assert_match(/< HTTP\/1\.\d 403/, result.stderr) end + end - step "Ensure that an authenticated client can retrieve the list of environments" do - curl_master_from(agent, '/puppet/v3/environments') do - data = JSON.parse(stdout) - assert_equal(["env1", "env2", "production"], data["environments"].keys.sort) - end + step "Ensure that an authenticated client can retrieve the list of environments" do + curl_master_from(master, '/puppet/v3/environments') do |result| + data = JSON.parse(result.stdout) + assert_equal(["env1", "env2", "production"], data["environments"].keys.sort) end end end diff --git a/acceptance/tests/environment/custom_type_provider_from_same_environment.rb b/acceptance/tests/environment/custom_type_provider_from_same_environment.rb new file mode 100644 index 00000000000..3a8074aba23 --- /dev/null +++ b/acceptance/tests/environment/custom_type_provider_from_same_environment.rb @@ -0,0 +1,121 @@ +test_name 'C59122: ensure provider from same env as custom type' do +require 'puppet/acceptance/environment_utils' +extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:integration', # This behavior is specific to the master to 'do the right thing' + 'server' + + app_type = File.basename(__FILE__, '.*') + tmp_environment = 
mk_tmp_environment_with_teardown(master, app_type) + file_correct = "#{tmp_environment}-correct.txt" + file_wrong = "#{tmp_environment}-wrong.txt" + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + fq_prod_environmentpath = "#{environmentpath}/production" + + teardown do + step 'clean out production env' do + on(master, "rm -rf #{fq_prod_environmentpath}/modules/*", :accept_all_exit_codes => true) + on(master, "rm #{fq_prod_environmentpath}/manifests/site.pp", :accept_all_exit_codes => true) + end + step 'clean out file resources' do + on(hosts, "rm #{file_correct} #{file_wrong}", :accept_all_exit_codes => true) + end + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "create a custom type and provider in each of production and #{tmp_environment}" do + type_name = 'test_custom_type' + provider_name = 'universal' + type_content = < directory } +file { + '#{fq_tmp_environmentpath}/modules/simple_type':; + '#{fq_tmp_environmentpath}/modules/simple_type/lib':; + '#{fq_tmp_environmentpath}/modules/simple_type/lib/puppet':; + '#{fq_tmp_environmentpath}/modules/simple_type/lib/puppet/type/':; + '#{fq_tmp_environmentpath}/modules/simple_type/lib/puppet/provider/':; + '#{fq_tmp_environmentpath}/modules/simple_type/lib/puppet/provider/#{type_name}':; + '#{fq_prod_environmentpath}/modules':; + '#{fq_prod_environmentpath}/modules/simple_type':; + '#{fq_prod_environmentpath}/modules/simple_type/lib':; + '#{fq_prod_environmentpath}/modules/simple_type/lib/puppet':; + '#{fq_prod_environmentpath}/modules/simple_type/lib/puppet/type/':; + '#{fq_prod_environmentpath}/modules/simple_type/lib/puppet/provider/':; + '#{fq_prod_environmentpath}/modules/simple_type/lib/puppet/provider/#{type_name}':; +} +file { '#{fq_tmp_environmentpath}/modules/simple_type/lib/puppet/type/#{type_name}.rb': + ensure => file, + content => '#{type_content}', +} +file { '#{fq_prod_environmentpath}/modules/simple_type/lib/puppet/type/#{type_name}.rb': + ensure => file, + content => '#{type_content}', +} +file { '#{fq_tmp_environmentpath}/modules/simple_type/lib/puppet/provider/#{type_name}/#{provider_name}.rb': + ensure => file, + content => '#{provider_content('correct', type_name, provider_name)}', +} +file { '#{fq_prod_environmentpath}/modules/simple_type/lib/puppet/provider/#{type_name}/#{provider_name}.rb': + ensure => file, + content => '#{provider_content('wrong', type_name, provider_name)}', +} +file { '#{fq_tmp_environmentpath}/manifests/site.pp': + ensure => file, + content => 'node default { #{type_name}{"#{file_correct}": ensure=>present} }', +} +file { '#{fq_prod_environmentpath}/manifests': } +file { '#{fq_prod_environmentpath}/manifests/site.pp': + ensure => file, + content => 'node default { #{type_name}{"#{file_wrong}": ensure=>present} }', +} +MANIFEST + apply_manifest_on(master, manifest, :catch_failures => true) + end + + step "run agent in #{tmp_environment}, ensure it finds the correct provider" do + with_puppet_running_on(master,{}) do + agents.each do |agent| + on(agent, puppet("agent -t --environment #{tmp_environment}"), + :accept_all_exit_codes => true) do |result| + assert_equal(2, result.exit_code, 'agent did not exit with the correct code of 2') + assert_match(/#{file_correct}/, result.stdout, 'agent did not ensure the correct file') + assert(agent.file_exist?(file_correct), 'puppet did not create the file') + end + end + end + end + +end diff --git 
a/acceptance/tests/environment/directory_environment_production_created_master.rb b/acceptance/tests/environment/directory_environment_production_created_master.rb index 780aa0ee3b0..3f4c3ec0b77 100644 --- a/acceptance/tests/environment/directory_environment_production_created_master.rb +++ b/acceptance/tests/environment/directory_environment_production_created_master.rb @@ -1,12 +1,13 @@ test_name 'ensure production environment created by master if missing' +tag 'audit:high', + 'audit:integration', + 'server' + testdir = create_tmpdir_for_user master, 'prod-env-created' step 'make environmentpath' -master_user = on(master, puppet("master --configprint user")).stdout.strip -cert_path = on(master, puppet('config', 'print', 'hostcert')).stdout.strip -key_path = on(master, puppet('config', 'print', 'hostprivkey')).stdout.strip -cacert_path = on(master, puppet('config', 'print', 'localcacert')).stdout.strip +master_user = puppet_config(master, 'user', section: 'master') apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, @@ -29,16 +30,13 @@ step 'run master; ensure production environment created' with_puppet_running_on(master, master_opts, testdir) do - if master.is_using_passenger? - on(master, "curl -k --cert #{cert_path} --key #{key_path} --cacert #{cacert_path} https://localhost:8140/puppet/v3/environments") - end on(master, "test -d '#{testdir}/environments/production'") step 'ensure catalog returned from production env with no changes' agents.each do |agent| - on(agent, puppet("agent -t --server #{master} --environment production --detailed-exitcodes")) do + on(agent, puppet("agent -t --environment production --detailed-exitcodes")) do |result| # detailed-exitcodes produces a 0 when no changes are made. - assert_equal(0, exit_code) + assert_equal(0, result.exit_code) end end end diff --git a/acceptance/tests/environment/directory_environment_with_environment_conf.rb b/acceptance/tests/environment/directory_environment_with_environment_conf.rb deleted file mode 100644 index ae37c24ee2b..00000000000 --- a/acceptance/tests/environment/directory_environment_with_environment_conf.rb +++ /dev/null @@ -1,112 +0,0 @@ -test_name 'Use a directory environment from environmentpath with an environment.conf' -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils - -classify_nodes_as_agent_specified_if_classifer_present - -testdir = create_tmpdir_for_user master, 'use-environment-conf' -absolute_manifestdir = "#{testdir}/manifests" -absolute_modulesdir = "#{testdir}/absolute-modules" -absolute_globalsdir = "#{testdir}/global-modules" - -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) -File { - ensure => directory, - owner => #{master.puppet['user']}, - group => #{master.puppet['group']}, - mode => "0770", -} - -file { - "#{testdir}":; - "#{testdir}/environments":; - "#{testdir}/environments/direnv":; - "#{testdir}/environments/direnv/environment.conf": - ensure => file, - mode => "0640", - content => ' - manifest=#{absolute_manifestdir} - modulepath=relative-modules:#{absolute_modulesdir}:$basemodulepath - config_version=version_script.sh - ' - ; - - "#{testdir}/environments/direnv/relative-modules":; - "#{testdir}/environments/direnv/relative-modules/relmod":; - "#{testdir}/environments/direnv/relative-modules/relmod/manifests":; - "#{testdir}/environments/direnv/relative-modules/relmod/manifests/init.pp": - ensure => file, - mode => "0640", - content => 'class relmod { - notify { "included relmod": } - }' - ; - 
- "#{testdir}/environments/direnv/version_script.sh": - ensure => file, - mode => "0750", - content => '#!/usr/bin/env sh -echo "ver123" -' - ; - - "#{absolute_manifestdir}":; - "#{absolute_manifestdir}/site.pp": - ensure => file, - mode => "0640", - content => ' - notify { "direnv site.pp": } - include relmod - include absmod - include globalmod - ' - ; - - "#{absolute_modulesdir}":; - "#{absolute_modulesdir}/absmod":; - "#{absolute_modulesdir}/absmod/manifests":; - "#{absolute_modulesdir}/absmod/manifests/init.pp": - ensure => file, - mode => "0640", - content => 'class absmod { - notify { "included absmod": } - }' - ; - - "#{absolute_globalsdir}":; - "#{absolute_globalsdir}/globalmod":; - "#{absolute_globalsdir}/globalmod/manifests":; - "#{absolute_globalsdir}/globalmod/manifests/init.pp": - ensure => file, - mode => "0640", - content => 'class globalmod { - notify { "included globalmod": } - }' - ; -} -MANIFEST - -master_opts = { - 'master' => { - 'environmentpath' => "#{testdir}/environments", - 'basemodulepath' => "#{absolute_globalsdir}", - } -} -if master.is_pe? - master_opts['master']['basemodulepath'] << ":#{master['sitemoduledir']}" -end - -with_puppet_running_on master, master_opts, testdir do - agents.each do |agent| - on(agent, - puppet("agent", "-t", "--server", master, "--environment", "direnv"), - :acceptable_exit_codes => [2]) do |result| - - assert_match(/direnv site.pp/, result.stdout) - assert_match(/included relmod/, result.stdout) - assert_match(/included absmod/, result.stdout) - assert_match(/included globalmod/, result.stdout) - assert_match(/Applying.*ver123/, result.stdout) - end - end -end diff --git a/acceptance/tests/environment/enc_nonexistent_directory_environment.rb b/acceptance/tests/environment/enc_nonexistent_directory_environment.rb index b5f4410a9b9..2c938aee1dd 100644 --- a/acceptance/tests/environment/enc_nonexistent_directory_environment.rb +++ b/acceptance/tests/environment/enc_nonexistent_directory_environment.rb @@ -1,10 +1,22 @@ -test_name "Master should produce error if enc specifies a nonexistent environment" -require 'puppet/acceptance/classifier_utils.rb' -extend Puppet::Acceptance::ClassifierUtils +test_name "Master should produce error if enc specifies a nonexistent environment" do + require 'puppet/acceptance/classifier_utils.rb' + extend Puppet::Acceptance::ClassifierUtils -testdir = create_tmpdir_for_user master, 'nonexistent_env' + tag 'audit:high', + 'audit:unit', + 'server' -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + testdir = create_tmpdir_for_user(master, 'nonexistent_env') + + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, owner => #{master.puppet['user']}, @@ -22,18 +34,18 @@ mode => '0644', content => 'notify { "In the production environment": }'; } -MANIFEST + MANIFEST -if master.is_pe? - group = { - 'name' => 'Environment Does Not Exist', - 'description' => 'Classify our test agent nodes in an environment that does not exist.', - 'environment' => 'doesnotexist', - 'environment_trumps' => true, - } - create_group_for_nodes(agents, group) -else - apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + if master.is_pe? 
+ group = { + 'name' => 'Environment Does Not Exist', + 'description' => 'Classify our test agent nodes in an environment that does not exist.', + 'environment' => 'doesnotexist', + 'environment_trumps' => true, + } + create_group_for_nodes(agents, group) + else + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) file { "#{testdir}/enc.rb": ensure => file, mode => '0775', @@ -41,24 +53,27 @@ puts "environment: doesnotexist" '; } - MANIFEST -end + MANIFEST + end -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + } } -} -master_opts['master'] = { - 'node_terminus' => 'exec', - 'external_nodes' => "#{testdir}/enc.rb", -} if !master.is_pe? + master_opts['master'] = { + 'node_terminus' => 'exec', + 'external_nodes' => "#{testdir}/enc.rb", + } if !master.is_pe? -with_puppet_running_on master, master_opts, testdir do - agents.each do |agent| - on(agent, puppet("agent -t --server #{master} --verbose"), :acceptable_exit_codes => [1]) do - assert_match(/Could not find a directory environment named 'doesnotexist'/, stderr, "Errors when nonexistent environment is specified") - assert_not_match(/In the production environment/, stdout, "Executed manifest from production environment") + with_puppet_running_on(master, master_opts, testdir) do + agents.each do |agent| + on(agent, puppet("agent -t --verbose"), :acceptable_exit_codes => [1]) do |result| + unless agent['locale'] == 'ja' + assert_match(/Could not find a directory environment named 'doesnotexist'/, result.stderr, "Errors when nonexistent environment is specified") + end + refute_match(/In the production environment/, result.stdout, "Executed manifest from production environment") + end end end end diff --git a/acceptance/tests/environment/environment_scenario-bad.rb b/acceptance/tests/environment/environment_scenario-bad.rb index 0eaadfc1371..344ceb4f08c 100644 --- a/acceptance/tests/environment/environment_scenario-bad.rb +++ b/acceptance/tests/environment/environment_scenario-bad.rb @@ -1,53 +1,66 @@ -test_name 'Test behavior of directory environments when environmentpath is set to a non-existent directory' -require 'puppet/acceptance/environment_utils' -extend Puppet::Acceptance::EnvironmentUtils -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils - -classify_nodes_as_agent_specified_if_classifer_present - -step 'setup environments' - -stub_forge_on(master) - -testdir = create_tmpdir_for_user master, 'confdir' -puppet_conf_backup_dir = create_tmpdir_for_user(master, "puppet-conf-backup-dir") - -apply_manifest_on(master, environment_manifest(testdir), :catch_failures => true) - -step 'Test' -master_opts = { - 'main' => { - 'environmentpath' => '/doesnotexist', - } -} -env = 'testing' -path = master.puppet('master')['codedir'] - -results = use_an_environment(env, 'bad environmentpath', master_opts, testdir, puppet_conf_backup_dir, :directory_environments => true) - -expectations = { - :puppet_config => { - :exit_code => 1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_module_install => { - :exit_code => 1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_module_uninstall => { - :exit_code => 1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_apply => { - :exit_code => 
1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_agent => { - :exit_code => 1, - :matches => [%r{(Warning|Error).*(404|400).*Could not find environment '#{env}'}, - %r{Could not retrieve catalog; skipping run}], - }, -} - -assert_review(review_results(results,expectations)) +test_name 'Test behavior of directory environments when environmentpath is set to a non-existent directory' do + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + require 'puppet/acceptance/classifier_utils' + extend Puppet::Acceptance::ClassifierUtils + + tag 'audit:high', + 'audit:unit', # The error responses for the agent should be covered by Ruby unit tests. + # The server 404/400 response should be covered by server integration tests. + 'server' + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + classify_nodes_as_agent_specified_if_classifer_present + + step 'setup environments' + + testdir = create_tmpdir_for_user(master, 'confdir') + puppet_conf_backup_dir = create_tmpdir_for_user(master, "puppet-conf-backup-dir") + + apply_manifest_on(master, environment_manifest(testdir), :catch_failures => true) + + step 'Test' do + env_path = '/doesnotexist' + master_opts = { + 'main' => { + 'environmentpath' => "#{env_path}", + } + } + env = 'testing' + + results = use_an_environment(env, 'bad environmentpath', master_opts, testdir, puppet_conf_backup_dir, :directory_environments => true) + + expectations = { + :puppet_config => { + :exit_code => 0, + :matches => [%r{basemodulepath = /etc/puppetlabs/code/modules:/opt/puppetlabs/puppet/modules}, + %r{modulepath =}, + %r{manifest =}, + %r{config_version =}], + }, + :puppet_apply => { + :exit_code => 1, + :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{env_path}}], + }, + :puppet_agent => { + :exit_code => 0, + }, + } + + agents.each do |host| + unless host['locale'] == 'ja' + expectations[:puppet_agent][:matches] = [%r{Environment '#{env}' not found on server, skipping initial pluginsync.}, + %r{Local environment: '#{env}' doesn't match server specified environment 'production', restarting agent run with environment 'production'}] + end + end + + assert_review(review_results(results, expectations)) + end +end diff --git a/acceptance/tests/environment/environment_scenario-default.rb b/acceptance/tests/environment/environment_scenario-default.rb deleted file mode 100644 index d081622a299..00000000000 --- a/acceptance/tests/environment/environment_scenario-default.rb +++ /dev/null @@ -1,60 +0,0 @@ -test_name "Test behavior of default environment" -require 'puppet/acceptance/environment_utils' -extend Puppet::Acceptance::EnvironmentUtils -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils - -classify_nodes_as_agent_specified_if_classifer_present - -step "setup environments" - -stub_forge_on(master) - -codedir = master.puppet('master')['codedir'] -testdir = create_tmpdir_for_user master, "codedir" -puppet_code_backup_dir = create_tmpdir_for_user(master, "puppet-code-backup-dir") - -apply_manifest_on(master, environment_manifest(testdir), :catch_failures => true) - -step "Test" -master_opts = { - 'main' => { - 'environmentpath' => '$codedir/environments', - } -} -general = [ master_opts, testdir, puppet_code_backup_dir, { :directory_environments => true } ] -env = nil 
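
[Reviewer note, not part of the diff] The environment scenario tests touched in this diff all follow the same shape: drive `puppet config`, `puppet apply`, and `puppet agent` through `use_an_environment`, then compare the collected results against an expectations hash via `review_results` and `assert_review` from Puppet::Acceptance::EnvironmentUtils. Those helpers are not shown in this diff; the sketch below is only an illustration of the pattern, under the assumption that each collected result exposes `exit_code` and `output`. A review that returns an empty list passes; anything else is reported as a failed expectation.

    # Hypothetical shapes, for illustration only: results and expectations are keyed by
    # command name; each expectation can pin an exit code and list regexes the output
    # must match.
    def sketch_review(results, expectations)
      failures = []
      expectations.each do |command, expected|
        result = results[command]
        if expected[:exit_code] && result.exit_code != expected[:exit_code]
          failures << "#{command}: exited #{result.exit_code}, expected #{expected[:exit_code]}"
        end
        Array(expected[:matches]).each do |pattern|
          failures << "#{command}: output did not match #{pattern}" unless result.output =~ pattern
        end
      end
      failures
    end
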
- -results = use_an_environment(env, "default environment", *general) - -expectations = { - :puppet_config => { - :exit_code => 0, - :matches => [%r{manifest.*#{codedir}/environments/#{env}/manifests$}, - %r{modulepath.*#{codedir}/environments/#{env}/modules:.+}, - %r{config_version = $}] - }, - :puppet_module_install => { - :exit_code => 0, - :matches => [%r{Preparing to install into #{codedir}/environments/#{env}/modules}, - %r{pmtacceptance-nginx}], - }, - :puppet_module_uninstall => { - :exit_code => 0, - :matches => [%r{Removed.*pmtacceptance-nginx.*from #{codedir}/environments/#{env}/modules}], - }, - :puppet_apply => { - :exit_code => 0, - :matches => [%r{include default environment testing_mod}], - :notes => "The production directory environment is empty, but the inclusion of basemodulepath in the directory environment modulepath picks up the default testing_mod class in $codedir/modules" - }, - :puppet_agent => { - :exit_code => 0, - :matches => [ %r{Applying configuration version '\d+'}], - :does_not_match => [%r{include.*testing_mod}, - %r{Warning.*404}], - :notes => "The master automatically creates an empty production env dir." - } -} - -assert_review(review_results(results,expectations)) diff --git a/acceptance/tests/environment/environment_scenario-existing.rb b/acceptance/tests/environment/environment_scenario-existing.rb deleted file mode 100644 index dca86d50e96..00000000000 --- a/acceptance/tests/environment/environment_scenario-existing.rb +++ /dev/null @@ -1,59 +0,0 @@ -test_name "Test a specific, existing directory environment configuration" -require 'puppet/acceptance/environment_utils' -extend Puppet::Acceptance::EnvironmentUtils -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils - -classify_nodes_as_agent_specified_if_classifer_present - -step "setup environments" - -stub_forge_on(master) - -codedir = master.puppet('master')['codedir'] -testdir = create_tmpdir_for_user master, "codedir" -puppet_code_backup_dir = create_tmpdir_for_user(master, "puppet-code-backup-dir") - -apply_manifest_on(master, environment_manifest(testdir), :catch_failures => true) - -step "Test" - -master_opts = { - 'main' => { - 'environmentpath' => '$codedir/environments', - } -} -general = [ master_opts, testdir, puppet_code_backup_dir, { :directory_environments => true } ] - -env = 'testing' - -results = use_an_environment(env, "directory testing", *general) -expectations = { - :puppet_config => { - :exit_code => 0, - :matches => [%r{manifest.*#{codedir}/environments/#{env}/manifests$}, - %r{modulepath.*#{codedir}/environments/#{env}/modules:.+}, - %r{config_version = $}] - }, - :puppet_module_install => { - :exit_code => 0, - :matches => [%r{Preparing to install into #{codedir}/environments/#{env}/modules}, - %r{pmtacceptance-nginx}], - }, - :puppet_module_uninstall => { - :exit_code => 0, - :matches => [%r{Removed.*pmtacceptance-nginx.*from #{codedir}/environments/#{env}/modules}], - }, - :puppet_apply => { - :exit_code => 0, - :matches => [%r{include directory #{env} environment testing_mod}], - }, - :puppet_agent => { - :exit_code => 2, - :matches => [%r{Applying configuration version '\d+'}, - %r{in directory #{env} environment site.pp}, - %r{include directory #{env} environment testing_mod}], - }, -} - -assert_review(review_results(results, expectations)) diff --git a/acceptance/tests/environment/environment_scenario-master_environmentpath.rb b/acceptance/tests/environment/environment_scenario-master_environmentpath.rb deleted file mode 100644 
index f957f497ab9..00000000000 --- a/acceptance/tests/environment/environment_scenario-master_environmentpath.rb +++ /dev/null @@ -1,63 +0,0 @@ -test_name "Test behavior of a directory environment when environmentpath is set in the master section" -require 'puppet/acceptance/environment_utils' -extend Puppet::Acceptance::EnvironmentUtils -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils - -classify_nodes_as_agent_specified_if_classifer_present - -step "setup environments" - -stub_forge_on(master) - -codedir = master.puppet('master')['codedir'] -testdir = create_tmpdir_for_user master, "codedir" -puppet_code_backup_dir = create_tmpdir_for_user(master, "puppet-code-backup-dir") - -apply_manifest_on(master, environment_manifest(testdir), :catch_failures => true) - -step "Test" -master_opts = { - 'master' => { - 'environmentpath' => '$codedir/environments', - } -} -env = 'testing' - -results = use_an_environment("testing", "master environmentpath", master_opts, testdir, puppet_code_backup_dir, :directory_environments => true, :config_print => '--section=master') - -expectations = { - :puppet_config => { - :exit_code => 0, - :matches => [%r{manifest.*#{codedir}/environments/#{env}/manifests$}, - %r{modulepath.*#{codedir}/environments/#{env}/modules:.+}, - %r{config_version = $}] - }, - :puppet_module_install => { - :exit_code => 0, - :matches => [%r{Preparing to install into #{codedir}/modules}, - %r{pmtacceptance-nginx}], - :expect_failure => true, - :notes => "Runs in user mode and doesn't see the master environmenetpath setting.", - }, - :puppet_module_uninstall => { - :exit_code => 0, - :matches => [%r{Removed.*pmtacceptance-nginx.*from #{codedir}/modules}], - :expect_failure => true, - :notes => "Runs in user mode and doesn't see the master environmenetpath setting.", - }, - :puppet_apply => { - :exit_code => 0, - :matches => [%r{include default environment testing_mod}], - :expect_failure => true, - :notes => "Runs in user mode and doesn't see the master environmenetpath setting.", - }, - :puppet_agent => { - :exit_code => 2, - :matches => [%r{Applying configuration version '\d+'}, - %r{in directory #{env} environment site.pp}, - %r{include directory #{env} environment testing_mod}], - }, -} - -assert_review(review_results(results,expectations)) diff --git a/acceptance/tests/environment/environment_scenario-non_existent.rb b/acceptance/tests/environment/environment_scenario-non_existent.rb deleted file mode 100644 index b57c6de06fc..00000000000 --- a/acceptance/tests/environment/environment_scenario-non_existent.rb +++ /dev/null @@ -1,54 +0,0 @@ -test_name "Test for an environment that does not exist" -require 'puppet/acceptance/environment_utils' -extend Puppet::Acceptance::EnvironmentUtils -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils - -classify_nodes_as_agent_specified_if_classifer_present - -step "setup environments" - -stub_forge_on(master) - -testdir = create_tmpdir_for_user master, "codedir" -puppet_code_backup_dir = create_tmpdir_for_user(master, "puppet-code-backup-dir") - -apply_manifest_on(master, environment_manifest(testdir), :catch_failures => true) - -step "Test" -master_opts = { - 'main' => { - 'environmentpath' => '$codedir/environments', - } -} -general = [ master_opts, testdir, puppet_code_backup_dir, { :directory_environments => true } ] -env = 'doesnotexist' -path = master.puppet('master')['codedir'] - -results = use_an_environment(env, "non existent environment", *general) - 
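
[Reviewer note, not part of the diff] The deleted expectations below capture the older behavior, where asking for an unknown environment produced a 404/400 and a skipped catalog run. The rewritten bad-environmentpath scenario earlier in this diff expects the newer behavior instead: the agent notes the missing environment, skips the initial pluginsync, and restarts the run against 'production'. A rough sketch of asserting that newer behavior follows, assuming the messages land on stdout and skipping the string match on 'ja' locales as the other tests in this diff do.

    env = 'doesnotexist'  # any environment name the server does not know about
    agents.each do |agent|
      next if agent['locale'] == 'ja'   # translated output will not match the English strings
      on(agent, puppet("agent -t --environment #{env}"), :accept_all_exit_codes => true) do |result|
        assert_match(/Environment '#{env}' not found on server, skipping initial pluginsync\./,
                     result.stdout)
        assert_match(/restarting agent run with environment 'production'/, result.stdout)
      end
    end
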
-expectations = { - :puppet_config => { - :exit_code => 1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_module_install => { - :exit_code => 1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_module_uninstall => { - :exit_code => 1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_apply => { - :exit_code => 1, - :matches => [%r{Could not find a directory environment named '#{env}' anywhere in the path.*#{path}}], - }, - :puppet_agent => { - :exit_code => 1, - :matches => [%r{(Warning|Error).*(404|400).*Could not find environment '#{env}'}, - %r{Could not retrieve catalog; skipping run}], - } -} - -assert_review(review_results(results,expectations)) diff --git a/acceptance/tests/environment/environment_scenario-with_explicit_environment_conf.rb b/acceptance/tests/environment/environment_scenario-with_explicit_environment_conf.rb deleted file mode 100644 index 1a9205b2bfc..00000000000 --- a/acceptance/tests/environment/environment_scenario-with_explicit_environment_conf.rb +++ /dev/null @@ -1,57 +0,0 @@ -test_name "Test a specific, existing directory environment with an explicit environment.conf file" -require 'puppet/acceptance/environment_utils' -extend Puppet::Acceptance::EnvironmentUtils -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils - -classify_nodes_as_agent_specified_if_classifer_present - -step "setup environments" - -stub_forge_on(master) - -codedir = master.puppet('master')['codedir'] -testdir = create_tmpdir_for_user master, "codedir" -puppet_code_backup_dir = create_tmpdir_for_user(master, "puppet-code-backup-dir") - -apply_manifest_on(master, environment_manifest(testdir), :catch_failures => true) - -step "Test" -master_opts = { - 'main' => { - 'environmentpath' => '$codedir/environments', - } -} -general = [ master_opts, testdir, puppet_code_backup_dir, { :directory_environments => true } ] - -results = use_an_environment("testing_environment_conf", "directory with environment.conf testing", *general) - -expectations = { - :puppet_config => { - :exit_code => 0, - :matches => [%r{manifest.*#{codedir}/environments/testing_environment_conf/nonstandard-manifests$}, - %r{modulepath.*#{codedir}/environments/testing_environment_conf/nonstandard-modules:.+}, - %r{config_version = #{codedir}/environments/testing_environment_conf/local-version.sh$}] - }, - :puppet_module_install => { - :exit_code => 0, - :matches => [%r{Preparing to install into #{codedir}/environments/testing_environment_conf/nonstandard-modules}, - %r{pmtacceptance-nginx}], - }, - :puppet_module_uninstall => { - :exit_code => 0, - :matches => [%r{Removed.*pmtacceptance-nginx.*from #{codedir}/environments/testing_environment_conf/nonstandard-modules}], - }, - :puppet_apply => { - :exit_code => 0, - :matches => [%r{include directory testing with environment\.conf testing_mod}], - }, - :puppet_agent => { - :exit_code => 2, - :matches => [%r{Applying configuration version 'local testing_environment_conf'}, - %r{in directory testing with environment\.conf site.pp}, - %r{include directory testing with environment\.conf testing_mod}], - }, -} - -assert_review(review_results(results,expectations)) diff --git a/acceptance/tests/environment/feature_branch_configured_environment.rb b/acceptance/tests/environment/feature_branch_configured_environment.rb new file mode 100644 
index 00000000000..d95fa0d053b --- /dev/null +++ b/acceptance/tests/environment/feature_branch_configured_environment.rb @@ -0,0 +1,27 @@ +test_name "Agent should use set environment after running with specified environment" do + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + tag 'audit:high', + 'server' + + # Remove all traces of the last used environment + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + tmp_environment = mk_tmp_environment_with_teardown(master, 'special') + agents.each do |agent| + on(agent, puppet("agent -t --environment #{tmp_environment}")) do |result| + assert_match(/Info: Using environment 'special_\w+'/, result.stdout) + end + + on(agent, puppet('agent -t')) do |result| + assert_match(/Info: Using environment 'production'/, result.stdout) + end + end +end diff --git a/acceptance/tests/environment/should_find_existing_production_environment.rb b/acceptance/tests/environment/should_find_existing_production_environment.rb new file mode 100644 index 00000000000..ad5b096f6bd --- /dev/null +++ b/acceptance/tests/environment/should_find_existing_production_environment.rb @@ -0,0 +1,149 @@ +test_name "should find existing production environment" +tag 'audit:medium' + +require 'puppet/acceptance/i18ndemo_utils' +extend Puppet::Acceptance::I18nDemoUtils + +agents.each do |agent| + path_separator = agent.platform_defaults[:pathseparator] + initial_environment = on(agent, puppet("config print environment")).stdout.chomp + initial_environment_paths = on(agent, puppet("config print environmentpath")).stdout.chomp.split(path_separator) + + default_environment_path = '' + custom_environment_path = agent.tmpdir('custom_environment') + + teardown do + step 'uninstall the module' do + uninstall_i18n_demo_module(master) + uninstall_i18n_demo_module(agent) + end + + step 'Remove custom environment paths' do + environment_paths = on(agent, puppet("config print environmentpath")).stdout.chomp + environment_paths.split(path_separator).each do |path| + agent.rm_rf(path) unless initial_environment_paths.include?(path) + end + + agent.rm_rf(custom_environment_path) + end + + step 'Reset environment settings' do + on(agent, puppet("config set environmentpath #{initial_environment_paths.join(path_separator)}")) + + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + + if initial_environment == 'production' + on(agent, puppet("config delete environment")) + else + on(agent, puppet("config set environment #{initial_environment}")) + end + + on(agent, puppet("agent -t")) + end + end + + step 'Ensure a clean environment with default settings' do + step 'Remove the lastrunfile which contains the last used agent environment' do + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + + step 'Change to the default environment setting' do + on(agent, puppet("config delete environment")) + on(agent, puppet("config print environment")) do |result| + assert_match('production', result.stdout, "Default environment is not 'production' as expected") + end + end + + step 'Change to the default environmentpath setting and remove production folder' do + on(agent, puppet("config delete environmentpath")) + default_environment_path = on(agent, puppet("config print environmentpath")).stdout.chomp + 
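
[Reviewer note, not part of the diff] As the surrounding steps exercise, environmentpath is a search path rather than a single directory: entries are joined by the platform's path separator, scanned in order, and the test expects a missing production directory to be recreated in the first entry on the next run. A small sketch of listing those entries and locating the one that currently holds 'production', reusing the same Beaker calls the test uses; `agent` and the `ls`-based probe are illustrative only.

    # Sketch: split environmentpath on the platform separator (';' on Windows, ':' elsewhere)
    # and report which entry currently contains a 'production' directory.
    path_separator = agent.platform_defaults[:pathseparator]
    env_dirs = on(agent, puppet('config print environmentpath')).stdout.chomp.split(path_separator)
    production_dir = env_dirs.find do |dir|
      on(agent, "ls #{dir}", :accept_all_exit_codes => true).stdout =~ /production/
    end
    # production_dir is the highest-priority entry containing 'production', or nil if none does.
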
agent.rm_rf("#{default_environment_path}/production") + end + + step 'Apply changes and expect puppet to create the production folder back' do + on(agent, puppet("agent -t")) + on(agent, "ls #{default_environment_path}") do |result| + assert_match('production', result.stdout, "Default environment folder was not generated in last puppet run") + end + end + end + + step 'Install a module' do + install_i18n_demo_module(master) + end + + step 'Expect output from the custom fact of the module' do + on(agent, puppet("agent -t"), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/Error:.*i18ndemo/, result.stderr) + end + end + + step 'Add a custom environment path before the current one' do + current_environment_path = on(agent, puppet("config print environmentpath")).stdout.chomp + on(agent, puppet("config set environmentpath '#{custom_environment_path}#{path_separator}#{current_environment_path}'")) + end + + step 'Expect the module to still be found' do + on(agent, puppet("agent -t"), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/Error:.*i18ndemo/, result.stderr) + end + end + + step 'Expect no production environment folder changes' do + on(agent, "ls #{custom_environment_path}") do |result| + refute_match(/production/, result.stdout) + end + + on(agent, "ls #{default_environment_path}") do |result| + assert_match('production', result.stdout) + end + end + + step 'Remove production folder' do + agent.rm_rf("#{default_environment_path}/production") + end + + step 'Expect production environment folder to be recreated in the custom path' do + on(agent, puppet("agent -t"), :acceptable_exit_codes => [0, 2]) do |result| + step 'Expect the module to be gone on the server node' do + refute_match(/Error:.*i18ndemo/, result.stderr) + end if agent == master + + step 'Expect the production environment, along with the module, to be synced back on the agent node' do + assert_match(/Error:.*i18ndemo/, result.stderr) + end if agent != master + end + + on(agent, "ls #{custom_environment_path}") do |result| + assert_match('production', result.stdout, "Default environment folder was not generated in last puppet run") + end + + on(agent, "ls #{default_environment_path}") do |result| + refute_match(/production/, result.stdout) + end + end + + step 'Set back to just default environmentpath setting' do + on(agent, puppet("config delete environmentpath")) + end + + step 'Expect production environment folder to be found in both paths but use the default one' do + on(agent, puppet("agent -t"), :acceptable_exit_codes => [0, 2]) do |result| + step 'Expect the module to be gone' do + refute_match(/Error:.*i18ndemo/, result.stderr) + end if agent == master + end + + on(agent, "ls #{default_environment_path}") do |result| + assert_match('production', result.stdout, "Default environment folder was not generated in last puppet run") + end + + on(agent, "ls #{custom_environment_path}") do |result| + assert_match('production', result.stdout) + end + end +end diff --git a/acceptance/tests/environment/use_agent_environment_when_enc_doesnt_specify.rb b/acceptance/tests/environment/use_agent_environment_when_enc_doesnt_specify.rb index 5611f34cc06..4e9173e3e1d 100644 --- a/acceptance/tests/environment/use_agent_environment_when_enc_doesnt_specify.rb +++ b/acceptance/tests/environment/use_agent_environment_when_enc_doesnt_specify.rb @@ -1,20 +1,33 @@ -test_name "Agent should use agent environment if there is an enc that does not specify the environment" -require 'puppet/acceptance/classifier_utils' -extend 
Puppet::Acceptance::ClassifierUtils +test_name "Agent should use agent environment if there is an enc that does not specify the environment" do + require 'puppet/acceptance/classifier_utils' + extend Puppet::Acceptance::ClassifierUtils -classify_nodes_as_agent_specified_if_classifer_present + tag 'audit:high', + 'audit:integration', + 'server' -testdir = create_tmpdir_for_user master, 'use_agent_env' + # Remove all traces of the last used environment + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + classify_nodes_as_agent_specified_if_classifer_present + + testdir = create_tmpdir_for_user(master, 'use_agent_env') -create_remote_file master, "#{testdir}/enc.rb", < true) + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, mode => "0770", @@ -38,21 +51,23 @@ mode => "0640", content => 'notify { "more_different_string": }', } -MANIFEST + MANIFEST -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - 'node_terminus' => 'exec', - 'external_nodes' => "#{testdir}/enc.rb", - }, -} + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + 'node_terminus' => 'exec', + 'external_nodes' => "#{testdir}/enc.rb", + }, + } -with_puppet_running_on master, master_opts, testdir do + with_puppet_running_on(master, master_opts, testdir) do - agents.each do |agent| - run_agent_on(agent, "--no-daemonize --onetime --server #{master} --verbose --environment more_different") - assert_match(/more_different_string/, stdout, "Did not find more_different_string from \"more_different\" environment") - end + agents.each do |agent| + run_agent_on(agent, "--no-daemonize --onetime --verbose --environment more_different") do |result| + assert_match(/more_different_string/, result.stdout, "Did not find more_different_string from \"more_different\" environment") + end + end + end end diff --git a/acceptance/tests/environment/use_agent_environment_when_no_enc.rb b/acceptance/tests/environment/use_agent_environment_when_no_enc.rb index 6178f670fcc..b9d1bbdb213 100644 --- a/acceptance/tests/environment/use_agent_environment_when_no_enc.rb +++ b/acceptance/tests/environment/use_agent_environment_when_no_enc.rb @@ -1,8 +1,22 @@ -test_name "Agent should use agent environment if there is no enc-specified environment" +test_name "Agent should use agent environment if there is no enc-specified environment" do -testdir = create_tmpdir_for_user master, 'use_agent_env' + tag 'audit:high', + 'audit:integration', + 'audit:refactor', # This can be combined with use_agent_environment_when_enc_doesnt_specify test + 'server' -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + # Remove all traces of the last used environment + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + testdir = create_tmpdir_for_user(master, 'use_agent_env') + + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, mode => "0770", @@ -26,21 +40,23 @@ mode => "0640", content => 'notify { "more_different_string": }', } -MANIFEST + MANIFEST -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - }, - 'master' => { - 'node_terminus' => 'plain' - }, -} + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + }, + 'master' => { + 'node_terminus' => 
'plain' + }, + } -with_puppet_running_on master, master_opts, testdir do + with_puppet_running_on(master, master_opts, testdir) do - agents.each do |agent| - run_agent_on(agent, "--no-daemonize --onetime --server #{master} --verbose --environment more_different") - assert_match(/more_different_string/, stdout, "Did not find more_different_string from \"more_different\" environment") + agents.each do |agent| + run_agent_on(agent, "--no-daemonize --onetime --verbose --environment more_different") do |result| + assert_match(/more_different_string/, result.stdout, "Did not find more_different_string from \"more_different\" environment") + end + end end end diff --git a/acceptance/tests/environment/use_enc_environment.rb b/acceptance/tests/environment/use_enc_environment.rb index d40798ec7fd..41f6d50d5dd 100644 --- a/acceptance/tests/environment/use_enc_environment.rb +++ b/acceptance/tests/environment/use_enc_environment.rb @@ -1,31 +1,44 @@ -test_name "Agent should use environment given by ENC" -require 'puppet/acceptance/classifier_utils.rb' -extend Puppet::Acceptance::ClassifierUtils +test_name 'Agent should use environment given by ENC and only compile a catalog once' do + require 'puppet/acceptance/classifier_utils.rb' + extend Puppet::Acceptance::ClassifierUtils -testdir = create_tmpdir_for_user master, 'use_enc_env' + tag 'audit:high', + 'audit:integration', + 'server' -if master.is_pe? - group = { - 'name' => 'Special Environment', - 'description' => 'Classify our test agent nodes in the special environment.', - 'environment' => 'special', - 'environment_trumps' => true, - } - create_group_for_nodes(agents, group) -else + # Remove all traces of the last used environment + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + testdir = create_tmpdir_for_user(master, 'use_enc_env') + + if master.is_pe? + group = { + 'name' => 'Special Environment', + 'description' => 'Classify our test agent nodes in the special environment.', + 'environment' => 'special', + 'environment_trumps' => true, + } + create_group_for_nodes(agents, group) + else -create_remote_file master, "#{testdir}/enc.rb", < true) + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, mode => "0770", @@ -49,22 +62,26 @@ mode => "0640", content => 'notify { "expected_string": }', } -MANIFEST + MANIFEST -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - }, -} -master_opts['master'] = { - 'node_terminus' => 'exec', - 'external_nodes' => "#{testdir}/enc.rb", -} if !master.is_pe? + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + }, + } + master_opts['master'] = { + 'node_terminus' => 'exec', + 'external_nodes' => "#{testdir}/enc.rb", + } if !master.is_pe? 
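
[Reviewer note, not part of the diff] The use_enc_environment test renamed above now also asserts that the catalog is compiled only once per run; the hunk that follows counts occurrences of 'Info: Caching catalog for' in the agent output. A minimal sketch of that counting idea, using `scan` rather than the split-and-subtract form in the test body:

    # Sketch: exactly one "Caching catalog" message means the catalog was compiled once.
    run_agent_on(agent, "--no-daemonize --onetime --verbose") do |result|
      caching_messages = result.stdout.scan(/Info: Caching catalog for/).length
      assert_equal(1, caching_messages,
                   'the catalog should be compiled and cached exactly once during the run')
    end
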
-with_puppet_running_on master, master_opts, testdir do + with_puppet_running_on(master, master_opts, testdir) do - agents.each do |agent| - run_agent_on(agent, "--no-daemonize --onetime --server #{master} --verbose") - assert_match(/expected_string/, stdout, "Did not find expected_string from \"special\" environment") + agents.each do |agent| + run_agent_on(agent, "--no-daemonize --onetime --verbose") do |result| + assert_match(/expected_string/, result.stdout, "Did not find expected_string from \"special\" environment") + caching_catalog_message_count = result.stdout.split(/Info: Caching catalog for/).length - 1 + assert_equal(caching_catalog_message_count, 1, 'Should only compile and cache the catalog once during the run') + end + end end end diff --git a/acceptance/tests/environment/use_enc_environment_for_files.rb b/acceptance/tests/environment/use_enc_environment_for_files.rb index db2e4e73a64..e03d64f817d 100644 --- a/acceptance/tests/environment/use_enc_environment_for_files.rb +++ b/acceptance/tests/environment/use_enc_environment_for_files.rb @@ -1,17 +1,31 @@ -test_name "Agent should use environment given by ENC for fetching remote files" +test_name "Agent should use environment given by ENC for fetching remote files" do -testdir = create_tmpdir_for_user master, 'respect_enc_test' + tag 'audit:high', + 'audit:integration', + 'audit:refactor', # This test should be rolled into use_enc_environment + 'server' -create_remote_file master, "#{testdir}/enc.rb", < true) + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, mode => "0770", @@ -32,41 +46,42 @@ mode => "0640", content => 'special_environment', } -MANIFEST + MANIFEST -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - 'environment_timeout' => 0, - }, - 'master' => { - 'node_terminus' => 'exec', - 'external_nodes' => "#{testdir}/enc.rb", - }, -} + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + 'environment_timeout' => 0, + }, + 'master' => { + 'node_terminus' => 'exec', + 'external_nodes' => "#{testdir}/enc.rb", + }, + } + + with_puppet_running_on(master, master_opts, testdir) do + agents.each do |agent| + atmp = agent.tmpdir('respect_enc_test') + teardown do + on(agent, "rm -rf '#{atmp}'") + end -with_puppet_running_on master, master_opts, testdir do - agents.each do |agent| - atmp = agent.tmpdir('respect_enc_test') - logger.debug "agent: #{agent} \tagent.tmpdir => #{atmp}" + logger.debug "agent: #{agent} \tagent.tmpdir => #{atmp}" - create_remote_file master, "#{testdir}/environments/special/manifests/different.pp", < "puppet:///modules/amod/testy", } - -notify { "mytemp is ${::mytemp}": } END - on master, "chmod 644 #{testdir}/environments/special/manifests/different.pp" + on(master, "chmod 644 '#{testdir}/environments/special/manifests/different.pp'") - run_agent_on(agent, "--no-daemonize --onetime --server #{master} --verbose --trace") + run_agent_on(agent, "--no-daemonize --onetime --verbose --trace") - on agent, "cat #{atmp}/special_testy" do |result| - assert_match(/special_environment/, - result.stdout, - "The file from environment 'special' was not found") + on(agent, "cat '#{atmp}/special_testy'") do |result| + assert_match(/special_environment/, + result.stdout, + "The file from environment 'special' was not found") + end end - - on agent, "rm -rf #{atmp}" end end diff --git a/acceptance/tests/environment/use_enc_environment_for_pluginsync.rb 
b/acceptance/tests/environment/use_enc_environment_for_pluginsync.rb index 56322392b66..1d4dd113eb3 100644 --- a/acceptance/tests/environment/use_enc_environment_for_pluginsync.rb +++ b/acceptance/tests/environment/use_enc_environment_for_pluginsync.rb @@ -1,17 +1,31 @@ -test_name "Agent should use environment given by ENC for pluginsync" +test_name "Agent should use environment given by ENC for pluginsync" do -testdir = create_tmpdir_for_user master, 'respect_enc_test' + tag 'audit:high', + 'audit:integration', + 'audit:refactor', # This test should be rolled into use_enc_environment + 'server' -create_remote_file master, "#{testdir}/enc.rb", < true) + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, mode => "0770", @@ -32,25 +46,30 @@ mode => "0640", content => "#special_version", } -MANIFEST - -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - }, - 'master' => { - 'node_terminus' => 'exec', - 'external_nodes' => "#{testdir}/enc.rb" - }, -} - -with_puppet_running_on master, master_opts, testdir do - - agents.each do |agent| - agent_vardir = agent.puppet['vardir'] - run_agent_on(agent, "--no-daemonize --onetime --server #{master}") - on agent, "cat \"#{agent_vardir}/lib/puppet/foo.rb\"" - assert_match(/#special_version/, stdout, "The plugin from environment 'special' was not synced") - on agent, "rm -rf \"#{agent_vardir}/lib\"" + MANIFEST + + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + }, + 'master' => { + 'node_terminus' => 'exec', + 'external_nodes' => "#{testdir}/enc.rb" + }, + } + + with_puppet_running_on(master, master_opts, testdir) do + + agents.each do |agent| + agent_vardir = agent.puppet['vardir'] + teardown do + on(agent, "rm -rf '#{agent_vardir}/lib'") + end + + run_agent_on(agent, "-t --no-daemonize --onetime") + on(agent, "cat '#{agent_vardir}/lib/puppet/foo.rb'") do |result| + assert_match(/#special_version/, result.stdout, "The plugin from environment 'special' was not synced") + end + end end end diff --git a/acceptance/tests/environment/use_environment_from_environmentpath.rb b/acceptance/tests/environment/use_environment_from_environmentpath.rb index 92f52d4cb28..14a90347474 100644 --- a/acceptance/tests/environment/use_environment_from_environmentpath.rb +++ b/acceptance/tests/environment/use_environment_from_environmentpath.rb @@ -1,29 +1,33 @@ -test_name "Use environments from the environmentpath" -require 'puppet/acceptance/classifier_utils' -extend Puppet::Acceptance::ClassifierUtils +test_name "Use environments from the environmentpath" do + require 'puppet/acceptance/classifier_utils' + extend Puppet::Acceptance::ClassifierUtils -classify_nodes_as_agent_specified_if_classifer_present + tag 'audit:high', + 'audit:integration', + 'server' -testdir = create_tmpdir_for_user master, 'use_environmentpath' + classify_nodes_as_agent_specified_if_classifer_present -def generate_environment(path_to_env, environment) - env_content = <<-EOS + testdir = create_tmpdir_for_user(master, 'use_environmentpath') + + def generate_environment(path_to_env, environment) + <<-EOS "#{path_to_env}/#{environment}":; "#{path_to_env}/#{environment}/manifests":; "#{path_to_env}/#{environment}/modules":; - EOS -end + EOS + end -def generate_module_content(module_name, options = {}) - base_path = options[:base_path] - environment = options[:environment] - env_path = options[:env_path] + def generate_module_content(module_name, options = {}) + base_path = options[:base_path] + 
environment = options[:environment] + env_path = options[:env_path] - path_to_module = [base_path, env_path, environment, "modules"].compact.join("/") - module_info = "module-#{module_name}" - module_info << "-from-#{environment}" if environment + path_to_module = [base_path, env_path, environment, "modules"].compact.join("/") + module_info = "module-#{module_name}" + module_info << "-from-#{environment}" if environment - module_content = <<-EOS + <<-EOS "#{path_to_module}/#{module_name}":; "#{path_to_module}/#{module_name}/manifests":; "#{path_to_module}/#{module_name}/files":; @@ -54,24 +58,23 @@ def generate_module_content(module_name, options = {}) mode => "0640", content => "<%= @environment_fact_#{module_name} %>" ; - EOS -end + EOS + end -def generate_site_manifest(path_to_manifest, *modules_to_include) - manifest_content = <<-EOS + def generate_site_manifest(path_to_manifest, *modules_to_include) + <<-EOS "#{path_to_manifest}/site.pp": ensure => file, mode => "0640", - content => "#{modules_to_include.map { |m| "include #{m}" }.join("\n")}" + content => "#{modules_to_include.map {|m| "include #{m}"}.join("\n")}" ; - EOS -end + EOS + end -master_user = on(master, puppet("master --configprint user")).stdout.strip -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) File { ensure => directory, - owner => #{master_user}, + owner => #{master.puppet['user']}, group => #{master.puppet['group']}, mode => "0770", } @@ -82,26 +85,26 @@ def generate_site_manifest(path_to_manifest, *modules_to_include) "#{testdir}/additional":; "#{testdir}/modules":; #{generate_environment("#{testdir}/base", "shadowed")} -#{generate_environment("#{testdir}/base", "onlybase")} -#{generate_environment("#{testdir}/additional", "shadowed")} - -#{generate_module_content("atmp", - :base_path => testdir, - :env_path => 'base', - :environment => 'shadowed')} -#{generate_site_manifest("#{testdir}/base/shadowed/manifests", "atmp", "globalmod")} - -#{generate_module_content("atmp", - :base_path => testdir, - :env_path => 'base', - :environment => 'onlybase')} -#{generate_site_manifest("#{testdir}/base/onlybase/manifests", "atmp", "globalmod")} - -#{generate_module_content("atmp", - :base_path => testdir, - :env_path => 'additional', - :environment => 'shadowed')} -#{generate_site_manifest("#{testdir}/additional/shadowed/manifests", "atmp", "globalmod")} + #{generate_environment("#{testdir}/base", "onlybase")} + #{generate_environment("#{testdir}/additional", "shadowed")} + + #{generate_module_content("atmp", + :base_path => testdir, + :env_path => 'base', + :environment => 'shadowed')} + #{generate_site_manifest("#{testdir}/base/shadowed/manifests", "atmp", "globalmod")} + + #{generate_module_content("atmp", + :base_path => testdir, + :env_path => 'base', + :environment => 'onlybase')} + #{generate_site_manifest("#{testdir}/base/onlybase/manifests", "atmp", "globalmod")} + + #{generate_module_content("atmp", + :base_path => testdir, + :env_path => 'additional', + :environment => 'shadowed')} + #{generate_site_manifest("#{testdir}/additional/shadowed/manifests", "atmp", "globalmod")} # And one global module (--modulepath setting) #{generate_module_content("globalmod", :base_path => testdir)} @@ -109,80 +112,81 @@ def generate_site_manifest(path_to_manifest, *modules_to_include) "#{testdir}/additional/production/manifests":; #{generate_site_manifest("#{testdir}/additional/production/manifests", "globalmod")} } -MANIFEST - -def 
run_with_environment(agent, environment, options = {}) - expected_exit_code = options[:expected_exit_code] || 2 - expected_strings = options[:expected_strings] - - step "running an agent in environment '#{environment}'" - atmp = agent.tmpdir("use_environmentpath_#{environment}") - - agent_config = [ - "-t", - "--server", master, - ] - agent_config << '--environment' << environment if environment - # This to test how the agent behaves when using the directory environment - # loaders (which will not load an environment if it does not exist) - agent_config << "--environmentpath='$confdir/environments'" if agent != master - agent_config << { - 'ENV' => { "FACTER_agent_file_location" => atmp }, - } + MANIFEST - on(agent, - puppet("agent", *agent_config), - :acceptable_exit_codes => [expected_exit_code]) do |result| + def run_with_environment(agent, environment, options = {}) + expected_exit_code = options[:expected_exit_code] || 2 - yield atmp, result - end + step "running an agent in environment '#{environment}'" + atmp = agent.tmpdir("use_environmentpath_#{environment}") - on agent, "rm -rf #{atmp}" -end + teardown do + on(agent, "rm -rf '#{atmp}'") + end + + agent_config = [ + "-t" + ] + agent_config << '--environment' << environment if environment + # This to test how the agent behaves when using the directory environment + # loaders (which will not load an environment if it does not exist) + agent_config << "--environmentpath='$confdir/environments'" if agent != master + agent_config << { + 'ENV' => { "FACTER_agent_file_location" => atmp }, + } + + on(agent, + puppet("agent", *agent_config), + :acceptable_exit_codes => [expected_exit_code]) do |result| + + yield atmp, result + end + end -master_opts = { - 'master' => { - 'environmentpath' => "#{testdir}/additional:#{testdir}/base", - 'basemodulepath' => "#{testdir}/modules", + master_opts = { + 'master' => { + 'environmentpath' => "#{testdir}/additional:#{testdir}/base", + 'basemodulepath' => "#{testdir}/modules", + } } -} -if master.is_pe? - master_opts['master']['basemodulepath'] << ":#{master['sitemoduledir']}" -end + if master.is_pe? 
+ master_opts['master']['basemodulepath'] << ":#{master['sitemoduledir']}" + end -with_puppet_running_on master, master_opts, testdir do - agents.each do |agent| - run_with_environment(agent, "shadowed") do |tmpdir,catalog_result| - ["module-atmp-from-shadowed", "module-globalmod"].each do |expected| - assert_match(/environment fact from #{expected}/, catalog_result.stdout) - end + with_puppet_running_on(master, master_opts, testdir) do + agents.each do |agent| + run_with_environment(agent, "shadowed") do |tmpdir, catalog_result| + ["module-atmp-from-shadowed", "module-globalmod"].each do |expected| + assert_match(/environment fact from #{expected}/, catalog_result.stdout) + end - ["module-atmp-from-shadowed", "module-globalmod"].each do |expected| - on agent, "cat #{tmpdir}/file-#{expected}" do |file_result| - assert_match(/data file from #{expected}/, file_result.stdout) + ["module-atmp-from-shadowed", "module-globalmod"].each do |expected| + on(agent, "cat '#{tmpdir}/file-#{expected}'") do |file_result| + assert_match(/data file from #{expected}/, file_result.stdout) + end end end - end - run_with_environment(agent, "onlybase") do |tmpdir,catalog_result| - ["module-atmp-from-onlybase", "module-globalmod"].each do |expected| - assert_match(/environment fact from #{expected}/, catalog_result.stdout) - end + run_with_environment(agent, "onlybase") do |tmpdir, catalog_result| + ["module-atmp-from-onlybase", "module-globalmod"].each do |expected| + assert_match(/environment fact from #{expected}/, catalog_result.stdout) + end - ["module-atmp-from-onlybase", "module-globalmod"].each do |expected| - on agent, "cat #{tmpdir}/file-#{expected}" do |file_result| - assert_match(/data file from #{expected}/, file_result.stdout) + ["module-atmp-from-onlybase", "module-globalmod"].each do |expected| + on(agent, "cat '#{tmpdir}/file-#{expected}'") do |file_result| + assert_match(/data file from #{expected}/, file_result.stdout) + end end end - end - run_with_environment(agent, nil, :expected_exit_code => 2) do |tmpdir, catalog_result| - assert_no_match(/module-atmp/, catalog_result.stdout, "module-atmp was included despite no environment being loaded") + run_with_environment(agent, "production", :expected_exit_code => 2) do |tmpdir, catalog_result| + refute_match(/module-atmp/, catalog_result.stdout, "module-atmp was included despite the default environment being loaded") - assert_match(/environment fact from module-globalmod/, catalog_result.stdout) + assert_match(/environment fact from module-globalmod/, catalog_result.stdout) - on agent, "cat #{tmpdir}/file-module-globalmod" do |file_result| - assert_match(/data file from module-globalmod/, file_result.stdout) + on(agent, "cat '#{tmpdir}/file-module-globalmod'") do |file_result| + assert_match(/data file from module-globalmod/, file_result.stdout) + end end end end diff --git a/acceptance/tests/environment/use_last_server_specified_environment.rb b/acceptance/tests/environment/use_last_server_specified_environment.rb new file mode 100644 index 00000000000..4d75ac65d08 --- /dev/null +++ b/acceptance/tests/environment/use_last_server_specified_environment.rb @@ -0,0 +1,88 @@ +test_name "Agent should use the last server-specified environment if server is authoritative" do + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + tag 'audit:high', + 'server' + + # Remove all traces of the last used environment + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do 
|command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + testdir = create_tmpdir_for_user(master, 'use_enc_env') + + create_remote_file(master, "#{testdir}/enc.rb", < true) + File { + ensure => directory, + mode => "0770", + owner => #{master.puppet['user']}, + group => #{master.puppet['group']}, + } + file { + '#{testdir}/environments':; + '#{testdir}/environments/production':; + '#{testdir}/environments/production/manifests':; + '#{testdir}/environments/special/':; + '#{testdir}/environments/special/manifests':; + } + file { '#{testdir}/environments/production/manifests/site.pp': + ensure => file, + mode => "0640", + content => 'notify { "production environment": }', + } + file { '#{testdir}/environments/special/manifests/different.pp': + ensure => file, + mode => "0640", + content => 'notify { "special environment": }', + } + MANIFEST + + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + }, + } + master_opts['master'] = { + 'node_terminus' => 'exec', + 'external_nodes' => "#{testdir}/enc.rb", + } if !master.is_pe? + + with_puppet_running_on(master, master_opts, testdir) do + agents.each do |agent| + step 'ensure the lastrunfile is absent for the first run' do + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + + step 'first run: agent makes a node request to get the environment' do + run_agent_on(agent, '--no-daemonize --onetime --debug') do |result| + assert_match(/Local environment: 'production' doesn't match server specified node environment 'special', switching agent to 'special'/, result.stdout) + assert_match(/Debug: HTTP GET .*\/puppet\/v3\/node/, result.stdout) + assert_match(/Notice: special environment/, result.stdout) + end + end + + step 'second run: agent uses the environment from lastrunfile' do + run_agent_on(agent, '--no-daemonize --onetime --debug') do |result| + assert_match(/Debug: Successfully loaded last environment from the lastrunfile/, result.stdout) + assert_match(/Notice: special environment/, result.stdout) + end + end + end + end +end diff --git a/acceptance/tests/environment/variables_refreshed_each_compilation.rb b/acceptance/tests/environment/variables_refreshed_each_compilation.rb new file mode 100644 index 00000000000..7b1fa57247b --- /dev/null +++ b/acceptance/tests/environment/variables_refreshed_each_compilation.rb @@ -0,0 +1,112 @@ +test_name 'C98115 compilation should get new values in variables on each compilation' do + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + confine :except, :platform => /^(aix|osx|solaris)/ + + tag 'audit:high', + 'audit:integration', + 'server' + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + + create_remote_file(master, "#{fq_tmp_environmentpath}/environment.conf", <<-CONF) + environment_timeout = unlimited + CONF + # the module function loading logic is different from inside a single manifest + # we exercise both here + on(master, "mkdir -p '#{fq_tmp_environmentpath}'/modules/custom_time/{manifests,functions,facts.d}") + create_remote_file(master, "#{fq_tmp_environmentpath}/modules/custom_time/manifests/init.pp", <<-FILE) + class custom_time { + $t = 
custom_time::my_system_time() + + notify { 'custom time': + message => "module_${t}_module", + } + } + FILE + create_remote_file(master, "#{fq_tmp_environmentpath}/modules/custom_time/functions/my_system_time.pp", <<-FILE) + function custom_time::my_system_time() { + $facts['custom_time'] + } + FILE + create_sitepp(master, tmp_environment, <<-SITE) + function bar() { + $facts['custom_time'] + } + class foo::bar { + notify { "local_${bar()}_local": } + } + include foo::bar + include custom_time + SITE + create_remote_file(master, "#{fq_tmp_environmentpath}/modules/custom_time/facts.d/custom_time.sh", <<-FILE) +#!/bin/bash + + +if [[ `uname` == 'Darwin' ]]; then + echo -n "custom_time=$(date +%s)" +else + echo -n "custom_time=$(date +%s%N)" +fi + FILE + + on(master, "chmod -R 0777 '#{fq_tmp_environmentpath}/'") + + windows_fact_location = "#{fq_tmp_environmentpath}/modules/custom_time/facts.d/custom_time.ps1" + create_remote_file(master, windows_fact_location, <<-FILE) +echo "custom_time=$(get-date -format HHmmssffffff)" + FILE + + on(master, "chmod -R 0666 '#{windows_fact_location}'") + + + step "run agent in #{tmp_environment}, ensure it increments the customtime with each run" do + with_puppet_running_on(master, {}) do + local_custom_time_pattern = 'local_(\d+)_local' + module_custom_time_pattern = 'module_(\d+)_module' + agents.each do |agent| + # ensure our custom facts have been synced + on(agent, + puppet("agent -t --environment '#{tmp_environment}'"), + :accept_all_exit_codes => true) + + local_custom_time1 = module_custom_time1 = nil + local_custom_time2 = module_custom_time2 = nil + + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), + :accept_all_exit_codes => [2]) do |result| + assert_match(/Notice: #{local_custom_time_pattern}/, result.stdout, 'first custom time was not as expected') + assert_match(/Notice: #{module_custom_time_pattern}/, result.stdout, 'first module uptime was not as expected') + + local_custom_time1 = result.stdout.match(/Notice: #{local_custom_time_pattern}/)[1].to_i + module_custom_time1 = result.stdout.match(/Notice: #{module_custom_time_pattern}/)[1].to_i + end + + sleep 1 + + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), + :accept_all_exit_codes => [2]) do |result| + assert_match(/Notice: #{local_custom_time_pattern}/, result.stdout, 'second custom time was not as expected') + assert_match(/Notice: #{module_custom_time_pattern}/, result.stdout, 'second module uptime was not as expected') + + local_custom_time2 = result.stdout.match(/Notice: #{local_custom_time_pattern}/)[1].to_i + module_custom_time2 = result.stdout.match(/Notice: #{module_custom_time_pattern}/)[1].to_i + end + + assert(local_custom_time2 > local_custom_time1, 'local custom time did not change as expected if at all') + assert(module_custom_time2 > module_custom_time1, 'module custom time did not change as expected if at all') + end + end + end +end diff --git a/acceptance/tests/external_ca_support/apache_external_root_ca.rb b/acceptance/tests/external_ca_support/apache_external_root_ca.rb deleted file mode 100644 index b0a00e0c9e4..00000000000 --- a/acceptance/tests/external_ca_support/apache_external_root_ca.rb +++ /dev/null @@ -1,190 +0,0 @@ -begin - require 'puppet_x/acceptance/external_cert_fixtures' -rescue LoadError - $LOAD_PATH.unshift(File.expand_path('../../../lib', __FILE__)) - require 'puppet_x/acceptance/external_cert_fixtures' -end - -# This test only runs on EL-6 master roles. 
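
Referring back to the variables_refreshed_each_compilation test above: each agent run prints a notice containing a timestamp-like custom fact value (for example "local_<digits>_local" and "module_<digits>_module"), and the test captures that number from two consecutive runs and asserts the second is strictly greater, proving the value was re-evaluated on each compilation. A minimal sketch of that extract-and-compare step follows; the sample strings and the raise-based check are illustrative assumptions, not taken from the diff.

    # Illustrative sketch only: pulling the numeric value out of a
    # "Notice: local_<digits>_local" line from two runs and comparing them,
    # as the test above does with result.stdout from each agent run.
    pattern = /Notice: local_(\d+)_local/

    first_run  = "Notice: local_1700000001_local"
    second_run = "Notice: local_1700000002_local"

    t1 = first_run.match(pattern)[1].to_i
    t2 = second_run.match(pattern)[1].to_i

    raise 'fact value did not change between compilations' unless t2 > t1
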
-confine :to, :platform => 'el-6' -confine :except, :type => 'pe' - -skip_test "Test not supported on jvm" if @options[:is_puppetserver] - -if master.use_service_scripts? - # Beaker defaults to leaving puppet running when using service scripts, - # Need to shut it down so we can start up our apache instance - on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=stopped')) - - teardown do - # And ensure that it is up again after everything is done - on(master, puppet('resource', 'service', master['puppetservice'], 'ensure=running')) - end -end - -# Verify that a trivial manifest can be run to completion. -# Supported Setup: Single, Root CA -# - Agent and Master SSL cert issued by the Root CA -# - Revocation disabled on the agent `certificate_revocation = false` -# - CA disabled on the master `ca = false` -# -test_name "Puppet agent works with Apache, both configured with externally issued certificates from independent intermediate CA's" - -step "Copy certificates and configuration files to the master..." -fixture_dir = File.expand_path('../fixtures', __FILE__) -testdir = master.tmpdir('apache_external_root_ca') -fixtures = PuppetX::Acceptance::ExternalCertFixtures.new(fixture_dir, testdir) - -# We need this variable in scope. -disable_and_reenable_selinux = nil - -# Register our cleanup steps early in a teardown so that they will happen even -# if execution aborts part way. -teardown do - step "Cleanup Apache (httpd) and /etc/hosts" - # Restore /etc/hosts - on master, "cp -p '#{testdir}/hosts' /etc/hosts" - # stop the service before moving files around - on master, "/etc/init.d/httpd stop" - on master, "mv --force /etc/httpd/conf/httpd.conf{,.external_ca_test}" - on master, "mv --force /etc/httpd/conf/httpd.conf{.orig,}" - - if disable_and_reenable_selinux - step "Restore the original state of SELinux" - on master, "setenforce 1" - end -end - -# Read all of the CA certificates. - -# Copy all of the x.509 fixture data over to the master. 
-create_remote_file master, "#{testdir}/ca_root.crt", fixtures.root_ca_cert -create_remote_file master, "#{testdir}/ca_agent.crt", fixtures.agent_ca_cert -create_remote_file master, "#{testdir}/ca_master.crt", fixtures.master_ca_cert -create_remote_file master, "#{testdir}/ca_master.crl", fixtures.master_ca_crl -create_remote_file master, "#{testdir}/ca_master_bundle.crt", "#{fixtures.master_ca_cert}\n#{fixtures.root_ca_cert}\n" -create_remote_file master, "#{testdir}/ca_agent_bundle.crt", "#{fixtures.agent_ca_cert}\n#{fixtures.root_ca_cert}\n" -create_remote_file master, "#{testdir}/agent.crt", fixtures.agent_cert -create_remote_file master, "#{testdir}/agent.key", fixtures.agent_key -create_remote_file master, "#{testdir}/agent_email.crt", fixtures.agent_email_cert -create_remote_file master, "#{testdir}/agent_email.key", fixtures.agent_email_key -create_remote_file master, "#{testdir}/master.crt", fixtures.master_cert -create_remote_file master, "#{testdir}/master.key", fixtures.master_key -create_remote_file master, "#{testdir}/master_rogue.crt", fixtures.master_cert_rogue -create_remote_file master, "#{testdir}/master_rogue.key", fixtures.master_key_rogue - -## -# Now create the master and agent puppet.conf -# -# We need to create the public directory for Passenger and the modules -# directory to avoid `Error: Could not evaluate: Could not retrieve information -# from environment production source(s) puppet://master1.example.org/plugins` -on master, "mkdir -p #{testdir}/etc/{master/{public,modules/empty/lib},agent}" -# Backup /etc/hosts -on master, "cp -p /etc/hosts '#{testdir}/hosts'" - -# Make master1.example.org resolve if it doesn't already. -on master, "grep -q -x '#{fixtures.host_entry}' /etc/hosts || echo '#{fixtures.host_entry}' >> /etc/hosts" - -create_remote_file master, "#{testdir}/etc/agent/puppet.conf", fixtures.agent_conf -create_remote_file master, "#{testdir}/etc/agent/puppet.conf.crl", fixtures.agent_conf_crl -create_remote_file master, "#{testdir}/etc/agent/puppet.conf.email", fixtures.agent_conf_email -create_remote_file master, "#{testdir}/etc/master/puppet.conf", fixtures.master_conf - -# auth.conf to allow *.example.com access to the rest API -create_remote_file master, "#{testdir}/etc/master/auth.conf", fixtures.auth_conf - -create_remote_file master, "#{testdir}/etc/master/config.ru", fixtures.config_ru - -step "Set filesystem permissions and ownership for the master" -# These permissions are required for Passenger to start Puppet as puppet -on master, "chown -R puppet:puppet #{testdir}/etc/master" - -# These permissions are just for testing, end users should protect their -# private keys. -on master, "chmod -R a+rX #{testdir}" - -agent_cmd_prefix = "--confdir #{testdir}/etc/agent --vardir #{testdir}/etc/agent/var" - -step "Configure EPEL" -epel_release_path = "http://mirror.us.leaseweb.net/epel/6/i386/epel-release-6-8.noarch.rpm" -on master, "rpm -q epel-release || (yum -y install #{epel_release_path} && yum -y upgrade epel-release)" - -step "Configure Apache and Passenger" -packages = [ 'httpd', 'mod_ssl', 'mod_passenger', 'rubygem-passenger', 'policycoreutils-python' ] -packages.each do |pkg| - on master, "rpm -q #{pkg} || (yum -y install #{pkg})" -end - -create_remote_file master, "#{testdir}/etc/httpd.conf", fixtures.httpd_conf -on master, 'test -f /etc/httpd/conf/httpd.conf.orig || cp -p /etc/httpd/conf/httpd.conf{,.orig}' -on master, "cat #{testdir}/etc/httpd.conf > /etc/httpd/conf/httpd.conf" - -step "Make SELinux and Apache play nicely together..." 
- -on master, "sestatus" do - if stdout.match(/Current mode:.*enforcing/) - disable_and_reenable_selinux = true - else - disable_and_reenable_selinux = false - end -end - -if disable_and_reenable_selinux - on master, "setenforce 0" -end - -step "Start the Apache httpd service..." -on master, 'service httpd restart' - -# Move the agent SSL cert and key into place. -# The filename must match the configured certname, otherwise Puppet will try -# and generate a new certificate and key -step "Configure the agent with the externally issued certificates" -on master, "mkdir -p #{testdir}/etc/agent/ssl/{public_keys,certs,certificate_requests,private_keys,private}" -create_remote_file master, "#{testdir}/etc/agent/ssl/certs/#{fixtures.agent_name}.pem", fixtures.agent_cert -create_remote_file master, "#{testdir}/etc/agent/ssl/private_keys/#{fixtures.agent_name}.pem", fixtures.agent_key - -# Now, try and run the agent on the master against itself. -step "Successfully run the puppet agent on the master" -on master, puppet_agent("#{agent_cmd_prefix} --test"), :acceptable_exit_codes => (0..255) do - assert_no_match /Creating a new SSL key/, stdout - assert_no_match /\Wfailed\W/i, stderr - assert_no_match /\Wfailed\W/i, stdout - assert_no_match /\Werror\W/i, stderr - assert_no_match /\Werror\W/i, stdout - # Assert the exit code so we get a "Failed test" instead of an "Errored test" - assert exit_code == 0 -end - -step "Agent refuses to connect to a rogue master" -on master, puppet_agent("#{agent_cmd_prefix} --ssl_client_ca_auth=#{testdir}/ca_master.crt --masterport=8141 --test"), :acceptable_exit_codes => (0..255) do - assert_no_match /Creating a new SSL key/, stdout - assert_match /certificate verify failed/i, stderr - assert_match /The server presented a SSL certificate chain which does not include a CA listed in the ssl_client_ca_auth file/i, stderr - assert exit_code == 1 -end - -step "Master accepts client cert with email address in subject" -on master, "cp #{testdir}/etc/agent/puppet.conf{,.no_email}" -on master, "cp #{testdir}/etc/agent/puppet.conf{.email,}" -on master, puppet_agent("#{agent_cmd_prefix} --test"), :acceptable_exit_codes => (0..255) do - assert_no_match /\Wfailed\W/i, stdout - assert_no_match /\Wfailed\W/i, stderr - assert_no_match /\Werror\W/i, stdout - assert_no_match /\Werror\W/i, stderr - # Assert the exit code so we get a "Failed test" instead of an "Errored test" - assert exit_code == 0 -end - -step "Agent refuses to connect to revoked master" -on master, "cp #{testdir}/etc/agent/puppet.conf{,.no_crl}" -on master, "cp #{testdir}/etc/agent/puppet.conf{.crl,}" - -revoke_opts = "--hostcrl #{testdir}/ca_master.crl" -on master, puppet_agent("#{agent_cmd_prefix} #{revoke_opts} --test"), :acceptable_exit_codes => (0..255) do - assert_match /certificate revoked.*?example.org/, stderr - assert exit_code == 1 -end - -step "Finished testing External Certificates" diff --git a/acceptance/tests/external_ca_support/fixtures/agent-ca/ca-agent-ca.crt b/acceptance/tests/external_ca_support/fixtures/agent-ca/ca-agent-ca.crt deleted file mode 100644 index 0337bc3a5d5..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/agent-ca/ca-agent-ca.crt +++ /dev/null @@ -1,23 +0,0 @@ ------BEGIN CERTIFICATE----- -MIID3jCCAsagAwIBAgIBATANBgkqhkiG9w0BAQUFADBJMRAwDgYDVQQDDAdSb290 -IENBMRowGAYDVQQLDBFTZXJ2ZXIgT3BlcmF0aW9uczEZMBcGA1UECgwQRXhhbXBs -ZSBPcmcsIExMQzAeFw0xNDA0MDgwMTI1MzZaFw0zNDA0MDMwMTI1MzZaMH0xIzAh -BgNVBAMTGkludGVybWVkaWF0ZSBDQSAoYWdlbnQtY2EpMR8wHQYJKoZIhvcNAQkB 
-FhB0ZXN0QGV4YW1wbGUub3JnMRkwFwYDVQQKExBFeGFtcGxlIE9yZywgTExDMRow -GAYDVQQLExFTZXJ2ZXIgT3BlcmF0aW9uczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBANAjnBQPul4VZp8/PgnQxZtJQHhgWCRtLw5KDoZHFfQQxGW6utHb -MPIoX4qgJDb8msojb7ZO63C2BAjO5FHwhAwk3SciZX7VEt5YUYg0X1J7GyWHKWEt -yXEiIlXZ0xfXdzZ0kPskITQTLmKav7d08cN8SSqhAMeWhbiZ9xaCFWnYneqGdHc/ -Ps8EPszuJTiwrJsQtoxXFEdZfnJctlleGyZZFk/zg4M3P3RWr/ATBnMqL1Q4VfTd -9C23p+6kYhrYMxfWrawWAqyzn/G17X1TzQY4qW9Imn+RYLEQeBkO+KTl0Y+eaIOD -1PLfGaUu+XUumcMcbqyYgM5heqPEKHMs3g0CAwEAAaOBnDCBmTB5BgNVHSMEcjBw -gBQWr4Al4/rqSL+RM2YB/VHvJGdMY6FNpEswSTEQMA4GA1UEAwwHUm9vdCBDQTEa -MBgGA1UECwwRU2VydmVyIE9wZXJhdGlvbnMxGTAXBgNVBAoMEEV4YW1wbGUgT3Jn -LCBMTEOCCQCxuQRy+xEn4zAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAN -BgkqhkiG9w0BAQUFAAOCAQEAqyvoi3vDeE0Do7k2QiVF4WBhfGEW6921+UMVgMqB -SSV4mJ98ep4lrJA4VZPEW7jZWbox8fpH2WmA4DSK6lBMf7MoLSuDxaTmVDCvauGU -jtOD4ejIKWcJN8tkyFjz7DCca8x7EryZpr5sZMU78jZ/jOVwIK85FX/5ptQdoyo+ -j7TxWz464bUrlCOyzZEKIDViFeahY6Krfsfn60lmaWjXD5WSc9g5V/RA59cECpiT -Dl9Li9Weu0aXoF8nmWVhhBI1drmqvKbffAvQ42K4x2OTFG3r5wbCRSZTCGT2BWZW -M2HXCE5pMoTvM6H4PbMJsJw/x4qonM9HG81EcjHtKDUFGA== ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/agent-ca/ca-agent-ca.key b/acceptance/tests/external_ca_support/fixtures/agent-ca/ca-agent-ca.key deleted file mode 100644 index c83dca3b51c..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/agent-ca/ca-agent-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDQI5wUD7peFWaf -Pz4J0MWbSUB4YFgkbS8OSg6GRxX0EMRlurrR2zDyKF+KoCQ2/JrKI2+2TutwtgQI -zuRR8IQMJN0nImV+1RLeWFGINF9SexslhylhLclxIiJV2dMX13c2dJD7JCE0Ey5i -mr+3dPHDfEkqoQDHloW4mfcWghVp2J3qhnR3Pz7PBD7M7iU4sKybELaMVxRHWX5y -XLZZXhsmWRZP84ODNz90Vq/wEwZzKi9UOFX03fQtt6fupGIa2DMX1q2sFgKss5/x -te19U80GOKlvSJp/kWCxEHgZDvik5dGPnmiDg9Ty3xmlLvl1LpnDHG6smIDOYXqj -xChzLN4NAgMBAAECggEARD2Ym584fEZJ+iYzAebYEvymTZFQ9MhzaBzxvCasVPP2 -YGAjhlB2ML757CprFTgmy+VoZ/5iBPc4RWcHxrGzqYOgmocVfcsAP7P3L0/0fMdt -9BTnhTwM0rHdTgZ3xlZXeJwpOJ304Oz1BVE1UEHgTjZ+iqJ07fs05nxcXZ3SxXuu -yte/CtOxfiu12qw0Pa3en/wvkqeHPMrPYLD4PJznPIFmcfzVH6qO4DWtblW4HHLW -OqoqgjpuDLe1hjN5RZY0dLnvuKAr1d+ZKhvvuPPKGUR9J2/h/vV1ZMJDW45zJIu3 -XfoFUxxnrSBynBtyfFwkC2Btriryuu9HdfnMxCHpYQKBgQD/r0f36Zj7M571E+5Q -owCAE0qHRd/++p+KA3x/mVnECQ4c42QVwAwW99wd062k/9IbzVS6okbz1tKNsSvS -7TgWc1qAWeoxV8y2Fo3ovs8mOuRxnKMjWwlf9vOVEr49r1h/CP9iEzcjW147hw0m -EyWdBFBLQVLi/XdbguJW9e0bxQKBgQDQZVGM88r60yzo50kFLx4gVPx7nH8LDLkM -HM6Lxk5UQxbHZEzpGpxM+GEypHdRb5d5uITTzZSmosRuUc73mvTJE8hc5kcSZPsh -pEpSS4El7gcp/cmDNrHpqJdW4VwftJ8WYwFNOCCgLcmSNLJovc11j5NtGeP+Leqh -EsjWXOr1qQKBgAxuBv+kWY2MuuOLLoC5C+MuDOd6nCMXJ/5boQfK+rQvBIKfA1ST -W4MaVZcVnVFyJlK3rrDMBsr/3IiK3miIo7tjrDilJl9ztz365rcz33oqTsS/Kqcj -W9dQeBL9MEZrac/zLgcki/+qB3C5Zgg90gxKE2U1LcRfMhg+yqYTmo1JAoGBALWf -J+TdcJELzP8q26Pt/aaWCvo8WSirLPdWf9inuwqK8eZTDwi1jXUzn5qAZhEOXYjS -/MiPSje0cdfn6qY3YZGBcUUt2NE6OviF89QnQ+ZnvcymB6MY3xPSQBuTCzQCugfL -v42qFh0j6qJG1RqeGNuVhxo1z1NudydsdKcGkiwJAoGAYNbfuY3ccWmvX3K9ufL9 -M8m9ADXVE0o9LUQzZZwdv9IsQoeyufR7q0NUxrLqsracbJVhIoRzWZ7AUsWZ2qiX -eME8gioXoJVShTw9TpZY/nuH72iP/SbpZ9s+/43wNP0PTCS2ZQKwxmszz8Eg3qxN -D6ThCdnUCDA4JNQou0GozRQ= ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/agent-ca/openssl.conf b/acceptance/tests/external_ca_support/fixtures/agent-ca/openssl.conf deleted file mode 100644 index 4259157e14a..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/agent-ca/openssl.conf +++ /dev/null @@ -1,96 +0,0 @@ -SAN = DNS:puppet - -[ca] -default_ca = master_ca_config 
- -# Root CA -[root_ca_config] -certificate = /tmp/certchain.KDOYxTc2/agent-ca/ca-agent-ca.crt -private_key = /tmp/certchain.KDOYxTc2/agent-ca/ca-agent-ca.key -database = /tmp/certchain.KDOYxTc2/agent-ca/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/agent-ca/certs -serial = /tmp/certchain.KDOYxTc2/agent-ca/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -policy = root_ca_policy -x509_extensions = root_ca_exts - -[root_ca_policy] -commonName = supplied -emailAddress = supplied -organizationName = supplied -organizationalUnitName = supplied - -[root_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:true -keyUsage = keyCertSign, cRLSign - -# Master CA -[master_ca_config] -certificate = /tmp/certchain.KDOYxTc2/agent-ca/ca-agent-ca.crt -private_key = /tmp/certchain.KDOYxTc2/agent-ca/ca-agent-ca.key -database = /tmp/certchain.KDOYxTc2/agent-ca/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/agent-ca/certs -serial = /tmp/certchain.KDOYxTc2/agent-ca/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -policy = master_ca_policy -x509_extensions = master_ca_exts - -# Master CA (Email) -[master_ca_email_config] -certificate = /tmp/certchain.KDOYxTc2/agent-ca/ca-agent-ca.crt -private_key = /tmp/certchain.KDOYxTc2/agent-ca/ca-agent-ca.key -database = /tmp/certchain.KDOYxTc2/agent-ca/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/agent-ca/certs -serial = /tmp/certchain.KDOYxTc2/agent-ca/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -email_in_dn = yes - -policy = master_ca_email_policy -x509_extensions = master_ca_exts - -[master_ca_policy] -commonName = supplied - -[master_ca_email_policy] -commonName = supplied -emailAddress = supplied - -# default extensions for clients -[master_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth - -[master_ssl_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth -subjectAltName = $ENV::SAN - -# extensions for the master certificate (specifically adding subjectAltName) -[master_self_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth -# include the master's fqdn here, as well as in the CN, to work -# around https://bugs.ruby-lang.org/issues/6493 -# NOTE: Alt Names should be set in the request, so they know -# their FQDN -# subjectAltName = DNS:puppet,DNS:agent-ca.example.org - diff --git a/acceptance/tests/external_ca_support/fixtures/agent-ca/serial b/acceptance/tests/external_ca_support/fixtures/agent-ca/serial deleted file mode 100644 index adb9de8ee03..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/agent-ca/serial +++ /dev/null @@ -1 +0,0 @@ -08 diff --git a/acceptance/tests/external_ca_support/fixtures/auth.conf b/acceptance/tests/external_ca_support/fixtures/auth.conf deleted file mode 100644 index 499c544f6d5..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/auth.conf +++ /dev/null @@ -1,60 +0,0 @@ -# Puppet 4.0.0 auth.conf, modified to allow requests from example.org for -# external ca testing. 
- -# allow nodes to retrieve their own catalog -path ~ ^/puppet/v3/catalog/([^/]+)$ -method find -allow *.example.org -allow $1 - -# allow nodes to retrieve their own node definition -path ~ ^/puppet/v3/node/([^/]+)$ -method find -allow *.example.org -allow $1 - -# allow all nodes to access the certificates services -path /puppet-ca/v1/certificate_revocation_list/ca -method find -allow * - -# allow all nodes to store their own reports -path ~ ^/puppet/v3/report/([^/]+)$ -method save -allow *.example.org -allow $1 - -# Allow all nodes to access all file services; this is necessary for -# pluginsync, file serving from modules, and file serving from custom -# mount points (see fileserver.conf). Note that the `/file` prefix matches -# requests to both the file_metadata and file_content paths. See "Examples" -# above if you need more granular access control for custom mount points. -path /puppet/v3/file -allow * - -### Unauthenticated ACLs, for clients without valid certificates; authenticated -### clients can also access these paths, though they rarely need to. - -# allow access to the CA certificate; unauthenticated nodes need this -# in order to validate the puppet master's certificate -path /puppet-ca/v1/certificate/ca -auth any -method find -allow * - -# allow nodes to retrieve the certificate they requested earlier -path /puppet-ca/v1/certificate/ -auth any -method find -allow * - -# allow nodes to request a new certificate -path /puppet-ca/v1/certificate_request -auth any -method find, save -allow * - -# deny everything else; this ACL is not strictly necessary, but -# illustrates the default policy. -path / -auth any diff --git a/acceptance/tests/external_ca_support/fixtures/certchain.sh b/acceptance/tests/external_ca_support/fixtures/certchain.sh deleted file mode 100755 index 8735dfb6309..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/certchain.sh +++ /dev/null @@ -1,554 +0,0 @@ -#! /bin/bash - -## NOTE: -## This script requires the following in /etc/hosts: -## 127.0.0.2 puppet master1.example.org - -# This will fail with a stock puppet 3.1.1, but will succeed if all of the -# certificate subjects contain only the "CN" portion, and no O, OU, or -# emailAddress. 
- -# basic config to describe the environment -# B="/tmp/certchain" -B="$(mktemp -d -t certchain.XXXXXXXX)" -HTTPS_PORT=8443 -OPENSSL=$(which openssl) - -# utility method to dedent a heredoc -dedent() { - python -c 'import sys, textwrap; print textwrap.dedent(sys.stdin.read())' -} - -# invoke openssl -openssl() { - echo "----" - echo "running" ${OPENSSL} ${@} - echo " in $PWD" - ${OPENSSL} "${@}" -} - -show_cert() { - local cert="$1" - # openssl x509 -in "${cert}" -noout -text -nameopt RFC2253 - openssl x509 -in "${cert}" -noout -text -} - -hash_cert() { - local cert="$1" - local certdir="${B}/certdir" - local h=$(${OPENSSL} x509 -hash -noout -in ${cert}) - mkdir -p "${certdir}" - ln -s "$cert" "${certdir}/${h}.0" -} - -show_crl() { - local crl="$1" - openssl crl -in "${crl}" -noout -text -} - -hash_crl() { - local crl="$1" - local certdir="${B}/certdir" - local h=$(${OPENSSL} crl -hash -noout -in ${crl}) - mkdir -p "${certdir}" - ln -s "$crl" "${certdir}/${h}.r0" -} - -# clean out any messes this script has made -clean_up() { - stop_apache - rm -rf "$B" -} - -stop_apache() { - local pid pidfile="${B}/apache/httpd.pid" - while true; do - pid=$(cat "${pidfile}" 2>/dev/null || true) - [ -z "$pid" ] && break # break if the pid is gone - kill "$pid" || break # break if the kill fails (process is gone) - sleep 0.1 - done -} - -# perform basic setup: make directories, etc. -set_up() { - mkdir -p "$B" -} - -# create CA certificates: -# -# * $B/root_ca -# * $B/master{1..2}_ca -# -# with each containing: -# -# * openssl.conf -- suitable for signing certificates -# * ca-$name.key -- PEM format certificate key, with no password -# * ca-$name.crt -- PEM format certificate -create_ca_certs() { - local name cn dir subj ca_config - for name in root agent-ca master-ca; do - dir="${B}/${name}" - mkdir -p "${dir}" - ( cd "${dir}" - # if this is the root cert, make a self-signed cert - if [ "$name" = "root" ]; then - subj="/CN=Root CA/OU=Server Operations/O=Example Org, LLC" - openssl req -new -newkey rsa:2048 -days 7300 -nodes -x509 \ - -subj "${subj}" -keyout "ca-${name}.key" -out "ca-${name}.crt" - else - # make a new key for the CA - openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:2048 -out "ca-${name}.key" - - # build a CSR out of it - dedent > openssl.tmp << OPENSSL_TMP - [req] - prompt = no - distinguished_name = dn_config - - [dn_config] - commonName = Intermediate CA (${name}) - emailAddress = test@example.org - organizationalUnitName = Server Operations - organizationName = Example Org, LLC -OPENSSL_TMP - openssl req -config openssl.tmp -new -key "ca-${name}.key" -out "ca-${name}.csr" - rm openssl.tmp - - # sign it with the root CA - openssl ca -config ../root/openssl.conf -in "ca-${name}.csr" -notext -out "ca-${name}.crt" -batch - - # clean up the now-redundant csr - rm "ca-${name}.csr" - fi - - # set up the CA config; this uses the same file for all, but with different options - # for the root and master CAs - [ "$name" = "root" ] && ca_config=root_ca_config || ca_config=master_ca_config - - dedent > openssl.conf << OPENSSL_CONF - SAN = DNS:puppet - - [ca] - default_ca = ${ca_config} - - # Root CA - [root_ca_config] - certificate = ${dir}/ca-${name}.crt - private_key = ${dir}/ca-${name}.key - database = ${dir}/inventory.txt - new_certs_dir = ${dir}/certs - serial = ${dir}/serial - - default_crl_days = 7300 - default_days = 7300 - default_md = sha1 - - policy = root_ca_policy - x509_extensions = root_ca_exts - - [root_ca_policy] - commonName = supplied - emailAddress = supplied - 
organizationName = supplied - organizationalUnitName = supplied - - [root_ca_exts] - authorityKeyIdentifier = keyid,issuer:always - basicConstraints = critical,CA:true - keyUsage = keyCertSign, cRLSign - - # Master CA - [master_ca_config] - certificate = ${dir}/ca-${name}.crt - private_key = ${dir}/ca-${name}.key - database = ${dir}/inventory.txt - new_certs_dir = ${dir}/certs - serial = ${dir}/serial - - default_crl_days = 7300 - default_days = 7300 - default_md = sha1 - - policy = master_ca_policy - x509_extensions = master_ca_exts - - # Master CA (Email) - [master_ca_email_config] - certificate = ${dir}/ca-${name}.crt - private_key = ${dir}/ca-${name}.key - database = ${dir}/inventory.txt - new_certs_dir = ${dir}/certs - serial = ${dir}/serial - - default_crl_days = 7300 - default_days = 7300 - default_md = sha1 - - email_in_dn = yes - - policy = master_ca_email_policy - x509_extensions = master_ca_exts - - [master_ca_policy] - commonName = supplied - - [master_ca_email_policy] - commonName = supplied - emailAddress = supplied - - # default extensions for clients - [master_ca_exts] - authorityKeyIdentifier = keyid,issuer:always - basicConstraints = critical,CA:false - keyUsage = keyEncipherment, digitalSignature - extendedKeyUsage = serverAuth, clientAuth - - [master_ssl_exts] - authorityKeyIdentifier = keyid,issuer:always - basicConstraints = critical,CA:false - keyUsage = keyEncipherment, digitalSignature - extendedKeyUsage = serverAuth, clientAuth - subjectAltName = \$ENV::SAN - - # extensions for the master certificate (specifically adding subjectAltName) - [master_self_ca_exts] - authorityKeyIdentifier = keyid,issuer:always - basicConstraints = critical,CA:false - keyUsage = keyEncipherment, digitalSignature - extendedKeyUsage = serverAuth, clientAuth - # include the master's fqdn here, as well as in the CN, to work - # around https://bugs.ruby-lang.org/issues/6493 - # NOTE: Alt Names should be set in the request, so they know - # their FQDN - # subjectAltName = DNS:puppet,DNS:${name}.example.org -OPENSSL_CONF - touch inventory.txt - mkdir certs - echo 01 > serial - - show_cert "${dir}/ca-${name}.crt" - hash_cert "${dir}/ca-${name}.crt" - - # generate an empty CRL for this CA - openssl ca -config "${dir}/openssl.conf" -gencrl -out "${dir}/ca-${name}.crl" - - show_crl "${dir}/ca-${name}.crl" - hash_crl "${dir}/ca-${name}.crl" - ) - done -} - -# revoke leaf cert for $1 issued by master CA $2 -revoke_leaf_cert() { - local fqdn="$1" - local ca="${2:-agent-ca}" - local dir="${B}/${ca}" - - # revoke the cert and regenerate the crl - openssl ca -config "${dir}/openssl.conf" -revoke "${B}/leaves/${fqdn}.issued_by.${ca}.crt" - openssl ca -config "${dir}/openssl.conf" -gencrl -out "${dir}/ca-${ca}.crl" - show_crl "${dir}/ca-${ca}.crl" - # kill -HUP $(< "${B}/apache/httpd.pid") -} - -# revoke CA cert for $1 -revoke_ca_cert() { - local master="$1" - local dir="${B}/root" - - # revoke the cert and regenerate the crl - openssl ca -config "${dir}/openssl.conf" -revoke "${B}/${master}/ca-${master}.crt" - openssl ca -config "${dir}/openssl.conf" -gencrl -out "${dir}/ca-root.crl" - show_crl "${dir}/ca-root.crl" - kill -HUP $(< "${B}/apache/httpd.pid") -} - -# create a "leaf" certificate for the given fqdn, signed by the given ca name. 
-# $fqdn.issued_by.${ca}.{key,crt} will be placed in "${B}/leaves" -create_leaf_cert() { - local fqdn="$1" ca="$2" exts="$3" - local masterdir="${B}/${ca}" - local dir="${B}/leaves" - local fname="${fqdn}.issued_by.${ca}" - - [ -n "$exts" ] && exts="-extensions $exts" - - mkdir -p "${dir}" - ( cd "${dir}" - - openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:2048 -out "${fname}.key" - openssl req -subj "/CN=${fqdn}" -new -key "${fname}.key" -out "${fname}.csr" - CN="${fqdn}" SAN="DNS:${fqdn}, DNS:${fqdn%%.*}, DNS:puppet, DNS:puppetmaster" \ - openssl ca -config "${B}/${ca}/openssl.conf" -in "${fname}.csr" -notext \ - -out "${fname}.crt" -batch $exts - ) - show_cert "${dir}/${fname}.crt" -} - -# Note, we can parameterize SubjectAltNames using environment variables. -create_leaf_certs() { - create_leaf_cert master1.example.org master-ca master_ssl_exts - create_leaf_cert master2.example.org master-ca master_ssl_exts - - create_leaf_cert agent1.example.org agent-ca - create_leaf_cert agent2.example.org agent-ca - create_leaf_cert agent3.example.org agent-ca - - create_leaf_cert master1.example.org agent-ca master_ssl_exts # rogue - # create_leaf_cert master1.example.org root master_ssl_exts # rogue - - create_leaf_cert agent1.example.org master-ca # rogue - # create_leaf_cert agent1.example.org root # rogue -} - -# create a "leaf" certificate for the given fqdn, signed by the given ca name, -# with an email address in the subject. -# $fqdn.issued_by.${ca}.{key,crt} will be placed in "${B}/leaves" -create_leaf_email_cert() { - local fqdn="$1" ca="$2" exts="$3" - local masterdir="${B}/${ca}" - local dir="${B}/leaves" - local fname="${fqdn}.issued_by.${ca}" - - mkdir -p "${dir}" - ( cd "${dir}" - - openssl genpkey -algorithm RSA -pkeyopt rsa_keygen_bits:2048 -out "${fname}.key" - openssl req -subj "/CN=${fqdn}/emailAddress=test@example.com" -new -key "${fname}.key" -out "${fname}.csr" - - openssl ca -config "${B}/${ca}/openssl.conf" -name master_ca_email_config \ - -in "${fname}.csr" -notext -out "${fname}.crt" -batch $exts_arg - ) - show_cert "${dir}/${fname}.crt" -} - -create_leaf_email_certs() { - create_leaf_email_cert master-email1.example.org master-ca master_self_ca_exts - create_leaf_email_cert master-email2.example.org master-ca master_self_ca_exts - create_leaf_email_cert agent-email1.example.org agent-ca - create_leaf_email_cert agent-email2.example.org agent-ca - create_leaf_email_cert agent-email3.example.org agent-ca -} - -set_up_apache() { - local apachedir="${B}/apache" - mkdir -p "${apachedir}/puppetmaster/public" - - echo 'passed'> "${apachedir}/puppetmaster/public/test.txt" - dedent > "${apachedir}/httpd.conf" < - Require all granted - - - RackAutoDetect On - RackBaseURI / -HTTPD_CONF -} - -set_up_puppetmaster() { - local apachedir="${B}/apache" - local masterdir="${B}/puppetmaster" - local confdir="${masterdir}/conf" - local environmentdir="${confdir}/environments/production" - mkdir -p "${confdir}" "${masterdir}/var" "${environmentdir}/manifests" - dedent > "${apachedir}/puppetmaster/config.ru" < "${masterdir}/conf/puppet.conf" < "${environmentdir}/manifests/site.pp" < "yes I was" - } - } -SITE_PP -} - -start_apache() { - local apachedir="${B}/apache" - if ! httpd -f "${apachedir}/httpd.conf"; then - [ -f "${apachedir}/error_log" ] && tail "${apachedir}/error_log" - false - fi -} - -check_apache() { - # verify the SSL config with openssl. Note that s_client exits with 0 - # no matter what, so this greps the output for an OK status. 
Also note - # that this only checks that the validation of the server certs is OK, since - # client validation is optional in the httpd config. - echo $'GET /test.txt HTTP/1.0\n' | \ - openssl s_client -connect "127.0.0.1:${HTTPS_PORT}" -verify 2 \ - -cert "${B}/leaves/client2a.example.org.crt" \ - -key "${B}/leaves/client2a.example.org.key" \ - -CAfile "${B}/root/ca-root.crt" \ - | tee "${B}/verify.out" - cat "${B}/apache/error_log" - grep -q "Verify return code: 0 (ok)" "${B}/verify.out" -} - -check_puppetmaster() { - # this is insecure, because otherwise curl will check that 127.0.0.1 == - # master1.example.org and fail; validation of the server certs is done - # above in check_apache, so this is fine. - curl -vks --fail \ - --header 'Accept: yaml' \ - --cert "${B}/leaves/client2a.example.org.crt" \ - --key "${B}/leaves/client2a.example.org.key" \ - "https://127.0.0.1:${HTTPS_PORT}/puppet/v3/catalog/client2a.example.org?environment=production" >/dev/null - echo -} - -# set up the agent with the given fqdn -set_up_agent() { - local fqdn="$1" - local agentdir="${B}/agent" - mkdir -p "${agentdir}/conf" "${agentdir}/var" - mkdir -p "${agentdir}/conf/ssl/private_keys" "${agentdir}/conf/ssl/certs" - - dedent > "${agentdir}/conf/puppet.conf" < -StartServers 8 -MinSpareServers 5 -MaxSpareServers 20 -ServerLimit 256 -MaxClients 256 -MaxRequestsPerChild 4000 - - - -StartServers 4 -MaxClients 300 -MinSpareThreads 25 -MaxSpareThreads 75 -ThreadsPerChild 25 -MaxRequestsPerChild 0 - - -LoadModule auth_basic_module modules/mod_auth_basic.so -LoadModule auth_digest_module modules/mod_auth_digest.so -LoadModule authn_file_module modules/mod_authn_file.so -LoadModule authn_alias_module modules/mod_authn_alias.so -LoadModule authn_anon_module modules/mod_authn_anon.so -LoadModule authn_dbm_module modules/mod_authn_dbm.so -LoadModule authn_default_module modules/mod_authn_default.so -LoadModule authz_host_module modules/mod_authz_host.so -LoadModule authz_user_module modules/mod_authz_user.so -LoadModule authz_owner_module modules/mod_authz_owner.so -LoadModule authz_groupfile_module modules/mod_authz_groupfile.so -LoadModule authz_dbm_module modules/mod_authz_dbm.so -LoadModule authz_default_module modules/mod_authz_default.so -LoadModule ldap_module modules/mod_ldap.so -LoadModule authnz_ldap_module modules/mod_authnz_ldap.so -LoadModule include_module modules/mod_include.so -LoadModule log_config_module modules/mod_log_config.so -LoadModule logio_module modules/mod_logio.so -LoadModule env_module modules/mod_env.so -LoadModule ext_filter_module modules/mod_ext_filter.so -LoadModule mime_magic_module modules/mod_mime_magic.so -LoadModule expires_module modules/mod_expires.so -LoadModule deflate_module modules/mod_deflate.so -LoadModule headers_module modules/mod_headers.so -LoadModule usertrack_module modules/mod_usertrack.so -LoadModule setenvif_module modules/mod_setenvif.so -LoadModule mime_module modules/mod_mime.so -LoadModule dav_module modules/mod_dav.so -LoadModule status_module modules/mod_status.so -LoadModule autoindex_module modules/mod_autoindex.so -LoadModule info_module modules/mod_info.so -LoadModule dav_fs_module modules/mod_dav_fs.so -LoadModule vhost_alias_module modules/mod_vhost_alias.so -LoadModule negotiation_module modules/mod_negotiation.so -LoadModule dir_module modules/mod_dir.so -LoadModule actions_module modules/mod_actions.so -LoadModule speling_module modules/mod_speling.so -LoadModule userdir_module modules/mod_userdir.so -LoadModule alias_module modules/mod_alias.so 
-LoadModule substitute_module modules/mod_substitute.so -LoadModule rewrite_module modules/mod_rewrite.so -LoadModule proxy_module modules/mod_proxy.so -LoadModule proxy_balancer_module modules/mod_proxy_balancer.so -LoadModule proxy_ftp_module modules/mod_proxy_ftp.so -LoadModule proxy_http_module modules/mod_proxy_http.so -LoadModule proxy_ajp_module modules/mod_proxy_ajp.so -LoadModule proxy_connect_module modules/mod_proxy_connect.so -LoadModule cache_module modules/mod_cache.so -LoadModule suexec_module modules/mod_suexec.so -LoadModule disk_cache_module modules/mod_disk_cache.so -LoadModule cgi_module modules/mod_cgi.so -LoadModule version_module modules/mod_version.so - -LoadModule ssl_module modules/mod_ssl.so -LoadModule passenger_module modules/mod_passenger.so - -PassengerRoot /usr/share/gems/gems/passenger-3.0.17 -PassengerRuby /usr/bin/ruby - -Listen 8140 https -SSLRandomSeed startup file:/dev/urandom 256 -SSLRandomSeed connect builtin -SSLEngine on -SSLProtocol all -SSLv2 -SSLCipherSuite HIGH:MEDIUM:!aNULL:!MD5 - -# puppet-relevant SSL config: - -SSLCertificateFile "${B}/leaves/master1.example.org.crt" -SSLCertificateKeyFile "${B}/leaves/master1.example.org.key" -# chain in the intermediate cert for this master -SSLCertificateChainFile "${B}/master1/ca-master1.crt" -SSLCACertificatePath "${B}/certdir" -SSLCARevocationPath "${B}/certdir" -SSLCARevocationCheck chain -SSLVerifyClient optional -SSLVerifyDepth 2 -SSLOptions +StdEnvVars -RequestHeader set X-SSL-Subject %{SSL_CLIENT_S_DN}e -RequestHeader set X-Client-DN %{SSL_CLIENT_S_DN}e -RequestHeader set X-Client-Verify %{SSL_CLIENT_VERIFY}e - -ServerName master1.example.org -DocumentRoot "${apachedir}/puppetmaster/public" - -# NOTE: this is httpd-2.4 syntax - - Require all granted - - -RackAutoDetect On -RackBaseURI / diff --git a/acceptance/tests/external_ca_support/fixtures/httpd.conf.el6.orig b/acceptance/tests/external_ca_support/fixtures/httpd.conf.el6.orig deleted file mode 100644 index d87a3265252..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/httpd.conf.el6.orig +++ /dev/null @@ -1,1009 +0,0 @@ -# -# This is the main Apache server configuration file. It contains the -# configuration directives that give the server its instructions. -# See for detailed information. -# In particular, see -# -# for a discussion of each configuration directive. -# -# -# Do NOT simply read the instructions in here without understanding -# what they do. They're here only as hints or reminders. If you are unsure -# consult the online docs. You have been warned. -# -# The configuration directives are grouped into three basic sections: -# 1. Directives that control the operation of the Apache server process as a -# whole (the 'global environment'). -# 2. Directives that define the parameters of the 'main' or 'default' server, -# which responds to requests that aren't handled by a virtual host. -# These directives also provide default values for the settings -# of all virtual hosts. -# 3. Settings for virtual hosts, which allow Web requests to be sent to -# different IP addresses or hostnames and have them handled by the -# same Apache server process. -# -# Configuration and logfile names: If the filenames you specify for many -# of the server's control files begin with "/" (or "drive:/" for Win32), the -# server will use that explicit path. 
If the filenames do *not* begin -# with "/", the value of ServerRoot is prepended -- so "logs/foo.log" -# with ServerRoot set to "/etc/httpd" will be interpreted by the -# server as "/etc/httpd/logs/foo.log". -# - -### Section 1: Global Environment -# -# The directives in this section affect the overall operation of Apache, -# such as the number of concurrent requests it can handle or where it -# can find its configuration files. -# - -# -# Don't give away too much information about all the subcomponents -# we are running. Comment out this line if you don't mind remote sites -# finding out what major optional modules you are running -ServerTokens OS - -# -# ServerRoot: The top of the directory tree under which the server's -# configuration, error, and log files are kept. -# -# NOTE! If you intend to place this on an NFS (or otherwise network) -# mounted filesystem then please read the LockFile documentation -# (available at ); -# you will save yourself a lot of trouble. -# -# Do NOT add a slash at the end of the directory path. -# -ServerRoot "/etc/httpd" - -# -# PidFile: The file in which the server should record its process -# identification number when it starts. Note the PIDFILE variable in -# /etc/sysconfig/httpd must be set appropriately if this location is -# changed. -# -PidFile run/httpd.pid - -# -# Timeout: The number of seconds before receives and sends time out. -# -Timeout 60 - -# -# KeepAlive: Whether or not to allow persistent connections (more than -# one request per connection). Set to "Off" to deactivate. -# -KeepAlive Off - -# -# MaxKeepAliveRequests: The maximum number of requests to allow -# during a persistent connection. Set to 0 to allow an unlimited amount. -# We recommend you leave this number high, for maximum performance. -# -MaxKeepAliveRequests 100 - -# -# KeepAliveTimeout: Number of seconds to wait for the next request from the -# same client on the same connection. -# -KeepAliveTimeout 15 - -## -## Server-Pool Size Regulation (MPM specific) -## - -# prefork MPM -# StartServers: number of server processes to start -# MinSpareServers: minimum number of server processes which are kept spare -# MaxSpareServers: maximum number of server processes which are kept spare -# ServerLimit: maximum value for MaxClients for the lifetime of the server -# MaxClients: maximum number of server processes allowed to start -# MaxRequestsPerChild: maximum number of requests a server process serves - -StartServers 8 -MinSpareServers 5 -MaxSpareServers 20 -ServerLimit 256 -MaxClients 256 -MaxRequestsPerChild 4000 - - -# worker MPM -# StartServers: initial number of server processes to start -# MaxClients: maximum number of simultaneous client connections -# MinSpareThreads: minimum number of worker threads which are kept spare -# MaxSpareThreads: maximum number of worker threads which are kept spare -# ThreadsPerChild: constant number of worker threads in each server process -# MaxRequestsPerChild: maximum number of requests a server process serves - -StartServers 4 -MaxClients 300 -MinSpareThreads 25 -MaxSpareThreads 75 -ThreadsPerChild 25 -MaxRequestsPerChild 0 - - -# -# Listen: Allows you to bind Apache to specific IP addresses and/or -# ports, in addition to the default. See also the -# directive. 
-# -# Change this to Listen on specific IP addresses as shown below to -# prevent Apache from glomming onto all bound IP addresses (0.0.0.0) -# -#Listen 12.34.56.78:80 -Listen 80 - -# -# Dynamic Shared Object (DSO) Support -# -# To be able to use the functionality of a module which was built as a DSO you -# have to place corresponding `LoadModule' lines at this location so the -# directives contained in it are actually available _before_ they are used. -# Statically compiled modules (those listed by `httpd -l') do not need -# to be loaded here. -# -# Example: -# LoadModule foo_module modules/mod_foo.so -# -LoadModule auth_basic_module modules/mod_auth_basic.so -LoadModule auth_digest_module modules/mod_auth_digest.so -LoadModule authn_file_module modules/mod_authn_file.so -LoadModule authn_alias_module modules/mod_authn_alias.so -LoadModule authn_anon_module modules/mod_authn_anon.so -LoadModule authn_dbm_module modules/mod_authn_dbm.so -LoadModule authn_default_module modules/mod_authn_default.so -LoadModule authz_host_module modules/mod_authz_host.so -LoadModule authz_user_module modules/mod_authz_user.so -LoadModule authz_owner_module modules/mod_authz_owner.so -LoadModule authz_groupfile_module modules/mod_authz_groupfile.so -LoadModule authz_dbm_module modules/mod_authz_dbm.so -LoadModule authz_default_module modules/mod_authz_default.so -LoadModule ldap_module modules/mod_ldap.so -LoadModule authnz_ldap_module modules/mod_authnz_ldap.so -LoadModule include_module modules/mod_include.so -LoadModule log_config_module modules/mod_log_config.so -LoadModule logio_module modules/mod_logio.so -LoadModule env_module modules/mod_env.so -LoadModule ext_filter_module modules/mod_ext_filter.so -LoadModule mime_magic_module modules/mod_mime_magic.so -LoadModule expires_module modules/mod_expires.so -LoadModule deflate_module modules/mod_deflate.so -LoadModule headers_module modules/mod_headers.so -LoadModule usertrack_module modules/mod_usertrack.so -LoadModule setenvif_module modules/mod_setenvif.so -LoadModule mime_module modules/mod_mime.so -LoadModule dav_module modules/mod_dav.so -LoadModule status_module modules/mod_status.so -LoadModule autoindex_module modules/mod_autoindex.so -LoadModule info_module modules/mod_info.so -LoadModule dav_fs_module modules/mod_dav_fs.so -LoadModule vhost_alias_module modules/mod_vhost_alias.so -LoadModule negotiation_module modules/mod_negotiation.so -LoadModule dir_module modules/mod_dir.so -LoadModule actions_module modules/mod_actions.so -LoadModule speling_module modules/mod_speling.so -LoadModule userdir_module modules/mod_userdir.so -LoadModule alias_module modules/mod_alias.so -LoadModule substitute_module modules/mod_substitute.so -LoadModule rewrite_module modules/mod_rewrite.so -LoadModule proxy_module modules/mod_proxy.so -LoadModule proxy_balancer_module modules/mod_proxy_balancer.so -LoadModule proxy_ftp_module modules/mod_proxy_ftp.so -LoadModule proxy_http_module modules/mod_proxy_http.so -LoadModule proxy_ajp_module modules/mod_proxy_ajp.so -LoadModule proxy_connect_module modules/mod_proxy_connect.so -LoadModule cache_module modules/mod_cache.so -LoadModule suexec_module modules/mod_suexec.so -LoadModule disk_cache_module modules/mod_disk_cache.so -LoadModule cgi_module modules/mod_cgi.so -LoadModule version_module modules/mod_version.so - -# -# The following modules are not loaded by default: -# -#LoadModule asis_module modules/mod_asis.so -#LoadModule authn_dbd_module modules/mod_authn_dbd.so -#LoadModule cern_meta_module 
modules/mod_cern_meta.so -#LoadModule cgid_module modules/mod_cgid.so -#LoadModule dbd_module modules/mod_dbd.so -#LoadModule dumpio_module modules/mod_dumpio.so -#LoadModule filter_module modules/mod_filter.so -#LoadModule ident_module modules/mod_ident.so -#LoadModule log_forensic_module modules/mod_log_forensic.so -#LoadModule unique_id_module modules/mod_unique_id.so -# - -# -# Load config files from the config directory "/etc/httpd/conf.d". -# -Include conf.d/*.conf - -# -# ExtendedStatus controls whether Apache will generate "full" status -# information (ExtendedStatus On) or just basic information (ExtendedStatus -# Off) when the "server-status" handler is called. The default is Off. -# -#ExtendedStatus On - -# -# If you wish httpd to run as a different user or group, you must run -# httpd as root initially and it will switch. -# -# User/Group: The name (or #number) of the user/group to run httpd as. -# . On SCO (ODT 3) use "User nouser" and "Group nogroup". -# . On HPUX you may not be able to use shared memory as nobody, and the -# suggested workaround is to create a user www and use that user. -# NOTE that some kernels refuse to setgid(Group) or semctl(IPC_SET) -# when the value of (unsigned)Group is above 60000; -# don't use Group #-1 on these systems! -# -User apache -Group apache - -### Section 2: 'Main' server configuration -# -# The directives in this section set up the values used by the 'main' -# server, which responds to any requests that aren't handled by a -# definition. These values also provide defaults for -# any containers you may define later in the file. -# -# All of these directives may appear inside containers, -# in which case these default settings will be overridden for the -# virtual host being defined. -# - -# -# ServerAdmin: Your address, where problems with the server should be -# e-mailed. This address appears on some server-generated pages, such -# as error documents. e.g. admin@your-domain.com -# -ServerAdmin root@localhost - -# -# ServerName gives the name and port that the server uses to identify itself. -# This can often be determined automatically, but we recommend you specify -# it explicitly to prevent problems during startup. -# -# If this is not set to valid DNS name for your host, server-generated -# redirections will not work. See also the UseCanonicalName directive. -# -# If your host doesn't have a registered DNS name, enter its IP address here. -# You will have to access it by its address anyway, and this will make -# redirections work in a sensible way. -# -#ServerName www.example.com:80 - -# -# UseCanonicalName: Determines how Apache constructs self-referencing -# URLs and the SERVER_NAME and SERVER_PORT variables. -# When set "Off", Apache will use the Hostname and Port supplied -# by the client. When set "On", Apache will use the value of the -# ServerName directive. -# -UseCanonicalName Off - -# -# DocumentRoot: The directory out of which you will serve your -# documents. By default, all requests are taken from this directory, but -# symbolic links and aliases may be used to point to other locations. -# -DocumentRoot "/var/www/html" - -# -# Each directory to which Apache has access can be configured with respect -# to which services and features are allowed and/or disabled in that -# directory (and its subdirectories). -# -# First, we configure the "default" to be a very restrictive set of -# features. 
-# - - Options FollowSymLinks - AllowOverride None - - -# -# Note that from this point forward you must specifically allow -# particular features to be enabled - so if something's not working as -# you might expect, make sure that you have specifically enabled it -# below. -# - -# -# This should be changed to whatever you set DocumentRoot to. -# - - -# -# Possible values for the Options directive are "None", "All", -# or any combination of: -# Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews -# -# Note that "MultiViews" must be named *explicitly* --- "Options All" -# doesn't give it to you. -# -# The Options directive is both complicated and important. Please see -# http://httpd.apache.org/docs/2.2/mod/core.html#options -# for more information. -# - Options Indexes FollowSymLinks - -# -# AllowOverride controls what directives may be placed in .htaccess files. -# It can be "All", "None", or any combination of the keywords: -# Options FileInfo AuthConfig Limit -# - AllowOverride None - -# -# Controls who can get stuff from this server. -# - Order allow,deny - Allow from all - - - -# -# UserDir: The name of the directory that is appended onto a user's home -# directory if a ~user request is received. -# -# The path to the end user account 'public_html' directory must be -# accessible to the webserver userid. This usually means that ~userid -# must have permissions of 711, ~userid/public_html must have permissions -# of 755, and documents contained therein must be world-readable. -# Otherwise, the client will only receive a "403 Forbidden" message. -# -# See also: http://httpd.apache.org/docs/misc/FAQ.html#forbidden -# - - # - # UserDir is disabled by default since it can confirm the presence - # of a username on the system (depending on home directory - # permissions). - # - UserDir disabled - - # - # To enable requests to /~user/ to serve the user's public_html - # directory, remove the "UserDir disabled" line above, and uncomment - # the following line instead: - # - #UserDir public_html - - - -# -# Control access to UserDir directories. The following is an example -# for a site where these directories are restricted to read-only. -# -# -# AllowOverride FileInfo AuthConfig Limit -# Options MultiViews Indexes SymLinksIfOwnerMatch IncludesNoExec -# -# Order allow,deny -# Allow from all -# -# -# Order deny,allow -# Deny from all -# -# - -# -# DirectoryIndex: sets the file that Apache will serve if a directory -# is requested. -# -# The index.html.var file (a type-map) is used to deliver content- -# negotiated documents. The MultiViews Option can be used for the -# same purpose, but it is much slower. -# -DirectoryIndex index.html index.html.var - -# -# AccessFileName: The name of the file to look for in each directory -# for additional configuration directives. See also the AllowOverride -# directive. -# -AccessFileName .htaccess - -# -# The following lines prevent .htaccess and .htpasswd files from being -# viewed by Web clients. -# - - Order allow,deny - Deny from all - Satisfy All - - -# -# TypesConfig describes where the mime.types file (or equivalent) is -# to be found. -# -TypesConfig /etc/mime.types - -# -# DefaultType is the default MIME type the server will use for a document -# if it cannot otherwise determine one, such as from filename extensions. -# If your server contains mostly text or HTML documents, "text/plain" is -# a good value. 
If most of your content is binary, such as applications -# or images, you may want to use "application/octet-stream" instead to -# keep browsers from trying to display binary files as though they are -# text. -# -DefaultType text/plain - -# -# The mod_mime_magic module allows the server to use various hints from the -# contents of the file itself to determine its type. The MIMEMagicFile -# directive tells the module where the hint definitions are located. -# - -# MIMEMagicFile /usr/share/magic.mime - MIMEMagicFile conf/magic - - -# -# HostnameLookups: Log the names of clients or just their IP addresses -# e.g., www.apache.org (on) or 204.62.129.132 (off). -# The default is off because it'd be overall better for the net if people -# had to knowingly turn this feature on, since enabling it means that -# each client request will result in AT LEAST one lookup request to the -# nameserver. -# -HostnameLookups Off - -# -# EnableMMAP: Control whether memory-mapping is used to deliver -# files (assuming that the underlying OS supports it). -# The default is on; turn this off if you serve from NFS-mounted -# filesystems. On some systems, turning it off (regardless of -# filesystem) can improve performance; for details, please see -# http://httpd.apache.org/docs/2.2/mod/core.html#enablemmap -# -#EnableMMAP off - -# -# EnableSendfile: Control whether the sendfile kernel support is -# used to deliver files (assuming that the OS supports it). -# The default is on; turn this off if you serve from NFS-mounted -# filesystems. Please see -# http://httpd.apache.org/docs/2.2/mod/core.html#enablesendfile -# -#EnableSendfile off - -# -# ErrorLog: The location of the error log file. -# If you do not specify an ErrorLog directive within a -# container, error messages relating to that virtual host will be -# logged here. If you *do* define an error logfile for a -# container, that host's errors will be logged there and not here. -# -ErrorLog logs/error_log - -# -# LogLevel: Control the number of messages logged to the error_log. -# Possible values include: debug, info, notice, warn, error, crit, -# alert, emerg. -# -LogLevel warn - -# -# The following directives define some format nicknames for use with -# a CustomLog directive (see below). -# -LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined -LogFormat "%h %l %u %t \"%r\" %>s %b" common -LogFormat "%{Referer}i -> %U" referer -LogFormat "%{User-agent}i" agent - -# "combinedio" includes actual counts of actual bytes received (%I) and sent (%O); this -# requires the mod_logio module to be loaded. -#LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio - -# -# The location and format of the access logfile (Common Logfile Format). -# If you do not define any access logfiles within a -# container, they will be logged here. Contrariwise, if you *do* -# define per- access logfiles, transactions will be -# logged therein and *not* in this file. -# -#CustomLog logs/access_log common - -# -# If you would like to have separate agent and referer logfiles, uncomment -# the following directives. 
-# -#CustomLog logs/referer_log referer -#CustomLog logs/agent_log agent - -# -# For a single logfile with access, agent, and referer information -# (Combined Logfile Format), use the following directive: -# -CustomLog logs/access_log combined - -# -# Optionally add a line containing the server version and virtual host -# name to server-generated pages (internal error documents, FTP directory -# listings, mod_status and mod_info output etc., but not CGI generated -# documents or custom error documents). -# Set to "EMail" to also include a mailto: link to the ServerAdmin. -# Set to one of: On | Off | EMail -# -ServerSignature On - -# -# Aliases: Add here as many aliases as you need (with no limit). The format is -# Alias fakename realname -# -# Note that if you include a trailing / on fakename then the server will -# require it to be present in the URL. So "/icons" isn't aliased in this -# example, only "/icons/". If the fakename is slash-terminated, then the -# realname must also be slash terminated, and if the fakename omits the -# trailing slash, the realname must also omit it. -# -# We include the /icons/ alias for FancyIndexed directory listings. If you -# do not use FancyIndexing, you may comment this out. -# -Alias /icons/ "/var/www/icons/" - - - Options Indexes MultiViews FollowSymLinks - AllowOverride None - Order allow,deny - Allow from all - - -# -# WebDAV module configuration section. -# - - # Location of the WebDAV lock database. - DAVLockDB /var/lib/dav/lockdb - - -# -# ScriptAlias: This controls which directories contain server scripts. -# ScriptAliases are essentially the same as Aliases, except that -# documents in the realname directory are treated as applications and -# run by the server when requested rather than as documents sent to the client. -# The same rules about trailing "/" apply to ScriptAlias directives as to -# Alias. -# -ScriptAlias /cgi-bin/ "/var/www/cgi-bin/" - -# -# "/var/www/cgi-bin" should be changed to whatever your ScriptAliased -# CGI directory exists, if you have that configured. -# - - AllowOverride None - Options None - Order allow,deny - Allow from all - - -# -# Redirect allows you to tell clients about documents which used to exist in -# your server's namespace, but do not anymore. This allows you to tell the -# clients where to look for the relocated document. -# Example: -# Redirect permanent /foo http://www.example.com/bar - -# -# Directives controlling the display of server-generated directory listings. -# - -# -# IndexOptions: Controls the appearance of server-generated directory -# listings. -# -IndexOptions FancyIndexing VersionSort NameWidth=* HTMLTable Charset=UTF-8 - -# -# AddIcon* directives tell the server which icon to show for different -# files or filename extensions. These are only displayed for -# FancyIndexed directories. 
-# -AddIconByEncoding (CMP,/icons/compressed.gif) x-compress x-gzip - -AddIconByType (TXT,/icons/text.gif) text/* -AddIconByType (IMG,/icons/image2.gif) image/* -AddIconByType (SND,/icons/sound2.gif) audio/* -AddIconByType (VID,/icons/movie.gif) video/* - -AddIcon /icons/binary.gif .bin .exe -AddIcon /icons/binhex.gif .hqx -AddIcon /icons/tar.gif .tar -AddIcon /icons/world2.gif .wrl .wrl.gz .vrml .vrm .iv -AddIcon /icons/compressed.gif .Z .z .tgz .gz .zip -AddIcon /icons/a.gif .ps .ai .eps -AddIcon /icons/layout.gif .html .shtml .htm .pdf -AddIcon /icons/text.gif .txt -AddIcon /icons/c.gif .c -AddIcon /icons/p.gif .pl .py -AddIcon /icons/f.gif .for -AddIcon /icons/dvi.gif .dvi -AddIcon /icons/uuencoded.gif .uu -AddIcon /icons/script.gif .conf .sh .shar .csh .ksh .tcl -AddIcon /icons/tex.gif .tex -AddIcon /icons/bomb.gif core - -AddIcon /icons/back.gif .. -AddIcon /icons/hand.right.gif README -AddIcon /icons/folder.gif ^^DIRECTORY^^ -AddIcon /icons/blank.gif ^^BLANKICON^^ - -# -# DefaultIcon is which icon to show for files which do not have an icon -# explicitly set. -# -DefaultIcon /icons/unknown.gif - -# -# AddDescription allows you to place a short description after a file in -# server-generated indexes. These are only displayed for FancyIndexed -# directories. -# Format: AddDescription "description" filename -# -#AddDescription "GZIP compressed document" .gz -#AddDescription "tar archive" .tar -#AddDescription "GZIP compressed tar archive" .tgz - -# -# ReadmeName is the name of the README file the server will look for by -# default, and append to directory listings. -# -# HeaderName is the name of a file which should be prepended to -# directory indexes. -ReadmeName README.html -HeaderName HEADER.html - -# -# IndexIgnore is a set of filenames which directory indexing should ignore -# and not include in the listing. Shell-style wildcarding is permitted. -# -IndexIgnore .??* *~ *# HEADER* README* RCS CVS *,v *,t - -# -# DefaultLanguage and AddLanguage allows you to specify the language of -# a document. You can then use content negotiation to give a browser a -# file in a language the user can understand. -# -# Specify a default language. This means that all data -# going out without a specific language tag (see below) will -# be marked with this one. You probably do NOT want to set -# this unless you are sure it is correct for all cases. -# -# * It is generally better to not mark a page as -# * being a certain language than marking it with the wrong -# * language! -# -# DefaultLanguage nl -# -# Note 1: The suffix does not have to be the same as the language -# keyword --- those with documents in Polish (whose net-standard -# language code is pl) may wish to use "AddLanguage pl .po" to -# avoid the ambiguity with the common suffix for perl scripts. -# -# Note 2: The example entries below illustrate that in some cases -# the two character 'Language' abbreviation is not identical to -# the two character 'Country' code for its country, -# E.g. 'Danmark/dk' versus 'Danish/da'. -# -# Note 3: In the case of 'ltz' we violate the RFC by using a three char -# specifier. There is 'work in progress' to fix this and get -# the reference data for rfc1766 cleaned up. 
-# -# Catalan (ca) - Croatian (hr) - Czech (cs) - Danish (da) - Dutch (nl) -# English (en) - Esperanto (eo) - Estonian (et) - French (fr) - German (de) -# Greek-Modern (el) - Hebrew (he) - Italian (it) - Japanese (ja) -# Korean (ko) - Luxembourgeois* (ltz) - Norwegian Nynorsk (nn) -# Norwegian (no) - Polish (pl) - Portugese (pt) -# Brazilian Portuguese (pt-BR) - Russian (ru) - Swedish (sv) -# Simplified Chinese (zh-CN) - Spanish (es) - Traditional Chinese (zh-TW) -# -AddLanguage ca .ca -AddLanguage cs .cz .cs -AddLanguage da .dk -AddLanguage de .de -AddLanguage el .el -AddLanguage en .en -AddLanguage eo .eo -AddLanguage es .es -AddLanguage et .et -AddLanguage fr .fr -AddLanguage he .he -AddLanguage hr .hr -AddLanguage it .it -AddLanguage ja .ja -AddLanguage ko .ko -AddLanguage ltz .ltz -AddLanguage nl .nl -AddLanguage nn .nn -AddLanguage no .no -AddLanguage pl .po -AddLanguage pt .pt -AddLanguage pt-BR .pt-br -AddLanguage ru .ru -AddLanguage sv .sv -AddLanguage zh-CN .zh-cn -AddLanguage zh-TW .zh-tw - -# -# LanguagePriority allows you to give precedence to some languages -# in case of a tie during content negotiation. -# -# Just list the languages in decreasing order of preference. We have -# more or less alphabetized them here. You probably want to change this. -# -LanguagePriority en ca cs da de el eo es et fr he hr it ja ko ltz nl nn no pl pt pt-BR ru sv zh-CN zh-TW - -# -# ForceLanguagePriority allows you to serve a result page rather than -# MULTIPLE CHOICES (Prefer) [in case of a tie] or NOT ACCEPTABLE (Fallback) -# [in case no accepted languages matched the available variants] -# -ForceLanguagePriority Prefer Fallback - -# -# Specify a default charset for all content served; this enables -# interpretation of all content as UTF-8 by default. To use the -# default browser choice (ISO-8859-1), or to allow the META tags -# in HTML content to override this choice, comment out this -# directive: -# -AddDefaultCharset UTF-8 - -# -# AddType allows you to add to or override the MIME configuration -# file mime.types for specific file types. -# -#AddType application/x-tar .tgz - -# -# AddEncoding allows you to have certain browsers uncompress -# information on the fly. Note: Not all browsers support this. -# Despite the name similarity, the following Add* directives have nothing -# to do with the FancyIndexing customization directives above. -# -#AddEncoding x-compress .Z -#AddEncoding x-gzip .gz .tgz - -# If the AddEncoding directives above are commented-out, then you -# probably should define those extensions to indicate media types: -# -AddType application/x-compress .Z -AddType application/x-gzip .gz .tgz - -# -# MIME-types for downloading Certificates and CRLs -# -AddType application/x-x509-ca-cert .crt -AddType application/x-pkcs7-crl .crl - -# -# AddHandler allows you to map certain file extensions to "handlers": -# actions unrelated to filetype. These can be either built into the server -# or added with the Action directive (see below) -# -# To use CGI scripts outside of ScriptAliased directories: -# (You will also need to add "ExecCGI" to the "Options" directive.) -# -#AddHandler cgi-script .cgi - -# -# For files that include their own HTTP headers: -# -#AddHandler send-as-is asis - -# -# For type maps (negotiated resources): -# (This is enabled by default to allow the Apache "It Worked" page -# to be distributed in multiple languages.) -# -AddHandler type-map var - -# -# Filters allow you to process content before it is sent to the client. 
-# -# To parse .shtml files for server-side includes (SSI): -# (You will also need to add "Includes" to the "Options" directive.) -# -AddType text/html .shtml -AddOutputFilter INCLUDES .shtml - -# -# Action lets you define media types that will execute a script whenever -# a matching file is called. This eliminates the need for repeated URL -# pathnames for oft-used CGI file processors. -# Format: Action media/type /cgi-script/location -# Format: Action handler-name /cgi-script/location -# - -# -# Customizable error responses come in three flavors: -# 1) plain text 2) local redirects 3) external redirects -# -# Some examples: -#ErrorDocument 500 "The server made a boo boo." -#ErrorDocument 404 /missing.html -#ErrorDocument 404 "/cgi-bin/missing_handler.pl" -#ErrorDocument 402 http://www.example.com/subscription_info.html -# - -# -# Putting this all together, we can internationalize error responses. -# -# We use Alias to redirect any /error/HTTP_.html.var response to -# our collection of by-error message multi-language collections. We use -# includes to substitute the appropriate text. -# -# You can modify the messages' appearance without changing any of the -# default HTTP_.html.var files by adding the line: -# -# Alias /error/include/ "/your/include/path/" -# -# which allows you to create your own set of files by starting with the -# /var/www/error/include/ files and -# copying them to /your/include/path/, even on a per-VirtualHost basis. -# - -Alias /error/ "/var/www/error/" - - - - - AllowOverride None - Options IncludesNoExec - AddOutputFilter Includes html - AddHandler type-map var - Order allow,deny - Allow from all - LanguagePriority en es de fr - ForceLanguagePriority Prefer Fallback - - -# ErrorDocument 400 /error/HTTP_BAD_REQUEST.html.var -# ErrorDocument 401 /error/HTTP_UNAUTHORIZED.html.var -# ErrorDocument 403 /error/HTTP_FORBIDDEN.html.var -# ErrorDocument 404 /error/HTTP_NOT_FOUND.html.var -# ErrorDocument 405 /error/HTTP_METHOD_NOT_ALLOWED.html.var -# ErrorDocument 408 /error/HTTP_REQUEST_TIME_OUT.html.var -# ErrorDocument 410 /error/HTTP_GONE.html.var -# ErrorDocument 411 /error/HTTP_LENGTH_REQUIRED.html.var -# ErrorDocument 412 /error/HTTP_PRECONDITION_FAILED.html.var -# ErrorDocument 413 /error/HTTP_REQUEST_ENTITY_TOO_LARGE.html.var -# ErrorDocument 414 /error/HTTP_REQUEST_URI_TOO_LARGE.html.var -# ErrorDocument 415 /error/HTTP_UNSUPPORTED_MEDIA_TYPE.html.var -# ErrorDocument 500 /error/HTTP_INTERNAL_SERVER_ERROR.html.var -# ErrorDocument 501 /error/HTTP_NOT_IMPLEMENTED.html.var -# ErrorDocument 502 /error/HTTP_BAD_GATEWAY.html.var -# ErrorDocument 503 /error/HTTP_SERVICE_UNAVAILABLE.html.var -# ErrorDocument 506 /error/HTTP_VARIANT_ALSO_VARIES.html.var - - - - -# -# The following directives modify normal HTTP response behavior to -# handle known problems with browser implementations. -# -BrowserMatch "Mozilla/2" nokeepalive -BrowserMatch "MSIE 4\.0b2;" nokeepalive downgrade-1.0 force-response-1.0 -BrowserMatch "RealPlayer 4\.0" force-response-1.0 -BrowserMatch "Java/1\.0" force-response-1.0 -BrowserMatch "JDK/1\.0" force-response-1.0 - -# -# The following directive disables redirects on non-GET requests for -# a directory that does not include the trailing slash. This fixes a -# problem with Microsoft WebFolders which does not appropriately handle -# redirects for folders with DAV methods. -# Same deal with Apple's DAV filesystem and Gnome VFS support for DAV. 
-# -BrowserMatch "Microsoft Data Access Internet Publishing Provider" redirect-carefully -BrowserMatch "MS FrontPage" redirect-carefully -BrowserMatch "^WebDrive" redirect-carefully -BrowserMatch "^WebDAVFS/1.[0123]" redirect-carefully -BrowserMatch "^gnome-vfs/1.0" redirect-carefully -BrowserMatch "^XML Spy" redirect-carefully -BrowserMatch "^Dreamweaver-WebDAV-SCM1" redirect-carefully - -# -# Allow server status reports generated by mod_status, -# with the URL of http://servername/server-status -# Change the ".example.com" to match your domain to enable. -# -# -# SetHandler server-status -# Order deny,allow -# Deny from all -# Allow from .example.com -# - -# -# Allow remote server configuration reports, with the URL of -# http://servername/server-info (requires that mod_info.c be loaded). -# Change the ".example.com" to match your domain to enable. -# -# -# SetHandler server-info -# Order deny,allow -# Deny from all -# Allow from .example.com -# - -# -# Proxy Server directives. Uncomment the following lines to -# enable the proxy server: -# -# -#ProxyRequests On -# -# -# Order deny,allow -# Deny from all -# Allow from .example.com -# - -# -# Enable/disable the handling of HTTP/1.1 "Via:" headers. -# ("Full" adds the server version; "Block" removes all outgoing Via: headers) -# Set to one of: Off | On | Full | Block -# -#ProxyVia On - -# -# To enable a cache of proxied content, uncomment the following lines. -# See http://httpd.apache.org/docs/2.2/mod/mod_cache.html for more details. -# -# -# CacheEnable disk / -# CacheRoot "/var/cache/mod_proxy" -# -# - -# -# End of proxy directives. - -### Section 3: Virtual Hosts -# -# VirtualHost: If you want to maintain multiple domains/hostnames on your -# machine you can setup VirtualHost containers for them. Most configurations -# use only name-based virtual hosts so the server doesn't need to worry about -# IP addresses. This is indicated by the asterisks in the directives below. -# -# Please see the documentation at -# -# for further details before you try to setup virtual hosts. -# -# You may use the command line option '-S' to verify your virtual host -# configuration. - -# -# Use name-based virtual hosting. -# -#NameVirtualHost *:80 -# -# NOTE: NameVirtualHost cannot be used without a port specifier -# (e.g. :80) if mod_ssl is being used, due to the nature of the -# SSL protocol. -# - -# -# VirtualHost example: -# Almost any Apache directive may go into a VirtualHost container. -# The first VirtualHost section is used for requests without a known -# server name. 
-# -# -# ServerAdmin webmaster@dummy-host.example.com -# DocumentRoot /www/docs/dummy-host.example.com -# ServerName dummy-host.example.com -# ErrorLog logs/dummy-host.example.com-error_log -# CustomLog logs/dummy-host.example.com-access_log common -# diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.email.issued_by.agent-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.email.issued_by.agent-ca.crt deleted file mode 100644 index dc4c675befb..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.email.issued_by.agent-ca.crt +++ /dev/null @@ -1,15 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICTTCCAfegAwIBAgIBBTANBgkqhkiG9w0BAQUFADB9MSMwIQYDVQQDExpJbnRl -cm1lZGlhdGUgQ0EgKGFnZW50LWNhKTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFt -cGxlLm9yZzEZMBcGA1UEChMQRXhhbXBsZSBPcmcsIExMQzEaMBgGA1UECxMRU2Vy -dmVyIE9wZXJhdGlvbnMwHhcNMTMwMzMwMDU1MDQ4WhcNMzMwMzI1MDU1MDQ4WjBE -MSEwHwYDVQQDDBhhZ2VudC1lbWFpbDEuZXhhbXBsZS5vcmcxHzAdBgkqhkiG9w0B -CQEWEHRlc3RAZXhhbXBsZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEA1yGO -Vq1tH18qqfGofLz4jsQK8L1+sH/Rqv41YwsOwVEvBbvDLhbaokbqdPj49Oilk8jd -2WCFhfeQR2EHYwDbvQIDAQABo4GaMIGXMFsGA1UdIwRUMFKhTaRLMEkxEDAOBgNV -BAMMB1Jvb3QgQ0ExGjAYBgNVBAsMEVNlcnZlciBPcGVyYXRpb25zMRkwFwYDVQQK -DBBFeGFtcGxlIE9yZywgTExDggEBMAwGA1UdEwEB/wQCMAAwCwYDVR0PBAQDAgWg -MB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjANBgkqhkiG9w0BAQUFAANB -AKAeQNxCcQfbMaxxp84Qqi2dTUKnKE4IKOOCgQEPIE9gzZ5Tjig2bJIBfer3hXia -Mudl4EXigQpy2zJ+CIE/Pl4= ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.email.issued_by.agent-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.email.issued_by.agent-ca.key deleted file mode 100644 index 6cf5d84267b..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.email.issued_by.agent-ca.key +++ /dev/null @@ -1,9 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIBPQIBAAJBANchjlatbR9fKqnxqHy8+I7ECvC9frB/0ar+NWMLDsFRLwW7wy4W -2qJG6nT4+PTopZPI3dlghYX3kEdhB2MA270CAwEAAQJBAMsqgYYSGm/6LAx6kCfd -Ps91DO/1xWNiEtxtXnW+x5yAFjdiM0uhRupci78kHJ+00dDqPGDHpWkmQ/bqzdHM -A/ECIQD+b+ezEA4sUfiSHRkfH4EsBMtDAxS6NyrEk8I1EQYukwIhANhz1+PrDnpI -BO0ql2sIaAO333zYXTT6JAyWBs2zou5vAiEAkw4uhknDR05hMmHrARf+Wmiy6ho/ -wpjb+R3FY1Jhlk0CIQDJPyO5+EEUtkNGUvRHgqBa3KmAxRYb/Hpp7U9UD7HsJwIh -AJR9GTJSdpPz7HSS2GtBxK2AlwWvqIj3zLyL/u4aZRJs ------END RSA PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.agent-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.agent-ca.crt deleted file mode 100644 index 1d52b3bfa5c..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.agent-ca.crt +++ /dev/null @@ -1,22 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDsDCCApigAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MSMwIQYDVQQDExpJbnRl -cm1lZGlhdGUgQ0EgKGFnZW50LWNhKTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFt -cGxlLm9yZzEZMBcGA1UEChMQRXhhbXBsZSBPcmcsIExMQzEaMBgGA1UECxMRU2Vy -dmVyIE9wZXJhdGlvbnMwHhcNMTQwNDA4MDEyNTM3WhcNMzQwNDAzMDEyNTM3WjAd -MRswGQYDVQQDDBJhZ2VudDEuZXhhbXBsZS5vcmcwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQDAjIjCGe+/hW62yuI9i4bVqZ3X60tQtUTvB1TA4TIAHSqb -MGSSuMBdPKSkLRrASzCUUdPBbNo1MDwcQ6ZulyE6emIxp3V+ISrkRLIfYw+CSqCu -MjuaXHLtC03qOcXvFXYMwJ9xGHkUnylKe2IMVafbtgQ6MXF7E9VxZu4eddf9OCAX -I5ZgehuZjiOOAVZaMfE/0xIUc/aQcyCstv8NKbFdET32nMYnuYprkY12LVsnDsdi -Tfa2zdqHCJBXmAb23QvwRp2scFy9QcCbhF063Fklx4Ix0ut1ifjjywiwDKu/OMiM 
-2BwpU+JnzSHhKH2VIdJbMpaAt/whyOJ9RrKK6ZDFAgMBAAGjgZowgZcwWwYDVR0j -BFQwUqFNpEswSTEQMA4GA1UEAwwHUm9vdCBDQTEaMBgGA1UECwwRU2VydmVyIE9w -ZXJhdGlvbnMxGTAXBgNVBAoMEEV4YW1wbGUgT3JnLCBMTEOCAQEwDAYDVR0TAQH/ -BAIwADALBgNVHQ8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMC -MA0GCSqGSIb3DQEBBQUAA4IBAQDAgTSbvhE8//g0WNmT3kXVPuGg02v9ML3Uai1f -whq2dbBaERWhfIHBZw0KiNw0fI0MgKtR8qu3nEFOrXZEIgVCtBDv/aTGoudT5nil -WF99bC4OkoAiYLwFOVLjXbc3+wOB7dd9G6pdBnETmWT/MO3WwXxexNx/7yxY2UDI -4g4LOkxQMWC6zCEHp7lNofEKTFGlFgaP65PeLZ2GxnvAVfm1k7rMSy7eeQV2L1Hq -40LhAbQSqsSY8hBoypQnCmf0yijFwTH6wP6hRCu0ptu//W9BSJ4L8bTNg7n/Ff97 -QXtq2qn4OgLIoCSjshs3JbFHiPEpC8He3JKXM+gHWqySe8Ln ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.agent-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.agent-ca.key deleted file mode 100644 index fb7a531160e..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.agent-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDAjIjCGe+/hW62 -yuI9i4bVqZ3X60tQtUTvB1TA4TIAHSqbMGSSuMBdPKSkLRrASzCUUdPBbNo1MDwc -Q6ZulyE6emIxp3V+ISrkRLIfYw+CSqCuMjuaXHLtC03qOcXvFXYMwJ9xGHkUnylK -e2IMVafbtgQ6MXF7E9VxZu4eddf9OCAXI5ZgehuZjiOOAVZaMfE/0xIUc/aQcyCs -tv8NKbFdET32nMYnuYprkY12LVsnDsdiTfa2zdqHCJBXmAb23QvwRp2scFy9QcCb -hF063Fklx4Ix0ut1ifjjywiwDKu/OMiM2BwpU+JnzSHhKH2VIdJbMpaAt/whyOJ9 -RrKK6ZDFAgMBAAECggEAN5EW8nULtEisj8HzEnHOoqCFdcdof80gLJ7//X2/sTq5 -jfmkPJdmpEGY/ewqWS4ZwePvaVWhj1HxNvLq9+VTaI3jApNiG9k0iJWuldI/qokg -38SNYNmnLPNjM+IiVgHqhHmeScQXRTU9dEoxHIAnpgdcJvMX3b32jfh3ZYD6qQ9m -EINbT/x+L0nRFLj6MzJyvwydlxP38koTLyLcPKEZFpVQkIVn74kEzGEUl4PzzUnf -28l+IbXYN0fZwsfq05QFiscx85rGXE7IkhDkF/gkJxrQVDJqjIOW0v0KzRFv075b -cuEvnAirJB1Un5OdFfTNMobjw/r9XDdC/A8ZeXnKrQKBgQDrGxwpn7Nkwx1nHIY0 -WiAwDhnSzq3DOzxMC4YWMMIHeEi4DC/HzS77OmCEjtL0UgtlnjrKTW9hK1QE/DQk -FkvPOyD7/47X2p2T9oIC6yZMKfN6KldHIhQyRR7Hb7/ke8Qd+ddXOyHza2GYsZsK -/AXAKAL4pgEiX9m7neZm/zgN1wKBgQDRqTZNGh0+0eRyju8No+jnoPflJIle9G8U -EYXeMC+oKlnk5c98hThFgxxrnHriC0FHS3QvR0Xk8B92Rf6hmf6Tm/lmkVpknXxT -uVxPAs/Vog9oDROfrymNTU/Sm/JZGCOcteOsq+ux0kAFs5pu09Qx1A5h8n/ui6lK -pJpo9uRqwwKBgQDFLh6HlmZJqw7c7OJtw+zdAOaNlEPNyr9+c+fIcqo4w5wRqD7m -juLJV6OdaX/p42mll3htNValo0Y7TB6a6Yw5SLkYQLTXuoWQlB1kPz4GgOSwYBSF -F0LjCm+PCt1gzqCkF2eQxIpr8nKPMt3673YPPD+JtQicgNFG9l5HoeOsTwKBgG9I -EhNQdg9e1pNbfFTQGBHBZXJRNzyYHtEXD+fDybfHatlMhRmBmEGE+rO/ZZXPBYHy -8aMagGWocfqT4jgiBXaRhgnDwqe+0zuZGf6x7mBk2SqQLkdGcJaYX3LY5QQXBOfr -vwFqavqCLwsXrjN+6ZTdChlA3lhd9qgwv+hQiyKDAoGAc3ceSPFNFEfXjcAltARk -uJxH6tgDGwRaxvAgkqNy5yW4PVUYttipVA6lUlSI/BoCsRaKA10tmX9953VLtOQn -RT0ffDpaiUcea+ZcfboFRAl1qeW9nqY+o5duS/ZusoG17K30Fjaulh2mWV79+kjP -+u0OWrAQ82oICBolbMGIPz0= ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.master-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.master-ca.crt deleted file mode 100644 index c53fe9aa30d..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.master-ca.crt +++ /dev/null @@ -1,22 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDsTCCApmgAwIBAgIBAzANBgkqhkiG9w0BAQUFADB+MSQwIgYDVQQDExtJbnRl -cm1lZGlhdGUgQ0EgKG1hc3Rlci1jYSkxHzAdBgkqhkiG9w0BCQEWEHRlc3RAZXhh -bXBsZS5vcmcxGTAXBgNVBAoTEEV4YW1wbGUgT3JnLCBMTEMxGjAYBgNVBAsTEVNl -cnZlciBPcGVyYXRpb25zMB4XDTE0MDQwODAxMjUzOFoXDTM0MDQwMzAxMjUzOFow -HTEbMBkGA1UEAwwSYWdlbnQxLmV4YW1wbGUub3JnMIIBIjANBgkqhkiG9w0BAQEF 
-AAOCAQ8AMIIBCgKCAQEA6EjKALrLYvsuUlBYm5+0Ig24R4axrhFPv3h8n9iL3/33 -Fl1tdxFGMi9jtRcNIajEo4XboD9tSvxWVJQHOOMdPh4Fo05ygcWn9k93W7xZmEiP -X+suzEi6LvfoaTsmWN/a8CVLRbfB/xsPLGUhCB7qOS0nmDcIp2A72nBXCMbXUnez -at5Hsmdb9DJPzuKuv5bgA2nL44uPDKXwG0n3AUVqyIab3uXN52q8nFGg62AVSFcy -8ergApXR6F6Nmy+HdzAJZeaxxN93pi6GrT9Bxh194iffeN1Dl2SVSWB4zGsjt1NN -+4z0Y6MhAt/1g+ZIOOitW/6U3T4KOsamGd6ZVsiGbwIDAQABo4GaMIGXMFsGA1Ud -IwRUMFKhTaRLMEkxEDAOBgNVBAMMB1Jvb3QgQ0ExGjAYBgNVBAsMEVNlcnZlciBP -cGVyYXRpb25zMRkwFwYDVQQKDBBFeGFtcGxlIE9yZywgTExDggECMAwGA1UdEwEB -/wQCMAAwCwYDVR0PBAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD -AjANBgkqhkiG9w0BAQUFAAOCAQEAJB2a8oabIxE0mn8tINQtUbjE1UPI2HU4HxnX -eAO225BogLCbo5RgUo+V1VZp8oksWggn7ztavcY/lpR/0lEiC/rrr2A10HCybAkZ -zBudei7Wi2ztBC8CTnO61K1/jvxHgUElufGuu+Aa7YlAJFby1ROKWJP7UhAS4uR0 -mMj4yqApXsp4+/Qio64MNCjj6MuCyUqcRoq+QM7m+/Po8i1DWesIJ0N8l68/uFiY -ByZ8Y9q3VPn49LUf/h+jTRus6PDoCzY7Zbh/L8R1g3p93ahLiwXJ422ubG9WJh90 -IAG3jJaGEfLCxPQ80Td2ZnOP3Xkf0lcK11NX7IuXeGNSoK7flQ== ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.master-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.master-ca.key deleted file mode 100644 index b459d0ed261..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent1.example.org.issued_by.master-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDoSMoAusti+y5S -UFibn7QiDbhHhrGuEU+/eHyf2Ivf/fcWXW13EUYyL2O1Fw0hqMSjhdugP21K/FZU -lAc44x0+HgWjTnKBxaf2T3dbvFmYSI9f6y7MSLou9+hpOyZY39rwJUtFt8H/Gw8s -ZSEIHuo5LSeYNwinYDvacFcIxtdSd7Nq3keyZ1v0Mk/O4q6/luADacvji48MpfAb -SfcBRWrIhpve5c3narycUaDrYBVIVzLx6uACldHoXo2bL4d3MAll5rHE33emLoat -P0HGHX3iJ9943UOXZJVJYHjMayO3U037jPRjoyEC3/WD5kg46K1b/pTdPgo6xqYZ -3plWyIZvAgMBAAECggEAW8xIaLlpn5h82fL+0aHv4nVhmoLUvWICDj1joxhFTyRi -/ee2VxPwley8jtipS+AStj00asQmQTgwNgS6RfUbnWzn9X4PKtIeau1E0nBBSDmE -z/d16y3ixqUaqFvD7+On9rgGY2sXs+XRvU3KHBd7oVUDDNjTBrK1dMJo4wMSHXiI -JOJGujhrzHcDwERsCRHR6lCg80bwP57420FgOt6iA+uEtAa/JS/RL1SNzc0/ZfpR -xoym+7uWsTBxdSHROcv9yObip62C8uPySNW/13kkXZnP5IFl6yWh5xVuQJaSRI4g -zyLc8p2JlVg8SSjpXvP3Y5cmOrPCqPUBQsDPIS3C4QKBgQD0n78CZSK3Pr7w+DPl -08QpYiFWdtaz9FC6syRuW3+CW59j3WqrqczwWFrP7Uq1ltZ/tc+gQJaCMyBPTxS1 -fSMrWK9cU6qVz+DNAKAq+KbRjPDG1shSRy4AaUj3BHwazjCz7JcfFf9zC3N+Arbl -AwtoP/mDv6dUoaVLxT0o9rFe0wKBgQDzFiOLZTxcOrN1ELiMI3jvhZ8rffIQNQEw -PrrKMmqRxnpoXRfJSKhKjddLX/I2vbPc7/luF0rDTjgWyNmj9bqOYFrr0H0Cukh0 -jnRh/UDxxY6TfX9kFTqameGa8iaYW7+CZyt70TFghnMNprGOi0AWqIIQWJjo8wAe -wE7eJikQdQKBgCVuPB/1sbYwV68vi2FjYeSjK067qGaov6GRv8VTItOjiWQSgTv7 -I2yzWFHwkTnv9qpb+4Ud3OnXQN5hz11l5HfBfjO2aae6wz8bFWb93Si7VCxYuRY9 -5gASHCQP/51qZ3Fghk8vYoMMAYQKjknEiX5OK7PJI6WDhnNtnsoh1UoZAoGBAOcm -gzA3WPlevG0yNDrWNFaXWm7x+W9d575nuBkzbdEmyhlZhJfbpmQWksZS+zEAnPsX -mrkMHwSu6XkJDqPxs69Vgr/AZk9ksidbmuHe/5WnxVcMvEmAQlTmxPghEYBJpkcr -MdcwamKeFuPBQhAI7C1fmSZQCT9iaxfSyGxIHz5NAoGBAKrPfTFX74+Po7Uo+qEc -81VMFJEwTwVJUX0+EIuskG6LIDAdRo2KWr/Rl8XvFcAj+BfSF7Y5eOA8IeqGSJHG -n1zyTmZjMQDyb6ECu0QiA8sshM3ICsrEySvTdGgw7pMySIen9Ku066bvdq+FOIY1 -zH44viWG+2efHCt5r2CehPU5 ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent2.example.org.issued_by.agent-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/agent2.example.org.issued_by.agent-ca.crt deleted file mode 100644 index 50c064ce395..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent2.example.org.issued_by.agent-ca.crt +++ /dev/null @@ -1,22 +0,0 @@ ------BEGIN CERTIFICATE----- 
-MIIDsDCCApigAwIBAgIBAjANBgkqhkiG9w0BAQUFADB9MSMwIQYDVQQDExpJbnRl -cm1lZGlhdGUgQ0EgKGFnZW50LWNhKTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFt -cGxlLm9yZzEZMBcGA1UEChMQRXhhbXBsZSBPcmcsIExMQzEaMBgGA1UECxMRU2Vy -dmVyIE9wZXJhdGlvbnMwHhcNMTQwNDA4MDEyNTM4WhcNMzQwNDAzMDEyNTM4WjAd -MRswGQYDVQQDDBJhZ2VudDIuZXhhbXBsZS5vcmcwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQDHweG4FqoG5dYD84YmGpGUKFIBy+ISXaZLnjD3JhOb6d5H -GFFFhBIuWeM8ieaY67crBpVdjKWuQAkWayWYdCa8npC7YxxHKphLbN81eL2WLraW -2N2kji3TwwA6J+QQVPAUCgPUQcC/ob0x9faor+zKrxQPTKQ6xtuSI+FIkWiw3+e2 -W54ONXx2QrKDwbeXXXNG09QOJVJxW3DueycbCH4sWa2n/ODySeC2t1hU6A5e6Z0b -UhMtaixdbQsKfsid2TpEC66ILW2MlCYwv4EdaOjC9FbizEXR4F7GAg2C5kPnQvhA -iRP6MAeFW5hFCkjOGBQXBCNxBSf9xPzmV4WGLjA1AgMBAAGjgZowgZcwWwYDVR0j -BFQwUqFNpEswSTEQMA4GA1UEAwwHUm9vdCBDQTEaMBgGA1UECwwRU2VydmVyIE9w -ZXJhdGlvbnMxGTAXBgNVBAoMEEV4YW1wbGUgT3JnLCBMTEOCAQEwDAYDVR0TAQH/ -BAIwADALBgNVHQ8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMC -MA0GCSqGSIb3DQEBBQUAA4IBAQAVDnNbC88i9mZeGy5KTbV4Xp0qZcPY5+w3MP6S -65DOxCSjw9ZIHF8E0EgGhG8bHidf7+z5YdyVald9lDV8PItjXzam+VUMmMuPdJIR -cwLdRjd30NmHaDQEjqsXMS+KaZDKUCIxaOcYHb4D2XU2LRXhmA0JMteQHLo0Ugua -2WoanhjV+7KMdY74h7aPc4j9KFEOzfRLFWQYpIR3ENucRLkPX4Aeb/qrDlQaSfw/ -BXma+8+4F/P4JjlE2wRNI8nSvflQGJ6fBnDgSyb07M5RqM/oDjMz58c/TEv5jzX6 -X4GizfT4l0mNtjxe3k7+tmlLsDaN2aiY3jCQ5D8kzAhdM+rL ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent2.example.org.issued_by.agent-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/agent2.example.org.issued_by.agent-ca.key deleted file mode 100644 index 5c1506f753b..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent2.example.org.issued_by.agent-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEwAIBADANBgkqhkiG9w0BAQEFAASCBKowggSmAgEAAoIBAQDHweG4FqoG5dYD -84YmGpGUKFIBy+ISXaZLnjD3JhOb6d5HGFFFhBIuWeM8ieaY67crBpVdjKWuQAkW -ayWYdCa8npC7YxxHKphLbN81eL2WLraW2N2kji3TwwA6J+QQVPAUCgPUQcC/ob0x -9faor+zKrxQPTKQ6xtuSI+FIkWiw3+e2W54ONXx2QrKDwbeXXXNG09QOJVJxW3Du -eycbCH4sWa2n/ODySeC2t1hU6A5e6Z0bUhMtaixdbQsKfsid2TpEC66ILW2MlCYw -v4EdaOjC9FbizEXR4F7GAg2C5kPnQvhAiRP6MAeFW5hFCkjOGBQXBCNxBSf9xPzm -V4WGLjA1AgMBAAECggEBAImIQaJ/aD0rjThXOKuPa1/4is3V1CcU92Pk3I2tV57n -XDTS11HnZqUnGj8aKvxqfkhPVhN8vK3GRPt8dfwLLIh9G+UIjfWprAVyRhQIt29p -Zilh0uhzQ7Yi8ksYH/vmnw0NACnpw42tDBgT4umthnjeJg+KkyAsvgxWS8us4cm0 -h+wT6jMhKTGJKys83Py7CYIH4IiJ/1NHqkyXOdJjmx4cipys+3RYLeqnVpYhdKab -KV9B4WOmsb3NVQvtcQ1PDTohFeCZZgMurCLGx4/eGSvy26YyFKjiqFi+rqjQPiRB -YFlnePXE7EXlPzfimLGE+zYG7SMM89Z/ou3yevT8k9kCgYEA46DW2pzTHLno9qxL -AhhXrMkAoSluJXtekrOvQFqeYfCnPQPSfpN4D1i1UhFmizZ1+hJqjRaxf3boOTwd -hsy0pxljknoNtBNu0rIPmMNqO0wKmB0LapMqdsYbj1PWPP+MoyyVLj1AX1P6Y2Ee -vFU3FuBvKzpb9YQKFIWzicCoKv8CgYEA4Ke8tsj5ut76xD9bVrSdR4MpXWWgPnwa -lpsZ7rVDfON/pYuln5pKmA9gmETMmjnkpFNyecTABTYifivpG/48PeAyH5AtGWLh -Cy74G1R7a7g579S7occ4Enx5MMQZvA6xJLCZagxsM5vogIjQ5CwvrTA3KD7dQ3n4 -rDq1+Ycj6MsCgYEAio88WeR+aY0NLyJfok3ZCLdt56xFfRFk1x4DfsPqhymrU2vB -NQVlXsobzXIEHpevJuMQ8wMuVq213CkovAZdrR26S856CvSNUDnNXqsNPj0icce0 -TFJ61cPvwMZCsezI2gboZDYquhxvXREgWXnxx74kTYb/tSqDzEw6po6cj1cCgYEA -zQUnOuSky36s+aYcVD+WfWncuSiCmiOmHCXF/8waoAH6Pvj3mHU1eBoofC3lTyZg -W/rvsOL2nort5ZlrLqnRDmLpO1bXYaCUYiFmOYLa8853yUCqT5TAvlYsP23nUnFS -BnrINzjJEjxJbevuPx14ESS8YZUlVmwcngkig2tqQQsCgYEAyx6hnpLjhd3K0v5x -d78HarimcTRoHyldztubdVDV081KFTIrrWjQn1ag9eFOfctkhjngxlGHBtIQnLRM -pdYkCsymxTDju30EvD8bvOE9+SVgOFOpTRuenbepxltygtKQmGY/5w2PHQFJZjwI -tklzLp5QZNYfJeaPZ+frNRL7wxQ= ------END PRIVATE KEY----- diff --git 
a/acceptance/tests/external_ca_support/fixtures/leaves/agent3.example.org.issued_by.agent-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/agent3.example.org.issued_by.agent-ca.crt deleted file mode 100644 index c254ce91a32..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent3.example.org.issued_by.agent-ca.crt +++ /dev/null @@ -1,22 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDsDCCApigAwIBAgIBAzANBgkqhkiG9w0BAQUFADB9MSMwIQYDVQQDExpJbnRl -cm1lZGlhdGUgQ0EgKGFnZW50LWNhKTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFt -cGxlLm9yZzEZMBcGA1UEChMQRXhhbXBsZSBPcmcsIExMQzEaMBgGA1UECxMRU2Vy -dmVyIE9wZXJhdGlvbnMwHhcNMTQwNDA4MDEyNTM4WhcNMzQwNDAzMDEyNTM4WjAd -MRswGQYDVQQDDBJhZ2VudDMuZXhhbXBsZS5vcmcwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQCvaJCmw0/fy9rmPGtda/9wOlaqiozKac51fJbfDcdiTV9k -Adoln28td4vAXqCckIWhdeSAyF1MhIpnGSsJuklsHTJ2ZfospNwDY0NqYkejAzT6 -RM/jblY+D8/Cyp37UjoLi1Ri8PEbnjbgYMVKn51nZlBexHp1CpFBvjm1nd7rt3Ll -wZ6Hgbg9aLSJYKWucvsFNteeDuqte6fV+ypO/VtDGaLJS9e/3X9kNt7lU1tTBLyD -KfUOhpDBR2+fuoWJ9YADFiyK7AadAV+d+nlep2UG6MN8L0H0ZgBp73Rpxb9tgIWr -istMboUvQghFfZ4n/KTa4s6ooUe2tzMxFGsqaus1AgMBAAGjgZowgZcwWwYDVR0j -BFQwUqFNpEswSTEQMA4GA1UEAwwHUm9vdCBDQTEaMBgGA1UECwwRU2VydmVyIE9w -ZXJhdGlvbnMxGTAXBgNVBAoMEEV4YW1wbGUgT3JnLCBMTEOCAQEwDAYDVR0TAQH/ -BAIwADALBgNVHQ8EBAMCBaAwHQYDVR0lBBYwFAYIKwYBBQUHAwEGCCsGAQUFBwMC -MA0GCSqGSIb3DQEBBQUAA4IBAQBXOZ+j8MRJgS1XBriZxL5y5GTCxZTmgdv7akvy -RnDHN6CPSPDG3HX0Db7Ey1kcQU/d/N9PCDyNGFUxihfq4CjqJpQAarZ+oYBCO+k0 -367Xw+iKm6JHK3CZLT9GG7ikz9wAb5E+O3PMWePAtqSOdHmEF+K+8O+WNHe/Slzz -BvY1m0WXXxEtbkKOxVCZ9oPxO0dZd+CsiaBTuMp4TX6218NIWNGYixaVNCOTKT/i -nuE3naOGRZ++y/clYrrni18WZC4jqTdZn57Bho7gf6nR9hFt2FaIUPagS37oWKF3 -KwpsbWEVMcdjbMUGuG04WclzATUbqJt+m1ueiK4slb63O5Z1 ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/agent3.example.org.issued_by.agent-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/agent3.example.org.issued_by.agent-ca.key deleted file mode 100644 index 4bf1e771521..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/agent3.example.org.issued_by.agent-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCvaJCmw0/fy9rm -PGtda/9wOlaqiozKac51fJbfDcdiTV9kAdoln28td4vAXqCckIWhdeSAyF1MhIpn -GSsJuklsHTJ2ZfospNwDY0NqYkejAzT6RM/jblY+D8/Cyp37UjoLi1Ri8PEbnjbg -YMVKn51nZlBexHp1CpFBvjm1nd7rt3LlwZ6Hgbg9aLSJYKWucvsFNteeDuqte6fV -+ypO/VtDGaLJS9e/3X9kNt7lU1tTBLyDKfUOhpDBR2+fuoWJ9YADFiyK7AadAV+d -+nlep2UG6MN8L0H0ZgBp73Rpxb9tgIWristMboUvQghFfZ4n/KTa4s6ooUe2tzMx -FGsqaus1AgMBAAECggEBAKQDEtZLY8EvnNORBRcKZ4MpmWXHPRP60Qh74ZpZCOi6 -zgn1zWNGa69Iy4ypLmaWn8y0Ll/YeZyVhdAW8W4cGJEVz9xzR/Q8hlhIlX3AvZkH -455rfzL6NXu8TxilWgjn/sxSkCoffmLDki5FbdRz4LsRKO8jqhkMM3wUgWN3ZTxi -9HfvO9cqKX49TRECs8i6o0vjSBY871nBXS0PQb0IRyEPnidk3fMl6KkYzAy4VQB+ -vlFUnGTFUx9wStAEIG04v+X2yBXiP5KeCqAwikCB1dHZU0iXliadnjn1LD6An86p -1kkUtZ54crR4fz/SELYLivApYLDPidVcAApS3kZCN8ECgYEA2tSxzx+4BjjnPIga -2hzVX4QE0qsB+rCsxBKFPRvLWu3coBdTor+t4S5Umft9nZtyRRa72JOZUuG7yjpi -rDxqPo7s/3hR42aelH2oYKTlz51oD2gVj7S+NoIZGynyo8EJZvN4PmAcOU4cQoYK -Xyq2LPpdM4ng1KbhGKENYX5ToGUCgYEAzTPCEeUIYLLb+92p4zMkLkmATtQOhcGb -v2VHWeeK95NEwlSlUDnmfEnJzOA5jcV1p367iC1u4+SAqX4poUrMbQtO6YjaLJ3n -lQM9WDM4Tk5n+K3xCWFbcYaaBvfoVmJ6IdxmqN3O44osRzX1nEZnQRbADBT+/ncP -y4EMBOjCqpECgYEAvt3JgQj/b53uQqPMF/YSPc9ejYlOnqO+7P3ibNxzUIorZw8x -icB6HvBUJkJu3CsbTht4GH0UvG/bZZXrPqMrYAk1udXuRtPY+Vdop3LsZ1u5ycZV -GBTj25M55rF9y0qxsrQlavJVa74vc+6J2VvdrflplWxrR8+OgfbnPuP6eOkCgYBB -Jgicc9G4GbEcAuE4H/mBtJZG8K53quNO3dHMamXFen0fEY62rNtPZIHsSNgc+a8D 
-8YmKj5ogQmDS7xo6eivzBt8T5/4UokFvHuswAHsE6YG7A0Z+iulkXq6yPa0HGBbc -f0QcoudXeHvcU/ZZmkSX9qgcbeITWiuqmZr+SAPd8QKBgCmQbKVYf0DZ45duXrl/ -UaAg4TNOgbXKP5sCBMA3Wl4gOItL9umJFmh2QHZRaieFt7qlYCYsLul2CzOOaqqC -nA6US7SMpq5uoxiimwjY9PUF9WDzwWFFXxYyphOZI5fXKgXCBOSwBMBPioYWxhQK -amoP2hwuVMpudtS2nZOtnPSJ ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.agent-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.agent-ca.crt deleted file mode 100644 index 17c35320ba8..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.agent-ca.crt +++ /dev/null @@ -1,24 +0,0 @@ ------BEGIN CERTIFICATE----- -MIID8DCCAtigAwIBAgIBBDANBgkqhkiG9w0BAQUFADB9MSMwIQYDVQQDExpJbnRl -cm1lZGlhdGUgQ0EgKGFnZW50LWNhKTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFt -cGxlLm9yZzEZMBcGA1UEChMQRXhhbXBsZSBPcmcsIExMQzEaMBgGA1UECxMRU2Vy -dmVyIE9wZXJhdGlvbnMwHhcNMTQwNDA4MDEyNTM4WhcNMzQwNDAzMDEyNTM4WjAe -MRwwGgYDVQQDDBNtYXN0ZXIxLmV4YW1wbGUub3JnMIIBIjANBgkqhkiG9w0BAQEF -AAOCAQ8AMIIBCgKCAQEA5K+/ngO2GiDGyd33t8jROHqeHEFrEtm3+T7ap6BQNj7+ -GcOf5GB13SiBLoyWaS2wFCBOJbqlXwwjfKPbYyWxCMk6xKYAhmOC7he1auNC5WUa -TAkze0lqnyOuJCfIgGEJm7AGZZ72WIP60u60+YFaXcGkWMCZnK+7gpHtfaMdkV+P -/6Int1DRikUGYCFBqZoEi7S8aaOO5S9/426+AYeUCSv7huIRY2i5kzfbo47MhpNI -uQEIroKDH35cpXkkAqV+/bttfkGBGNPdv5ccRfsoWs08HeU/WyZGPeLBzYUi74YC -GjNsKwfn65sJSnYyQbSbtEcUSvXr8SOPNaX/WCMcPQIDAQABo4HZMIHWMFsGA1Ud -IwRUMFKhTaRLMEkxEDAOBgNVBAMMB1Jvb3QgQ0ExGjAYBgNVBAsMEVNlcnZlciBP -cGVyYXRpb25zMRkwFwYDVQQKDBBFeGFtcGxlIE9yZywgTExDggEBMAwGA1UdEwEB -/wQCMAAwCwYDVR0PBAQDAgWgMB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcD -AjA9BgNVHREENjA0ghNtYXN0ZXIxLmV4YW1wbGUub3JnggdtYXN0ZXIxggZwdXBw -ZXSCDHB1cHBldG1hc3RlcjANBgkqhkiG9w0BAQUFAAOCAQEAZHIF2DB1ogCiTfcT -mM50IxDsUGWit3916FdIZEpgUcp+mMd0vpKejzNy/G1iriBErHujwxUPJPCFkyZx -5Uly8PoTykMGZPFDI3OaDnfsWtX+JV7P23PKgKYsUea2ewj+kUVWkKI2GhLV6+We -pWrzbUFFfATjMpda2k9f8AaGT5kU3L16KrCa6X8tuE6FTeMrkYHZxe72rufZOWw6 -XjSJ2wqxHDp+iiZ627Qn6Z1DA3b2XowCPPIaVnmhlawhQKDGrrh4HfuqQSI5LpwP -7Wx5oNNSC7qGJCVFWAamkQiy7JsPrrM/egNDxSV52Zkhp8AjDrYn4CTrdbd1CINO -eS/Dtw== ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.agent-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.agent-ca.key deleted file mode 100644 index 42ea4c82847..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.agent-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDkr7+eA7YaIMbJ -3fe3yNE4ep4cQWsS2bf5PtqnoFA2Pv4Zw5/kYHXdKIEujJZpLbAUIE4luqVfDCN8 -o9tjJbEIyTrEpgCGY4LuF7Vq40LlZRpMCTN7SWqfI64kJ8iAYQmbsAZlnvZYg/rS -7rT5gVpdwaRYwJmcr7uCke19ox2RX4//oie3UNGKRQZgIUGpmgSLtLxpo47lL3/j -br4Bh5QJK/uG4hFjaLmTN9ujjsyGk0i5AQiugoMfflyleSQCpX79u21+QYEY092/ -lxxF+yhazTwd5T9bJkY94sHNhSLvhgIaM2wrB+frmwlKdjJBtJu0RxRK9evxI481 -pf9YIxw9AgMBAAECggEAHs39xc9CLGrV3ENmJv+KD02VFsFJJxTkz1JEKehSZbw+ -hkHvu9eyiMP4Ask1SZ255jwjyrlFpXQBI7z+xHBDVKemnu6ti2zEKkZPTcKnNDdW -P3/Ny4GsNxubTYt+Wqy3mp0vf2jWTj4Y/2jgP+vIvHBlLv9isdSKMaBumUnrpbU0 -fnWJ5fz5yyNoSaaH8+UxOJz/EMOV/Crc2KuwULCpPWbJXVVFhgQ2o0oCjBC6RGUx -C9RyjQCOiEWI7Rt9IeQAqmBqIP1TFZeWlCiJEXIQi7P7y2LtpbCSu2LXU7bsTKxS -uXVGmiix5yNiyzljsGMDCwBiAzoKvA8x+kEv/Q9qgQKBgQD4zF5vyuBN+Palgcep -aAn9a/og6kWLYslPNEvWYkkSWi8R+hlWgC1W5LUXpply7Pj8ZdR82McPrRjDXzsk -wz5MofEWkB3O/qgOjY8BR7N1pMddBRV+eVvLZMTnyaCdkaKCqo/L1a18ITywpasl 
-AML6JnUJLMUdspeQPRJGdyf/hQKBgQDrTlkqxZ0tnME6yqpHzS/WAYGMIpxWzPWH -sY6E5lINvsMuy5OjPtE7Qgl/fbu/wjdt0VUw+BeuHbgJP55S1vmfrCSvRwHSumXq -7Oyx9fQfz4bBiV0BPbXSE8OlKd+mFsQUq2BZA12NFfWpw6R1BG6wkyxB/qN3psUf -SpwCPPtbWQKBgGzb5todN2WGcEzcawMJfY1/qDKsrn8dWx2nsSL9YCGCiiPgfSOm -86+YZFAT2gI6A3tUBtQVeYu3XiVlzpf4QZMALF+F8TT3oYq0j4Ss3a20ynI93ji3 -Wt20mp8cToWDCksH2+EA/mZYmJhl9Gs9WRJgr0azaw/ia4R15EAmtL6NAoGBANN1 -Wj5Mzn5iA2ghHrtd0qJAQjo8FEEQ+YlS8B/Ql2aQyvivq+KlHgRhyQVQabve/k7r -MuLJ4QXw4SU2jcImPLfYTDNT3XiRV+uxtNgMjrFTnucn2XO3EEFUz5oKslig1t84 -T2JO2vBiDp3cGluqewqlh71a524sabNAKkj8omopAoGBAOPfjxvmQprID0pID9xE -Qe4+ll+BAUQN1W9wwOazwMSON2WOOm+2IL7RPgyNgFWeMzBM0CpBq8Pve+G3N+xr -RrOmsAJdofK3xM0UCnoWLazNuoJZvuzyRyqppIiG2DT6aBVpZ9Rgcy7ufdQmj6tU -YgvO+lsMalyGDMSP21hWtgxN ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.master-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.master-ca.crt deleted file mode 100644 index e79ab8bfd8a..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.master-ca.crt +++ /dev/null @@ -1,24 +0,0 @@ ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MSQwIgYDVQQDExtJbnRl -cm1lZGlhdGUgQ0EgKG1hc3Rlci1jYSkxHzAdBgkqhkiG9w0BCQEWEHRlc3RAZXhh -bXBsZS5vcmcxGTAXBgNVBAoTEEV4YW1wbGUgT3JnLCBMTEMxGjAYBgNVBAsTEVNl -cnZlciBPcGVyYXRpb25zMB4XDTE0MDQwODAxMjUzN1oXDTM0MDQwMzAxMjUzN1ow -HjEcMBoGA1UEAwwTbWFzdGVyMS5leGFtcGxlLm9yZzCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANtT0Se0OBG+bU3ZbZ2IxiSKNs7ZxDBoyXVeVGvOvEQW -56TkHnYdoJ3bn3zLctAoWMggv4DxO0nncmVJYbFoeZo9n7viUQdsO8+hTWVJCjov -uZYNO88Q5NE4zP/Pi9IWigOzjNMl959ItGI0Sr/aPZUpsc/V6eEpyY0eREGG6Ixa -eeO2z/kU4mqO9CK4VzNxfZQqAi0kJEEp2gQ8Ax0gCXee4gbBF7zvyi6467Q3hJTf -413cL0jMIPHbNiyXdLlzjtmkYDL9mjnXbL1W339twBgPzs/ZjDqR4IBK4Fzqakoz -WvWbp1aTYkRqSBiNRHtiQleCXG7JU6FDeF/wzXXWkWECAwEAAaOB2TCB1jBbBgNV -HSMEVDBSoU2kSzBJMRAwDgYDVQQDDAdSb290IENBMRowGAYDVQQLDBFTZXJ2ZXIg -T3BlcmF0aW9uczEZMBcGA1UECgwQRXhhbXBsZSBPcmcsIExMQ4IBAjAMBgNVHRMB -Af8EAjAAMAsGA1UdDwQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUH -AwIwPQYDVR0RBDYwNIITbWFzdGVyMS5leGFtcGxlLm9yZ4IHbWFzdGVyMYIGcHVw -cGV0ggxwdXBwZXRtYXN0ZXIwDQYJKoZIhvcNAQEFBQADggEBAFUas+1NvtqTsT8X -CHiwL/njj7at7V6BsF5yw/MnJ2oEwkJpfsp7J3aB/R1s5bxjtxOJ5fVzED3L0uIf -we29p16rdSeINn9D/LShF7SUFIB3GokT/L5gHgYPLGH4itmz+GKul6qBdt0bOydM -1CqfKTmMEvH0sicEDRFIxji+dfrS6lPhdDHkdKGJeEWpNuATYmw24NYOIpO+4Bv7 -oVXn5hoZp5VzbokCzVha1hlsUeG+wp3GnOoN2aaAm3LZNqKLhm5dKoNeRtECFEOu -+GViwgc9RG4GN4jNDGU03+z+SMozlt3cc+osIxeOKExiK2dfhJwA9Uj1uvYYnSuy -/hHeAt4= ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.master-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.master-ca.key deleted file mode 100644 index e708e042afb..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/master1.example.org.issued_by.master-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDbU9EntDgRvm1N -2W2diMYkijbO2cQwaMl1XlRrzrxEFuek5B52HaCd2598y3LQKFjIIL+A8TtJ53Jl -SWGxaHmaPZ+74lEHbDvPoU1lSQo6L7mWDTvPEOTROMz/z4vSFooDs4zTJfefSLRi -NEq/2j2VKbHP1enhKcmNHkRBhuiMWnnjts/5FOJqjvQiuFczcX2UKgItJCRBKdoE -PAMdIAl3nuIGwRe878ouuOu0N4SU3+Nd3C9IzCDx2zYsl3S5c47ZpGAy/Zo512y9 -Vt9/bcAYD87P2Yw6keCASuBc6mpKM1r1m6dWk2JEakgYjUR7YkJXglxuyVOhQ3hf -8M111pFhAgMBAAECggEBAJxG78wTnLP/9NA4seNC9rRIi17+Sc2YjJuFmC+tAfae 
-P3X9WTseRzjTqaN5L5jkdsY6l1mgCXfSY2+KRwLrB2KAsFVmoAfi9gcuzv/xeEkX -gmxJh6k2R2RQzbkkwGL0zmhuwlQdRICJhIZI7k4fiivDpsAJkvluFf/oZgguwXpn -F62e7nM2rV4ApH8wN9wixFeAONv9GxiTxjLCYWIMeDP9ETnIsMPTuzpbjHn1cXDt -kobmRma93jUzJK2wtsyrvsj7hvYPV+EzHhO8N+VK7FfZ90FBbWQDM+nrxOePVmsY -t6KYpVh+B02UtEuVwg+qc7E2bhSxQZzhuuTy27DszWUCgYEA7iIDJtxR8rcNAr5p -nMrnJ/ZFtzUMxk1K77hPWN3dLhT5nr70WDUmg2xaHyS8VDk9sIyERCOt+fyngj9Q -AWeukD7xwpLzZ0oivK2btT9OG9OCNZYeu4NoWX3ocI2GoHsV3TdLVkSKsWv9Z7EQ -EXBkFAGgrclMpWiyGw6sbuKBq68CgYEA68iYUhq13+oTrX2nMBjkah5YsBpInNng -B4IOuvcfUf6gsewLpbkcpg4UfUqQxoGsla4mK/5Gd9uIWRENsBZB/ZYKqjQW0bfo -kyTXXJy5Phkh/oo4bgSVWGIIiTI3F6tuXu6X75HTiXghm3m/87X3p8AhZk8MURQW -dLePVDAih+8CgYEAvylqok2HM3Ki7SryGT4A5mGagYICqUXu/BVXDR29qnqIEFl2 -SUERk7rtdcbFsE7rKMkEfLavuNiLl9E/ZoFW7tC4vtu8rZQj4pbzQkJ5b3kRM/c4 -4IqSwBSE/aV/B2EHojf7MFuBgwAPwqevIHC6xhywYhIQh1BOec4Dulf2hF0CgYB9 -2R+UEzWoQiQmob6u6VphWbk0pZLERXZSC5UZLfXFqgbTcI328orcBv/gr//+NBCO -A9nT+XBbYQ2xnGyV5Ats8rzWg976KRM2Fp/siqpE/t0qI1RjRIcCGbE8qVTGiXXr -raXi9Q7XfQtTFPTje+in3OD23pJQZExoF+GkqdyEeQKBgEBm6ZzuXYn9hkoBySK8 -O2sFOUJLE3ptdEdzBHGu1oZNgrTIVIwSykmMwzRtLdJz12gvHs5+hqdvROzZGdHy -HAXsEzv8s5RTr1cUGUcCueBiFeiOfvIu6YsFl08WpSIya4bGgOLNRojxvqcfpjn0 -nyYXiflNy9ffvLvyXKdq0nyW ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/master2.example.org.issued_by.master-ca.crt b/acceptance/tests/external_ca_support/fixtures/leaves/master2.example.org.issued_by.master-ca.crt deleted file mode 100644 index 2e75b22fe12..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/master2.example.org.issued_by.master-ca.crt +++ /dev/null @@ -1,24 +0,0 @@ ------BEGIN CERTIFICATE----- -MIID8TCCAtmgAwIBAgIBAjANBgkqhkiG9w0BAQUFADB+MSQwIgYDVQQDExtJbnRl -cm1lZGlhdGUgQ0EgKG1hc3Rlci1jYSkxHzAdBgkqhkiG9w0BCQEWEHRlc3RAZXhh -bXBsZS5vcmcxGTAXBgNVBAoTEEV4YW1wbGUgT3JnLCBMTEMxGjAYBgNVBAsTEVNl -cnZlciBPcGVyYXRpb25zMB4XDTE0MDQwODAxMjUzN1oXDTM0MDQwMzAxMjUzN1ow -HjEcMBoGA1UEAwwTbWFzdGVyMi5leGFtcGxlLm9yZzCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBALADxMRg/KiG3QDVO9RbJ8NlEF+DovPrc1GmjYlSewEj -3mYf5v1a/TTHZOeB+iA51qFW7HGhDJltBZ/0J71y2A2x0nVbGAW4EkTfryKaCBBl -fwE5fv2Xm6fJB3YAYlPUw+WAZ1HMLrBtqkpIy4xirAy4MCirLBnKxBAjT8kk1f6n -8fV7l3Sz1frtl5ONSaiLsCzDBDfGyp9HDI9X2ABzRIh64SnDCWDq9OZ5UkM3gJmh -vZGT4VhwSJzPJohMPOEgzdKHjW5aFhx4FIHhmNJ0jucEXjNtF270lHjBqjfnsrKF -xHah2dvhkr6PjAOUR4SM1hCXSru5Msa2huBD0Q5vh0sCAwEAAaOB2TCB1jBbBgNV -HSMEVDBSoU2kSzBJMRAwDgYDVQQDDAdSb290IENBMRowGAYDVQQLDBFTZXJ2ZXIg -T3BlcmF0aW9uczEZMBcGA1UECgwQRXhhbXBsZSBPcmcsIExMQ4IBAjAMBgNVHRMB -Af8EAjAAMAsGA1UdDwQEAwIFoDAdBgNVHSUEFjAUBggrBgEFBQcDAQYIKwYBBQUH -AwIwPQYDVR0RBDYwNIITbWFzdGVyMi5leGFtcGxlLm9yZ4IHbWFzdGVyMoIGcHVw -cGV0ggxwdXBwZXRtYXN0ZXIwDQYJKoZIhvcNAQEFBQADggEBAB7pmCkatEPj2UTL -OO0/xcYXog3NO+90ZEKkUoQpe+cVEkOdj1hZQ8OLK7sennVqww8Gc6vRjLve7t8W -ApJevYCcLAx/fNdIMpya+8ZPn+ybm1JY6cAwUauJ4BdrqhQM1zdKwbleSCcXW2RQ -TtsFBtcnlNhd+f3lcEIjM9oI/+UlituQ0dZRiWTYbswFer06DwJWQywiWMr0oZip -P+fzwwewb4ri5L3i683ZIpl9MuShEqpM0SPNl+XdmnZn0jDNK4bxOezt7qwZl6T4 -JbRshdQ3ClWa2CcAdJgawfd9VRZC8GWQ5PZYNr5rWwLtrDBX2Uf102O67TXbNOnn -yg4Ci+8= ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/leaves/master2.example.org.issued_by.master-ca.key b/acceptance/tests/external_ca_support/fixtures/leaves/master2.example.org.issued_by.master-ca.key deleted file mode 100644 index 7c24a91f786..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/leaves/master2.example.org.issued_by.master-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- 
-MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCwA8TEYPyoht0A -1TvUWyfDZRBfg6Lz63NRpo2JUnsBI95mH+b9Wv00x2TngfogOdahVuxxoQyZbQWf -9Ce9ctgNsdJ1WxgFuBJE368imggQZX8BOX79l5unyQd2AGJT1MPlgGdRzC6wbapK -SMuMYqwMuDAoqywZysQQI0/JJNX+p/H1e5d0s9X67ZeTjUmoi7AswwQ3xsqfRwyP -V9gAc0SIeuEpwwlg6vTmeVJDN4CZob2Rk+FYcEiczyaITDzhIM3Sh41uWhYceBSB -4ZjSdI7nBF4zbRdu9JR4wao357KyhcR2odnb4ZK+j4wDlEeEjNYQl0q7uTLGtobg -Q9EOb4dLAgMBAAECggEAOUlgt1GhqfAK6gR8robkkDHKfGvJk33kUITWuAUCR8K2 -1aI4eektVAo5iyfAktCATI6P8gOqqBzVodPDQ2Oncf2YM93sGaBpQcVTqP1c9oky -JuHhtQOtA5ToOr2xQvuSrpV2CydbYsyuifLbXNuYh6NC+VmMxvA5k88KXrsAcxsH -wOl0DxTw0/enPCdxK1HzV37Vgq0yaJtkm1+NkvYQREcbZz0TpnUX3qSaFRv+XI3t -lHbLgCZfW5syUgibZKYmc2KHJ7WUpnjc7joYooc+cjQ/eJ3skcPu0JqaJMH3Lzmf -4dfgUJ9CIyyneuxoB4kxDPKYnsJ7l8ttKd4tR155UQKBgQDgMzXUwfmskGtgF7mq -+xcSZrQ4mNp4qcyScs1ALX9M41Tv0r3HzottvXjtt97Nfw7CyWIUsk2Z11xqp3S4 -NQv2t2GK0O8Axzte5nt2pG4E8phigj8i8cqxDyeRqJEJzaZ7d0ycDFcMsY9yBuzB -beu+BZnbfxLaGTAdouTmcqp9/QKBgQDI+u5NoBy/C2Zn7PK8oAYkoiVlFSg4cfz+ -rM1P5xKOslVzMjFpkU7djYYJdBwk/rnvo7p0BAooIm6bK6BdUKHx8qR0mxroD64c -O+ZTw5gobyUIMAvtb3/c4IYFE5kOCBN0vRXkxlpQoyJlH61eh+mN4CvpNSvgAfU/ -uMIJxVK45wKBgAo1SrYtPtkEml79tY57cEl+M6aADzJ/SLlCUOYjLDCsCSoCRnHa -ja6peeAZByQELP/he33FwJLIo4SXq1DX2XuaTe6sJLW8XI+tnuGACyJeiWGy3H0S -1U/KiNqw+imgG3xzqiFQzoXdb9KZTh5giuNUDfVYi9syWmZ/DD0R64FhAoGBAIsg -6obiKAQ4qBOZfBAH1zjU5n4rP0ST5EeI6DhABiCyY2TzgjFKXgYRSpCQyErz61Nx -TEmz4jQWYesZMFJo1zLefeoa1W2ALU9uR+wEoviauFAKsrcTULhe1Wf+QR3tiUpu -+xD8HtljlRPb8fBWtd95J+yw2kagFF6gT/LjnnhRAoGAW6kgmk82hUCIqX/uEqW7 -DEsLy9mbbFuWjoqt2ma+6Urx66muX+SMQj5pwd6JnRRSexWVAxxNuKuYUq1ZOhom -e4+/+VfkiFzPzVCA0VQ1YRNHt26Uxvr3ovZ5Y50u4FH2hypNSHLMFZV6T2EWt4Nd -b5Lr8vvMoJlCVp+O3AvF154= ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.crl b/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.crl deleted file mode 100644 index 2213db67394..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.crl +++ /dev/null @@ -1,12 +0,0 @@ ------BEGIN X509 CRL----- -MIIB2jCBwzANBgkqhkiG9w0BAQUFADB+MSQwIgYDVQQDExtJbnRlcm1lZGlhdGUg -Q0EgKG1hc3Rlci1jYSkxHzAdBgkqhkiG9w0BCQEWEHRlc3RAZXhhbXBsZS5vcmcx -GTAXBgNVBAoTEEV4YW1wbGUgT3JnLCBMTEMxGjAYBgNVBAsTEVNlcnZlciBPcGVy -YXRpb25zFw0xNDA0MDgwMTI1MzlaFw0zNDA0MDMwMTI1MzlaMBQwEgIBARcNMTQw -NDA4MDEyNTM5WjANBgkqhkiG9w0BAQUFAAOCAQEAYMRAzRxGX0SaCUYZKNn25JyK -uNoT91hdmUuSYnIiHkNxft/8x6EKSllrc1nzn0/sBBfzZ1VAK6tZTPHhWZ3m/JDy -B40ovaz/jgQEowKf+LL8lI8wVqa3ycwAclpACmvMpwhVIWTGOSI5kDA3+rI24J+k -z8mlTWJ56JcdWlbniZ9TPR73LixUvBq8t1dZGsj8Xez+BHcgcdT79M7HMuGgXws1 -JHbHc4N8s5Ursp6np6o56YRSU/24kH/Dzp+4ddSGV+nTEX8y/tQsIgSLlZ9fJoQi -HSDe5/yTzhYVvOyW7ZLDvuy409kyNuESsur3iTKLPLQNFQwMsM++yt2nKWOzcQ== ------END X509 CRL----- diff --git a/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.crt b/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.crt deleted file mode 100644 index 45a03709d7b..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.crt +++ /dev/null @@ -1,23 +0,0 @@ ------BEGIN CERTIFICATE----- -MIID3zCCAsegAwIBAgIBAjANBgkqhkiG9w0BAQUFADBJMRAwDgYDVQQDDAdSb290 -IENBMRowGAYDVQQLDBFTZXJ2ZXIgT3BlcmF0aW9uczEZMBcGA1UECgwQRXhhbXBs -ZSBPcmcsIExMQzAeFw0xNDA0MDgwMTI1MzdaFw0zNDA0MDMwMTI1MzdaMH4xJDAi -BgNVBAMTG0ludGVybWVkaWF0ZSBDQSAobWFzdGVyLWNhKTEfMB0GCSqGSIb3DQEJ -ARYQdGVzdEBleGFtcGxlLm9yZzEZMBcGA1UEChMQRXhhbXBsZSBPcmcsIExMQzEa -MBgGA1UECxMRU2VydmVyIE9wZXJhdGlvbnMwggEiMA0GCSqGSIb3DQEBAQUAA4IB 
-DwAwggEKAoIBAQDTgKLGBkExFRXrQJn/lHE4XHkN2nXwJpS+y8bWqHiBdq5eZ8D2 -UAILOBaALeQN/1d1J4yrh6w/YK+gRtCLn+CslR+9NW4AgShALi+r26DK9ZRk4F7V -Dk4yEjNpmTyVRyP8w7iZwasZdyK04xAhj+yEInz29SLxmh1jddts/rjqLMZW/s0S -T+E9XSEDYNVprC5VuYutUuHKah7AYSp07FHNsqDg+y+vCRezSqbrHrGpTwMupVmD -2ObsSJntghsLzPwjSGhbo6e8C/TDwrPtm6az9TPKbsUrqjdvyZcSfc5Q6OgExNhg -zWQkk5PqFOESsQSBfOOn2eqfqBXHUnH9PCNTAgMBAAGjgZwwgZkweQYDVR0jBHIw -cIAUFq+AJeP66ki/kTNmAf1R7yRnTGOhTaRLMEkxEDAOBgNVBAMMB1Jvb3QgQ0Ex -GjAYBgNVBAsMEVNlcnZlciBPcGVyYXRpb25zMRkwFwYDVQQKDBBFeGFtcGxlIE9y -ZywgTExDggkAsbkEcvsRJ+MwDwYDVR0TAQH/BAUwAwEB/zALBgNVHQ8EBAMCAQYw -DQYJKoZIhvcNAQEFBQADggEBAC4keJ+jeGh7/EWwsCKollYW7H4aSjPu/Ufe38hH -pEER9FyCqJ0jo+MabOx8l1F5ySNWngB0qbJuA/kiV2gJ1bQ+mE2TN88x6Sz12eol -ifhFU0PazGdpNQRhpQxbwJ7tFC3Z8WrHEcVqP9iicNWqSI/QkqXsCk4Zyezpx28W -sqHylf1CiBOU45FJdDXRg80mk6WOpNZR8HIUdqQLQDXz0FfXeFKmVteatmc/yrGG -5iHzMkH4Vz5laBjin9s1p+O8Z7+cWtJNWfXaULAEecZQ6CZ3V1OVOjhsrL28iF7C -kx01rSrsxaFclDalJVmKmO2spHvNmQTlWD6jm5d94WaRyXU= ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.key b/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.key deleted file mode 100644 index daa4d6d74c2..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/master-ca/ca-master-ca.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDTgKLGBkExFRXr -QJn/lHE4XHkN2nXwJpS+y8bWqHiBdq5eZ8D2UAILOBaALeQN/1d1J4yrh6w/YK+g -RtCLn+CslR+9NW4AgShALi+r26DK9ZRk4F7VDk4yEjNpmTyVRyP8w7iZwasZdyK0 -4xAhj+yEInz29SLxmh1jddts/rjqLMZW/s0ST+E9XSEDYNVprC5VuYutUuHKah7A -YSp07FHNsqDg+y+vCRezSqbrHrGpTwMupVmD2ObsSJntghsLzPwjSGhbo6e8C/TD -wrPtm6az9TPKbsUrqjdvyZcSfc5Q6OgExNhgzWQkk5PqFOESsQSBfOOn2eqfqBXH -UnH9PCNTAgMBAAECggEAOduOq606WDr0bA6YEhqBfnGNjASfDrr/JdKI6l/b106o -FJs9ZiQs6vTE8vuaz1lyi1HuYV4T+KJyaN32Xru1soB41kGvfePXJvd7gv/p1Jup -pI7cYlwvd/MSrWiF8jAr4BGKhkU8kRLOI72ZxBSjFuJ4XPZiQMVwFK1BAWgNHcHy -CU13+4sMI7Er425g5pMkv6fWkp6HTRF8WE73e68w1NOvc/6WyiQS9iytNhOR8TUl -vJkFyhcAbg7alsEfQ0Ove/co0yCzbXF85sRYr1ANRTp/y6RoMRXA79qCB4BS2ke6 -UUUU8F+hnmNNVjFAMlojC/yConEyDj8fsh1QoqzNQQKBgQD+5+6cgT8xJpM/Woji -ZogjNoNmbx/G3J00TEhR3vp7Frl+Lvec0OkvQlrof+yfEFDiYpeR8vgjfW9otlbJ -PcnV2ZKRspy4wFqDnP4JQW0hz39uOvZeIbNcIFtI1QaLc29TZ9NpAwiAwD4mm0fr -l/MLVPHtBh0Pn8V6G89lN2cWfwKBgQDUaQQP/s0t/mF6GOAcwF62bxw9T7gMBuu9 -MOE3ehchBjAFdLKn4F5STfxgIsJ3DlMiQdj4EjzM9NsgAA4akRVGDNeypMdTTskr -WbL3188RU2Kfbr07NWce22720mTzNNPf4SZZ7S7BYNYdx3Pg+uFPGvT+L61BXKB+ -3XmktIFRLQKBgEZ3TUIchKCMvmXkHDUiDP5XAoodrBYoRJkBgXiIgIvoY0zbpP3H -zr6OxFzDiGEMfJ+oi6gAF9KPyW2UAnfNSe+BHwCB9Msp+ZuqmZtrZ8TR/LM3P4YU -IOYV18ZRhWaleZ//8rRz7zTQcB3yDTa2oax/RgW1C/GWxFuWNoYPg8L3AoGATtHV -RxY3Dw0pIMSgTSr5OltKsFmnoxbiXWcI+6wbbw67vRqLvQ3eEk6MCyR64vD22mxy -1cL2uttVgK9kjGkv6rmTROAucvR2Y9a6OYvkc7psi1rtb04mwOWObJqwCSueP6Jt -t8ryoaGMcqpzjpWqC8+l/7izYuC7UbhfjkXJAtUCgYAJBcEkCI8GF0+k2qaf2CNS -j8U597tvsrKCFFO7avJWAybZFoXd95/8Aa1SO1Qk+0rm2UVmSRd9YkmmN2UBrxHi -mFw5D92/PI4fLYh9pFOpWs+90CQt1BDtu+nIcGNP3JgW5AGMkv9Boqy+ENdLNJ9t -Ta283HvVvIkM+0Hrd1Zn6g== ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/master-ca/openssl.conf b/acceptance/tests/external_ca_support/fixtures/master-ca/openssl.conf deleted file mode 100644 index e03712e95e0..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/master-ca/openssl.conf +++ /dev/null @@ -1,96 +0,0 @@ -SAN = DNS:puppet - -[ca] -default_ca = master_ca_config - -# Root CA -[root_ca_config] -certificate = /tmp/certchain.KDOYxTc2/master-ca/ca-master-ca.crt -private_key = 
/tmp/certchain.KDOYxTc2/master-ca/ca-master-ca.key -database = /tmp/certchain.KDOYxTc2/master-ca/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/master-ca/certs -serial = /tmp/certchain.KDOYxTc2/master-ca/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -policy = root_ca_policy -x509_extensions = root_ca_exts - -[root_ca_policy] -commonName = supplied -emailAddress = supplied -organizationName = supplied -organizationalUnitName = supplied - -[root_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:true -keyUsage = keyCertSign, cRLSign - -# Master CA -[master_ca_config] -certificate = /tmp/certchain.KDOYxTc2/master-ca/ca-master-ca.crt -private_key = /tmp/certchain.KDOYxTc2/master-ca/ca-master-ca.key -database = /tmp/certchain.KDOYxTc2/master-ca/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/master-ca/certs -serial = /tmp/certchain.KDOYxTc2/master-ca/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -policy = master_ca_policy -x509_extensions = master_ca_exts - -# Master CA (Email) -[master_ca_email_config] -certificate = /tmp/certchain.KDOYxTc2/master-ca/ca-master-ca.crt -private_key = /tmp/certchain.KDOYxTc2/master-ca/ca-master-ca.key -database = /tmp/certchain.KDOYxTc2/master-ca/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/master-ca/certs -serial = /tmp/certchain.KDOYxTc2/master-ca/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -email_in_dn = yes - -policy = master_ca_email_policy -x509_extensions = master_ca_exts - -[master_ca_policy] -commonName = supplied - -[master_ca_email_policy] -commonName = supplied -emailAddress = supplied - -# default extensions for clients -[master_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth - -[master_ssl_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth -subjectAltName = $ENV::SAN - -# extensions for the master certificate (specifically adding subjectAltName) -[master_self_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth -# include the master's fqdn here, as well as in the CN, to work -# around https://bugs.ruby-lang.org/issues/6493 -# NOTE: Alt Names should be set in the request, so they know -# their FQDN -# subjectAltName = DNS:puppet,DNS:master-ca.example.org - diff --git a/acceptance/tests/external_ca_support/fixtures/master-ca/serial b/acceptance/tests/external_ca_support/fixtures/master-ca/serial deleted file mode 100644 index cd672a533b7..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/master-ca/serial +++ /dev/null @@ -1 +0,0 @@ -06 diff --git a/acceptance/tests/external_ca_support/fixtures/root/ca-root.crt b/acceptance/tests/external_ca_support/fixtures/root/ca-root.crt deleted file mode 100644 index 5f1c9dd4671..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/root/ca-root.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDZTCCAk2gAwIBAgIJALG5BHL7ESfjMA0GCSqGSIb3DQEBBQUAMEkxEDAOBgNV -BAMMB1Jvb3QgQ0ExGjAYBgNVBAsMEVNlcnZlciBPcGVyYXRpb25zMRkwFwYDVQQK -DBBFeGFtcGxlIE9yZywgTExDMB4XDTE0MDQwODAxMjUzNloXDTM0MDQwMzAxMjUz 
-NlowSTEQMA4GA1UEAwwHUm9vdCBDQTEaMBgGA1UECwwRU2VydmVyIE9wZXJhdGlv -bnMxGTAXBgNVBAoMEEV4YW1wbGUgT3JnLCBMTEMwggEiMA0GCSqGSIb3DQEBAQUA -A4IBDwAwggEKAoIBAQDFDXbR+00AwXM+HuMIpw8eVWBzQWBqDCYkX3IvYRGj+w9y -7AitrN+J0MZE3pbaRvlH5wU7MShFOmT0k/B/wrylW4W5G/iAtd2ZnXicBPrA9zDU -eHJftQxR7+Qjmsc1BqVf43PUlQITpn1APgXDzPJdk9XbRWEsIycuXkwTXzVND0U5 -z3dGS/oh9yMim0DnF2oQ+gTFA9n17xOD5hBN80U3fn4DXtcFGbtXOj6zBHsxgLCi -leif2AB1oAaZ0lqkwk6Se0rFd3zafYLDAwCPCWlZSfkQ0C/W7WYx07PDRxSYs1H6 -Viz2uHwqzyD6elxvJBGcrLdvDqTLL+w0ag3yMPWbAgMBAAGjUDBOMB0GA1UdDgQW -BBQWr4Al4/rqSL+RM2YB/VHvJGdMYzAfBgNVHSMEGDAWgBQWr4Al4/rqSL+RM2YB -/VHvJGdMYzAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQC/sFnu1TIr -L6HhTft5aUaeLuO/329cDUHxlUppGRYrctkZvYK4b8TBi2BD+tcwRKS1kh4nrQhr -xaBO+oUmyJeNwEPk40trzusV9N9tfqw8drBBXEVZGxrYRYovq/RqLfUQ224EF3z0 -r74dAWL0R80PvVzeJfUsUw0KYgskfLzP5QSW1rrJnutfYP95EMV4yWyrNqnDko3M -v7XENh0TMEolMxPZ+X3TqT6Q0j4aM8njswObyeABslt+nC6nLfgBvgDaSvEULPL6 -u5aWNxp9WudGqGBvHoR6OXdZDRCzWSz52jnvXiZE4E0VnqsWxCmjDGECke4TRoMU -rtMLavKgCsIe ------END CERTIFICATE----- diff --git a/acceptance/tests/external_ca_support/fixtures/root/ca-root.key b/acceptance/tests/external_ca_support/fixtures/root/ca-root.key deleted file mode 100644 index 99de668e4f1..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/root/ca-root.key +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDFDXbR+00AwXM+ -HuMIpw8eVWBzQWBqDCYkX3IvYRGj+w9y7AitrN+J0MZE3pbaRvlH5wU7MShFOmT0 -k/B/wrylW4W5G/iAtd2ZnXicBPrA9zDUeHJftQxR7+Qjmsc1BqVf43PUlQITpn1A -PgXDzPJdk9XbRWEsIycuXkwTXzVND0U5z3dGS/oh9yMim0DnF2oQ+gTFA9n17xOD -5hBN80U3fn4DXtcFGbtXOj6zBHsxgLCileif2AB1oAaZ0lqkwk6Se0rFd3zafYLD -AwCPCWlZSfkQ0C/W7WYx07PDRxSYs1H6Viz2uHwqzyD6elxvJBGcrLdvDqTLL+w0 -ag3yMPWbAgMBAAECggEAPLxxqzcx/NGjm7oZcElNt6PJHr0/4m/sMbnZkBDzRv/T -OzDVW/K0092tvvx8srq9ixQB4MS+DNh61Yfj0P1M+ArFpNCiP8nOi9KlojFGuMN4 -IYUee7FqIrc2MmM6k3WA85U/1n43LadbY7m6PSembIFsoTE5Sklrgjc+a5ok66MG -1m2bZuQnfQs146T5/tZ7cFpvLKL0EUTSFLblXUeugnesQI/z17XcveEm40T0RleU -oKHYeldNYTzcRCRu5riZoOCeAiBeKvmc2Yk8Dx2ump8u2Ir5G6ST0oVP0tZ9On++ -wBfmhkJzN7wmRURk/Ivu1/Kq9/PGsJI4v4TBQbUZAQKBgQDhIw/55KmFbwKUBku3 -Gegf6R7NQjn/HccBlN/zUCfV9IJs2V4J96/7B7S3JYfwoCQp9uhZJ1Qe/aWIY9IY -UmwZLb9QhXWQbuMJtfqgrWy4FoRqAe469Mr+ol44v4j45ZfiMF+BY1sQjZuIdUNv -YrUGkFzj+ETeUUPDPcZHXLDCawKBgQDgEM+wOGBFcBuONm6XpJ/Mp4dGbSqRfRm7 -bo4lqF0UFpm9HKsS6WY/ZsQGOrTq8bsU6aTttPGvenwYm/ySEpI5iBbvXe/fgH3i -Wehd81UkiHO4uzdNgyo30aZRpa1644G4LkBlPKrcU+hKiVZtshZJjd05BQyBLKeb -NByETARFkQKBgC/O/0yEt08DlNuUPq3iTX3BRm42GxTG4QS/9ZK4uczggHXW7vZU -58T6DPE7ghOiHivfJ9YO2Pk/ydAdynapwhTStybFQILsWUAtqcxHJ6gr9/B++nUA -mL7cAgAxBEg+kTNSLGXQkH0CZT3kEO7tWh3LuD4c8Zr4TNiAHMP4tfyDAoGBAKbb -Xg54rRiIl7ybxFeg/G9HAnHrsZuOca8mcSyR6F6hRfOSecMnlED6oleROFENmqfE -JlHuQVzP9cHNx5Rvx/yX35x5c7wYz6XUFjqAjpMaGjMjF9fdKX9P3G9I1ZuB+IPT -zZiWSRayVXXMOFSQHhDAWFGx9NHtExN9Bw9uHHBBAoGBAMfC/cikwe+JjiNRMeUy -5A+KMfKYZ/63UctCZXgx+MbcOmt4LW/JYBGJG9l8ekGZVxprAMTTePsscYHQxPct -O+JvTnDJc7q3Jqmk3+eZMoNkIY0JWMR9qKTy4gmz3NMQetTpP3W3QWswALP6pKOV -5whba0lHsqlXpZzsxV/rcpuk ------END PRIVATE KEY----- diff --git a/acceptance/tests/external_ca_support/fixtures/root/inventory.txt b/acceptance/tests/external_ca_support/fixtures/root/inventory.txt deleted file mode 100644 index 2334e475252..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/root/inventory.txt +++ /dev/null @@ -1,2 +0,0 @@ -V 340403012536Z 01 unknown /CN=Intermediate CA (agent-ca)/emailAddress=test@example.org/O=Example Org, LLC/OU=Server Operations -V 340403012537Z 02 unknown /CN=Intermediate CA 
(master-ca)/emailAddress=test@example.org/O=Example Org, LLC/OU=Server Operations diff --git a/acceptance/tests/external_ca_support/fixtures/root/openssl.conf b/acceptance/tests/external_ca_support/fixtures/root/openssl.conf deleted file mode 100644 index c5bf84bb75f..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/root/openssl.conf +++ /dev/null @@ -1,96 +0,0 @@ -SAN = DNS:puppet - -[ca] -default_ca = root_ca_config - -# Root CA -[root_ca_config] -certificate = /tmp/certchain.KDOYxTc2/root/ca-root.crt -private_key = /tmp/certchain.KDOYxTc2/root/ca-root.key -database = /tmp/certchain.KDOYxTc2/root/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/root/certs -serial = /tmp/certchain.KDOYxTc2/root/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -policy = root_ca_policy -x509_extensions = root_ca_exts - -[root_ca_policy] -commonName = supplied -emailAddress = supplied -organizationName = supplied -organizationalUnitName = supplied - -[root_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:true -keyUsage = keyCertSign, cRLSign - -# Master CA -[master_ca_config] -certificate = /tmp/certchain.KDOYxTc2/root/ca-root.crt -private_key = /tmp/certchain.KDOYxTc2/root/ca-root.key -database = /tmp/certchain.KDOYxTc2/root/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/root/certs -serial = /tmp/certchain.KDOYxTc2/root/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -policy = master_ca_policy -x509_extensions = master_ca_exts - -# Master CA (Email) -[master_ca_email_config] -certificate = /tmp/certchain.KDOYxTc2/root/ca-root.crt -private_key = /tmp/certchain.KDOYxTc2/root/ca-root.key -database = /tmp/certchain.KDOYxTc2/root/inventory.txt -new_certs_dir = /tmp/certchain.KDOYxTc2/root/certs -serial = /tmp/certchain.KDOYxTc2/root/serial - -default_crl_days = 7300 -default_days = 7300 -default_md = sha1 - -email_in_dn = yes - -policy = master_ca_email_policy -x509_extensions = master_ca_exts - -[master_ca_policy] -commonName = supplied - -[master_ca_email_policy] -commonName = supplied -emailAddress = supplied - -# default extensions for clients -[master_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth - -[master_ssl_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth -subjectAltName = $ENV::SAN - -# extensions for the master certificate (specifically adding subjectAltName) -[master_self_ca_exts] -authorityKeyIdentifier = keyid,issuer:always -basicConstraints = critical,CA:false -keyUsage = keyEncipherment, digitalSignature -extendedKeyUsage = serverAuth, clientAuth -# include the master's fqdn here, as well as in the CN, to work -# around https://bugs.ruby-lang.org/issues/6493 -# NOTE: Alt Names should be set in the request, so they know -# their FQDN -# subjectAltName = DNS:puppet,DNS:root.example.org - diff --git a/acceptance/tests/external_ca_support/fixtures/root/serial b/acceptance/tests/external_ca_support/fixtures/root/serial deleted file mode 100644 index 75016ea3625..00000000000 --- a/acceptance/tests/external_ca_support/fixtures/root/serial +++ /dev/null @@ -1 +0,0 @@ -03 diff --git a/acceptance/tests/external_ca_support/jetty_external_root_ca.rb b/acceptance/tests/external_ca_support/jetty_external_root_ca.rb deleted file 
mode 100644 index a9a7e79d034..00000000000 --- a/acceptance/tests/external_ca_support/jetty_external_root_ca.rb +++ /dev/null @@ -1,159 +0,0 @@ -begin - require 'puppet_x/acceptance/external_cert_fixtures' -rescue LoadError - $LOAD_PATH.unshift(File.expand_path('../../../lib', __FILE__)) - require 'puppet_x/acceptance/external_cert_fixtures' -end - -confine :except, :type => 'pe' - -skip_test "Test only supported on Jetty" unless @options[:is_puppetserver] - -# Verify that a trivial manifest can be run to completion. -# Supported Setup: Single, Root CA -# - Agent and Master SSL cert issued by the Root CA -# - Revocation disabled on the agent `certificate_revocation = false` -# - CA disabled on the master `ca = false` -# -test_name "Puppet agent and master work when both configured with externally issued certificates from independent intermediate CAs" - -step "Copy certificates and configuration files to the master..." -fixture_dir = File.expand_path('../fixtures', __FILE__) -testdir = master.tmpdir('jetty_external_root_ca') -fixtures = PuppetX::Acceptance::ExternalCertFixtures.new(fixture_dir, testdir) - -jetty_confdir = master['puppetserver-confdir'] - -# Register our cleanup steps early in a teardown so that they will happen even -# if execution aborts part way. -teardown do - step "Restore /etc/hosts and webserver.conf" - on master, "cp -p '#{testdir}/hosts' /etc/hosts" - on master, "cp -p '#{testdir}/webserver.conf.orig' '#{jetty_confdir}/webserver.conf'" -end - -# Read all of the CA certificates. - -# Copy all of the x.509 fixture data over to the master. -create_remote_file master, "#{testdir}/ca_root.crt", fixtures.root_ca_cert -create_remote_file master, "#{testdir}/ca_agent.crt", fixtures.agent_ca_cert -create_remote_file master, "#{testdir}/ca_master.crt", fixtures.master_ca_cert -create_remote_file master, "#{testdir}/ca_master.crl", fixtures.master_ca_crl -create_remote_file master, "#{testdir}/ca_master_bundle.crt", "#{fixtures.master_ca_cert}\n#{fixtures.root_ca_cert}\n" -create_remote_file master, "#{testdir}/ca_agent_bundle.crt", "#{fixtures.agent_ca_cert}\n#{fixtures.root_ca_cert}\n" -create_remote_file master, "#{testdir}/agent.crt", fixtures.agent_cert -create_remote_file master, "#{testdir}/agent.key", fixtures.agent_key -create_remote_file master, "#{testdir}/agent_email.crt", fixtures.agent_email_cert -create_remote_file master, "#{testdir}/agent_email.key", fixtures.agent_email_key -create_remote_file master, "#{testdir}/master.crt", fixtures.master_cert -create_remote_file master, "#{testdir}/master.key", fixtures.master_key -create_remote_file master, "#{testdir}/master_rogue.crt", fixtures.master_cert_rogue -create_remote_file master, "#{testdir}/master_rogue.key", fixtures.master_key_rogue - -## -# Now create the master and agent puppet.conf -# -# We need to create the public directory for Passenger and the modules -# directory to avoid `Error: Could not evaluate: Could not retrieve information -# from environment production source(s) puppet://master1.example.org/plugins` -on master, "mkdir -p #{testdir}/etc/{master/{public,modules/empty/lib},agent}" -# Backup /etc/hosts -on master, "cp -p /etc/hosts '#{testdir}/hosts'" - -# Make master1.example.org resolve if it doesn't already. 
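(Aside on the deleted openssl.conf fixtures above: they inject the certificate's subjectAltName through the SAN environment variable, via "subjectAltName = $ENV::SAN" in the master_ssl_exts section. The sketch below is purely hypothetical — the command, CSR and output paths, and SAN values are illustrative assumptions and are not part of the deleted fixtures or of this test — but it shows how a leaf certificate could be signed against that config.)

# Hypothetical sketch, not taken from the deleted fixtures or test: sign a leaf
# certificate with the intermediate CA config, feeding subjectAltName through
# the SAN environment variable referenced by the master_ssl_exts section.
# The CSR/output filenames here are made up for illustration.
on master, "cd #{testdir} && SAN='DNS:puppet,DNS:master1.example.org' " \
           "openssl ca -batch -config openssl.conf -extensions master_ssl_exts " \
           "-in master1.example.org.csr -out master1.example.org.crt"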
-on master, "grep -q -x '#{fixtures.host_entry}' /etc/hosts || echo '#{fixtures.host_entry}' >> /etc/hosts" - -create_remote_file master, "#{testdir}/etc/agent/puppet.conf", fixtures.agent_conf -create_remote_file master, "#{testdir}/etc/agent/puppet.conf.crl", fixtures.agent_conf_crl -create_remote_file master, "#{testdir}/etc/agent/puppet.conf.email", fixtures.agent_conf_email - -# auth.conf to allow *.example.com access to the rest API -create_remote_file master, "#{testdir}/etc/master/auth.conf", fixtures.auth_conf - -create_remote_file master, "#{testdir}/etc/master/config.ru", fixtures.config_ru - -step "Set filesystem permissions and ownership for the master" -# These permissions are required for the JVM to start Puppet as puppet -on master, "chown -R puppet:puppet #{testdir}/etc/master" -on master, "chown -R puppet:puppet #{testdir}/*.crt" -on master, "chown -R puppet:puppet #{testdir}/*.key" -on master, "chown -R puppet:puppet #{testdir}/*.crl" - -# These permissions are just for testing, end users should protect their -# private keys. -on master, "chmod -R a+rX #{testdir}" - -agent_cmd_prefix = "--confdir #{testdir}/etc/agent --vardir #{testdir}/etc/agent/var" - -# Move the agent SSL cert and key into place. -# The filename must match the configured certname, otherwise Puppet will try -# and generate a new certificate and key -step "Configure the agent with the externally issued certificates" -on master, "mkdir -p #{testdir}/etc/agent/ssl/{public_keys,certs,certificate_requests,private_keys,private}" -create_remote_file master, "#{testdir}/etc/agent/ssl/certs/#{fixtures.agent_name}.pem", fixtures.agent_cert -create_remote_file master, "#{testdir}/etc/agent/ssl/private_keys/#{fixtures.agent_name}.pem", fixtures.agent_key - -on master, "cp -p '#{jetty_confdir}/webserver.conf' '#{testdir}/webserver.conf.orig'" -create_remote_file master, "#{jetty_confdir}/webserver.conf", - fixtures.jetty_webserver_conf_for_trustworthy_master - -master_opts = { - 'master' => { - 'ca' => false, - 'certname' => fixtures.master_name, - 'ssl_client_header' => "HTTP_X_CLIENT_DN", - 'ssl_client_verify_header' => "HTTP_X_CLIENT_VERIFY" - } -} - -step "Start the Puppet master service..." -with_puppet_running_on(master, master_opts) do - # Now, try and run the agent on the master against itself. 
- step "Successfully run the puppet agent on the master" - on master, puppet_agent("#{agent_cmd_prefix} --test"), :acceptable_exit_codes => (0..255) do - assert_no_match /Creating a new SSL key/, stdout - assert_no_match /\Wfailed\W/i, stderr - assert_no_match /\Wfailed\W/i, stdout - assert_no_match /\Werror\W/i, stderr - assert_no_match /\Werror\W/i, stdout - # Assert the exit code so we get a "Failed test" instead of an "Errored test" - assert exit_code == 0 - end - - step "Master accepts client cert with email address in subject" - on master, "cp #{testdir}/etc/agent/puppet.conf{,.no_email}" - on master, "cp #{testdir}/etc/agent/puppet.conf{.email,}" - on master, puppet_agent("#{agent_cmd_prefix} --test"), :acceptable_exit_codes => (0..255) do - assert_no_match /\Wfailed\W/i, stdout - assert_no_match /\Wfailed\W/i, stderr - assert_no_match /\Werror\W/i, stdout - assert_no_match /\Werror\W/i, stderr - # Assert the exit code so we get a "Failed test" instead of an "Errored test" - assert exit_code == 0 - end - - step "Agent refuses to connect to revoked master" - on master, "cp #{testdir}/etc/agent/puppet.conf{,.no_crl}" - on master, "cp #{testdir}/etc/agent/puppet.conf{.crl,}" - - revoke_opts = "--hostcrl #{testdir}/ca_master.crl" - on master, puppet_agent("#{agent_cmd_prefix} #{revoke_opts} --test"), :acceptable_exit_codes => (0..255) do - assert_match /certificate revoked.*?example.org/, stderr - assert exit_code == 1 - end -end - -create_remote_file master, "#{jetty_confdir}/webserver.conf", - fixtures.jetty_webserver_conf_for_rogue_master - -with_puppet_running_on(master, master_opts) do - step "Agent refuses to connect to a rogue master" - on master, puppet_agent("#{agent_cmd_prefix} --ssl_client_ca_auth=#{testdir}/ca_master.crt --test"), :acceptable_exit_codes => (0..255) do - assert_no_match /Creating a new SSL key/, stdout - assert_match /certificate verify failed/i, stderr - assert_match /The server presented a SSL certificate chain which does not include a CA listed in the ssl_client_ca_auth file/i, stderr - assert exit_code == 1 - end -end - -step "Finished testing External Certificates" diff --git a/acceptance/tests/face/4654_facts_face.rb b/acceptance/tests/face/4654_facts_face.rb new file mode 100644 index 00000000000..d8dc8954836 --- /dev/null +++ b/acceptance/tests/face/4654_facts_face.rb @@ -0,0 +1,67 @@ +test_name "Puppet facts face should resolve custom and external facts" + +tag 'audit:high', + 'audit:integration' # The facter acceptance tests should be acceptance. + # However, the puppet face merely needs to interact with libfacter. + # So, this should be an integration test. +# +# This test is intended to ensure that custom and external facts present +# on the agent are resolved and displayed by the puppet facts face. 
+# +custom_fact = < < directory, + } + + file { "#{agent.puppet['plugindest']}/facter/custom.rb": + ensure => file, + content => "#{custom_fact}", + } + + file { "#{agent.puppet['pluginfactdest']}/external#{ext}": + ensure => file, + mode => "0755", + content => "#{external_fact}", + } +MANIFEST + + step "Agent #{agent}: custom_fact and external_fact should be present in the output of `puppet facts`" + on agent, puppet('facts') do |result| + assert_match(/"custom_fact": "foo"/, result.stdout, "custom_fact did not match expected output") + assert_match(/"external_fact": "bar"/, result.stdout, "external_fact did not match expected output") + end +end diff --git a/acceptance/tests/face/loadable_from_modules.rb b/acceptance/tests/face/loadable_from_modules.rb index c1bb3903737..15d80450cc6 100644 --- a/acceptance/tests/face/loadable_from_modules.rb +++ b/acceptance/tests/face/loadable_from_modules.rb @@ -3,26 +3,58 @@ # Because the module tool does not work on windows, we can't run this test there confine :except, :platform => 'windows' +tag 'audit:high', + 'audit:acceptance', # This has been OS sensitive. + 'audit:refactor' # Remove the confine against windows and refactor to + # accommodate the Windows platform. + require 'puppet/acceptance/temp_file_utils' extend Puppet::Acceptance::TempFileUtils initialize_temp_dirs +metadata_json_file = <<-FILE +{ + "name": "puppetlabs-helloworld", + "version": "0.0.1", + "author": "Puppet Labs", + "summary": "Nginx Module", + "license": "Apache Version 2.0", + "source": "https://github.com/puppetlabs/puppetlabs-nginx", + "project_page": "https://github.com/puppetlabs/puppetlabs-nginx", + "issues_url": "https://github.com/puppetlabs/puppetlabs-nginx", + "dependencies": [ + {"name":"puppetlabs-stdlub","version_requirement":">= 1.0.0"} + ] +} +FILE + agents.each do |agent| + + if on(agent, facter("fips_enabled")).stdout =~ /true/ + puts "Module build, loading and installing not supported on fips enabled platforms" + next + end + environmentpath = get_test_file_path(agent, 'environments') dev_modulepath = "#{environmentpath}/dev/modules" + module_base_dir = "#{dev_modulepath}/helloworld" + + teardown do + on agent, "rm -rf #{module_base_dir}" + end # make sure that we use the modulepath from the dev environment puppetconf = get_test_file_path(agent, 'puppet.conf') on agent, puppet("config", "set", "environmentpath", environmentpath, "--section", "main", "--config", puppetconf) on agent, puppet("config", "set", "environment", "dev", "--section", "user", "--config", puppetconf) - on agent, 'rm -rf helloworld' - on agent, puppet("module", "generate", "puppetlabs-helloworld", "--skip-interview") - mkdirs agent, 'helloworld/lib/puppet/application' - mkdirs agent, 'helloworld/lib/puppet/face' + mkdirs agent, module_base_dir + create_remote_file(agent, "#{module_base_dir}/metadata.json", metadata_json_file) + mkdirs agent, "#{module_base_dir}/lib/puppet/application" + mkdirs agent, "#{module_base_dir}/lib/puppet/face" # copy application, face, and utility module - create_remote_file(agent, "helloworld/lib/puppet/application/helloworld.rb", <<'EOM') + create_remote_file(agent, "#{module_base_dir}/lib/puppet/application/helloworld.rb", <<'EOM') require 'puppet/face' require 'puppet/application/face_base' @@ -30,7 +62,7 @@ class Puppet::Application::Helloworld < Puppet::Application::FaceBase end EOM - create_remote_file(agent, "helloworld/lib/puppet/face/helloworld.rb", <<'EOM') + create_remote_file(agent, "#{module_base_dir}/lib/puppet/face/helloworld.rb", <<'EOM') 
Puppet::Face.define(:helloworld, '0.1.0') do summary "Hello world face" description "This is the hello world face" @@ -52,7 +84,7 @@ class Puppet::Application::Helloworld < Puppet::Application::FaceBase end EOM - create_remote_file(agent, "helloworld/lib/puppet/helloworld.rb", <<'EOM') + create_remote_file(agent, "#{module_base_dir}/lib/puppet/helloworld.rb", <<'EOM') module Puppet::Helloworld def print puts "Hello world from a required module" @@ -61,24 +93,21 @@ def print end EOM - on agent, puppet('module', 'build', 'helloworld') - on agent, puppet('module', 'install', '--ignore-dependencies', '--target-dir', dev_modulepath, 'helloworld/pkg/puppetlabs-helloworld-0.1.0.tar.gz') - - on(agent, puppet('help', '--config', puppetconf)) do - assert_match(/helloworld\s*Hello world face/, stdout, "Face missing from list of available subcommands") + on(agent, puppet('help', '--config', puppetconf)) do |result| + assert_match(/helloworld\s*Hello world face/, result.stdout, "Face missing from list of available subcommands") end - on(agent, puppet('help', 'helloworld', '--config', puppetconf)) do - assert_match(/This is the hello world face/, stdout, "Descripion help missing") - assert_match(/moduleprint\s*Prints hello world from a required module/, stdout, "help for moduleprint action missing") - assert_match(/actionprint\s*Prints hello world from an action/, stdout, "help for actionprint action missing") + on(agent, puppet('help', 'helloworld', '--config', puppetconf)) do |result| + assert_match(/This is the hello world face/, result.stdout, "Descripion help missing") + assert_match(/moduleprint\s*Prints hello world from a required module/, result.stdout, "help for moduleprint action missing") + assert_match(/actionprint\s*Prints hello world from an action/, result.stdout, "help for actionprint action missing") end - on(agent, puppet('helloworld', 'actionprint', '--config', puppetconf)) do - assert_match(/^Hello world from an action$/, stdout, "face did not print hello world") + on(agent, puppet('helloworld', 'actionprint', '--config', puppetconf)) do |result| + assert_match(/^Hello world from an action$/, result.stdout, "face did not print hello world") end - on(agent, puppet('helloworld', 'moduleprint', '--config', puppetconf)) do - assert_match(/^Hello world from a required module$/, stdout, "face did not load module to print hello world") + on(agent, puppet('helloworld', 'moduleprint', '--config', puppetconf)) do |result| + assert_match(/^Hello world from a required module$/, result.stdout, "face did not load module to print hello world") end end diff --git a/acceptance/tests/face/parser_validate.rb b/acceptance/tests/face/parser_validate.rb new file mode 100644 index 00000000000..f913eb67b68 --- /dev/null +++ b/acceptance/tests/face/parser_validate.rb @@ -0,0 +1,70 @@ +test_name 'parser validate' do + +tag 'audit:high', + 'audit:unit' # Parser validation should be core to ruby + # and platform agnostic. 
+ + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils + + app_type = File.basename(__FILE__, '.*') + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + agents.each do |agent| + skip_test('this test fails on windows French due to Cygwin/UTF Issues - PUP-8319,IMAGES-492') if agent['platform'] =~ /windows/ && agent['locale'] == 'fr' + + step 'manifest with parser function call' do + if agent.platform !~ /windows/ + tmp_environment = mk_tmp_environment_with_teardown(agent, app_type) + + create_sitepp(agent, tmp_environment, <<-SITE) +function validate_this() { + notice('hello, puppet') +} +validate_this() + SITE + on(agent, puppet("parser validate --environment #{tmp_environment}"), :pty => true) # default manifest + end + + # manifest with Type aliases + create_test_file(agent, "#{app_type}.pp", <<-PP) +function validate_this() { + notice('hello, puppet') +} +validate_this() +type MyInteger = Integer +notice 42 =~ MyInteger + PP + tmp_manifest = get_test_file_path(agent, "#{app_type}.pp") + on(agent, puppet("parser validate #{tmp_manifest}")) + end + + step 'manifest with bad syntax' do + create_test_file(agent, "#{app_type}_broken.pp", "notify 'hello there'") + tmp_manifest = get_test_file_path(agent, "#{app_type}_broken.pp") + on(agent, puppet("parser validate #{tmp_manifest}"), :accept_all_exit_codes => true) do |result| + assert_equal(result.exit_code, 1, 'parser validate did not exit with 1 upon parse failure') + expected = /Error: Could not parse for environment production: This Name has no effect\. A value was produced and then forgotten \(one or more preceding expressions may have the wrong form\) \(file: .*_broken\.pp, line: 1, column: 1\)/ + assert_match(expected, result.output, "parser validate did not output correctly: '#{result.output}'. expected: '#{expected.to_s}'") unless agent['locale'] == 'ja' + end + end + + step '(large) manifest with exported resources' do + fixture_path = File.join(File.dirname(__FILE__), '..', '..', 'fixtures/manifest_large_exported_classes_node.pp') + create_test_file(agent, "#{app_type}_exported.pp", File.read(fixture_path)) + tmp_manifest = get_test_file_path(agent, "#{app_type}_exported.pp") + on(agent, puppet("parser validate #{tmp_manifest}")) + end + + end + +end diff --git a/acceptance/tests/helpful_error_message_when_hostname_not_match_server_certificate.rb b/acceptance/tests/helpful_error_message_when_hostname_not_match_server_certificate.rb deleted file mode 100644 index d43f6ca42d3..00000000000 --- a/acceptance/tests/helpful_error_message_when_hostname_not_match_server_certificate.rb +++ /dev/null @@ -1,45 +0,0 @@ -test_name "generate a helpful error message when hostname doesn't match server certificate" - -skip_test( 'Changing certnames of the master will break PE/Passenger installations' ) if master.is_using_passenger? - -certname = "foobar_not_my_hostname" -dns_alt_names = "one_cert,two_cert,red_cert,blue_cert" - -# The DNS names in the certificate's Subject Alternative Name extension -# may appear in any order so sort the list of names alphabetically before -# comparison. 
-expected_sorted_dns_alt_names = "DNS:" + - dns_alt_names.split(",").push(certname).sort().join(", DNS:") - -# Start the master with a certname not matching its hostname -master_opts = { - 'master' => { - 'certname' => certname, - 'dns_alt_names' => dns_alt_names - } -} - -with_puppet_running_on master, master_opts do - run_agent_on(agents, "--test --server #{master}", :acceptable_exit_codes => (1..255)) do - msg = "Server hostname '" + - Regexp.escape(master) + - "' did not match server certificate; expected one of " + - Regexp.escape(certname) + - ', (.*)$' - - exp = Regexp.new (msg) - - match_result = exp.match(stderr) - - assert(match_result, "Expected " + msg + " to match '" + stderr + "'") - - # Sort the expected DNS names in alphabetical order before comparison. - # The names extracted from the shell output might contain color output - # characters at the end (\e[0m), so strip those off before sorting. - actual_sorted_dns_alt_names = match_result[1].sub(/\e\[0m$/,''). - split(", ").sort().join(", ") - - assert_equal(expected_sorted_dns_alt_names, actual_sorted_dns_alt_names, - "Unexpected DNS alt names found in server certificate") - end -end diff --git a/acceptance/tests/i18n/enable_option_disable_i18n.rb b/acceptance/tests/i18n/enable_option_disable_i18n.rb new file mode 100644 index 00000000000..d55e22c184d --- /dev/null +++ b/acceptance/tests/i18n/enable_option_disable_i18n.rb @@ -0,0 +1,88 @@ +test_name 'Verify that disable_i18n can be set to true and have translations disabled' do + confine :except, :platform => /^solaris/ # translation not supported + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language = 'ja_JP' + + step "configure server locale to #{language}" do + configure_master_system_locale(language) + end + + tmp_environment = mk_tmp_environment_with_teardown(master, File.basename(__FILE__, '.*')) + + step 'install a i18ndemo module' do + install_i18n_demo_module(master, tmp_environment) + end + + disable_i18n_default_master = master.puppet['disable_i18n'] + teardown do + step 'resetting the server locale' do + on(master, puppet("config set disable_i18n #{ disable_i18n_default_master }")) + reset_master_system_locale + end + step 'uninstall the module' do + agents.each do |agent| + uninstall_i18n_demo_module(agent) + end + uninstall_i18n_demo_module(master) + end + end + + agents.each do |agent| + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? 
+ shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + on(master, puppet("config set disable_i18n false")) + reset_master_system_locale + end + + step 'expect #{language} translation for a custom type' do + site_pp_content = <<-PP + node default { + i18ndemo_type { '12345': } + } + PP + create_sitepp(master, tmp_environment, site_pp_content) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |result| + assert_match(/Error: .* \w+-i18ndemo type: 値は有12345効な値ではありません/, result.stderr, 'missing error from invalid value for custom type param') + end + end + + step 'disable i18n' do + on(agent, puppet("config set disable_i18n true")) + on(master, puppet("config set disable_i18n true")) + reset_master_system_locale + end + + step 'expect no #{language} translation for a custom type' do + site_pp_content = <<-PP + node default { + i18ndemo_type { '12345': } + } + PP + create_sitepp(master, tmp_environment, site_pp_content) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |result| + assert_match(/Error: .* Value 12345 is not a valid value for i18ndemo_type\:\:name/, result.stderr) + end + end + end +end diff --git a/acceptance/tests/i18n/modules/puppet_agent.rb b/acceptance/tests/i18n/modules/puppet_agent.rb new file mode 100644 index 00000000000..50c7131762e --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_agent.rb @@ -0,0 +1,142 @@ +test_name 'C100565: puppet agent with module should translate messages' do + confine :except, :platform => /^solaris/ # translation not supported + + tag 'audit:medium', + 'audit:acceptance' + + skip_test('i18n test module uses deprecated function; update module to resume testing.') + # function validate_absolute_path used https://github.com/eputnam/eputnam-i18ndemo/blob/621d06d/manifests/init.pp#L15 + + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language = 'ja_JP' + disable_i18n_default_master = master.puppet['disable_i18n'] + + step 'enable i18n on master' do + on(master, puppet("config set disable_i18n false")) + end + + step "configure server locale to #{language}" do + configure_master_system_locale(language) + end + + tmp_environment = mk_tmp_environment_with_teardown(master, File.basename(__FILE__, '.*')) + + step 'install a i18ndemo module' do + install_i18n_demo_module(master, tmp_environment) + end + + teardown do + step 'resetting the server locale' do + on(master, puppet("config set disable_i18n #{ disable_i18n_default_master }")) + reset_master_system_locale + end + step 'uninstall the module' do + agents.each do |agent| + uninstall_i18n_demo_module(agent) + end + uninstall_i18n_demo_module(master) + end + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + agents.each do |agent| + skip_test('on windows this test only works on a machine with a japanese code page set') if agent['platform'] =~ /windows/ && 
agent['locale'] != 'ja' + + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? + shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + type_path = agent.tmpdir('provider') + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + agent.rm_rf(type_path) + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + step "Run puppet agent of a module with language #{agent_language} and verify the translations" do + + step 'verify custom fact translations' do + site_pp_content_1 = <<-PP + node default { + class { 'i18ndemo': + filename => '#{type_path}' + } + } + PP + + create_sitepp(master, tmp_environment, site_pp_content_1) + on(agent, puppet("agent -t --no-disable_i18n --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/.*\w+-i18ndemo fact: これは\w+-i18ndemoからのカスタムファクトからのレイズです/, result.stderr, 'missing translation for raise from ruby fact') + end + end unless agent['platform'] =~ /ubuntu-16.04/ # Condition to be removed after FACT-2799 gets resolved + + step 'verify custom type translations' do + site_pp_content_2 = <<-PP + node default { + i18ndemo_type { 'hello': } + } + PP + + create_sitepp(master, tmp_environment, site_pp_content_2) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |result| + assert_match(/Warning:.*\w+-i18ndemo type: 良い値/, result.stderr, 'missing warning from custom type') + end + + site_pp_content_3 = <<-PP + node default { + i18ndemo_type { '12345': } + } + PP + create_sitepp(master, tmp_environment, site_pp_content_3) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |result| + assert_match(/Error: .* \w+-i18ndemo type: 値は有12345効な値ではありません/, result.stderr, 'missing error from invalid value for custom type param') + end + end + + step 'verify custom provider translation' do + site_pp_content_4 = <<-PP + node default { + i18ndemo_type { 'hello': + ensure => present, + dir => '#{type_path}', + } + } + PP + create_sitepp(master, tmp_environment, site_pp_content_4) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language)) do |result| + assert_match(/Warning:.*\w+-i18ndemo provider: i18ndemo_typeは存在しますか/, result.stderr, 'missing translated provider message') + end + end + + step 'verify function string translation' do + site_pp_content_5 = <<-PP + node default { + notify { 'happy': + message => happyfuntime('happy') + } + } + PP + create_sitepp(master, tmp_environment, site_pp_content_5) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => 2) do |result| + assert_match(/Notice: --\*\w+-i18ndemo function: それは楽しい時間です\*--/, result.stdout, 'missing translated notice message') + end + end + end + end +end diff --git a/acceptance/tests/i18n/modules/puppet_agent_cached_catalog.rb b/acceptance/tests/i18n/modules/puppet_agent_cached_catalog.rb new file mode 100644 index 00000000000..8df68f2a41b --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_agent_cached_catalog.rb @@ -0,0 +1,125 @@ +test_name 'C100566: puppet agent with module should translate messages when using a cached catalog' do + 
confine :except, :platform => /^solaris/ # translation not supported + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language = 'ja_JP' + disable_i18n_default_master = master.puppet['disable_i18n'] + + step 'enable i18n on master' do + on(master, puppet("config set disable_i18n false")) + end + + step "configure server locale to #{language}" do + configure_master_system_locale(language) + end + + tmp_environment = mk_tmp_environment_with_teardown(master, File.basename(__FILE__, '.*')) + + step 'install a i18ndemo module' do + install_i18n_demo_module(master, tmp_environment) + end + + teardown do + step 'resetting the server locale' do + on(master, puppet("config set disable_i18n #{ disable_i18n_default_master }")) + reset_master_system_locale + end + step 'uninstall the module' do + agents.each do |agent| + uninstall_i18n_demo_module(agent) + end + uninstall_i18n_demo_module(master) + end + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + agents.each do |agent| + skip_test('on windows this test only works on a machine with a japanese code page set') if agent['platform'] =~ /windows/ && agent['locale'] != 'ja' + + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? + shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + type_path = agent.tmpdir('provider') + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + agent.rm_rf(type_path) + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + unresolved_server = 'puppet.unresolved.host.example.com' + + step "Run puppet apply of a module with language #{agent_language} and verify the translations using the cached catalog" do + step 'verify custom fact translations' do + site_pp_content_1 = <<-PP + node default { + class { 'i18ndemo': + filename => '#{type_path}' + } + } + PP + create_sitepp(master, tmp_environment, site_pp_content_1) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/.*\w+-i18ndemo fact: これは\w+-i18ndemoからのカスタムファクトからのレイズです/, result.stderr, 'missing translation for raise from ruby fact') + end + on(agent, puppet("agent -t --environment #{tmp_environment} --use_cached_catalog", 'ENV' => shell_env_language), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/.*\w+-i18ndemo fact: これは\w+-i18ndemoからのカスタムファクトからのレイズです/, result.stderr, 'missing translation for raise from ruby fact when using cached catalog') + end + end + + step 'verify custom provider translation' do + site_pp_content_2 = <<-PP + node default { + i18ndemo_type { 'hello': + ensure => present, + dir => '#{type_path}', + } + } + PP + create_sitepp(master, tmp_environment, site_pp_content_2) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/Warning:.*\w+-i18ndemo provider: i18ndemo_typeは存在しますか/, result.stderr, 'missing 
translated provider message') + end + on(agent, puppet("agent -t --server #{unresolved_server} --environment #{tmp_environment} --use_cached_catalog", 'ENV' => shell_env_language), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/Warning:.*\w+-i18ndemo provider: i18ndemo_typeは存在しますか/, result.stderr, 'missing translated provider message when using cached catalog') + end + end + + step 'verify function string translation' do + site_pp_content_3 = <<-PP + node default { + notify { 'happy': + message => happyfuntime('happy') + } + } + PP + create_sitepp(master, tmp_environment, site_pp_content_3) + on(agent, puppet("agent -t --environment #{tmp_environment}", 'ENV' => shell_env_language), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/Notice: --\*\w+-i18ndemo function: それは楽しい時間です\*--/, result.stdout, 'missing translated notice message') + end + on(agent, puppet("agent -t --server #{unresolved_server} --environment #{tmp_environment} --use_cached_catalog", 'ENV' => shell_env_language), :acceptable_exit_codes => [0, 2]) do |result| + assert_match(/Notice: --\*\w+-i18ndemo function: それは楽しい時間です\*--/, result.stdout, 'missing translated notice message when using cached catalog') + end + end + end + end +end diff --git a/acceptance/tests/i18n/modules/puppet_agent_with_multiple_environments.rb b/acceptance/tests/i18n/modules/puppet_agent_with_multiple_environments.rb new file mode 100644 index 00000000000..94856e6a925 --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_agent_with_multiple_environments.rb @@ -0,0 +1,100 @@ +test_name 'C100575: puppet agent with different modules in different environments should translate based on their module' do + confine :except, :platform => /^solaris/ # translation not supported + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language = 'ja_JP' + + app_type_1 = File.basename(__FILE__, '.*') + "_env_1" + app_type_2 = File.basename(__FILE__, '.*') + "_env_2" + tmp_environment_1 = mk_tmp_environment_with_teardown(master, app_type_1) + tmp_environment_2 = mk_tmp_environment_with_teardown(master, app_type_2) + full_path_env_1 = File.join('/tmp', tmp_environment_1) + full_path_env_2 = File.join('/tmp', tmp_environment_2) + tmp_po_file = master.tmpfile('tmp_po_file') + + disable_i18n_default_master = master.puppet['disable_i18n'] + step 'enable i18n on master' do + on(master, puppet("config set disable_i18n false")) + end + + step 'install a i18ndemo module' do + install_i18n_demo_module(master, tmp_environment_1) + install_i18n_demo_module(master, tmp_environment_2) + end + + step "configure server locale to #{language}" do + configure_master_system_locale(language) + end + + teardown do + on(master, "rm -f '#{tmp_po_file}'") + step 'uninstall the module' do + agents.each do |agent| + uninstall_i18n_demo_module(agent) + end + uninstall_i18n_demo_module(master) + end + step 'resetting the server locale' do + on(master, puppet("config set disable_i18n #{ disable_i18n_default_master }")) + reset_master_system_locale + end + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + agents.each do |agent| + skip_test('on windows this test only works on a machine with a japanese code 
page set') if agent['platform'] =~ /windows/ && agent['locale'] != 'ja' + + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? + shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + env_1_po_file = File.join(full_path_env_1, 'modules', I18NDEMO_NAME, 'locales', 'ja', "#{I18NDEMO_MODULE_NAME}.po") + on(master, "sed -e 's/\\(msgstr \"\\)\\([^\"]\\)/\\1'\"ENV_1\"':\\2/' #{env_1_po_file} > #{tmp_po_file} && mv #{tmp_po_file} #{env_1_po_file}") + env_2_po_file = File.join(full_path_env_2, 'modules', I18NDEMO_NAME, 'locales', 'ja', "#{I18NDEMO_MODULE_NAME}.po") + on(master, "sed -e 's/\\(msgstr \"\\)\\([^\"]\\)/\\1'\"ENV_2\"':\\2/' #{env_2_po_file} > #{tmp_po_file} && mv #{tmp_po_file} #{env_2_po_file}") + on(master, "chmod a+r '#{env_1_po_file}' '#{env_2_po_file}'") + + step 'verify function string translation' do + site_pp_content = <<-PP + node default { + notify { 'happy': + message => happyfuntime('happy') + } + } + PP + create_sitepp(master, tmp_environment_1, site_pp_content) + on(agent, puppet("agent -t --environment #{tmp_environment_1}", 'ENV' => shell_env_language), :acceptable_exit_codes => 2) do |result| + assert_match(/Notice: --\*(ENV_1:)?ENV_1:\w+-i18ndemo function: それは楽しい時間です\*--/, result.stdout, 'missing translated notice message for environment 1') + end + + create_sitepp(master, tmp_environment_2, site_pp_content) + on(agent, puppet("agent -t --environment #{tmp_environment_2}", 'ENV' => shell_env_language), :acceptable_exit_codes => 2) do |result| + assert_match(/Notice: --\*(ENV_2:)?ENV_2:\w+-i18ndemo function: それは楽しい時間です\*--/, result.stdout, 'missing translated notice message for environment 2') + end + end + end +end diff --git a/acceptance/tests/i18n/modules/puppet_apply.rb b/acceptance/tests/i18n/modules/puppet_apply.rb new file mode 100644 index 00000000000..ef549b94c3c --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_apply.rb @@ -0,0 +1,79 @@ +test_name 'C100567: puppet apply of module should translate messages' do + confine :except, :platform => /^solaris/ # translation not supported + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language = 'ja_JP' + + agents.each do |agent| + skip_test('on windows this test only works on a machine with a japanese code page set') if agent['platform'] =~ /windows/ && agent['locale'] != 'ja' + + # REMIND - It was noted that skipping tests on certain platforms sometimes causes + # beaker to mark the test as a failed even if the test succeeds on other targets. + # Hence we just print a message and skip w/o telling beaker about it. + if on(agent, facter("fips_enabled")).stdout =~ /true/ + puts "Module build, loading and installing is not supported on fips enabled platforms" + next + end + + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? 
+ shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + type_path = agent.tmpdir('provider') + step 'install a i18ndemo module' do + install_i18n_demo_module(agent) + end + + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + uninstall_i18n_demo_module(agent) + on(agent, "rm -rf '#{type_path}'") + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + step "Run puppet apply of a module with language #{agent_language} and verify the translations" do + step 'verify custom fact translations' do + on(agent, puppet("apply -e \"class { 'i18ndemo': filename => '#{type_path}' }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/.*\w+-i18ndemo fact: これは\w+-i18ndemoからのカスタムファクトからのレイズです/, apply_result.stderr, 'missing translation for raise from ruby fact') + end + end unless agent['platform'] =~ /ubuntu-16.04/ # Condition to be removed after FACT-2799 gets resolved + + step 'verify custom translations' do + on(agent, puppet("apply -e \"i18ndemo_type { 'hello': }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Warning:.*\w+-i18ndemo type: 良い値/, apply_result.stderr, 'missing warning from custom type') + end + + on(agent, puppet("apply -e \"i18ndemo_type { '12345': }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Error: .* \w+-i18ndemo type: 値は有12345効な値ではありません/, apply_result.stderr, 'missing error from invalid value for custom type param') + end + end + + step 'verify custom provider translation' do + on(agent, puppet("apply -e \"i18ndemo_type { 'hello': ensure => present, dir => '#{type_path}', }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/Warning:.*\w+-i18ndemo provider: i18ndemo_typeは存在しますか/, apply_result.stderr, 'missing translated provider message') + end + end + + step 'verify function string translation' do + on(agent, puppet("apply -e \"notify { 'happy': message => happyfuntime('happy') }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/Notice: --\*\w+-i18ndemo function: それは楽しい時間です\*--/, apply_result.stdout, 'missing translated notice message') + end + end + end + end +end diff --git a/acceptance/tests/i18n/modules/puppet_apply_module_lang.rb b/acceptance/tests/i18n/modules/puppet_apply_module_lang.rb new file mode 100644 index 00000000000..78ea85862c4 --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_apply_module_lang.rb @@ -0,0 +1,90 @@ +test_name 'C100574: puppet apply using a module should translate messages in a language not supported by puppet' do + + confine :except, :platform => /^windows/ # Can't print Finish on an English or Japanese code page + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language='fi_FI' + + agents.each do |agent| + # REMIND - It was noted that skipping tests on certain platforms sometimes causes + # beaker to mark the test as a failed even if the test succeeds on other targets. + # Hence we just print a message and skip w/o telling beaker about it. 
+ if on(agent, facter("fips_enabled")).stdout =~ /true/ + puts "Module build, loading and installing is not supported on fips enabled platforms" + next + end + + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? + shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + type_path = agent.tmpdir('provider') + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + uninstall_i18n_demo_module(agent) + on(agent, "rm -rf '#{type_path}'") + end + + step 'install a i18ndemo module' do + install_i18n_demo_module(agent) + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + step "Run puppet apply of a module with language #{agent_language} and verify default english returned" do + step 'verify custom fact message translated and applied catalog message not translatated' do + on(agent, puppet("apply -e \"class { 'i18ndemo': filename => '#{type_path}' }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/i18ndemo_fact: tämä on korotus mukautetusta tosiasiasta \w+-i18ndemo/, + apply_result.stderr, 'missing translated message for raise from ruby fact') + assert_match(/Notice: Applied catalog in [0-9.]+ seconds/, apply_result.stdout, 'missing untranslated message for catalog applied') + end + end unless agent['platform'] =~ /ubuntu-16.04/ # Condition to be removed after FACT-2799 gets resolved + + step 'verify warning translated from init.pp' do + on(agent, puppet("apply -e \"class { 'i18ndemo': filename => '#{type_path}' }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/Warning: .*I18ndemo-tiedoston luominen/, apply_result.stderr, 'missing translated warning from init.pp') + end + + on(agent, puppet("apply -e \"class { 'i18ndemo': param1 => false }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Error: .* tiedostoa ei voitu luoda./, apply_result.stderr, 'missing translated message for fail from init.pp') + end + end + + step 'verify custom type messages translated' do + on(agent, puppet("apply -e \"i18ndemo_type { 'hello': }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Warning: .* Hyvä arvo i18ndemo_type::name/, apply_result.stderr, 'missing translated warning from custom type') + end + + on(agent, puppet("apply -e \"i18ndemo_type { '12345': }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Error: .* Arvo 12345 ei ole kelvollinen arvo i18ndemo_type::name/, apply_result.stderr, 'missing translated error from invalid value for custom type param') + end + end + + step 'verify custom provider translation' do + on(agent, puppet("apply -e \"i18ndemo_type { 'hello': ensure => present, dir => '#{type_path}', }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/Warning: .* Onko i18ndemo_type olemassa\?/, apply_result.stderr, 'missing translated provider message') + end + end + + step 'verify function string translation' do + on(agent, puppet("apply -e \"notify { 'happy': message => happyfuntime('happy') }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/Notice: --\*SE ON HAUSKAA AIKAA\*--/, apply_result.stdout, 'missing translated notice message') + end + end + end + end +end diff --git 
a/acceptance/tests/i18n/modules/puppet_apply_unsupported_lang.rb b/acceptance/tests/i18n/modules/puppet_apply_unsupported_lang.rb new file mode 100644 index 00000000000..46661b8f2b0 --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_apply_unsupported_lang.rb @@ -0,0 +1,80 @@ +test_name 'C100568: puppet apply of module for an unsupported language should fall back to english' do + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + unsupported_language='hu_HU' + shell_env_language = { 'LANGUAGE' => unsupported_language, 'LANG' => unsupported_language } + + agents.each do |agent| + # REMIND - It was noted that skipping tests on certain platforms sometimes causes + # beaker to mark the test as a failed even if the test succeeds on other targets. + # Hence we just print a message and skip w/o telling beaker about it. + if on(agent, facter("fips_enabled")).stdout =~ /true/ + puts "Module build, loading and installing is not supported on fips enabled platforms" + next + end + + type_path = agent.tmpdir('provider') + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + uninstall_i18n_demo_module(agent) + on(agent, "rm -rf '#{type_path}'") + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + step 'install a i18ndemo module' do + install_i18n_demo_module(agent) + end + + step "Run puppet apply of a module with language #{unsupported_language} and verify default english returned" do + step 'verify custom fact messages not translatated' do + on(agent, puppet("apply -e \"class { 'i18ndemo': filename => '#{type_path}' }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/.*i18ndemo_fact: this is a raise from a custom fact from \w+-i18ndemo/, apply_result.stderr, 'missing untranslated message for raise from ruby fact') + end + end + + step 'verify warning not translated from init.pp' do + on(agent, puppet("apply -e \"class { 'i18ndemo': filename => '#{type_path}' }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/Warning:.*Creating an i18ndemo file/, apply_result.stderr, 'missing untranslated warning from init.pp') + end + + on(agent, puppet("apply -e \"class { 'i18ndemo': param1 => false }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Error:.*Failed to create/, apply_result.stderr, 'missing untranslated message for fail from init.pp') + end + end + + step 'verify custom type messages not translated' do + on(agent, puppet("apply -e \"i18ndemo_type { 'hello': }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Warning:.*Good value for i18ndemo_type::name/, apply_result.stderr, 'missing untranslated warning from custom type') + end + + on(agent, puppet("apply -e \"i18ndemo_type { '12345': }\"", 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |apply_result| + assert_match(/Error:.*Value 12345 is not a valid value for i18ndemo_type::name/, apply_result.stderr, 'missing untranslated error from invalid value for custom type param') + end + end + + step 'verify custom provider translation' do + on(agent, puppet("apply -e \"i18ndemo_type { 'hello': ensure => present, dir => '#{type_path}', }\"", 'ENV' => shell_env_language)) do |apply_result| + 
assert_match(/Warning:.* Does i18ndemo_type exist\?/, apply_result.stderr, 'missing untranslated provider message') + end + end + + step 'verify function string translation' do + on(agent, puppet("apply -e \"notify { 'happy': message => happyfuntime('happy') }\"", 'ENV' => shell_env_language)) do |apply_result| + assert_match(/Notice: --\*IT'S HAPPY FUN TIME\*--/, apply_result.stdout, 'missing untranslated notice message') + end + end + end + end +end diff --git a/acceptance/tests/i18n/modules/puppet_describe.rb b/acceptance/tests/i18n/modules/puppet_describe.rb new file mode 100644 index 00000000000..5e5916b4270 --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_describe.rb @@ -0,0 +1,51 @@ +test_name 'C100576: puppet describe with module type translates message' do + confine :except, :platform => /^solaris/ # translation not supported + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language = 'ja_JP' + + agents.each do |agent| + skip_test('on windows this test only works on a machine with a japanese code page set') if agent['platform'] =~ /windows/ && agent['locale'] != 'ja' + + # REMIND - It was noted that skipping tests on certain platforms sometimes causes + # beaker to mark the test as a failed even if the test succeeds on other targets. + # Hence we just print a message and skip w/o telling beaker about it. + if on(agent, facter("fips_enabled")).stdout =~ /true/ + puts "Module build, loading and installing is not supported on fips enabled platforms" + next + end + + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? 
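+ # The agent's current disable_i18n setting is captured below so the teardown can put it
+ # back after this test forces translations on with 'puppet config set disable_i18n false'.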
+ shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + uninstall_i18n_demo_module(agent) + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + step 'install a i18ndemo module' do + install_i18n_demo_module(agent) + end + + step "Run puppet describe from a module with language #{agent_language} and verify the translations" do + on(agent, puppet('describe i18ndemo_type', 'ENV' => shell_env_language)) do |result| + assert_match(/\w+-i18ndemo type: dirパラメータは、検査するディレクトリパスをとります/, result.stdout, 'missing translation of dir parameter from i18ndemo_type') + assert_match(/\w+-i18ndemo type: nameパラメータには、大文字と小文字の変数A-Za-zが使用されます/, result.stdout, 'missing translation of name parameter from i18ndemo_type') + end + end + end +end diff --git a/acceptance/tests/i18n/modules/puppet_face.rb b/acceptance/tests/i18n/modules/puppet_face.rb new file mode 100644 index 00000000000..50c83b86323 --- /dev/null +++ b/acceptance/tests/i18n/modules/puppet_face.rb @@ -0,0 +1,64 @@ +test_name 'C100573: puppet application/face with module translates messages' do + confine :except, :platform => /^solaris/ # translation not supported + + tag 'audit:medium', + 'audit:acceptance' + + require 'puppet/acceptance/i18n_utils' + extend Puppet::Acceptance::I18nUtils + + require 'puppet/acceptance/i18ndemo_utils' + extend Puppet::Acceptance::I18nDemoUtils + + language = 'ja_JP' + + agents.each do |agent| + skip_test('on windows this test only works on a machine with a japanese code page set') if agent['platform'] =~ /windows/ && agent['locale'] != 'ja' + + # REMIND - It was noted that skipping tests on certain platforms sometimes causes + # beaker to mark the test as a failed even if the test succeeds on other targets. + # Hence we just print a message and skip w/o telling beaker about it. + if on(agent, facter("fips_enabled")).stdout =~ /true/ + puts "Module build, loading and installing is not supported on fips enabled platforms" + next + end + + agent_language = enable_locale_language(agent, language) + skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil? 
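+ # enable_locale_language is expected to hand back the name of a matching locale that is
+ # actually available on the host, or nil when none could be enabled, so its return value
+ # is used verbatim for the LANGUAGE/LANG environment below.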
+ shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language }
+
+ disable_i18n_default_agent = agent.puppet['disable_i18n']
+ teardown do
+ on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }"))
+ uninstall_i18n_demo_module(agent)
+ end
+
+ step 'enable i18n' do
+ on(agent, puppet("config set disable_i18n false"))
+ end
+
+ step 'install an i18ndemo module' do
+ install_i18n_demo_module(agent)
+ end
+
+ step "Run puppet i18ndemo (face/application) from a module with language #{agent_language} and verify the translations" do
+ step 'puppet --help contains i18ndemo summary translation' do
+ on(agent, puppet('--help', 'ENV' => shell_env_language)) do |result|
+ assert_match(/\s*i18ndemo\s+\w+-i18ndemo face: I18ndemoモジュールの人形の顔の例/, result.stdout, 'missing translation of i18ndemo help summary')
+ end
+ end
+
+ step 'puppet i18ndemo --help contains test_face summary' do
+ on(agent, puppet('i18ndemo --help', 'ENV' => shell_env_language)) do |result|
+ assert_match(/\s*test_face\s+\w+-i18ndemo face: test_faceアクションのヘルプの要約/, result.stdout, 'missing translation of i18ndemo face help summary')
+ end
+ end
+
+ step 'puppet i18ndemo test_face contains translated warning' do
+ on(agent, puppet('i18ndemo', 'ENV' => shell_env_language)) do |result|
+ assert_match(/Warning: \w+-i18ndemo face: i18ndemo test_faceが呼び出されました/, result.stderr, 'missing translation of Warning message from i18ndemo face')
+ end
+ end
+ end
+ end
+end
diff --git a/acceptance/tests/i18n/modules/puppet_resource.rb b/acceptance/tests/i18n/modules/puppet_resource.rb
new file mode 100644
index 00000000000..56dd97d1fa3
--- /dev/null
+++ b/acceptance/tests/i18n/modules/puppet_resource.rb
@@ -0,0 +1,52 @@
+test_name 'C100572: puppet resource with module translates messages' do
+ confine :except, :platform => /^solaris/ # translation not supported
+
+ tag 'audit:medium',
+ 'audit:acceptance'
+
+ require 'puppet/acceptance/i18n_utils'
+ extend Puppet::Acceptance::I18nUtils
+
+ require 'puppet/acceptance/i18ndemo_utils'
+ extend Puppet::Acceptance::I18nDemoUtils
+
+ language = 'ja_JP'
+
+ agents.each do |agent|
+ skip_test('on windows this test only works on a machine with a japanese code page set') if agent['platform'] =~ /windows/ && agent['locale'] != 'ja'
+
+ # REMIND - It was noted that skipping tests on certain platforms sometimes causes
+ # beaker to mark the test as failed even if the test succeeds on other targets.
+ # Hence we just print a message and skip w/o telling beaker about it.
+ if on(agent, facter("fips_enabled")).stdout =~ /true/
+ puts "Module build, loading and installing is not supported on fips enabled platforms"
+ next
+ end
+
+ agent_language = enable_locale_language(agent, language)
+ skip_test("test machine is missing #{agent_language} locale. Skipping") if agent_language.nil?
+ shell_env_language = { 'LANGUAGE' => agent_language, 'LANG' => agent_language } + + disable_i18n_default_agent = agent.puppet['disable_i18n'] + teardown do + on(agent, puppet("config set disable_i18n #{ disable_i18n_default_agent }")) + uninstall_i18n_demo_module(agent) + end + + step 'enable i18n' do + on(agent, puppet("config set disable_i18n false")) + end + + step 'install a i18ndemo module' do + install_i18n_demo_module(agent) + end + + step "Run puppet resource for a module with language #{agent_language} and verify the translations" do + step 'puppet resource i18ndemo_type information contains translation' do + on(agent, puppet('resource i18ndemo_type', 'ENV' => shell_env_language), :acceptable_exit_codes => 1) do |result| + assert_match(/Warning: Puppet::Type::I18ndemo_type::ProviderRuby: \w+-i18ndemo type: i18ndemo_typeからの警告メッセージ/, result.stderr, 'missing translation of resource i18ndemo_type information') + end + end + end + end +end diff --git a/acceptance/tests/i18n/translation_fallback.rb b/acceptance/tests/i18n/translation_fallback.rb new file mode 100644 index 00000000000..d5b191b92e2 --- /dev/null +++ b/acceptance/tests/i18n/translation_fallback.rb @@ -0,0 +1,19 @@ +test_name 'C100560: puppet agent run output falls back to english when language not available' do + # No confines because even on non-translation supported OS' we should still fall back to english + + tag 'audit:medium', + 'audit:acceptance' + + agents.each do |agent| + step 'Run Puppet apply with language Hungarian and check the output' do + unsupported_language='hu_HU' + on(agent, puppet("agent -t", + 'ENV' => {'LANG' => unsupported_language, 'LANGUAGE' => ''})) do |apply_result| + assert_match(/Applying configuration version '[^']*'/, apply_result.stdout, + 'agent run should default to english translation') + assert_match(/Applied catalog in [0-9.]* seconds/, apply_result.stdout, + 'agent run should default to english translation') + end + end + end +end diff --git a/acceptance/tests/language/PUP-2630-server_set_facts.rb b/acceptance/tests/language/PUP-2630-server_set_facts.rb deleted file mode 100644 index dc3269cb744..00000000000 --- a/acceptance/tests/language/PUP-2630-server_set_facts.rb +++ /dev/null @@ -1,71 +0,0 @@ -test_name 'PUP-2630 ensure $server_facts is set and warning is issued if any value is overwritten by an agent' - -step 'ensure :trusted_server_facts is false by default' -on(master, puppet('master', '--configprint trusted_server_facts')) do |result| - assert_match('false', result.stdout, - 'trusted_server_facts setting should be false by default') -end - -step 'ensure $server_facts does not exist by default' -testdir = master.tmpdir(File.basename(__FILE__, ".*")) - -test_manifest = < directory, - mode => "0750", - owner => #{master.puppet['user']}, - group => #{master.puppet['group']}, -} -file { - '#{testdir}':; - '#{testdir}/environments':; - '#{testdir}/environments/production':; - '#{testdir}/environments/production/modules':; - '#{testdir}/environments/production/manifests':; -} - -file { '#{testdir}/environments/production/manifests/site.pp': - ensure => file, - content => 'notify{"abc$server_facts":} - ', -} -MANIFEST - -apply_manifest_on(master, test_manifest) - -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - } -} -with_puppet_running_on(master, master_opts) do - agents.each do |agent| - on(agent, puppet("agent -t --server #{master}"), - :acceptable_exit_codes => 2) do |result| - assert_match(/as 'abc'/, result.stdout, - "#{agent}: $server_facts 
should be empty prior to opt-in" ) - end - end -end - -step 'ensure $server_facts DO exist after the user opts-in' -master_opts['main']['trusted_server_facts'] = true -with_puppet_running_on(master, master_opts) do - agents.each do |agent| - on(agent, puppet("agent -t --server #{master}"), - :acceptable_exit_codes => 2) do |result| - assert_match(/abc{serverversion/, result.stdout, - "#{agent}: $server_facts should have some stuff" ) - end - end - - step 'ensure puppet issues a warning if an agent overwrites a server fact' - agents.each do |agent| - on(agent, puppet("agent -t --server #{master}", - 'ENV' => { 'FACTER_server_facts' => 'overwrite' }), - :acceptable_exit_codes => 1) do |result| - assert_match(/Attempt to assign to a reserved variable name: 'server_facts'/, - result.stderr, "#{agent}: $server_facts should warn if overwritten" ) - end - end -end diff --git a/acceptance/tests/language/binary_data_type.rb b/acceptance/tests/language/binary_data_type.rb new file mode 100644 index 00000000000..00a4a2fd56b --- /dev/null +++ b/acceptance/tests/language/binary_data_type.rb @@ -0,0 +1,83 @@ +test_name 'C98346: Binary data type' do + require 'puppet/acceptance/puppet_type_test_tools.rb' + extend Puppet::Acceptance::PuppetTypeTestTools + + tag 'audit:high', + 'audit:integration', # Tests that binary data is retains integrity + # between server and agent transport/application. + # The weak link here is final ruby translation and + # should not be OS sensitive. + 'server' + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + + tmp_filename_win = tmp_filename_else = '' + agents.each do |agent| + # ugh... this won't work with more than two agents of two types + if agent.platform =~ /32$/ + tmp_filename_win = "C:\\cygwin\\tmp\\#{tmp_environment}.txt" + else + tmp_filename_win = "C:\\cygwin64\\tmp\\#{tmp_environment}.txt" + end + tmp_filename_else = "/tmp/#{tmp_environment}.txt" + on(agent, "echo 'old content' > '/tmp/#{tmp_environment}.txt'") + end + # create a fake module files... 
file for binary_file() + on(master, puppet_apply("-e 'file{[\"#{environmentpath}/#{tmp_environment}/modules\",\"#{environmentpath}/#{tmp_environment}/modules/empty\",\"#{environmentpath}/#{tmp_environment}/modules/empty/files\"]: ensure => \"directory\"} file{\"#{environmentpath}/#{tmp_environment}/modules/empty/files/blah.txt\": content => \"binary, yo\"}'")) + + base64_relaxed = Base64.encode64("invasionfromspace#{random_string}").strip + base64_strict = Base64.strict_encode64("invasion from space #{random_string}\n") + base64_urlsafe = Base64.urlsafe_encode64("invasion from-space/#{random_string}\n") + + test_resources = [ + { :type => 'notify', :parameters => { :namevar => "1:$hell" }, :pre_code => "$hell = Binary('hello','%b')", + :assertions => { :assert_match => 'Notice: 1:hell' } }, + { :type => 'notify', :parameters => { :namevar => "2:$relaxed" }, :pre_code => "$relaxed = Binary('#{base64_relaxed}')", + :assertions => { :assert_match => "Notice: 2:#{base64_relaxed}" } }, + { :type => 'notify', :parameters => { :namevar => "3:$cHVwcGV0" }, :pre_code => "$cHVwcGV0 = Binary('cHVwcGV0')", + :assertions => { :assert_match => 'Notice: 3:cHVwcGV0' } }, + { :type => 'notify', :parameters => { :namevar => "4:$strict" }, :pre_code => "$strict = Binary('#{base64_strict}')", + :assertions => { :assert_match => "Notice: 4:#{base64_strict}" } }, + { :type => 'notify', :parameters => { :namevar => "5:$urlsafe" }, :pre_code => "$urlsafe = Binary('#{base64_urlsafe}')", + :assertions => { :assert_match => "Notice: 5:#{base64_urlsafe}" } }, + { :type => 'notify', :parameters => { :namevar => "6:$byte_array" }, :pre_code => "$byte_array = Binary([67,68])", + :assertions => { :assert_match => "Notice: 6:Q0Q=" } }, + { :type => 'notify', :parameters => { :namevar => "7:${empty_array}empty" }, :pre_code => "$empty_array = Binary([])", + :assertions => { :assert_match => "Notice: 7:empty" } }, + { :type => 'notify', :parameters => { :namevar => "8:${relaxed[1]}" }, + :assertions => { :assert_match => "Notice: 8:bg==" } }, + { :type => 'notify', :parameters => { :namevar => "9:${relaxed[1,3]}" }, + :assertions => { :assert_match => "Notice: 9:bnZh" } }, + { :type => 'notify', :parameters => { :namevar => "A:${utf8}" }, :pre_code => '$utf8=String(Binary([0xF0, 0x9F, 0x91, 0x92]),"%s")', + :assertions => { :assert_match => 'Notice: A:\\xF0\\x9F\\x91\\x92' } }, + { :type => 'notify', :parameters => { :namevar => "B:${type($bin_file)}" }, :pre_code => '$bin_file=binary_file("empty/blah.txt")', + :assertions => { :assert_match => 'Notice: B:Binary' } }, + { :type => 'file', :parameters => { :namevar => "$pup_tmp_filename", :content => "$relaxed" }, :pre_code => "$pup_tmp_filename = if $facts['os']['family'] == 'windows' { '#{tmp_filename_win}' } else { '#{tmp_filename_else}' }", + :assertions => { :assert_match => /#{base64_relaxed}/ } }, + ] + + sitepp_content = generate_manifest(test_resources) + assertion_code = generate_assertions(test_resources) + + create_sitepp(master, tmp_environment, sitepp_content) + + step "run agent in #{tmp_environment}, run all assertions" do + with_puppet_running_on(master, {}) do + agents.each do |agent| + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), :acceptable_exit_codes => [2]) do |result| + run_assertions(assertion_code, result) + end + end + end + end + +end diff --git a/acceptance/tests/language/exported_resources.rb b/acceptance/tests/language/exported_resources.rb new file mode 100644 index 00000000000..23c6c2d9e5d --- /dev/null +++ 
b/acceptance/tests/language/exported_resources.rb @@ -0,0 +1,180 @@ +test_name "C94788: exported resources using a yaml terminus for storeconfigs" do +require 'puppet/acceptance/environment_utils' +extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:integration', + 'audit:refactor', # This could be a component of a larger workflow scenario. + 'server' + + skip_test 'requires puppetserver to service restart' if @options[:type] != 'aio' + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + exported_username = 'er0ck' + + teardown do + step 'stop puppet server' do + on(master, "service #{master['puppetservice']} stop") + end + step 'remove cached agent json catalogs from the master' do + on(master, "rm -f #{File.join(master.puppet['yamldir'],'catalog','*')}", + :accept_all_exit_codes => true) + end + on(master, "mv #{File.join('','tmp','puppet.conf')} #{master.puppet['confdir']}", + :accept_all_exit_codes => true) + step 'clean out collected resources' do + on(hosts, puppet_resource("user #{exported_username} ensure=absent"), :accept_all_exit_codes => true) + end + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + storeconfigs_backend_name = 'json_storeconfigs' + step 'create a yaml storeconfigs terminus in the modulepath' do + moduledir = File.join(environmentpath,tmp_environment,'modules') + terminus_class_name = 'JsonStoreconfigs' + manifest = < directory, +} +file { + '#{moduledir}':; + '#{moduledir}/yaml_terminus':; + '#{moduledir}/yaml_terminus/lib':; + '#{moduledir}/yaml_terminus/lib/puppet':; + '#{moduledir}/yaml_terminus/lib/puppet/indirector':; + '#{moduledir}/yaml_terminus/lib/puppet/indirector/catalog':; + '#{moduledir}/yaml_terminus/lib/puppet/indirector/facts':; + '#{moduledir}/yaml_terminus/lib/puppet/indirector/node':; + '#{moduledir}/yaml_terminus/lib/puppet/indirector/resource':; +} +file { '#{moduledir}/yaml_terminus/lib/puppet/indirector/catalog/#{storeconfigs_backend_name}.rb': + ensure => file, + content => ' + require "puppet/indirector/catalog/yaml" + class Puppet::Resource::Catalog::#{terminus_class_name} < Puppet::Resource::Catalog::Yaml + def save(request) + raise ArgumentError.new("You can only save objects that respond to :name") unless request.instance.respond_to?(:name) + file = path(request.key) + basedir = File.dirname(file) + # This is quite likely a bad idea, since we are not managing ownership or modes. 
+ Dir.mkdir(basedir) unless Puppet::FileSystem.exist?(basedir) + begin + # We cannot dump anonymous modules in yaml, so dump to json + File.open(file, "w") { |f| f.write request.instance.to_json } + rescue TypeError => detail + Puppet.err "Could not save \#{self.name} \#{request.key}: \#{detail}" + end + end + def find(request) + nil + end + end + ', +} +file { '#{moduledir}/yaml_terminus/lib/puppet/indirector/facts/#{storeconfigs_backend_name}.rb': + ensure => file, + content => ' + require "puppet/indirector/facts/yaml" + class Puppet::Node::Facts::#{terminus_class_name} < Puppet::Node::Facts::Yaml + def find(request) + nil + end + end + ', +} +file { '#{moduledir}/yaml_terminus/lib/puppet/indirector/node/#{storeconfigs_backend_name}.rb': + ensure => file, + content => ' + require "puppet/indirector/node/yaml" + class Puppet::Node::#{terminus_class_name} < Puppet::Node::Yaml + def find(request) + nil + end + end + ', +} +file { '#{moduledir}/yaml_terminus/lib/puppet/indirector/resource/#{storeconfigs_backend_name}.rb': + ensure => file, + content => ' + require "puppet/indirector/yaml" + require "puppet/resource/catalog" + class Puppet::Resource::#{terminus_class_name} < Puppet::Indirector::Yaml + desc "Read resource instances from cached catalogs" + def search(request) + catalog_dir = File.join(Puppet.run_mode.server? ? Puppet[:yamldir] : Puppet[:clientyamldir], "catalog", "*") + results = Dir.glob(catalog_dir).collect { |file| + catalog = Puppet::Resource::Catalog.convert_from(:json, File.read(file)) + if catalog.name == request.options[:host] + next + end + catalog.resources.select { |resource| + resource.type == request.key && resource.exported + }.map! { |res| + data_hash = res.to_data_hash + parameters = data_hash["parameters"].map do |name, value| + Puppet::Parser::Resource::Param.new(:name => name, :value => value) + end + attrs = {:parameters => parameters, :scope => request.options[:scope]} + result = Puppet::Parser::Resource.new(res.type, res.title, attrs) + result.collector_id = "\#{catalog.name}|\#{res.type}|\#{res.title}" + result + } + }.flatten.compact + results + end + end + ', +} +# all the filtering is taken care of in the terminii +# so any tests on filtering belong with puppetdb or pe +file { '#{environmentpath}/#{tmp_environment}/manifests/site.pp': + ensure => file, + content => ' + node "#{master.hostname}" { + @@user{"#{exported_username}": ensure => present,} + } + node "default" { + # collect resources on all nodes (puppet prevents collection on same node) + User<<| |>> + } + ', +} +MANIFEST + apply_manifest_on(master, manifest, :catch_failures => true) + end + + # must specify environment in puppet.conf for it to pickup the terminus code in an environment module + # but we have to bounce the server to pickup the storeconfigs... config anyway + # we can't use with_puppet_running_on here because it uses puppet resource to bounce the server + # puppet resource tries to use yaml_storeconfig's path() which doesn't exist + # and fails back to yaml which indicates an attempted directory traversal and fails. + # we could implemnt path() properly, but i'm just going to start the server the old fashioned way + # and... 
config set is broken and doesn't add a main section + step 'turn on storeconfigs, start puppetserver the old fashioned way' do + on(master, "cp #{File.join(master.puppet['confdir'],'puppet.conf')} #{File.join('','tmp')}") + on(master, "echo [main] >> #{File.join(master.puppet['confdir'],'puppet.conf')}") + on(master, "echo environment=#{tmp_environment} >> #{File.join(master.puppet['confdir'],'puppet.conf')}") + on(master, puppet('config set storeconfigs true --section main')) + on(master, puppet("config set storeconfigs_backend #{storeconfigs_backend_name} --section main")) + on(master, "service #{master['puppetservice']} restart") + step 'run the master agent to export the resources' do + on(master, puppet("agent -t --environment #{tmp_environment}")) + end + agents.each do |agent| + next if agent == master + step 'run the agents to collect exported resources' do + on(agent, puppet("agent -t --environment #{tmp_environment}"), + :acceptable_exit_codes => 2) + on(agent, puppet_resource("user #{exported_username}"), :accept_all_exit_codes => true) do |result| + assert_match(/present/, result.stdout, 'collected resource not found') + end + end + end + end + +end diff --git a/acceptance/tests/language/functions_in_puppet_language.rb b/acceptance/tests/language/functions_in_puppet_language.rb new file mode 100644 index 00000000000..8ce841502f3 --- /dev/null +++ b/acceptance/tests/language/functions_in_puppet_language.rb @@ -0,0 +1,139 @@ +test_name 'Puppet executes functions written in the Puppet language' + +tag 'audit:high', + 'audit:integration', + 'audit:refactor', # use mk_tmp_environment_with_teardown helper for environment construction + 'server' + +teardown do + on master, 'rm -rf /etc/puppetlabs/code/modules/jenny' + on master, 'rm -rf /etc/puppetlabs/code/environments/tommy' + on master, 'rm -rf /etc/puppetlabs/code/environments/production/modules/one' + on master, 'rm -rf /etc/puppetlabs/code/environments/production/modules/three' + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end +end + +step 'Create some functions' do + + manifest = <<-EOF + File { + ensure => 'present', + owner => 'root', + group => 'root', + mode => '0644', + } + + file {['/etc/puppetlabs/', + '/etc/puppetlabs/code/', + '/etc/puppetlabs/code/modules/', + '/etc/puppetlabs/code/modules/jenny', + '/etc/puppetlabs/code/modules/jenny/functions', + '/etc/puppetlabs/code/modules/jenny/functions/nested', + '/etc/puppetlabs/code/environments', + '/etc/puppetlabs/code/environments/production', + '/etc/puppetlabs/code/environments/production/modules', + '/etc/puppetlabs/code/environments/production/modules/one', + '/etc/puppetlabs/code/environments/production/modules/one/functions', + '/etc/puppetlabs/code/environments/production/modules/one/manifests', + '/etc/puppetlabs/code/environments/production/modules/three', + '/etc/puppetlabs/code/environments/production/modules/three/functions', + '/etc/puppetlabs/code/environments/production/modules/three/manifests', + '/etc/puppetlabs/code/environments/tommy', + '/etc/puppetlabs/code/environments/tommy/modules', + '/etc/puppetlabs/code/environments/tommy/modules/two', + '/etc/puppetlabs/code/environments/tommy/modules/two/functions', + ]: + ensure => directory, + mode => '0755', + } + + # "Global" functions, no env + file { '/etc/puppetlabs/code/modules/jenny/functions/mini.pp': + content => 'function jenny::mini($a, $b) {if $a <= $b {$a} else {$b}}', + require => 
File['/etc/puppetlabs/code/modules/jenny/functions'], + } + file { '/etc/puppetlabs/code/modules/jenny/functions/nested/maxi.pp': + content => 'function jenny::nested::maxi($a, $b) {if $a >= $b {$a} else {$b}}', + require => File['/etc/puppetlabs/code/modules/jenny/functions/nested'], + } + + # Module "one", "production" env + file { '/etc/puppetlabs/code/environments/production/modules/one/functions/foo.pp': + content => 'function one::foo() {"This is the one::foo() function in the production environment"}', + require => File['/etc/puppetlabs/code/environments/production/modules/one/functions'], + } + file { '/etc/puppetlabs/code/environments/production/modules/one/manifests/init.pp': + content => 'class one { }', + require => File['/etc/puppetlabs/code/environments/production/modules/one/manifests'], + } + + # Module "three", "production" env + file { '/etc/puppetlabs/code/environments/production/modules/three/functions/baz.pp': + content => 'function three::baz() {"This is the three::baz() function in the production environment"}', + require => File['/etc/puppetlabs/code/environments/production/modules/three/functions'], + } + file { '/etc/puppetlabs/code/environments/production/modules/three/manifests/init.pp': + content => 'class three { }', + require => File['/etc/puppetlabs/code/environments/production/modules/three/functions'], + } + + # Module "two", "tommy" env + file { '/etc/puppetlabs/code/environments/tommy/modules/two/functions/bar.pp': + content => 'function two::bar() {"This is the two::bar() function in the tommy environment"}', + require => File['/etc/puppetlabs/code/environments/tommy/modules/two/functions'], + } + EOF + apply_manifest_on(master, manifest, {:catch_failures => true, :acceptable_exit_codes => [0,1]}) +end + +manifest = <<-MANIFEST + notice 'jenny::mini(1, 2) =', jenny::mini(1,2) + notice 'jenny::nested::maxi(1, 2) =', jenny::nested::maxi(1,2) + notice 'one::foo() =', one::foo() + require 'one'; notice 'three::baz() =', three::baz() +MANIFEST + +rc = apply_manifest_on(master, manifest, {:accept_all_exit_codes => true,}) + +step 'Call a global function' do + fail_test 'Failed to call a "global" function' \ + unless rc.stdout.include?('jenny::mini(1, 2) = 1') + end + +step 'Call a global nested function' do + fail_test 'Failed to call a "global" nested function' \ + unless rc.stdout.include?('jenny::nested::maxi(1, 2) = 2') + end + +step 'Call an env-specific function' do + fail_test 'Failed to call a function defined in the current environment' \ + unless rc.stdout.include?('This is the one::foo() function in the production environment') + end + +step 'Call a function defined in an un-included module' do + fail_test 'Failed to call a function defined in an un-required module' \ + unless rc.stdout.include?('This is the three::baz() function in the production environment') +end + +manifest = <<-MANIFEST.strip + notice "two::bar() =", two::bar() +MANIFEST + +# This should fail +step 'Call a function not defined in the current environment' do + rc = on master, puppet("apply -e '#{manifest}' --environment production"), {:accept_all_exit_codes => true,} + fail_test 'Should not be able to call a function not defined in the current environment' \ + unless rc.stderr.include?("Error: Evaluation Error: Unknown function: 'two::bar'") +end + +step 'Call an env-specific function in a non-default environment' do + rc = on master, puppet("apply -e '#{manifest}' --environment tommy") + fail_test 'Failed to call env-specific function from that environment' \ + unless 
rc.stdout.include?('This is the two::bar() function in the tommy environment') +end + diff --git a/acceptance/tests/language/objects_in_catalog.rb b/acceptance/tests/language/objects_in_catalog.rb new file mode 100644 index 00000000000..7861b15b986 --- /dev/null +++ b/acceptance/tests/language/objects_in_catalog.rb @@ -0,0 +1,41 @@ +test_name 'C99627: can use Object types in the catalog and apply/agent' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:integration', + 'audit:refactor' # The use of apply on a reference system should + # be adequate to test puppet. Running this in + # context of server/agent should not be necessary. + + manifest = <<-PP +type Mod::Foo = Object[{ + attributes => { + 'name' => String, + 'size' => Integer[0, default] + } +}] +define mod::foo_notifier(Mod::Foo $foo) { + notify { $foo.name: } +} +class mod { + mod::foo_notifier { xyz: + foo => Mod::Foo('A foo', 42) + } +} +include mod + PP + + agents.each do |agent| + # This is currently only expected to work with apply as the custom data type + # definition will not be present on the agent to deserialize properly + + step "apply manifest on agent #{agent.hostname} and assert notify output" do + apply_manifest_on(agent, manifest) do |result| + assert(result.exit_code == 0, "agent didn't exit properly: (#{result.exit_code})") + assert_match(/A foo/, result.stdout, 'agent didn\'t notify correctly') + end + end + end + +end diff --git a/acceptance/tests/language/pcore_generate_env_isolation.rb b/acceptance/tests/language/pcore_generate_env_isolation.rb new file mode 100644 index 00000000000..3468711bc75 --- /dev/null +++ b/acceptance/tests/language/pcore_generate_env_isolation.rb @@ -0,0 +1,86 @@ +test_name 'C98345: ensure puppet generate assures env. 
isolation' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:integration', + 'server' + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + tmp_environment2 = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + fq_tmp_environmentpath2 = "#{environmentpath}/#{tmp_environment2}" + + type_name = 'conflicting' + relative_type_dir = 'modules/conflict/lib/puppet/type' + relative_type_path = "#{relative_type_dir}/#{type_name}.rb" + step 'create custom type in two environments' do + on(master, "mkdir -p #{fq_tmp_environmentpath}/#{relative_type_dir}") + on(master, "mkdir -p #{fq_tmp_environmentpath2}/#{relative_type_dir}") + + custom_type1 = <<-END + Puppet::Type.newtype(:#{type_name}) do + newparam :name, :namevar => true + END + custom_type2 = "#{custom_type1}" + custom_type2 << " newparam :other\n" + custom_type1 << " end\n" + custom_type2 << " end\n" + create_remote_file(master, "#{fq_tmp_environmentpath}/#{relative_type_path}", custom_type1) + create_remote_file(master, "#{fq_tmp_environmentpath2}/#{relative_type_path}", custom_type2) + + site_pp1 = <<-PP + notify{$environment:} + #{type_name}{"somename":} + PP + site_pp2 = <<-PP + notify{$environment:} + #{type_name}{"somename": other => "uhoh"} + PP + create_sitepp(master, tmp_environment, site_pp1) + create_sitepp(master, tmp_environment2, site_pp2) + end + + on master, "chmod -R 755 /tmp/#{tmp_environment}" + on master, "chmod -R 755 /tmp/#{tmp_environment2}" + + with_puppet_running_on(master,{}) do + agents.each do |agent| + on(agent, puppet("agent -t --environment #{tmp_environment}"), + :acceptable_exit_codes => 2) + step 'run agent in environment with type with an extra parameter. 
try to use this parameter' do + on(agent, puppet("agent -t --environment #{tmp_environment2}"), + :accept_all_exit_codes => true) do |result| + unless agent['locale'] == 'ja' + assert_match("Error: no parameter named 'other'", result.output, + 'did not produce environment isolation issue as expected') + end + end + end + end + + step 'generate pcore files' do + on(master, puppet("generate types --environment #{tmp_environment}")) + on(master, puppet("generate types --environment #{tmp_environment2}")) + end + + agents.each do |agent| + step 'rerun agents after generate, ensure proper runs' do + on(agent, puppet("agent -t --environment #{tmp_environment}"), + :acceptable_exit_codes => 2) + on(agent, puppet("agent -t --environment #{tmp_environment2}"), + :acceptable_exit_codes => 2) + end + end + end + +end diff --git a/acceptance/tests/language/pcore_resource_types_should_have_precedence_over_ruby.rb b/acceptance/tests/language/pcore_resource_types_should_have_precedence_over_ruby.rb new file mode 100644 index 00000000000..d0eb3f54618 --- /dev/null +++ b/acceptance/tests/language/pcore_resource_types_should_have_precedence_over_ruby.rb @@ -0,0 +1,114 @@ +test_name 'C98097 - generated pcore resource types should be loaded instead of ruby for custom types' do + +tag 'audit:high', + 'audit:integration', + 'audit:refactor', # use `mk_tmp_environment_with_teardown` helper to build environment + 'server' + + environment = 'production' + step 'setup - install module with custom ruby resource type' do + #{{{ + testdir = master.tmpdir('c98097') + codedir = "#{testdir}/codedir" + + site_manifest_content =< true) do + desc "Name of mycustomtype instance" + $stderr.puts "this indicates that we are running ruby code and should not be seen when running generated pcore resource" + end + + def refresh + end + +end +EOM + + apply_manifest_on(master, < true) +File { + ensure => directory, + mode => "0755", +} + +file {[ + '#{codedir}', + '#{codedir}/environments', + '#{codedir}/environments/#{environment}', + '#{codedir}/environments/#{environment}/manifests', + '#{codedir}/environments/#{environment}/modules', + '#{codedir}/environments/#{environment}/modules/mymodule', + '#{codedir}/environments/#{environment}/modules/mymodule/manifests', + '#{codedir}/environments/#{environment}/modules/mymodule/lib', + '#{codedir}/environments/#{environment}/modules/mymodule/lib/puppet', + '#{codedir}/environments/#{environment}/modules/mymodule/lib/puppet/type' + ]: +} + +file { '#{codedir}/environments/#{environment}/manifests/site.pp': + ensure => file, + content => '#{site_manifest_content}', +} + +file { '#{codedir}/environments/#{environment}/modules/mymodule/lib/puppet/type/mycustomtype.rb': + ensure => file, + content => '#{custom_type_content}', +} +MANIFEST + + conf_opts = { + 'main' => { + 'environmentpath' => "#{codedir}/environments" + } + } + + backup_file = backup_the_file(master, puppet_config(master, 'confdir', section: 'master'), testdir, 'puppet.conf') + lay_down_new_puppet_conf master, conf_opts, testdir + + teardown do + restore_puppet_conf_from_backup( master, backup_file ) + # See PUP-6995 + on(master, "rm -f #{puppet_config(master, 'yamldir', section: 'master')}/node/*.yaml") + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + #}}} + + catalog_results = {} + catalog_results[master.hostname] = { 'ruby_cat' => '', 'pcore_cat' => '' } + + step 'compile catalog using ruby resource' do + on master, 
puppet('catalog', 'find', master.hostname) do |result| + assert_match(/running ruby code/, result.stderr) + catalog_results[master.hostname]['ruby_cat'] = JSON.parse(result.stdout.sub(/^[^{]+/,'')) + end + end + + step 'generate pcore type from ruby type' do + on master, puppet('generate', 'types', '--environment', environment) + end + + step 'compile catalog and make sure that ruby code is NOT executed' do + on master, puppet('catalog', 'find', master.hostname) do |result| + refute_match(/running ruby code/, result.stderr) + catalog_results[master.hostname]['pcore_cat'] = JSON.parse(result.stdout.sub(/^[^{]+/,'')) + end + end + + step 'ensure that the resources created in the catalog using ruby and pcore are the same' do + assert_equal(catalog_results[master.hostname]['ruby_cat']['resources'], catalog_results[master.hostname]['pcore_cat']['resources']) + end + + end +end diff --git a/acceptance/tests/language/resource_refs_with_nested_arrays.rb b/acceptance/tests/language/resource_refs_with_nested_arrays.rb index 5b11854d994..b8c345ff3bb 100644 --- a/acceptance/tests/language/resource_refs_with_nested_arrays.rb +++ b/acceptance/tests/language/resource_refs_with_nested_arrays.rb @@ -1,5 +1,8 @@ test_name "#7681: Allow using array variables in resource references" +tag 'audit:high', + 'audit:unit' + agents.each do |agent| test_manifest = < /tmp/#{tmp_environment}.txt" + end + + # first attempts at a reasonable table driven test. needs API work + # FIXME: + # expand this to other resource types, make parameters arbitrary, make assertions arbitrary + # FIXME: add context messaging to each instance + notify_redacted = 'Sensitive \[value redacted\]' + file_redacted = 'changed \[redacted\] to \[redacted\]' + test_resources = [ + {:type => 'notify', :parameters => {:namevar => "1:${Sensitive.new('sekrit1')}"}, + :assertions => [{:refute_match => 'sekrit1'}, {:assert_match => "1:#{notify_redacted}"}]}, + {:type => 'notify', :parameters => {:namevar => "2:${Sensitive.new($meh2)}"}, :pre_code => '$meh2="sekrit2"', + :assertions => [{:refute_match => 'sekrit2'}, {:assert_match => "2:#{notify_redacted}"}]}, + {:type => 'notify', :parameters => {:namevar => "3:meh", :message => '"3:${Sensitive.new(\'sekrit3\')}"'}, + :assertions => [{:refute_match => 'sekrit3'}, {:assert_match => "3:#{notify_redacted}"}]}, + {:type => 'notify', :parameters => {:namevar => "4:meh", :message => "Sensitive.new($meh4)"}, :pre_code => '$meh4="sekrit4"', + :assertions => [{:refute_match => 'sekrit4'}, {:assert_match => file_redacted}]}, + {:type => 'notify', :parameters => {:namevar => "5:meh", :message => "$meh5"}, :pre_code => '$meh5=Sensitive.new("sekrit5")', + :assertions => [{:refute_match => 'sekrit5'}, {:assert_match => file_redacted}]}, + {:type => 'notify', :parameters => {:namevar => "6:meh", :message => '"6:${meh6}"'}, :pre_code => '$meh6=Sensitive.new("sekrit6")', + :assertions => [{:refute_match => 'sekrit6'}, {:assert_match => "6:#{notify_redacted}"}]}, + {:type => 'notify', :parameters => {:namevar => "7:${Sensitive('sekrit7')}"}, + :assertions => [{:refute_match => 'sekrit7'}, {:assert_match => "7:#{notify_redacted}"}]}, + # unwrap(), these should be en-clair + {:type => 'notify', :parameters => {:namevar => "8:${unwrap(Sensitive.new('sekrit8'))}"}, + :assertions => {:assert_match => "8:sekrit8"}}, + {:type => 'notify', :parameters => {:namevar => "9:meh", :message => '"9:${unwrap(Sensitive.new(\'sekrit9\'))}"'}, + :assertions => {:assert_match => "9:sekrit9"}}, + {:type => 'notify', :parameters => {:namevar 
=> "A:meh", :message => '"A:${unwrap($mehA)}"'}, :pre_code => '$mehA=Sensitive.new("sekritA")', + :assertions => {:assert_match => "A:sekritA"}}, + {:type => 'notify', :parameters => {:namevar => "B:meh", :message => '"B:${$mehB.unwrap}"'}, :pre_code => '$mehB=Sensitive.new("sekritB")', + :assertions => {:assert_match => "B:sekritB"}}, + {:type => 'notify', :parameters => {:namevar => "C:meh", :message => '"C:${$mehC.unwrap |$unwrapped| { "blk_${unwrapped}_blk" } } nonblk_${mehC}_nonblk"'}, :pre_code => '$mehC=Sensitive.new("sekritC")', + :assertions => {:assert_match => ["C:blk_sekritC_blk", "nonblk_#{notify_redacted}_nonblk"]}}, + # for --show_diff + {:type => 'file', :parameters => {:namevar => "$pup_tmp_filename", :content => "Sensitive.new('sekritD')"}, :pre_code => "$pup_tmp_filename = if $facts['os']['family'] == 'windows' { '#{tmp_filename_win}' } else { '#{tmp_filename_else}' }", + :assertions => [{:refute_match => 'sekritD'}, {:assert_match => /#{tmp_environment}\.txt..content. #{file_redacted}/}]}, + + ] + + sitepp_content = generate_manifest(test_resources) + assertion_code = generate_assertions(test_resources) + + # Make a copy of the full set of 'test_resources' but filtered down to include + # only the assertions of type ':refute_match'. So for example, where the + # 'test_resources' array might have an entry like this... + # + # {:type => 'notify', ... + # :assertions => [{:refute_match => 'sekrit1'}, + # {:assert_match => "1:#{notify_redacted}"}]} + # + # ... the ':assert_match' entry would be filtered out in the new + # 'refutation_resources' array, producing: + # + # {:type => 'notify', ... + # :assertions => [{:refute_match => 'sekrit1'}]} + # + # This is done so that when validating the log output, we can refute the + # existence of any of the sensitive info in the log without having to + # assert that redacted info is in the log. The redacted info appears in + # the console output from the Puppet agent run - by virtue of including a + # '--debug' flag on the agent command line - whereas the redacted info is not + # expected to be piped into the log. + + refutation_resources = test_resources.collect do |assertion_group| + refutation_group = assertion_group.clone + refutation_group[:assertions] = assertion_group[:assertions].select do |assertion| + assertion.has_key?(:refute_match) + end + refutation_group + end + refutation_code = generate_assertions(refutation_resources) + + create_sitepp(master, tmp_environment, sitepp_content) + + step "run agent in #{tmp_environment}, run all assertions" do + with_puppet_running_on(master,{}) do + agents.each do |agent| + # redirect logging to a temp location to avoid platform specific syslogs + on(agent, puppet("agent -t --debug --trace --show_diff --environment #{tmp_environment}"), :accept_all_exit_codes => true) do |result| + assert_equal(result.exit_code, 2,'puppet agent run failed') + + run_assertions(assertion_code, result) unless agent['locale'] == 'ja' + + step "assert no redacted data in log" do + run_assertions(refutation_code, result) + end + end + + # don't do this before the agent log scanning, above. 
it will skew the results + step "assert no redacted data in vardir" do + # no recursive grep in solaris :facepalm: + on(agent, "find #{agent.puppet['vardir']} -type f | xargs grep sekrit", :accept_all_exit_codes => true) do |result| + refute_match(/sekrit(1|2|3|6|7)/, result.stdout, 'found redacted data we should not have') + #TODO: if/when this is fixed, we should just be able to eval(assertion_code_ in this result block also! + expect_failure 'file resource contents will end up in the cached catalog en-clair' do + refute_match(/sekritD/, result.stdout, 'found redacted file data we should not have') + end + end + end + + end + end + end + +end diff --git a/acceptance/tests/language/server_set_facts.rb b/acceptance/tests/language/server_set_facts.rb new file mode 100644 index 00000000000..af6a455c264 --- /dev/null +++ b/acceptance/tests/language/server_set_facts.rb @@ -0,0 +1,50 @@ +test_name 'C64667: ensure server_facts is set and error if any value is overwritten by an agent' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:acceptance', # Validating server/client interaction + 'server' + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + + step 'ensure $server_facts exist' do + create_sitepp(master, tmp_environment, <<-SITE) + notify{"abc$server_facts":} + SITE + + master_opts = {} + with_puppet_running_on(master, master_opts) do + agents.each do |agent| + on(agent, puppet("agent -t --environment #{tmp_environment}"), + :acceptable_exit_codes => 2) do |result| + assert_match(/abc{serverversion/, result.stdout, + "#{agent}: $server_facts should have some stuff" ) + end + end + end + end + + step 'ensure puppet issues a warning if an agent overwrites a server fact' do + agents.each do |agent| + on(agent, puppet("agent -t", + 'ENV' => { 'FACTER_server_facts' => 'overwrite' }), + :acceptable_exit_codes => 1) do |result| + # Do not perform this check on non-English hosts + unless agent['locale'] == 'ja' + assert_match(/Error.*Attempt to assign to a reserved variable name: 'server_facts'/, + result.stderr, "#{agent}: $server_facts should error if overwritten" ) + end + end + end + end +end diff --git a/acceptance/tests/loader/autoload_from_resource_type_decl.rb b/acceptance/tests/loader/autoload_from_resource_type_decl.rb new file mode 100644 index 00000000000..e50f801e494 --- /dev/null +++ b/acceptance/tests/loader/autoload_from_resource_type_decl.rb @@ -0,0 +1,152 @@ +test_name 'C100303: Resource type statement triggered auto-loading works both with and without generated types' do + tag 'risk:high' + + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/agent_fqdn_utils' + extend Puppet::Acceptance::AgentFqdnUtils + + # create the file and make sure its empty and accessible by everyone + def empty_execution_log_file(host, path) + create_remote_file(host, path, '') + on(host, "chmod 777 '#{path}'") + end + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + + relative_type_dir = 'modules/one/lib/puppet/type' + relative_type_path = "#{relative_type_dir}/type_tst.rb" + + execution_log = {} 
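+ # execution_log maps each node's fqdn to a scratch file; the custom type created below
+ # appends a timestamped marker to that file whenever its ruby body runs, which is how the
+ # test observes where the type code was actually loaded and evaluated.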
+ execution_log[agent_to_fqdn(master)] = master.tmpfile('master_autoload_resource') + agents.each do |agent| + execution_log[agent_to_fqdn(agent)] = agent.tmpfile('agent_autoload_resource') + end + + teardown do + on(master, "rm -f '#{execution_log[agent_to_fqdn(master)]}'") + agents.each do |agent| + on(agent, "rm -f '#{execution_log[agent_to_fqdn(agent)]}'") + + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step 'create custom type' do + on(master, "mkdir -p '#{fq_tmp_environmentpath}/#{relative_type_dir}'") + + # create a custom type that will write out to a different file on each agent + # this way we can verify whether the newtype code was executed on each system + custom_type = <<-END + Puppet::Type.newtype(:type_tst) do + newparam(:name, :namevar => true) do + fqdn = Facter.value('networking.fqdn') + if fqdn == '#{agent_to_fqdn(master)}' + File.open("#{execution_log[agent_to_fqdn(master)]}", 'a+') { |f| f.puts("found_type_tst: " + Time.now.to_s) } + end + END + agents.each do |agent| + custom_type << <<-END + if fqdn == '#{agent_to_fqdn(agent)}' + File.open("#{execution_log[agent_to_fqdn(agent)]}", 'a+') { |f| f.puts("found_type_tst: " + Time.now.to_s) } + end + END + end + custom_type << <<-END + Puppet.notice("found_type_tst") + end + end + END + create_remote_file(master, "#{fq_tmp_environmentpath}/#{relative_type_path}", custom_type) + + site_pp = <<-PP + Resource['type_tst'] { 'found_type': } + PP + create_sitepp(master, tmp_environment, site_pp) + end + on(master, "chmod -R 755 '/tmp/#{tmp_environment}'") + + # when the agent does its run, the newtype is executed on both the agent and master nodes + # so we should see a message in the execution log file on the agent and the master + agents.each do |agent| + with_puppet_running_on(master, {}) do + + empty_execution_log_file(master, execution_log[agent_to_fqdn(master)]) + empty_execution_log_file(agent, execution_log[agent_to_fqdn(agent)]) + + on(agent, puppet("agent -t --environment '#{tmp_environment}'")) do |puppet_result| + assert_match(/\/File\[.*\/type_tst.rb\]\/ensure: defined content as/, puppet_result.stdout, + 'Expected to see defined content message for type: type_tst') + assert_match(/Notice: found_type_tst/, puppet_result.stdout, 'Expected to see the notice from the new type: type_tst') + end + + on(master, "cat '#{execution_log[agent_to_fqdn(master)]}'") do |cat_result| + assert_match(/found_type_tst:/, cat_result.stdout, + "Expected to see execution log entry on master #{agent_to_fqdn(master)}") + end + on(agent, "cat '#{execution_log[agent_to_fqdn(agent)]}'") do |cat_result| + assert_match(/found_type_tst:/, cat_result.stdout, + "Expected to see execution log entry on agent #{agent_to_fqdn(agent)}") + end + end + end + + # when generating the pcore the newtype should only be run on the master node + step 'generate pcore files' do + # start with an empty execution log + empty_execution_log_file(master, execution_log[agent_to_fqdn(master)]) + agents.each do |agent| + empty_execution_log_file(agent, execution_log[agent_to_fqdn(agent)]) + end + + on(master, puppet("generate types --environment '#{tmp_environment}'")) do |puppet_result| + assert_match(/Notice: Generating '\/.*\/type_tst\.pp' using 'pcore' format/, puppet_result.stdout, + 'Expected to see Generating message for type: type_tst') + assert_match(/Notice: found_type_tst/, puppet_result.stdout, 'Expected to see log entry on master ') + end + + # we should see a log entry on the 
master node + on(master, "cat '#{execution_log[agent_to_fqdn(master)]}'") do |cat_result| + assert_match(/found_type_tst:/, cat_result.stdout, + "Expected to see execution log entry on master #{agent_to_fqdn(master)}") + end + + # we should not see any log entries on any of the agent nodes + agents.each do |agent| + next if agent == master + on(agent, "cat '#{execution_log[agent_to_fqdn(agent)]}'") do |cat_result| + assert_empty(cat_result.stdout.chomp, "Expected execution log file to be empty on agent node #{agent_to_fqdn(agent)}") + end + end + end + + empty_execution_log_file(master, execution_log[agent_to_fqdn(master)]) + agents.each do |agent| + next if agent == master + empty_execution_log_file(agent, execution_log[agent_to_fqdn(agent)]) + + # this test is relying on the beaker helper with_puppet_running_on() to restart the server + # Compilation should now work using the generated types, + # so we should only see a log entry on the agent node and nothing on the master node + with_puppet_running_on(master, {}) do + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), + :acceptable_exit_codes => 0) do |puppet_result| + assert_match(/Notice: found_type_tst/, puppet_result.stdout, 'Expected to see output from new type: type_tst') + end + end + + on(agent, "cat '#{execution_log[agent_to_fqdn(agent)]}'") do |cat_result| + assert_match(/found_type_tst:/, cat_result.stdout, + "Expected to see an execution log entry on agent #{agent_to_fqdn(agent)}") + end + end + + on(master, "cat '#{execution_log[agent_to_fqdn(master)]}'") do |cat_result| + assert_empty(cat_result.stdout.chomp, "Expected master execution log to be empty #{agent_to_fqdn(master)}") + end +end diff --git a/acceptance/tests/loader/func4x_loadable_from_modules.rb b/acceptance/tests/loader/func4x_loadable_from_modules.rb index fd729d20f1b..8e8a4d8c9b0 100644 --- a/acceptance/tests/loader/func4x_loadable_from_modules.rb +++ b/acceptance/tests/loader/func4x_loadable_from_modules.rb @@ -19,6 +19,10 @@ require 'puppet/acceptance/temp_file_utils' extend Puppet::Acceptance::TempFileUtils + +tag 'audit:high', + 'audit:unit' # This should be covered adequately by unit tests + initialize_temp_dirs agents.each do |agent| @@ -66,11 +70,11 @@ class helloworld { SOURCE # Run apply to generate the file with the output - on agent, puppet('apply', '-e', "'include helloworld'", '--config', puppetconf) + on(agent, puppet('apply', '-e', "'include helloworld'", '--config', puppetconf)) # Assert that the file was written with the generated content - on(agent, "cat #{File.join(target_path, 'result.txt')}") do - assert_match(/^Generated, 1 => 10, 2 => 20, 3 => 30$/, stdout, "Generated the wrong content") + on(agent, "cat #{File.join(target_path, 'result.txt')}") do |result| + assert_match(/^Generated, 1 => 10, 2 => 20, 3 => 30$/, result.stdout, "Generated the wrong content") end end diff --git a/acceptance/tests/loader/resource_triggers_autoload.rb b/acceptance/tests/loader/resource_triggers_autoload.rb new file mode 100644 index 00000000000..cfa193cb6d8 --- /dev/null +++ b/acceptance/tests/loader/resource_triggers_autoload.rb @@ -0,0 +1,57 @@ +test_name 'C100296: can auto-load defined types using a Resource statement' do + tag 'risk:high' + + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + + teardown do + 
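+    # look up each agent's lastrunfile path and remove it, so the runs performed in
+    # this test do not leave cached run state behind for later tests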
agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + relative_define_type_dir = 'modules/one/manifests' + relative_define_type_1_path = "#{relative_define_type_dir}/tst1.pp" + relative_define_type_2_path = "#{relative_define_type_dir}/tst2.pp" + step 'create custom type in two environments' do + on(master, "mkdir -p #{fq_tmp_environmentpath}/#{relative_define_type_dir}") + + define_type_1 = <<-END + define one::tst1($var) { + notify { "tst1: ${var}": } + } + END + define_type_2 = <<-END + define one::tst2($var) { + notify { "tst2: ${var}": } + } + END + create_remote_file(master, "#{fq_tmp_environmentpath}/#{relative_define_type_1_path}", define_type_1) + create_remote_file(master, "#{fq_tmp_environmentpath}/#{relative_define_type_2_path}", define_type_2) + + site_pp = <<-PP + each(['tst1', 'tst2']) |$nr| { + Resource["one::${nr}"] { "some_title_${nr}": var => "Define found one::${nr}" } + } + PP + create_sitepp(master, tmp_environment, site_pp) + end + + on(master, "chmod -R 755 /tmp/#{tmp_environment}") + + with_puppet_running_on(master, {}) do + agents.each do |agent| + on(agent, puppet("agent -t --environment #{tmp_environment}"), + :acceptable_exit_codes => 2) do |puppet_result| + assert_match(/Notice: tst1: Define found one::tst1/, puppet_result.stdout, 'Expected to see output from define notify') + assert_match(/Notice: tst2: Define found one::tst2/, puppet_result.stdout, 'Expected to see output from define notify') + end + end + end +end diff --git a/acceptance/tests/lookup/config3_interpolation.rb b/acceptance/tests/lookup/config3_interpolation.rb new file mode 100644 index 00000000000..002fc231621 --- /dev/null +++ b/acceptance/tests/lookup/config3_interpolation.rb @@ -0,0 +1,80 @@ +test_name 'C99578: lookup should allow interpolation in hiera3 configs' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:integration', + 'audit:refactor', # This test specifically tests interpolation on the master. + # Recommend adding an additonal test that validates + # lookup in a masterless setup. 
+ 'server' + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + master_confdir = puppet_config(master, 'confdir', section: 'master') + + hiera_conf_backup = master.tmpfile('C99578-hiera-yaml') + + step "backup global hiera.yaml" do + on(master, "cp -a #{master_confdir}/hiera.yaml #{hiera_conf_backup}", :acceptable_exit_codes => [0,1]) + end + + teardown do + on(master, "mv #{hiera_conf_backup} #{master_confdir}/hiera.yaml", :acceptable_exit_codes => [0,1]) + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "create hiera configs in #{tmp_environment} and global" do + step "create global hiera.yaml and module data" do + create_remote_file(master, "#{master_confdir}/hiera.yaml", <<-HIERA) +--- +:backends: + - "yaml" +:hierarchy: + - "%{calling_class_path}" + - "%{calling_class}" + - "%{calling_module}" + - "common" + HIERA + + on(master, "mkdir -p #{fq_tmp_environmentpath}/hieradata/") + on(master, "mkdir -p #{fq_tmp_environmentpath}/modules/some_mod/manifests") + create_remote_file(master, "#{fq_tmp_environmentpath}/modules/some_mod/manifests/init.pp", <<-PP) +class some_mod { + notify { "${lookup('environment_key')}": } +} + PP + + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/some_mod.yaml", <<-YAML) +--- +environment_key: "env value" + YAML + + create_sitepp(master, tmp_environment, <<-SITE) +include some_mod + SITE + + on(master, "chmod -R 775 #{fq_tmp_environmentpath}") + on(master, "chmod -R 775 #{master_confdir}") + end + end + + with_puppet_running_on(master,{}) do + agents.each do |agent| + step "agent lookup" do + on(agent, puppet('agent', "-t --environment #{tmp_environment} --debug"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup didn't exit properly: (#{result.exit_code})") + assert_match(/env value/, result.stdout, + "agent lookup didn't find correct key") + end + end + end + end + +end diff --git a/acceptance/tests/lookup/config5_interpolation.rb b/acceptance/tests/lookup/config5_interpolation.rb new file mode 100644 index 00000000000..fffaf6d8515 --- /dev/null +++ b/acceptance/tests/lookup/config5_interpolation.rb @@ -0,0 +1,135 @@ +test_name 'C99578: hiera5 lookup config with interpolated scoped nested variables' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + + +tag 'audit:high', + 'audit:integration', + 'audit:refactor', # This test specifically tests interpolation on the master. + # Recommend adding an additonal test that validates + # lookup in a masterless setup. 
+ 'server' + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type + '1') + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "create environment hiera5.yaml and environment data" do + + create_remote_file(master, "#{fq_tmp_environmentpath}/hiera.yaml", <<-HIERA) +--- +version: 5 +defaults: + datadir: 'hieradata' + data_hash: yaml_data +hierarchy: + - name: "Global settings" + path: "global.yaml" + - name: "Role specific settings" + paths: + - "roles/%{::roles.0}.yaml" + - name: "Other Role specific settings" + paths: + - "roles/%{roles2.0}.yaml" + - name: "scoped variable" + paths: + - "roles/%{::myclass::myvar.0}.yaml" + - name: "nested hash variable" + paths: + - "roles/%{::hash_array.key1.0}.yaml" + HIERA + + on(master, "mkdir -p #{fq_tmp_environmentpath}/hieradata/roles") + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/global.yaml", <<-YAML) +roles: + - test1 +roles2: + - test2 +data: + - "from global" + YAML + + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/roles/test1.yaml", <<-YAML) +data: + - 'from test1' + YAML + + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/roles/test2.yaml", <<-YAML) +data: + - 'from test2' + YAML + + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/roles/test3.yaml", <<-YAML) +data: + - 'from test3' + YAML + + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/roles/test4.yaml", <<-YAML) +data: + - 'from test4' + YAML + + create_sitepp(master, tmp_environment, <<-SITE) +class myclass { + $myvar = ['test3'] +} +include myclass + +$hash_array = {key1 => ['test4']} + +$roles = lookup('roles') +$data = lookup('data', Array[String], 'unique') +notify{"data: ${data}":} +$hiera_array_data = hiera_array('data') +notify{"hiera_array_data: ${hiera_array_data}":} + +$roles2 = lookup('roles2') +$data2 = lookup('data', Array[String], 'unique') +notify{"data2: ${data2}":} +$hiera_array_data2 = hiera_array('data') +notify{"hiera_array_data2: ${hiera_array_data2}":} + SITE + + on(master, "chmod -R 775 #{fq_tmp_environmentpath}") + end + + with_puppet_running_on(master,{}) do + agents.each do |agent| + step "agent lookups: #{agent.hostname}, hiera5" do + on(agent, puppet('agent', "-t --environment #{tmp_environment}"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup didn't exit properly: (#{result.exit_code})") + assert_match(/data: \[from global, from test1/, result.stdout, + "agent lookup didn't interpolate with hiera value") + assert_match(/hiera_array_data: \[from global, from test1/, result.stdout, + "agent hiera_array didn't interpolate with hiera value") + + assert_match(/data2: \[from global, from test1, from test2/, result.stdout, + "agent lookup didn't interpolate non-global scope with hiera value") + assert_match(/hiera_array_data2: \[from global, from test1, from test2/, result.stdout, + "agent hiera_array didn't interpolate non-global scope with hiera value") + + assert_match(/data2: \[from global, from test1, from test2, from test3/, result.stdout, + "agent lookup didn't interpolate class scope with hiera value") + assert_match(/hiera_array_data2: \[from global, from test1, from test2, from test3/, result.stdout, + "agent hiera_array didn't interpolate class scope with hiera value") + + 
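+            # the "from test4" values can only appear if the "%{::hash_array.key1.0}" path
+            # in hiera.yaml interpolated the nested hash and array index correctly, which
+            # is what the final pair of assertions below checks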
assert_match(/data2: \[from global, from test1, from test2, from test3, from test4\]/, result.stdout, + "agent lookup didn't interpolate nested hashes with hiera value") + assert_match(/hiera_array_data2: \[from global, from test1, from test2, from test3, from test4\]/, result.stdout, + "agent hiera_array didn't interpolate nested hashes with hiera value") + end + end + end + + end + +end diff --git a/acceptance/tests/lookup/hiera3_custom_backend.rb b/acceptance/tests/lookup/hiera3_custom_backend.rb new file mode 100644 index 00000000000..f11e4d47053 --- /dev/null +++ b/acceptance/tests/lookup/hiera3_custom_backend.rb @@ -0,0 +1,102 @@ +test_name 'C99630: hiera v3 custom backend' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + require 'puppet/acceptance/temp_file_utils.rb' + extend Puppet::Acceptance::TempFileUtils + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # Master is not needed for this test. Refactor + # to use puppet apply with a local module tree. + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + puppetserver_config = "#{master['puppetserver-confdir']}/puppetserver.conf" + existing_loadpath = read_tk_config_string(on(master, "cat #{puppetserver_config}").stdout.strip)['jruby-puppet']['ruby-load-path'].first + confdir = puppet_config(master, 'confdir', section: 'master') + + hiera_conf_backup = master.tmpfile('C99629-hiera-yaml') + + step "backup global hiera.yaml" do + on(master, "cp -a #{confdir}/hiera.yaml #{hiera_conf_backup}", :acceptable_exit_codes => [0,1]) + end + + teardown do + step 'delete custom backend, restore default hiera config' do + on(master, "rm #{existing_loadpath}/hiera/backend/custom_backend.rb", :acceptable_exit_codes => [0,1]) + on(master, "mv #{hiera_conf_backup} #{confdir}/hiera.yaml", :acceptable_exit_codes => [0,1]) + on(master, "/opt/puppetlabs/server/bin/puppetserver gem uninstall --executables --force hiera") + on(master, "/opt/puppetlabs/puppet/bin/gem uninstall --executables --force hiera") + end + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "install hiera v3 gem" do + # for puppet agent <-> server, hiera must be installed using puppetserver's gem command + on(master, "/opt/puppetlabs/server/bin/puppetserver gem install --no-document hiera") + # for puppet lookup, hiera must be installed using puppet's gem command + on(master, "/opt/puppetlabs/puppet/bin/gem install --no-document hiera") + end + + step "create hiera v5 config and v3 custom backend" do + on(master, "cp #{confdir}/hiera.yaml /tmp") + create_remote_file(master, "#{confdir}/hiera.yaml", <<-HIERA) +--- +version: 5 +hierarchy: + - name: Test + hiera3_backend: custom + HIERA + on(master, "chmod -R #{PUPPET_CODEDIR_PERMISSIONS} #{confdir}") + + on(master, "mkdir -p #{existing_loadpath}/hiera/backend/") + custom_backend_rb = <<-RB +class Hiera + module Backend + class Custom_backend + def lookup(key, scope, order_override, resolution_type, context) + return 'custom value' unless (key == 'lookup_options') + end + end + end +end + RB + create_remote_file(master, "#{existing_loadpath}/hiera/backend/custom_backend.rb", custom_backend_rb) + on(master, "chmod #{PUPPET_CODEDIR_PERMISSIONS} #{existing_loadpath}/hiera/backend/custom_backend.rb") + end + + step "create site.pp which calls 
lookup on our keys" do + create_sitepp(master, tmp_environment, <<-SITE) + notify { "${lookup('anykey')}": } + SITE + on(master, "chmod -R #{PUPPET_CODEDIR_PERMISSIONS} #{fq_tmp_environmentpath}") + end + + step 'assert lookups using lookup subcommand on the master' do + on(master, puppet('lookup', "--environment #{tmp_environment}", '--explain', 'anykey'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/custom value/, result.stdout, + "lookup subcommand didn't find correct key") + end + end + + with_puppet_running_on(master,{}) do + agents.each do |agent| + step "agent manifest lookup on #{agent.hostname}" do + on(agent, puppet('agent', "-t --environment #{tmp_environment}"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup didn't exit properly: (#{result.exit_code})") + assert_match(/custom value/, result.stdout, + "agent lookup didn't find correct key") + end + end + end + end + +end diff --git a/acceptance/tests/lookup/lookup.rb b/acceptance/tests/lookup/lookup.rb new file mode 100644 index 00000000000..a65d5a347ae --- /dev/null +++ b/acceptance/tests/lookup/lookup.rb @@ -0,0 +1,334 @@ +test_name "Lookup data using the agnostic lookup function" do + # pre-docs: + # https://puppet-on-the-edge.blogspot.com/2015/01/puppet-40-data-in-modules-and.html + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # Master is not needed for this test. Refactor + # to use puppet apply with a local module tree. + # Use mk_tmp_environment_with_teardown to create environment. + 'server' + + testdir = master.tmpdir('lookup') + + module_name = "data_module" + module_name2 = "other_module" + hash_name = "hash_name" + array_key = "array_key" + + env_data_implied_key = "env_data_implied" + env_data_implied_value = "env_implied_a" + env_data_key = "env_data" + env_data_value = "env_a" + env_hash_key = "env_hash_key" + env_hash_value = "env_class_a" + env_array_value0 = "env_array_a" + env_array_value1 = "env_array_b" + + module_data_implied_key = "module_data_implied" + module_data_implied_value = "module_implied_b" + module_data_key = "module_data" + module_data_value = "module_b" + module_data_value_other = "other_module_b" + module_hash_key = "module_hash_key" + module_hash_value = "module_class_b" + module_array_value0 = "module_array_a" + module_array_value1 = "module_array_b" + + env_data_override_implied_key = "env_data_override_implied" + env_data_override_implied_value = "env_override_implied_c" + env_data_override_key = "env_data_override" + env_data_override_value = "env_override_c" + + hiera_data_implied_key = "apache_server_port_implied" + hiera_data_implied_value = "8080" + hiera_data_key = "apache_server_port" + hiera_data_value = "9090" + hiera_hash_key = "hiera_hash_key" + hiera_hash_value = "hiera_class_c" + hiera_array_value0 = "hiera_array_a" + hiera_array_value1 = "hiera_array_b" + + automatic_data_key = "automatic_data_key" + automatic_data_value = "automatic_data_value" + automatic_default_value = "automatic_default_value" + + def mod_manifest_entry(module_name = nil, testdir, module_data_implied_key, + module_data_implied_value, module_data_key, + module_data_value, hash_name, module_hash_key, + module_hash_value, array_key, module_array_value0, + module_array_value1) + if module_name + module_files_manifest = < file, + content => " + Puppet::Functions.create_function(:'#{module_name}::data') do + def data() + { 
'#{module_name}::#{module_data_implied_key}' => '#{module_data_implied_value}', + '#{module_name}::#{module_data_key}' => '#{module_data_value}', + '#{module_name}::#{hash_name}' => {'#{module_hash_key}' => '#{module_hash_value}'}, + '#{module_name}::#{array_key}' => ['#{module_array_value0}', '#{module_array_value1}'] + } + end + end + ", + mode => "0640", + } +PP + module_files_manifest + end + end + + def mod_manifest_metadata_json(module_name = nil, testdir) + if module_name + < file, + content => ' +{ + "name": "tester-#{module_name}", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [ + ], + "data_provider": "function" +} + ', + mode => "0644", + } + file { '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/bindings': + ensure => absent, + force => true, + } +PPmetadata + end + end + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + module_manifest1 = mod_manifest_entry(module_name, testdir, module_data_implied_key, + module_data_implied_value, module_data_key, module_data_value, + hash_name, module_hash_key, module_hash_value, array_key, + module_array_value0, module_array_value1) + module_manifest2 = mod_manifest_entry(module_name2, testdir, module_data_implied_key, + module_data_implied_value, module_data_key, module_data_value_other, + hash_name, module_hash_key, module_hash_value, array_key, + module_array_value0, module_array_value1) + metadata_manifest1 = mod_manifest_metadata_json(module_name, testdir) + metadata_manifest2 = mod_manifest_metadata_json(module_name2, testdir) + + apply_manifest_on(master, <<-PP, :catch_failures => true) +File { + ensure => directory, + mode => "0750", + owner => #{master.puppet['user']}, + group => #{master.puppet['group']}, +} + +file { + '#{testdir}':; + '#{testdir}/hieradata':; + '#{testdir}/environments':; + '#{testdir}/environments/production':; + '#{testdir}/environments/production/manifests':; + '#{testdir}/environments/production/modules':; + '#{testdir}/environments/production/lib':; + '#{testdir}/environments/production/lib/puppet':; + '#{testdir}/environments/production/lib/puppet/functions':; + '#{testdir}/environments/production/lib/puppet/functions/environment':; + '#{testdir}/environments/production/modules/#{module_name}':; + '#{testdir}/environments/production/modules/#{module_name}/manifests':; + '#{testdir}/environments/production/modules/#{module_name}/lib':; + '#{testdir}/environments/production/modules/#{module_name}/lib/puppet':; + '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/bindings':; + '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/bindings/#{module_name}':; + '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/functions':; + '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/functions/#{module_name}':; + '#{testdir}/environments/production/modules/#{module_name2}':; + '#{testdir}/environments/production/modules/#{module_name2}/manifests':; + '#{testdir}/environments/production/modules/#{module_name2}/lib':; + '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet':; + '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/bindings':; + '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/bindings/#{module_name2}':; + 
'#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/functions':; + '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/functions/#{module_name2}':; +} + +file { '#{testdir}/hiera.yaml': + ensure => file, + content => '--- + :backends: + - "yaml" + :logger: "console" + :hierarchy: + - "global" + + :yaml: + :datadir: "#{testdir}/hieradata" + ', + mode => "0640", +} + +file { '#{testdir}/hieradata/global.yaml': + ensure => file, + content => "--- + #{hiera_data_key}: #{hiera_data_value} + #{module_name}::#{hiera_data_implied_key}: #{hiera_data_implied_value} + #{module_name}::#{hash_name}: + #{hiera_hash_key}: #{hiera_hash_value} + #{module_name}::#{array_key}: + - #{hiera_array_value0} + - #{hiera_array_value1} + #{module_name}::#{automatic_data_key}: #{automatic_data_value} + ", + mode => "0640", +} + +file { '#{testdir}/environments/production/environment.conf': + ensure => file, + content => ' + environment_timeout = 0 + # for this environment, provide our own function to supply data to lookup + # implies a ruby function in /lib/puppet/functions/environment/data.rb + # named environment::data() + environment_data_provider = "function" + ', + mode => "0640", +} + +# the function to provide data for this environment +file { '#{testdir}/environments/production/lib/puppet/functions/environment/data.rb': + ensure => file, + content => " + Puppet::Functions.create_function(:'environment::data') do + def data() + { '#{module_name}::#{env_data_implied_key}' => '#{env_data_implied_value}', + '#{module_name}::#{env_data_override_implied_key}' => '#{env_data_override_implied_value}', + '#{env_data_key}' => '#{env_data_value}', + '#{module_name}::#{hash_name}' => {'#{env_hash_key}' => '#{env_hash_value}'}, + '#{env_data_override_key}' => '#{env_data_override_value}', + '#{module_name}::#{array_key}' => ['#{env_array_value0}', '#{env_array_value1}'] + } + end + end + ", + mode => "0640", +} + +# place module file segments here +#{module_manifest1} +# same key, different module and values +#{module_manifest2} + +file { '#{testdir}/environments/production/modules/#{module_name}/manifests/init.pp': + ensure => file, + content => ' + class #{module_name}($#{env_data_implied_key}, + $#{module_data_implied_key}, + $#{env_data_override_implied_key}, + $#{hiera_data_implied_key}, + $#{automatic_data_key}=$#{automatic_default_value}) { + # lookup data from the environment function databinding + notify { "#{env_data_implied_key} $#{env_data_implied_key}": } + $lookup_env = lookup("#{env_data_key}") + notify { "#{env_data_key} $lookup_env": } + + # lookup data from the module databinding + notify { "#{module_data_implied_key} $#{module_data_implied_key}": } + $lookup_module = lookup("#{module_name}::#{module_data_key}") + notify { "#{module_data_key} $lookup_module": } + + # lookup data from another modules databinding + $lookup_module2 = lookup("#{module_name2}::#{module_data_key}") + notify { "#{module_data_key} $lookup_module2": } + + # ensure env can override module + notify { "#{env_data_override_implied_key} $#{env_data_override_implied_key}": } + $lookup_override = lookup("#{env_data_override_key}") + notify { "#{env_data_override_key} $lookup_override": } + + # should fall-back to hiera global.yaml data + notify { "#{hiera_data_implied_key} $#{hiera_data_implied_key}": } + $lookup_port = lookup("#{hiera_data_key}") + notify { "#{hiera_data_key} $lookup_port": } + + # should be able to merge hashes across sources + # this mimicks/covers behavior for including classes 
+ $lookup_hash = lookup("#{module_name}::#{hash_name}",Hash[String,String],\\'hash\\') + notify { "#{hash_name} $lookup_hash": } + + # should be able to make an array across sources + # this mimicks/covers behavior for including classes + $lookup_array = lookup("#{module_name}::#{array_key}",Array[String],\\'unique\\') + notify { "yep": message => "#{array_key} $lookup_array" } + + # automatic data lookup of parametrized class + notify { "#{automatic_data_key} $#{automatic_data_key}": } + }', + mode => "0640", +} + +file { '#{testdir}/environments/production/manifests/site.pp': + ensure => file, + content => " + node default { + include #{module_name} + }", + mode => "0640", +} +PP + + apply_manifest_on(master, <<-PP, :catch_failures => true) +#{metadata_manifest1} +#{metadata_manifest2} + PP + + + master_opts = { + 'main' => { + 'environmentpath' => "#{testdir}/environments", + 'hiera_config' => "#{testdir}/hiera.yaml", + }, + } + with_puppet_running_on master, master_opts, testdir do + step "Lookup string data, binding specified in metadata.json" do + agents.each do |agent| + on(agent, puppet('agent', "-t"), :acceptable_exit_codes => [0, 2]) do |result| + assert_match("#{env_data_implied_key} #{env_data_implied_value}", result.stdout) + assert_match("#{env_data_key} #{env_data_value}", result.stdout) + + assert_match("#{module_data_implied_key} #{module_data_implied_value}", result.stdout) + assert_match("#{module_data_key} #{module_data_value}", result.stdout) + + assert_match("#{module_data_key} #{module_data_value_other}", result.stdout) + + assert_match("#{env_data_override_implied_key} #{env_data_override_implied_value}", result.stdout) + assert_match("#{env_data_override_key} #{env_data_override_value}", result.stdout) + + assert_match("#{hiera_data_implied_key} #{hiera_data_implied_value}", result.stdout) + assert_match("#{hiera_data_key} #{hiera_data_value}", result.stdout) + + assert_match("#{hash_name} {#{module_hash_key} => #{module_hash_value}, #{env_hash_key} => #{env_hash_value}, #{hiera_hash_key} => #{hiera_hash_value}}", result.stdout) + + assert_match("#{array_key} [#{hiera_array_value0}, #{hiera_array_value1}, #{env_array_value0}, #{env_array_value1}, #{module_array_value0}, #{module_array_value1}]", result.stdout) + + assert_match("#{automatic_data_key} #{automatic_data_value}", result.stdout) + end + end + end + end + +end diff --git a/acceptance/tests/lookup/lookup_rich_values.rb b/acceptance/tests/lookup/lookup_rich_values.rb new file mode 100644 index 00000000000..2b3af46060a --- /dev/null +++ b/acceptance/tests/lookup/lookup_rich_values.rb @@ -0,0 +1,124 @@ +test_name 'C99044: lookup should allow rich data as values' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # Master is not needed for this test. Refactor + # to use puppet apply with a local environment. + 'server' + + # The following two lines are required for the puppetserver service to + # start correctly. These should be removed when PUP-7102 is resolved. 
+ confdir = puppet_config(master, 'confdir', section: 'master') + on(master, "chown puppet:puppet #{confdir}/hiera.yaml") + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + + sensitive_value_rb = 'foot, no mouth' + sensitive_value_pp = 'toe, no step' + sensitive_value_pp2 = 'toe, no module' + + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "create ruby lookup function in #{tmp_environment}" do + on(master, "mkdir -p #{fq_tmp_environmentpath}/lib/puppet/functions/environment") + create_remote_file(master, "#{fq_tmp_environmentpath}/hiera.yaml", <<-HIERA) +--- +version: 5 +hierarchy: + - name: Test + data_hash: rich_data_test + - name: Test2 + data_hash: some_mod::rich_data_test2 + - name: Test3 + data_hash: rich_data_test3 + HIERA + create_remote_file(master, "#{fq_tmp_environmentpath}/lib/puppet/functions/rich_data_test.rb", <<-FUNC) +Puppet::Functions.create_function(:rich_data_test) do + def rich_data_test(options, context) + rich_type_instance = Puppet::Pops::Types::PSensitiveType::Sensitive.new("#{sensitive_value_rb}") + { + 'environment_key' => rich_type_instance, + } + end +end + FUNC + end + + step "create puppet language lookup function in #{tmp_environment} module" do + on(master, "mkdir -p #{fq_tmp_environmentpath}/modules/some_mod/functions") + create_remote_file(master, "#{fq_tmp_environmentpath}/modules/some_mod/functions/rich_data_test2.pp", <<-FUNC) +function some_mod::rich_data_test2($options, $context) { + { + "environment_key2" => Sensitive('#{sensitive_value_pp}'), + } +} + FUNC + on(master, "chmod -R a+rw #{fq_tmp_environmentpath}") + end + + step "C99571: create puppet language lookup function in #{tmp_environment}" do + on(master, "mkdir -p #{fq_tmp_environmentpath}/functions") + create_remote_file(master, "#{fq_tmp_environmentpath}/functions/rich_data_test3.pp", <<-FUNC) +function rich_data_test3($options, $context) { + { + "environment_key3" => Sensitive('#{sensitive_value_pp2}'), + } +} + FUNC + on(master, "chmod -R a+rw #{fq_tmp_environmentpath}") + end + + step "create site.pp which calls lookup on our keys" do + create_sitepp(master, tmp_environment, <<-SITE) + notify { "${unwrap(lookup('environment_key'))}": } + notify { "${unwrap(lookup('environment_key2'))}": } + notify { "${unwrap(lookup('environment_key3'))}": } + SITE + end + + step 'assert lookups using lookup subcommand' do + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "lookup subcommand using ruby function didn't exit properly: (#{result.exit_code})") + assert_match(sensitive_value_rb, result.stdout, + "lookup subcommand using ruby function didn't find correct key") + end + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key2'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "lookup subcommand using puppet function in module didn't exit properly: (#{result.exit_code})") + assert_match(sensitive_value_pp, result.stdout, + "lookup subcommand using puppet function in module didn't find correct key") + end + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key3'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "lookup subcommand 
using puppet function didn't exit properly: (#{result.exit_code})") + assert_match(sensitive_value_pp2, result.stdout, + "lookup subcommand using puppet function didn't find correct key") + end + end + + with_puppet_running_on(master,{}) do + agents.each do |agent| + step "agent lookup in ruby function" do + on(agent, puppet('agent', "-t --environment #{tmp_environment}"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup using ruby function didn't exit properly: (#{result.exit_code})") + assert_match(sensitive_value_rb, result.stdout, + "agent lookup using ruby function didn't find correct key") + assert_match(sensitive_value_pp, result.stdout, + "agent lookup using puppet function in module didn't find correct key") + assert_match(sensitive_value_pp2, result.stdout, + "agent lookup using puppet function didn't find correct key") + end + end + end + end + +end diff --git a/acceptance/tests/lookup/merge_strategies.rb b/acceptance/tests/lookup/merge_strategies.rb new file mode 100644 index 00000000000..79cfd36ed6f --- /dev/null +++ b/acceptance/tests/lookup/merge_strategies.rb @@ -0,0 +1,226 @@ +test_name 'C99903: merge strategies' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # Master is not needed for this test. Refactor + # to use puppet apply with a local module tree. + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type + '1') + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + tmp_environment2 = mk_tmp_environment_with_teardown(master, app_type + '2') + fq_tmp_environmentpath2 = "#{environmentpath}/#{tmp_environment2}" + + master_confdir = puppet_config(master, 'confdir', section: 'master') + hiera_conf_backup = master.tmpfile(app_type) + + teardown do + step "restore default global hiera.yaml" do + on(master, "mv #{hiera_conf_backup} #{master_confdir}/hiera.yaml", :acceptable_exit_codes => [0,1]) + end + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "create global hiera.yaml and environment data" do + step "backup global hiera.yaml" do + on(master, "cp -a #{master_confdir}/hiera.yaml #{hiera_conf_backup}") + end + + create_remote_file(master, "#{master_confdir}/hiera.yaml", <<-HIERA) +--- +:backends: + - yaml +:yaml: + :datadir: "/etc/puppetlabs/code/environments/%{::environment}/hieradata" +:hierarchy: + - "host" + - "roles" + - "profiles" + - "%{facts.os.name}" + - "%{facts.os.family}" + - "%{facts.kernel}" + - "common" +:merge_behavior: deeper +:deep_merge_options: + :merge_hash_arrays: true +HIERA + on(master, "chown puppet:puppet #{master_confdir}/hiera.yaml") + + on(master, "mkdir -p #{fq_tmp_environmentpath}/hieradata/") + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/host.yaml", <<-YAML) +--- +profiles: + webserver: + apache: + httpd: + modules: + - mpm_prefork + - php + - ssl +arrayed_hash: + the_hash: + - array1: + key1: val1 + key2: val2 +array: + - foo +YAML + + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/profiles.yaml", <<-YAML) +profiles: + webserver: + apache: + httpd: + modules: + - auth_kerb + - authnz_ldap + - cgid + - php + - status +array: + - bar +YAML + + create_sitepp(master, tmp_environment, <<-SITE) +notify { "hiera_hash: ${hiera_hash 
('profiles')['webserver']['apache']['httpd']['modules']}": } +notify { "lookup1: ${lookup ('profiles')['webserver']['apache']['httpd']['modules']}": } +notify { "lookup1b: ${lookup ({'name' => 'profiles', 'merge' => 'deep'})['webserver']['apache']['httpd']['modules']}": } +notify { "hiera_merge_hash: ${hiera_hash ('arrayed_hash')}": } +notify { "lookup_arrayed_hash: ${lookup ({'name' => 'arrayed_hash', 'merge' => {'strategy' => 'deep', 'merge_hash_arrays' => true}})}": } +notify { "hiera-array: ${hiera ('array')}": } +notify { "hiera_array: ${hiera_array ('array')}": } +notify { "lookup-array: ${lookup ('array')}": } + SITE + + on(master, "chmod -R 775 #{fq_tmp_environmentpath}") + end + + step "create another environment, hiera5 config and environment data: #{tmp_environment2}" do + create_remote_file(master, "#{fq_tmp_environmentpath2}/hiera.yaml", <<-HIERA) +--- +version: 5 +hierarchy: + - name: "%{environment}/host" + data_hash: yaml_data + path: "hieradata/host.yaml" + - name: "%{environment}/profiles" + data_hash: yaml_data + path: "hieradata/profiles.yaml" +HIERA + + on(master, "mkdir -p #{fq_tmp_environmentpath2}/hieradata/") + create_remote_file(master, "#{fq_tmp_environmentpath2}/hieradata/host.yaml", <<-YAML) +--- +profiles: + webserver: + apache: + httpd: + modules: + - mpm_prefork + - php + - ssl +arrayed_hash: + the_hash: + - array1: + key1: val1 + key2: val2 +array: + - foo +lookup_options: + 'profiles': + merge: + strategy: deep +YAML + + create_remote_file(master, "#{fq_tmp_environmentpath2}/hieradata/profiles.yaml", <<-YAML) +profiles: + webserver: + apache: + httpd: + modules: + - auth_kerb + - authnz_ldap + - cgid + - php + - status +array: + - bar +lookup_options: + 'profiles': + merge: + strategy: deep +YAML + + create_sitepp(master, tmp_environment2, <<-SITE) +notify { "hiera_hash: ${hiera_hash ('profiles')['webserver']['apache']['httpd']['modules']}": } +notify { "lookup2: ${lookup ('profiles')['webserver']['apache']['httpd']['modules']}": } +notify { "lookup2b: ${lookup ({'name' => 'profiles', 'merge' => 'first'})['webserver']['apache']['httpd']['modules']}": } +notify { "hiera_merge_hash: ${hiera_hash ('arrayed_hash')}": } +notify { "lookup_arrayed_hash: ${lookup ({'name' => 'arrayed_hash', 'merge' => {'strategy' => 'deep', 'merge_hash_arrays' => true}})}": } +notify { "hiera-array: ${hiera ('array')}": } +notify { "hiera_array: ${hiera_array ('array')}": } +notify { "lookup-array: ${lookup ('array')}": } + SITE + + on(master, "chmod -R 775 #{fq_tmp_environmentpath2}") + end + + with_puppet_running_on(master,{}) do + agents.each do |agent| + step "agent lookups #{agent.hostname}, hiera3" do + on(agent, puppet('agent', "-t --environment #{tmp_environment}"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup didn't exit properly: (#{result.exit_code})") + # hiera_hash will honor old global merge strategies, which were a bad idea + assert_match(/hiera_hash: \[auth_kerb, authnz_ldap, cgid, php, status, mpm_prefork, ssl\]/, result.stdout, + "1: agent hiera_hash didn't find correct key") + # so, lookup doesn't honor them except on a by-key or by-lookup basis + assert_match(/lookup1: \[mpm_prefork, php, ssl\]/, result.stdout, + "1: agent lookup didn't find correct key") + assert_match(/lookup1b: \[auth_kerb, authnz_ldap, cgid, php, status, mpm_prefork, ssl\]/, result.stdout, + "1b: agent lookup didn't find correct key") + assert_match(/hiera_merge_hash: {the_hash => \[{array1 => {key1 => val1, key2 => val2}}\]}/, result.stdout, + 
"agent hiera_hash 1 merge_hash_arrays didn't work properly") + assert_match(/lookup_arrayed_hash: {the_hash => \[{array1 => {key1 => val1, key2 => val2}}\]}/, result.stdout, + "agent lookup 1 deep merge with merge_hash_arrays didn't work properly") + assert_match(/hiera-array: \[foo\]/, result.stdout, + "hiera() lookup of an array with deeper should be merged") + assert_match(/hiera_array: \[foo, bar\]/, result.stdout, + "hiera_array() lookup of an array should be merged") + assert_match(/lookup-array: \[foo\]/, result.stdout, + "lookup() lookup of an array should default to first") + end + end + step "agent lookups #{agent.hostname}, hiera5" do + on(agent, puppet('agent', "-t --environment #{tmp_environment2}"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup didn't exit properly: (#{result.exit_code})") + assert_match(/hiera_hash: \[auth_kerb, authnz_ldap, cgid, php, status, mpm_prefork, ssl\]/, result.stdout, + "2: agent hiera_hash didn't find correct key") + assert_match(/lookup2: \[auth_kerb, authnz_ldap, cgid, php, status, mpm_prefork, ssl\]/, result.stdout, + "2: agent lookup didn't find correct key") + assert_match(/lookup2b: \[mpm_prefork, php, ssl\]/, result.stdout, + "2b: agent lookup didn't find correct key") + assert_match(/hiera_merge_hash: {the_hash => \[{array1 => {key1 => val1, key2 => val2}}\]}/, result.stdout, + "agent hiera_hash 2 merge_hash_arrays didn't work properly") + assert_match(/lookup_arrayed_hash: {the_hash => \[{array1 => {key1 => val1, key2 => val2}}\]}/, result.stdout, + "agent lookup 2 deep merge with merge_hash_arrays didn't work properly") + assert_match(/hiera-array: \[foo\]/, result.stdout, + "hiera() 2 lookup in hiera5 of an array should default to first") + assert_match(/hiera_array: \[foo, bar\]/, result.stdout, + "hiera_array() 2 lookup of an array should be merged") + assert_match(/lookup-array: \[foo\]/, result.stdout, + "lookup() 2 lookup in hiera5 of an array should default to first") + end + end + end + end + +end diff --git a/acceptance/tests/lookup/v3_config_and_data.rb b/acceptance/tests/lookup/v3_config_and_data.rb new file mode 100644 index 00000000000..dd713b02d08 --- /dev/null +++ b/acceptance/tests/lookup/v3_config_and_data.rb @@ -0,0 +1,126 @@ +test_name 'C99629: hiera v5 can use v3 config and data' do + require 'puppet/acceptance/environment_utils.rb' + extend Puppet::Acceptance::EnvironmentUtils + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # Master is not needed for this test. Refactor + # to use puppet apply with a local module tree. 
+ + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + + hiera_conf_backup = master.tmpfile('C99629-hiera-yaml') + + step "create hiera v3 global config and data" do + confdir = puppet_config(master, 'confdir', section: 'master') + + step "backup global hiera.yaml" do + on(master, "cp -a #{confdir}/hiera.yaml #{hiera_conf_backup}", :acceptable_exit_codes => [0,1]) + end + + teardown do + step "restore global hiera.yaml" do + on(master, "mv #{hiera_conf_backup} #{confdir}/hiera.yaml", :acceptable_exit_codes => [0,1]) + end + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "create global hiera.yaml and module data" do + create_remote_file(master, "#{confdir}/hiera.yaml", <<-HIERA) +--- +:backends: + - "yaml" + - "json" + - "hocon" +:hierarchy: + - "somesuch" + - "common" + HIERA + + on(master, "mkdir -p #{fq_tmp_environmentpath}/hieradata/") + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/somesuch.yaml", <<-YAML) +--- +environment_key1: "env value1" +environment_key3: "env value3" + YAML + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/somesuch.json", <<-JSON) +{ + "environment_key1" : "wrong value", + "environment_key2" : "env value2" +} + JSON + step "C99628: add hocon backend and data" do + create_remote_file(master, "#{fq_tmp_environmentpath}/hieradata/somesuch.conf", <<-HOCON) +environment_key4 = "hocon value", + HOCON + end + + create_sitepp(master, tmp_environment, <<-SITE) +notify { "${lookup('environment_key1')}": } +notify { "${lookup('environment_key2')}": } +notify { "${lookup('environment_key3')}": } +notify { "${lookup('environment_key4')}": } + SITE + + on(master, "chmod -R 775 #{fq_tmp_environmentpath}") + on(master, "chmod -R 775 #{confdir}") + end + end + + step 'assert lookups using lookup subcommand' do + step 'assert lookup --explain using lookup subcommand' do + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key1 --explain'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "1: lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/env value1/, result.stdout, + "1: lookup subcommand didn't find correct key") + assert_match(/hiera configuration version 3/, result.stdout, + "hiera config version not reported properly") + assert_match(/#{fq_tmp_environmentpath}\/hieradata\/somesuch\.yaml/, result.stdout, + "hiera hierarchy abs path not reported properly") + assert_match(/path: "somesuch"/, result.stdout, + "hiera hierarchy path not reported properly") + end + end + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key2'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "2: lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/env value2/, result.stdout, + "2: lookup subcommand didn't find correct key") + end + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key3'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "3: lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/env value3/, result.stdout, + "3: lookup subcommand didn't find correct key") + end + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key4'), :accept_all_exit_codes => 
true) do |result| + assert(result.exit_code == 0, "4: lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/hocon value/, result.stdout, + "4: lookup subcommand didn't find correct key") + end + end + + with_puppet_running_on(master,{}) do + agents.each do |agent| + step "agent lookup" do + on(agent, puppet('agent', "-t --environment #{tmp_environment}"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup didn't exit properly: (#{result.exit_code})") + assert_match(/env value1/, result.stdout, + "1: agent lookup didn't find correct key") + assert_match(/env value2/, result.stdout, + "2: agent lookup didn't find correct key") + assert_match(/env value3/, result.stdout, + "3: agent lookup didn't find correct key") + assert_match(/hocon value/, result.stdout, + "4: agent lookup didn't find correct key") + end + end + end + end +end diff --git a/acceptance/tests/lookup/v4_hieradata_with_v5_configs.rb b/acceptance/tests/lookup/v4_hieradata_with_v5_configs.rb new file mode 100644 index 00000000000..c2082c3f0fa --- /dev/null +++ b/acceptance/tests/lookup/v4_hieradata_with_v5_configs.rb @@ -0,0 +1,132 @@ +test_name 'C99572: v4 hieradata with v5 configs' do + require 'puppet/acceptance/puppet_type_test_tools.rb' + extend Puppet::Acceptance::PuppetTypeTestTools + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor', # Master is not needed for this test. Refactor + # to use puppet apply with a local module tree. + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + fq_tmp_environmentpath = "#{environmentpath}/#{tmp_environment}" + + confdir = puppet_config(master, 'confdir', section: 'master') + hiera_conf_backup = master.tmpfile('C99572-hiera-yaml') + + step "backup global hiera.yaml" do + on(master, "cp -a #{confdir}/hiera.yaml #{hiera_conf_backup}", :acceptable_exit_codes => [0,1]) + end + + teardown do + step "restore global hiera.yaml" do + on(master, "mv #{hiera_conf_backup} #{confdir}/hiera.yaml", :acceptable_exit_codes => [0,1]) + end + + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + step "create global hiera.yaml and data" do + create_remote_file(master, "#{confdir}/hiera.yaml", <<-HIERA) +--- +version: 5 +hierarchy: + - name: "%{environment}" + data_hash: yaml_data + path: "%{environment}.yaml" + - name: common + data_hash: yaml_data + path: "common.yaml" + HIERA + on(master, "chmod 755 #{confdir}/hiera.yaml") + create_remote_file(master, "#{confdir}/#{tmp_environment}.yaml", <<-YAML) +--- +environment_key: environment_key-global_env_file +global_key: global_key-global_env_file + YAML + create_remote_file(master, "#{confdir}/common.yaml", <<-YAML) +--- +environment_key: environment_key-global_common_file +global_key: global_key-global_common_file + YAML + end + + step "create environment hiera.yaml and data" do + on(master, "mkdir -p #{fq_tmp_environmentpath}/data") + create_remote_file(master, "#{fq_tmp_environmentpath}/hiera.yaml", <<-HIERA) +--- +version: 5 +hierarchy: + - name: "%{environment}" + data_hash: yaml_data + path: "%{environment}.yaml" + - name: common + data_hash: yaml_data + path: "common.yaml" + - name: hocon + data_hash: hocon_data + path: "common.conf" + HIERA + create_remote_file(master, "#{fq_tmp_environmentpath}/data/#{tmp_environment}.yaml", <<-YAML) +--- +environment_key: "environment_key-env_file" + YAML + 
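+    # data/common.yaml (written next) defines both keys on purpose: the assertions
+    # below expect environment_key to resolve from the more specific
+    # data/%{environment}.yaml level, while global_key falls through to common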
create_remote_file(master, "#{fq_tmp_environmentpath}/data/common.yaml", <<-YAML) +--- +environment_key: "environment_key-common_file" +global_key: "global_key-common_file" + YAML + step "C99628: add hocon backend and data" do + create_remote_file(master, "#{fq_tmp_environmentpath}/data/common.conf", <<-HOCON) +environment_key2 = "hocon value", + HOCON + end + + create_sitepp(master, tmp_environment, <<-SITE) + notify { "${lookup('environment_key')}": } + notify { "${lookup('global_key')}": } + notify { "${lookup('environment_key2')}": } + SITE + on(master, "chmod -R 755 #{fq_tmp_environmentpath}") + end + + step 'assert lookups using lookup subcommand' do + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "1: lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/environment_key-env_file/, result.stdout, + 'lookup environment_key subcommand didn\'t find correct key') + end + on(master, puppet('lookup', "--environment #{tmp_environment}", 'global_key'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "2: lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/global_key-common_file/, result.stdout, + 'lookup global_key subcommand didn\'t find correct key') + end + on(master, puppet('lookup', "--environment #{tmp_environment}", 'environment_key2'), :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 0, "3: lookup subcommand didn't exit properly: (#{result.exit_code})") + assert_match(/hocon value/, result.stdout, + 'lookup environment_key2 subcommand didn\'t find correct key') + end + end + + with_puppet_running_on(master,{}) do + agents.each do |agent| + step 'agent lookup' do + on(agent, puppet('agent', "-t --environment #{tmp_environment}"), + :accept_all_exit_codes => true) do |result| + assert(result.exit_code == 2, "agent lookup didn't exit properly: (#{result.exit_code})") + assert_match(/global_key-common_file/m, result.stdout, + 'agent lookup didn\'t find global key') + assert_match(/environment_key-env_file/m, result.stdout, + 'agent lookup didn\'t find environment_key') + assert_match(/hocon value/m, result.stdout, + 'agent lookup didn\'t find environment_key2') + end + end + end + end + +end diff --git a/acceptance/tests/modulepath.rb b/acceptance/tests/modulepath.rb new file mode 100644 index 00000000000..a119f547fa0 --- /dev/null +++ b/acceptance/tests/modulepath.rb @@ -0,0 +1,130 @@ +test_name 'Supports vendored modules' do + tag 'risk:high' + + # beacon custom type emits a message so we can tell where the + # type was loaded from, e.g. vendored, global, and whether the + # type was loaded locally or pluginsynced from the master. 
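+  # A vendored module needs no pluginsync: each host loads it straight from the
+  # vendor path returned by vendor_modules() below, and a module of the same name
+  # installed in the global module path takes precedence over the vendored copy.
+  # The steps in this test assert exactly that ordering.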
+  def beacon_type(message)
+    return <<END
+Puppet::Type.newtype(:beacon) do
+  newparam(:name, :namevar => true)
+  newproperty(:message) do
+    def sync; true; end
+    def retrieve; :absent; end
+    def insync?(is); false; end
+    defaultto { "#{message}" }
+  end
+end
+END
+  end
+
+  def global_modules(host)
+    if host.platform =~ /windows/
+      '/cygdrive/c/ProgramData/PuppetLabs/code/modules'
+    else
+      '/etc/puppetlabs/code/modules'
+    end
+  end
+
+  def vendor_modules(host)
+    if host.platform =~ /windows/
+      # escape spaces
+      "/cygdrive/c/Program\\ Files/Puppet\\ Labs/Puppet/puppet/vendor_modules"
+    else
+      '/opt/puppetlabs/puppet/vendor_modules'
+    end
+  end
+
+  teardown do
+    hosts.each do |host|
+      on(host, "rm -rf #{vendor_modules(host)}/beacon")
+      on(host, "rm -rf #{global_modules(host)}/beacon")
+
+      libdir = host.puppet['vardir']
+      on(host, "rm -rf #{libdir}")
+    end
+
+    on(master, "rm -rf /etc/puppetlabs/code/environments/production/modules/beacon")
+    on(master, "rm -f /etc/puppetlabs/code/environments/production/manifests/site.pp")
+  end
+
+  step 'delete libdir' do
+    hosts.each do |host|
+      on(host, "rm -rf #{host.puppet['libdir']}")
+    end
+  end
+
+  step 'create vendored module with a custom type' do
+    hosts.each do |host|
+      vendor_dir = vendor_modules(host)
+      on(host, "mkdir -p #{vendor_dir}/beacon/lib/puppet/type")
+
+      # unescape, because net-scp escapes
+      vendor_dir.gsub!(/\\/, '')
+      create_remote_file(host, "#{vendor_dir}/beacon/lib/puppet/type/beacon.rb", beacon_type("vendored module from #{host}"))
+    end
+  end
+
+  step 'vendored modules work locally' do
+    hosts.each do |host|
+      on(host, puppet("apply -e \"beacon { 'ping': }\"")) do |result|
+        assert_match(/defined 'message' as 'vendored module from #{host}'/, result.stdout)
+      end
+    end
+  end
+
+  step 'vendored modules can be excluded' do
+    hosts.each do |host|
+      on(host, puppet("describe --vendormoduledir '' beacon"), accept_all_exit_codes: true) do |result|
+        assert_match(/Unknown type beacon/, result.stdout)
+      end
+    end
+  end
+
+  step 'global modules override vendored modules' do
+    agents.each do |agent|
+      # skip the agent on the master, as we don't want to install the
+      # global module on the master until later
+      next if agent == master
+
+      global_dir = global_modules(agent)
+      on(agent, "mkdir -p #{global_dir}/beacon/lib/puppet/type")
+
+      # global_dir doesn't have spaces, so don't need to escape
+      create_remote_file(agent, "#{global_dir}/beacon/lib/puppet/type/beacon.rb", beacon_type("global module from #{agent}"))
+
+      on(agent, puppet("apply -e \"beacon { 'ping': }\"")) do |result|
+        assert_match(/defined 'message' as 'global module from #{agent}'/, result.stdout)
+      end
+    end
+  end
+
+  step "prepare server" do
+    create_remote_file(master, "/etc/puppetlabs/code/environments/production/manifests/site.pp", "beacon { 'ping': }")
+    on(master, "chown -R puppet:puppet /etc/puppetlabs/code/environments/production/manifests/site.pp")
+    on(master, "chown -R puppet:puppet #{vendor_modules(master)}")
+  end
+
+  with_puppet_running_on(master, {}) do
+    step "agent doesn't pluginsync the vendored module, instead using its local vendored module" do
+      agents.each do |agent|
+        on(agent, puppet("agent -t"), :acceptable_exit_codes => [0,2]) do |result|
+          assert_match(/defined 'message' as 'vendored module from #{agent}'/, result.stdout)
+        end
+      end
+    end
+
+    step "agent downloads and uses newly installed global module from the server" do
+      global_dir = global_modules(master)
+      on(master, "mkdir -p #{global_dir}/beacon/lib/puppet/type")
+      create_remote_file(master, "#{global_dir}/beacon/lib/puppet/type/beacon.rb",
beacon_type("server module from #{master}")) + on(master, "chown -R puppet:puppet #{global_dir}") + + agents.each do |agent| + on(agent, puppet("agent -t"), :acceptable_exit_codes => [0,2]) do |result| + assert_match(/defined 'message' as 'server module from #{master}'/, result.stdout) + end + end + end + end +end diff --git a/acceptance/tests/modules/build/build_agent.rb b/acceptance/tests/modules/build/build_agent.rb deleted file mode 100644 index 79e890bbdf0..00000000000 --- a/acceptance/tests/modules/build/build_agent.rb +++ /dev/null @@ -1,15 +0,0 @@ -test_name "puppet module build (agent)" - -agents.each do |agent| - teardown do - on agent, 'rm -rf bar' - end - - step 'generate' - on(agent, puppet('module generate foo-bar --skip-interview')) - - step 'build' - on(agent, puppet('module build bar')) do - assert_match(/Module built: .*\/bar\/pkg\/foo-bar-.*\.tar\.gz/, stdout) - end -end diff --git a/acceptance/tests/modules/build/build_basic.rb b/acceptance/tests/modules/build/build_basic.rb deleted file mode 100644 index fe11942a0ef..00000000000 --- a/acceptance/tests/modules/build/build_basic.rb +++ /dev/null @@ -1,70 +0,0 @@ -test_name 'CODEMGMT-69 - Build a Module Using "metadata.json" Only' - -#Init -temp_module_path = '/tmp/nginx' -metadata_json_file_path = File.join(temp_module_path, 'metadata.json') - -#In-line File -metadata_json_file = <<-FILE -{ - "name": "puppetlabs-nginx", - "version": "0.0.1", - "author": "Puppet Labs", - "summary": "Nginx Module", - "license": "Apache Version 2.0", - "source": "git://github.com/puppetlabs/puppetlabs-nginx.git", - "project_page": "http://github.com/puppetlabs/puppetlabs-nginx", - "issues_url": "http://github.com/puppetlabs/puppetlabs-nginx", - "dependencies": [ - {"name":"puppetlabs-stdlub","version_requirement":">= 1.0.0"} - ] -} -FILE - -#Verification -build_message_1_regex = /Notice: Building #{temp_module_path} for release/ -build_message_2_regex = /Module built: #{temp_module_path}\/pkg\/puppetlabs-nginx-0.0.1.tar.gz/ - -verify_pkg_dir_command = "[ -d #{temp_module_path}/pkg/puppetlabs-nginx-0.0.1 ]" -verify_tarball_command = "[ -f #{temp_module_path}/pkg/puppetlabs-nginx-0.0.1.tar.gz ]" - -#Teardown -teardown do - step 'Teardown Test Artifacts' - on(master, "rm -rf #{temp_module_path}") -end - -#Setup -step 'Create Temporary Path for Module' -on(master, "mkdir #{temp_module_path}") - -step 'Create "metadata.json" for Temporary Module' -create_remote_file(master, metadata_json_file_path, metadata_json_file) - -#Tests -step 'Build Module with Absolute Path' -on(master, puppet("module build #{temp_module_path}")) do |result| - assert_no_match(/Error:/, result.output, 'Unexpected error was detected!') - assert_no_match(/Warning:/, result.output, 'Unexpected warning was detected!') - assert_match(build_message_1_regex, result.stdout, 'Expected message not found!') - assert_match(build_message_2_regex, result.stdout, 'Expected message not found!') -end - -step 'Verify Build Artifacts' -on(master, verify_pkg_dir_command) -on(master, verify_tarball_command) - -step 'Clean-up Artifacts' -on(master, "rm -rf #{temp_module_path}/pkg") - -step "Build Module with Relative Path" -on(master, ("cd #{temp_module_path} && puppet module build")) do |result| - assert_no_match(/Error:/, result.output, 'Unexpected error was detected!') - assert_no_match(/Warning:/, result.output, 'Unexpected warning was detected!') - assert_match(build_message_1_regex, result.stdout, 'Expected message not found!') - assert_match(build_message_2_regex, result.stdout, 
'Expected message not found!') -end - -step 'Verify Build Artifacts' -on(master, verify_pkg_dir_command) -on(master, verify_tarball_command) diff --git a/acceptance/tests/modules/build/build_ignore_module_file.rb b/acceptance/tests/modules/build/build_ignore_module_file.rb deleted file mode 100755 index b7fb4ff2c1d..00000000000 --- a/acceptance/tests/modules/build/build_ignore_module_file.rb +++ /dev/null @@ -1,49 +0,0 @@ -test_name 'PUP-3981 - C63215 - Build Module Should Ignore Module File' - -#Init -temp_module_path = '/tmp/test' -metadata_json_file_path = File.join(temp_module_path, 'metadata.json') -modulefile_file_path = File.join(temp_module_path, 'Modulefile') - -#In-line File -metadata_json_file = <<-FILE -{ - "name": "puppetlabs-test", - "version": "0.0.1", - "author": "Puppet Labs", - "summary": "Test Module", - "license": "Apache Version 2.0", - "source": "git://github.com/puppetlabs/puppetlabs-test.git", - "project_page": "http://github.com/puppetlabs/puppetlabs-test", - "issues_url": "http://github.com/puppetlabs/puppetlabs-test", - "dependencies": [ - {"name":"puppetlabs-stdlub","version_requirement":">= 1.0.0"} - ] -} -FILE - -#Verification -modulefile_ignore_message_regex = /Warning: A Modulefile was found in the root directory of the module. This file will be ignored and can safely be removed./ - -#Teardown -teardown do - step 'Teardown Test Artifacts' - on(master, "rm -rf #{temp_module_path}") -end - -#Setup -step 'Create Temporary Path for Module' -on(master, "mkdir #{temp_module_path}") - -step 'Create "metadata.json" for Temporary Module' -create_remote_file(master, metadata_json_file_path, metadata_json_file) - -step 'Create "Modulefile" for Temporary Module' -create_remote_file(master, modulefile_file_path, 'Empty') - -#Tests -step 'Build Module with Modulefile Present' -on(master, puppet("module build #{temp_module_path}")) do |result| - assert_no_match(/Error:/, result.output, 'Unexpected error was detected!') - assert_match(modulefile_ignore_message_regex, result.output, 'Expected message not found!') -end diff --git a/acceptance/tests/modules/build/build_should_not_allow_symlinks.rb b/acceptance/tests/modules/build/build_should_not_allow_symlinks.rb deleted file mode 100644 index 9a694b4f243..00000000000 --- a/acceptance/tests/modules/build/build_should_not_allow_symlinks.rb +++ /dev/null @@ -1,33 +0,0 @@ -test_name "puppet module build should verify there are no symlinks in module" - -confine :except, :platform => 'windows' - -modauthor = 'foo' -modname = 'bar' -defaultversion = '0.1.0' -buildpath = "#{modname}/pkg/#{modname}-#{defaultversion}" - -agents.each do |agent| - tmpdir = agent.tmpdir('pmtbuildsymlink') - - teardown do - on(agent, "rm -rf #{modname}") - on(agent, "rm -rf #{tmpdir}") - end - - step 'Generate module' do - on(agent, puppet("module generate #{modauthor}-#{modname} --skip-interview")) - end - - step 'Add symlink to module' do - on(agent, "touch #{tmpdir}/hello") - on(agent, "ln -s #{tmpdir}/hello #{modname}/tests/symlink") - end - - step 'Build module should fail with message about needing symlinks removed' do - on(agent, puppet("module build #{modname}"), :acceptable_exit_codes => [1]) do |res| - fail_test('Proper failure message not displayed') unless res.stderr.include? 
'Symlinks in modules are unsupported' - end - end - -end diff --git a/acceptance/tests/modules/build/build_should_not_create_changes.rb b/acceptance/tests/modules/build/build_should_not_create_changes.rb deleted file mode 100644 index ddc6942d069..00000000000 --- a/acceptance/tests/modules/build/build_should_not_create_changes.rb +++ /dev/null @@ -1,27 +0,0 @@ -test_name "puppet module build should not result in changed files" - -modauthor = 'foo' -modname = 'bar' -defaultversion = '0.1.0' -buildpath = "#{modname}/pkg/#{modauthor}-#{modname}-#{defaultversion}" - -agents.each do |agent| - teardown do - on(agent, "rm -rf #{modname}") - end - - step 'Generate module' do - on(agent, puppet("module generate #{modauthor}-#{modname} --skip-interview")) - end - - step 'Build module' do - on(agent, puppet("module build #{modname}")) - on(agent, "test -d #{buildpath}") - end - - step 'Verify fresh build has no changes' do - on(agent, puppet("module changes #{buildpath}")) do |res| - fail_test('Changed files found') if res.stderr.include? 'modified' - end - end -end diff --git a/acceptance/tests/modules/changes/invalid_module_install_path.rb b/acceptance/tests/modules/changes/invalid_module_install_path.rb deleted file mode 100644 index bb8cdb3e3b1..00000000000 --- a/acceptance/tests/modules/changes/invalid_module_install_path.rb +++ /dev/null @@ -1,15 +0,0 @@ -test_name 'puppet module changes (on an invalid module install path)' - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('module_changes_with_invalid_path') - -step 'Run module changes on an invalid module install path' -on master, puppet("module changes #{testdir}/nginx"), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ -%Q{.*Error: Could not find a valid module at "#{testdir}/nginx".*}, -%Q{.*Error: Try 'puppet help module changes' for usage.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) -end diff --git a/acceptance/tests/modules/changes/missing_checksums_json.rb b/acceptance/tests/modules/changes/missing_checksums_json.rb deleted file mode 100644 index 1a16e48f50b..00000000000 --- a/acceptance/tests/modules/changes/missing_checksums_json.rb +++ /dev/null @@ -1,37 +0,0 @@ -test_name 'puppet module changes (on a module which is missing checksums.json)' - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('module_changes_on_invalid_checksums') - -apply_manifest_on master, %Q{ - file { '#{testdir}/nginx': ensure => directory; - '#{testdir}/nginx/metadata.json': ensure => present, - content => ' -{ - "name": "puppetlabs-nginx", - "version": "0.0.1", - "author": "Puppet Labs", - "summary": "Nginx Module", - "license": "Apache Version 2.0", - "source": "git://github.com/puppetlabs/puppetlabs-nginx.git", - "project_page": "http://github.com/puppetlabs/puppetlabs-nginx", - "issues_url": "http://github.com/puppetlabs/puppetlabs-nginx", - "dependencies": [ - {"name":"puppetlabs-stdlub","version_requirement":">= 1.0.0"} - ] -}' - } -} - -step 'Run module changes on a module which is missing checksums.json' -on( master, puppet("module changes #{testdir}/nginx"), - :acceptable_exit_codes => [1] ) do - - pattern = Regexp.new([ -%Q{.*Error: No file containing checksums found.*}, -%Q{.*Error: Try 'puppet help module changes' for usage.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) -end diff --git a/acceptance/tests/modules/changes/missing_metadata_json.rb b/acceptance/tests/modules/changes/missing_metadata_json.rb deleted file mode 100644 index 
c99759d170b..00000000000 --- a/acceptance/tests/modules/changes/missing_metadata_json.rb +++ /dev/null @@ -1,21 +0,0 @@ -test_name 'puppet module changes (on a module which is missing metadata.json)' - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('module_changes_on_invalid_metadata') - -apply_manifest_on master, %Q{ - file { '#{testdir}/nginx': ensure => directory } -} - -step 'Run module changes on a module which is missing metadata.json' -on( master, puppet("module changes #{testdir}/nginx"), - :acceptable_exit_codes => [1] ) do - - pattern = Regexp.new([ -%Q{.*Error: Could not find a valid module at.*}, -%Q{.*Error: Try 'puppet help module changes' for usage.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) -end diff --git a/acceptance/tests/modules/changes/module_with_modified_file.rb b/acceptance/tests/modules/changes/module_with_modified_file.rb deleted file mode 100644 index a350f6e9920..00000000000 --- a/acceptance/tests/modules/changes/module_with_modified_file.rb +++ /dev/null @@ -1,27 +0,0 @@ -test_name 'puppet module changes (on a module with a modified file)' - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('module_changes_with_modified_file') - -on master, puppet("module install pmtacceptance-nginx --modulepath #{testdir}") -on master, "echo >> #{testdir}/nginx/README" - -step 'Run module changes to check a module with a modified file' -on( master, puppet("module changes #{testdir}/nginx"), - :acceptable_exit_codes => [0] ) do - - pattern = Regexp.new([ -%Q{.*Warning: 1 files modified.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) - - assert_equal <<-OUTPUT, stdout -README - OUTPUT -end diff --git a/acceptance/tests/modules/changes/module_with_removed_file.rb b/acceptance/tests/modules/changes/module_with_removed_file.rb deleted file mode 100644 index aab0aff388d..00000000000 --- a/acceptance/tests/modules/changes/module_with_removed_file.rb +++ /dev/null @@ -1,28 +0,0 @@ -test_name 'puppet module changes (on a module with a removed file)' - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('module_changes_with_removed_file') - -on master, puppet("module install pmtacceptance-nginx --modulepath #{testdir}") -on master, "rm -rf #{testdir}/nginx/README" - -step 'Run module changes to check a module with a removed file' -on( master, puppet("module changes #{testdir}/nginx"), - :acceptable_exit_codes => [0] ) do - - pattern = Regexp.new([ -%Q{.*Warning: 1 files modified.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) - - assert_equal <<-OUTPUT, stdout -README - OUTPUT - -end diff --git a/acceptance/tests/modules/changes/unmodified_module.rb b/acceptance/tests/modules/changes/unmodified_module.rb deleted file mode 100644 index a3e60dfc613..00000000000 --- a/acceptance/tests/modules/changes/unmodified_module.rb +++ /dev/null @@ -1,19 +0,0 @@ -test_name 'puppet module changes (on an unmodified module)' - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('module_changes_with_unmodified') - -on master, puppet("module install pmtacceptance-nginx --modulepath 
#{testdir}") - -step 'Run module changes to check an unmodified module' -on( master, puppet("module changes #{testdir}/nginx"), - :acceptable_exit_codes => [0] ) do - - assert_match /No modified files/, stdout -end diff --git a/acceptance/tests/modules/generate/basic_generate.rb b/acceptance/tests/modules/generate/basic_generate.rb deleted file mode 100644 index fd17abc3f01..00000000000 --- a/acceptance/tests/modules/generate/basic_generate.rb +++ /dev/null @@ -1,24 +0,0 @@ -test_name "puppet module generate (agent)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -module_author = "pmtacceptance" -module_name = "nginx" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -agents.each do |agent| - - step "Generate #{module_author}-#{module_name} module" - on agent, puppet("module generate #{module_author}-#{module_name} --skip-interview") - - step "Check for #{module_name} scaffolding" - on agent,"test -f #{module_name}/manifests/init.pp" - - step "Clean up" - on agent,"rm -fr #{module_name}" -end diff --git a/acceptance/tests/modules/install/already_installed.rb b/acceptance/tests/modules/install/already_installed.rb deleted file mode 100644 index 491085036da..00000000000 --- a/acceptance/tests/modules/install/already_installed.rb +++ /dev/null @@ -1,55 +0,0 @@ -test_name "puppet module install (already installed)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nginx" -module_reference = "#{module_author}-#{module_name}" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step "Setup" do - stub_forge_on(master) -end - -step "Check that module is not installed" do - assert_module_not_installed_on_disk(master, module_name) -end - -step "Install module" do - on master, puppet("module install #{module_reference}") - assert_module_installed_on_disk(master, module_name) -end - -step "Try to install a module that is already installed" do - on master, puppet("module install #{module_reference}"), :acceptable_exit_codes => [0] do - assert_match(/#{module_reference}.*is already installed/, stdout, - "Error that module was already installed was not displayed") - end - assert_module_installed_on_disk(master, module_name) -end - -step "Try to install a specific version of a module that is already installed" do - on master, puppet("module install #{module_reference} --version 1.x"), :acceptable_exit_codes => [1] do - assert_match(/Could not install module '#{module_reference}' \(v1.x\)/, stderr, - "Error that specified module version could not be installed was not displayed") - assert_match(/#{module_reference}.*is already installed/, stderr, - "Error that module was already installed was not displayed") - end - assert_module_installed_on_disk(master, module_name) -end - -step "Install a module that is already installed (with --force)" do - on master, puppet("module install #{module_reference} --force") do - assert_module_installed_ui(stdout, module_author, module_name) - end - assert_module_installed_on_disk(master, module_name) -end diff --git 
a/acceptance/tests/modules/install/already_installed_elsewhere.rb b/acceptance/tests/modules/install/already_installed_elsewhere.rb deleted file mode 100644 index a22a42eb911..00000000000 --- a/acceptance/tests/modules/install/already_installed_elsewhere.rb +++ /dev/null @@ -1,69 +0,0 @@ -test_name "puppet module install (already installed elsewhere)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nginx" -module_reference = "#{module_author}-#{module_name}" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -apply_manifest_on master, <<-PP -file { - [ - '#{master['sitemoduledir']}', - '#{master['sitemoduledir']}/#{module_name}', - ]: ensure => directory; - '#{master['sitemoduledir']}/#{module_name}/metadata.json': - content => '{ - "name": "#{module_author}/#{module_name}", - "version": "0.0.1", - "source": "", - "author": "#{module_author}", - "license": "MIT", - "dependencies": [] - }'; -} -PP - -default_moduledir = get_default_modulepath_for_host(master) - -step "Try to install a module that is already installed" -on master, puppet("module install #{module_author}-#{module_name}") do - assert_match(/#{module_reference}.*is already installed/, stdout, - "Error that module was already installed was not displayed") -end -assert_module_not_installed_on_disk(master, module_name, default_moduledir) - -step "Try to install a specific version of a module that is already installed" -on master, puppet("module install #{module_author}-#{module_name} --version 1.x"), :acceptable_exit_codes => [1] do - assert_match(/Could not install module '#{module_author}-#{module_name}' \(v1.x\)/, stderr, - "Error that specified module version could not be installed was not displayed") - assert_match(/#{module_author}-#{module_name}.*is already installed/, stderr, - "Error that module was already installed was not displayed") -end -assert_module_not_installed_on_disk(master, module_name, default_moduledir) - -step "Install a specifc module version that is already installed (with --force)" -on master, puppet("module install #{module_author}-#{module_name} --force --version 0.0.1") do - assert_module_installed_ui(stdout, module_author, module_name, '0.0.1', '==') -end -assert_module_installed_on_disk(master, module_name, default_moduledir) - -step "Install a module that is already installed (with --force)" -on master, puppet("module install #{module_author}-#{module_name} --force") do - assert_module_installed_ui(stdout, module_author, module_name) -end -assert_module_installed_on_disk(master, module_name, default_moduledir) diff --git a/acceptance/tests/modules/install/already_installed_with_local_changes.rb b/acceptance/tests/modules/install/already_installed_with_local_changes.rb deleted file mode 100644 index 42bf30479f7..00000000000 --- a/acceptance/tests/modules/install/already_installed_with_local_changes.rb +++ /dev/null @@ -1,55 +0,0 @@ -test_name "puppet module install (already installed with local changes)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] 
=~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nginx" -module_reference = "#{module_author}-#{module_name}" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' do - stub_forge_on(master) -end - -step "Check that module is not installed" do - assert_module_not_installed_on_disk(master, module_name) -end - -step "Install module" do - on master, puppet("module install #{module_reference}") - assert_module_installed_on_disk(master, module_name) -end - -step "Make local changes in installed module" do - module_path = "#{get_default_modulepath_for_host(master)}/#{module_name}" - on master, "echo 'changed' >> #{module_path}/README" -end - -step "Try to install a specific version of a module that is already installed" do - on master, puppet("module install #{module_reference} --version 1.x"), :acceptable_exit_codes => [1] do - assert_match(/Could not install module '#{module_reference}' \(v1.x\)/, stderr, - "Error that specified module version could not be installed was not displayed") - assert_match(/#{module_reference}.*is already installed/, stderr, - "Error that module was already installed was not displayed") - assert_match(/changes made locally/, stderr, - "Error that module has local changes was not displayed") - end - assert_module_installed_on_disk(master, module_name) -end - -step "Install a module that is already installed (with --force)" do - on master, puppet("module install #{module_reference} --force") do - assert_module_installed_ui(stdout, module_author, module_name) - end - assert_module_installed_on_disk(master, module_name) - #validate checksum -end diff --git a/acceptance/tests/modules/install/basic_install.rb b/acceptance/tests/modules/install/basic_install.rb deleted file mode 100644 index 5ad9b1fb220..00000000000 --- a/acceptance/tests/modules/install/basic_install.rb +++ /dev/null @@ -1,27 +0,0 @@ -test_name "puppet module install (agent)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nginx" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -agents.each do |agent| - step 'setup' - stub_forge_on(agent) - - step "install module '#{module_author}-#{module_name}'" - on(agent, puppet("module install #{module_author}-#{module_name}")) do - assert_module_installed_ui(stdout, module_author, module_name) - end - assert_module_installed_on_disk(agent, module_name) -end diff --git a/acceptance/tests/modules/install/force_ignores_dependencies.rb b/acceptance/tests/modules/install/force_ignores_dependencies.rb deleted file mode 100644 index 92015293c93..00000000000 --- a/acceptance/tests/modules/install/force_ignores_dependencies.rb +++ /dev/null @@ -1,39 +0,0 @@ -test_name "puppet module install (force ignores dependencies)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "git" -module_dependencies = ["apache"] - 
-orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -step "Try to install an unsatisfiable module" -on master, puppet("module install #{module_author}-#{module_name}"), :acceptable_exit_codes => [1] do - assert_match(/No version of '#{module_author}-#{module_name}' can satisfy all dependencies/, stderr, - "Error that module dependencies could not be met was not displayed") -end -assert_module_not_installed_on_disk(master, module_name) -module_dependencies.each do |dependency| - assert_module_not_installed_on_disk(master, dependency) -end - -step "Install an unsatisfiable module with force" -on master, puppet("module install #{module_author}-#{module_name} --force") do - assert_module_installed_ui(stdout, module_author, module_name) -end -assert_module_installed_on_disk(master, module_name) -module_dependencies.each do |dependency| - assert_module_not_installed_on_disk(master, dependency) -end diff --git a/acceptance/tests/modules/install/ignoring_dependencies.rb b/acceptance/tests/modules/install/ignoring_dependencies.rb deleted file mode 100644 index 1dd1e67558e..00000000000 --- a/acceptance/tests/modules/install/ignoring_dependencies.rb +++ /dev/null @@ -1,28 +0,0 @@ -test_name "puppet module install (ignoring dependencies)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "java" -module_dependencies = ["stdlub"] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' -stub_forge_on(master) - -step "Install a module, but ignore dependencies" -on master, puppet("module install #{module_author}-#{module_name} --ignore-dependencies") do - assert_module_installed_ui(stdout, module_author, module_name) -end -assert_module_installed_on_disk(master, module_name) -module_dependencies.each do |dependency| - assert_module_not_installed_on_disk(master, dependency) -end diff --git a/acceptance/tests/modules/install/nonexistent_directory.rb b/acceptance/tests/modules/install/nonexistent_directory.rb deleted file mode 100644 index cd69134ff6a..00000000000 --- a/acceptance/tests/modules/install/nonexistent_directory.rb +++ /dev/null @@ -1,41 +0,0 @@ -test_name "puppet module install (nonexistent directory)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nginx" -module_dependencies = [] - -default_moduledir = get_default_modulepath_for_host(master) - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - on master, "mv #{default_moduledir}-bak #{default_moduledir}", :acceptable_exit_codes => [0, 1] - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -apply_manifest_on master, <<-PP - file { '/tmp/modules': ensure => absent, recurse => true, force => true } -PP - -step "Try to install a module to a non-existent directory" -on master, puppet("module install 
#{module_author}-#{module_name} --target-dir /tmp/modules") do - assert_module_installed_ui(stdout, module_author, module_name) -end -assert_module_installed_on_disk(master, module_name, '/tmp/modules') - -step "Try to install a module to a non-existent implicit directory" -# This test relies on destroying the default module directory... -on master, "mv #{default_moduledir} #{default_moduledir}-bak" -on master, puppet("module install #{module_author}-#{module_name}") do - assert_module_installed_ui(stdout, module_author, module_name) -end -assert_module_installed_on_disk(master, module_name, default_moduledir) diff --git a/acceptance/tests/modules/install/nonexistent_module.rb b/acceptance/tests/modules/install/nonexistent_module.rb deleted file mode 100644 index 47ba72eaa4f..00000000000 --- a/acceptance/tests/modules/install/nonexistent_module.rb +++ /dev/null @@ -1,48 +0,0 @@ -test_name "puppet module install (nonexistent module)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nonexistent" -module_dependencies = [] - -default_moduledir = get_default_modulepath_for_host(master) - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -step "Try to install a non-existent module" -on master, puppet("module install #{module_author}-#{module_name}"), :acceptable_exit_codes => [1] do - assert_match(/could not install '#{module_author}-#{module_name}'/i, stderr, - "Error that module could not be installed was not displayed") - - assert_match(/no releases are available from/i, stderr, - "Error that no releases were found was not displayed") -end - -step "Try to install a non-existent module (JSON rendering)" -on master, puppet("module --render-as json install #{module_author}-#{module_name}") do - require 'json' - str = stdout.lines.to_a.last - json = JSON.parse(str) - - oneline_expectation = /could not install '#{module_author}-#{module_name}'; no releases are available from/i - multiline_expectation = /could not install '#{module_author}-#{module_name}'.*no releases are available from.*have at least one published release.*\z/im - - assert_equal 'failure', json['result'] - assert_equal "#{module_author}-#{module_name}", json['module_name'] - assert_equal '>= 0.0.0', json['module_version'] - assert_equal default_moduledir, json['install_dir'] - assert_match oneline_expectation, json['error']['oneline'] - assert_match multiline_expectation, json['error']['multiline'] -end diff --git a/acceptance/tests/modules/install/with_debug.rb b/acceptance/tests/modules/install/with_debug.rb deleted file mode 100644 index 87cb1d75fe4..00000000000 --- a/acceptance/tests/modules/install/with_debug.rb +++ /dev/null @@ -1,26 +0,0 @@ -test_name "puppet module install (with debug)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "java" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts 
hosts) -end - -step 'Setup' - -stub_forge_on(master) - -step "Install a module with debug output" -on master, puppet("module install #{module_author}-#{module_name} --debug") do - assert_match(/Debug: Executing/, stdout, - "No 'Debug' output displayed!") -end diff --git a/acceptance/tests/modules/install/with_dependencies.rb b/acceptance/tests/modules/install/with_dependencies.rb deleted file mode 100644 index c58d52b6414..00000000000 --- a/acceptance/tests/modules/install/with_dependencies.rb +++ /dev/null @@ -1,32 +0,0 @@ -test_name "puppet module install (with dependencies)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "java" -module_dependencies = ["stdlub"] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -step "Install a module with dependencies" -on master, puppet("module install #{module_author}-#{module_name}") do - assert_module_installed_ui(stdout, module_author, module_name) - module_dependencies.each do |dependency| - assert_module_installed_ui(stdout, module_author, dependency) - end -end -assert_module_installed_on_disk(master, module_name) -module_dependencies.each do |dependency| - assert_module_installed_on_disk(master, dependency) -end diff --git a/acceptance/tests/modules/install/with_environment.rb b/acceptance/tests/modules/install/with_environment.rb deleted file mode 100644 index 96144892703..00000000000 --- a/acceptance/tests/modules/install/with_environment.rb +++ /dev/null @@ -1,64 +0,0 @@ -test_name 'puppet module install (with environment)' -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -tmpdir = master.tmpdir('module-install-with-environment') - -module_author = "pmtacceptance" -module_name = "nginx" - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -puppet_conf = generate_base_directory_environments(tmpdir) - -check_module_install_in = lambda do |environment_path, module_install_args| - on(master, puppet("module install #{module_author}-#{module_name} --config=#{puppet_conf} #{module_install_args}")) do - assert_module_installed_ui(stdout, module_author, module_name) - assert_match(/#{environment_path}/, stdout, - "Notice of non default install path was not displayed") - end - assert_module_installed_on_disk(master, module_name, environment_path) -end - -step 'Install a module into a non default directory environment' do - check_module_install_in.call("#{tmpdir}/environments/direnv/modules", - "--environment=direnv") -end - -step 'Prepare a separate modulepath' -modulepath_dir = master.tmpdir("modulepath") -apply_manifest_on(master, <<-MANIFEST , :catch_failures => true) - file { - [ - '#{tmpdir}/environments/production', - '#{modulepath_dir}', - ]: - - ensure => directory, - owner => #{master.puppet['user']}, - } -MANIFEST - -step "Install a module into --modulepath #{modulepath_dir} despite the implicit production directory env existing" do 
- check_module_install_in.call(modulepath_dir, "--modulepath=#{modulepath_dir}") -end - -step "Uninstall so we can try a different scenario" do - on(master, puppet("module uninstall #{module_author}-#{module_name} --config=#{puppet_conf} --modulepath=#{modulepath_dir}")) -end - -step "Install a module into --modulepath #{modulepath_dir} with a directory env specified" do - check_module_install_in.call(modulepath_dir, - "--modulepath=#{modulepath_dir} --environment=direnv") -end diff --git a/acceptance/tests/modules/install/with_existing_module_directory.rb b/acceptance/tests/modules/install/with_existing_module_directory.rb deleted file mode 100644 index ebfed1116ab..00000000000 --- a/acceptance/tests/modules/install/with_existing_module_directory.rb +++ /dev/null @@ -1,82 +0,0 @@ -test_name "puppet module install (with existing module directory)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nginx" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -apply_manifest_on master, <<-PP -file { - [ - '#{default_moduledir}/#{module_name}', - '#{default_moduledir}/apache', - ]: ensure => directory; - '#{default_moduledir}/#{module_name}/metadata.json': - content => '{ - "name": "not#{module_author}/#{module_name}", - "version": "0.0.3", - "source": "", - "author": "not#{module_author}", - "license": "MIT", - "dependencies": [] - }'; - [ - '#{default_moduledir}/#{module_name}/extra.json', - '#{default_moduledir}/apache/extra.json', - ]: content => ''; -} -PP - -step "Try to install a module with a name collision" -module_name = "nginx" -on master, puppet("module install #{module_author}-#{module_name}"), :acceptable_exit_codes => [1] do - assert_match(/Installation would overwrite #{default_moduledir}\/#{module_name}/, stderr, - "Error of module collision was not displayed") -end -on master, "[ -f #{default_moduledir}/#{module_name}/extra.json ]" - -step "Try to install a module with a path collision" -module_name = "apache" -on master, puppet("module install #{module_author}-#{module_name}"), :acceptable_exit_codes => [1] do - assert_match(/Installation would overwrite #{default_moduledir}\/#{module_name}/, stderr, - "Error of module collision was not displayed") -end -on master, "[ -f #{default_moduledir}/#{module_name}/extra.json ]" - -step "Try to install a module with a dependency that has collides" -module_name = "php" -on master, puppet("module install #{module_author}-#{module_name} --version 0.0.1"), :acceptable_exit_codes => [1] do - assert_match(/Dependency .* would overwrite/, stderr, - "Error of dependency collision was not displayed") -end -on master, "[ -f #{default_moduledir}/apache/extra.json ]" - -step "Install a module with a name collision by using --force" -module_name = "nginx" -on master, puppet("module install #{module_author}-#{module_name} --force"), :acceptable_exit_codes => [0] do - assert_module_installed_ui(stdout, module_author, module_name) -end -on master, "[ ! 
-f #{default_moduledir}/#{module_name}/extra.json ]" - -step "Install an module with a name collision by using --force" -module_name = "apache" -on master, puppet("module install #{module_author}-#{module_name} --force"), :acceptable_exit_codes => [0] do - assert_module_installed_ui(stdout, module_author, module_name) -end -on master, "[ ! -f #{default_moduledir}/#{module_name}/extra.json ]" diff --git a/acceptance/tests/modules/install/with_modulepath.rb b/acceptance/tests/modules/install/with_modulepath.rb deleted file mode 100644 index b1d3c3fe635..00000000000 --- a/acceptance/tests/modules/install/with_modulepath.rb +++ /dev/null @@ -1,44 +0,0 @@ -# encoding: UTF-8 - -test_name "puppet module install (with modulepath)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -codedir = master.puppet('master')['codedir'] -module_author = "pmtacceptance" -module_name = "nginx" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) - # TODO: make helper take modulepath - on master, "rm -rf #{codedir}/modules2" -end - -step 'Setup' - -stub_forge_on(master) - -on master, "mkdir -p #{codedir}/modules2" - -step "Install a module with relative modulepath" -on master, "cd #{codedir}/modules2 && puppet module install #{module_author}-#{module_name} --modulepath=." do - assert_module_installed_ui(stdout, module_author, module_name) - assert_match(/#{codedir}\/modules2/, stdout, - "Notice of non default install path was not displayed") -end -assert_module_installed_on_disk(master, module_name, "#{codedir}/modules2") - -step "Install a module with absolute modulepath" -on master, "test -d #{codedir}/modules2/#{module_name} && rm -rf #{codedir}/modules2/#{module_name}" -on master, puppet("module install #{module_author}-#{module_name} --modulepath=#{codedir}/modules2") do - assert_module_installed_ui(stdout, module_author, module_name) - assert_match(/#{codedir}\/modules2/, stdout, - "Notice of non default install path was not displayed") -end -assert_module_installed_on_disk(master, module_name, "#{codedir}/modules2") diff --git a/acceptance/tests/modules/install/with_necessary_upgrade.rb b/acceptance/tests/modules/install/with_necessary_upgrade.rb deleted file mode 100644 index 740b2b9a63c..00000000000 --- a/acceptance/tests/modules/install/with_necessary_upgrade.rb +++ /dev/null @@ -1,40 +0,0 @@ -test_name "puppet module install (with necessary dependency upgrade)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "java" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -step "Install an older module version" -module_version = '1.6.0' -on master, puppet("module install #{module_author}-#{module_name} --version #{module_version}") do - assert_match(/#{module_author}-#{module_name} \(.*v#{module_version}.*\)/, stdout, - "Notice of 
specific version installed was not displayed") -end -on master, "grep \"version '#{module_version}'\" #{default_moduledir}/#{module_name}/Modulefile" - - -step "Install a module that requires the older module dependency be upgraded" -on master, puppet("module install #{module_author}-apollo") do - assert_module_installed_ui(stdout, module_author, module_name, module_version, '>') -end - -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_module_installed_ui(stdout, module_author, module_name, module_version, '>') -end diff --git a/acceptance/tests/modules/install/with_no_dependencies.rb b/acceptance/tests/modules/install/with_no_dependencies.rb deleted file mode 100644 index f31d58a98fc..00000000000 --- a/acceptance/tests/modules/install/with_no_dependencies.rb +++ /dev/null @@ -1,26 +0,0 @@ -test_name "puppet module install (with no dependencies)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "nginx" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -step "Install a module with no dependencies" -on master, puppet("module install #{module_author}-#{module_name}") do - assert_module_installed_ui(stdout, module_author, module_name) -end -assert_module_installed_on_disk(master, module_name) diff --git a/acceptance/tests/modules/install/with_unnecessary_upgrade.rb b/acceptance/tests/modules/install/with_unnecessary_upgrade.rb deleted file mode 100644 index 61b943560b2..00000000000 --- a/acceptance/tests/modules/install/with_unnecessary_upgrade.rb +++ /dev/null @@ -1,42 +0,0 @@ -test_name "puppet module install (with unnecessary dependency upgrade)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "java" -module_dependencies = ["stdlub"] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -default_moduledir = get_default_modulepath_for_host(master) - -stub_forge_on(master) - -step "Install an older module version" -module_version = '1.7.0' -on master, puppet("module install #{module_author}-#{module_name} --version #{module_version}") do - assert_match(/#{module_author}-#{module_name} \(.*v#{module_version}.*\)/, stdout, - "Notice of specific version installed was not displayed") -end -on master, "grep \"version '#{module_version}'\" #{default_moduledir}/#{module_name}/Modulefile" - - -step "Install a module that depends on a dependency that could be upgraded, but already satisfies constraints" -module_name = "apollo" -on master, puppet("module install #{module_author}-#{module_name}") do - assert_module_installed_ui(stdout, module_author, module_name) -end - -on master, puppet("module list --modulepath #{default_moduledir}") do - module_name = "java" - assert_module_installed_ui(stdout, module_author, module_name, module_version, '==') -end diff --git 
a/acceptance/tests/modules/install/with_unsatisfied_constraints.rb b/acceptance/tests/modules/install/with_unsatisfied_constraints.rb deleted file mode 100644 index b409fcea4f0..00000000000 --- a/acceptance/tests/modules/install/with_unsatisfied_constraints.rb +++ /dev/null @@ -1,76 +0,0 @@ -test_name "puppet module install (with unsatisfied constraints)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "git" -module_reference = "#{module_author}-#{module_name}" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/crakorn', - ]: ensure => directory; - '#{master['distmoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.0.1", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "#{module_author}/stdlub", "version_requirement": "1.x" } - ] - }'; -} -PP - -step "Try to install a module that has an unsatisfiable dependency" -on master, puppet("module install #{module_author}-#{module_name}"), :acceptable_exit_codes => [1] do - assert_match(/No version.*can satisfy all dependencies/, stderr, - "Unsatisfiable dependency error was not displayed") - assert_match(/Use `puppet module install --ignore-dependencies/, stderr, - "Use --ignore-dependencies error was not displayed") -end -assert_module_not_installed_on_disk(master, module_name) - -# FIXME I don't understand what behaviour this looking for? 
-step "Install the module with an unsatisfiable dependency" -on master, puppet("module install #{module_author}-#{module_name} --ignore-dependencies") do - assert_module_installed_ui(stdout, module_author, module_name) -end -assert_module_installed_on_disk(master, module_name) - -step "Try to install a specific version of the unsatisfiable dependency" -on master, puppet("module install #{module_author}-stdlub --version 1.x"), :acceptable_exit_codes => [1] do - assert_match(/No version.* can satisfy all dependencies/, stderr, - "Unsatisfiable dependency was not displayed") -end -assert_module_not_installed_on_disk(master, 'stdlub') - -step "Try to install any version of the unsatisfiable dependency" -on master, puppet("module install #{module_author}-stdlub"), :acceptable_exit_codes => [1] do - assert_match(/No version.* can satisfy all dependencies/, stderr, - "Unsatisfiable dependency was not displayed") -end -assert_module_not_installed_on_disk(master, 'stdlub') - -step "Install the unsatisfiable dependency with --force" -on master, puppet("module install #{module_author}-stdlub --force") do - assert_module_installed_ui(stdout, module_author, 'stdlub') -end -assert_module_installed_on_disk(master, 'stdlub') diff --git a/acceptance/tests/modules/install/with_version.rb b/acceptance/tests/modules/install/with_version.rb deleted file mode 100644 index 46aac7349d5..00000000000 --- a/acceptance/tests/modules/install/with_version.rb +++ /dev/null @@ -1,28 +0,0 @@ -test_name "puppet module install (with version)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_author = "pmtacceptance" -module_name = "java" -module_version = "1.7.0" - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -agents.each do |agent| - step 'setup' - stub_forge_on(agent) - - step "install module '< #{module_version}' #{module_author}-#{module_name}" - on(agent, puppet("module install --version '< #{module_version}' #{module_author}-#{module_name}")) do - assert_module_installed_ui(stdout, module_author, module_name, module_version, '<') - assert_module_installed_on_disk(agent, module_name) - end -end - diff --git a/acceptance/tests/modules/list/with_circular_dependencies.rb b/acceptance/tests/modules/list/with_circular_dependencies.rb deleted file mode 100644 index be95cad20dc..00000000000 --- a/acceptance/tests/modules/list/with_circular_dependencies.rb +++ /dev/null @@ -1,56 +0,0 @@ -test_name "puppet module list (with circular dependencies)" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/appleseed" - on master, "rm -rf #{master['sitemoduledir']}/crakorn" -end - -step "Setup" - -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/appleseed', - '#{master['sitemoduledir']}/crakorn', - ]: ensure => directory, - recurse => true, - purge => true, - force => true; - '#{master['sitemoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/appleseed", "version_requirement": "1.1.0" } - ] - }'; - '#{master['distmoduledir']}/appleseed/metadata.json': - content => '{ - "name": "jimmy/appleseed", - "version": "1.1.0", - "source": "", - "author": "jimmy", - 
"license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.0" } - ] - }'; -} -PP -on master, "[ -d #{master['distmoduledir']}/appleseed ]" -on master, "[ -d #{master['sitemoduledir']}/crakorn ]" - -step "List the installed modules" -on master, puppet("module list") do - assert_match /jimmy-crakorn/, stdout, 'Could not find jimmy crakorn' - assert_match /jimmy-appleseed/, stdout, 'Could not find jimmy appleseed, but then again... wasnt it johnny appleseed?' -end - -step "List the installed modules as a dependency tree" -on master, puppet("module list --tree") do - assert_match /jimmy-crakorn.*\[#{master['sitemoduledir']}\]/, stdout, 'Could not find jimmy crakorn' - assert_match /jimmy-appleseed.*\[#{master['distmoduledir']}\]/, stdout, 'Could not find jimmy appleseed, but then again... wasnt it johnny appleseed?' -end diff --git a/acceptance/tests/modules/list/with_environment.rb b/acceptance/tests/modules/list/with_environment.rb deleted file mode 100644 index db2f4e0fb73..00000000000 --- a/acceptance/tests/modules/list/with_environment.rb +++ /dev/null @@ -1,30 +0,0 @@ -test_name 'puppet module list (with environment)' -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -tmpdir = master.tmpdir('module-list-with-environment') - -step 'Setup' - -stub_forge_on(master) - -puppet_conf = generate_base_directory_environments(tmpdir) - -step 'List modules in a non default directory environment' do - on master, puppet("module", "install", - "pmtacceptance-nginx", - "--config", puppet_conf, - "--environment=direnv") - - on master, puppet("module", "list", - "--config", puppet_conf, - "--environment=direnv") do - - assert_match(%r{#{tmpdir}/environments/direnv/modules}, stdout) - assert_match(/pmtacceptance-nginx/, stdout) - end -end diff --git a/acceptance/tests/modules/list/with_installed_modules.rb b/acceptance/tests/modules/list/with_installed_modules.rb deleted file mode 100644 index cc0caa486e9..00000000000 --- a/acceptance/tests/modules/list/with_installed_modules.rb +++ /dev/null @@ -1,107 +0,0 @@ -test_name "puppet module list (with installed modules)" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/thelock" - on master, "rm -rf #{master['distmoduledir']}/appleseed" - on master, "rm -rf #{master['distmoduledir']}/crakorn" - on master, "rm -rf #{master['sitemoduledir']}/crick" -end - -step "Setup" - -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/crakorn', - '#{master['distmoduledir']}/appleseed', - '#{master['distmoduledir']}/thelock', - '#{master['sitemoduledir']}/crick', - ]: ensure => directory, - recurse => true, - purge => true, - force => true; - '#{master['distmoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; - '#{master['distmoduledir']}/appleseed/metadata.json': - content => '{ - "name": "jimmy/appleseed", - "version": "1.1.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.0" } - ] - }'; - '#{master['distmoduledir']}/thelock/metadata.json': - content => '{ - "name": "jimmy/thelock", - "version": "1.0.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/appleseed", 
"version_requirement": "1.x" } - ] - }'; - '#{master['sitemoduledir']}/crick/metadata.json': - content => '{ - "name": "jimmy/crick", - "version": "1.0.1", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.x" } - ] - }'; -} -PP - -on master, "[ -d #{master['distmoduledir']}/crakorn ]" -on master, "[ -d #{master['distmoduledir']}/appleseed ]" -on master, "[ -d #{master['distmoduledir']}/thelock ]" -on master, "[ -d #{master['sitemoduledir']}/crick ]" - -step "List the installed modules" -on master, puppet("module list --modulepath #{master['distmoduledir']}") do - assert_equal <<-STDOUT, stdout -#{master['distmoduledir']} -├── jimmy-appleseed (\e[0;36mv1.1.0\e[0m) -├── jimmy-crakorn (\e[0;36mv0.4.0\e[0m) -└── jimmy-thelock (\e[0;36mv1.0.0\e[0m) -STDOUT -end - -on master, puppet("module list --modulepath #{master['sitemoduledir']}") do |res| - assert_match( /jimmy-crick/, - res.stdout, - 'Did not find module jimmy-crick in module site path') -end - -step "List the installed modules as a dependency tree" -on master, puppet("module list --tree --modulepath #{master['distmoduledir']}") do - assert_equal <<-STDOUT, stdout -#{master['distmoduledir']} -└─┬ jimmy-thelock (\e[0;36mv1.0.0\e[0m) - └─┬ jimmy-appleseed (\e[0;36mv1.1.0\e[0m) - └── jimmy-crakorn (\e[0;36mv0.4.0\e[0m) -STDOUT -end - -on master, puppet("module list --tree --modulepath #{master['sitemoduledir']}") do |res| - assert_match( /jimmy-crakorn/, - res.stdout, - 'Did not find module jimmy-crakorn in module site path') - - assert_match( /jimmy-crick/, - res.stdout, - 'Did not find module jimmy-crick in module site path') -end diff --git a/acceptance/tests/modules/list/with_invalid_dependencies.rb b/acceptance/tests/modules/list/with_invalid_dependencies.rb deleted file mode 100644 index f80ac4cc242..00000000000 --- a/acceptance/tests/modules/list/with_invalid_dependencies.rb +++ /dev/null @@ -1,94 +0,0 @@ -test_name "puppet module list (with invalid dependencies)" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/thelock" - on master, "rm -rf #{master['distmoduledir']}/appleseed" - on master, "rm -rf #{master['distmoduledir']}/crakorn" - on master, "rm -rf #{master['sitemoduledir']}/crick" -end - -step "Setup" - -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/appleseed', - '#{master['distmoduledir']}/crakorn', - '#{master['distmoduledir']}/thelock', - '#{master['sitemoduledir']}/crick', - ]: ensure => directory, - recurse => true, - purge => true, - force => true; - '#{master['distmoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.3.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; - '#{master['distmoduledir']}/appleseed/metadata.json': - content => '{ - "name": "jimmy/appleseed", - "version": "1.1.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.x" } - ] - }'; - '#{master['distmoduledir']}/thelock/metadata.json': - content => '{ - "name": "jimmy/thelock", - "version": "1.0.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/appleseed", "version_requirement": "1.x" } - ] - }'; - '#{master['sitemoduledir']}/crick/metadata.json': - content => '{ - "name": "jimmy/crick", - "version": "1.0.1", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", 
"version_requirement": "0.4.x" } - ] - }'; -} -PP - -on master, "[ -d #{master['distmoduledir']}/appleseed ]" -on master, "[ -d #{master['distmoduledir']}/crakorn ]" -on master, "[ -d #{master['distmoduledir']}/thelock ]" -on master, "[ -d #{master['sitemoduledir']}/crick ]" - -step "List the installed modules" -on master, puppet("module list") do |res| - pattern = Regexp.new([ - %Q{.*Warning: Module 'jimmy-crakorn' \\(v0.3.0\\) fails to meet some dependencies:}, - %Q{ 'jimmy-crick' \\(v1.0.1\\) requires 'jimmy-crakorn' \\(v0.4.x\\).*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) - - assert_match /jimmy-crakorn.*invalid/, res.stdout, 'Did not find module jimmy-crick in module site path' -end - -step "List the installed modules as a dependency tree" -on master, puppet("module list --tree") do |res| - - pattern = Regexp.new([ - %Q{.*Warning: Module 'jimmy-crakorn' \\(v0.3.0\\) fails to meet some dependencies:}, - %Q{ 'jimmy-crick' \\(v1.0.1\\) requires 'jimmy-crakorn' \\(v0.4.x\\).*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) - - assert_match /jimmy-crakorn.*\[#{master['distmoduledir']}\].*invalid/, res.stdout -end diff --git a/acceptance/tests/modules/list/with_missing_dependencies.rb b/acceptance/tests/modules/list/with_missing_dependencies.rb deleted file mode 100644 index 94831b2b23a..00000000000 --- a/acceptance/tests/modules/list/with_missing_dependencies.rb +++ /dev/null @@ -1,88 +0,0 @@ -test_name "puppet module list (with missing dependencies)" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/thelock" - on master, "rm -rf #{master['distmoduledir']}/appleseed" - on master, "rm -rf #{master['sitemoduledir']}/crick" -end - -step "Setup" - -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/appleseed', - '#{master['distmoduledir']}/thelock', - '#{master['sitemoduledir']}/crick', - ]: ensure => directory, - recurse => true, - purge => true, - force => true; - '#{master['distmoduledir']}/appleseed/metadata.json': - content => '{ - "name": "jimmy/appleseed", - "version": "1.1.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.0" } - ] - }'; - '#{master['distmoduledir']}/thelock/metadata.json': - content => '{ - "name": "jimmy/thelock", - "version": "1.0.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/appleseed", "version_requirement": "1.x" }, - { "name": "jimmy/sprinkles", "version_requirement": "2.x" } - ] - }'; - '#{master['sitemoduledir']}/crick/metadata.json': - content => '{ - "name": "jimmy/crick", - "version": "1.0.1", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.x" } - ] - }'; -} -PP - -on master, "[ -d #{master['distmoduledir']}/appleseed ]" -on master, "[ -d #{master['distmoduledir']}/thelock ]" -on master, "[ -d #{master['sitemoduledir']}/crick ]" - -step "List the installed modules" -on master, puppet('module list') do - pattern = Regexp.new([ - %Q{.*Warning: Missing dependency 'jimmy-crakorn':}, - %Q{ 'jimmy-appleseed' \\(v1.1.0\\) requires 'jimmy-crakorn' \\(v0.4.0\\)}, - %Q{ 'jimmy-crick' \\(v1.0.1\\) requires 'jimmy-crakorn' \\(v0.4.x\\).*}, - %Q{.*Warning: Missing dependency 'jimmy-sprinkles':}, - %Q{ 'jimmy-thelock' \\(v1.0.0\\) requires 'jimmy-sprinkles' \\(v2.x\\).*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.stderr) -end - 
-step "List the installed modules as a dependency tree"
-on master, puppet('module list --tree') do
- pattern = Regexp.new([
- %Q{.*Warning: Missing dependency 'jimmy-crakorn':},
- %Q{ 'jimmy-appleseed' \\(v1.1.0\\) requires 'jimmy-crakorn' \\(v0.4.0\\)},
- %Q{ 'jimmy-crick' \\(v1.0.1\\) requires 'jimmy-crakorn' \\(v0.4.x\\).*},
- %Q{.*Warning: Missing dependency 'jimmy-sprinkles':},
- %Q{ 'jimmy-thelock' \\(v1.0.0\\) requires 'jimmy-sprinkles' \\(v2.x\\).*},
- ].join("\n"), Regexp::MULTILINE)
- assert_match(pattern, result.stderr)
-
- assert_match /UNMET DEPENDENCY.*jimmy-sprinkles/, stdout, 'Did not find unmet dependency for jimmy-sprinkles warning'
-
- assert_match /UNMET DEPENDENCY.*jimmy-crakorn/, stdout, 'Did not find unmet dependency for jimmy-crakorn warning'
-end
diff --git a/acceptance/tests/modules/list/with_modulepath.rb b/acceptance/tests/modules/list/with_modulepath.rb
deleted file mode 100644
index 81ea036b14f..00000000000
--- a/acceptance/tests/modules/list/with_modulepath.rb
+++ /dev/null
@@ -1,77 +0,0 @@
-test_name "puppet module list (with modulepath)"
-
-codedir = master.puppet('master')['codedir']
-
-step "Setup"
-apply_manifest_on master, <<-PP
-file {
- [
- '#{codedir}/modules2',
- '#{codedir}/modules2/crakorn',
- '#{codedir}/modules2/appleseed',
- '#{codedir}/modules2/thelock',
- ]: ensure => directory,
- recurse => true,
- purge => true,
- force => true;
- '#{codedir}/modules2/crakorn/metadata.json':
- content => '{
- "name": "jimmy/crakorn",
- "version": "0.4.0",
- "source": "",
- "author": "jimmy",
- "license": "MIT",
- "dependencies": []
- }';
- '#{codedir}/modules2/appleseed/metadata.json':
- content => '{
- "name": "jimmy/appleseed",
- "version": "1.1.0",
- "source": "",
- "author": "jimmy",
- "license": "MIT",
- "dependencies": [
- { "name": "jimmy/crakorn", "version_requirement": "0.4.0" }
- ]
- }';
- '#{codedir}/modules2/thelock/metadata.json':
- content => '{
- "name": "jimmy/thelock",
- "version": "1.0.0",
- "source": "",
- "author": "jimmy",
- "license": "MIT",
- "dependencies": [
- { "name": "jimmy/appleseed", "version_requirement": "1.x" }
- ]
- }';
-}
-PP
-
-teardown do
- on master, "rm -rf #{codedir}/modules2"
-end
-
-on master, "[ -d #{codedir}/modules2/crakorn ]"
-on master, "[ -d #{codedir}/modules2/appleseed ]"
-on master, "[ -d #{codedir}/modules2/thelock ]"
-
-step "List the installed modules with relative modulepath"
-on master, "cd #{codedir}/modules2 && puppet module list --modulepath=."
do - assert_equal <<-STDOUT, stdout -#{codedir}/modules2 -├── jimmy-appleseed (\e[0;36mv1.1.0\e[0m) -├── jimmy-crakorn (\e[0;36mv0.4.0\e[0m) -└── jimmy-thelock (\e[0;36mv1.0.0\e[0m) -STDOUT -end - -step "List the installed modules with absolute modulepath" -on master, puppet("module list --modulepath=#{codedir}/modules2") do - assert_equal <<-STDOUT, stdout -#{codedir}/modules2 -├── jimmy-appleseed (\e[0;36mv1.1.0\e[0m) -├── jimmy-crakorn (\e[0;36mv0.4.0\e[0m) -└── jimmy-thelock (\e[0;36mv1.0.0\e[0m) -STDOUT -end diff --git a/acceptance/tests/modules/list/with_no_installed_modules.rb b/acceptance/tests/modules/list/with_no_installed_modules.rb deleted file mode 100644 index f2481dcf35f..00000000000 --- a/acceptance/tests/modules/list/with_no_installed_modules.rb +++ /dev/null @@ -1,9 +0,0 @@ -test_name "puppet module list (with no installed modules)" - - -step "List the installed modules" -modulesdir = master.tmpdir('puppet_module') -on master, puppet("module list --modulepath #{modulesdir}") do - assert_match(/no modules installed/, stdout, - "Declaration of 'no modules installed' not found") -end diff --git a/acceptance/tests/modules/list/with_repeated_dependencies.rb b/acceptance/tests/modules/list/with_repeated_dependencies.rb deleted file mode 100644 index 472b07be15f..00000000000 --- a/acceptance/tests/modules/list/with_repeated_dependencies.rb +++ /dev/null @@ -1,109 +0,0 @@ -test_name "puppet module list (with repeated dependencies)" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/crakorn" - on master, "rm -rf #{master['distmoduledir']}/steward" - on master, "rm -rf #{master['distmoduledir']}/appleseed" - on master, "rm -rf #{master['distmoduledir']}/thelock" - on master, "rm -rf #{master['sitemoduledir']}/crick" -end - -step "Setup" - -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/crakorn', - '#{master['distmoduledir']}/steward', - '#{master['distmoduledir']}/appleseed', - '#{master['distmoduledir']}/thelock', - '#{master['sitemoduledir']}/crick', - ]: ensure => directory, - recurse => true, - purge => true, - force => true; - '#{master['distmoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/steward", "version_requirement": ">= 0.0.0" } - ] - }'; - '#{master['distmoduledir']}/steward/metadata.json': - content => '{ - "name": "jimmy/steward", - "version": "0.9.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; - '#{master['distmoduledir']}/appleseed/metadata.json': - content => '{ - "name": "jimmy/appleseed", - "version": "1.1.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.0" } - ] - }'; - '#{master['distmoduledir']}/thelock/metadata.json': - content => '{ - "name": "jimmy/thelock", - "version": "1.0.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": ">= 0.0.0" }, - { "name": "jimmy/appleseed", "version_requirement": "1.x" } - ] - }'; - '#{master['sitemoduledir']}/crick/metadata.json': - content => '{ - "name": "jimmy/crick", - "version": "1.0.1", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.x" } - ] - }'; -} -PP - -on master, "[ -d #{master['distmoduledir']}/crakorn ]" -on master, "[ -d 
#{master['distmoduledir']}/steward ]" -on master, "[ -d #{master['distmoduledir']}/appleseed ]" -on master, "[ -d #{master['distmoduledir']}/thelock ]" -on master, "[ -d #{master['sitemoduledir']}/crick ]" - -step "List the installed modules" -on master, puppet('module list') # assertion is exit code 0 - -step "List the installed modules as a dependency tree" -on master, puppet("module list --tree --modulepath #{master['distmoduledir']}") do - assert_equal <<-STDOUT, stdout -#{master['distmoduledir']} -└─┬ jimmy-thelock (\e[0;36mv1.0.0\e[0m) - ├─┬ jimmy-crakorn (\e[0;36mv0.4.0\e[0m) - │ └── jimmy-steward (\e[0;36mv0.9.0\e[0m) - └── jimmy-appleseed (\e[0;36mv1.1.0\e[0m) -STDOUT -end - -on master, puppet("module list --tree") do - assert_match( /jimmy-crakorn.*\[#{master['distmoduledir']}\]/, - stdout, - 'Did not find cross modulepath reference to jimmy-crakorn' ) - assert_match( /jimmy-steward.*\[#{master['distmoduledir']}\]/, - stdout, - 'Did not find cross modulepath reference to jimmy-steward' ) -end diff --git a/acceptance/tests/modules/search/communication_error.rb b/acceptance/tests/modules/search/communication_error.rb deleted file mode 100644 index 0df08a1efa9..00000000000 --- a/acceptance/tests/modules/search/communication_error.rb +++ /dev/null @@ -1,18 +0,0 @@ -test_name 'puppet module search should print a reasonable message on communication errors' - -confine :except, :platform => 'solaris' - -step 'Setup' -stub_hosts_on(master, 'forgeapi.puppetlabs.com' => '127.0.0.2') - -step "Search against a non-existent Forge" -on master, puppet("module search yup"), :acceptable_exit_codes => [1] do - - assert_match <<-STDOUT, stdout -\e[mNotice: Searching https://forgeapi.puppetlabs.com ...\e[0m -STDOUT - -assert_no_match /yup/, - stdout, - 'Found a reference to a fake module when errors should have prevented us from getting here' -end diff --git a/acceptance/tests/modules/search/formatting.rb b/acceptance/tests/modules/search/formatting.rb deleted file mode 100644 index 2c290a21a7c..00000000000 --- a/acceptance/tests/modules/search/formatting.rb +++ /dev/null @@ -1,20 +0,0 @@ -test_name 'puppet module search output should be well structured' - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -step 'Setup' -stub_forge_on(master) - -step 'Search results should line up by column' -on master, puppet("module search apache") do - - assert_match(/Searching/, stdout.lines.first) - columns = stdout.lines.to_a[1].split(/\s{2}(?=\S)/) - pattern = /^#{ columns.map { |c| c.chomp.gsub(/./, '.') }.join(' ') }$/ - - stdout.gsub(/\e.*?m/, '').lines.to_a[1..-1].each do |line| - assert_match(pattern, line.chomp, 'columns were misaligned') - end -end diff --git a/acceptance/tests/modules/search/multiple_search_terms.rb b/acceptance/tests/modules/search/multiple_search_terms.rb deleted file mode 100644 index 76b975d898d..00000000000 --- a/acceptance/tests/modules/search/multiple_search_terms.rb +++ /dev/null @@ -1,20 +0,0 @@ -test_name 'puppet module search should handle multiple search terms sensibly' - -#step 'Setup' -#stub_forge_on(master) - -# FIXME: The Forge doesn't properly handle multi-term searches. 
-# step 'Search for a module by description' -# on master, puppet("module search 'notice here'") do -# assert stdout !~ /'notice here'/ -# end -# -# step 'Search for a module by name' -# on master, puppet("module search 'ance-geo ance-std'") do -# assert stdout !~ /'ance-geo ance-std'/ -# end -# -# step 'Search for multiple keywords' -# on master, puppet("module search 'star trek'") do -# assert stdout !~ /'star trek'/ -# end diff --git a/acceptance/tests/modules/search/no_results.rb b/acceptance/tests/modules/search/no_results.rb deleted file mode 100644 index e71bce3a3e9..00000000000 --- a/acceptance/tests/modules/search/no_results.rb +++ /dev/null @@ -1,16 +0,0 @@ -test_name 'puppet module search should print a reasonable message for no results' - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -module_name = "module_not_appearing_in_this_forge" - -step 'Setup' -stub_forge_on(master) - -step "Search for a module that doesn't exist" -on master, puppet("module search #{module_name}") do |res| - assert_match(/Searching/, res.stdout) - assert_match(/No results found for '#{module_name}'/, res.stdout) -end diff --git a/acceptance/tests/modules/search/ssl_errors.rb b/acceptance/tests/modules/search/ssl_errors.rb deleted file mode 100644 index 2d374d6e2bd..00000000000 --- a/acceptance/tests/modules/search/ssl_errors.rb +++ /dev/null @@ -1,24 +0,0 @@ -begin test_name 'puppet module search should print a reasonable message on ssl errors' - -step "Search against a website where the certificate is not signed by a public authority" - -# This might seem silly, but a master has a self-signed certificate and is a -# cheap way of testing against a web server without a publicly signed cert -with_puppet_running_on master, {} do - on master, puppet("module search yup --module_repository=https://#{master}:8140"), :acceptable_exit_codes => [1] do - assert_match <<-STDOUT, stdout -\e[mNotice: Searching https://#{master}:8140 ...\e[0m -STDOUT - assert_match <<-STDERR.chomp, stderr -Error: Could not connect via HTTPS to https://#{master}:8140 - Unable to verify the SSL certificate - The certificate may not be signed by a valid CA - The CA bundle included with OpenSSL may not be valid or up to date -STDERR -end - -end - -ensure step 'Remove fake forge hostname' -apply_manifest_on master, "host { 'fake.fakeforge.com': ensure => absent }" -end diff --git a/acceptance/tests/modules/uninstall/using_directory_name.rb b/acceptance/tests/modules/uninstall/using_directory_name.rb deleted file mode 100644 index 505e6e166a1..00000000000 --- a/acceptance/tests/modules/uninstall/using_directory_name.rb +++ /dev/null @@ -1,49 +0,0 @@ -test_name "puppet module uninstall (using directory name)" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/apache" - on master, "rm -rf #{master['distmoduledir']}/crakorn" -end - -step "Setup" -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/apache', - '#{master['distmoduledir']}/crakorn', - ]: ensure => directory; - '#{master['distmoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; -} -PP - -on master, "[ -d #{master['distmoduledir']}/apache ]" -on master, "[ -d #{master['distmoduledir']}/crakorn ]" - -step "Try to uninstall the module apache" -on master, puppet('module uninstall apache') do - assert_equal <<-OUTPUT, stdout -\e[mNotice: 
Preparing to uninstall 'apache' ...\e[0m -Removed 'apache' from #{master['distmoduledir']} - OUTPUT -end -on master, "[ ! -d #{master['distmoduledir']}/apache ]" - -step "Try to uninstall the module crakorn" -on master, puppet('module uninstall crakorn'), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ - %Q{.*Notice: Preparing to uninstall 'crakorn' ....*}, - %Q{.*Error: Could not uninstall module 'crakorn'}, - %Q{ Module 'crakorn' is not installed}, - %Q{ You may have meant `puppet module uninstall jimmy-crakorn`.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) -end -on master, "[ -d #{master['distmoduledir']}/crakorn ]" diff --git a/acceptance/tests/modules/uninstall/using_version_filter.rb b/acceptance/tests/modules/uninstall/using_version_filter.rb deleted file mode 100644 index 0469f8ee470..00000000000 --- a/acceptance/tests/modules/uninstall/using_version_filter.rb +++ /dev/null @@ -1,84 +0,0 @@ -test_name "puppet module uninstall (with module installed)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -module_author = "jimmy" -module_name = "crakorn" -module_dependencies = [] - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step "Setup" -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/#{module_name}', - '#{master['sitemoduledir']}/#{module_name}', - '#{master['sitemoduledir']}/appleseed', - ]: ensure => directory; - '#{master['distmoduledir']}/#{module_name}/metadata.json': - content => '{ - "name": "#{module_author}/#{module_name}", - "version": "0.4.0", - "source": "", - "author": "#{module_author}", - "license": "MIT", - "dependencies": [] - }'; - '#{master['sitemoduledir']}/#{module_name}/metadata.json': - content => '{ - "name": "#{module_author}/#{module_name}", - "version": "0.5.1", - "source": "", - "author": "#{module_author}", - "license": "MIT", - "dependencies": [] - }'; - '#{master['sitemoduledir']}/appleseed/metadata.json': - content => '{ - "name": "#{module_author}/appleseed", - "version": "0.4.0", - "source": "", - "author": "#{module_author}", - "license": "MIT", - "dependencies": [] - }'; -} -PP - -step "Uninstall #{module_author}-#{module_name} version 0.5.x" -on master, puppet("module uninstall #{module_author}-#{module_name} --version 0.5.x") do - assert_match(/Removed '#{module_author}-#{module_name}'/, stdout, - "Notice that module was uninstalled was not displayed") -end -on master, "[ -d #{master['distmoduledir']}/#{module_name} ]" -on master, "[ ! 
-d #{master['sitemoduledir']}/#{module_name} ]" - -step "Try to uninstall #{module_author}-#{module_name} v0.4.0 with `--version 0.5.x`" -on master, puppet("module uninstall #{module_author}-#{module_name} --version 0.5.x"), :acceptable_exit_codes => [1] do - assert_match(/Could not uninstall module '#{module_author}-#{module_name}'/, stderr, - "Error that module could not be uninstalled was not displayed") - assert_match(/No installed version of '#{module_author}-#{module_name}' matches/, stderr, - "Error that module version could not be found was not displayed") -end -on master, "[ -d #{master['distmoduledir']}/#{module_name} ]" - -module_name = 'appleseed' -step "Try to uninstall #{module_author}-#{module_name} v0.4.0 with `--version >9.9.9`" -on master, puppet("module uninstall #{module_author}-#{module_name} --version \">9.9.9\""), :acceptable_exit_codes => [1] do - assert_match(/Could not uninstall module '#{module_author}-#{module_name}'/, stderr, - "Error that module could not be uninstalled was not displayed") - assert_match(/No installed version of '#{module_author}-#{module_name}' matches/, stderr, - "Error that module version could not be found was not displayed") -end -on master, "[ -d #{master['sitemoduledir']}/#{module_name} ]" - -step "Uninstall #{module_author}-#{module_name} v0.4.0 with `--version >0.0.0`" -on master, puppet("module uninstall #{module_author}-#{module_name} --version \">0.0.0\"") do - assert_match(/Removed '#{module_author}-#{module_name}'/, stdout, - "Notice that module was uninstalled was not displayed") -end -on master, "[ ! -d #{master['sitemoduledir']}/#{module_name} ]" diff --git a/acceptance/tests/modules/uninstall/with_active_dependency.rb b/acceptance/tests/modules/uninstall/with_active_dependency.rb deleted file mode 100644 index f4fc185af23..00000000000 --- a/acceptance/tests/modules/uninstall/with_active_dependency.rb +++ /dev/null @@ -1,77 +0,0 @@ -test_name "puppet module uninstall (with active dependency)" - -step "Setup" -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/crakorn', - '#{master['distmoduledir']}/appleseed', - ]: ensure => directory; - '#{master['distmoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; - '#{master['distmoduledir']}/appleseed/metadata.json': - content => '{ - "name": "jimmy/appleseed", - "version": "1.1.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [ - { "name": "jimmy/crakorn", "version_requirement": "0.4.0" } - ] - }'; -} -PP - -teardown do - on master, "rm -rf #{master['distmoduledir']}/crakorn" - on master, "rm -rf #{master['distmoduledir']}/appleseed" -end - -on master, "[ -d #{master['distmoduledir']}/crakorn ]" -on master, "[ -d #{master['distmoduledir']}/appleseed ]" - -step "Try to uninstall the module jimmy-crakorn" -on master, puppet('module uninstall jimmy-crakorn'), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ - %Q{.*Notice: Preparing to uninstall 'jimmy-crakorn' .*}, - %Q{.*Error: Could not uninstall module 'jimmy-crakorn'}, - %Q{ Other installed modules have dependencies on 'jimmy-crakorn' \\(v0.4.0\\)}, - %Q{ 'jimmy/appleseed' \\(v1.1.0\\) requires 'jimmy-crakorn' \\(v0.4.0\\)}, - %Q{ Use `puppet module uninstall --force` to uninstall this module anyway.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) -end -on master, "[ -d #{master['distmoduledir']}/crakorn ]" -on 
master, "[ -d #{master['distmoduledir']}/appleseed ]" - -step "Try to uninstall the module jimmy-crakorn with a version range" -on master, puppet('module uninstall jimmy-crakorn --version 0.x'), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ - %Q{.*Notice: Preparing to uninstall 'jimmy-crakorn' \\(.*v0.x.*\\) .*}, - %Q{.*Error: Could not uninstall module 'jimmy-crakorn' \\(v0.x\\)}, - %Q{ Other installed modules have dependencies on 'jimmy-crakorn' \\(v0.4.0\\)}, - %Q{ 'jimmy/appleseed' \\(v1.1.0\\) requires 'jimmy-crakorn' \\(v0.4.0\\)}, - %Q{ Use `puppet module uninstall --force` to uninstall this module anyway.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) -end -on master, "[ -d #{master['distmoduledir']}/crakorn ]" -on master, "[ -d #{master['distmoduledir']}/appleseed ]" - -step "Uninstall the module jimmy-crakorn forcefully" -on master, puppet('module uninstall jimmy-crakorn --force') do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to uninstall 'jimmy-crakorn' ...\e[0m -Removed 'jimmy-crakorn' (\e[0;36mv0.4.0\e[0m) from #{master['distmoduledir']} - OUTPUT -end -on master, "[ ! -d #{master['distmoduledir']}/crakorn ]" -on master, "[ -d #{master['distmoduledir']}/appleseed ]" diff --git a/acceptance/tests/modules/uninstall/with_environment.rb b/acceptance/tests/modules/uninstall/with_environment.rb deleted file mode 100644 index 7c9e9348456..00000000000 --- a/acceptance/tests/modules/uninstall/with_environment.rb +++ /dev/null @@ -1,48 +0,0 @@ -test_name 'puppet module uninstall (with environment)' -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -tmpdir = master.tmpdir('module-uninstall-with-environment') - -step 'Setup' - -stub_forge_on(master) - -puppet_conf = generate_base_directory_environments(tmpdir) - -crakorn_metadata = <<-EOS -{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] -} -EOS - -# Configure a non-default environment -apply_manifest_on master, %Q{ - file { - [ - '#{tmpdir}/environments/direnv/modules', - '#{tmpdir}/environments/direnv/modules/crakorn', - ]: - ensure => directory, - } - file { - '#{tmpdir}/environments/direnv/modules/crakorn/metadata.json': - content => '#{crakorn_metadata}', - } -} - -step 'Uninstall a module from a non default directory environment' do - environment_path = "#{tmpdir}/environments/direnv/modules" - on(master, puppet("module uninstall jimmy-crakorn --config=#{puppet_conf} --environment=direnv")) do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to uninstall 'jimmy-crakorn' ...\e[0m -Removed 'jimmy-crakorn' (\e[0;36mv0.4.0\e[0m) from #{environment_path} - OUTPUT - end - on master, "[ ! 
-d #{environment_path}/crackorn ]" -end diff --git a/acceptance/tests/modules/uninstall/with_module_installed.rb b/acceptance/tests/modules/uninstall/with_module_installed.rb deleted file mode 100644 index c81bc9ecee1..00000000000 --- a/acceptance/tests/modules/uninstall/with_module_installed.rb +++ /dev/null @@ -1,34 +0,0 @@ -test_name "puppet module uninstall (with module installed)" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/crakorn" -end - -step "Setup" -apply_manifest_on master, <<-PP -file { - [ - '#{master['distmoduledir']}/crakorn', - ]: ensure => directory; - '#{master['distmoduledir']}/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; -} -PP - -on master, "[ -d #{master['distmoduledir']}/crakorn ]" - -step "Uninstall the module jimmy-crakorn" -on master, puppet('module uninstall jimmy-crakorn') do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to uninstall 'jimmy-crakorn' ...\e[0m -Removed 'jimmy-crakorn' (\e[0;36mv0.4.0\e[0m) from #{master['distmoduledir']} - OUTPUT -end -on master, "[ ! -d #{master['distmoduledir']}/crakorn ]" diff --git a/acceptance/tests/modules/uninstall/with_modulepath.rb b/acceptance/tests/modules/uninstall/with_modulepath.rb deleted file mode 100644 index 8c53cff8652..00000000000 --- a/acceptance/tests/modules/uninstall/with_modulepath.rb +++ /dev/null @@ -1,58 +0,0 @@ -test_name "puppet module uninstall (with modulepath)" - -codedir = master.puppet('master')['codedir'] - -teardown do - on master, "rm -rf #{codedir}/modules2" -end - -step "Setup" -apply_manifest_on master, <<-PP -file { - [ - '#{codedir}/modules2', - '#{codedir}/modules2/crakorn', - '#{codedir}/modules2/absolute', - ]: ensure => directory; - '#{codedir}/modules2/crakorn/metadata.json': - content => '{ - "name": "jimmy/crakorn", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; - '#{codedir}/modules2/absolute/metadata.json': - content => '{ - "name": "jimmy/absolute", - "version": "0.4.0", - "source": "", - "author": "jimmy", - "license": "MIT", - "dependencies": [] - }'; -} -PP - -on master, "[ -d #{codedir}/modules2/crakorn ]" -on master, "[ -d #{codedir}/modules2/absolute ]" - -step "Try to uninstall the module jimmy-crakorn using relative modulepath" -on master, "cd #{codedir}/modules2 && puppet module uninstall jimmy-crakorn --modulepath=." do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to uninstall 'jimmy-crakorn' ...\e[0m -Removed 'jimmy-crakorn' (\e[0;36mv0.4.0\e[0m) from #{codedir}/modules2 - OUTPUT -end - -on master, "[ ! -d #{codedir}/modules2/crakorn ]" - -step "Try to uninstall the module jimmy-absolute using an absolute modulepath" -on master, "cd #{codedir}/modules2 && puppet module uninstall jimmy-absolute --modulepath=#{codedir}/modules2" do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to uninstall 'jimmy-absolute' ...\e[0m -Removed 'jimmy-absolute' (\e[0;36mv0.4.0\e[0m) from #{codedir}/modules2 - OUTPUT -end -on master, "[ ! 
-d #{codedir}/modules2/absolute ]" diff --git a/acceptance/tests/modules/uninstall/with_multiple_modules_installed.rb b/acceptance/tests/modules/uninstall/with_multiple_modules_installed.rb deleted file mode 100644 index b44f97d9317..00000000000 --- a/acceptance/tests/modules/uninstall/with_multiple_modules_installed.rb +++ /dev/null @@ -1,87 +0,0 @@ -test_name "puppet module uninstall (with multiple modules installed)" - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -if master.is_pe? - skip_test -end - -step 'Setup' -testdir = master.tmpdir('unistallmultiple') - -stub_forge_on(master) - -teardown do - on master, "rm -rf #{master['distmoduledir']}/java" - on master, "rm -rf #{master['distmoduledir']}/stdlub" -end - -environmentpath = "#{testdir}/environments" - -apply_manifest_on(master, %Q{ - File { - ensure => directory, - owner => #{master.puppet['user']}, - group => #{master.puppet['group']}, - mode => "0750", - } - file { - [ - '#{environmentpath}', - '#{environmentpath}/production', - ]: - } -}) - -master_opts = { - 'main' => { - 'environmentpath' => environmentpath, - 'basemodulepath' => "#{master['sitemoduledir']}:#{master['distmoduledir']}", - } -} - -with_puppet_running_on master, master_opts, testdir do - on master, puppet("module install pmtacceptance-java --version 1.6.0 --modulepath #{master['distmoduledir']}") - on master, puppet("module install pmtacceptance-java --version 1.7.0 --modulepath #{environmentpath}/production/modules") - on master, puppet("module list --modulepath #{master['distmoduledir']}") do - pattern = Regexp.new([ - "#{master['distmoduledir']}", - "├── pmtacceptance-java \\(.*v1.6.0.*\\)", - "└── pmtacceptance-stdlub \\(.*v1.0.0.*\\)" - ].join("\n")) - assert_match(pattern, result.output) - end - - on master, puppet("module list --modulepath #{environmentpath}/production/modules") do - pattern = Regexp.new([ - "#{environmentpath}/production/modules", - "├── pmtacceptance-java \\(.*v1.7.0.*\\)", - "└── pmtacceptance-stdlub \\(.*v1.0.0.*\\)", - ].join("\n")) - assert_match(pattern, result.output) - end - - step "Try to uninstall a module that exists in multiple locations in the module path" - on master, puppet("module uninstall pmtacceptance-java"), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ - ".*Notice: Preparing to uninstall 'pmtacceptance-java' .*", - ".*Error: Could not uninstall module 'pmtacceptance-java'", - " Module 'pmtacceptance-java' appears multiple places in the module path", - " 'pmtacceptance-java' \\(v1.7.0\\) was found in #{environmentpath}/production/modules", - " 'pmtacceptance-java' \\(v1.6.0\\) was found in #{master['distmoduledir']}", - " Use the `--modulepath` option to limit the search to specific directories.*" - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) - end - - step "Uninstall a module that exists multiple locations by restricting the --modulepath" - on master, puppet("module uninstall pmtacceptance-java --modulepath #{master['distmoduledir']}") do - pattern = Regexp.new([ - ".*Notice: Preparing to uninstall 'pmtacceptance-java' .*", - "Removed 'pmtacceptance-java' \\(.*v1.6.0.*\\) from #{master['distmoduledir']}" - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) - end -end diff --git a/acceptance/tests/modules/upgrade/in_a_secondary_directory.rb b/acceptance/tests/modules/upgrade/in_a_secondary_directory.rb deleted file mode 100644 index 58f33a6b4e4..00000000000 --- 
a/acceptance/tests/modules/upgrade/in_a_secondary_directory.rb +++ /dev/null @@ -1,38 +0,0 @@ -test_name "puppet module upgrade (in a secondary directory)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -on master, "mkdir -p #{master['distmoduledir']}" -on master, puppet("module install pmtacceptance-java --version 1.6.0 --target-dir #{master['distmoduledir']}") -on master, puppet("module list --modulepath #{master['distmoduledir']}") do - assert_equal <<-OUTPUT, stdout -#{master['distmoduledir']} -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end - -step "Upgrade a module that has a more recent version published" -on master, puppet("module upgrade pmtacceptance-java") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-java' ...\e[0m -\e[mNotice: Found 'pmtacceptance-java' (\e[0;36mv1.6.0\e[m) in #{master['distmoduledir']} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not interrupt ...\e[0m -#{master['distmoduledir']} -└── pmtacceptance-java (\e[0;36mv1.6.0 -> v1.7.1\e[0m) - OUTPUT -end diff --git a/acceptance/tests/modules/upgrade/introducing_new_dependencies.rb b/acceptance/tests/modules/upgrade/introducing_new_dependencies.rb deleted file mode 100644 index 1c26d949f6c..00000000000 --- a/acceptance/tests/modules/upgrade/introducing_new_dependencies.rb +++ /dev/null @@ -1,43 +0,0 @@ -test_name "puppet module upgrade (introducing new dependencies)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -on master, puppet("module install pmtacceptance-stdlub --version 1.0.0") -on master, puppet("module install pmtacceptance-java --version 1.7.0") -on master, puppet("module install pmtacceptance-postql --version 0.0.2") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── pmtacceptance-java (\e[0;36mv1.7.0\e[0m) -├── pmtacceptance-postql (\e[0;36mv0.0.2\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end - -step "Upgrade a module to a version that introduces new dependencies" -on master, puppet("module upgrade pmtacceptance-postql") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-postql' ...\e[0m -\e[mNotice: Found 'pmtacceptance-postql' (\e[0;36mv0.0.2\e[m) in #{default_moduledir} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not interrupt ...\e[0m -#{default_moduledir} -└─┬ pmtacceptance-postql (\e[0;36mv0.0.2 -> v1.0.0\e[0m) - └── pmtacceptance-geordi (\e[0;36mv0.0.1\e[0m) - OUTPUT -end diff --git 
a/acceptance/tests/modules/upgrade/not_upgradable.rb b/acceptance/tests/modules/upgrade/not_upgradable.rb deleted file mode 100644 index 78d855417eb..00000000000 --- a/acceptance/tests/modules/upgrade/not_upgradable.rb +++ /dev/null @@ -1,90 +0,0 @@ -test_name "puppet module upgrade (not upgradable)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -on master, "mkdir -p #{default_moduledir}" -apply_manifest_on master, <<-PP - file { - [ - '#{default_moduledir}/nginx', - '#{default_moduledir}/unicorns', - ]: ensure => directory; - '#{default_moduledir}/unicorns/metadata.json': - content => '{ - "name": "notpmtacceptance/unicorns", - "version": "0.0.3", - "source": "", - "author": "notpmtacceptance", - "license": "MIT", - "dependencies": [] - }'; - } -PP - -on master, puppet("module install pmtacceptance-java --version 1.6.0") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── nginx (\e[0;36m???\e[0m) -├── notpmtacceptance-unicorns (\e[0;36mv0.0.3\e[0m) -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end - -step "Try to upgrade a module that is not installed" -on master, puppet("module upgrade pmtacceptance-nginx"), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ - %Q{.*Notice: Preparing to upgrade 'pmtacceptance-nginx' .*}, - %Q{.*Error: Could not upgrade module 'pmtacceptance-nginx'}, - %Q{ Module 'pmtacceptance-nginx' is not installed}, - %Q{ Use `puppet module install` to install this module.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) -end - -# TODO: Determine the appropriate response for this test. -# step "Try to upgrade a local module" -# on master, puppet("module upgrade nginx"), :acceptable_exit_codes => [1] do -# pattern = Regexp.new([ -# %Q{.*Notice: Preparing to upgrade 'nginx' .*}, -# %Q{.*Notice: Found 'nginx' \\(.*\\?\\?\\?.*\\) in #{default_moduledir} .*}, -# %Q{.*Notice: Downloading from https://forgeapi.puppetlabs.com .*}, -# %Q{.*Error: Could not upgrade module 'nginx' \\(\\?\\?\\? 
-> latest\\)}, -# %Q{ Module 'nginx' does not exist on https://forgeapi.puppetlabs.com.*}, -# ].join("\n"), Regexp::MULTILINE) -# assert_match(pattern, result.output) -# end - -step "Try to upgrade a module that doesn't exist in module_repository" -on master, puppet("module upgrade notpmtacceptance-unicorns"), :acceptable_exit_codes => [1] do - assert_match(/could not upgrade 'notpmtacceptance-unicorns'/i, stderr, - 'Could not upgrade error not shown') - - assert_match(/no releases are available from/i, stderr, - 'Upgrade failure reason not shown') -end - -step "Try to upgrade an installed module to a version that doesn't exist in module_repository" -on master, puppet("module upgrade pmtacceptance-java --version 2.0.0"), :acceptable_exit_codes => [1] do - assert_match(/could not upgrade 'pmtacceptance-java'/i, stderr, - 'Could not upgrade error not shown') - - assert_match(/no releases matching '2.0.0' are available from/i, stderr, - 'Upgrade failure reason not shown') -end diff --git a/acceptance/tests/modules/upgrade/that_was_installed_twice.rb b/acceptance/tests/modules/upgrade/that_was_installed_twice.rb deleted file mode 100644 index 3f503bce7ae..00000000000 --- a/acceptance/tests/modules/upgrade/that_was_installed_twice.rb +++ /dev/null @@ -1,64 +0,0 @@ -test_name "puppet module upgrade (that was installed twice)" -skip_test "This test does not seem to properly respect the given modulepath" - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('upgrademultimods') - -teardown do - on master, "rm -rf #{master['distmoduledir']}/java" - on master, "rm -rf #{master['distmoduledir']}/stdlub" - on master, "rm -rf #{testdir}/modules/java" - on master, "rm -rf #{testdir}/modules/stdlub" -end - -master_opts = { - 'main' => { - 'modulepath' => "#{master['distmoduledir']}:#{testdir}/modules" - } -} - - - -with_puppet_running_on master, master_opts, testdir do - on master, puppet("module install pmtacceptance-java --version 1.6.0 --modulepath #{master['distmoduledir']}") - on master, puppet("module install pmtacceptance-java --version 1.7.0 --modulepath #{testdir}/modules") - on master, puppet("module list") do - pattern = Regexp.new([ - "#{master['distmoduledir']}", - "├── pmtacceptance-java \\(.*v1.6.0\e.*\\)", - "└── pmtacceptance-stdlub \\(.*v1.0.0.*\\)", - "#{testdir}/modules", - "├── pmtacceptance-java \\(.*v1.7.0.*\\)", - "└── pmtacceptance-stdlub \\(.*v1.0.0.*\\)", - ].join("\n")) - assert_match(pattern, result.output) - end - - step "Try to upgrade a module that exists multiple locations in the module path" - on master, puppet("module upgrade pmtacceptance-java"), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ - ".*Notice: Preparing to upgrade 'pmtacceptance-java' .*", - ".*Error: Could not upgrade module 'pmtacceptance-java'", - " Module 'pmtacceptance-java' appears multiple places in the module path", - " 'pmtacceptance-java' \\(v1.6.0\\) was found in #{master['distmoduledir']}", - " 'pmtacceptance-java' \\(v1.7.0\\) was found in #{testdir}/modules", - " Use the `--modulepath` option to limit the search to specific directories", - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) - end - - step "Upgrade a module that exists multiple locations by restricting the --modulepath" - on master, puppet("module upgrade pmtacceptance-java --modulepath #{master['distmoduledir']}") do - pattern = Regexp.new([ - ".*Notice: Preparing to upgrade 'pmtacceptance-java' .*", - ".*Notice: Found 'pmtacceptance-java' \\(.*v1.6.0.*\\) in #{master['distmoduledir']} 
.*", - ".*Notice: Downloading from https://forgeapi.puppetlabs.com .*", - ".*Notice: Upgrading -- do not interrupt .*", - "#{master['distmoduledir']}", - "└── pmtacceptance-java \\(.*v1.6.0 -> v1.7.1.*\\)", - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) - end -end diff --git a/acceptance/tests/modules/upgrade/to_a_specific_version.rb b/acceptance/tests/modules/upgrade/to_a_specific_version.rb deleted file mode 100644 index 15464442aba..00000000000 --- a/acceptance/tests/modules/upgrade/to_a_specific_version.rb +++ /dev/null @@ -1,39 +0,0 @@ -test_name "puppet module upgrade (to a specific version)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -on master, puppet("module install pmtacceptance-java --version 1.6.0") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end - -step "Upgrade a module to a specific (greater) version" -on master, puppet("module upgrade pmtacceptance-java --version 1.7.0") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-java' ...\e[0m -\e[mNotice: Found 'pmtacceptance-java' (\e[0;36mv1.6.0\e[m) in #{default_moduledir} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not interrupt ...\e[0m -#{default_moduledir} -└── pmtacceptance-java (\e[0;36mv1.6.0 -> v1.7.0\e[0m) - OUTPUT -end diff --git a/acceptance/tests/modules/upgrade/to_installed_version.rb b/acceptance/tests/modules/upgrade/to_installed_version.rb deleted file mode 100644 index dbfa51e593b..00000000000 --- a/acceptance/tests/modules/upgrade/to_installed_version.rb +++ /dev/null @@ -1,69 +0,0 @@ -test_name "puppet module upgrade (to installed version)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -on master, puppet("module install pmtacceptance-java --version 1.6.0") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end - -step "Try to upgrade a module to the current version" -on master, puppet("module upgrade pmtacceptance-java --version 1.6.x"), :acceptable_exit_codes => [0] do - assert_match(/The installed version is already the latest version matching/, stdout, - "Error that specified version was already satisfied was not displayed") -end - -step "Upgrade a module to the current version with --force" -on master, puppet("module upgrade 
pmtacceptance-java --version 1.6.x --force") do - assert_match(/#{default_moduledir}/, stdout, - 'Error that distmoduledir was not displayed') - - assert_match(/\'pmtacceptance-java\' \(.*v1\.6\.0.*\)/, stdout, - 'Error that package name and version were not displayed') -end - -step "Upgrade to the latest version" -on master, puppet("module upgrade pmtacceptance-java") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-java' ...\e[0m -\e[mNotice: Found 'pmtacceptance-java' (\e[0;36mv1.6.0\e[m) in #{default_moduledir} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not interrupt ...\e[0m -#{default_moduledir} -└── pmtacceptance-java (\e[0;36mv1.6.0 -> v1.7.1\e[0m) - OUTPUT -end - -step "Try to upgrade a module to the latest version with the latest version installed" -on master, puppet("module upgrade pmtacceptance-java"), :acceptable_exit_codes => [0] do - assert_match(/The installed version is already the latest version matching.*latest/, stdout, - "Error that latest version was already installed was not displayed") -end - -step "Upgrade a module to the latest version with --force" -on master, puppet("module upgrade pmtacceptance-java --force") do - assert_match(/#{default_moduledir}/, stdout, - 'Error that distmoduledir was not displayed') - - assert_match(/pmtacceptance-java \(.*v1\.7\.1.*\)/, stdout, - 'Error that package name and version were not displayed') -end diff --git a/acceptance/tests/modules/upgrade/with_constraints_on_it.rb b/acceptance/tests/modules/upgrade/with_constraints_on_it.rb deleted file mode 100644 index 3bb9acdb8c2..00000000000 --- a/acceptance/tests/modules/upgrade/with_constraints_on_it.rb +++ /dev/null @@ -1,41 +0,0 @@ -test_name "puppet module upgrade (with constraints on it)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -on master, puppet("module install pmtacceptance-java --version 1.7.0") -on master, puppet("module install pmtacceptance-apollo") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── pmtacceptance-apollo (\e[0;36mv0.0.1\e[0m) -├── pmtacceptance-java (\e[0;36mv1.7.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end - -step "Upgrade a version-constrained module that has an upgrade" -on master, puppet("module upgrade pmtacceptance-java") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-java' ...\e[0m -\e[mNotice: Found 'pmtacceptance-java' (\e[0;36mv1.7.0\e[m) in #{default_moduledir} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not interrupt ...\e[0m -#{default_moduledir} -└── pmtacceptance-java (\e[0;36mv1.7.0 -> v1.7.1\e[0m) - OUTPUT -end diff --git a/acceptance/tests/modules/upgrade/with_constraints_on_its_dependencies.rb b/acceptance/tests/modules/upgrade/with_constraints_on_its_dependencies.rb deleted file mode 100644 index ec1b487bda9..00000000000 --- 
a/acceptance/tests/modules/upgrade/with_constraints_on_its_dependencies.rb +++ /dev/null @@ -1,87 +0,0 @@ -test_name "puppet module upgrade (with constraints on its dependencies)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -apply_manifest_on master, <<-PP - file { - [ - '#{default_moduledir}/unicorns', - ]: ensure => directory; - '#{default_moduledir}/unicorns/metadata.json': - content => '{ - "name": "notpmtacceptance/unicorns", - "version": "0.0.3", - "source": "", - "author": "notpmtacceptance", - "license": "MIT", - "dependencies": [ - { "name": "pmtacceptance/stdlub", "version_requirement": "0.0.2" } - ] - }'; - } -PP -on master, puppet("module install pmtacceptance-stdlub --version 0.0.2") -on master, puppet("module install pmtacceptance-java --version 1.6.0") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── notpmtacceptance-unicorns (\e[0;36mv0.0.3\e[0m) -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv0.0.2\e[0m) - OUTPUT -end - -step "Try to upgrade a module with constraints on its dependencies that cannot be met" -on master, puppet("module upgrade pmtacceptance-java --version 1.7.1"), :acceptable_exit_codes => [1] do - assert_match(/No version.* can satisfy all dependencies/, stderr, - "Unsatisfiable dependency was not displayed") -end - -step "Relax constraints" -on master, puppet("module uninstall notpmtacceptance-unicorns") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv0.0.2\e[0m) - OUTPUT -end - -step "Upgrade a single module, ignoring its dependencies" -on master, puppet("module upgrade pmtacceptance-java --version 1.7.0 --ignore-dependencies") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-java' ...\e[0m -\e[mNotice: Found 'pmtacceptance-java' (\e[0;36mv1.6.0\e[m) in #{default_moduledir} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not interrupt ...\e[0m -#{default_moduledir} -└── pmtacceptance-java (\e[0;36mv1.6.0 -> v1.7.0\e[0m) - OUTPUT -end - -step "Attempt to upgrade a module where dependency requires upgrade across major version" -on master, puppet("module upgrade pmtacceptance-java"), :acceptable_exit_codes => [1] do - assert_match(/There are 1 newer versions/, stderr, - 'Number of newer releases was not displayed') - - assert_match(/Dependencies will not be automatically upgraded across major versions/, stderr, - 'Dependency upgrade restriction message was not displayed') - - assert_match(/pmtacceptance-stdlub/, stderr, - 'Potential culprit depdendency was not displayed') -end diff --git a/acceptance/tests/modules/upgrade/with_environment.rb b/acceptance/tests/modules/upgrade/with_environment.rb deleted file mode 100644 index 0ea11b9ed59..00000000000 --- a/acceptance/tests/modules/upgrade/with_environment.rb +++ /dev/null @@ -1,32 +0,0 @@ 
-test_name "puppet module upgrade (with environment)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -tmpdir = master.tmpdir('module-upgrade-withenv') - -module_author = "pmtacceptance" -module_name = "java" -module_dependencies = ["stdlub"] - -step 'Setup' - -stub_forge_on(master) - -puppet_conf = generate_base_directory_environments(tmpdir) - -step "Upgrade a module that has a more recent version published in a directory environment" do - on master, puppet("module install #{module_author}-#{module_name} --config=#{puppet_conf} --version 1.6.0 --environment=direnv") do - assert_module_installed_ui(stdout, module_author, module_name) - end - - environment_path = "#{tmpdir}/environments/direnv/modules" - on master, puppet("module upgrade #{module_author}-#{module_name} --config=#{puppet_conf} --environment=direnv") do - assert_module_installed_ui(stdout, module_author, module_name) - on master, "[ -f #{environment_path}/#{module_name}/Modulefile ]" - on master, "grep 1.7.1 #{environment_path}/#{module_name}/Modulefile" - end -end diff --git a/acceptance/tests/modules/upgrade/with_local_changes.rb b/acceptance/tests/modules/upgrade/with_local_changes.rb deleted file mode 100644 index f51b0a5515c..00000000000 --- a/acceptance/tests/modules/upgrade/with_local_changes.rb +++ /dev/null @@ -1,61 +0,0 @@ -test_name "puppet module upgrade (with local changes)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -on master, puppet("module install pmtacceptance-java --version 1.6.0") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end -apply_manifest_on master, <<-PP - file { - '#{default_moduledir}/java/README': content => "I CHANGE MY READMES"; - '#{default_moduledir}/java/NEWFILE': content => "I don't exist.'"; - } -PP - -step "Try to upgrade a module with local changes" -on master, puppet("module upgrade pmtacceptance-java"), :acceptable_exit_codes => [1] do - pattern = Regexp.new([ - %Q{.*Notice: Preparing to upgrade 'pmtacceptance-java' ....*}, - %Q{.*Notice: Found 'pmtacceptance-java' \\(.*v1.6.0.*\\) in #{default_moduledir} ....*}, - %Q{.*Error: Could not upgrade module 'pmtacceptance-java' \\(v1.6.0 -> latest\\)}, - %Q{ Installed module has had changes made locally}, - %Q{ Use `puppet module upgrade --ignore-changes` to upgrade this module anyway.*}, - ].join("\n"), Regexp::MULTILINE) - assert_match(pattern, result.output) -end -on master, %{[[ "$(cat #{default_moduledir}/java/README)" == "I CHANGE MY READMES" ]]} -on master, "[ -f #{default_moduledir}/java/NEWFILE ]" - -step "Upgrade a module with local changes with --force" -on master, puppet("module upgrade pmtacceptance-java --force") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-java' ...\e[0m -\e[mNotice: Found 
'pmtacceptance-java' (\e[0;36mv1.6.0\e[m) in #{default_moduledir} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not interrupt ...\e[0m -#{default_moduledir} -└── pmtacceptance-java (\e[0;36mv1.6.0 -> v1.7.1\e[0m) - OUTPUT -end -on master, %{[[ "$(cat #{default_moduledir}/java/README)" != "I CHANGE MY READMES" ]]} -on master, "[ ! -f #{default_moduledir}/java/NEWFILE ]" diff --git a/acceptance/tests/modules/upgrade/with_scattered_dependencies.rb b/acceptance/tests/modules/upgrade/with_scattered_dependencies.rb deleted file mode 100644 index 0dc2deec1f2..00000000000 --- a/acceptance/tests/modules/upgrade/with_scattered_dependencies.rb +++ /dev/null @@ -1,32 +0,0 @@ -test_name "puppet module upgrade (with scattered dependencies)" - -skip_test 'needs triage' - -step 'Setup' - -stub_forge_on(master) -testdir = master.tmpdir('scattereddeps') -on master, "mkdir -p #{testdir}/modules" - -teardown do - on master, "rm -rf #{master['distmoduledir']}/java" - on master, "rm -rf #{master['distmoduledir']}/postql" -end - -master_opts = { - 'main' => { - 'modulepath' => "#{testdir}/modules:#{master['distmoduledir']}:#{master['sitemoduledir']}" - } -} - -with_puppet_running_on master, master_opts, testdir do - on master, puppet("module install pmtacceptance-stdlub --version 0.0.2 --target-dir #{testdir}/modules") - on master, puppet("module install pmtacceptance-java --version 1.6.0 --target-dir #{master['distmoduledir']} --ignore-dependencies") - on master, puppet("module install pmtacceptance-postql --version 0.0.1 --target-dir #{master['distmoduledir']} --ignore-dependencies") - on master, puppet("module list") do - assert_match /pmtacceptance-java.*1\.6\.0/, stdout, 'Could not find pmtacceptance/java' - assert_match /pmtacceptance-postql.*0\.0\.1/, stdout, 'Could not find pmtacceptance/postql' - assert_match /pmtacceptance-stdlub.*0\.0\.2/, stdout, 'Could not find pmtacceptance/stdlub' - end - -end diff --git a/acceptance/tests/modules/upgrade/with_update_available.rb b/acceptance/tests/modules/upgrade/with_update_available.rb deleted file mode 100644 index 7368e13abed..00000000000 --- a/acceptance/tests/modules/upgrade/with_update_available.rb +++ /dev/null @@ -1,42 +0,0 @@ -test_name "puppet module upgrade (with update available)" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -hosts.each do |host| - skip_test "skip tests requiring forge certs on solaris and aix" if host['platform'] =~ /solaris/ -end - -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) -end - -step 'Setup' - -stub_forge_on(master) - -default_moduledir = get_default_modulepath_for_host(master) - -on master, puppet("module install pmtacceptance-java --version 1.6.0") -on master, puppet("module list --modulepath #{default_moduledir}") do - assert_equal <<-OUTPUT, stdout -#{default_moduledir} -├── pmtacceptance-java (\e[0;36mv1.6.0\e[0m) -└── pmtacceptance-stdlub (\e[0;36mv1.0.0\e[0m) - OUTPUT -end - -step "Upgrade a module that has a more recent version published" -on master, puppet("module upgrade pmtacceptance-java") do - assert_equal <<-OUTPUT, stdout -\e[mNotice: Preparing to upgrade 'pmtacceptance-java' ...\e[0m -\e[mNotice: Found 'pmtacceptance-java' (\e[0;36mv1.6.0\e[m) in #{default_moduledir} ...\e[0m -\e[mNotice: Downloading from https://forgeapi.puppetlabs.com ...\e[0m -\e[mNotice: Upgrading -- do not 
interrupt ...\e[0m -#{default_moduledir} -└── pmtacceptance-java (\e[0;36mv1.6.0 -> v1.7.1\e[0m) - OUTPUT - on master, "[ -d #{default_moduledir}/java ]" - on master, "[ -f #{default_moduledir}/java/Modulefile ]" - on master, "grep 1.7.1 #{default_moduledir}/java/Modulefile" -end diff --git a/acceptance/tests/node/check_woy_cache_works.rb b/acceptance/tests/node/check_woy_cache_works.rb deleted file mode 100644 index 3068e71b75a..00000000000 --- a/acceptance/tests/node/check_woy_cache_works.rb +++ /dev/null @@ -1,55 +0,0 @@ -require 'securerandom' -require 'puppet/acceptance/temp_file_utils' -require 'yaml' -extend Puppet::Acceptance::TempFileUtils - -test_name "ticket #16753 node data should be cached in yaml to allow it to be queried" - -node_name = "woy_node_#{SecureRandom.hex}" -auth_contents = < { - 'rest_authconfig' => authfile, - 'yamldir' => temp_yamldir, - } -} - -with_puppet_running_on master, master_opts do - - # only one agent is needed because we only care about the file written on the master - run_agent_on(agents[0], "--no-daemonize --verbose --onetime --node_name_value #{node_name} --server #{master}") - - - yamldir = on(master, puppet('master', '--configprint', 'yamldir')).stdout.chomp - on master, puppet('node', 'search', '"*"', '--node_terminus', 'yaml', '--clientyamldir', yamldir, '--render-as', 'json') do - assert_match(/"name":["\s]*#{node_name}/, stdout, - "Expect node name '#{node_name}' to be present in node yaml content written by the WriteOnlyYaml terminus") - end -end diff --git a/acceptance/tests/ordering/master_agent_application.rb b/acceptance/tests/ordering/master_agent_application.rb index 41cd01ca1bd..5ebc2ea0d88 100644 --- a/acceptance/tests/ordering/master_agent_application.rb +++ b/acceptance/tests/ordering/master_agent_application.rb @@ -1,5 +1,9 @@ test_name "Puppet applies resources without dependencies in file order over the network" +tag 'audit:high', + 'audit:integration', + 'server' + testdir = master.tmpdir('application_order') apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) @@ -38,9 +42,10 @@ with_puppet_running_on(master, master_opts) do agents.each do |agent| - on(agent, puppet('agent', "--no-daemonize --onetime --verbose --server #{master} --ordering manifest")) - if stdout !~ /Notice: first.*Notice: second.*Notice: third.*Notice: fourth.*Notice: fifth.*Notice: sixth.*Notice: seventh.*Notice: eighth/m - fail_test "Output did not include the notify resources in the correct order" + on(agent, puppet('agent', "--no-daemonize --onetime --verbose")) do |result| + if result.stdout !~ /Notice: first.*Notice: second.*Notice: third.*Notice: fourth.*Notice: fifth.*Notice: sixth.*Notice: seventh.*Notice: eighth/m + fail_test "Output did not include the notify resources in the correct order" + end end end end diff --git a/acceptance/tests/parser_functions/calling_all_functions.rb b/acceptance/tests/parser_functions/calling_all_functions.rb index 292089f4dc9..f03ac4101ae 100644 --- a/acceptance/tests/parser_functions/calling_all_functions.rb +++ b/acceptance/tests/parser_functions/calling_all_functions.rb @@ -1,79 +1,124 @@ test_name 'Calling all functions.. test in progress!' 
-# create list of 3x functions and args -# notes: hiera functions are well tested elsewhere, included for completeness -# special cases: contain (call this from call_em_all) -# do fail last because it errors out -testdir = '' -generator = '' -agents.each do |agent| - testdir = agent.tmpdir('calling_all_functions') - if agent["platform"] =~ /win/ - generator = {:args => '"c:/windows/system32/tasklist.exe"', :expected => /\nImage Name/} - else - generator = {:args => '"/bin/date"', :expected => /\w\w\w.*?\d\d:\d\d\:\d\d/} +tag 'audit:high', + 'audit:acceptance' + +# create single manifest calling all functions +step 'Apply manifest containing all function calls' +def manifest_call_each_function_from_array(functions) + manifest = '' + # use index to work around puppet's immutable variables + # use variables so we can concatenate strings + functions.each_with_index do |function,index| + if function[:rvalue] + manifest << "$pre#{index} = \"sayeth #{function[:name].capitalize}: Scope(Class[main]): \" " + manifest << "$output#{index} = #{function[:name]}(#{function[:args]}) " + manifest << "#{function[:lambda]} notice \"${pre#{index}}${output#{index}}\"\n" + else + manifest << "$pre#{index} = \"sayeth #{function[:name].capitalize}: \" " + manifest << "notice \"${pre#{index}}\"\n" + manifest << "#{function[:name]}(#{function[:args]}) " + manifest << "#{function[:lambda]}\n" + end end + manifest end -functions_3x = [ - {:name => :alert, :args => '"consider yourself on alert"', :lambda => nil, :expected => 'consider yourself on alert'}, - {:name => :assert_type, :args => '"String[1]", "Valar morghulis"', :lambda => nil, :expected => 'Valar morghulis'}, - # this is explicitly called from call_em_all module which is included below - #{:name => :contain, :args => 'call_em_all', :lambda => nil, :expected => ''}, - # below doens't instance the resource. 
no output - {:name => :create_resources, :args => 'notify, {"w"=>{message=>"winter is coming"}}', :lambda => nil, :expected => ''}, - {:name => :crit, :args => '"consider yourself critical"', :lambda => nil, :expected => 'consider yourself critical'}, - {:name => :debug, :args => '"consider yourself bugged"', :lambda => nil, :expected => ''}, # no output expected unless run with debug - {:name => :defined, :args => 'File["/tmp"]', :lambda => nil, :expected => 'false'}, - {:name => :digest, :args => '"Sansa"', :lambda => nil, :expected => 'f16491bf0133c6103918b2edcd00cf89'}, - {:name => :each, :args => '[1,2,3]', :lambda => '|$x| {notice $x}', :expected => '[1, 2, 3]'}, - {:name => :emerg, :args => '"consider yourself emergent"', :lambda => nil, :expected => 'consider yourself emergent'}, - {:name => :epp, :args => '"call_em_all/template.epp",{x=>droid}', :lambda => nil, :expected => 'This is the droid you are looking for!'}, - {:name => :err, :args => '"consider yourself in err"', :lambda => nil, :expected => 'consider yourself in err'}, - {:name => :file, :args => '"call_em_all/rickon.txt"', :lambda => nil, :expected => 'who?'}, - {:name => :filter, :args => '[4,5,6]', :lambda => '|$x| {true}', :expected => '[4, 5, 6]'}, - {:name => :fqdn_rand, :args => '100000', :lambda => nil, :expected => /\d+\e/}, - # generate requires a fully qualified exe; which requires specifics for windows vs posix - {:name => :generate, :args => generator[:args], :lambda => nil, :expected => generator[:expected]}, - {:name => :hiera_array, :args => 'date,default_array', :lambda => nil, :expected => 'default_array'}, - {:name => :hiera_hash, :args => 'date,default_hash', :lambda => nil, :expected => 'default_hash'}, - {:name => :hiera_include, :args => 'date,call_em_all', :lambda => nil, :expected => ''}, - {:name => :hiera, :args => 'date,default_date', :lambda => nil, :expected => 'default_date'}, - {:name => :include, :args => 'call_em_all', :lambda => nil, :expected => ''}, - {:name => :info, :args => '"consider yourself informed"', :lambda => nil, :expected => 'consider yourself informed'}, - {:name => :inline_epp, :args => '\'<%= $x %>\',{x=>10}', :lambda => nil, :expected => '10'}, - {:name => :inline_template, :args => '\'empty<%= @x %>space\'', :lambda => nil, :expected => 'emptyspace'}, - # test the living life out of this thing in lookup.rb, and it doesn't allow for a default value - #{:name => :lookup, :args => 'date,lookup_date', :lambda => nil, :expected => ''}, # well tested elsewhere - {:name => :map, :args => '[7,8,9]', :lambda => '|$x| {notice $x}', :expected => '[7, 8, 9]'}, - {:name => :match, :args => '"abc", /b/', :lambda => nil, :expected => '[b]'}, - {:name => :md5, :args => '"Bran"', :lambda => nil, :expected => '723f9ac32ceb881ddf4fb8fc1020cf83'}, - {:name => :notice, :args => '"consider yourself under notice"', :lambda => nil, :expected => 'consider yourself under notice'}, - {:name => :realize, :args => 'User[arya]', :lambda => nil, :expected => ''}, # TODO: create a virtual first - {:name => :reduce, :args => '[4,5,6]', :lambda => '|$sum, $n| { $sum+$n }', :expected => '15'}, - #{:name => :reuse, :args => '[4,5,6]', :lambda => '|$sum, $n| { $sum+$n }', :expected => ''}, - #{:name => :recycle, :args => '[4,5,6]', :lambda => '|$sum, $n| { $sum+$n }', :expected => ''}, - {:name => :regsubst, :args => '"Cersei","Cer(\\w)ei","Daenery\\1"',:lambda => nil, :expected => 'Daenerys'}, - # explicitly called in call_em_all; implicitly called by the include above - #{:name => :require, :args => 
'[4,5,6]', :lambda => nil, :expected => ''}, - {:name => :scanf, :args => '"Eddard Stark","%6s"', :lambda => nil, :expected => '[Eddard]'}, - {:name => :sha1, :args => '"Sansa"', :lambda => nil, :expected => '4337ce5e4095e565d51e0ef4c80df1fecf238b29'}, - {:name => :shellquote, :args => '["-1", "--two"]', :lambda => nil, :expected => '-1 --two'}, - {:name => :slice, :args => '[1,2,3,4,5,6], 2', :lambda => nil, :expected => '[[1, 2], [3, 4], [5, 6]]'}, - {:name => :split, :args => '"9,8,7",","', :lambda => nil, :expected => '[9, 8, 7]'}, - {:name => :sprintf, :args => '"%b","123"', :lambda => nil, :expected => '1111011'}, - # explicitly called in call_em_all - #{:name => :tag, :args => '[4,5,6]', :lambda => nil, :expected => ''}, - {:name => :tagged, :args => '"yer_it"', :lambda => nil, :expected => 'false'}, - {:name => :template, :args => '"call_em_all/template.erb"', :lambda => nil, :expected => 'no defaultsno space'}, - {:name => :versioncmp, :args => '"1","2"', :lambda => nil, :expected => '-1'}, - {:name => :warning, :args => '"consider yourself warned"', :lambda => nil, :expected => 'consider yourself warned'}, - {:name => :with, :args => '1, "Catelyn"', :lambda => '|$x| {notice $x}', :expected => '1'}, - # do this one last or it will not allow the others to run. - {:name => :fail, :args => '"Jon Snow"', :lambda => nil, :expected => 'Error while evaluating a Function Call, Jon Snow'}, -] - -module_manifest = < '"c:/windows/system32/tasklist.exe"', :expected => /\nImage Name/} + # else + # generator = {:args => '"/bin/date"', :expected => /\w\w\w.*?\d\d:\d\d\:\d\d/} + # end + + # create list of 3x functions and args + # notes: hiera functions are well tested elsewhere, included for completeness + # special cases: contain (call this from call_em_all) + # do fail last because it errors out + + functions_3x = [ + {:name => :alert, :args => '"consider yourself on alert"', :lambda => nil, :expected => 'consider yourself on alert', :rvalue => false}, + {:name => :binary_file, :args => '"call_em_all/rickon.txt"', :lambda => nil, :expected => '', :rvalue => true}, + #{:name => :break, :args => '', :lambda => nil, :expected => '', :rvalue => false}, + # this is explicitly called from call_em_all module which is included below + #{:name => :contain, :args => 'call_em_all', :lambda => nil, :expected => '', :rvalue => true}, + # below doens't instance the resource. 
no output + {:name => :create_resources, :args => 'notify, {"w"=>{message=>"winter is coming"}}', :lambda => nil, :expected => '', :rvalue => false}, + {:name => :crit, :args => '"consider yourself critical"', :lambda => nil, :expected => 'consider yourself critical', :rvalue => false}, + {:name => :debug, :args => '"consider yourself bugged"', :lambda => nil, :expected => '', :rvalue => false}, # no output expected unless run with debug + {:name => :defined, :args => 'File["/tmp"]', :lambda => nil, :expected => 'false', :rvalue => true}, + {:name => :dig, :args => '[100]', :lambda => nil, :expected => '[100]', :rvalue => true}, + # Expect sha256 hash value for the digest + {:name => :digest, :args => '"Sansa"', :lambda => nil, :expected => '4ebf3a5527313f06c7965749d7764c15cba6fe86da11691ca9bd0ce448563979', :rvalue => true}, + {:name => :emerg, :args => '"consider yourself emergent"', :lambda => nil, :expected => 'consider yourself emergent', :rvalue => false}, + {:name => :err, :args => '"consider yourself in err"', :lambda => nil, :expected => 'consider yourself in err', :rvalue => false}, + {:name => :file, :args => '"call_em_all/rickon.txt"', :lambda => nil, :expected => 'who?', :rvalue => true}, + {:name => :fqdn_rand, :args => '100000', :lambda => nil, :expected => /Fqdn_rand: Scope\(Class\[main\]\): \d{1,5}/, :rvalue => true}, + # generate requires a fully qualified exe; which requires specifics for windows vs posix + #{:name => :generate, :args => generator[:args], :lambda => nil, :expected => generator[:expected], :rvalue => true}, + {:name => :hiera_array, :args => 'date,default_array', :lambda => nil, :expected => 'default_array', :rvalue => true}, + {:name => :hiera_hash, :args => 'date,default_hash', :lambda => nil, :expected => 'default_hash', :rvalue => true}, + {:name => :hiera_include, :args => 'date,call_em_all', :lambda => nil, :expected => '', :rvalue => false}, + {:name => :hiera, :args => 'date,default_date', :lambda => nil, :expected => 'default_date', :rvalue => true}, + {:name => :include, :args => 'call_em_all', :lambda => nil, :expected => '', :rvalue => false}, + {:name => :info, :args => '"consider yourself informed"', :lambda => nil, :expected => '', :rvalue => false}, # no ouput unless in debug mode + {:name => :inline_template, :args => '\'empty<%= @x %>space\'', :lambda => nil, :expected => 'emptyspace', :rvalue => true}, + # test the living life out of this thing in lookup.rb, and it doesn't allow for a default value + #{:name => :lookup, :args => 'date,lookup_date', :lambda => nil, :expected => '', :rvalue => true}, # well tested elsewhere + {:name => :sha256, :args => '"Bran"', :lambda => nil, :expected => '824264f7f73d6026550b52a671c50ad0c4452af66c24f3784e30f515353f2ce0', :rvalue => true}, + # Integer.new + {:name => :Integer, :args => '"100"', :lambda => nil, :expected => '100', :rvalue => true}, + {:name => :notice, :args => '"consider yourself under notice"', :lambda => nil, :expected => 'consider yourself under notice', :rvalue => false}, + {:name => :realize, :args => 'User[arya]', :lambda => nil, :expected => '', :rvalue => false}, # TODO: create a virtual first + {:name => :regsubst, :args => '"Cersei","Cer(\\\\w)ei","Daenery\\\\1"',:lambda => nil, :expected => 'Daenerys', :rvalue => true}, + # explicitly called in call_em_all; implicitly called by the include above + #{:name => :require, :args => '[4,5,6]', :lambda => nil, :expected => '', :rvalue => true}, + # 4x output contains brackets around scanf output + {:name => :scanf, :args => '"Eddard 
Stark","%6s"', :lambda => nil, :expected => '[Eddard]', :rvalue => true}, + {:name => :sha1, :args => '"Sansa"', :lambda => nil, :expected => '4337ce5e4095e565d51e0ef4c80df1fecf238b29', :rvalue => true}, + {:name => :shellquote, :args => '["-1", "--two"]', :lambda => nil, :expected => '-1 --two', :rvalue => true}, + # 4x output contains brackets around split output and commas btwn values + {:name => :split, :args => '"9,8,7",","', :lambda => nil, :expected => '[9, 8, 7]', :rvalue => true}, + {:name => :sprintf, :args => '"%b","123"', :lambda => nil, :expected => '1111011', :rvalue => true}, + {:name => :step, :args => '[100,99],1', :lambda => nil, :expected => 'Iterator[Integer]-Value', :rvalue => true}, + # explicitly called in call_em_all + #{:name => :tag, :args => '[4,5,6]', :lambda => nil, :expected => '', :rvalue => true}, + {:name => :tagged, :args => '"yer_it"', :lambda => nil, :expected => 'false', :rvalue => true}, + {:name => :template, :args => '"call_em_all/template.erb"', :lambda => nil, :expected => 'no defaultsno space', :rvalue => true}, + {:name => :type, :args => '42', :lambda => nil, :expected => 'Integer[42, 42]', :rvalue => true}, + {:name => :versioncmp, :args => '"1","2"', :lambda => nil, :expected => '-1', :rvalue => true}, + {:name => :warning, :args => '"consider yourself warned"', :lambda => nil, :expected => 'consider yourself warned', :rvalue => false}, + # do this one last or it will not allow the others to run. + {:name => :fail, :args => '"Jon Snow"', :lambda => nil, :expected => /Error:.*Jon Snow/, :rvalue => false}, + ] + + puppet_version = on(agent, puppet('--version')).stdout.chomp + + functions_4x = [ + {:name => :assert_type, :args => '"String[1]", "Valar morghulis"', :lambda => nil, :expected => 'Valar morghulis', :rvalue => true}, + {:name => :each, :args => '[1,2,3]', :lambda => '|$x| {$x}', :expected => '[1, 2, 3]', :rvalue => true}, + {:name => :epp, :args => '"call_em_all/template.epp",{x=>droid}', :lambda => nil, :expected => 'This is the droid you are looking for!', :rvalue => true}, + {:name => :filter, :args => '[4,5,6]', :lambda => '|$x| {true}', :expected => '[4, 5, 6]', :rvalue => true}, + # find_file() called by binary_file + #{:name => :find_file, :args => '[4,5,6]', :lambda => '|$x| {true}', :expected => '[4, 5, 6]', :rvalue => true}, + {:name => :inline_epp, :args => '\'<%= $x %>\',{x=>10}', :lambda => nil, :expected => '10', :rvalue => true}, + #{:name => :lest, :args => '100', :lambda => '"100"', :expected => '100', :rvalue => true}, + {:name => :map, :args => '[7,8,9]', :lambda => '|$x| {$x * $x}', :expected => '[49, 64, 81]', :rvalue => true}, + {:name => :match, :args => '"abc", /b/', :lambda => nil, :expected => '[b]', :rvalue => true}, + #{:name => :next, :args => '100', :lambda => nil, :expected => '100', :rvalue => true}, + {:name => :reduce, :args => '[4,5,6]', :lambda => '|$sum, $n| { $sum+$n }', :expected => '15', :rvalue => true}, + #{:name => :return, :args => '100', :lambda => nil, :expected => '100', :rvalue => true}, + {:name => :reverse_each, :args => '[100,99]', :lambda => nil, :expected => 'Iterator[Integer]-Value', :rvalue => true}, + # :reuse,:recycle + {:name => :slice, :args => '[1,2,3,4,5,6], 2', :lambda => nil, :expected => '[[1, 2], [3, 4], [5, 6]]', :rvalue => true}, + {:name => :strftime, :args => 'Timestamp("4216-09-23T13:14:15.123 UTC"), "%C"', :lambda => nil, :expected => '42', :rvalue => true}, + {:name => :then, :args => '100', :lambda => '|$x| {$x}', :expected => '100', :rvalue => true}, + {:name => 
:with, :args => '1, "Catelyn"', :lambda => '|$x, $y| {"$x, $y"}', :expected => '1, Catelyn', :rvalue => true}, + ] + + module_manifest = < directory, } @@ -135,39 +180,56 @@ } PP -# create single manifest calling all functions -step 'Apply manifest containing all function calls' -def manifest_call_each_function_from_array(functions) - manifest = '' - # use index to work around puppet's imutable variables - # use variables so we can contatenate strings - functions.each_with_index do |function,index| - manifest << "$pre#{index} = \"sayeth #{function[:name]}: \" $output#{index} = #{function[:name]}(#{function[:args]}) #{function[:lambda]} notice \"${pre#{index}}${output#{index}}\"\n" - end - manifest -end - -agents.each do |agent| apply_manifest_on(agent, module_manifest, :catch_failures => true) - apply_manifest_on(agent, manifest_call_each_function_from_array(functions_3x), - {:modulepath => "#{testdir}/environments/production/modules/", - :acceptable_exit_codes => 1} ) do |result| - functions_3x.each do |function| - # append the function name to the matcher so it's more expressive - if function[:expected].is_a?(String) - # argh. fail breaks my abstraction here - if function[:name] == :fail - expected = function[:expected] - else - expected = "#{function[:name]}: #{function[:expected]}" + + scope = 'Scope(Class[main]):' + # apply the 4x function manifest with future parser + puppet_apply_options = {:modulepath => "#{testdir}/environments/production/modules/", + :acceptable_exit_codes => 1} + puppet_apply_options[:future_parser] = true if puppet_version =~ /\A3\./ + apply_manifest_on(agent, manifest_call_each_function_from_array(functions_4x), puppet_apply_options) do |result| + functions_4x.each do |function| + expected = "#{function[:name].capitalize}: #{scope} #{function[:expected]}" + unless agent['locale'] == 'ja' + assert_match(expected, result.output, + "#{function[:name]} output didn't match expected value") + end + end + end + + file_path = agent.tmpfile('apply_manifest.pp') + + create_remote_file(agent, file_path, manifest_call_each_function_from_array(functions_3x)) + + trusted_3x = puppet_version =~ /\A3\./ ? 
'--trusted_node_data ' : '' + on(agent, puppet("apply #{trusted_3x} --color=false --modulepath #{testdir}/environments/production/modules/ #{file_path}"), + :acceptable_exit_codes => 1 ) do |result| + functions_3x.each do |function| + # append the function name to the matcher so it's more expressive + if function[:expected].is_a?(String) + if function[:name] == :fail + expected = function[:expected] + elsif function[:name] == :crit + expected = "#{function[:name].capitalize}ical: #{scope} #{function[:expected]}" + elsif function[:name] == :emerg + expected = "#{function[:name].capitalize}ency: #{scope} #{function[:expected]}" + elsif function[:name] == :err + expected = "#{function[:name].capitalize}or: #{scope} #{function[:expected]}" + elsif function[:expected] == '' + expected = "#{function[:name].capitalize}: #{function[:expected]}" + else + expected = "#{function[:name].capitalize}: #{scope} #{function[:expected]}" + end + elsif function[:expected].is_a?(Regexp) + expected = function[:expected] + else + raise 'unhandled function expectation type (we allow String or Regexp)' + end + + unless agent['locale'] == 'ja' + assert_match(expected, result.output, "#{function[:name]} output didn't match expected value") + end end - elsif function[:expected].is_a?(Regexp) - expected = Regexp.new(/#{function[:name]}:\s/.to_s + function[:expected].to_s) - else - raise 'unhandled function expectation type (we allow String or Regexp)' - end + end - assert_match(expected, result.output, "#{function[:name]} output didn't match expected value") - end - end end diff --git a/acceptance/tests/parser_functions/hiera/lookup_data.rb b/acceptance/tests/parser_functions/hiera/lookup_data.rb index 3ae0b944bc2..3e0b67f1d90 100644 --- a/acceptance/tests/parser_functions/hiera/lookup_data.rb +++ b/acceptance/tests/parser_functions/hiera/lookup_data.rb @@ -1,5 +1,9 @@ test_name "Lookup data using the hiera parser function" +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor' # Master is not required for this test. Replace with agents.each + testdir = master.tmpdir('hiera') step 'Setup' @@ -28,7 +32,6 @@ - "yaml" :logger: "console" :hierarchy: - - "%{fqdn}" - "%{environment}" - "global" @@ -85,8 +88,8 @@ class apache { with_puppet_running_on master, master_opts, testdir do agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}"), :acceptable_exit_codes => [2]) - - assert_match("apache server port: 8080", stdout) + on(agent, puppet('agent', "-t"), :acceptable_exit_codes => [2]) do |result| + assert_match('apache server port: 8080', result.stdout) + end end end diff --git a/acceptance/tests/parser_functions/hiera_array/lookup_data.rb b/acceptance/tests/parser_functions/hiera_array/lookup_data.rb index 49e161274ec..3bfbcfa635b 100644 --- a/acceptance/tests/parser_functions/hiera_array/lookup_data.rb +++ b/acceptance/tests/parser_functions/hiera_array/lookup_data.rb @@ -1,5 +1,9 @@ test_name "Lookup data using the hiera_array parser function" +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor' # Master is not required for this test. 
Replace with agents.each + testdir = master.tmpdir('hiera') step 'Setup' @@ -28,7 +32,6 @@ - "yaml" :logger: "console" :hierarchy: - - "%{fqdn}" - "%{environment}" - "global" @@ -97,9 +100,9 @@ class ntp { with_puppet_running_on master, master_opts, testdir do agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}"), :acceptable_exit_codes => [2]) - - assert_match("ntpserver global.ntp.puppetlabs.com", stdout) - assert_match("ntpserver production.ntp.puppetlabs.com", stdout) + on(agent, puppet('agent', "-t"), :acceptable_exit_codes => [2]) do |result| + assert_match('ntpserver global.ntp.puppetlabs.com', result.stdout) + assert_match('ntpserver production.ntp.puppetlabs.com', result.stdout) + end end end diff --git a/acceptance/tests/parser_functions/hiera_hash/lookup_data.rb b/acceptance/tests/parser_functions/hiera_hash/lookup_data.rb index 4fa39e12e02..3532fdb454d 100644 --- a/acceptance/tests/parser_functions/hiera_hash/lookup_data.rb +++ b/acceptance/tests/parser_functions/hiera_hash/lookup_data.rb @@ -1,5 +1,9 @@ test_name "Lookup data using the hiera_hash parser function" +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor' # Master is not required for this test. Replace with agents.each + testdir = master.tmpdir('hiera') step 'Setup' @@ -27,7 +31,6 @@ - "yaml" :logger: "console" :hierarchy: - - "%{fqdn}" - "%{environment}" - "global" @@ -95,8 +98,8 @@ with_puppet_running_on master, master_opts, testdir do agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}"), :acceptable_exit_codes => [2]) - - assert_match("name: postgres shell: /bin/bash", stdout) + on(agent, puppet('agent', "-t"), :acceptable_exit_codes => [2]) do |result| + assert_match("name: postgres shell: /bin/bash", result.stdout) + end end end diff --git a/acceptance/tests/parser_functions/hiera_in_templates.rb b/acceptance/tests/parser_functions/hiera_in_templates.rb new file mode 100644 index 00000000000..49924b5e434 --- /dev/null +++ b/acceptance/tests/parser_functions/hiera_in_templates.rb @@ -0,0 +1,475 @@ +test_name "Calling Hiera function from inside templates" + +tag 'audit:high', + 'audit:integration', + 'audit:refactor' # Master is not required for this test. Replace with agents.each + +@module_name = "hieratest" +@coderoot = master.tmpdir("#{@module_name}") + +@msg_default = 'message from default.yaml' +@msg_production = 'message from production.yaml' +@msg1os = 'message1 from {osfamily}.yaml' +@msg2os = 'message2 from {osfamily}.yaml' +@msg_fqdn = 'messsage from {fqdn}.yaml' + +@k1 = 'key1' +@k2 = 'key2' +@k3 = 'key3' + +@hval2p = 'hash_value2 from production.yaml' +@hval3p = 'hash_value3 from production.yaml' +@hval1os = 'hash_value1 from {osfamily}.yaml' +@hval2os = 'hash_value2 from {osfamily}.yaml' + +@h_m_call = "hiera\\('message'\\)" +@h_h_call = "hiera\\('hash_value'\\)" +@h_i_call = "hiera\\('includes'\\)" +@ha_m_call = "hiera_array\\('message'\\)" +@ha_i_call = "hiera_array\\('includes'\\)" +@hh_h_call = "hiera_hash\\('hash_value'\\)" + +@mod_default_msg = 'This file created by mod_default.' +@mod_osfamily_msg = 'This file created by mod_osfamily.' +@mod_production_msg = 'This file created by mod_production.' +@mod_fqdn_msg = 'This file created by mod_fqdn.' 
+ +@master_opts = { + 'main' => { + 'environmentpath' => "#{@coderoot}/environments", + 'hiera_config' => "#{@coderoot}/hiera.yaml", + }, +} + + +def create_environment(osfamilies, tmp_dirs) + envroot = "#{@coderoot}/environments" + production = "#{envroot}/production" + modroot = "#{production}/modules" + moduledir = "#{modroot}/#{@module_name}" + hieradir = "#{@coderoot}/hieradata" + + osfamily_yamls = "" + osfamilies.each do |osf| + new_yaml = < " +--- +message: [ + '#{@msg1os}', + '#{@msg2os}', +] +includes: '#{@module_name}::mod_osfamily' +hash_value: + #{@k1}: '#{@hval1os}' + #{@k2}: '#{@hval2os}' +" +} +NEW_YAML + osfamily_yamls += new_yaml + end + environ = < file, + owner => #{master.puppet['user']}, + group => #{master.puppet['group']}, + mode => "0644", +} + +file { + [ + "#{@coderoot}", + "#{envroot}", + "#{production}", + "#{production}/modules", + "#{production}/manifests", + "#{hieradir}", + "#{moduledir}", + "#{moduledir}/examples", + "#{moduledir}/manifests", + "#{moduledir}/templates", + ] : + ensure => directory, +} + +file { '#{production}/manifests/site.pp': + ensure => file, + content => " +node default { + \\$msgs = hiera_array('message') + notify {\\$msgs:} + class {'#{@module_name}': + result_dir => hiera('result_dir')[\\$facts['networking']['hostname']], + } +} +", +} + + +file {"#{@coderoot}/hiera.yaml": + content => " +--- +:backends: + - yaml + +:yaml: + :datadir: #{@coderoot}/hieradata + +:hierarchy: + - \\"%{clientcert}\\" + - \\"%{environment}\\" + - \\"%{os.family}\\" + - \\"default\\" +" +} + +file {"#{hieradir}/default.yaml": + content => " +--- +message: '#{@msg_default}' +includes: '#{@module_name}::mod_default' +result_dir: +#{tmp_dirs} +" +} + +#{osfamily_yamls} + + +file {"#{hieradir}/production.yaml": + content => " +--- +message: '#{@msg_production}' +includes: '#{@module_name}::mod_production' +hash_value: + #{@k2}: '#{@hval2p}' + #{@k3}: '#{@hval3p}' +" +} + +file {"#{hieradir}/#{$fqdn}.yaml": + content => " +--- +message: '#{@msg_fqdn}' +includes: '#{@module_name}::mod_fqdn' +" +} + +file {"#{moduledir}/examples/init.pp": + content => " +include #{@module_name} +" +} + +file { "#{moduledir}/manifests/init.pp": + content => " +class #{@module_name} ( + \\$result_dir, +) { + file { \\$result_dir: + ensure => directory, + mode => '0755', + } + file {\\\"\\\${result_dir}/#{@module_name}_results_epp\\\": + ensure => file, + mode => '0644', + content => epp('#{@module_name}/hieratest_results_epp.epp'), + } + file {\\\"\\\${result_dir}/#{@module_name}_results_erb\\\": + ensure => file, + mode => '0644', + content => template('#{@module_name}/hieratest_results_erb.erb'), + } +} +" +} + +file { "#{moduledir}/manifests/mod_default.pp": + content => " +class #{@module_name}::mod_default { + \\$result_dir = hiera('result_dir')[\\$facts['networking']['hostname']] + notify{\\"module mod_default invoked.\\\\n\\":} + file {\\\"\\\${result_dir}/mod_default\\\": + ensure => 'file', + mode => '0644', + content => \\\"#{@mod_default_msg}\\\\n\\\", + } +} +" +} + +file { "#{moduledir}/manifests/mod_osfamily.pp": + content => " +class #{@module_name}::mod_osfamily { + \\$result_dir = hiera('result_dir')[\\$facts['networking']['hostname']] + notify{\\"module mod_osfamily invoked.\\\\n\\":} + file {\\\"\\\${result_dir}/mod_osfamily\\\": + ensure => 'file', + mode => '0644', + content => \\\"#{@mod_osfamily_msg}\\\\n\\\", + } +} +" +} + +file { "#{moduledir}/manifests/mod_production.pp": + content => " +class #{@module_name}::mod_production { + \\$result_dir = 
hiera('result_dir')[\\$facts['networking']['hostname']] + notify{\\"module mod_production invoked.\\\\n\\":} + file {\\\"\\\${result_dir}/mod_production\\\": + ensure => 'file', + mode => '0644', + content => '#{@mod_production_msg}', + } +} +" +} + +file { "#{moduledir}/manifests/mod_fqdn.pp": + content => " +class #{@module_name}::mod_fqdn { + \\$result_dir = hiera('result_dir')[\\$facts['networking']['hostname']] + notify{\\"module mod_fqdn invoked.\\\\n\\":} + file {\\\"\\\${result_dir}/mod_fqdn\\\": + ensure => 'file', + mode => '0644', + content => \\\"#{@mod_fqdn_msg}\\\\n\\\", + } +} +" +} + +file { "#{moduledir}/templates/hieratest_results_epp.epp": + content => " +hiera('message'): <%= hiera('message') %> +hiera('hash_value'): <%= hiera('hash_value') %> +hiera('includes'): <%= hiera('includes') %> +hiera_array('message'): <%= hiera_array('message') %> +hiera_array('includes'): <%= hiera_array('includes') %> +hiera_hash('hash_value'): <%= hiera_hash('hash_value') %> +hiera_include('includes'): <%= hiera_include('includes') %> +" +} + +file { "#{moduledir}/templates/hieratest_results_erb.erb": + content => " +hiera('message'): <%= scope().call_function('hiera', ['message']) %> +hiera('hash_value'): <%= scope().call_function('hiera', ['hash_value']) %> +hiera('includes'): <%= scope().call_function('hiera', ['includes']) %> +hiera_array('message'): <%= scope().call_function('hiera_array', ['message']) %> +hiera_array('includes'): <%= scope().call_function('hiera_array', ['includes']) %> +hiera_hash('hash_value'): <%= scope().call_function('hiera_hash', ['hash_value']) %> +" +} + +ENV + environ +end + +def find_osfamilies + family_hash = {} + agents.each do |agent| + res = on(agent, facter("os.family")) + osf = res.stdout.chomp + family_hash[osf] = 1 + end + family_hash.keys +end + +def find_tmp_dirs + tmp_dirs = "" + host_to_result_dir = {} + agents.each do |agent| + h = on(agent, facter("networking.hostname")).stdout.chomp + t = agent.tmpdir("#{@module_name}_results") + tmp_dirs += " #{h}: '#{t}'\n" + host_to_result_dir[h] = t + end + result = { + 'tmp_dirs' => tmp_dirs, + 'host_to_result_dir' => host_to_result_dir + } + result +end + + +step 'Setup' + +with_puppet_running_on master, @master_opts, @coderoot do + res = find_tmp_dirs + tmp_dirs = res['tmp_dirs'] + host_to_result_dir = res['host_to_result_dir'] + env_manifest = create_environment(find_osfamilies, tmp_dirs) + apply_manifest_on(master, env_manifest, :catch_failures => true) + agents.each do |agent| + resultdir = host_to_result_dir[on(agent, facter("networking.hostname")).stdout.chomp] + step "Applying catalog to agent: #{agent}. result files in #{resultdir}" + on( + agent, + puppet('agent', "-t"), + :acceptable_exit_codes => [2] + ) + + step "####### Verifying hiera calls from erb template #######" + r1 = on(agent, "cat #{resultdir}/hieratest_results_erb") + result = r1.stdout + + step "Verifying hiera() call #1." + assert_match( + /#{@h_m_call}: #{@msg_production}/, + result, + "#{@h_m_call} failed. Expected: '#{@msg_production}'" + ) + + step "Verifying hiera() call #2." + assert_match( + /#{@h_h_call}.*\"#{@k3}\"=>\"#{@hval3p}\"/, + result, + "#{@h_h_call} failed. Expected: '\"#{@k3}\"=>\"#{@hval3p}\"'" + ) + + step "Verifying hiera() call #3." + assert_match( + /#{@h_h_call}.*\"#{@k2}\"=>\"#{@hval2p}\"/, + result, + "#{@h_h_call} failed. Expected: '\"#{@k2}\"=>\"#{@hval2p}\"'" + ) + + step "Verifying hiera() call #4." + assert_match( + /#{@h_i_call}: #{@module_name}::mod_production/, + result, + "#{@h_i_call} failed. 
Expected:'#{@module_name}::mod_production'" + ) + + step "Verifying hiera_array() call. #1" + assert_match( +/#{@ha_m_call}: \[\"#{@msg_production}\", \"#{@msg1os}\", \"#{@msg2os}\", \"#{@msg_default}\"\]/, + result, + "#{@ha_m_call} failed. Expected: '[\"#{@msg_production}\", \"#{@msg1os}\", \"#{@msg2os}\", \"#{@msg_default}\"]'" + ) + + step "Verifying hiera_array() call. #2" + assert_match( +/#{@ha_i_call}: \[\"#{@module_name}::mod_production\", \"#{@module_name}::mod_osfamily\", \"#{@module_name}::mod_default\"\]/, + result, + "#{@ha_i_call} failed. Expected: '[\"#{@module_name}::mod_production\", \"#{@module_name}::mod_osfamily\", \"#{@module_name}::mod_default\"]'" + ) + + step "Verifying hiera_hash() call. #1" + assert_match( + /#{@hh_h_call}:.*\"#{@k3}\"=>\"#{@hval3p}\"/, + result, + "#{@hh_h_call} failed. Expected: '\"#{@k3}\"=>\"#{@hval3p}\"'" + ) + + step "Verifying hiera_hash() call. #2" + assert_match( + /#{@hh_h_call}:.*\"#{@k2}\"=>\"#{@hval2p}\"/, + result, + "#{@hh_h_call} failed. Expected: '\"#{@k2}\"=>\"#{@hval2p}\"'" + ) + + step "Verifying hiera_hash() call. #3" + assert_match( + /#{@hh_h_call}:.*\"#{@k1}\"=>\"#{@hval1os}\"/, + result, + "#{@hh_h_call} failed. Expected: '\"#{@k1}\"=>\"#{@hval1os}\"'" + ) + + r2 = on(agent, "cat #{resultdir}/mod_default") + result = r2.stdout + step "Verifying hiera_include() call. #1" + assert_match( + "#{@mod_default_msg}", + result, + "#{@hi_i_call} failed. Expected: '#{@mod_default_msg}'" + ) + + r3 = on(agent, "cat #{resultdir}/mod_osfamily") + result = r3.stdout + step "Verifying hiera_include() call. #2" + assert_match( + "#{@mod_osfamily_msg}", + result, + "#{@hi_i_call} failed. Expected: '#{@mod_osfamily_msg}'" + ) + + r4 = on(agent, "cat #{resultdir}/mod_production") + result = r4.stdout + step "Verifying hiera_include() call. #3" + assert_match( + "#{@mod_production_msg}", + result, + "#{@hi_i_call} failed. Expected: '#{@mod_production_msg}'" + ) + + step "####### Verifying hiera calls from epp template #######" + r5 = on(agent, "cat #{resultdir}/hieratest_results_epp") + result = r5.stdout + + step "Verifying hiery() call #1." + assert_match( + /#{@h_m_call}: #{@msg_production}/, + result, + "#{@hi_m_call} failed. Expected '#{@msg_production}'" + ) + + step "Verifying hiera() call #2." + assert_match( + /#{@h_h_call}.*#{@k3} => #{@hval3p}/, + result, + "#{@h_h_call} failed. Expected '#{@k3} => #{@hval3p}'" + ) + + step "Verifying hiera() call #3." + assert_match(/#{@h_h_call}.*#{@k2} => #{@hval2p}/, + result, + "#{@h_h_call} failed. Expected '#{@k2} => #{@hval2p}'" + ) + + step "Verifying hiera() call #4." + assert_match( + /#{@h_i_call}: #{@module_name}::mod_production/, + result, + "#{@h_i_call} failed. Expected: '#{@module_name}::mod_production'" + ) + + step "Verifying hiera_array() call. #1" + assert_match( +/#{@ha_m_call}: \[#{@msg_production}, #{@msg1os}, #{@msg2os}, #{@msg_default}\]/, + result, + "#{@ha_m_call} failed. Expected: '[#{@msg_production}, #{@msg1os}, #{@msg2os}, #{@msg_default}]'" + ) + + step "Verifying hiera_array() call. #2" + assert_match( +/#{@ha_i_call}: \[#{@module_name}::mod_production, #{@module_name}::mod_osfamily, #{@module_name}::mod_default\]/, + result, + "#{@ha_i_call} failed. Expected: '[#{@module_name}::mod_production, #{@module_name}::mod_osfamily, #{@module_name}::mod_default'" + ) + + step "Verifying hiera_hash() call. #1" + assert_match( + /#{@hh_h_call}:.*#{@k3} => #{@hval3p}/, + result, + "#{@hh_h_call} failed. 
Expected: '{@k3} => #{@hval3p}'" + ) + + step "Verifying hiera_hash() call. #2" + assert_match( + /#{@hh_h_call}:.*#{@k2} => #{@hval2p}/, + result, + "#{@hh_h_call} failed. Expected '#{@k2} => #{@hval2p}'", + ) + + step "Verifying hiera_hash() call. #3" + assert_match( + /#{@hh_h_call}:.*#{@k1} => #{@hval1os}/, + result, + "#{@hh_h_call}: failed. Expected: '#{@k1} => #{@hval1os}'" + ) + end +end diff --git a/acceptance/tests/parser_functions/lookup.rb b/acceptance/tests/parser_functions/lookup.rb deleted file mode 100644 index bf2240681de..00000000000 --- a/acceptance/tests/parser_functions/lookup.rb +++ /dev/null @@ -1,288 +0,0 @@ -test_name "Lookup data using the agnostic lookup function" -# pre-docs: -# http://puppet-on-the-edge.blogspot.com/2015/01/puppet-40-data-in-modules-and.html - -testdir = master.tmpdir('lookup') - -step 'Setup' - -module_name = "data_module" -module_name2 = "other_module" -hash_name = "hash_name" -array_key = "array_key" - -env_data_implied_key = "env_data_implied" -env_data_implied_value = "env_implied_a" -env_data_key = "env_data" -env_data_value = "env_a" -env_hash_key = "env_hash_key" -env_hash_value = "env_class_a" -env_array_value0 = "env_array_a" -env_array_value1 = "env_array_b" - -module_data_implied_key = "module_data_implied" -module_data_implied_value = "module_implied_b" -module_data_key = "module_data" -module_data_value = "module_b" -module_data_value_other = "other_module_b" -module_hash_key = "module_hash_key" -module_hash_value = "module_class_b" -module_array_value0 = "module_array_a" -module_array_value1 = "module_array_b" - -env_data_override_implied_key = "env_data_override_implied" -env_data_override_implied_value = "env_override_implied_c" -env_data_override_key = "env_data_override" -env_data_override_value = "env_override_c" - -hiera_data_implied_key = "apache_server_port_implied" -hiera_data_implied_value = "8080" -hiera_data_key = "apache_server_port" -hiera_data_value = "9090" -hiera_hash_key = "hiera_hash_key" -hiera_hash_value = "hiera_class_c" -hiera_array_value0 = "hiera_array_a" -hiera_array_value1 = "hiera_array_b" - - -def mod_manifest_entry(module_name = nil, testdir, module_data_implied_key, - module_data_implied_value, module_data_key, - module_data_value, hash_name, module_hash_key, - module_hash_value, array_key, module_array_value0, - module_array_value1) - if module_name - module_files_manifest = < file, - content => " - Puppet::Bindings.newbindings('#{module_name}::default') do - # In the default bindings for this module - bind { - # bind its name to the 'puppet' module data provider - name '#{module_name}' - to 'function' - in_multibind 'puppet::module_data' - } - end - ", - mode => "0640", - } - - # the function to provide data for this module - file { '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/functions/#{module_name}/data.rb': - ensure => file, - content => " - Puppet::Functions.create_function(:'#{module_name}::data') do - def data() - { '#{module_name}::#{module_data_implied_key}' => '#{module_data_implied_value}', - '#{module_name}::#{module_data_key}' => '#{module_data_value}', - '#{module_name}::#{hash_name}' => {'#{module_hash_key}' => '#{module_hash_value}'}, - '#{module_name}::#{array_key}' => ['#{module_array_value0}', '#{module_array_value1}'] - } - end - end - ", - mode => "0640", - } -PP - module_files_manifest - end -end - -module_manifest1 = mod_manifest_entry(module_name, testdir, module_data_implied_key, - module_data_implied_value, module_data_key, module_data_value, - 
hash_name, module_hash_key, module_hash_value, array_key, - module_array_value0, module_array_value1) -module_manifest2 = mod_manifest_entry(module_name2, testdir, module_data_implied_key, - module_data_implied_value, module_data_key, module_data_value_other, - hash_name, module_hash_key, module_hash_value, array_key, - module_array_value0, module_array_value1) - -apply_manifest_on(master, <<-PP, :catch_failures => true) -File { - ensure => directory, - mode => "0750", - owner => #{master.puppet['user']}, - group => #{master.puppet['group']}, -} - -file { - '#{testdir}':; - '#{testdir}/hieradata':; - '#{testdir}/environments':; - '#{testdir}/environments/production':; - '#{testdir}/environments/production/manifests':; - '#{testdir}/environments/production/modules':; - '#{testdir}/environments/production/lib':; - '#{testdir}/environments/production/lib/puppet':; - '#{testdir}/environments/production/lib/puppet/functions':; - '#{testdir}/environments/production/lib/puppet/functions/environment':; - '#{testdir}/environments/production/modules/#{module_name}':; - '#{testdir}/environments/production/modules/#{module_name}/manifests':; - '#{testdir}/environments/production/modules/#{module_name}/lib':; - '#{testdir}/environments/production/modules/#{module_name}/lib/puppet':; - '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/bindings':; - '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/bindings/#{module_name}':; - '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/functions':; - '#{testdir}/environments/production/modules/#{module_name}/lib/puppet/functions/#{module_name}':; - '#{testdir}/environments/production/modules/#{module_name2}':; - '#{testdir}/environments/production/modules/#{module_name2}/manifests':; - '#{testdir}/environments/production/modules/#{module_name2}/lib':; - '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet':; - '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/bindings':; - '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/bindings/#{module_name2}':; - '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/functions':; - '#{testdir}/environments/production/modules/#{module_name2}/lib/puppet/functions/#{module_name2}':; -} - -file { '#{testdir}/hiera.yaml': - ensure => file, - content => '--- - :backends: - - "yaml" - :logger: "console" - :hierarchy: - - "global" - - :yaml: - :datadir: "#{testdir}/hieradata" - ', - mode => "0640", -} - -file { '#{testdir}/hieradata/global.yaml': - ensure => file, - content => "--- - #{hiera_data_key}: #{hiera_data_value} - #{module_name}::#{hiera_data_implied_key}: #{hiera_data_implied_value} - #{module_name}::#{hash_name}: - #{hiera_hash_key}: #{hiera_hash_value} - #{module_name}::#{array_key}: - - #{hiera_array_value0} - - #{hiera_array_value1} - ", - mode => "0640", -} - -file { '#{testdir}/environments/production/environment.conf': - ensure => file, - content => ' - environment_timeout = 0 - # for this environment, provide our own function to supply data to lookup - # implies a ruby function in /lib/puppet/functions/environment/data.rb - # named environment::data() - environment_data_provider = "function" - ', - mode => "0640", -} - -# the function to provide data for this environment -file { '#{testdir}/environments/production/lib/puppet/functions/environment/data.rb': - ensure => file, - content => " - Puppet::Functions.create_function(:'environment::data') do - def data() - { 
'#{module_name}::#{env_data_implied_key}' => '#{env_data_implied_value}', - '#{module_name}::#{env_data_override_implied_key}' => '#{env_data_override_implied_value}', - '#{env_data_key}' => '#{env_data_value}', - '#{module_name}::#{hash_name}' => {'#{env_hash_key}' => '#{env_hash_value}'}, - '#{env_data_override_key}' => '#{env_data_override_value}', - '#{module_name}::#{array_key}' => ['#{env_array_value0}', '#{env_array_value1}'] - } - end - end - ", - mode => "0640", -} - -# place module file segments here -#{module_manifest1} -# same key, different module and values -#{module_manifest2} - -file { '#{testdir}/environments/production/modules/#{module_name}/manifests/init.pp': - ensure => file, - content => ' - class #{module_name}($#{env_data_implied_key}, - $#{module_data_implied_key}, - $#{env_data_override_implied_key}, - $#{hiera_data_implied_key}) { - # lookup data from the environment function databinding - notify { "#{env_data_implied_key} $#{env_data_implied_key}": } - $lookup_env = lookup("#{env_data_key}") - notify { "#{env_data_key} $lookup_env": } - - # lookup data from the module databinding - notify { "#{module_data_implied_key} $#{module_data_implied_key}": } - $lookup_module = lookup("#{module_name}::#{module_data_key}") - notify { "#{module_data_key} $lookup_module": } - - # lookup data from another modules databinding - $lookup_module2 = lookup("#{module_name2}::#{module_data_key}") - notify { "#{module_data_key} $lookup_module2": } - - # ensure env can override module - notify { "#{env_data_override_implied_key} $#{env_data_override_implied_key}": } - $lookup_override = lookup("#{env_data_override_key}") - notify { "#{env_data_override_key} $lookup_override": } - - # should fall-back to hiera global.yaml data - notify { "#{hiera_data_implied_key} $#{hiera_data_implied_key}": } - $lookup_port = lookup("#{hiera_data_key}") - notify { "#{hiera_data_key} $lookup_port": } - - # should be able to merge hashes across sources - # this mimicks/covers behavior for including classes - $lookup_hash = lookup("#{module_name}::#{hash_name}",Hash[String,String],\\'hash\\') - notify { "#{hash_name} $lookup_hash": } - - # should be able to make an array across sources - # this mimicks/covers behavior for including classes - $lookup_array = lookup("#{module_name}::#{array_key}",Array[String],\\'unique\\') - notify { "yep": message => "#{array_key} $lookup_array" } - }', - mode => "0640", -} - -file { '#{testdir}/environments/production/manifests/site.pp': - ensure => file, - content => " - node default { - include #{module_name} - }", - mode => "0640", -} -PP - -step "Try to lookup string data" - -master_opts = { - 'main' => { - 'environmentpath' => "#{testdir}/environments", - 'hiera_config' => "#{testdir}/hiera.yaml", - }, -} - -with_puppet_running_on master, master_opts, testdir do - agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}"), :acceptable_exit_codes => [2]) - assert_match("#{env_data_implied_key} #{env_data_implied_value}", stdout) - assert_match("#{env_data_key} #{env_data_value}", stdout) - - assert_match("#{module_data_implied_key} #{module_data_implied_value}", stdout) - assert_match("#{module_data_key} #{module_data_value}", stdout) - - assert_match("#{module_data_key} #{module_data_value_other}", stdout) - - assert_match("#{env_data_override_implied_key} #{env_data_override_implied_value}", stdout) - assert_match("#{env_data_override_key} #{env_data_override_value}", stdout) - - assert_match("#{hiera_data_implied_key} 
#{hiera_data_implied_value}", stdout) - assert_match("#{hiera_data_key} #{hiera_data_value}", stdout) - - assert_match("#{hash_name} {#{module_hash_key} => #{module_hash_value}, #{env_hash_key} => #{env_hash_value}, #{hiera_hash_key} => #{hiera_hash_value}}", stdout) - - assert_match("#{array_key} [#{hiera_array_value0}, #{hiera_array_value1}, #{env_array_value0}, #{env_array_value1}, #{module_array_value0}, #{module_array_value1}]", stdout) - end -end diff --git a/acceptance/tests/parser_functions/no_exception_in_reduce_with_bignum.rb b/acceptance/tests/parser_functions/no_exception_in_reduce_with_bignum.rb new file mode 100644 index 00000000000..c3120a51456 --- /dev/null +++ b/acceptance/tests/parser_functions/no_exception_in_reduce_with_bignum.rb @@ -0,0 +1,71 @@ +test_name 'C97760: Integer in reduce() should not cause exception' do + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + tag 'audit:high', + 'audit:unit' + + # Remove all traces of the last used environment + teardown do + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + app_type = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, app_type) + + step 'On master, create site.pp with integer' do + create_sitepp(master, tmp_environment, <<-SITEPP) +$data = [ +{ +"certname"=>"xxxxxxxxx.some.domain", +"parameters"=>{ + "admin_auth_keys"=>{ + "keyname1"=>{ + "key"=>"ABCDEF", + "options"=>["from=\\"10.0.0.0/8\\""] + }, + "keyname2"=>{ + "key"=>"ABCDEF", + }, + "keyname3"=>{ + "key"=>"ABCDEF", + "options"=>["from=\\"10.0.0.0/8\\""], + "type"=>"ssh-xxx" + }, + "keyname4"=>{ + "key"=>"ABCDEF", + "options"=>["from=\\"10.0.0.0/8\\""] + } + }, + "admin_user"=>"ertxa", + "admin_hosts"=>["1.2.3.4", + "1.2.3.4", + "1.2.3.4"], + "admin_password"=>"ABCDEF", + "sshd_ports"=>[22, + 22, 24], + "sudo_no_password_all"=>false, + "sudo_no_password_commands"=>[], + "sshd_config_template"=>"cfauth/sshd_config.epp", + "sudo_env_keep"=>[] +}, +"exported"=>false}, +] +$data_reduced = $data.reduce({}) |$m, $r|{ + $cn = $r['certname'] + notice({ $cn => $r['parameters'] }) +} +SITEPP + end + + with_puppet_running_on(master, {}) do + agents.each do |agent| + on(agent, puppet("agent -t --environment #{tmp_environment}")) + end + end + +end diff --git a/acceptance/tests/parser_functions/puppet_lookup_cmd.rb b/acceptance/tests/parser_functions/puppet_lookup_cmd.rb new file mode 100644 index 00000000000..4d3a2e2d82b --- /dev/null +++ b/acceptance/tests/parser_functions/puppet_lookup_cmd.rb @@ -0,0 +1,2625 @@ +test_name "Puppet Lookup Command" + +tag 'audit:high', + 'audit:acceptance', + 'audit:refactor' # Master is not required for this test. 
Replace with agents.each + # Wrap steps in blocks in accordance with Beaker style guide + +# doc: +# https://puppet.com/docs/puppet/latest/hiera_automatic.html + +@module_name = "puppet_lookup_command_test" + +### @testroot = "/etc/puppetlabs" +@testroot = master.tmpdir("#{@module_name}") + +@coderoot = "#{@testroot}/code" +@confdir = "#{@testroot}/puppet" + +@node1 = 'node1.example.org' +@node2 = 'node2.example.org' + +@master_opts = { + 'main' => { + 'environmentpath' => "#{@coderoot}/environments", + 'hiera_config' => "#{@coderoot}/hiera.yaml", + }, +} + +@manifest = < directory, + mode => "0755", +} + +file { + '#{@confdir}':; + '#{@coderoot}':; + '#{@coderoot}/hieradata':; + '#{@coderoot}/environments':; + +##### default environment, production + '#{@coderoot}/environments/production':; + '#{@coderoot}/environments/production/data':; + '#{@coderoot}/environments/production/functions':; + '#{@coderoot}/environments/production/functions/environment':; + '#{@coderoot}/environments/production/lib':; + '#{@coderoot}/environments/production/lib/puppet':; + '#{@coderoot}/environments/production/lib/puppet/functions':; + '#{@coderoot}/environments/production/lib/puppet/functions/environment':; + '#{@coderoot}/environments/production/manifests':; + '#{@coderoot}/environments/production/modules':; + +# module mod1 hiera + '#{@coderoot}/environments/production/modules/mod1':; + '#{@coderoot}/environments/production/modules/mod1/manifests':; + '#{@coderoot}/environments/production/modules/mod1/data':; + '#{@coderoot}/environments/production/modules/mod1/functions':; + '#{@coderoot}/environments/production/modules/mod1/lib':; + '#{@coderoot}/environments/production/modules/mod1/lib/puppet':; + '#{@coderoot}/environments/production/modules/mod1/lib/puppet/functions':; + '#{@coderoot}/environments/production/modules/mod1/lib/puppet/functions/mod1':; + +# module mod2 ruby function + '#{@coderoot}/environments/production/modules/mod2':; + '#{@coderoot}/environments/production/modules/mod2/manifests':; + '#{@coderoot}/environments/production/modules/mod2/data':; + '#{@coderoot}/environments/production/modules/mod2/functions':; + '#{@coderoot}/environments/production/modules/mod2/lib':; + '#{@coderoot}/environments/production/modules/mod2/lib/puppet':; + '#{@coderoot}/environments/production/modules/mod2/lib/puppet/functions':; + '#{@coderoot}/environments/production/modules/mod2/lib/puppet/functions/mod2':; + +# module mod3 puppet function + '#{@coderoot}/environments/production/modules/mod3':; + '#{@coderoot}/environments/production/modules/mod3/manifests':; + '#{@coderoot}/environments/production/modules/mod3/data':; + '#{@coderoot}/environments/production/modules/mod3/functions':; + '#{@coderoot}/environments/production/modules/mod3/not-lib':; + '#{@coderoot}/environments/production/modules/mod3/not-lib/puppet':; + '#{@coderoot}/environments/production/modules/mod3/not-lib/puppet/functions':; + '#{@coderoot}/environments/production/modules/mod3/not-lib/puppet/functions/mod3':; + +# module mod4 none + '#{@coderoot}/environments/production/modules/mod4':; + '#{@coderoot}/environments/production/modules/mod4/manifests':; + '#{@coderoot}/environments/production/modules/mod4/data':; + '#{@coderoot}/environments/production/modules/mod4/functions':; + '#{@coderoot}/environments/production/modules/mod4/lib':; + '#{@coderoot}/environments/production/modules/mod4/lib/puppet':; + '#{@coderoot}/environments/production/modules/mod4/lib/puppet/functions':; + 
'#{@coderoot}/environments/production/modules/mod4/lib/puppet/functions/mod4':; + +##### env1 hiera + '#{@coderoot}/environments/env1':; + '#{@coderoot}/environments/env1/data':; + '#{@coderoot}/environments/env1/functions':; + '#{@coderoot}/environments/env1/functions/environment':; + '#{@coderoot}/environments/env1/lib':; + '#{@coderoot}/environments/env1/lib/puppet':; + '#{@coderoot}/environments/env1/lib/puppet/functions':; + '#{@coderoot}/environments/env1/lib/puppet/functions/environment':; + '#{@coderoot}/environments/env1/manifests':; + '#{@coderoot}/environments/env1/modules':; + +# module mod1 hiera + '#{@coderoot}/environments/env1/modules/mod1':; + '#{@coderoot}/environments/env1/modules/mod1/manifests':; + '#{@coderoot}/environments/env1/modules/mod1/data':; + '#{@coderoot}/environments/env1/modules/mod1/functions':; + '#{@coderoot}/environments/env1/modules/mod1/lib':; + '#{@coderoot}/environments/env1/modules/mod1/lib/puppet':; + '#{@coderoot}/environments/env1/modules/mod1/lib/puppet/functions':; + '#{@coderoot}/environments/env1/modules/mod1/lib/puppet/functions/mod1':; + +# module mod2 ruby function + '#{@coderoot}/environments/env1/modules/mod2':; + '#{@coderoot}/environments/env1/modules/mod2/manifests':; + '#{@coderoot}/environments/env1/modules/mod2/data':; + '#{@coderoot}/environments/env1/modules/mod2/functions':; + '#{@coderoot}/environments/env1/modules/mod2/lib':; + '#{@coderoot}/environments/env1/modules/mod2/lib/puppet':; + '#{@coderoot}/environments/env1/modules/mod2/lib/puppet/functions':; + '#{@coderoot}/environments/env1/modules/mod2/lib/puppet/functions/mod2':; + +# module mod3 puppet function + '#{@coderoot}/environments/env1/modules/mod3':; + '#{@coderoot}/environments/env1/modules/mod3/manifests':; + '#{@coderoot}/environments/env1/modules/mod3/data':; + '#{@coderoot}/environments/env1/modules/mod3/functions':; + '#{@coderoot}/environments/env1/modules/mod3/not-lib':; + '#{@coderoot}/environments/env1/modules/mod3/not-lib/puppet':; + '#{@coderoot}/environments/env1/modules/mod3/not-lib/puppet/functions':; + '#{@coderoot}/environments/env1/modules/mod3/not-lib/puppet/functions/mod3':; + +# module mod4 none + '#{@coderoot}/environments/env1/modules/mod4':; + '#{@coderoot}/environments/env1/modules/mod4/manifests':; + '#{@coderoot}/environments/env1/modules/mod4/data':; + '#{@coderoot}/environments/env1/modules/mod4/functions':; + '#{@coderoot}/environments/env1/modules/mod4/lib':; + '#{@coderoot}/environments/env1/modules/mod4/lib/puppet':; + '#{@coderoot}/environments/env1/modules/mod4/lib/puppet/functions':; + '#{@coderoot}/environments/env1/modules/mod4/lib/puppet/functions/mod4':; + + +##### env2 ruby function + '#{@coderoot}/environments/env2':; + '#{@coderoot}/environments/env2/data':; + '#{@coderoot}/environments/env2/functions':; + '#{@coderoot}/environments/env2/functions/environment':; + '#{@coderoot}/environments/env2/lib':; + '#{@coderoot}/environments/env2/lib/puppet':; + '#{@coderoot}/environments/env2/lib/puppet/functions':; + '#{@coderoot}/environments/env2/lib/puppet/functions/environment':; + '#{@coderoot}/environments/env2/manifests':; + '#{@coderoot}/environments/env2/modules':; + +# module mod1 hiera + '#{@coderoot}/environments/env2/modules/mod1':; + '#{@coderoot}/environments/env2/modules/mod1/manifests':; + '#{@coderoot}/environments/env2/modules/mod1/data':; + '#{@coderoot}/environments/env2/modules/mod1/functions':; + '#{@coderoot}/environments/env2/modules/mod1/lib':; + '#{@coderoot}/environments/env2/modules/mod1/lib/puppet':; 
+ '#{@coderoot}/environments/env2/modules/mod1/lib/puppet/functions':; + '#{@coderoot}/environments/env2/modules/mod1/lib/puppet/functions/mod1':; + +# module mod2 ruby function + '#{@coderoot}/environments/env2/modules/mod2':; + '#{@coderoot}/environments/env2/modules/mod2/manifests':; + '#{@coderoot}/environments/env2/modules/mod2/data':; + '#{@coderoot}/environments/env2/modules/mod2/functions':; + '#{@coderoot}/environments/env2/modules/mod2/lib':; + '#{@coderoot}/environments/env2/modules/mod2/lib/puppet':; + '#{@coderoot}/environments/env2/modules/mod2/lib/puppet/functions':; + '#{@coderoot}/environments/env2/modules/mod2/lib/puppet/functions/mod2':; + +# module mod3 puppet function + '#{@coderoot}/environments/env2/modules/mod3':; + '#{@coderoot}/environments/env2/modules/mod3/manifests':; + '#{@coderoot}/environments/env2/modules/mod3/data':; + '#{@coderoot}/environments/env2/modules/mod3/functions':; + '#{@coderoot}/environments/env2/modules/mod3/not-lib':; + '#{@coderoot}/environments/env2/modules/mod3/not-lib/puppet':; + '#{@coderoot}/environments/env2/modules/mod3/not-lib/puppet/functions':; + '#{@coderoot}/environments/env2/modules/mod3/not-lib/puppet/functions/mod3':; + +# module mod4 none + '#{@coderoot}/environments/env2/modules/mod4':; + '#{@coderoot}/environments/env2/modules/mod4/manifests':; + '#{@coderoot}/environments/env2/modules/mod4/data':; + '#{@coderoot}/environments/env2/modules/mod4/functions':; + '#{@coderoot}/environments/env2/modules/mod4/lib':; + '#{@coderoot}/environments/env2/modules/mod4/lib/puppet':; + '#{@coderoot}/environments/env2/modules/mod4/lib/puppet/functions':; + '#{@coderoot}/environments/env2/modules/mod4/lib/puppet/functions/mod4':; + + +##### env3 puppet function + '#{@coderoot}/environments/env3':; + '#{@coderoot}/environments/env3/data':; + '#{@coderoot}/environments/env3/functions':; + '#{@coderoot}/environments/env3/functions/environment':; + '#{@coderoot}/environments/env3/not-lib':; + '#{@coderoot}/environments/env3/not-lib/puppet':; + '#{@coderoot}/environments/env3/not-lib/puppet/functions':; + '#{@coderoot}/environments/env3/not-lib/puppet/functions/environment':; + '#{@coderoot}/environments/env3/manifests':; + '#{@coderoot}/environments/env3/modules':; + +# module mod1 hiera + '#{@coderoot}/environments/env3/modules/mod1':; + '#{@coderoot}/environments/env3/modules/mod1/manifests':; + '#{@coderoot}/environments/env3/modules/mod1/data':; + '#{@coderoot}/environments/env3/modules/mod1/functions':; + '#{@coderoot}/environments/env3/modules/mod1/lib':; + '#{@coderoot}/environments/env3/modules/mod1/lib/puppet':; + '#{@coderoot}/environments/env3/modules/mod1/lib/puppet/functions':; + '#{@coderoot}/environments/env3/modules/mod1/lib/puppet/functions/mod1':; + +# module mod2 ruby function + '#{@coderoot}/environments/env3/modules/mod2':; + '#{@coderoot}/environments/env3/modules/mod2/manifests':; + '#{@coderoot}/environments/env3/modules/mod2/data':; + '#{@coderoot}/environments/env3/modules/mod2/functions':; + '#{@coderoot}/environments/env3/modules/mod2/lib':; + '#{@coderoot}/environments/env3/modules/mod2/lib/puppet':; + '#{@coderoot}/environments/env3/modules/mod2/lib/puppet/functions':; + '#{@coderoot}/environments/env3/modules/mod2/lib/puppet/functions/mod2':; + +# module mod3 puppet function + '#{@coderoot}/environments/env3/modules/mod3':; + '#{@coderoot}/environments/env3/modules/mod3/manifests':; + '#{@coderoot}/environments/env3/modules/mod3/data':; + '#{@coderoot}/environments/env3/modules/mod3/functions':; + 
'#{@coderoot}/environments/env3/modules/mod3/not-lib':; + '#{@coderoot}/environments/env3/modules/mod3/not-lib/puppet':; + '#{@coderoot}/environments/env3/modules/mod3/not-lib/puppet/functions':; + '#{@coderoot}/environments/env3/modules/mod3/not-lib/puppet/functions/mod3':; + +# module mod4 none + '#{@coderoot}/environments/env3/modules/mod4':; + '#{@coderoot}/environments/env3/modules/mod4/manifests':; + '#{@coderoot}/environments/env3/modules/mod4/data':; + '#{@coderoot}/environments/env3/modules/mod4/functions':; + '#{@coderoot}/environments/env3/modules/mod4/lib':; + '#{@coderoot}/environments/env3/modules/mod4/lib/puppet':; + '#{@coderoot}/environments/env3/modules/mod4/lib/puppet/functions':; + '#{@coderoot}/environments/env3/modules/mod4/lib/puppet/functions/mod4':; + + +##### env4 none + '#{@coderoot}/environments/env4':; + '#{@coderoot}/environments/env4/data':; + '#{@coderoot}/environments/env4/functions':; + '#{@coderoot}/environments/env4/functions/environment':; + '#{@coderoot}/environments/env4/lib':; + '#{@coderoot}/environments/env4/lib/puppet':; + '#{@coderoot}/environments/env4/lib/puppet/functions':; + '#{@coderoot}/environments/env4/lib/puppet/functions/environment':; + '#{@coderoot}/environments/env4/manifests':; + '#{@coderoot}/environments/env4/modules':; + +# module mod1 hiera + '#{@coderoot}/environments/env4/modules/mod1':; + '#{@coderoot}/environments/env4/modules/mod1/manifests':; + '#{@coderoot}/environments/env4/modules/mod1/data':; + '#{@coderoot}/environments/env4/modules/mod1/functions':; + '#{@coderoot}/environments/env4/modules/mod1/lib':; + '#{@coderoot}/environments/env4/modules/mod1/lib/puppet':; + '#{@coderoot}/environments/env4/modules/mod1/lib/puppet/functions':; + '#{@coderoot}/environments/env4/modules/mod1/lib/puppet/functions/mod1':; + +# module mod2 ruby function + '#{@coderoot}/environments/env4/modules/mod2':; + '#{@coderoot}/environments/env4/modules/mod2/manifests':; + '#{@coderoot}/environments/env4/modules/mod2/data':; + '#{@coderoot}/environments/env4/modules/mod2/functions':; + '#{@coderoot}/environments/env4/modules/mod2/lib':; + '#{@coderoot}/environments/env4/modules/mod2/lib/puppet':; + '#{@coderoot}/environments/env4/modules/mod2/lib/puppet/functions':; + '#{@coderoot}/environments/env4/modules/mod2/lib/puppet/functions/mod2':; + +# module mod3 puppet function + '#{@coderoot}/environments/env4/modules/mod3':; + '#{@coderoot}/environments/env4/modules/mod3/manifests':; + '#{@coderoot}/environments/env4/modules/mod3/data':; + '#{@coderoot}/environments/env4/modules/mod3/functions':; + '#{@coderoot}/environments/env4/modules/mod3/not-lib':; + '#{@coderoot}/environments/env4/modules/mod3/not-lib/puppet':; + '#{@coderoot}/environments/env4/modules/mod3/not-lib/puppet/functions':; + '#{@coderoot}/environments/env4/modules/mod3/not-lib/puppet/functions/mod3':; + +# module mod4 none + '#{@coderoot}/environments/env4/modules/mod4':; + '#{@coderoot}/environments/env4/modules/mod4/manifests':; + '#{@coderoot}/environments/env4/modules/mod4/data':; + '#{@coderoot}/environments/env4/modules/mod4/functions':; + '#{@coderoot}/environments/env4/modules/mod4/lib':; + '#{@coderoot}/environments/env4/modules/mod4/lib/puppet':; + '#{@coderoot}/environments/env4/modules/mod4/lib/puppet/functions':; + '#{@coderoot}/environments/env4/modules/mod4/lib/puppet/functions/mod4':; +} + +## Global data provider config (hiera) +file { '#{@coderoot}/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + :backends: + - "yaml" + :logger: "console" + 
:hierarchy: + - "global" + + :yaml: + :datadir: "#{@coderoot}/hieradata" +', +} + +## facts file +file { '#{@coderoot}/facts.yaml': + ensure => file, + mode => "0644", + content => '--- + my_data_key: "my_data_value" +', +} + +file { '#{@coderoot}/hieradata/global.yaml': + ensure => file, + mode => "0644", + content => '--- + global_key: "global-hiera provided value for key" + another_global_key: "global-hiera provided value for key" + mod1::global_key: "global-hiera provided value for key" + mod2::global_key: "global-hiera provided value for key" + mod3::global_key: "global-hiera provided value for key" + mod4::global_key: "global-hiera provided value for key" +', +} + + +## Evironment data provider configuration +file { '#{@coderoot}/environments/production/environment.conf': + ensure => file, + mode => "0644", + content => 'environment_timeout = 0 +', +} + +file { '#{@coderoot}/environments/env1/environment.conf': + ensure => file, + mode => "0644", + content => 'environment_timeout = 0 +environment_data_provider = "hiera" +', +} + +file { '#{@coderoot}/environments/env2/environment.conf': + ensure => file, + mode => "0644", + content => 'environment_timeout = 0 +environment_data_provider = "function" +', +} + +file { '#{@coderoot}/environments/env3/environment.conf': + ensure => file, + mode => "0644", + content => 'environment_timeout = 0 +environment_data_provider = "function" +', +} + +file { '#{@coderoot}/environments/env4/environment.conf': + ensure => file, + mode => "0644", + content => 'environment_timeout = 0 +environment_data_provider = "none" +', +} + +# Environment hiera data provider +file { '#{@coderoot}/environments/production/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/production/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + global_key: "env-production hiera provided value" + environment_key: "env-production hiera provided value" +', +} + +file { '#{@coderoot}/environments/env1/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env1/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + global_key: "env-env1 hiera provided value" + environment_key: "env-env1 hiera provided value" +', +} + + +file { '#{@coderoot}/environments/env2/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env2/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + global_key: "env-env1 hiera provided value" + environment_key: "env-env1 hiera provided value" +', +} + + +file { '#{@coderoot}/environments/env3/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env3/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + global_key: "env-env1 hiera provided value" + environment_key: "env-env1 hiera provided value" +', +} + + +file { '#{@coderoot}/environments/env4/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env4/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + global_key: "env-env1 hiera provided value" + environment_key: "env-env1 hiera provided value" +', +} + +# Environment ruby function data provider +file { '#{@coderoot}/environments/production/lib/puppet/functions/environment/data.rb': + ensure => file, + mode 
=> "0644", + content => "Puppet::Functions.create_function(:'environment::data') do + def data() + { + 'environment_key': 'env-production-ruby-function data() provided value', + 'global_key': 'env-production-ruby-function data () provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env1/lib/puppet/functions/environment/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'environment::data') do + def data() + { + 'environment_key' => 'env-env1-ruby-function data() provided value', + 'global_key' => 'env-env1-ruby-function data () provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env2/lib/puppet/functions/environment/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'environment::data') do + def data() + { + 'environment_key' => 'env-env2-ruby-function data() provided value', + 'global_key' => 'env-env2-ruby-function data () provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env3/not-lib/puppet/functions/environment/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'environment::data') do + def data() + { + 'environment_key' => 'env-env3-ruby-function data() provided value', + 'global_key' => 'env-env3-ruby-function data () provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env4/lib/puppet/functions/environment/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'environment::data') do + def data() + { + 'environment_key' => 'env-env4-ruby-function data() provided value', + 'global_key' => 'env-env4-ruby-function data () provided value', + } + end +end +", +} + +# Environment puppet function data provider +file { '#{@coderoot}/environments/production/functions/environment/data.pp': + ensure => file, + mode => "0755", + content => 'function environment::data() { + { + "environment_key" => "env-production-puppet-function data() provided value", + "global_key" => "env-production-puppet-function data() provided value", + } +} +', +} + +file { '#{@coderoot}/environments/env1/functions/environment/data.pp': + ensure => file, + mode => "0755", + content => 'function environment::data() { + { + "environment_key" => "env-env1-puppet-function data() provided value", + "global_key" => "env-env1-puppet-function data() provided value", + } +} +', +} + +file { '#{@coderoot}/environments/env2/functions/environment/data.pp': + ensure => file, + mode => "0755", + content => 'function environment::data() { + { + "environment_key" => "env-env2-puppet-function data() provided value", + "global_key" => "env-env2-puppet-function data() provided value", + } +} +', +} + +file { '#{@coderoot}/environments/env3/functions/environment/data.pp': + ensure => file, + mode => "0755", + content => 'function environment::data() { + { + "environment_key" => "env-env3-puppet-function data() provided value", + "global_key" => "env-env3-puppet-function data() provided value", + } +} +', +} + +file { '#{@coderoot}/environments/env4/functions/environment/data.pp': + ensure => file, + mode => "0755", + content => 'function environment::data() { + { + "environment_key" => "env-env4-puppet-function data() provided value", + "global_key" => "env-env4-puppet-function data() provided value", + } +} +', +} + + +## Module data provider configuration +# Module hiera data provider +file { '#{@coderoot}/environments/production/modules/mod1/hiera.yaml': + ensure => file, + 
mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/production/modules/mod1/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod1::module_key": "module-production-mod1-hiera provided value" + "mod1::global_key": "module-production-mod1-hiera provided value" + "environment_key": "module-production-mod1-hiera provided value" + "global_key": "module-production-mod1-hiera provided value" +', +} + +file { '#{@coderoot}/environments/production/modules/mod2/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/production/modules/mod2/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod2::module_key": "module-production-mod2-hiera provided value" + "mod2::global_key": "module-production-mod2-hiera provided value" + "environment_key": "module-production-mod2-hiera provided value" + "global_key": "module-production-mod2-hiera provided value" +', +} + +file { '#{@coderoot}/environments/production/modules/mod3/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/production/modules/mod3/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod3::module_key": "module-production-mod3-hiera provided value" + "mod3::global_key": "module-production-mod3-hiera provided value" + "environment_key": "module-production-mod3-hiera provided value" + "global_key" => "module-production-mod3-hiera provided value" +', +} + +file { '#{@coderoot}/environments/production/modules/mod4/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/production/modules/mod4/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod4::module_key": "module-production-mod4-hiera provided value" + "mod4::global_key": "module-production-mod4-hiera provided value" + "environment_key": "module-production-mod4-hiera provided value" + "global_key": "module-production-mod4-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env1/modules/mod1/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env1/modules/mod1/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod1::module_key": "module-env1-mod1-hiera provided value" + "global_key": "module-env1-mod1-hiera provided value" + "environment_key": "module-env1-mod1-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env1/modules/mod2/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env1/modules/mod2/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod2::module_key": "module-env1-mod2-hiera provided value" + "global_key": "module-env1-mod2-hiera provided value" + "environment_key": "module-env1-mod2-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env1/modules/mod3/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env1/modules/mod3/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod3::module_key": "module-env1-mod3-hiera provided value" + "global_key": "module-env1-mod3-hiera provided value" + "environment_key": "module-env1-mod3-hiera provided value" +', +} + +file { 
'#{@coderoot}/environments/env1/modules/mod4/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env1/modules/mod4/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod4::module_key": "module-env1-mod4-hiera provided value" + "global_key": "module-env1-mod4-hiera provided value" + "environment_key": "module-env1-mod4-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env2/modules/mod1/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env2/modules/mod1/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod1::module_key": "module-env2-mod1-hiera provided value" + "global_key": "module-env2-mod1-hiera provided value" + "environment_key": "module-env2-mod1-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env2/modules/mod2/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env2/modules/mod2/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod2::module_key": "module-env2-mod2-hiera provided value" + "global_key": "module-env2-mod2-hiera provided value" + "environment_key": "module-env2-mod2-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env2/modules/mod3/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env2/modules/mod3/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod3::module_key": "module-env2-mod3-hiera provided value" + "global_key": "module-env2-mod3-hiera provided value" + "environment_key": "module-env2-mod3-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env2/modules/mod4/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env2/modules/mod4/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod4::module_key": "module-env2-mod4-hiera provided value" + "global_key": "module-env2-mod4-hiera provided value" + "environment_key": "module-env2-mod4-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env3/modules/mod1/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env3/modules/mod1/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod1::module_key": "module-env3-mod1-hiera provided value" + "global_key": "module-env3-mod1-hiera provided value" + "environment_key": "module-env3-mod1-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env3/modules/mod2/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env3/modules/mod2/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod2::module_key": "module-env3-mod2-hiera provided value" + "global_key": "module-env3-mod2-hiera provided value" + "environment_key": "module-env3-mod2-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env3/modules/mod3/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env3/modules/mod3/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod3::module_key": "module-env3-mod3-hiera provided value" + "global_key": "module-env3-mod3-hiera 
provided value" + "environment_key": "module-env3-mod3-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env3/modules/mod4/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env3/modules/mod4/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod4::module_key": "module-env3-mod4-hiera provided value" + "global_key": "module-env3-mod4-hiera provided value" + "environment_key": "module-env3-mod4-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env4/modules/mod1/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env4/modules/mod1/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod1::module_key": "module-env4-mod1-hiera provided value" + "global_key": "module-env4-mod1-hiera provided value" + "environment_key": "module-env4-mod1-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env4/modules/mod2/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env4/modules/mod2/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod2::module_key": "module-env4-mod2-hiera provided value" + "global_key": "module-env4-mod2-hiera provided value" + "environment_key": "module-env4-mod2-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env4/modules/mod3/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env4/modules/mod3/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod3::module_key": "module-env4-mod3-hiera provided value" + "global_key": "module-env4-mod3-hiera provided value" + "environment_key": "module-env4-mod3-hiera provided value" +', +} + +file { '#{@coderoot}/environments/env4/modules/mod4/hiera.yaml': + ensure => file, + mode => "0644", + content => '--- + version: 4 +', +} + +file { '#{@coderoot}/environments/env4/modules/mod4/data/common.yaml': + ensure => file, + mode => "0644", + content => '--- + "mod4::module_key": "module-env4-mod4-hiera provided value" + "global_key": "module-env4-mod4-hiera provided value" + "environment_key": "module-env4-mod4-hiera provided value" +', +} + +# Module ruby function data provider +file { '#{@coderoot}/environments/production/modules/mod1/lib/puppet/functions/mod1/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod1::data') do + def data() + { + 'mod1::module_key' => 'module-production-mod1-ruby-function provided value', + 'mod1::global_key' => 'module-production-mod1-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/production/modules/mod2/lib/puppet/functions/mod2/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod2::data') do + def data() + { + 'mod2::module_key' => 'module-production-mod2-ruby-function provided value', + 'mod2::global_key' => 'module-production-mod2-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/production/modules/mod3/not-lib/puppet/functions/mod3/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod3::data') do + def data() + { + 'mod3::module_key' => 'module-production-mod3-ruby-function provided value', + 'mod3::global_key' => 'module-production-mod3-ruby-function 
provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/production/modules/mod4/lib/puppet/functions/mod4/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod4::data') do + def data() + { + 'mod4::module_key' => 'module-production-mod4-ruby-function provided value', + 'mod4::global_key' => 'module-production-mod4-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env1/modules/mod1/lib/puppet/functions/mod1/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod1::data') do + def data() + { + 'mod1::module_key' => 'module-env1-mod1-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env1/modules/mod2/lib/puppet/functions/mod2/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod2::data') do + def data() + { + 'mod2::module_key' => 'module-env1-mod2-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env1/modules/mod3/not-lib/puppet/functions/mod3/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod3::data') do + def data() + { + 'mod3::module_key' => 'module-env1-mod3-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env1/modules/mod4/lib/puppet/functions/mod4/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod4::data') do + def data() + { + 'mod4::module_key' => 'module-env1-mod4-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env2/modules/mod1/lib/puppet/functions/mod1/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod1::data') do + def data() + { + 'mod1::module_key' => 'module-env2-mod1-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env2/modules/mod2/lib/puppet/functions/mod2/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod2::data') do + def data() + { + 'mod2::module_key' => 'module-env2-mod2-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env2/modules/mod3/not-lib/puppet/functions/mod3/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod3::data') do + def data() + { + 'mod3::module_key' => 'module-env2-mod3-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env2/modules/mod4/lib/puppet/functions/mod4/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod4::data') do + def data() + { + 'mod4::module_key' => 'module-env2-mod4-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env3/modules/mod1/lib/puppet/functions/mod1/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod1::data') do + def data() + { + 'mod1::module_key' => 'module-env3-mod1-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env3/modules/mod2/lib/puppet/functions/mod2/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod2::data') do + def data() + { + 'mod2::module_key' => 'module-env3-mod2-ruby-function provided value', + } + end +end +", +} + +file { 
'#{@coderoot}/environments/env3/modules/mod3/not-lib/puppet/functions/mod3/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod3::data') do + def data() + { + 'mod3::module_key' => 'module-env3-mod3-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env3/modules/mod4/lib/puppet/functions/mod4/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod4::data') do + def data() + { + 'mod4::module_key' => 'module-env3-mod4-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env4/modules/mod1/lib/puppet/functions/mod1/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod1::data') do + def data() + { + 'mod1::module_key' => 'module-env4-mod1-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env4/modules/mod2/lib/puppet/functions/mod2/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod2::data') do + def data() + { + 'mod2::module_key' => 'module-env4-mod2-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env4/modules/mod3/not-lib/puppet/functions/mod3/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod3::data') do + def data() + { + 'mod3::module_key' => 'module-env4-mod3-ruby-function provided value', + } + end +end +", +} + +file { '#{@coderoot}/environments/env4/modules/mod4/lib/puppet/functions/mod4/data.rb': + ensure => file, + mode => "0644", + content => "Puppet::Functions.create_function(:'mod4::data') do + def data() + { + 'mod4::module_key' => 'module-env4-mod4-ruby-function provided value', + } + end +end +", +} + +# Module puppet function data provider +file { '#{@coderoot}/environments/production/modules/mod1/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod1::data() { + { + 'mod1::module_key' => 'module-production-mod1-puppet-function provided value', + 'mod1::global_key' => 'module-production-mod1-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/production/modules/mod2/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod2::data() { + { + 'mod2::module_key' => 'module-production-mod2-puppet-function provided value', + 'mod2::global_key' => 'module-production-mod2-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/production/modules/mod3/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod3::data() { + { + 'mod3::module_key' => 'module-production-mod3-puppet-function provided value', + 'mod3::global_key' => 'module-production-mod3-puppet-funtion provided value', + } +} +", +} + +file { '#{@coderoot}/environments/production/modules/mod4/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod4::data() { + { + 'mod4::module_key' => 'module-production-mod4-puppet-function provided value', + 'mod4::global_key' => 'module-production-mod4-puppet-funtion provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env1/modules/mod1/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod1::data() { + { + 'mod1::module_key' => 'module-env1-mod1-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env1/modules/mod2/functions/data.pp': + ensure => file, + mode => 
"0644", + content => "function mod2::data() { + { + 'mod2::module_key' => 'module-env1-mod2-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env1/modules/mod3/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod3::data() { + { + 'mod3::module_key' => 'module-env1-mod3-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env1/modules/mod4/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod4::data() { + { + 'mod4::module_key' => 'module-env1-mod4-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env2/modules/mod1/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod1::data() { + { + 'mod1::module_key' => 'module-env2-mod1-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env2/modules/mod2/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod2::data() { + { + 'mod2::module_key' => 'module-env2-mod2-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env2/modules/mod3/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod3::data() { + { + 'mod3::module_key' => 'module-env2-mod3-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env2/modules/mod4/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod4::data() { + { + 'mod4::module_key' => 'module-env2-mod4-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env3/modules/mod1/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod1::data() { + { + 'mod1::module_key' => 'module-env3-mod1-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env3/modules/mod2/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod2::data() { + { + 'mod2::module_key' => 'module-env3-mod2-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env3/modules/mod3/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod3::data() { + { + 'mod3::module_key' => 'module-env3-mod3-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env3/modules/mod4/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod4::data() { + { + 'mod4::module_key' => 'module-env3-mod4-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env4/modules/mod1/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod1::data() { + { + 'mod1::module_key' => 'module-env4-mod1-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env4/modules/mod2/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod2::data() { + { + 'mod2::module_key' => 'module-env4-mod2-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env4/modules/mod3/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod3::data() { + { + 'mod3::module_key' => 'module-env4-mod3-puppet-function provided value', + } +} +", +} + +file { '#{@coderoot}/environments/env4/modules/mod4/functions/data.pp': + ensure => file, + mode => "0644", + content => "function mod4::data() { + { + 'mod4::module_key' => 'module-env4-mod4-puppet-function provided value', + } +} +", +} + +file { 
'#{@coderoot}/environments/production/manifests/site.pp': + ensure => file, + mode => "0644", + content => "node default { + include mod1 + include mod2 + include mod3 + include mod4 +} +", +} + +file { '#{@coderoot}/environments/env1/manifests/site.pp': + ensure => file, + mode => "0644", + content => "node default { + include mod1 + include mod2 + include mod3 + include mod4 +} +", +} + +file { '#{@coderoot}/environments/env2/manifests/site.pp': + ensure => file, + mode => "0644", + content => "node default { + include mod1 + include mod2 + include mod3 + include mod4 +} +", +} + +file { '#{@coderoot}/environments/env3/manifests/site.pp': + ensure => file, + mode => "0644", + content => "node default { + include mod1 + include mod2 + include mod3 + include mod4 +} +", +} + +file { '#{@coderoot}/environments/env4/manifests/site.pp': + ensure => file, + mode => "0644", + content => "node default { + include mod1 + include mod2 + include mod2 + include mod2 +} +", +} + +file { '#{@coderoot}/environments/production/modules/mod1/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod1 { + notice("hello from production-mod1") +} +', +} + +file { '#{@coderoot}/environments/production/modules/mod2/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod2 { + notice("hello from production-mod2") +} +', +} + +file { '#{@coderoot}/environments/production/modules/mod3/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod3 { + notice("hello from production-mod3") +} +', +} + +file { '#{@coderoot}/environments/production/modules/mod4/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod4 { + notice("hello from production-mod4") +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod1/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod1 { + notice("hello from env1-mod1") +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod2/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod2 { + notice("hello from env1-mod2") +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod3/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod3 { + notice("hello from env1-mod3") +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod4/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod4 { + notice("hello from env1-mod4") +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod1/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod1 { + notice("hello from env2-mod1") +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod2/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod2 { + notice("hello from env2-mod2") +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod3/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod3 { + notice("hello from env2-mod3") +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod4/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod4 { + notice("hello from env2-mod4") +} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod1/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod1 { + notice("hello from env3-mod1") +} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod2/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod2 { + notice("hello from env3-mod2") 
+} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod3/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod3 { + notice("hello from env3-mod3") +} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod4/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod4 { + notice("hello from env3-mod4") +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod1/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod1 { + notice("hello from env4-mod1") +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod2/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod2 { + notice("hello from env4-mod2") +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod3/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod3 { + notice("hello from env4-mod3") +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod4/manifests/init.pp': + ensure => file, + mode => "0644", + content => 'class mod4 { + notice("hello from env4-mod4") +} +', +} + +file { '#{@coderoot}/environments/production/modules/mod1/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod1", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "hiera" +} +', +} + +file { '#{@coderoot}/environments/production/modules/mod2/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod2", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/production/modules/mod3/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod3", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/production/modules/mod4/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod1", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "none" +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod1/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod1", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "hiera" +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod2/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod2", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod3/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod3", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": 
null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env1/modules/mod4/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod4", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "none" +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod1/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod1", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "hiera" +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod2/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod2", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod3/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod3", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env2/modules/mod4/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod4", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "none" +} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod1/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod1", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "hiera" +} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod2/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod2", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod3/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod3", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env3/modules/mod4/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod4", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "none" +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod1/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod1", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": 
"Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "hiera" +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod2/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod2", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod3/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod3", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "function" +} +', +} + +file { '#{@coderoot}/environments/env4/modules/mod4/metadata.json': + ensure => file, + mode => "0644", + content => '{ + "name": "tester-mod4", + "version": "0.1.0", + "author": "tester", + "summary": null, + "license": "Apache-2.0", + "source": "", + "project_page": null, + "issues_url": null, + "dependencies": [], + "data_provider": "none" +} +', +} +MANIFEST + + @env1puppetconfmanifest = < file, + mode => "0664", + content => "[server] +vardir = /opt/puppetlabs/server/data/puppetserver +logdir = /var/log/puppetlabs/puppetserver +rundir = /var/run/puppetlabs/puppetserver +pidfile = /var/run/puppetlabs/puppetserver/puppetserver.pid +codedir = #{@coderoot} + +[main] +environmentpath = #{@coderoot}/environments +hiera_config = #{@coderoot}/hiera.yaml +environment = env1 +server = #{master.connection.hostname} +", +} +MANI1 + + @env2puppetconfmanifest = < file, + mode => "0664", + content => "[server] +vardir = /opt/puppetlabs/server/data/puppetserver +logdir = /var/log/puppetlabs/puppetserver +rundir = /var/run/puppetlabs/puppetserver +pidfile = /var/run/puppetlabs/puppetserver/puppetserver.pid +codedir = #{@coderoot} + +[main] +environmentpath = #{@coderoot}/environments +hiera_config = #{@coderoot}/hiera.yaml +environment = env2 +server = #{master.connection.hostname} +", +} +MANI2 + + @env3puppetconfmanifest = < file, + mode => "0664", + content => "[server] +vardir = /opt/puppetlabs/server/data/puppetserver +logdir = /var/log/puppetlabs/puppetserver +rundir = /var/run/puppetlabs/puppetserver +pidfile = /var/run/puppetlabs/puppetserver/puppetserver.pid +codedir = #{@coderoot} + +[main] +environmentpath = #{@coderoot}/environments +hiera_config = #{@coderoot}/hiera.yaml +environment = env3 +server = #{master.connection.hostname} +", +} +MANI3 + + @env4puppetconfmanifest = < file, + mode => "0664", + content => "[server] +vardir = /opt/puppetlabs/server/data/puppetserver +logdir = /var/log/puppetlabs/puppetserver +rundir = /var/run/puppetlabs/puppetserver +pidfile = /var/run/puppetlabs/puppetserver/puppetserver.pid +codedir = #{@coderoot} + +[main] +environmentpath = #{@coderoot}/environments +hiera_config = #{@coderoot}/hiera.yaml +environment = env4 +server = #{master.connection.hostname} +", +} +MANI4 + + @encmanifest = < file, + mode => "0755", + content => "#!#{master['privatebindir']}/ruby +nodename = ARGV.shift +node2env = { + '#{@node1}' => \\\"---\\\\n environment: env2\\\\n\\\", + '#{@node2}' => \\\"---\\\\n environment: env3\\\\n\\\", +} +puts (\\\"\#{node2env[nodename]}\\\" ||'') +", +} +file { '#{@confdir}/puppet.conf' : + ensure => file, + mode => "0664", + content => "[server] +vardir = /opt/puppetlabs/server/data/puppetserver 
+logdir = /var/log/puppetlabs/puppetserver +rundir = /var/run/puppetlabs/puppetserver +pidfile = /var/run/puppetlabs/puppetserver/puppetserver.pid +codedir = #{@coderoot} + +[server] +node_terminus = exec +external_nodes = #{@coderoot}/enc.rb + +[main] +environmentpath = #{@coderoot}/environments +hiera_config = #{@coderoot}/hiera.yaml +server = #{master.connection.hostname} +", +} +MANIENC + +teardown do + on(master, "rm -f /etc/puppetlabs/puppet/ssl/certs/#{@node1}.pem") + on(master, "rm -f /etc/puppetlabs/puppet/ssl/certs/#{@node2}.pem") + on(master, "rm -f /etc/puppetlabs/puppet/ssl/ca/signed/#{@node1}.pem") + on(master, "rm -f /etc/puppetlabs/puppet/ssl/ca/signed/#{@node2}.pem") + on(master, "rm -f /etc/puppetlabs/puppet/ssl/private_keys/#{@node1}.pem") + on(master, "rm -f /etc/puppetlabs/puppet/ssl/private_keys/#{@node2}.pem") + on(master, "rm -f /etc/puppetlabs/puppet/ssl/public_keys/#{@node1}.pem") + on(master, "rm -f /etc/puppetlabs/puppet/ssl/public_keys/#{@node2}.pem") +end + +step 'apply main manifest' +apply_manifest_on(master, @manifest, :catch_failures => true) + +step 'start puppet server' +with_puppet_running_on master, @master_opts, @coderoot do + + step "global_key" + rg = on(master, puppet('lookup', 'global_key')) + result = rg.stdout + assert_match( + /global-hiera/, + result, + "global_key lookup failed, expected 'global-hiera'" + ) + + step "production environment_key not provided" + on(master, puppet('lookup', 'enviroment_key'), :acceptable_exit_codes => [1]) + + step "environment_key from environment env1" + re1 = on(master, puppet('lookup', '--environment env1', 'environment_key')) + result = re1.stdout + assert_match( + /env-env1 hiera/, + result, + "env1 environment_key lookup failed, expected 'env-env1 hiera'" + ) + + step "environment_key from environment env2" + re2 = on(master, puppet('lookup', '--environment env2', 'environment_key')) + result = re2.stdout + assert_match( + /env-env2-ruby-function/, + result, + "env2 environment_key lookup failed, expected 'env-env2-puppet-function'" + ) + + step "environment_key from environment env3" + re3 = on(master, puppet('lookup', '--environment env3', 'environment_key')) + result = re3.stdout + assert_match( + /env-env3-puppet-function/, + result, + "env3 environment_key lookup failed, expected 'env-env2-ruby-function data() provided value'" + ) + + step "environment_key from environment env4" + on(master, puppet('lookup', '--environment env4', 'environment_key'), :acceptable_exit_codes => [1]) + + step "production mod1 module_key" + repm1 = on(master, puppet('lookup', 'mod1::module_key')) + result = repm1.stdout + assert_match( + /module-production-mod1-hiera/, + result, + "production mod1 module_key lookup failed, expected 'module-production-mod1-hiera'" + ) + + step "production mod2 module_key" + repm2 = on(master, puppet('lookup', 'mod2::module_key')) + result = repm2.stdout + assert_match( + /module-production-mod2-ruby-function/, + result, + "production mod2 module_key lookup failed, expected 'module-production-mod2-ruby-function'" + ) + + step "production mod3 module_key" + repm3 = on(master, puppet('lookup', 'mod3::module_key')) + result = repm3.stdout + assert_match( + /module-production-mod3-puppet-function/, + result, + "production mod3 module_key lookup failed, expected 'module-production-mod3-puppet-function'" + ) + + step "production mod4 module_key" + on(master, puppet('lookup', 'mod4::module_key'), :acceptable_exit_codes => [1]) + + step "env1 mod1 module_key" + re1m1 = on(master, puppet('lookup', 
'--environment env1', 'mod1::module_key')) + result = re1m1.stdout + assert_match( + /module-env1-mod1-hiera/, + result, + "env1 mod1 module_key lookup failed, expected 'module-env1-mod1-hiera'" + ) + + step "env1 mod2 module_key" + re1m2 = on(master, puppet('lookup', '--environment env1', 'mod2::module_key')) + result = re1m2.stdout + assert_match( + /module-env1-mod2-ruby-function/, + result, + "env1 mod2 module_key lookup failed, expected 'module-env1-mod2-ruby-function'" + ) + + step "env1 mod3 module_key" + re1m3 = on(master, puppet('lookup', '--environment env1', 'mod3::module_key')) + result = re1m3.stdout + assert_match( + /module-env1-mod3-puppet-function/, + result, + "env1 mod3 module_key lookup failed, expected 'module-env1-mod3-puppet-function'" + ) + + step "env1 mod4 module_key" + on(master, puppet('lookup', '--environment env1', 'mod4::module_key'), :acceptable_exit_codes => [1]) + + step "env2 mod1 module_key" + re2m1 = on(master, puppet('lookup', '--environment env2', 'mod1::module_key')) + result = re2m1.stdout + assert_match( + /module-env2-mod1-hiera/, + result, + "env2 mod1 module_key lookup failed, expected 'module-env2-mod1-hiera'" + ) + + step "env2 mod2 module_key" + re2m2 = on(master, puppet('lookup', '--environment env2', 'mod2::module_key')) + result = re2m2.stdout + assert_match( + /module-env2-mod2-ruby-function/, + result, + "env2 mod2 module_key lookup failed, expected 'module-env2-mod2-ruby-function'" + ) + + step "env2 mod3 module_key" + re2m3 = on(master, puppet('lookup', '--environment env2', 'mod3::module_key')) + result = re2m3.stdout + assert_match( + /module-env2-mod3-puppet-function/, + result, + "env2 mod3 module_key lookup failed, expected 'module-env2-mod3-puppet-function'" + ) + + step "env2 mod4 module_key" + on(master, puppet('lookup', '--environment env2', 'mod4::module_key'), :acceptable_exit_codes => [1]) + + step "env3 mod1 module_key" + re3m1 = on(master, puppet('lookup', '--environment env3', 'mod1::module_key')) + result = re3m1.stdout + assert_match( + /module-env3-mod1-hiera/, + result, + "env3 mod1 module_key lookup failed, expected 'module-env3-mod1-hiera'" + ) + + step "env3 mod2 module_key" + re3m2 = on(master, puppet('lookup', '--environment env3', 'mod2::module_key')) + result = re3m2.stdout + assert_match( + /module-env3-mod2-ruby-function/, + result, + "env3 mod2 module_key lookup failed, expected 'module-env3-mod2-ruby-function'" + ) + + step "env3 mod3 module_key" + re3m3 = on(master, puppet('lookup', '--environment env3', 'mod3::module_key')) + result = re3m3.stdout + assert_match( + /module-env3-mod3-puppet-function/, + result, + "env3 mod3 module_key lookup failed, expected 'module-env3-mod3-puppet-function'" + ) + + step "env3 mod4 module_key" +# re3m4 = on(master, puppet('lookup', '--environment env3', 'mod4::module_key'), :acceptable_exit_codes => [1]) + + step "env4 mod1 module_key" + re4m1 = on(master, puppet('lookup', '--environment env4', 'mod1::module_key')) + result = re4m1.stdout + assert_match( + /module-env4-mod1-hiera/, + result, + "env4 mod2 environent_key lookup failed, expected 'module-env4-hiera'" + ) + + + step "env4 mod2 module_key" + re4m2 = on(master, puppet('lookup', '--environment env4', 'mod2::module_key')) + result = re4m2.stdout + assert_match( + /module-env4-mod2-ruby-function/, + result, + "env4 mod2 environent_key lookup failed, expected 'module-env4-mod2-ruby-function'" + ) + + step "env4 mod3 module_key" + re4m3 = on(master, puppet('lookup', '--environment env4', 'mod3::module_key')) + 
result = re4m3.stdout + assert_match( + /module-env4-mod3-puppet-function/, + result, + "env4 mod3 module_key lookup failed, expected 'module-env4-mod3-puppet-function'" + ) + + step "env4 mod4 module_key" + on(master, puppet('lookup', '--environment env4', 'mod4::module_key'), :acceptable_exit_codes => [1]) + + step "global key explained" + rxg = on(master, puppet('lookup', '--explain', 'global_key')) + result = rxg.stdout + assert_match( + /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*Found key.*global-hiera/, + result, + "global_key explained failed, expected /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*Found key.*global-hiera/" + ) + + step "environment env1 environment_key explained" + rxe1 = on(master, puppet('lookup', '--explain', '--environment env1', 'environment_key')) + result = rxe1.stdout + assert_match( + /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key/, + result, + "environment env1 enviroment_key lookup failed, expected /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key/" + ) + assert_match( + /common.*\s*.*env-env1 hiera/, + result, + "environment env1 enviroment_key lookup failed, expected /common.*\s*.*env-env1 hiera/" + ) + + step "environment env2 environment_key explained" + rxe2 = on(master, puppet('lookup', '--explain', '--environment env2', 'environment_key')) + result = rxe2.stdout + assert_match( + /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key/, + result, + "environment env2 enviroment_key lookup failed, expected /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key/" + ) + assert_match( + /eprecated API function.*\s*.*env-env2-ruby-function/, + result, + "environment env2 enviroment_key lookup failed, expected /eprecated API function.*\s*.*env-env2-ruby-function/" + ) + + step "environment env3 environment_key explained" + rxe3 = on(master, puppet('lookup', '--explain', '--environment env3', 'environment_key')) + result = rxe3.stdout + assert_match( + /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key/, + result, + "environment env3 enviroment_key lookup failed, expected /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key/" + ) + assert_match( + /eprecated API function.*\s*.*env-env3-puppet-function/, + result, + "environment env3 enviroment_key lookup failed, expected /eprecated API function.*\s*.*env-env3-puppet-function/" + ) + + step "environment env4 environment_key explained" + rxe4 = on(master, puppet('lookup', '--explain', '--environment env4', 'environment_key')) + result = rxe4.stdout + assert_match( + /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key.*environment_key/, + result, + "environment env4 environment_key lookup failed expected /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key.*environment_key.*\s.*did not find a value.*/" + ) + + step "environment env1 mod4::module_key explained" + rxe1m4 = on(master, puppet('lookup', '--explain', '--environment env1', 'mod4::module_key')) + result = rxe1m4.stdout + assert_match( + /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key.*\s*Env.*\s*.*env1\/hiera.yaml\"\s*Hier.*common\"\s*Path.*\s*Orig.*\s*No such key.*\s*Module data provider.*not found\s*.*did not find a value.*/, + result, + "environment env1 mod4::module_key lookup explained failed." 
+ ) + + step "environment env2 mod3::module_key explained" + rxe2m3 = on(master, puppet('lookup', '--explain', '--environment env2', 'mod3::module_key')) + result = rxe2m3.stdout + assert_match( + /Global Data Provider.*Using configuration.*Hierarchy entry.*Path.*No such key/m, + result, + "global env2 mod3::module_key lookup --explain had correct output" + ) + assert_match( + /Environment Data Provider.*Deprecated.*No such key/m, + result, + "environment env2 mod3::module_key lookup --explain had correct output" + ) + assert_match( + /Module.*Data Provider.*Deprecated API function "mod3::data".*Found key.*module-env2-mod3-puppet-function provided value/m, + result, + "module env2 mod3::module_key lookup --explain had correct output" + ) + + step "environment env3 mod2::module_key explained" + rxe3m2 = on(master, puppet('lookup', '--explain', '--environment env3', 'mod2::module_key')) + result = rxe3m2.stdout + assert_match( + /Global Data Provider.*Using configuration.*Hierarchy entry.*Path.*No such key/m, + result, + "global env2 mod3::module_key lookup --explain had correct output" + ) + assert_match( + /Environment Data Provider.*Deprecated.*No such key/m, + result, + "environment env2 mod3::module_key lookup --explain had correct output" + ) + assert_match( + /Module.*Data Provider.*Deprecated API function "mod2::data".*Found key.*module-env3-mod2-ruby-function provided value/m, + result, + "module env2 mod3::module_key lookup --explain had correct output" + ) + + step "environment env4 mod1::module_key explained" + rxe4m1 = on(master, puppet('lookup', '--explain', '--environment env4', 'mod1::module_key')) + result = rxe4m1.stdout + assert_match( + /Global Data Provider.*\s*Using.*\s*Hier.*\s*Path.*\s*Orig.*\s*No such key.*\s*Module.*Data Provider.*\s*Using.*\s*Hier.*common\"\s*Path.*\s*Orig.*\s*Found key.*module-env4-mod1-hiera/, + result, + "environment env4 mod1::module_key lookup failed." 
+ ) + + step 'apply env1 puppet.conf manifest' + apply_manifest_on(master, @env1puppetconfmanifest, :catch_failures => true) + + step "puppet.conf specified environment env1 environment_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'environment_key')) + result = r.stdout + assert_match( + /env-env1 hiera/, + result, + "puppet.conf specified environment env1, environment_key lookup failed, expected /env-env1 hiera/" + ) + + step "puppet.conf specified environment env1 mod4::module_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'mod4::module_key'), :acceptable_exit_codes => [1]) + + step 'apply env2 puppet.conf manifest' + apply_manifest_on(master, @env2puppetconfmanifest, :catch_failures => true) + + step "puppet.conf specified environment env2 environment_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'environment_key')) + result = r.stdout + assert_match( + /env-env2-ruby-function/, + result, + "puppet.conf specified environment env2, environment_key lookup failed, expected /env-env2-ruby-function/" + ) + + step "puppet.conf specified environment env2 mod3::module_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'mod3::module_key')) + result = r.stdout + assert_match( + /module-env2-mod3-puppet-function/, + result, + "puppet.conf specified environment env2 mod3::module_key lookup failed, expected /module-env2-mod3-puppet-function/" + ) + + step 'apply env3 puppet.conf manifest' + apply_manifest_on(master, @env3puppetconfmanifest, :catch_failures => true) + + step "puppet.conf specified environment env3 environment_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'environment_key')) + result = r.stdout + assert_match( + /env-env3-puppet-function/, + result, + "puppet.conf specified environment env3, environment_key lookup failed, expected /env-env3-puppet-function/" + ) + + step "puppet.conf specified environment env3 mod2::module_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'mod2::module_key')) + result = r.stdout + assert_match( + /module-env3-mod2-ruby-function/, + result, + "puppet.conf specified environment env3 mod2::module_key lookup failed, expected /module-env3-mod2-ruby-function/" + ) + + step 'apply env4 puppet.conf manifest' + apply_manifest_on(master, @env4puppetconfmanifest, :catch_failures => true) + + step "puppet.conf specified environment env4 environment_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'environment_key'), :acceptable_exit_codes => [1]) + + step "puppet.conf specified environment env4 mod1::module_key" + r = on(master, puppet('lookup', "--confdir #{@confdir}", 'mod1::module_key')) + result = r.stdout + assert_match( + /module-env4-mod1-hiera/, + result, + "puppet.conf specified environment env4 mod1::module_key lookup failed, expected /module-env4-mod1-hiera/" + ) + + step 'apply enc manifest' + apply_manifest_on(master, @encmanifest, :catch_failures => true) + + step "--compile uses environment specified in ENC" + r = on(master, puppet('lookup', '--compile', "--node #{@node1}", "--confdir #{@confdir}", "--facts #{@coderoot}/facts.yaml", 'environment_key')) + result = r.stderr + assert_match( + /CA is not available/, + result, + "lookup in ENC specified environment failed" + ) + + step "handle certificate" + on(master, "puppetserver ca generate --certname #{@node1}") + on(master, "puppetserver ca generate --certname #{@node2}") + on(master, "mkdir -p #{@testroot}/puppet/ssl/certs") + on(master, "mkdir -p
#{@testroot}/puppet/ssl/private_keys") + on(master, "cp -a /etc/puppetlabs/puppet/ssl/certs/ca.pem #{@testroot}/puppet/ssl/certs") + on(master, "cp -a /etc/puppetlabs/puppet/ssl/crl.pem #{@testroot}/puppet/ssl") + on(master, "cp -a /etc/puppetlabs/puppet/ssl/private_keys/#{master.connection.hostname}.pem #{@testroot}/puppet/ssl/private_keys") + on(master, "cp -a /etc/puppetlabs/puppet/ssl/certs/#{master.connection.hostname}.pem #{@testroot}/puppet/ssl/certs") + + step "--compile uses environment specified in ENC" + r = on(master, puppet('lookup', '--compile', "--node #{@node1}", "--confdir #{@confdir}", "--facts #{@coderoot}/facts.yaml", 'environment_key')) + result = r.stdout + assert_match( + /env-env2-ruby-function/, + result, + "lookup in ENC specified environment failed" + ) + + step "without --compile does not use environment specified in ENC" + r = on(master, puppet('lookup', "--node #{@node1}", "--confdir #{@confdir}", "--facts #{@coderoot}/facts.yaml", 'environment_key')) + result = r.stdout + assert_match( + /env-production hiera provided value/, + result, + "lookup in production environment failed" + ) + + step "lookup fails when there are no facts available" + r = on(master, puppet('lookup', '--compile', "--node #{@node1}", "--confdir #{@confdir}", 'environment_key'), :acceptable_exit_codes => [1]) + result = r.stderr + assert_match( + /No facts available/, + result, + "Expected to raise when there were no facts available." + ) +end diff --git a/acceptance/tests/pluginsync/3935_pluginsync_should_follow_symlinks.rb b/acceptance/tests/pluginsync/3935_pluginsync_should_follow_symlinks.rb index 36273b73763..40b6e2e83c6 100644 --- a/acceptance/tests/pluginsync/3935_pluginsync_should_follow_symlinks.rb +++ b/acceptance/tests/pluginsync/3935_pluginsync_should_follow_symlinks.rb @@ -1,5 +1,9 @@ test_name "pluginsync should not error when modulepath is a symlink and no modules have plugin directories" +tag 'audit:high', + 'audit:integration', + 'server' + step "Create a modulepath directory which is a symlink and includes a module without facts.d or lib directories" basedir = master.tmpdir("symlink_modulepath") @@ -37,8 +41,9 @@ with_puppet_running_on master, master_opts, basedir do agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}")) - assert_no_match(/Could not retrieve information from environment production source\(s\) puppet:\/\/\/pluginfacts/, stderr) - assert_no_match(/Could not retrieve information from environment production source\(s\) puppet:\/\/\/plugins/, stderr) + on(agent, puppet('agent', "-t")) do |result| + refute_match(/Could not retrieve information from environment production source\(s\) puppet:\/\/\/pluginfacts/, result.stderr) + refute_match(/Could not retrieve information from environment production source\(s\) puppet:\/\/\/plugins/, result.stderr) + end end end diff --git a/acceptance/tests/pluginsync/4420_pluginfacts_should_be_resolvable_on_agent.rb b/acceptance/tests/pluginsync/4420_pluginfacts_should_be_resolvable_on_agent.rb new file mode 100644 index 00000000000..1305417b9be --- /dev/null +++ b/acceptance/tests/pluginsync/4420_pluginfacts_should_be_resolvable_on_agent.rb @@ -0,0 +1,130 @@ +test_name "Pluginsync'ed external facts should be resolvable on the agent" do + tag 'audit:high', + 'audit:integration' + +# +# This test is intended to ensure that external facts downloaded onto an agent via +# pluginsync are resolvable. In Linux, the external fact should have the same +# permissions as its source on the master. 
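+# As a rough illustration only (not one of the fixtures below), an executable
+# external fact is just a script that prints key=value pairs; a hypothetical
+# unix_external_fact.sh could be as small as:
+#   #!/bin/sh
+#   echo "foo=bar"
+# (with a .bat equivalent along the lines of "@echo foo=bar"), which is why the
+# assertions below expect the agent output to contain "foo is bar".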
+# + + step "Create a codedir with a manifest and test module with external fact" + codedir = master.tmpdir('4420-codedir') + + site_manifest_content = < true) +File { + ensure => directory, + mode => "0755", + owner => #{master.puppet['user']}, + group => #{master.puppet['group']}, +} + +file { + '#{codedir}':; + '#{codedir}/environments':; + '#{codedir}/environments/production':; + '#{codedir}/environments/production/manifests':; + '#{codedir}/environments/production/modules':; + '#{codedir}/environments/production/modules/mymodule':; + '#{codedir}/environments/production/modules/mymodule/manifests':; + '#{codedir}/environments/production/modules/mymodule/facts.d':; +} + +file { '#{codedir}/environments/production/manifests/site.pp': + ensure => file, + content => '#{site_manifest_content}', +} + +file { '#{codedir}/environments/production/modules/mymodule/manifests/init.pp': + ensure => file, + content => 'class mymodule {}', +} + +file { '#{codedir}/environments/production/modules/mymodule/facts.d/unix_external_fact.sh': + ensure => file, + mode => '755', + content => '#{unix_fact}', +} +file { '#{codedir}/environments/production/modules/mymodule/facts.d/win_external_fact.bat': + ensure => file, + mode => '644', + content => '#{win_fact}', +} +MANIFEST + + master_opts = { + 'main' => { + 'environmentpath' => "#{codedir}/environments" + } + } + + with_puppet_running_on(master, master_opts, codedir) do + agents.each do |agent| + factsd = agent.tmpdir('facts.d') + pluginfactdest = agent.tmpdir('facts.d') + + teardown do + on(master, "rm -rf '#{codedir}'") + on(agent, "rm -rf '#{factsd}' '#{pluginfactdest}'") + end + + step "Pluginsync the external fact to the agent and ensure it resolves correctly" do + on(agent, puppet('agent', '-t', '--pluginfactdest', factsd), :acceptable_exit_codes => [2]) do |result| + assert_match(/foo is bar/, result.stdout) + end + end + step "Use plugin face to download to the agent" do + on(agent, puppet('plugin', 'download', '--pluginfactdest', pluginfactdest)) do |result| + assert_match(/Downloaded these plugins: .*external_fact/, result.stdout) unless agent['locale'] == 'ja' + end + end + + step "Ensure it resolves correctly" do + on(agent, puppet('apply', '--pluginfactdest', pluginfactdest, '-e', "'notify { \"foo is ${foo}\": }'")) do |result| + assert_match(/foo is bar/, result.stdout) + end + end + # Linux specific tests + next if agent['platform'] =~ /windows/ + + step "In Linux, ensure the pluginsync'ed external fact has the same permissions as its source" do + on(agent, puppet('resource', "file '#{factsd}/unix_external_fact.sh'")) do |result| + assert_match(/0755/, result.stdout) + end + end + step "In Linux, ensure puppet apply uses the correct permissions" do + test_source = File.join('/', 'tmp', 'test') + on(agent, puppet('apply', "-e \"file { '#{test_source}': ensure => file, mode => '0456' }\"")) + + { 'source_permissions => use,' => /0456/, + 'source_permissions => ignore,' => /0644/, + '' => /0644/ + }.each do |source_permissions, mode| + on(agent, puppet('apply', "-e \"file { '/tmp/test_target': ensure => file, #{source_permissions} source => '#{test_source}' }\"")) + on(agent, puppet('resource', "file /tmp/test_target")) do |result| + assert_match(mode, result.stdout) + end + + on(agent, "rm -f /tmp/test_target") + end + end + end + end +end diff --git a/acceptance/tests/pluginsync/4847_pluginfacts_should_be_resolvable_from_applications.rb b/acceptance/tests/pluginsync/4847_pluginfacts_should_be_resolvable_from_applications.rb new file mode 
100644 index 00000000000..762ef2516e4 --- /dev/null +++ b/acceptance/tests/pluginsync/4847_pluginfacts_should_be_resolvable_from_applications.rb @@ -0,0 +1,70 @@ +test_name "Pluginsync'ed custom facts should be resolvable during application runs" do + + tag 'audit:high', + 'audit:integration' + + # + # This test is intended to ensure that custom facts downloaded onto an agent via + # pluginsync are resolvable by puppet applications besides agent/apply. + # + + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + tmp_environment = mk_tmp_environment_with_teardown(master, 'resolve') + master_module_dir = "#{environmentpath}/#{tmp_environment}/modules/module_name" + master_type_dir = "#{master_module_dir}/lib/puppet/type" + master_module_type_file = "#{master_type_dir}/test4847.rb" + master_provider_dir = "#{master_module_dir}/lib/puppet/provider/test4847" + master_provider_file = "#{master_provider_dir}/only.rb" + master_facter_dir = "#{master_module_dir}/lib/facter" + master_facter_file = "#{master_facter_dir}/foo.rb" + on(master, "mkdir -p '#{master_type_dir}' '#{master_provider_dir}' '#{master_facter_dir}'") + + teardown do + on(master, "rm -rf '#{master_module_dir}'") + + # Remove all traces of the last used environment + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + + test_type = <<-TYPE + Puppet::Type.newtype(:test4847) do + newparam(:name, :namevar => true) + end + TYPE + create_remote_file(master, master_module_type_file, test_type) + + test_provider = <<-PROVIDER + Puppet::Type.type(:test4847).provide(:only) do + def self.instances + warn "fact foo=\#{Facter.value('foo')}" + [] + end + end + PROVIDER + create_remote_file(master, master_provider_file, test_provider) + + foo_fact_content = <<-FACT_FOO + Facter.add('foo') do + setcode do + 'bar' + end + end + FACT_FOO + create_remote_file(master, master_facter_file, foo_fact_content) + on(master, "chmod 755 '#{master_module_type_file}' '#{master_provider_file}' '#{master_facter_file}'") + + with_puppet_running_on(master, {}) do + agents.each do |agent| + on(agent, puppet("agent -t --environment #{tmp_environment}")) + on(agent, puppet('resource test4847')) do |result| + assert_match(/fact foo=bar/, result.stderr) + end + end + end +end diff --git a/acceptance/tests/pluginsync/7316_apps_should_be_available_via_pluginsync.rb b/acceptance/tests/pluginsync/7316_apps_should_be_available_via_pluginsync.rb index 35a47b3c4e2..13f6836d39f 100644 --- a/acceptance/tests/pluginsync/7316_apps_should_be_available_via_pluginsync.rb +++ b/acceptance/tests/pluginsync/7316_apps_should_be_available_via_pluginsync.rb @@ -1,34 +1,38 @@ -test_name "the pluginsync functionality should sync app definitions, and they should be runnable afterwards" +test_name 'the pluginsync functionality should sync app definitions, and they should be runnable afterwards' do -# -# This test is intended to ensure that pluginsync syncs app definitions to the agents. -# Further, the apps should be runnable on the agent after the sync has occurred. -# + tag 'audit:high', + 'audit:integration' -require 'puppet/acceptance/temp_file_utils' + # + # This test is intended to ensure that pluginsync syncs app definitions to the agents. + # Further, the apps should be runnable on the agent after the sync has occurred. 
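+  # In outline: the application file lives at
+  #   <modulepath>/<app_name>/lib/puppet/application/<app_name>.rb on the master,
+  # pluginsync copies it into the agent's libdir under puppet/application/, and
+  # running `puppet <app_name>` against that libdir should then work.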
+ # + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils -extend Puppet::Acceptance::TempFileUtils + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils -initialize_temp_dirs() + tmp_environment = mk_tmp_environment_with_teardown(master, 'app') + master_module_dir = "#{environmentpath}/#{tmp_environment}/modules" + on(master, "mkdir -p '#{master_module_dir}'") -all_tests_passed = false + teardown do + on(master, "rm -rf '#{master_module_dir}'") -############################################################################### -# BEGIN TEST LOGIC -############################################################################### - -# create some vars to point to the directories that we're going to point the master/agents at -environments_dir = "environments" -master_module_dir = "#{environments_dir}/production/modules" -agent_lib_dir = "agent_lib" - -app_name = "superbogus" -app_desc = "a simple %1$s for testing %1$s delivery via plugin sync" -app_output = "Hello from the #{app_name} %s" + # Remove all traces of the last used environment + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end -master_module_file_content = {} + app_name = "superbogus" + app_desc = "a simple application for testing application delivery via plugin sync" + app_output = "Hello from the #{app_name} application" -master_module_file_content["application"] = <<-HERE + master_module_file_content = <<-HERE require 'puppet/application' class Puppet::Application::#{app_name.capitalize} < Puppet::Application @@ -36,113 +40,70 @@ class Puppet::Application::#{app_name.capitalize} < Puppet::Application def help <<-HELP -puppet-#{app_name}(8) -- #{app_desc % "application"} +puppet-#{app_name}(8) -- #{app_desc} ======== HELP end def main() - puts("#{app_output % "application"}") + puts("#{app_output}") end end -HERE - - -# this begin block is here for handling temp file cleanup via an "ensure" block -# at the very end of the test. 
-begin - - modes = ["application"] - - modes.each do |mode| - - # here we create a custom app, which basically doesn't do anything except - # for print a hello-world message - agent_module_app_file = "#{agent_lib_dir}/puppet/#{mode}/#{app_name}.rb" - master_module_app_file = "#{master_module_dir}/#{app_name}/lib/puppet/#{mode}/#{app_name}.rb" - - - # copy all the files to the master - step "write our simple module out to the master" do - create_test_file(master, master_module_app_file, master_module_file_content[mode], :mkdirs => true) - end - - step "verify that the app file exists on the master" do - unless test_file_exists?(master, master_module_app_file) then - fail_test("Failed to create app file '#{get_test_file_path(master, master_module_app_file)}' on master") - end - end + HERE + + # here we create a custom app, which basically doesn't do anything except + # for print a hello-world message + # + master_module_app_path = "#{master_module_dir}/#{app_name}/lib/puppet/application" + master_module_app_file = "#{master_module_app_path}/#{app_name}.rb" + on(master, "mkdir -p '#{master_module_app_path}'") + create_remote_file(master, master_module_app_file, master_module_file_content) + on(master, "chmod 755 '#{master_module_app_file}'") + + step "start the master" do + with_puppet_running_on(master, {}) do + agents.each do |agent| - master_opts = { - 'main' => { - 'environmentpath' => "#{get_test_file_path(master, environments_dir)}", - } - } - step "start the master" do - with_puppet_running_on master, master_opts do + agent_lib_dir = agent.tmpdir('agent_lib_sync') + agent_module_app_file = "#{agent_lib_dir}/puppet/application/#{app_name}.rb" + teardown do + on(agent, "rm -rf '#{agent_lib_dir}'") + end # the module files shouldn't exist on the agent yet because they haven't been synced step "verify that the module files don't exist on the agent path" do - agents.each do |agent| - if test_file_exists?(agent, agent_module_app_file) then - fail_test("app file already exists on agent: '#{get_test_file_path(agent, agent_module_app_file)}'") - end + if file_exists?(agent, agent_module_app_file) + fail_test("app file already exists on agent: '#{agent_module_app_file}'") end end step "run the agent" do - agents.each do |agent| - on(agent, puppet('agent', - "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\" ", - "--test --trace --server #{master}") - ) + on(agent, puppet("agent --libdir='#{agent_lib_dir}' --test --environment '#{tmp_environment}'")) do |result| + refute_match( + /The \`source_permissions\` parameter is deprecated/, + result.stderr, + "pluginsync should not get a deprecation warning for source_permissions") end end - end - end - - step "verify that the module files were synced down to the agent" do - agents.each do |agent| - unless test_file_exists?(agent, agent_module_app_file) then - fail_test("The app file we expect was not not synced to agent: '#{get_test_file_path(agent, agent_module_app_file)}'") + step "verify that the module files were synced down to the agent" do + unless file_exists?(agent, agent_module_app_file) + fail_test("The app file we expect was not not synced to agent: '#{agent_module_app_file}'") + end end - end - end - step "verify that the application shows up in help" do - agents.each do |agent| - on(agent, PuppetCommand.new(:help, "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\"")) do - assert_match(/^\s+#{app_name}\s+#{app_desc % mode}/, result.stdout) + step "verify that the application shows up in help" do + on(agent, 
PuppetCommand.new(:help, "--libdir='#{agent_lib_dir}'")) do |result| + assert_match(/^\s+#{app_name}\s+#{app_desc}/, result.stdout) + end end - end - end - step "verify that we can run the application" do - agents.each do |agent| - on(agent, PuppetCommand.new(:"#{app_name}", "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\"")) do - assert_match(/^#{app_output % mode}/, result.stdout) + step "verify that we can run the application" do + on(agent, PuppetCommand.new(:"#{app_name}", "--libdir='#{agent_lib_dir}'")) do |result| + assert_match(/^#{app_output}/, result.stdout) + end end end end - - step "clear out the libdir on the agents in preparation for the next test" do - agents.each do |agent| - on(agent, "rm -rf #{get_test_file_path(agent, agent_lib_dir)}/*") - end - end - - end - - all_tests_passed = true - -ensure - ########################################################################################## - # Clean up all of the temp files created by this test. It would be nice if this logic - # could be handled outside of the test itself; I envision a stanza like this one appearing - # in a very large number of the tests going forward unless it is handled by the framework. - ########################################################################################## - if all_tests_passed then - remove_temp_dirs() end end diff --git a/acceptance/tests/pluginsync/7316_faces_with_app_stubs_should_be_available_via_pluginsync.rb b/acceptance/tests/pluginsync/7316_faces_with_app_stubs_should_be_available_via_pluginsync.rb index 7fca0459f78..ee632dc69e5 100644 --- a/acceptance/tests/pluginsync/7316_faces_with_app_stubs_should_be_available_via_pluginsync.rb +++ b/acceptance/tests/pluginsync/7316_faces_with_app_stubs_should_be_available_via_pluginsync.rb @@ -1,5 +1,9 @@ test_name "the pluginsync functionality should sync app definitions, and they should be runnable afterwards" +tag 'audit:high', + 'audit:integration', + 'server' + # # This test is intended to ensure that pluginsync syncs face definitions to the agents. # Further, the face should be runnable on the agent after the sync has occurred. 
@@ -29,9 +33,6 @@ app_desc = "a simple %1$s for testing %1$s delivery via plugin sync" app_output = "Hello from the #{app_name} %s" -master_module_file_content = {} - - master_module_face_content = <<-HERE Puppet::Face.define(:#{app_name}, '0.0.1') do copyright "Puppet Labs", 2011 @@ -111,7 +112,7 @@ class Puppet::Application::#{app_name.capitalize} < Puppet::Application::FaceBas agents.each do |agent| on(agent, puppet('agent', "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\" ", - "--trace --test --server #{master}") + "--trace --test") ) end end @@ -132,7 +133,7 @@ class Puppet::Application::#{app_name.capitalize} < Puppet::Application::FaceBas step "verify that the application shows up in help" do agents.each do |agent| - on(agent, PuppetCommand.new(:help, "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\"")) do + on(agent, PuppetCommand.new(:help, "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\"")) do |result| assert_match(/^\s+#{app_name}\s+#{app_desc % "face"}/, result.stdout) end end @@ -140,7 +141,7 @@ class Puppet::Application::#{app_name.capitalize} < Puppet::Application::FaceBas step "verify that we can run the application" do agents.each do |agent| - on(agent, PuppetCommand.new(:"#{app_name}", "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\"")) do + on(agent, PuppetCommand.new(:"#{app_name}", "--libdir=\"#{get_test_file_path(agent, agent_lib_dir)}\"")) do |result| assert_match(/^#{app_output % "face"}/, result.stdout) end end diff --git a/acceptance/tests/pluginsync/feature/pluginsync_should_sync_features.rb b/acceptance/tests/pluginsync/feature/pluginsync_should_sync_features.rb index 69c8a89cea4..0e73c46de82 100644 --- a/acceptance/tests/pluginsync/feature/pluginsync_should_sync_features.rb +++ b/acceptance/tests/pluginsync/feature/pluginsync_should_sync_features.rb @@ -1,192 +1,174 @@ -test_name "the pluginsync functionality should sync feature definitions" +test_name "the pluginsync functionality should sync feature and function definitions" do -# -# This test is intended to ensure that pluginsync syncs feature definitions to -# the agents. It checks the feature twice; once to make sure that it gets -# loaded successfully during the run in which it was synced, and once to ensure -# that it still gets loaded successfully during the subsequent run (in which it -# should not be synced because the files haven't changed.) -# + tag 'audit:high', + 'audit:integration' + # + # This test is intended to ensure that pluginsync syncs feature definitions to + # the agents. It checks the feature twice; once to make sure that it gets + # loaded successfully during the run in which it was synced, and once to ensure + # that it still gets loaded successfully during the subsequent run (in which it + # should not be synced because the files haven't changed.) 
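+  # It also verifies that synced custom functions (a top-level one and a
+  # namespaced one) can be resolved on the agent via Deferred values in the
+  # catalog.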
+ # -require 'puppet/acceptance/temp_file_utils' + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils -extend Puppet::Acceptance::TempFileUtils + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils -initialize_temp_dirs() - -all_tests_passed = false - - -############################################################################### -# BEGIN TEST LOGIC -############################################################################### - -# create some vars to point to the directories that we're going to point the master/agents at -test_identifier = "pluginsync_should_sync_features" -environments_dir = "environments" -master_module_dir = "#{environments_dir}/production/modules" -agent_lib_dir = "agent_lib" - -module_name = "superbogus" - -# here we create a custom type, which basically doesn't do anything except for test the value of -# our custom feature and write the result to a file -agent_module_type_file = "#{agent_lib_dir}/puppet/type/#{module_name}.rb" -master_module_type_file = "#{master_module_dir}/#{module_name}/lib/puppet/type/#{module_name}.rb" -master_module_type_content = < "Hi. I'm setting the testfeature property of #{module_name} here in site.pp", -} -HERE - - -# for convenience we build up a list of all of the files we are expecting to deploy on the master -all_master_files = [ - [master_module_feature_file, 'feature'], - [master_module_type_file, 'type'], - [master_manifest_file, 'manifest'] -] - -# for convenience we build up a list of all of the files we are expecting to deploy on the agents -all_agent_files = [ - [agent_module_feature_file, 'feature'], - [agent_module_type_file, 'type'] -] - -# the command line args we'll pass to the agent each time we call it -agent_args = "--trace --libdir=\"%s\" --pluginsync --no-daemonize --verbose " + - "--onetime --test --server #{master}" -# legal exit codes whenever we run the agent -# we need to allow exit code 2, which means "changes were applied" on the agent -agent_exit_codes = [0, 2] - -# this begin block is here for handling temp file cleanup via an "ensure" block at the very end of the -# test. -begin - - # copy all the files to the master - step "write our simple module out to the master" do - create_test_file(master, master_module_type_file, master_module_type_content, :mkdirs => true) - create_test_file(master, master_module_feature_file, master_module_feature_content, :mkdirs => true) - create_test_file(master, master_manifest_file, master_manifest_content, :mkdirs => true) - end - - step "verify that the module and manifest files exist on the master" do - all_master_files.each do |file_path, desc| - unless test_file_exists?(master, file_path) then - fail_test("Failed to create #{desc} file '#{get_test_file_path(master, file_path)}' on master") + HERE + create_remote_file(master, master_module_function_file, master_module_function_content) + on(master, "chmod 755 '#{master_module_function_file}' '#{master_module_function_file}'") + + master_module_namespaced_function_file = "#{master_module_function_path_namespaced}/bogus_function2.rb" + master_module_namespaced_function_content = <<-HERE + Puppet::Functions.create_function(:'superbogus::bogus_function2') do + dispatch :bogus_function2 do + end + def bogus_function2() + four = call_function('round', 4.14) + hostname = `facter hostname` + "Four is \#{four}. 
bogus_function reporting hostname is \#{hostname}" end end - end + HERE + create_remote_file(master, master_module_namespaced_function_file, master_module_namespaced_function_content) + on(master, "chmod 755 '#{master_module_namespaced_function_file}'") - step "start the master" do - - master_opts = { - 'main' => { - 'environmentpath' => "#{get_test_file_path(master, environments_dir)}", - }, + site_pp = <<-HERE + #{module_name} { "This is the title of the #{module_name} type instance in site.pp": + testfeature => "Hi. I'm setting the testfeature property of #{module_name} here in site.pp", } - - with_puppet_running_on master, master_opts do - - # the module files shouldn't exist on the agent yet because they haven't been synced - step "verify that the module files don't exist on the agent path" do - agents.each do |agent| - all_agent_files.each do |file_path, desc| - if test_file_exists?(agent, file_path) then - fail_test("#{desc} file already exists on agent: '#{get_test_file_path(agent, file_path)}'") + notify { module_function: + message => Deferred('bogus_function', []) + } + notify { module_function2: + message => Deferred('superbogus::bogus_function2', []) + } + HERE + create_sitepp(master, tmp_environment, site_pp) + + # These master opts should not be necessary whence content negotation for + # Puppet 6.0.0 is completed, and this should just be removed. + master_opts = { + 'master' => { + 'rich_data' => 'true' + } + } + + step 'start the master' do + with_puppet_running_on(master, master_opts) do + agents.each do |agent| + agent_lib_dir = agent.tmpdir('libdir') + agent_module_type_file = "#{agent_lib_dir}/puppet/type/#{module_name}.rb" + agent_module_feature_file = "#{agent_lib_dir}/puppet/feature/#{module_name}.rb" + agent_module_function_file = "#{agent_lib_dir}/puppet/functions/bogus_function.rb" + agent_module_namespaced_function_file = "#{agent_lib_dir}/puppet/functions/superbogus/bogus_function2.rb" + + facter_hostname = fact_on(agent, 'hostname') + step "verify that the module files don't exist on the agent path" do + [agent_module_type_file, agent_module_feature_file, agent_module_function_file].each do |file_path| + if file_exists?(agent, file_path) + fail_test("file should not exist on the agent yet: '#{file_path}'") end end end - end - - step "run the agent and verify that it loaded the feature" do - agents.each do |agent| - on(agent, puppet('agent', agent_args % get_test_file_path(agent, agent_lib_dir)), - :acceptable_exit_codes => agent_exit_codes) do + step 'run the agent and verify that it loaded the feature' do + on(agent, puppet("agent -t --libdir='#{agent_lib_dir}' --rich_data --environment '#{tmp_environment}'"), + :acceptable_exit_codes => [2]) do |result| assert_match(/The value of the #{module_name} feature is: true/, result.stdout, - "Expected agent stdout to include confirmation that the feature was 'true'") + "Expected agent stdout to include confirmation that the feature was 'true'") + assert_match(/Three is 3. bogus_function reporting hostname is #{facter_hostname}/, result.stdout, + "Expect the agent stdout to run bogus_function and report hostname") + assert_match(/Four is 4. 
bogus_function reporting hostname is #{facter_hostname}/, result.stdout, + "Expect the agent stdout to run bogus_function and report hostname") end end - end - step "verify that the module files were synced down to the agent" do - agents.each do |agent| - all_agent_files.each do |file_path, desc| - unless test_file_exists?(agent, file_path) then - fail_test("Expected #{desc} file not synced to agent: '#{get_test_file_path(agent, file_path)}'") + step 'verify that the module files were synced down to the agent' do + [agent_module_type_file, agent_module_feature_file, agent_module_function_file, agent_module_namespaced_function_file].each do |file_path| + unless file_exists?(agent, file_path) + fail_test("Expected file to exist on the agent now: '#{file_path}'") end end end - end - step "run the agent again" do - agents.each do |agent| - on(agent, puppet('agent', agent_args % get_test_file_path(agent, agent_lib_dir)), - :acceptable_exit_codes => agent_exit_codes) do + step 'run the agent again with a cached catalog' do + on(agent, puppet("agent -t --libdir='#{agent_lib_dir}' --use_cached_catalog --rich_data --environment '#{tmp_environment}'"), :acceptable_exit_codes => [2]) do |result| assert_match(/The value of the #{module_name} feature is: true/, result.stdout, "Expected agent stdout to include confirmation that the feature was 'true'") + assert_match(/Three is 3. bogus_function reporting hostname is #{facter_hostname}/, result.stdout, + "Expect the agent stdout to run bogus_function and report hostname") + assert_match(/Four is 4. bogus_function reporting hostname is #{facter_hostname}/, result.stdout, + "Expect the agent stdout to run bogus_function and report hostname") end end end - - #TODO: was thinking about putting in a check for the timestamps on the files (maybe add a method for that to - # the framework?) to verify that they didn't get re-synced, but it seems like more trouble than it's worth - # at the moment. - #step "verify that the module files were not re-synced" do - # fail_test("NOT YET IMPLEMENTED: verify that the module files were not re-synced") - #end - end - - all_tests_passed = true - - end -ensure - ########################################################################################## - # Clean up all of the temp files created by this test. It would be nice if this logic - # could be handled outside of the test itself; I envision a stanza like this one appearing - # in a very large number of the tests going forward unless it is handled by the framework. 
- ########################################################################################## - if all_tests_passed then - remove_temp_dirs() end end diff --git a/acceptance/tests/pluginsync/files_earlier_in_modulepath_take_precendence.rb b/acceptance/tests/pluginsync/files_earlier_in_modulepath_take_precendence.rb index 57a24c18a3d..2dacd0ffb32 100644 --- a/acceptance/tests/pluginsync/files_earlier_in_modulepath_take_precendence.rb +++ b/acceptance/tests/pluginsync/files_earlier_in_modulepath_take_precendence.rb @@ -1,5 +1,9 @@ test_name "earlier modules take precendence over later modules in the modulepath" +tag 'audit:high', + 'audit:integration', + 'server' + step "Create some modules in the modulepath" basedir = master.tmpdir("module_precedence") @@ -57,9 +61,9 @@ with_puppet_running_on master, master_opts, basedir do agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}")) - on agent, "cat \"#{agent.puppet['vardir']}/lib/foo.rb\"" do - assert_match(/from the first module/, stdout, "The synced plugin was not found or the wrong version was synced") + on(agent, puppet('agent', "-t")) + on(agent, "cat \"#{agent.puppet['vardir']}/lib/foo.rb\"") do |result| + assert_match(/from the first module/, result.stdout, "The synced plugin was not found or the wrong version was synced") end end end diff --git a/acceptance/tests/provider/package/apt_install_package_with_range.rb b/acceptance/tests/provider/package/apt_install_package_with_range.rb new file mode 100644 index 00000000000..daa1f39a0d9 --- /dev/null +++ b/acceptance/tests/provider/package/apt_install_package_with_range.rb @@ -0,0 +1,43 @@ +test_name "apt can install range if package is not installed" do + confine :to, :platform => /debian|ubuntu/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = "helloworld" + available_package_versions = ['1.0-1', '1.19-1', '2.0-1'] + repo_fixture_path = File.join(File.dirname(__FILE__), '..', '..', '..', 'fixtures', 'debian-repo') + + agents.each do |agent| + scp_to(agent, repo_fixture_path, '/tmp') + + file_manifest = resource_manifest('file', '/etc/apt/sources.list.d/tmp.list', ensure: 'present', content: 'deb [trusted=yes] file:/tmp/debian-repo ./') + apply_manifest_on(agent, file_manifest) + + on(agent, 'apt-get update') + + teardown do + package_absent(agent, package, '--force-yes') + file_manifest = resource_manifest('file', '/etc/apt/sources.list.d/tmp.list', ensure: 'absent') + apply_manifest_on(agent, file_manifest) + on(agent, 'rm -rf /tmp/debian-repo') + on(agent, 'apt-get update') + end + + step "Ensure that package is installed first if not present" do + package_manifest = resource_manifest('package', package, ensure: "<=#{available_package_versions[1]}") + apply_manifest_on(agent, package_manifest) + installed_package_version = on(agent.name, "apt-cache policy #{package} | sed -n -e 's/Installed: //p'").stdout + assert_match(available_package_versions[1], installed_package_version) + end + + step "Ensure that package is updated" do + package_manifest = resource_manifest('package', package, ensure: ">#{available_package_versions[1]}") + apply_manifest_on(agent, package_manifest) + installed_package_version = on(agent.name, "apt-cache policy #{package} | sed -n -e 's/Installed: //p'").stdout + assert_match(available_package_versions[2], installed_package_version) + end + end +end diff --git a/acceptance/tests/provider/package/dnfmodule_enable_only.rb 
b/acceptance/tests/provider/package/dnfmodule_enable_only.rb new file mode 100644 index 00000000000..ddf3fd571d2 --- /dev/null +++ b/acceptance/tests/provider/package/dnfmodule_enable_only.rb @@ -0,0 +1,42 @@ +test_name "dnfmodule can change flavors" do + confine :to, :platform => /el-8-x86_64/ # only el/centos 8 have the appstream repo + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + without_profile = '389-ds' + with_profile = 'swig' + + agents.each do |agent| + skip_test('appstream repo not present') unless on(agent, 'dnf repolist').stdout.include?('appstream') + teardown do + apply_manifest_on(agent, resource_manifest('package', without_profile, ensure: 'absent', provider: 'dnfmodule')) + apply_manifest_on(agent, resource_manifest('package', with_profile, ensure: 'absent', provider: 'dnfmodule')) + end + end + + step "Enable module with no default profile: #{without_profile}" do + apply_manifest_on(agent, resource_manifest('package', without_profile, ensure: 'present', provider: 'dnfmodule'), expect_changes: true) + on(agent, "dnf module list --enabled | grep #{without_profile}") + end + + step "Ensure idempotency for: #{without_profile}" do + apply_manifest_on(agent, resource_manifest('package', without_profile, ensure: 'present', provider: 'dnfmodule'), catch_changes: true) + end + + step "Enable module with a profile: #{with_profile}" do + apply_manifest_on(agent, resource_manifest('package', with_profile, ensure: 'present', enable_only: true, provider: 'dnfmodule'), expect_changes: true) + on(agent, "dnf module list --enabled | grep #{with_profile}") + end + + step "Ensure idempotency for: #{with_profile}" do + apply_manifest_on(agent, resource_manifest('package', with_profile, ensure: 'present', enable_only: true, provider: 'dnfmodule'), catch_changes: true) + end + + step "Install a flavor for: #{with_profile}" do + apply_manifest_on(agent, resource_manifest('package', with_profile, ensure: 'present', flavor: 'common', provider: 'dnfmodule'), expect_changes: true) + on(agent, "dnf module list --installed | grep #{with_profile}") + end +end diff --git a/acceptance/tests/provider/package/dnfmodule_ensure_versionable.rb b/acceptance/tests/provider/package/dnfmodule_ensure_versionable.rb new file mode 100644 index 00000000000..6737f563bdb --- /dev/null +++ b/acceptance/tests/provider/package/dnfmodule_ensure_versionable.rb @@ -0,0 +1,41 @@ +test_name "dnfmodule is versionable" do + confine :to, :platform => /el-8-x86_64/ # only el/centos 8 have the appstream repo + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + + package = "postgresql" + + agents.each do |agent| + skip_test('appstream repo not present') unless on(agent, 'dnf repolist').stdout.include?('appstream') + teardown do + apply_manifest_on(agent, resource_manifest('package', package, ensure: 'absent', provider: 'dnfmodule')) + end + end + + step "Ensure we get the newer version by default" do + apply_manifest_on(agent, resource_manifest('package', package, ensure: 'present', provider: 'dnfmodule')) + on(agent, 'postgres --version') do |version| + assert_match('postgres (PostgreSQL) 10', version.stdout, 'package version not correct') + end + end + + step "Ensure we get a specific version if we want it" do + apply_manifest_on(agent, resource_manifest('package', package, ensure: '9.6', provider: 'dnfmodule')) + on(agent, 
'postgres --version') do |version| + assert_match('postgres (PostgreSQL) 9.6', version.stdout, 'package version not correct') + end + end + + step "Ensure we can disable a package" do + apply_manifest_on(agent, resource_manifest('package', package, ensure: :disabled, provider: 'dnfmodule')) + on(agent, "dnf module list | grep #{package}") do |output| + output.stdout.each_line do |line| + assert_match("\[x\]", line, 'package not disabled') + end + end + end +end diff --git a/acceptance/tests/provider/package/dnfmodule_manages_flavors.rb b/acceptance/tests/provider/package/dnfmodule_manages_flavors.rb new file mode 100644 index 00000000000..2658beaab7e --- /dev/null +++ b/acceptance/tests/provider/package/dnfmodule_manages_flavors.rb @@ -0,0 +1,31 @@ +test_name "dnfmodule can change flavors" do + confine :to, :platform => /el-8-x86_64/ # only el/centos 8 have the appstream repo + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = "postgresql" + + agents.each do |agent| + skip_test('appstream repo not present') unless on(agent, 'dnf repolist').stdout.include?('appstream') + teardown do + apply_manifest_on(agent, resource_manifest('package', package, ensure: 'absent', provider: 'dnfmodule')) + end + end + + step "Install the client #{package} flavor" do + apply_manifest_on(agent, resource_manifest('package', package, ensure: 'present', flavor: 'client', provider: 'dnfmodule')) + on(agent, "dnf module list --installed | grep #{package} | sed -E 's/\\[d\\] //g'") do |output| + assert_match('client [i]', output.stdout, 'installed flavor not correct') + end + end + + step "Install the server #{package} flavor" do + apply_manifest_on(agent, resource_manifest('package', package, ensure: 'present', flavor: 'server', provider: 'dnfmodule')) + on(agent, "dnf module list --installed | grep #{package} | sed -E 's/\\[d\\] //g'") do |output| + assert_match('server [i]', output.stdout, 'installed flavor not correct') + end + end +end diff --git a/acceptance/tests/provider/package/dpkg_ensure_latest_virtual_packages.rb b/acceptance/tests/provider/package/dpkg_ensure_latest_virtual_packages.rb new file mode 100644 index 00000000000..ffd3f7d8062 --- /dev/null +++ b/acceptance/tests/provider/package/dpkg_ensure_latest_virtual_packages.rb @@ -0,0 +1,47 @@ +test_name "dpkg ensure latest with allow_virtual set to true, the virtual package should detect and install a real package" do + confine :to, :platform => /debian/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + pkg = "rubygems" + + agents.each do |agent| + ruby_present = on(agent, 'dpkg -s ruby', accept_all_exit_codes: true).exit_code == 0 + + teardown do + if ruby_present + apply_manifest_on(agent, resource_manifest('package', 'ruby', ensure: 'present')) + else + apply_manifest_on(agent, resource_manifest('package', 'ruby', ensure: 'absent')) + end + end + + step "Uninstall system ruby if already present" do + apply_manifest_on(agent, resource_manifest('package', 'ruby', ensure: 'absent')) if ruby_present + end + + step "Ensure latest should install ruby instead of rubygems when allow_virtual is set to true" do + package_manifest_with_allow_virtual = resource_manifest('package', pkg, ensure: 'latest', allow_virtual: true) + apply_manifest_on(agent, package_manifest_with_allow_virtual, expect_changes: true) + + output = on(agent, "dpkg-query -W 
--showformat='${Status} ${Package} ${Version} [${Provides}]\n' ").output + lines = output.split("\n") + matched_line = lines.find { |package| package.match(/[\[ ](#{Regexp.escape(pkg)})[\],]/)} + + package_line_info = matched_line.split + real_package_name = package_line_info[3] + real_package_installed_version = package_line_info[4] + + installed_version = on(agent, "apt-cache policy #{real_package_name} | sed -n -e 's/Installed: //p'").stdout.strip + assert_match(real_package_installed_version, installed_version) + end + + step "Ensure latest should not install ruby package if it's already installed and exit code should be 0" do + package_manifest_with_allow_virtual = resource_manifest('package', pkg, ensure: 'latest', allow_virtual: true) + apply_manifest_on(agent, package_manifest_with_allow_virtual, :catch_changes => true) + end + end +end diff --git a/acceptance/tests/provider/package/dpkg_hold_true_package_is_latest.rb b/acceptance/tests/provider/package/dpkg_hold_true_package_is_latest.rb new file mode 100644 index 00000000000..052b7932a4f --- /dev/null +++ b/acceptance/tests/provider/package/dpkg_hold_true_package_is_latest.rb @@ -0,0 +1,27 @@ +test_name "dpkg ensure hold package is latest installed" do + confine :to, :platform => /debian-9-amd64/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + + package = "nginx" + + agents.each do |agent| + teardown do + package_absent(agent, package, '--force-yes') + end + end + + step"Ensure that package is installed first if not present" do + expected_package_version = on(agent.name, "apt-cache policy #{package} | sed -n -e 's/Candidate: //p'").stdout + package_manifest = resource_manifest('package', package, mark: "hold") + + apply_manifest_on(agent, package_manifest) do |result| + installed_package_version = on(agent.name, "apt-cache policy #{package} | sed -n -e 's/Installed: //p'").stdout + assert_match(expected_package_version, installed_package_version) + end + end +end diff --git a/acceptance/tests/provider/package/dpkg_hold_true_should_preserve_version.rb b/acceptance/tests/provider/package/dpkg_hold_true_should_preserve_version.rb new file mode 100644 index 00000000000..4d7dd8d73f7 --- /dev/null +++ b/acceptance/tests/provider/package/dpkg_hold_true_should_preserve_version.rb @@ -0,0 +1,22 @@ +test_name "dpkg ensure hold package should preserve version if package is already installed" do + confine :to, :platform => /debian-9-amd64/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = "openssl" + + step "Ensure hold should lock to specific installed version" do + existing_installed_version = on(agent.name, "dpkg -s #{package} | sed -n -e 's/Version: //p'").stdout + existing_installed_version.delete!(' ') + + package_manifest_hold = resource_manifest('package', package, mark: "hold") + apply_manifest_on(agent, package_manifest_hold) do + installed_version = on(agent.name, "apt-cache policy #{package} | sed -n -e 's/Installed: //p'").stdout + installed_version.delete!(' ') + assert_match(existing_installed_version, installed_version) + end + end +end diff --git a/acceptance/tests/provider/package/gem.rb b/acceptance/tests/provider/package/gem.rb new file mode 100644 index 00000000000..03865821631 --- /dev/null +++ b/acceptance/tests/provider/package/gem.rb @@ -0,0 +1,114 @@ +test_name "gem provider should install and 
uninstall" do + confine :to, :template => /centos-7-x86_64|redhat-7-x86_64/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = 'colorize' + + agents.each do |agent| + # On a Linux host with only the 'agent' role, the puppet command fails when another Ruby is installed earlier in the PATH: + # + # [root@agent ~]# env PATH="/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/opt/puppetlabs/bin" puppet apply -e ' notify { "Hello": }' + # Activating bundler (2.0.2) failed: + # Could not find 'bundler' (= 2.0.2) among 5 total gem(s) + # To install the version of bundler this project requires, run `gem install bundler -v '2.0.2'` + # + # Magically, the puppet command succeeds on a Linux host with both the 'master' and 'agent' roles. + # + # Puppet's Ruby makes a fine target. Unfortunately, it's first in the PATH on Windows: PUP-6134. + # Also, privatebindir isn't a directory on Windows, it's a PATH: + # https://github.com/puppetlabs/beaker-puppet/blob/master/lib/beaker-puppet/install_utils/aio_defaults.rb + # + # These tests depend upon testing being confined to /centos-7-x86_64|redhat-7-x86_64/. + if agent['roles'].include?('master') + original_path = agent.get_env_var('PATH').split('=').last + + # https://github.com/puppetlabs/puppet-agent/blob/master/resources/files/puppet-agent.sh + puppet_agent_sh_path = '/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/opt/puppetlabs/bin' + + system_gem_command = '/usr/bin/gem' + + teardown do + step "Teardown: Uninstall System Ruby, and reset PATH" do + package_absent(agent, 'ruby') + agent.clear_env_var('PATH') + agent.add_env_var('PATH', original_path) + end + end + + step "Setup: Install System Ruby, and set PATH to place System Ruby ahead of Puppet Ruby" do + package_present(agent, 'ruby') + agent.add_env_var('PATH', puppet_agent_sh_path) + end + + step "Install a gem package in System Ruby" do + package_manifest = resource_manifest('package', package, { ensure: 'present', provider: 'gem' } ) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + list = on(agent, "#{system_gem_command} list").stdout + assert_match(/#{package} \(/, list) + end + on(agent, "#{system_gem_command} uninstall #{package}") + end + + step "Uninstall a gem package in System Ruby" do + on(agent, "/usr/bin/gem install #{package}") + package_manifest = resource_manifest('package', package, { ensure: 'absent', provider: 'gem' } ) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + list = on(agent, "#{system_gem_command} list").stdout + refute_match(/#{package} \(/, list) + end + on(agent, "#{system_gem_command} uninstall #{package}") + end + + step "Uninstall System Ruby, and reset PATH" do + package_absent(agent, 'ruby') + agent.add_env_var('PATH', original_path) + end + end + + puppet_gem_command = "#{agent['privatebindir']}/gem" + + step "Install a gem package with a target command" do + package_manifest = resource_manifest('package', package, { ensure: 'present', provider: 'gem', command: puppet_gem_command } ) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + list = on(agent, "#{puppet_gem_command} list").stdout + assert_match(/#{package} \(/, list) + end + on(agent, "#{puppet_gem_command} uninstall #{package}") + end + + step "Install a gem package in a certain min max range" do + package_manifest1 = resource_manifest('package', package, { ensure: '>0.5 
<0.7', provider: 'gem' } ) + package_manifest2 = resource_manifest('package', package, { ensure: '>0.7 <0.8.1', provider: 'gem' } ) + + # Install package (with version between 0.5 and 0.7) + apply_manifest_on(agent, package_manifest1, :expect_changes => true) do + list = on(agent, "#{puppet_gem_command} list").stdout + assert_match(/#{package} \((0.6.0)\)/, list) + end + + # Reapply same manifest and expect no changes + apply_manifest_on(agent, package_manifest1, :catch_changes => true) + + # Install besides existing package (with version between 0.7 and 0.8.1) and expect changes + apply_manifest_on(agent, package_manifest2, :expect_changes => true) do + list = on(agent, "#{puppet_gem_command} list").stdout + assert_match(/#{package} \((0.8.0, 0.6.0)\)/, list) + end + on(agent, "#{puppet_gem_command} uninstall #{package} --all") + end + + step "Uninstall a gem package with a target command" do + on(agent, "#{puppet_gem_command} install #{package}") + package_manifest = resource_manifest('package', package, { ensure: 'absent', provider: 'gem', command: puppet_gem_command } ) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + list = on(agent, "#{puppet_gem_command} list").stdout + refute_match(/#{package} \(/, list) + end + on(agent, "#{puppet_gem_command} uninstall #{package}") + end + end +end diff --git a/acceptance/tests/provider/package/pip.rb b/acceptance/tests/provider/package/pip.rb new file mode 100644 index 00000000000..d1113198b02 --- /dev/null +++ b/acceptance/tests/provider/package/pip.rb @@ -0,0 +1,81 @@ +test_name "pip provider should install, use install_options with latest, and uninstall" do + confine :to, :template => /centos/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = 'colorize' + pip_command = 'pip' + + teardown do + on(agent, "#{pip_command} uninstall #{package} --disable-pip-version-check --yes", :accept_all_exit_codes => true) + end + + agents.each do |agent| + step "Setup: Install EPEL Repository, Python and Pip" do + package_present(agent, 'epel-release') + if agent.platform =~ /el-8/ + package_present(agent, 'python2') + package_present(agent, 'python2-pip') + pip_command = 'pip2' + else + package_present(agent, 'python') + package_present(agent, 'python-pip') + end + end + + step "Ensure presence of a pip package" do + package_manifest = resource_manifest('package', package, { ensure: 'present', provider: 'pip' } ) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + list = on(agent, "#{pip_command} list --disable-pip-version-check").stdout + assert_match(/#{package} \(/, list) + end + on(agent, "#{pip_command} uninstall #{package} --disable-pip-version-check --yes") + end + + step "Install a pip package using version range" do + package_manifest1 = resource_manifest('package', package, { ensure: '<=1.1.0', provider: 'pip' } ) + package_manifest2 = resource_manifest('package', package, { ensure: '<1.0.4', provider: 'pip' } ) + + # Make a fresh package install (with version lower than or equal to 1.1.0) + apply_manifest_on(agent, package_manifest1, :expect_changes => true) do + list = on(agent, "#{pip_command} list --disable-pip-version-check").stdout + match = list.match(/#{package} \((.+)\)/) + installed_version = match[1] if match + assert_match(installed_version, '1.1.0') + end + + # Reapply same manifest and expect no changes + apply_manifest_on(agent, package_manifest1, :catch_changes => true) + + 
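+      # Note: ensure values like '<=1.1.0' and '<1.0.4' are treated as version
+      # requirements, so the provider should pick the newest available release
+      # that satisfies the range ('1.1.0' above, '1.0.3' below).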
# Reinstall over existing package (with version lower than 1.0.4) and expect changes (to be 1.0.3) + apply_manifest_on(agent, package_manifest2, :expect_changes => true) do + list = on(agent, "#{pip_command} list --disable-pip-version-check").stdout + match = list.match(/#{package} \((.+)\)/) + installed_version = match[1] if match + assert_match(installed_version, '1.0.3') + end + + on(agent, "#{pip_command} uninstall #{package} --disable-pip-version-check --yes") + end + + step "Ensure latest with pip uses install_options" do + on(agent, "#{pip_command} install #{package} --disable-pip-version-check") + package_manifest = resource_manifest('package', package, { ensure: 'latest', provider: 'pip', install_options: { '--index' => 'https://pypi.python.org/simple' } } ) + result = apply_manifest_on(agent, package_manifest, { :catch_failures => true, :debug => true } ) + assert_match(/--index=https:\/\/pypi.python.org\/simple/, result.stdout) + on(agent, "#{pip_command} uninstall #{package} --disable-pip-version-check --yes") + end + + step "Uninstall a pip package" do + on(agent, "#{pip_command} install #{package} --disable-pip-version-check") + package_manifest = resource_manifest('package', package, { ensure: 'absent', provider: 'pip' } ) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + list = on(agent, "#{pip_command} list --disable-pip-version-check").stdout + refute_match(/#{package} \(/, list) + end + end + end +end diff --git a/acceptance/tests/provider/package/puppetserver_gem.rb b/acceptance/tests/provider/package/puppetserver_gem.rb new file mode 100644 index 00000000000..d1aaa1f8874 --- /dev/null +++ b/acceptance/tests/provider/package/puppetserver_gem.rb @@ -0,0 +1,37 @@ +test_name "puppetserver_gem provider should install and uninstall" do + tag 'audit:high', + 'server' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + skip_test 'puppetserver_gem is only suitable on server nodes' unless master + + package = 'world_airports' + + teardown do + # Ensure the gem is uninstalled if anything goes wrong + # TODO maybe execute this only if something fails, as it takes time + on(master, "puppetserver gem uninstall #{package}") + end + + step "Installing a gem executes without error" do + package_manifest = resource_manifest('package', package, { ensure: 'present', provider: 'puppetserver_gem' } ) + apply_manifest_on(master, package_manifest, catch_failures: true) do + list = on(master, "puppetserver gem list").stdout + assert_match(/#{package} \(/, list) + end + + # Run again for idempotency + apply_manifest_on(master, package_manifest, catch_changes: true) + end + + step "Uninstalling a gem executes without error" do + package_manifest = resource_manifest('package', package, { ensure: 'absent', provider: 'puppetserver_gem' } ) + apply_manifest_on(master, package_manifest, catch_failures: true) do + list = on(master, "puppetserver gem list").stdout + refute_match(/#{package} \(/, list) + end + end +end diff --git a/acceptance/tests/provider/package/rpm_ensure_install_multiversion_package.rb b/acceptance/tests/provider/package/rpm_ensure_install_multiversion_package.rb new file mode 100644 index 00000000000..b35550b0187 --- /dev/null +++ b/acceptance/tests/provider/package/rpm_ensure_install_multiversion_package.rb @@ -0,0 +1,82 @@ +test_name "rpm should install packages with multiple versions" do + confine :to, :platform => /redhat|centos|el|fedora/ + tag 'audit:high' + + require 
'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = "kernel-devel-puppet" + repo_fixture_path = File.join(File.dirname(__FILE__), '..', '..', '..', 'fixtures', 'el-repo') + + repo_content = <<-REPO +[local] +name=EL-releasever - test packages +baseurl=file:///tmp/el-repo +enabled=1 +gpgcheck=0 +protect=1 +REPO + + agents.each do |agent| + initially_installed_versions = [] + scp_to(agent, repo_fixture_path, '/tmp') + + file_manifest = resource_manifest('file', '/etc/yum.repos.d/local.repo', ensure: 'present', content: repo_content) + apply_manifest_on(agent, file_manifest) + + teardown do + on(agent, 'rm -rf /tmp/el-repo') + on(agent, 'rm -f /etc/yum.repos.d/local.repo') + + available_versions = on(agent, "yum --showduplicates list #{package} | sed -e '1,/Available Packages/ d' | awk '{print $2}'").stdout + initially_installed_versions.each do |version| + if available_versions.include? version + package_manifest = resource_manifest('package', package, ensure: version, install_only: true) + apply_manifest_on(agent, package_manifest, :catch_failures => true) + end + end + end + + step "Uninstall package versions for clean setup" do + initially_installed_versions = on(agent, "yum --showduplicates list #{package} | sed -e '1,/Installed Packages/ d' -e '/Available Packages/,$ d' | awk '{print $2}'").stdout.split("\n") + + package_manifest = resource_manifest('package', package, ensure: 'absent', install_only: true) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + remaining_installed_versions = on(agent, "yum --showduplicates list #{package} | sed -e '1,/Installed Packages/ d' -e '/Available Packages/,$ d' | awk '{print $2}'").stdout + assert(remaining_installed_versions.empty?) 
+ end + + available_versions = on(agent, "yum --showduplicates list #{package} | sed -e '1,/Available Packages/ d' | awk '{print $2}'").stdout.split("\n") + if available_versions.size < 2 + skip_test "we need at least two package versions to perform the multiversion rpm test" + end + end + + step "Ensure oldest version of multiversion package is installed" do + oldest_version = on(agent, "yum --showduplicates list #{package} | sed -e '1,/Available Packages/ d' | head -1 | awk '{print $2}'").stdout.strip + package_manifest = resource_manifest('package', package, ensure: oldest_version, install_only: true) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + installed_version = on(agent, "rpm -q #{package}").stdout + assert_match(oldest_version, installed_version) + end + end + + step "Ensure newest version of multiversion package is installed" do + newest_version = on(agent, "yum --showduplicates list #{package} | sed -e '1,/Available Packages/ d' | tail -1 | awk '{print $2}'").stdout.strip + package_manifest = resource_manifest('package', package, ensure: newest_version, install_only: true) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + installed_version = on(agent, "rpm -q #{package}").stdout + assert_match(newest_version, installed_version) + end + end + + step "Ensure rpm will uninstall multiversion package" do + package_manifest = resource_manifest('package', package, ensure: 'absent', install_only: true) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + remaining_installed_versions = on(agent, "yum --showduplicates list #{package} | sed -e '1,/Installed Packages/ d' -e '/Available Packages/,$ d' | awk '{print $2}'").stdout + assert(remaining_installed_versions.empty?) + end + end + end +end diff --git a/acceptance/tests/provider/package/yum_semantic_versioning.rb b/acceptance/tests/provider/package/yum_semantic_versioning.rb new file mode 100644 index 00000000000..d6fee439fc0 --- /dev/null +++ b/acceptance/tests/provider/package/yum_semantic_versioning.rb @@ -0,0 +1,66 @@ +test_name "yum provider should use semantic versioning for ensuring desired version" do + confine :to, :platform => /el-7/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = 'yum-utils' + + lower_package_version = '1.1.31-34.el7' + middle_package_version = '1.1.31-42.el7' + higher_package_version = '1.1.31-45.el7' + + agents.each do |agent| + yum_command = 'yum' + + step "Setup: Skip test if box already has the package installed" do + on(agent, "rpm -q #{package}", :acceptable_exit_codes => [1,0]) do |result| + skip_test "package #{package} already installed on this box" unless result.output =~ /package #{package} is not installed/ + end + end + + step "Setup: Skip test if package versions are not available" do + on(agent, "yum list #{package} --showduplicates", :acceptable_exit_codes => [1,0]) do |result| + versions_available = [lower_package_version, middle_package_version, higher_package_version].all? { + |needed_versions| result.output.include?
needed_versions } + skip_test "package #{package} versions not available on the box" unless versions_available + end + end + + step "Using semantic versioning to downgrade to a desired version <= X" do + on(agent, "#{yum_command} install #{package} -y") + package_manifest = resource_manifest('package', package, { ensure: "<=#{lower_package_version}", provider: 'yum' } ) + apply_manifest_on(agent, package_manifest, :catch_failures => true) do + installed_version = on(agent, "rpm -q #{package}").stdout + assert_match(/#{lower_package_version}/, installed_version) + end + # idempotency test + package_manifest = resource_manifest('package', package, { ensure: "<=#{lower_package_version}", provider: 'yum' } ) + apply_manifest_on(agent, package_manifest, :catch_changes => true) + on(agent, "#{yum_command} remove #{package} -y") + end + + step "Using semantic versioning to ensure a version >X <=Y" do + on(agent, "#{yum_command} install #{package} -y") + package_manifest = resource_manifest('package', package, { ensure: ">#{lower_package_version} <=#{higher_package_version}", provider: 'yum' } ) + apply_manifest_on(agent, package_manifest) do + installed_version = on(agent, "rpm -q #{package}").stdout + assert_match(/#{higher_package_version}/, installed_version) + end + on(agent, "#{yum_command} remove #{package} -y") + end + + step "Using semantic versioning to install a version >X #{lower_package_version} <#{higher_package_version}", provider: 'yum' } ) + # installing a version >X /sles/ + tag 'audit:high' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::PackageUtils + extend Puppet::Acceptance::ManifestUtils + + package = "helloworld" + available_package_versions = ['1.0-2', '1.19-2', '2.0-2'] + repo_fixture_path = File.join(File.dirname(__FILE__), '..', '..', '..', 'fixtures', 'sles-repo') + repo_content = <<-REPO +[local] +name=local - test packages +baseurl=file:///tmp/sles-repo +enabled=1 +gpgcheck=0 +REPO + + agents.each do |agent| + scp_to(agent, repo_fixture_path, '/tmp') + + file_manifest = resource_manifest('file', '/etc/zypp/repos.d/local.repo', ensure: 'present', content: repo_content) + apply_manifest_on(agent, file_manifest) + + teardown do + package_absent(agent, package, '--force-yes') + file_manifest = resource_manifest('file', '/etc/zypp/repos.d/local.repo', ensure: 'absent') + apply_manifest_on(agent, file_manifest) + on(agent, 'rm -rf /tmp/sles-repo') + end + + step "Ensure that package is installed first if not present" do + package_manifest = resource_manifest('package', package, ensure: "<=#{available_package_versions[1]}") + apply_manifest_on(agent, package_manifest) + installed_package_version = on(agent, "rpm -q #{package}").stdout + assert_match(available_package_versions[1], installed_package_version) + end + + step "Ensure that package is updated" do + package_manifest = resource_manifest('package', package, ensure: ">#{available_package_versions[1]}") + apply_manifest_on(agent, package_manifest) + installed_package_version = on(agent, "rpm -q #{package}").stdout + assert_match(available_package_versions[2], installed_package_version) + end + end +end diff --git a/acceptance/tests/puppet_apply_a_file_should_create_a_file_and_report_the_md5.rb b/acceptance/tests/puppet_apply_a_file_should_create_a_file_and_report_the_md5.rb deleted file mode 100644 index 0358d119583..00000000000 --- a/acceptance/tests/puppet_apply_a_file_should_create_a_file_and_report_the_md5.rb +++ /dev/null @@ -1,17 +0,0 @@ -test_name "puppet apply should create a file and 
report an MD5" - -agents.each do |agent| - file = agent.tmpfile('hello-world') - manifest = "file{'#{file}': content => 'test'}" - - step "clean up #{file} for testing" - on(agent, "rm -f #{file}") - - step "Run the manifest and verify MD5 was printed" - apply_manifest_on(agent, manifest) do - assert_match(/defined content as '{md5}098f6bcd4621d373cade4e832627b4f6'/, stdout, "#{agent}: didn't find the content MD5 on output") - end - - step "clean up #{file} after testing" - on(agent, "rm -f #{file}") -end diff --git a/acceptance/tests/puppet_apply_basics.rb b/acceptance/tests/puppet_apply_basics.rb deleted file mode 100644 index c11546ebf08..00000000000 --- a/acceptance/tests/puppet_apply_basics.rb +++ /dev/null @@ -1,17 +0,0 @@ -# Ported from a collection of small spec tests in acceptance. -# -# Unified into a single file because they are literally one-line tests! - -test_name "Trivial puppet tests" - -step "check that puppet apply displays notices" -agents.each do |host| - apply_manifest_on(host, "notice 'Hello World'") do - assert_match(/Hello World/, stdout, "#{host}: missing notice!") - end -end - -step "verify help displays something for puppet master" -on master, puppet_master("--help") do - assert_match(/puppet master/, stdout, "improper help output") -end diff --git a/acceptance/tests/puppet_apply_should_show_a_notice.rb b/acceptance/tests/puppet_apply_should_show_a_notice.rb deleted file mode 100644 index df3922f4458..00000000000 --- a/acceptance/tests/puppet_apply_should_show_a_notice.rb +++ /dev/null @@ -1,7 +0,0 @@ -test_name "puppet apply should show a notice" - -agents.each do |host| - apply_manifest_on(host, "notice 'Hello World'") do - assert_match(/.*: Hello World/, stdout, "#{host}: the notice didn't show") - end -end diff --git a/acceptance/tests/puppet_master_help_should_mention_puppet_master.rb b/acceptance/tests/puppet_master_help_should_mention_puppet_master.rb deleted file mode 100644 index ba7b39cb01f..00000000000 --- a/acceptance/tests/puppet_master_help_should_mention_puppet_master.rb +++ /dev/null @@ -1,4 +0,0 @@ -test_name "puppet master help should mention puppet master" -on master, puppet_master('--help') do - fail_test "puppet master wasn't mentioned" unless stdout.include? 'puppet master' -end diff --git a/acceptance/tests/reports/agent_sends_json_report_for_cached_catalog.rb b/acceptance/tests/reports/agent_sends_json_report_for_cached_catalog.rb new file mode 100644 index 00000000000..b85a54ef178 --- /dev/null +++ b/acceptance/tests/reports/agent_sends_json_report_for_cached_catalog.rb @@ -0,0 +1,32 @@ +test_name "C100533: Agent sends json report for cached catalog" do + + tag 'risk:high', + 'audit:high', + 'audit:integration', + 'server' + + with_puppet_running_on(master, :main => {}) do + expected_format = 'json' + + step "Perform agent run to ensure that catalog is cached" do + agents.each do |agent| + on(agent, puppet('agent', '-t'), :acceptable_exit_codes => [0,2]) + end + end + + step "Ensure agent sends #{expected_format} report for cached catalog" do + agents.each do |agent| + on(agent, puppet('agent', '-t', + '--http_debug'), :acceptable_exit_codes => [0,2]) do |res| + # Expected content-type should be in the headers of the + # HTTP report payload being PUT to the server by the agent. 
+ unless res.stderr =~ /<- "PUT \/puppet\/v[3-9]\/report.*Content-Type: .*\/#{expected_format}/ + fail_test("Report was not submitted in #{expected_format} format") + end + end + end + end + + end + +end diff --git a/acceptance/tests/reports/cached_catalog_status_in_report.rb b/acceptance/tests/reports/cached_catalog_status_in_report.rb new file mode 100644 index 00000000000..6561d353e86 --- /dev/null +++ b/acceptance/tests/reports/cached_catalog_status_in_report.rb @@ -0,0 +1,48 @@ +test_name "PUP-5867: The report specifies whether a cached catalog was used, and if so, why" do + tag 'audit:high', + 'audit:integration', + 'server' + + master_reportdir = create_tmpdir_for_user(master, 'report_dir') + + teardown do + on(master, "rm -rf #{master_reportdir}") + end + + def remove_reports_on_master(master_reportdir, agent_node_name) + on(master, "rm -rf #{master_reportdir}/#{agent_node_name}/*") + end + + with_puppet_running_on(master, :master => { :reportdir => master_reportdir, :reports => 'store' }) do + agents.each do |agent| + step "cached_catalog_status should be 'not used' when a new catalog is retrieved" do + step "Initial run: cache a newly retrieved catalog" do + on(agent, puppet("agent", "-t"), :acceptable_exit_codes => [0,2]) + end + + step "Run again and ensure report indicates that the cached catalog was not used" do + on(agent, puppet("agent", "--onetime", "--no-daemonize"), :acceptable_exit_codes => [0, 2]) + on(master, "cat #{master_reportdir}/#{agent.node_name}/*") do |result| + assert_match(/cached_catalog_status: not_used/, result.stdout, "expected to find 'cached_catalog_status: not_used' in the report") + end + remove_reports_on_master(master_reportdir, agent.node_name) + end + end + + step "Run with --use_cached_catalog and ensure report indicates cached catalog was explicitly requested" do + on(agent, puppet("agent", "--onetime", "--no-daemonize", "--use_cached_catalog"), :acceptable_exit_codes => [0, 2]) + on(master, "cat #{master_reportdir}/#{agent.node_name}/*") do |result| + assert_match(/cached_catalog_status: explicitly_requested/, result.stdout, "expected to find 'cached_catalog_status: explicitly_requested' in the report") + end + remove_reports_on_master(master_reportdir, agent.node_name) + end + + step "On a run which fails to retrieve a new catalog, ensure report indicates cached catalog was used on failure" do + on(agent, puppet("agent", "--onetime", "--no-daemonize", "--report_server #{master}", "--server nonexist"), :acceptable_exit_codes => [0, 2]) + on(master, "cat #{master_reportdir}/#{agent.node_name}/*") do |result| + assert_match(/cached_catalog_status: on_failure/, result.stdout, "expected to find 'cached_catalog_status: on_failure' in the report") + end + end + end + end +end diff --git a/acceptance/tests/reports/corrective_change_new_resource.rb b/acceptance/tests/reports/corrective_change_new_resource.rb new file mode 100644 index 00000000000..de085a8406a --- /dev/null +++ b/acceptance/tests/reports/corrective_change_new_resource.rb @@ -0,0 +1,99 @@ +test_name "C98092 - a new resource should not be reported as a corrective change" do + + require 'yaml' + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/agent_fqdn_utils' + extend Puppet::Acceptance::AgentFqdnUtils + + tag 'audit:high', + 'audit:integration', + 'audit:refactor', # Uses a server currently but is testing agent report + 'broken:images' + + test_file_name = File.basename(__FILE__, '.*') + tmp_environment = 
mk_tmp_environment_with_teardown(master, test_file_name) + tmp_file = {} + + agents.each do |agent| + tmp_file[agent_to_fqdn(agent)] = agent.tmpfile(tmp_environment) + end + + teardown do + step 'clean out produced resources' do + agents.each do |agent| + if tmp_file.has_key?(agent_to_fqdn(agent)) && tmp_file[agent_to_fqdn(agent)] != '' + on(agent, "rm '#{tmp_file[agent_to_fqdn(agent)]}'", :accept_all_exit_codes => true) + end + + + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + end + + step 'create file resource - site.pp to verify corrective change flag' do + file_contents = 'this is a test' + manifest = <<-MANIFEST + file { '#{environmentpath}/#{tmp_environment}/manifests/site.pp': + ensure => file, + content => ' + \$test_path = \$facts["networking"]["fqdn"] ? #{tmp_file} + file { \$test_path: + content => @(UTF8) + #{file_contents} + | UTF8 + } + ', + } + MANIFEST + apply_manifest_on(master, manifest, :catch_failures => true) + end + + step 'run agent(s)' do + with_puppet_running_on(master, {}) do + agents.each do |agent| + #Run agent once to create new File resource + step 'Run agent once to create new File resource' do + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), :acceptable_exit_codes => 2) + end + + #Verify the file resource is created + step 'Verify the file resource is created' do + on(agent, "cat '#{tmp_file[agent_to_fqdn(agent)]}'").stdout do |file_result| + assert_equal(file_contents, file_result, 'file contents did not match expected contents') + end + end + end + end + end + + # Open last_run_report.yaml + step 'Check report' do + agents.each do |agent| + on(agent, puppet('config print statedir')) do |command_result| + report_path = command_result.stdout.chomp + '/last_run_report.yaml' + on(agent, "cat '#{report_path}'").stdout do |report_contents| + + yaml_data = YAML::parse(report_contents) + # Remove any Ruby class tags from the yaml + yaml_data.root.each do |o| + if o.respond_to?(:tag=) and o.tag != nil and o.tag.start_with?("!ruby") + o.tag = nil + end + end + report_yaml = yaml_data.to_ruby + + file_resource_details = report_yaml["resource_statuses"]["File[#{tmp_file[agent_to_fqdn(agent)]}]"] + assert(file_resource_details.has_key?("corrective_change"), 'corrective_change key is missing') + corrective_change_value = file_resource_details["corrective_change"] + assert_equal(false, corrective_change_value, 'corrective_change flag should be false') + end + end + end + end + +end diff --git a/acceptance/tests/reports/corrective_change_outside_puppet.rb b/acceptance/tests/reports/corrective_change_outside_puppet.rb new file mode 100644 index 00000000000..2e23651fa8a --- /dev/null +++ b/acceptance/tests/reports/corrective_change_outside_puppet.rb @@ -0,0 +1,116 @@ +test_name "C98093 - a resource changed outside of Puppet will be reported as a corrective change" do + + require 'yaml' + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/agent_fqdn_utils' + extend Puppet::Acceptance::AgentFqdnUtils + + tag 'audit:high', + 'audit:integration', + 'audit:refactor', # Uses a server currently, but is testing agent report + 'broken:images' + + test_file_name = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, test_file_name) + tmp_file = {} + + + agents.each do |agent| + tmp_file[agent_to_fqdn(agent)] = agent.tmpfile(tmp_environment) + end + + teardown do + step 'clean out produced
resources' do + agents.each do |agent| + if tmp_file.has_key?(agent_to_fqdn(agent)) && tmp_file[agent_to_fqdn(agent)] != '' + on(agent, "rm '#{tmp_file[agent_to_fqdn(agent)]}'", :accept_all_exit_codes => true) + end + + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + end + + step 'create file resource - site.pp to verify corrective change flag' do + file_contents = 'this is a test' + manifest = <<-MANIFEST + file { '#{environmentpath}/#{tmp_environment}/manifests/site.pp': + ensure => file, + content => ' + \$test_path = \$facts["networking"]["fqdn"] ? #{tmp_file} + file { \$test_path: + content => @(UTF8) + #{file_contents} + | UTF8 + } + ', + } + MANIFEST + apply_manifest_on(master, manifest, :catch_failures => true) + end + + step 'run agent(s)' do + with_puppet_running_on(master, {}) do + agents.each do |agent| + #Run agent once to create new File resource + step 'Run agent once to create new File resource' do + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), :acceptable_exit_codes => 2) + end + + #Verify the file resource is created + step 'Verify the file resource is created' do + on(agent, "cat '#{tmp_file[agent_to_fqdn(agent)]}'").stdout do |file_result| + assert_equal(file_contents, file_result, 'file contents did not match expected contents') + end + end + + #Delete the file + step 'Delete the file' do + on(agent, "rm '#{tmp_file[agent_to_fqdn(agent)]}'", :accept_all_exit_codes => true) + end + + #Run agent to correct the file's absence + step 'Run agent to correct the files absence' do + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), :acceptable_exit_codes => 2) + end + + #Verify the file resource is created + step 'Verify the file resource is created' do + on(agent, "cat '#{tmp_file[agent_to_fqdn(agent)]}'").stdout do |file_result| + assert_equal(file_contents, file_result, 'file contents did not match expected contents') + end + end + end + end + end + + # Open last_run_report.yaml + step 'Check report' do + agents.each do |agent| + on(agent, puppet('config print statedir')) do |command_result| + report_path = command_result.stdout.chomp + '/last_run_report.yaml' + on(agent, "cat '#{report_path}'").stdout do |report_contents| + + yaml_data = YAML::parse(report_contents) + # Remove any Ruby class tags from the yaml + yaml_data.root.each do |o| + if o.respond_to?(:tag=) and o.tag != nil and o.tag.start_with?("!ruby") + o.tag = nil + end + end + report_yaml = yaml_data.to_ruby + + file_resource_details = report_yaml["resource_statuses"]["File[#{tmp_file[agent_to_fqdn(agent)]}]"] + assert(file_resource_details.has_key?("corrective_change"), 'corrective_change key is missing') + corrective_change_value = file_resource_details["corrective_change"] + assert_equal(true, corrective_change_value, 'corrective_change flag should be true') + end + end + end + end + +end diff --git a/acceptance/tests/reports/corrective_change_via_puppet.rb b/acceptance/tests/reports/corrective_change_via_puppet.rb new file mode 100644 index 00000000000..d7cf7d564c9 --- /dev/null +++ b/acceptance/tests/reports/corrective_change_via_puppet.rb @@ -0,0 +1,120 @@ +test_name "C98094 - a resource changed via Puppet manifest will not be reported as a corrective change" do + + require 'yaml' + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/agent_fqdn_utils' + extend Puppet::Acceptance::AgentFqdnUtils + + tag 'audit:high', + 'audit:integration', +
'audit:refactor', # Uses a server currently, but is testing agent report + 'broken:images', + 'server' + + test_file_name = File.basename(__FILE__, '.*') + tmp_environment = mk_tmp_environment_with_teardown(master, test_file_name) + tmp_file = {} + + original_test_data = 'this is my original important data' + modified_test_data = 'this is my modified important data' + + agents.each do |agent| + tmp_file[agent_to_fqdn(agent)] = agent.tmpfile(tmp_environment) + end + + teardown do + # Remove all traces of the last used environment + agents.each do |agent| + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + + step 'clean out produced resources' do + agents.each do |agent| + if tmp_file.has_key?(agent_to_fqdn(agent)) && tmp_file[agent_to_fqdn(agent)] != '' + on(agent, "rm '#{tmp_file[agent_to_fqdn(agent)]}'", :accept_all_exit_codes => true) + end + end + end + end + + def create_manifest_for_file_resource(file_resource, file_contents, environment_name) + manifest = <<-MANIFEST + file { '#{environmentpath}/#{environment_name}/manifests/site.pp': + ensure => file, + content => ' + \$test_path = \$facts["networking"]["fqdn"] ? #{file_resource} + file { \$test_path: + content => @(UTF8) + #{file_contents} + | UTF8 + } + ', + } + MANIFEST + apply_manifest_on(master, manifest, :catch_failures => true) + end + + step 'create file resource in site.pp' do + create_manifest_for_file_resource(tmp_file, original_test_data, tmp_environment) + end + + step 'run agent(s) to create the new resource' do + with_puppet_running_on(master, {}) do + agents.each do |agent| + step 'Run agent once to create new File resource' do + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), :acceptable_exit_codes => 2) + end + + step 'Verify the file resource is created' do + on(agent, "cat '#{tmp_file[agent_to_fqdn(agent)]}'").stdout do |file_contents| + assert_equal(original_test_data, file_contents, 'file contents did not match expected contents') + end + end + end + + step 'Change the manifest for the resource' do + create_manifest_for_file_resource(tmp_file, modified_test_data, tmp_environment) + end + + agents.each do |agent| + step 'Run agent a 2nd time to change the File resource' do + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), :acceptable_exit_codes => 2) + end + + step 'Verify the file resource is created' do + on(agent, "cat '#{tmp_file[agent_to_fqdn(agent)]}'").stdout do |file_contents| + assert_equal(modified_test_data, file_contents, 'file contents did not match expected contents') + end + end + end + end + end + + # Open last_run_report.yaml + step 'Check report' do + agents.each do |agent| + on(agent, puppet('config print statedir')) do |command_result| + report_path = command_result.stdout.chomp + '/last_run_report.yaml' + on(agent, "cat '#{report_path}'").stdout do |report_contents| + + yaml_data = YAML::parse(report_contents) + # Remove any Ruby class tags from the yaml + yaml_data.root.each do |o| + if o.respond_to?(:tag=) and o.tag != nil and o.tag.start_with?("!ruby") + o.tag = nil + end + end + report_yaml = yaml_data.to_ruby + file_resource_details = report_yaml["resource_statuses"]["File[#{tmp_file[agent_to_fqdn(agent)]}]"] + assert(file_resource_details.has_key?("corrective_change"), 'corrective_change key is missing') + corrective_change_value = file_resource_details["corrective_change"] + assert_equal(false, corrective_change_value, 'corrective_change flag for the changed resource should be false') 
+ end + end + end + end +end diff --git a/acceptance/tests/reports/finalized_on_cycle.rb b/acceptance/tests/reports/finalized_on_cycle.rb deleted file mode 100644 index ad830de2342..00000000000 --- a/acceptance/tests/reports/finalized_on_cycle.rb +++ /dev/null @@ -1,39 +0,0 @@ -test_name "Reports are finalized on resource cycles" -# PUP-4548: Skip Windows until PUP-4547 can be resolved. -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CommandUtils - -check_script = < Notify['bar'] -} - -notify { 'bar': - require => Notify['foo'] -} -MANIFEST - -agents.each do |agent| - tmpdir = agent.tmpdir('report_finalized') - check = "#{tmpdir}/check_report.rb" - manifest = "#{tmpdir}/manifest.pp" - report = agent.puppet['lastrunreport'] - - create_remote_file(agent, check, check_script) - - # We can't use apply_manifest_on here because we can't tell it not - # to fail the test when it encounters a cyclic manifest. - create_remote_file(agent, manifest, cyclic_manifest) - on(agent, puppet("apply", manifest), :acceptable_exit_codes => [1]) - result = on(agent, "#{ruby_command(agent)} #{check} #{report}", :acceptable_exit_codes => [0,1]) - fail_test("Report was not finalized") if result.exit_code == 1 -end diff --git a/acceptance/tests/reports/submission.rb b/acceptance/tests/reports/submission.rb index 154b827877a..b08cb863d7b 100644 --- a/acceptance/tests/reports/submission.rb +++ b/acceptance/tests/reports/submission.rb @@ -1,10 +1,13 @@ test_name "Report submission" +tag 'audit:high', + 'audit:integration' + if master.is_pe? require "time" def puppetdb - puppetdb = hosts.detect { |h| h['roles'].include?('database') } + hosts.detect { |h| h['roles'].include?('database') } end def sleep_until_queue_empty(timeout=60) @@ -21,7 +24,7 @@ def sleep_until_queue_empty(timeout=60) sleep 1 end end - rescue Timeout::Error => e + rescue Timeout::Error raise "Queue took longer than allowed #{timeout} seconds to empty" end end @@ -32,7 +35,7 @@ def query_last_report_time_on(agent) require "json" puppetdb_url = URI("http://localhost:8080/v3/reports") - puppetdb_url.query = URI.escape(%Q{query=["=","certname","#{agent}"]}) + puppetdb_url.query = CGI.escape(%Q{query=["=","certname","#{agent}"]}) result = Net::HTTP.get(puppetdb_url) json = JSON.load(result) puts json.first["receive-time"] @@ -48,7 +51,7 @@ def query_last_report_time_on(agent) with_puppet_running_on(master, {}) do agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}")) + on(agent, puppet('agent', "-t")) sleep_until_queue_empty @@ -69,9 +72,9 @@ def query_last_report_time_on(agent) with_puppet_running_on(master, :main => { :reportdir => testdir, :reports => 'store' }) do agents.each do |agent| - on(agent, puppet('agent', "-t --server #{master}")) + on(agent, puppet('agent', "-t")) - on master, "grep -q #{agent} #{testdir}/*/*" + on master, "grep -q #{agent.node_name} #{testdir}/*/*" end end diff --git a/acceptance/tests/resource/cron/should_allow_changing_parameters.rb b/acceptance/tests/resource/cron/should_allow_changing_parameters.rb deleted file mode 100644 index 224d1407fe2..00000000000 --- a/acceptance/tests/resource/cron/should_allow_changing_parameters.rb +++ /dev/null @@ -1,66 +0,0 @@ -test_name "Cron: should allow changing parameters after creation" -confine :except, :platform => 'windows' -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - - 
-agents.each do |agent| - step "ensure the user exist via puppet" - setup agent - - step "Cron: basic - verify that it can be created" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/false", user => "tstuser", hour => "*", minute => [1], ensure => present,}') do - assert_match( /ensure: created/, result.stdout, "err: #{agent}") - end - run_cron_on(agent,:list,'tstuser') do - assert_match(/.bin.false/, result.stdout, "err: #{agent}") - end - - step "Cron: allow changing command" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/true", user => "tstuser", hour => "*", minute => [1], ensure => present,}') do - assert_match(/command changed '.bin.false'.* to '.bin.true'/, result.stdout, "err: #{agent}") - end - run_cron_on(agent,:list,'tstuser') do - assert_match(/1 . . . . .bin.true/, result.stdout, "err: #{agent}") - end - - step "Cron: allow changing time" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/true", user => "tstuser", hour => "1", minute => [1], ensure => present,}') do - assert_match(/hour: defined 'hour' as '1'/, result.stdout, "err: #{agent}") - end - run_cron_on(agent,:list,'tstuser') do - assert_match(/1 1 . . . .bin.true/, result.stdout, "err: #{agent}") - end - - step "Cron: allow changing time(array)" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/true", user => "tstuser", hour => ["1","2"], minute => [1], ensure => present,}') do - assert_match(/hour: hour changed '1'.* to '1,2'/, result.stdout, "err: #{agent}") - end - run_cron_on(agent,:list,'tstuser') do - assert_match(/1 1,2 . . . .bin.true/, result.stdout, "err: #{agent}") - end - - step "Cron: allow changing time(array modification)" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/true", user => "tstuser", hour => ["3","2"], minute => [1], ensure => present,}') do - assert_match(/hour: hour changed '1,2'.* to '3,2'/, result.stdout, "err: #{agent}") - end - run_cron_on(agent,:list,'tstuser') do - assert_match(/1 3,2 . . . .bin.true/, result.stdout, "err: #{agent}") - end - step "Cron: allow changing time(array modification to *)" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/true", user => "tstuser", hour => "*", minute => "*", ensure => present,}') do - assert_match(/minute: undefined 'minute' from '1'/,result.stdout, "err: #{agent}") - assert_match(/hour: undefined 'hour' from '3,2'/,result.stdout, "err: #{agent}") - end - run_cron_on(agent,:list,'tstuser') do - assert_match(/\* \* . . . .bin.true/, result.stdout, "err: #{agent}") - end - -end diff --git a/acceptance/tests/resource/cron/should_be_idempotent.rb b/acceptance/tests/resource/cron/should_be_idempotent.rb deleted file mode 100644 index ea738385ac0..00000000000 --- a/acceptance/tests/resource/cron/should_be_idempotent.rb +++ /dev/null @@ -1,31 +0,0 @@ -test_name "Cron: check idempotency" -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - - -agents.each do |agent| - step "ensure the user exist via puppet" - setup agent - - step "Cron: basic - verify that it can be created" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/true", user => "tstuser", hour => "*", minute => [1], ensure => present,}') do - assert_match( /ensure: created/, result.stdout, "err: #{agent}") - end - run_cron_on(agent,:list,'tstuser') do - assert_match(/. . . . . 
.bin.true/, result.stdout, "err: #{agent}") - end - - step "Cron: basic - should not create again" - apply_manifest_on(agent, 'cron { "myjob": command => "/bin/true", user => "tstuser", hour => "*", minute => [1], ensure => present,}') do - assert_no_match( /ensure: created/, result.stdout, "err: #{agent}") - end -end diff --git a/acceptance/tests/resource/cron/should_create_cron.rb b/acceptance/tests/resource/cron/should_create_cron.rb deleted file mode 100644 index 2f68ef4be5a..00000000000 --- a/acceptance/tests/resource/cron/should_create_cron.rb +++ /dev/null @@ -1,29 +0,0 @@ -test_name "should create cron" -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - -agents.each do |host| - step "ensure the user exist via puppet" - setup host - - step "apply the resource on the host using puppet resource" - on(host, puppet_resource("cron", "crontest", "user=tstuser", - "command=/bin/true", "ensure=present")) do - assert_match(/created/, stdout, "Did not create crontab for tstuser on #{host}") - end - - step "verify that crontab -l contains what you expected" - run_cron_on(host, :list, 'tstuser') do - assert_match(/\* \* \* \* \* \/bin\/true/, stdout, "Incorrect crontab for tstuser on #{host}") - end - -end diff --git a/acceptance/tests/resource/cron/should_match_existing.rb b/acceptance/tests/resource/cron/should_match_existing.rb deleted file mode 100755 index f7f64233a79..00000000000 --- a/acceptance/tests/resource/cron/should_match_existing.rb +++ /dev/null @@ -1,31 +0,0 @@ -test_name "puppet should match existing job" -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - -agents.each do |host| - step "ensure the user exist via puppet" - setup host - - step "Create the existing cron job by hand..." - run_cron_on(host,:add,'tstuser',"* * * * * /bin/true") - - step "Apply the resource on the host using puppet resource" - on(host, puppet_resource("cron", "crontest", "user=tstuser", - "command=/bin/true", "ensure=present")) do - assert_match(/present/, stdout, "Failed creating crontab for tstuser on #{host}") - end - - step "Verify that crontab -l contains what you expected" - run_cron_on(host, :list, 'tstuser') do - assert_match(/\* \* \* \* \* \/bin\/true/, stdout, "Did not find crontab for tstuser on #{host}") - end -end diff --git a/acceptance/tests/resource/cron/should_remove_cron.rb b/acceptance/tests/resource/cron/should_remove_cron.rb deleted file mode 100755 index 777411f2d4e..00000000000 --- a/acceptance/tests/resource/cron/should_remove_cron.rb +++ /dev/null @@ -1,32 +0,0 @@ -test_name "puppet should remove a crontab entry as expected" -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - -agents.each do |host| - step "ensure the user exist via puppet" - setup host - - step "create the existing job by hand..." 
- run_cron_on(host,:add,'tstuser',"* * * * * /bin/true") - - step "apply the resource on the host using puppet resource" - on(host, puppet_resource("cron", "crontest", "user=tstuser", - "command=/bin/true", "ensure=absent")) do - assert_match(/crontest\D+ensure:\s+removed/, stdout, "Didn't remove crobtab entry for tstuser on #{host}") - end - - step "verify that crontab -l contains what you expected" - run_cron_on(host, :list, 'tstuser') do - assert_no_match(/\/bin\/true/, stderr, "Error: Found entry for tstuser on #{host}") - end - -end diff --git a/acceptance/tests/resource/cron/should_remove_leading_and_trailing_whitespace.rb b/acceptance/tests/resource/cron/should_remove_leading_and_trailing_whitespace.rb deleted file mode 100644 index 49c9e58c9cf..00000000000 --- a/acceptance/tests/resource/cron/should_remove_leading_and_trailing_whitespace.rb +++ /dev/null @@ -1,37 +0,0 @@ -test_name "(#656) leading and trailing whitespace in cron entries should should be stripped" -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - -agents.each do |host| - step "create user account for testing cron entries" - setup host - - step "apply the resource on the host using puppet resource" - on(host, puppet_resource("cron", "crontest", "user=tstuser", "command=' date > /dev/null '", "ensure=present")) do - assert_match(/created/, stdout, "Did not create crontab for tstuser on #{host}") - end - - step "verify the added crontab entry has stripped whitespace" - run_cron_on(host, :list, 'tstuser') do - assert_match(/\* \* \* \* \* date > .dev.null/, stdout, "Incorrect crontab for tstuser on #{host}") - end - - step "apply the resource with trailing whitespace and check nothing happened" - on(host, puppet_resource("cron", "crontest", "user=tstuser", "command='date > /dev/null '", "ensure=present")) do - assert_no_match(/ensure: created/, stdout, "Rewrote the line with trailing space in crontab for tstuser on #{host}") - end - - step "apply the resource with leading whitespace and check nothing happened" - on(host, puppet_resource("cron", "crontest", "user=tstuser", "command=' date > /dev/null'", "ensure=present")) do - assert_no_match(/ensure: created/, stdout, "Rewrote the line with trailing space in crontab for tstuser on #{host}") - end -end diff --git a/acceptance/tests/resource/cron/should_remove_matching.rb b/acceptance/tests/resource/cron/should_remove_matching.rb deleted file mode 100755 index fd83182a8d6..00000000000 --- a/acceptance/tests/resource/cron/should_remove_matching.rb +++ /dev/null @@ -1,33 +0,0 @@ -test_name "puppet should remove a crontab entry based on command matching" -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - -agents.each do |host| - step "ensure the user exist via puppet" - setup host - - step "create the existing job by hand..." 
- run_cron_on(host,:add,'tstuser',"* * * * * /bin/true") - - step "Remove cron resource" - on(host, puppet_resource("cron", "bogus", "user=tstuser", - "command=/bin/true", "ensure=absent")) do - assert_match(/bogus\D+ensure: removed/, stdout, "Removing cron entry failed for tstuser on #{host}") - end - - step "verify that crontab -l contains what you expected" - run_cron_on(host,:list,'tstuser') do - count = stdout.scan("/bin/true").length - fail_test "found /bin/true the wrong number of times (#{count})" unless count == 0 - end - -end diff --git a/acceptance/tests/resource/cron/should_update_existing.rb b/acceptance/tests/resource/cron/should_update_existing.rb deleted file mode 100755 index a10516511a5..00000000000 --- a/acceptance/tests/resource/cron/should_update_existing.rb +++ /dev/null @@ -1,35 +0,0 @@ -test_name "puppet should update existing crontab entry" -confine :except, :platform => 'windows' - -require 'puppet/acceptance/common_utils' -extend Puppet::Acceptance::CronUtils - -teardown do - step "Cron: cleanup" - agents.each do |agent| - clean agent - end -end - -agents.each do |host| - step "ensure the user exist via puppet" - setup host - - step "create the existing job by hand..." - run_cron_on(host,:add,'tstuser',"* * * * * /bin/true") - - step "verify that crontab -l contains what you expected" - run_cron_on(host,:list,'tstuser') do - assert_match(/\* \* \* \* \* \/bin\/true/, stdout, "Didn't find correct crobtab entry for tstuser on #{host}") - end - - step "apply the resource change on the host" - on(host, puppet_resource("cron", "crontest", "user=tstuser", "command=/bin/true", "ensure=present", "hour='0-6'")) do - assert_match(/hour\s+=>\s+\['0-6'\]/, stdout, "Modifying cron entry failed for tstuser on #{host}") - end - - step "verify that crontab -l contains what you expected" - run_cron_on(host,:list,'tstuser') do - assert_match(/\* 0-6 \* \* \* \/bin\/true/, stdout, "Didn't find correctly modified time entry in crobtab entry for tstuser on #{host}") - end -end diff --git a/acceptance/tests/resource/exec/accept_array_commands.rb b/acceptance/tests/resource/exec/accept_array_commands.rb new file mode 100644 index 00000000000..2086618db0f --- /dev/null +++ b/acceptance/tests/resource/exec/accept_array_commands.rb @@ -0,0 +1,23 @@ +test_name "Be able to execute array commands" do + tag 'audit:high', + 'audit:acceptance' + + agents.each do |agent| + if agent.platform =~ /windows/ + cmd = ['C:\Windows\System32\cmd.exe', '/c', 'echo', '*'] + else + cmd = ['/bin/echo', '*'] + end + + exec_manifest = <<~MANIFEST + exec { "test exec": + command => #{cmd}, + logoutput => true, + } + MANIFEST + + apply_manifest_on(agent, exec_manifest) do |output| + assert_match('Notice: /Stage[main]/Main/Exec[test exec]/returns: *', output.stdout) + end + end +end diff --git a/acceptance/tests/resource/exec/accept_multi-line_commands.rb b/acceptance/tests/resource/exec/accept_multi-line_commands.rb index 443c7d2ca88..d1a77b33edd 100644 --- a/acceptance/tests/resource/exec/accept_multi-line_commands.rb +++ b/acceptance/tests/resource/exec/accept_multi-line_commands.rb @@ -1,5 +1,8 @@ test_name "Be able to execute multi-line commands (#9996)" confine :except, :platform => 'windows' +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' agents.each do |agent| temp_file_name = agent.tmpfile('9996-multi-line-commands') @@ -20,8 +23,8 @@ apply_manifest_on agent, test_manifest - on(agent, "cat #{temp_file_name}") do - assert_equal(expected_results, stdout, 
"Unexpected result for host '#{agent}'") + on(agent, "cat #{temp_file_name}") do |result| + assert_equal(expected_results, result.stdout, "Unexpected result for host '#{agent}'") end on(agent, "rm -f #{temp_file_name}") diff --git a/acceptance/tests/resource/exec/should_accept_large_output.rb b/acceptance/tests/resource/exec/should_accept_large_output.rb new file mode 100644 index 00000000000..91f8005dfd2 --- /dev/null +++ b/acceptance/tests/resource/exec/should_accept_large_output.rb @@ -0,0 +1,29 @@ +test_name "tests that puppet correctly captures large and empty output." +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' + +agents.each do |agent| + testfile = agent.tmpfile('should_accept_large_output') + + # Generate >64KB file to exceed pipe buffer. + lorem_ipsum = < ['/bin', '/usr/bin', 'C:/cygwin32/bin', 'C:/cygwin64/bin', 'C:/cygwin/bin'], logoutput => true}") do |result| + fail_test "didn't seem to run the command" unless + result.stdout.include? 'executed successfully' unless agent['locale'] == 'ja' + fail_test "didn't print output correctly" unless + result.stdout.lines.select {|line| line =~ /\/returns:/}.count == 4097 + end + + apply_manifest_on(agent, "exec {'echo': path => ['/bin', '/usr/bin', 'C:/cygwin32/bin', 'C:/cygwin64/bin', 'C:/cygwin/bin'], logoutput => true}") do |result| + fail_test "didn't seem to run the command" unless + result.stdout.include? 'executed successfully' unless agent['locale'] == 'ja' + end +end diff --git a/acceptance/tests/resource/exec/should_not_run_command_creates.rb b/acceptance/tests/resource/exec/should_not_run_command_creates.rb index 3518d24b0fd..de827a097ea 100644 --- a/acceptance/tests/resource/exec/should_not_run_command_creates.rb +++ b/acceptance/tests/resource/exec/should_not_run_command_creates.rb @@ -1,4 +1,7 @@ test_name "should not run command creates" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' agents.each do |agent| touch = agent.tmpfile('touched') @@ -12,9 +15,9 @@ on agent, "touch #{touch} && rm -f #{donottouch}" step "test using puppet apply" - apply_manifest_on(agent, manifest) do + apply_manifest_on(agent, manifest) do |result| fail_test "looks like the thing executed, which it shouldn't" if - stdout.include? 'executed successfully' + result.stdout.include? 'executed successfully' end step "verify the file didn't get created" @@ -26,9 +29,9 @@ step "test using puppet resource" on(agent, puppet_resource('exec', "test#{Time.new.to_i}", "command='#{agent.touch(donottouch)}'", - "creates='#{touch}'")) do + "creates='#{touch}'")) do |result| fail_test "looks like the thing executed, which it shouldn't" if - stdout.include? 'executed successfully' + result.stdout.include? 'executed successfully' end step "verify the file didn't get created the second time" diff --git a/acceptance/tests/resource/exec/should_run_bad_command.rb b/acceptance/tests/resource/exec/should_run_bad_command.rb new file mode 100644 index 00000000000..bd591117a99 --- /dev/null +++ b/acceptance/tests/resource/exec/should_run_bad_command.rb @@ -0,0 +1,70 @@ +test_name "tests that puppet can run badly written scripts that fork and inherit descriptors" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' + +def sleepy_daemon_script(agent) + if agent['platform'] =~ /win/ + # Windows uses a shorter sleep, because it's expected to wait until the end. 
+ return < true}") do |result| + fail_test "didn't seem to run the command" unless + result.stdout.include? 'executed successfully' unless agent['locale'] == 'ja' + end +end + diff --git a/acceptance/tests/resource/exec/should_run_command.rb b/acceptance/tests/resource/exec/should_run_command.rb index eff86ccb4d4..cdce50ac045 100644 --- a/acceptance/tests/resource/exec/should_run_command.rb +++ b/acceptance/tests/resource/exec/should_run_command.rb @@ -1,9 +1,13 @@ test_name "tests that puppet correctly runs an exec." # original author: Dan Bode --daniel 2010-12-23 +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' + def before(agent) step "file to be touched should not exist." - touched = agent.tmpfile('test-exec') + agent.tmpfile('test-exec') end def after(agent, touched) @@ -16,18 +20,16 @@ def after(agent, touched) agents.each do |agent| touched = before(agent) - apply_manifest_on(agent, "exec {'test': command=>'#{agent.touch(touched)}'}") do + apply_manifest_on(agent, "exec {'test': command=>'#{agent.touch(touched)}'}") do |result| fail_test "didn't seem to run the command" unless - stdout.include? 'executed successfully' + result.stdout.include? 'executed successfully' unless agent['locale'] == 'ja' end after(agent, touched) touched = before(agent) - on(agent, puppet_resource('-d', 'exec', 'test', "command='#{agent.touch(touched)}'}")) do + on(agent, puppet_resource('-d', 'exec', 'test', "command='#{agent.touch(touched)}'}")) do |result| fail_test "didn't seem to run the command" unless - stdout.include? 'executed successfully' + result.stdout.include? 'executed successfully' unless agent['locale'] == 'ja' end after(agent, touched) end - - diff --git a/acceptance/tests/resource/exec/should_run_command_as_user.rb b/acceptance/tests/resource/exec/should_run_command_as_user.rb new file mode 100644 index 00000000000..6a3d2a3350b --- /dev/null +++ b/acceptance/tests/resource/exec/should_run_command_as_user.rb @@ -0,0 +1,58 @@ +test_name "The exec resource should be able to run commands as a different user" do + confine :except, :platform => 'windows' + + tag 'audit:high', + 'audit:acceptance' + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::BeakerUtils + + def random_username + "pl#{rand(999999).to_i}" + end + + def exec_resource_manifest(params = {}) + default_params = { + :logoutput => true, + :path => '/usr/bin:/usr/sbin:/bin:/sbin', + :command => 'echo Hello' + } + params = default_params.merge(params) + + params_str = params.map do |param, value| + value_str = value.to_s + value_str = "'#{value_str}'" if value.is_a?(String) + " #{param} => #{value_str}" + end.join(",\n") + + <<-MANIFEST +exec { 'run_test_command': + #{params_str} +} +MANIFEST + end + + agents.each do |agent| + username = random_username + + # Create our user. Ensure that we start with a clean slate. + agent.user_absent(username) + agent.user_present(username) + teardown { agent.user_absent(username) } + + tmpdir = agent.tmpdir("forbidden") + on(agent, "chmod 700 #{tmpdir}") + + step "Runs the command even when the user doesn't have permissions to access the pwd" do + # Can't use apply_manifest_on here because that does not take the :cwd + # as an option. 
+ tmpfile = agent.tmpfile("exec_user_perms_manifest") + create_remote_file(agent, tmpfile, exec_resource_manifest(user: username)) + on(agent, "cd #{tmpdir} && puppet apply #{tmpfile} --detailed-exitcodes", acceptable_exit_codes: [0, 2]) + end + + step "Runs the command even when the user doesn't have permission to access the specified cwd" do + apply_manifest_on(agent, exec_resource_manifest(user: username, cwd: tmpdir), catch_failures: true) + end + end +end diff --git a/acceptance/tests/resource/exec/should_run_command_in_cwd.rb b/acceptance/tests/resource/exec/should_run_command_in_cwd.rb new file mode 100644 index 00000000000..a59c8763dc9 --- /dev/null +++ b/acceptance/tests/resource/exec/should_run_command_in_cwd.rb @@ -0,0 +1,179 @@ +test_name "The Exec resource should run commands in the specified cwd" do + tag 'audit:high', + 'audit:acceptance' + confine :except, :platform => /debian-12-amd64/ # PUP-12020 + + require 'puppet/acceptance/windows_utils' + extend Puppet::Acceptance::WindowsUtils + + # Useful utility that converts a string literal + # to a regex. We do a lot of assertions on file + # paths here that we need to escape, so this is + # a nice way of making the code more readable. + def to_regex(str) + Regexp.new(Regexp.escape(str)) + end + + def exec_resource_manifest(command, params = {}) + default_params = { + :command => command + } + params = default_params.merge(params) + + params_str = params.map do |param, value| + value_str = value.to_s + # Single quote the strings in case our value is a Windows + # path + value_str = "'#{value_str}'" if value.is_a?(String) + + " #{param} => #{value_str}" + end.join(",\n") + + <<-MANIFEST + exec { 'run_test_command': + #{params_str} + } + MANIFEST + end + + def assert_file_on(host, filepath, failure_comment) + if host.platform =~ /windows/ + cmd = "cmd.exe /c \"type #{filepath.gsub('/', '\\')}\"" + else + cmd = "test -f #{filepath}" + end + on(host, cmd, :acceptable_exit_codes => [0, 1]) do |result| + assert_equal(0, result.exit_code, failure_comment) + end + end + + agents.each do |agent| + testdir = agent.tmpdir("mock_testdir") + if agent.platform =~ /windows/ + path = 'C:\Windows\System32' + echo_to = 'cmd.exe /c echo testing >' + cat = 'cmd.exe /c type' + non_existant_dir = 'C:\does_not_exist' + origin_working_dir = on(agent, 'cmd.exe /c echo %CD%').stdout.chomp + else + path = '/usr/bin:/usr/sbin:/bin:/sbin' + echo_to = 'echo testing >' + cat = 'cat' + non_existant_dir = '/does_not_exist' + origin_working_dir = on(agent, 'pwd').stdout.chomp + end + + step "clean current working directory" do + on(agent, "rm -f cwd_test*") + end + + step "Defaults to the current directory if the CWD option is not provided" do + apply_manifest_on(agent, exec_resource_manifest("#{echo_to} cwd_test1", {:path => path}), :catch_failures => true) + assert_file_on(agent, File.join(origin_working_dir, 'cwd_test1'), 'Exec did not create file in origin pwd, exec resource not defaulting to pwd when no :cwd option is given') + end + + step "Runs the command in the user specified CWD" do + apply_manifest_on(agent, exec_resource_manifest("#{echo_to} cwd_test2", {:cwd => testdir, :path => path}), :catch_failures => true) + assert_file_on(agent, File.join(testdir, 'cwd_test2'), 'Exec did not create file in test directory, exec resource not using :cwd given') + end + + step "Errors if the user specified CWD does not exist" do + apply_manifest_on(agent, exec_resource_manifest("#{echo_to} cwd_test3", {cwd: non_existant_dir, :path => path}), :expect_failures => 
true) do |result| + assert_equal(4, result.exit_code, "Exec manifest still executed with non-existent :cwd") + end + end + + # "onlyif" testing will require some form of runnable test in the testdir for the + # onlyif clause to actually execute. The runnable test we will use is attempting to + # 'cat' an unqualified file that will only exist in the testdir + create_remote_file(agent, File.join(testdir, 'testdir_onlyif.txt'), 'testing') + + step 'Runs a "check" command (:onlyif or :unless) in the user specified CWD' do + apply_manifest_on(agent, exec_resource_manifest("#{echo_to} cwd_test4", {cwd: testdir, :path => path, :onlyif => "#{cat} testdir_onlyif.txt"}), :expect_changes => true) + assert_file_on(agent, File.join(testdir, 'cwd_test4'), 'Exec did not create file in test directory, exec resource not using :cwd given') + end + + step 'Does not run the exec if the "check" command (:onlyif or :unless) fails' do + apply_manifest_on(agent, exec_resource_manifest("#{echo_to} cwd_test5", {cwd: testdir, :path => path, :onlyif => "foobar"}), :expect_failures => true) do |result| + assert_equal(4, result.exit_code, "Exec manifest still executed with failed :onlyif clause") + end + end + + tmpdir_noaccess = agent.tmpdir("mock_dir") + create_remote_file(agent, File.join(tmpdir_noaccess, 'noaccess.txt'), 'foobar') + username = "pl#{rand(999999).to_i}" + + # The next two steps set up to test running with a CWD that the user does not have access to. + # The setup for the test creates 1. a new user and 2. a new directory that the new user does + # not have access to. + step "Setup user for 'no access' test" do + agent.user_present(username) + if agent.platform =~ /solaris/ + # for some reason applications of 'user_present' on solaris 10 don't manage the homedir correctly, so just + # force a puppet apply to manage the user + on agent, puppet_resource('user', username, "ensure=present managehome=true home=/export/home/#{username}") + # we need to create the user directory ourselves in order for solaris users to successfully login + on(agent, "mkdir /export/home/#{username} && chown -R #{username} /export/home/#{username}") + elsif agent.platform =~ /osx/ + # we need to create the user directory ourselves in order for macos users to successfully login + on(agent, "mkdir /Users/#{username} && chown -R #{username}:80 /Users/#{username}") + elsif agent.platform =~ /debian|ubuntu|sles/ + # we need to create the user directory ourselves in order for deb users to successfully login + on(agent, "mkdir /home/#{username} && chown -R #{username} /home/#{username}") + end + teardown { agent.user_absent(username) } + end + + tmpdir_noaccess = agent.tmpdir("mock_noaccess") + create_remote_file(agent, File.join(tmpdir_noaccess, 'noaccess.txt'), 'foobar') + + step "Setup restricted access directory for 'no access' test" do + if agent.platform =~ /windows/ + deny_administrator_access_to(agent, tmpdir_noaccess) + deny_administrator_access_to(agent, File.join(tmpdir_noaccess, 'noaccess.txt')) + else + if agent.platform =~ /osx/ + # This is a little nuts, but on MacOS the tmpdir returned from agent.tmpdir is located in + # a directory that users other than root can't even access, i.e. other users won't have access + # to either the noaccess dir itself (which we want) _or the tmpdir root it's located in_. This is + # a problem since it will look to puppet like the noaccess dir doesn't exist at all, and so we + # can't count on any reliable failure since we want a return indicating no access, not a missing directory.
+ # + # To get around this for MacOS platforms we simply use the new user's homedir as the 'tmpdir' and + # put the noaccess dir there. + on(agent, "mkdir /Users/#{username}/noaccess_test && cp #{tmpdir_noaccess}/noaccess.txt /Users/#{username}/noaccess_test && chmod -R 600 /Users/#{username}/noaccess_test") + tmpdir_noaccess = "/Users/#{username}/noaccess_test" + end + # remove permissions for all other users other than root, which should force puppet to fail when running as another user + on(agent, "chmod -R 600 #{tmpdir_noaccess}") + end + end + + step "Errors if the user does not have access to the specified CWD" do + manifest_path = agent.tmpfile('apply_manifest.pp') + create_remote_file(agent, manifest_path, exec_resource_manifest("#{cat} noaccess.txt", {:cwd => tmpdir_noaccess, :path => path})) + if agent.platform =~ /windows/ + on(agent, "cmd.exe /c \"puppet apply #{manifest_path} --detailed-exitcodes\"", :acceptable_exit_codes => [4]) do |result| + assert_equal(4, result.exit_code, "Exec manifest still executed inside restricted directory", ) + end + elsif agent.platform =~ /osx/ + # on MacOS we need to copy the manifest to run to the user's home dir and give the user ownership. otherwise puppet won't run on it. + on(agent, "cp #{manifest_path} /Users/#{username}/noaccess_manifest.pp && chown #{username}:80 /Users/#{username}/noaccess_manifest.pp") + on(agent, "su - #{username} -c \"/opt/puppetlabs/bin/puppet apply /Users/#{username}/noaccess_manifest.pp --detailed-exitcodes\"", :acceptable_exit_codes => [4]) do |result| + assert_equal(4, result.exit_code, "Exec manifest still executed inside restricted directory") + end + else + on(agent, "chown #{username} #{manifest_path}") + if agent.platform =~ /solaris|aix/ + on(agent, "su - #{username} -c \"/opt/puppetlabs/bin/puppet apply #{manifest_path} --detailed-exitcodes\"", :acceptable_exit_codes => [4]) do |result| + assert_equal(4, result.exit_code, "Exec manifest still executed inside restricted directory") + end + else + on(agent, "su #{username} -c \"/opt/puppetlabs/bin/puppet apply #{manifest_path} --detailed-exitcodes\"", :acceptable_exit_codes => [4]) do |result| + assert_equal(4, result.exit_code, "Exec manifest still executed inside restricted directory") + end + end + end + end + end +end diff --git a/acceptance/tests/resource/exec/should_set_environment_variables.rb b/acceptance/tests/resource/exec/should_set_environment_variables.rb new file mode 100644 index 00000000000..8da766fe067 --- /dev/null +++ b/acceptance/tests/resource/exec/should_set_environment_variables.rb @@ -0,0 +1,103 @@ +test_name "The Exec resource should set user-specified environment variables" do + tag 'audit:high', + 'audit:acceptance' + + # Would be nice to parse the actual values from puppet_output, + # but that would require some complicated matching since + # puppet_output contains other stuff. + def assert_env_var_values(puppet_output, expected_values) + expected_values.each do |env_var, value| + assert_match(/#{env_var}=#{value}/, puppet_output, "Expected '#{env_var}=#{value}' to be printed as part of the output!") + end + end + + agents.each do |agent| + # Calculate some top-level variables/functions we + # will need for our tests. 
+ unless agent.platform =~ /windows/ + path = '/usr/bin:/usr/sbin:/bin:/sbin' + print_env_vars = lambda do |*env_vars| + env_vars_str = env_vars.map do |env_var| + "#{env_var}=$#{env_var}" + end.join(" ") + + "echo #{env_vars_str}" + end + else + # Powershell's directory is dependent on what version of Powershell is + # installed on the system (e.g. v1.0, v2.0), so we need to programmatically + # calculate the executable's directory to add to our PATH variable. + powershell_path = on(agent, "cmd.exe /c where powershell.exe").stdout.chomp + *powershell_dir, _ = powershell_path.split('\\') + powershell_dir = powershell_dir.join('\\') + + path = "C:\Windows\System32;#{powershell_dir}" + print_env_vars = lambda do |*env_vars| + env_vars_str = env_vars.map do |env_var| + "#{env_var}=$env:#{env_var}" + end + + "powershell.exe \"Write-Host -NoNewLine #{env_vars_str}\"" + end + end + + # Easier to read than a def. The def. would require us + # to specify the host as a param. in order to get the path + # and print_cwd command, which is unnecessary clutter. + exec_resource_manifest = lambda do |params = {}| + default_params = { + :logoutput => true, + :path => path + } + params = default_params.merge(params) + + params_str = params.map do |param, value| + value_str = value.to_s + # Single quote the strings in case our value is a Windows + # path + value_str = "'#{value_str}'" if value.is_a?(String) + + " #{param} => #{value_str}" + end.join(",\n") + + <<-MANIFEST + exec { 'run_test_command': + #{params_str} + } +MANIFEST + end + + step 'Passes the user-specified environment variables into the command' do + manifest = exec_resource_manifest.call( + command: print_env_vars.call('ENV_VAR_ONE', 'ENV_VAR_TWO'), + environment: ['ENV_VAR_ONE=VALUE_ONE', 'ENV_VAR_TWO=VALUE_TWO'] + ) + + apply_manifest_on(agent, manifest) do |result| + assert_env_var_values(result.stdout, ENV_VAR_ONE: 'VALUE_ONE', ENV_VAR_TWO: 'VALUE_TWO') + end + end + + step "Temporarily overrides previously set environment variables" do + manifest = exec_resource_manifest.call( + command: print_env_vars.call('ENV_VAR_ONE'), + environment: ['ENV_VAR_ONE=VALUE_OVERRIDE'] + ) + + apply_manifest_on(agent, manifest, environment: { 'ENV_VAR_ONE' => 'VALUE' }) do |result| + assert_env_var_values(result.stdout, ENV_VAR_ONE: 'VALUE_OVERRIDE') + end + end + + step "Temporarily overrides previously set environment variables even if the passed-in value is empty" do + manifest = exec_resource_manifest.call( + command: print_env_vars.call('ENV_VAR_ONE'), + environment: ['ENV_VAR_ONE='] + ) + + apply_manifest_on(agent, manifest, environment: { 'ENV_VAR_ONE' => 'VALUE' }) do |result| + assert_env_var_values(result.stdout, ENV_VAR_ONE: '') + end + end + end +end diff --git a/acceptance/tests/resource/exec/should_set_path.rb b/acceptance/tests/resource/exec/should_set_path.rb index e1b5aa09d4e..22691b38a08 100644 --- a/acceptance/tests/resource/exec/should_set_path.rb +++ b/acceptance/tests/resource/exec/should_set_path.rb @@ -1,4 +1,7 @@ test_name "the path statement should work to locate commands" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' agents.each do |agent| file = agent.tmpfile('touched-should-set-path') diff --git a/acceptance/tests/resource/file/ascii_diff_output_content_attribute.rb b/acceptance/tests/resource/file/ascii_diff_output_content_attribute.rb new file mode 100644 index 00000000000..f955ac55882 --- /dev/null +++ b/acceptance/tests/resource/file/ascii_diff_output_content_attribute.rb @@ -0,0 
+1,42 @@ +test_name "ASCII Diff Output of Content Attribute" do + tag 'audit:high', + 'audit:acceptance' + + sha256 = Digest::SHA256.new + agents.each do |agent| + step 'When handling ASCII files' do + target = agent.tmpfile('content_ASCII_file_test') + initial_text = 'Initial Text' + initial_text_sha_checksum = sha256.hexdigest(initial_text) + updated_text = 'Updated Text' + updated_text_sha_checksum = sha256.hexdigest(updated_text) + on agent, puppet('config', 'set', 'diff', 'diff') + + step 'Ensure the test environment is clean' do + on agent, "rm -f #{target}" + end + + teardown do + on agent, "rm -f #{target}" + end + + step 'Create ASCII file using content' do + manifest = "file { '#{target}': content => '#{initial_text}', ensure => present , checksum => 'sha256'}" + + on(agent, puppet('apply'), :stdin => manifest) do |result| + assert_match(/ensure: defined content as '{sha256}#{initial_text_sha_checksum}'/, result.stdout, "#{agent}: checksum of ASCII file not matched") + end + end + + step 'Update existing ASCII file content' do + manifest = "file { '#{target}': content => '#{updated_text}', ensure => present , checksum => 'sha256'}" + + on(agent, puppet('apply','--show_diff'), :stdin => manifest) do |result| + assert_match(/content: content changed '{sha256}#{initial_text_sha_checksum}' to '{sha256}#{updated_text_sha_checksum}'/, result.stdout, "#{agent}: checksum of ASCII file not matched after update") + assert_match(/^- ?#{initial_text}$/, result.stdout, "#{agent}: initial text not found in diff") + assert_match(/^\+ ?#{updated_text}$/, result.stdout, "#{agent}: updated text not found in diff") + end + end + end + end +end diff --git a/acceptance/tests/resource/file/bin_diff_output_content_attribute.rb b/acceptance/tests/resource/file/bin_diff_output_content_attribute.rb new file mode 100644 index 00000000000..d15d270f5a0 --- /dev/null +++ b/acceptance/tests/resource/file/bin_diff_output_content_attribute.rb @@ -0,0 +1,68 @@ +test_name "Binary Diff Output of Content Attribute" do + tag 'audit:high', + 'audit:acceptance' + + # cannot test binary diff on windows2012r2_ja-64-1 + # Error: Could not write report for afire-lien.delivery.puppetlabs.net at C:/ProgramData/PuppetLabs/puppet/cache/reports/afire-lien.delivery.puppetlabs.net/201912041455.yaml: anchor value must contain alphanumerical characters only + # Error: Could not send report: anchor value must contain alphanumerical characters only + confine :except, {}, hosts.select { |host| host[:platform]=~ /windows/ && host[:locale] == 'ja' } + + sha256 = Digest::SHA256.new + agents.each do |agent| + step 'When handling binary files' do + target = agent.tmpfile('content_binary_file_test') + initial_bin_data = "\xc7\xd1\xfc\x84" + initial_base64_data = Base64.encode64(initial_bin_data).chomp + initial_sha_checksum = sha256.hexdigest(initial_bin_data) + updated_bin_data = "\xc7\xd1\xfc\x85" + updated_base64_data = Base64.encode64(updated_bin_data).chomp + updated_sha_checksum = sha256.hexdigest(updated_bin_data) + on(agent, puppet('config', 'set', 'diff', 'diff')) + + agent_default_external_encoding=nil + on(agent, "#{ruby_command(agent)} -e \"puts Encoding.default_external\"") do |result| + agent_default_external_encoding = result.stdout.chomp + end + + if agent_default_external_encoding && agent_default_external_encoding != Encoding.default_external + begin + initial_bin_data=initial_bin_data.force_encoding(agent_default_external_encoding).encode(Encoding.default_external) + 
updated_bin_data=updated_bin_data.force_encoding(agent_default_external_encoding).encode(Encoding.default_external) + rescue Encoding::InvalidByteSequenceError + #depending on agent_default_external_encoding, the conversion may fail, but this should not be a problem + end + end + + teardown do + on agent, "rm -f #{target}" + end + + step 'Ensure the test environment is clean' do + on agent, "rm -f #{target}" + end + + step 'Create binary file using content' do + manifest = "file { '#{target}': content => Binary('#{initial_base64_data}'), ensure => present , checksum => 'sha256'}" + + on(agent, puppet('apply'), :stdin => manifest) do |result| + assert_match(/ensure: defined content as '{sha256}#{initial_sha_checksum}'/, result.stdout, "#{agent}: checksum of binary file not matched") + end + end + + step 'Update existing binary file content' do + manifest = "file { '#{target}': content => Binary('#{updated_base64_data}'), ensure => present , checksum => 'sha256'}" + + on(agent, puppet('apply','--show_diff'), :stdin => manifest) do |result| + assert_match(/content: content changed '{sha256}#{initial_sha_checksum}' to '{sha256}#{updated_sha_checksum}'/, result.stdout, "#{agent}: checksum of binary file not matched after update") + refute_match(/content: Received a Log attribute with invalid encoding:/, result.stdout, "#{agent}: Received a Log attribute with invalid encoding") + if initial_bin_data.valid_encoding? && updated_bin_data.valid_encoding? + assert_match(/^- ?#{initial_bin_data}$/, result.stdout, "#{agent}: initial utf-8 data not found in binary diff") + assert_match(/^\+ ?#{updated_bin_data}$/, result.stdout, "#{agent}: updated utf-8 data not found in binary diff") + else + assert_match(/Binary files #{target} and .* differ/, result.stdout, "#{agent}: Binary file diff notice not matched") + end + end + end + end + end +end diff --git a/acceptance/tests/resource/file/content_attribute.rb b/acceptance/tests/resource/file/content_attribute.rb index da4ada2f548..6b3c820a40e 100644 --- a/acceptance/tests/resource/file/content_attribute.rb +++ b/acceptance/tests/resource/file/content_attribute.rb @@ -1,4 +1,7 @@ test_name "Content Attribute" +tag 'audit:high', + 'audit:refactor', # Use block stype test_name + 'audit:acceptance' agents.each do |agent| target = agent.tmpfile('content_file_test') @@ -8,60 +11,68 @@ step "Content Attribute: using raw content" - checksums = ['md5', 'md5lite', 'sha256', 'sha256lite'] + checksums_fips = ['sha256', 'sha256lite'] + checksums_no_fips = ['sha256', 'sha256lite', 'md5', 'md5lite'] + + if on(agent, facter("fips_enabled")).stdout =~ /true/ + checksums = checksums_fips + else + checksums = checksums_no_fips + end + manifest = "file { '#{target}': content => 'This is the test file content', ensure => present }" manifest += checksums.collect {|checksum_type| "file { '#{target+checksum_type}': content => 'This is the test file content', ensure => present, checksum => #{checksum_type} }" }.join("\n") - apply_manifest_on agent, manifest do + apply_manifest_on(agent, manifest) do |result| checksums.each do |checksum_type| - assert_no_match(/content changed/, stdout, "#{agent}: shouldn't have overwrote #{target+checksum_type}") + refute_match(/content changed/, result.stdout, "#{agent}: shouldn't have overwrote #{target+checksum_type}") end end - on agent, "cat #{target}" do - assert_match(/This is the test file content/, stdout, "File content not matched on #{agent}") + on(agent, "cat #{target}") do |result| + assert_match(/This is the test file content/, 
result.stdout, "File content not matched on #{agent}") unless agent['locale'] == 'ja' end step "Content Attribute: illegal timesteps" ['mtime', 'ctime'].each do |checksum_type| manifest = "file { '#{target+checksum_type}': content => 'This is the test file content', ensure => present, checksum => #{checksum_type} }" - apply_manifest_on agent, manifest, :acceptable_exit_codes => [1] do - assert_match(/Error: Validation of File\[#{target+checksum_type}\] failed: You cannot specify content when using checksum '#{checksum_type}'/, stderr, "#{agent}: expected failure") + apply_manifest_on(agent, manifest, :acceptable_exit_codes => [1]) do |result| + assert_match(/Error: Validation of File\[#{target+checksum_type}\] failed: You cannot specify content when using checksum '#{checksum_type}'/, result.stderr, "#{agent}: expected failure") unless agent['locale'] == 'ja' end end step "Ensure the test environment is clean" - on agent, "rm -f #{target}" + on(agent, "rm -f #{target}") step "Content Attribute: using a checksum from filebucket" - on agent, "echo 'This is the checksum file contents' > #{target}" + on(agent, "echo 'This is the checksum file contents' > #{target}") step "Backup file into the filebucket" - on agent, puppet_filebucket("backup --local #{target}") + on(agent, puppet_filebucket("backup --local #{target}")) - bucketdir="not set" - on agent, puppet_filebucket("--configprint bucketdir") do - bucketdir = stdout.chomp - end + step "Modify file to force apply to retrieve file from local clientbucket" + on(agent, "echo 'This is the modified file contents' > #{target}") + + dir = on(agent, puppet_filebucket("--configprint clientbucketdir")).stdout.chomp - manifest = %Q| + sha256_manifest = %Q| filebucket { 'local': - path => '#{bucketdir}', + path => '#{dir}', } file { '#{target}': - content => '{md5}18571d3a04b2bb7ccfdbb2c44c72caa9', - ensure => present, - backup => local, + ensure => present, + content => '{sha256}3b9238769b033b48073267b8baea00fa51c598dc14081da51f2e510c37c46a28', + backup => local, } | step "Applying Manifest on Agent" - apply_manifest_on agent, manifest + apply_manifest_on agent, sha256_manifest step "Validate filebucket checksum file contents" - on agent, "cat #{target}" do - assert_match(/This is the checksum file content/, stdout, "File content not matched on #{agent}") + on(agent, "cat #{target}") do |result| + assert_match(/This is the checksum file content/, result.stdout, "File content not matched on #{agent}") unless agent['locale'] == 'ja' end end diff --git a/acceptance/tests/resource/file/handle_fifo_files.rb b/acceptance/tests/resource/file/handle_fifo_files.rb new file mode 100644 index 00000000000..4a19f913a0e --- /dev/null +++ b/acceptance/tests/resource/file/handle_fifo_files.rb @@ -0,0 +1,63 @@ +test_name "should be able to handle fifo files" +tag 'audit:high', + 'audit:acceptance' +confine :except, :platform => /windows/ + +def ensure_content_to_file_manifest(file_path, ensure_value) + return <<-MANIFEST + file { "#{file_path}": + ensure => #{ensure_value}, + content => "Hello World" + } + MANIFEST +end + +agents.each do |agent| + tmp_path = agent.tmpdir("tmpdir") + fifo_path = "#{tmp_path}/myfifo" + + teardown do + agent.rm_rf(tmp_path) + end + + step "create fifo" do + on(agent, "mkfifo #{fifo_path}") + end + + step "check that fifo got created" do + on(agent, "ls -l #{fifo_path}") do |result| + assert(result.stdout.start_with?('p')) + end + end + + step "puppet ensures given fifo is present" do + apply_manifest_on(agent, 
ensure_content_to_file_manifest(fifo_path, 'present'), :acceptable_exit_codes => [2]) do |result| + assert_match(/Warning: .+ Ensure set to :present but file type is fifo so no content will be synced/, result.stderr) + end + end + + step "check that given file is still a fifo" do + on(agent, "ls -l #{fifo_path}") do |result| + assert(result.stdout.start_with?('p')) + end + end + + step "puppet ensures given fifo is a regular file" do + apply_manifest_on(agent, ensure_content_to_file_manifest(fifo_path, 'file'), :acceptable_exit_codes => [0]) do |result| + assert_match(/Notice: .+\/myfifo\]\/ensure: defined content as '{/, result.stdout) + refute_match(/Warning: .+ Ensure set to :present but file type is fifo so no content will be synced/, result.stderr) + end + end + + step "check that given fifo is now a regular file" do + on(agent, "ls -l #{fifo_path}") do |result| + assert(result.stdout.start_with?('-')) + end + end + + step "check that given file now has desired content" do + on(agent, "cat #{fifo_path}") do |result| + assert_equal('Hello World', result.stdout) + end + end +end diff --git a/acceptance/tests/resource/file/handle_fifo_files_when_recursing.rb b/acceptance/tests/resource/file/handle_fifo_files_when_recursing.rb new file mode 100644 index 00000000000..ee071886eec --- /dev/null +++ b/acceptance/tests/resource/file/handle_fifo_files_when_recursing.rb @@ -0,0 +1,69 @@ +test_name "should be able to handle fifo files when recursing" +tag 'audit:high', + 'audit:acceptance' +confine :except, :platform => /windows/ + +def ensure_owner_recursively_manifest(path, owner_value) + return <<-MANIFEST + file { "#{path}": + ensure => present, + recurse => true, + owner => #{owner_value} + } + MANIFEST +end + +agents.each do |agent| + initial_owner = '' + random_user = "pl#{rand(999).to_i}" + + tmp_path = agent.tmpdir("tmpdir") + fifo_path = "#{tmp_path}/myfifo" + + teardown do + agent.rm_rf(tmp_path) + end + + step "create fifo file" do + on(agent, "mkfifo #{fifo_path}") + on(agent, puppet("resource user #{random_user} ensure=absent")) + end + + step "check that fifo file got created" do + on(agent, "ls -l #{fifo_path}") do |result| + assert(result.stdout.start_with?('p')) + initial_owner = result.stdout.split[2] + end + end + + step "create a new user" do + on(agent, puppet("resource user #{random_user} ensure=present")) + end + + step "puppet ensures '#{random_user}' as owner of path" do + apply_manifest_on(agent, ensure_owner_recursively_manifest(tmp_path, random_user), :acceptable_exit_codes => [0]) do |result| + assert_match(/#{tmp_path}\]\/owner: owner changed '#{initial_owner}' to '#{random_user}'/, result.stdout) + refute_match(/Error: .+ Failed to generate additional resources using ‘eval_generate’: Cannot manage files of type fifo/, result.stderr) + end + end + + step "check that given file is still a fifo" do + on(agent, "ls -l #{fifo_path}") do |result| + assert(result.stdout.start_with?('p')) + end + end + + step "check ownership of fifo file" do + on(agent, "ls -l #{fifo_path}") do |result| + user = result.stdout.split[2] + assert_equal(random_user, user) + end + end + + step "check ownership of tmp folder" do + on(agent, "ls -ld #{tmp_path}") do |result| + user = result.stdout.split[2] + assert_equal(random_user, user) + end + end +end diff --git a/acceptance/tests/resource/file/should_create_directory.rb b/acceptance/tests/resource/file/should_create_directory.rb old mode 100755 new mode 100644 index ebe8441cbca..a8eb76170c2 --- 
a/acceptance/tests/resource/file/should_create_directory.rb +++ b/acceptance/tests/resource/file/should_create_directory.rb @@ -1,17 +1,43 @@ test_name "should create directory" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' agents.each do |agent| target = agent.tmpfile("create-dir") + teardown do + step "clean up after the test run" do + on(agent, "rm -rf #{target}") + end + end - step "clean up the system before we begin" - on(agent, "rm -rf #{target}") + step "verify we can create a directory" do + on(agent, puppet_resource("file", target, 'ensure=directory')) + end - step "verify we can create a directory" - on(agent, puppet_resource("file", target, 'ensure=directory')) + step "verify the directory was created" do + on(agent, "test -d #{target}") + end - step "verify the directory was created" - on(agent, "test -d #{target}") + dir_manifest = agent.tmpfile("dir-resource") + create_remote_file(agent, dir_manifest, <<-PP) + $dir='#{target}' + $same_dir='#{target}/' + file {$dir: + ensure => directory, + } + file { $same_dir: + ensure => directory, + } + PP - step "clean up after the test run" - on(agent, "rm -rf #{target}") + step "verify we can't create same dir resource with a trailing slash" do + options = {:acceptable_exit_codes => [1]} + on(agent, puppet_apply("--noop #{dir_manifest}"), options) do |result| + unless agent['locale'] == 'ja' + assert_match('Cannot alias File', result.output, + 'duplicate directory resources did not fail properly') + end + end + end end diff --git a/acceptance/tests/resource/file/should_create_empty.rb b/acceptance/tests/resource/file/should_create_empty.rb old mode 100755 new mode 100644 index 4c16c06aa55..11811360556 --- a/acceptance/tests/resource/file/should_create_empty.rb +++ b/acceptance/tests/resource/file/should_create_empty.rb @@ -1,4 +1,7 @@ test_name "should create empty file for 'present'" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' agents.each do |agent| target = agent.tmpfile("empty") diff --git a/acceptance/tests/resource/file/should_create_symlink.rb b/acceptance/tests/resource/file/should_create_symlink.rb old mode 100755 new mode 100644 index fd24cb61423..364a322c274 --- a/acceptance/tests/resource/file/should_create_symlink.rb +++ b/acceptance/tests/resource/file/should_create_symlink.rb @@ -1,37 +1,65 @@ test_name "should create symlink" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' -message = 'hello world' -agents.each do |agent| - confine_block :to, :platform => 'windows' do - # symlinks are supported only on Vista+ (version 6.0 and higher) - on agents, facter('kernelmajversion') do - skip_test "Test not supported on this plaform" if stdout.chomp.to_f < 6.0 - end - end - - link = agent.tmpfile("symlink-link") - target = agent.tmpfile("symlink-target") +def message + 'hello world' +end +def reset_link_and_target(agent, link, target) step "clean up the system before we begin" on agent, "rm -rf #{target} #{link}" on agent, "echo '#{message}' > #{target}" +end - step "verify we can create a symlink" - on(agent, puppet_resource("file", link, "ensure=#{target}")) - +def verify_symlink(agent, link, target) step "verify the symlink was created" - on agent, "test -L #{link} && test -f #{link}" + on(agent, "test -L #{link} && test -f #{link}") step "verify the symlink points to a file" - on agent, "test -f #{target}" + on(agent, "test -f #{target}") step "verify the content is identical on both sides" - 
on(agent, "cat #{link}") do - fail_test "link missing content" unless stdout.include? message + on(agent, "cat #{link}") do |result| + fail_test "link missing content" unless result.stdout.include?(message) end - on(agent, "cat #{target}") do - fail_test "target missing content" unless stdout.include? message + on(agent, "cat #{target}") do |result| + fail_test "target missing content" unless result.stdout.include?(message) + end +end + +agents.each do |agent| + if agent.platform.variant == 'windows' + # symlinks are supported only on Vista+ (version 6.0 and higher) + on(agent, facter('kernelmajversion')) do |result| + skip_test "Test not supported on this platform" if result.stdout.chomp.to_f < 6.0 + end end + link_file = agent.tmpfile("symlink-link") + target_file = agent.tmpfile("symlink-target") + link_dir = agent.tmpdir("dir_symlink-link") + target_dir = agent.tmpdir("dir-symlink-target") + + reset_link_and_target(agent, link_file, target_file) + reset_link_and_target(agent, link_dir, target_dir) + + step "verify we can create a symlink with puppet resource" + on(agent, puppet_resource("file", "#{link_file}", "ensure=#{target_file}")) + verify_symlink(agent, link_file, target_file) + reset_link_and_target(agent, link_file, target_file) + + step "verify that 'links => manage' preserves a symlink" + apply_manifest_on(agent, "file { '#{link_file}': ensure => link, target => '#{target_file}', links => manage }") + verify_symlink(agent, link_file, target_file) + reset_link_and_target(agent, link_file, target_file) + + step "verify that 'links => manage' and 'recurse => true' preserves links in a directory" + on(agent, puppet_resource("file", target_dir, "ensure=directory")) + reset_link_and_target(agent, link_dir, "#{target_dir}/symlink-target") + apply_manifest_on(agent, "file { '#{link_dir}': ensure => directory, target => '#{target_dir}', links => manage, recurse => true }") + verify_symlink(agent, "#{link_dir}/symlink-target", "#{target_dir}/symlink-target") + step "clean up after the test run" - on agent, "rm -rf #{target} #{link}" + on agent, "rm -rf #{target_file} #{link_file} #{target_dir} #{link_dir}" end diff --git a/acceptance/tests/resource/file/should_default_mode.rb b/acceptance/tests/resource/file/should_default_mode.rb index ac581596166..56cccd52dcf 100644 --- a/acceptance/tests/resource/file/should_default_mode.rb +++ b/acceptance/tests/resource/file/should_default_mode.rb @@ -1,4 +1,7 @@ test_name "file resource: set default modes" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' def regexp_mode(mode) Regexp.new("mode\s*=>\s*'0?#{mode}'") @@ -10,8 +13,8 @@ def regexp_mode(mode) on(agent, "rm -rf #{parent}") step "puppet should set execute bit on readable directories" - on(agent, puppet_resource("file", parent, "ensure=directory", "mode=0644")) do - assert_match(regexp_mode(755), stdout) + on(agent, puppet_resource("file", parent, "ensure=directory", "mode=0644")) do |result| + assert_match(regexp_mode(755), result.stdout) end step "include execute bit on newly created directories" @@ -21,20 +24,20 @@ def regexp_mode(mode) step "exclude execute bit from newly created files" file = "#{parent}/file.txt" on(agent, "echo foobar > #{file}") - on(agent, "#{file}", :acceptable_exit_codes => (1..255)) do - assert_no_match(/foobar/, stdout) + on(agent, "#{file}", :acceptable_exit_codes => (1..255)) do |result| + refute_match(/foobar/, result.stdout) end - step "set execute git on file if explicitly specified" + step "set execute bit on file 
if explicitly specified" file_750 = "#{parent}/file_750.txt" - on(agent, puppet_resource("file", file_750, "ensure=file", "mode=0750")) do - assert_match(regexp_mode(750), stdout) + on(agent, puppet_resource("file", file_750, "ensure=file", "mode=0750")) do |result| + assert_match(regexp_mode(750), result.stdout) end step "don't set execute bit if directory not readable" dir_600 = "#{parent}/dir_600" - on(agent, puppet_resource("file", dir_600, "ensure=directory", "mode=0600")) do - assert_match(regexp_mode(700), stdout) # readable by owner, but not group + on(agent, puppet_resource("file", dir_600, "ensure=directory", "mode=0600")) do |result| + assert_match(regexp_mode(700), result.stdout) # readable by owner, but not group end on(agent, "rm -rf #{parent}") diff --git a/acceptance/tests/resource/file/should_remove_dir.rb b/acceptance/tests/resource/file/should_remove_dir.rb old mode 100755 new mode 100644 index ca8a003bb19..ae04d6cce43 --- a/acceptance/tests/resource/file/should_remove_dir.rb +++ b/acceptance/tests/resource/file/should_remove_dir.rb @@ -1,23 +1,26 @@ test_name "should remove directory, but force required" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' agents.each do |agent| target = agent.tmpdir("delete-dir") step "clean up the system before we begin" - on agent, "rm -rf #{target} ; mkdir -p #{target}" + on(agent, "rm -rf #{target} ; mkdir -p #{target}") step "verify we can't remove a directory without 'force'" - on(agent, puppet_resource("file", target, 'ensure=absent')) do + on(agent, puppet_resource("file", target, 'ensure=absent')) do |result| fail_test "didn't tell us that force was required" unless - stdout.include? "Not removing directory; use 'force' to override" + result.stdout.include? 
"Not removing directory; use 'force' to override" unless agent['locale'] == 'ja' end step "verify the directory still exists" - on agent, "test -d #{target}" + on(agent, "test -d #{target}") step "verify we can remove a directory with 'force'" on(agent, puppet_resource("file", target, 'ensure=absent', 'force=true')) step "verify that the directory is gone" - on agent, "test -d #{target}", :acceptable_exit_codes => [1] + on(agent, "test -d #{target}", :acceptable_exit_codes => [1]) end diff --git a/acceptance/tests/resource/file/should_remove_file.rb b/acceptance/tests/resource/file/should_remove_file.rb old mode 100755 new mode 100644 index 60cf63251f4..34caee0411a --- a/acceptance/tests/resource/file/should_remove_file.rb +++ b/acceptance/tests/resource/file/should_remove_file.rb @@ -1,4 +1,7 @@ test_name "should remove file" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' agents.each do |agent| target = agent.tmpfile('delete-file') diff --git a/acceptance/tests/resource/file/source_attribute.rb b/acceptance/tests/resource/file/source_attribute.rb index 24bb040e250..45a51bf3bdd 100644 --- a/acceptance/tests/resource/file/source_attribute.rb +++ b/acceptance/tests/resource/file/source_attribute.rb @@ -1,257 +1,301 @@ -test_name "The source attribute" -require 'puppet/acceptance/module_utils' -extend Puppet::Acceptance::ModuleUtils - -@target_file_on_windows = 'C:/windows/temp/source_attr_test' -@target_file_on_nix = '/tmp/source_attr_test' -@target_dir_on_windows = 'C:/windows/temp/source_attr_test_dir' -@target_dir_on_nix = '/tmp/source_attr_test_dir' - -checksums = [nil, 'md5', 'md5lite', 'sha256', 'sha256lite', 'ctime', 'mtime'] +test_name "The source attribute" do + require 'puppet/acceptance/module_utils' + extend Puppet::Acceptance::ModuleUtils + + tag 'audit:high', + 'audit:acceptance', + 'server' + + @target_file_on_windows = 'C:/windows/temp/source_attr_test' + @target_file_on_nix = '/tmp/source_attr_test' + @target_dir_on_windows = 'C:/windows/temp/source_attr_test_dir' + @target_dir_on_nix = '/tmp/source_attr_test_dir' + + # In case any of the hosts happens to be fips enabled we limit to the lowest + # common denominator. + checksums_fips = [nil, 'sha256', 'sha256lite', 'ctime', 'mtime'] + checksums_no_fips = [nil, 'sha256', 'sha256lite', 'md5', 'md5lite', 'ctime', 'mtime'] + + fips_host_present = hosts.any? { |host| on(host, facter("fips_enabled")).stdout =~ /true/ } + + if fips_host_present + checksums = checksums_fips + else + checksums = checksums_no_fips + end -orig_installed_modules = get_installed_modules_for_hosts hosts -teardown do - rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) - hosts.each do |host| - file_to_rm = host['platform'] =~ /windows/ ? @target_file_on_windows : @target_file_on_nix - dir_to_rm = host['platform'] =~ /windows/ ? @target_dir_on_windows : @target_dir_on_nix + orig_installed_modules = get_installed_modules_for_hosts hosts + teardown do + rm_installed_modules_from_hosts orig_installed_modules, (get_installed_modules_for_hosts hosts) + hosts.each do |host| + file_to_rm = host['platform'] =~ /windows/ ? @target_file_on_windows : @target_file_on_nix + dir_to_rm = host['platform'] =~ /windows/ ? 
@target_dir_on_windows : @target_dir_on_nix - checksums.each do |checksum_type| - on(host, "rm #{file_to_rm}#{checksum_type}", :acceptable_exit_codes => [0,1]) - on(host, "rm -r #{dir_to_rm}#{checksum_type}", :acceptable_exit_codes => [0,1]) + checksums.each do |checksum_type| + on(host, "rm #{file_to_rm}#{checksum_type}", :acceptable_exit_codes => [0,1]) + on(host, "rm -r #{dir_to_rm}#{checksum_type}", :acceptable_exit_codes => [0,1]) + end end end -end -step "Setup - create environment and test module" -# set directories -testdir = master.tmpdir('file_source_attr') -env_dir = "#{testdir}/environments" -prod_dir = "#{env_dir}/production" -manifest_dir = "#{prod_dir}/manifests" -manifest_file = "#{prod_dir}/manifests/site.pp" -module_dir = "#{prod_dir}/modules" -test_module_dir = "#{module_dir}/source_test_module" -test_module_manifests_dir = "#{test_module_dir}/manifests" -test_module_files_dir = "#{test_module_dir}/files" -mod_manifest_file = "#{test_module_manifests_dir}/init.pp" -mod_source_file = "#{test_module_files_dir}/source_file" -mod_source_dir = "#{test_module_files_dir}/source_dir" -mod_source_dir_file = "#{mod_source_dir}/source_dir_file" - -mod_source = ' the content is present' - -def mod_manifest_entry(checksum_type = nil) - checksum = if checksum_type then "checksum => #{checksum_type}," else "" end - manifest = < \\'#{@target_file_on_windows}#{checksum_type}\\', - default => \\'#{@target_file_on_nix}#{checksum_type}\\' - } - - file { $target_file#{checksum_type}: - source => \\'puppet:///modules/source_test_module/source_file\\', - #{checksum} - ensure => present - } - - $target_dir#{checksum_type} = $::kernel ? { - \\'windows\\' => \\'#{@target_dir_on_windows}#{checksum_type}\\', - default => \\'#{@target_dir_on_nix}#{checksum_type}\\' - } - - file { $target_dir#{checksum_type}: - source => \\'puppet:///modules/source_test_module/source_dir\\', - #{checksum} - ensure => directory, - recurse => true - } -EOF - manifest -end - -mod_manifest = < \\'#{master}\\', - path => false, -} - -File { backup => \\'main\\' } - -node default { - include source_test_module -} -EOF - -# apply manifests to setup environment and modules -apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) - File { - ensure => directory, - mode => '0755', - } + step "Setup - create environment and test module" + # set directories + testdir = master.tmpdir('file_source_attr') + env_dir = "#{testdir}/environments" + prod_dir = "#{env_dir}/production" + manifest_dir = "#{prod_dir}/manifests" + manifest_file = "#{prod_dir}/manifests/site.pp" + module_dir = "#{prod_dir}/modules" + test_module_dir = "#{module_dir}/source_test_module" + test_module_manifests_dir = "#{test_module_dir}/manifests" + test_module_files_dir = "#{test_module_dir}/files" + mod_manifest_file = "#{test_module_manifests_dir}/init.pp" + mod_source_file = "#{test_module_files_dir}/source_file" + mod_source_dir = "#{test_module_files_dir}/source_dir" + mod_source_dir_file = "#{mod_source_dir}/source_dir_file" + + mod_source = ' the content is present' + + def mod_manifest_entry(checksum_type = nil) + checksum = if checksum_type then "checksum => #{checksum_type}," else "" end + manifest = <<-EOF + $target_file#{checksum_type} = $::kernel ? 
{ + \\'windows\\' => \\'#{@target_file_on_windows}#{checksum_type}\\', + default => \\'#{@target_file_on_nix}#{checksum_type}\\' + } + + file { $target_file#{checksum_type}: + source => \\'puppet:///modules/source_test_module/source_file\\', + #{checksum} + ensure => present + } + + $target_dir#{checksum_type} = $::kernel ? { + \\'windows\\' => \\'#{@target_dir_on_windows}#{checksum_type}\\', + default => \\'#{@target_dir_on_nix}#{checksum_type}\\' + } + + file { $target_dir#{checksum_type}: + source => \\'puppet:///modules/source_test_module/source_dir\\', + #{checksum} + ensure => directory, + recurse => true + } + EOF + manifest + end - file { - '#{testdir}':; - '#{env_dir}':; - '#{prod_dir}':; - '#{manifest_dir}':; - '#{module_dir}':; - '#{test_module_dir}':; - '#{test_module_manifests_dir}':; - '#{test_module_files_dir}':; + mod_manifest = <<-EOF + class source_test_module { + #{checksums.collect { |checksum_type| mod_manifest_entry(checksum_type) }.join("\n")} } + EOF - file { '#{mod_manifest_file}': - ensure => file, - mode => '0644', - content => '#{mod_manifest}', + env_manifest = <<-EOF + filebucket { \\'main\\': + server => \\'#{master}\\', + path => false, } - file { '#{mod_source_file}': - ensure => file, - mode => '0644', - content => '#{mod_source}', - } + File { backup => \\'main\\' } - file { '#{mod_source_dir}': - ensure => directory, - mode => '0755' + node default { + include source_test_module } - - file { '#{mod_source_dir_file}': - ensure => file, - mode => '0644', - content => '#{mod_source}', + EOF + + # apply manifests to setup environment and modules + apply_manifest_on(master, <<-MANIFEST, :catch_failures => true) + File { + ensure => directory, + mode => '0755', + } + + file { + '#{testdir}':; + '#{env_dir}':; + '#{prod_dir}':; + '#{manifest_dir}':; + '#{module_dir}':; + '#{test_module_dir}':; + '#{test_module_manifests_dir}':; + '#{test_module_files_dir}':; + } + + file { '#{mod_manifest_file}': + ensure => file, + mode => '0644', + content => '#{mod_manifest}', + } + + file { '#{mod_source_file}': + ensure => file, + mode => '0644', + content => '#{mod_source}', + } + + file { '#{mod_source_dir}': + ensure => directory, + mode => '0755' + } + + file { '#{mod_source_dir_file}': + ensure => file, + mode => '0644', + content => '#{mod_source}', + } + + file { '#{manifest_file}': + ensure => file, + mode => '0644', + content => '#{env_manifest}', + } + MANIFEST + + step "When using a puppet:/// URI with a master/agent setup" + master_opts = { + 'main' => { + 'environmentpath' => "#{env_dir}", + }, } + with_puppet_running_on(master, master_opts, testdir) do + agents.each do |agent| + # accept an exit code of 2 which is returned if there are changes + step "create file the first run" + on(agent, puppet('agent', "--test"), :acceptable_exit_codes => [0,2]) do + file_to_check = agent['platform'] =~ /windows/ ? @target_file_on_windows : @target_file_on_nix + dir_to_check = agent['platform'] =~ /windows/ ? 
@target_dir_on_windows : @target_dir_on_nix + + checksums.each do |checksum_type| + on agent, "cat #{file_to_check}#{checksum_type}" do |result| + assert_match(/the content is present/, result.stdout, "Result file not created #{checksum_type}") + end + + on agent, "cat #{dir_to_check}#{checksum_type}/source_dir_file" do |result| + assert_match(/the content is present/, result.stdout, "Result file not created #{checksum_type}") + end + end + end - file { '#{manifest_file}': - ensure => file, - mode => '0644', - content => '#{env_manifest}', - } -MANIFEST - -step "When using a puppet:/// URI with a master/agent setup" -master_opts = { - 'main' => { - 'environmentpath' => "#{env_dir}", - }, -} -with_puppet_running_on(master, master_opts, testdir) do - agents.each do |agent| - # accept an exit code of 2 which is returned if there are changes - step "create file the first run" - on(agent, puppet('agent', "--test --server #{master}"), :acceptable_exit_codes => [0,2]) do - file_to_check = agent['platform'] =~ /windows/ ? @target_file_on_windows : @target_file_on_nix - dir_to_check = agent['platform'] =~ /windows/ ? @target_dir_on_windows : @target_dir_on_nix - - checksums.each do |checksum_type| - on agent, "cat #{file_to_check}#{checksum_type}" do - assert_match(/the content is present/, stdout, "Result file not created #{checksum_type}") + step "second run should not update file" + on(agent, puppet('agent', "--test"), :acceptable_exit_codes => [0,2]) do |result| + refute_match(/content changed.*(md5|sha256)/, result.stdout, "Shouldn't have overwritten any files") + + # When using ctime/mtime, the agent compares the values from its + # local file with the values on the master to determine if the + # file is insync or not. If during the first run, the agent + # creates the files, and the resulting ctime/mtime are still + # behind the times on the master, then the 2nd agent run will + # consider the file to not be insync, and will update it + # again. This process will repeat until the agent updates the + # file, and the resulting ctime/mtime are after the values on + # the master, at which point it will have converged. + if result.stdout =~ /content changed.*ctime/ + Log.warn "Agent did not converge using ctime" end - on agent, "cat #{dir_to_check}#{checksum_type}/source_dir_file" do - assert_match(/the content is present/, stdout, "Result file not created #{checksum_type}") + if result.stdout =~ /content changed.*mtime/ + Log.warn "Agent did not converge using mtime" end end end - step "second run should not update file" - on(agent, puppet('agent', "--test --server #{master}")) do - assert_no_match(/content changed/, stdout, "Shouldn't have overwrote any files") +=begin + # Disable flaky test until PUP-4115 is addressed. + step "touch files and verify they're updated with ctime/mtime" + # wait until we're not at the mtime of files on the agents + # this could be done cross-platform using Puppet, but a single puppet query is unlikely to be less than a second, + # and iterating over all agents would be much slower + sleep(1) + + on master, "touch #{mod_source_file} #{mod_source_dir_file}" + agents.each do |agent| + on(agent, puppet('agent', "--test"), :acceptable_exit_codes => [0,2]) do + file_to_check = agent['platform'] =~ /windows/ ? @target_file_on_windows : @target_file_on_nix + dir_to_check = agent['platform'] =~ /windows/ ? 
@target_dir_on_windows : @target_dir_on_nix + ['ctime', 'mtime'].each do |time_type| + assert_match(/File\[#{file_to_check}#{time_type}\]\/content: content changed/, stdout, "Should have updated files") + assert_match(/File\[#{dir_to_check}#{time_type}\/source_dir_file\]\/content: content changed/, stdout, "Should have updated files") + end + end end +=end end - step "touch files and verify they're updated with ctime/mtime" - # wait until we're not at the mtime of files on the agents - # this could be done cross-platform using Puppet, but a single puppet query is unlikely to be less than a second, - # and iterating over all agents would be much slower - sleep(1) - - on master, "touch #{mod_source_file} #{mod_source_dir_file}" + # TODO: Add tests for puppet:// URIs with multi-master/agent setups. + step "When using puppet apply" agents.each do |agent| - on(agent, puppet('agent', "--test --server #{master}"), :acceptable_exit_codes => [0,2]) do - file_to_check = agent['platform'] =~ /windows/ ? @target_file_on_windows : @target_file_on_nix - dir_to_check = agent['platform'] =~ /windows/ ? @target_dir_on_windows : @target_dir_on_nix - ['ctime', 'mtime'].each do |time_type| - assert_match(/File\[#{file_to_check}#{time_type}\]\/content: content changed/, stdout, "Should have updated files") - assert_match(/File\[#{dir_to_check}#{time_type}\/source_dir_file\]\/content: content changed/, stdout, "Should have updated files") - end + step "Setup testing local file sources" + + # create one larger manifest with all the files so we don't have to run + # puppet apply per each checksum_type + localsource_testdir = agent.tmpdir('local_source_file_test') + source = "#{localsource_testdir}/source_mod/files/source" + on agent, "mkdir -p #{File.dirname(source)}" + # don't put a 'z' in this content + source_content = 'Yay, this is the local file. I have to be bigger than 512 bytes so that my masters. yadda yadda yadda not a nice thing. lorem ipsem. alice bob went to fetch a pail of water. Lorem ipsum dolor sit amet, pede ipsum nam wisi lectus eget, sociis sed, commodo vitae velit eleifend. Vestibulum orci feugiat erat etiam pellentesque sed, imperdiet a integer nulla, mi tincidunt suscipit. Nec sed, mi tortor, in a consequat mattis proin scelerisque eleifend. In lectus magna quam. Magna quam vitae sociosqu. Adipiscing laoreet.' + create_remote_file agent, source, source_content + + local_apply_manifest = "" + target = {} + checksums.each do |checksum_type| + target[checksum_type] = "#{localsource_testdir}/target#{checksum_type}" + checksum = if checksum_type then "checksum => #{checksum_type}," else "" end + local_apply_manifest.concat("file { '#{target[checksum_type]}': source => '#{source}', ensure => present, #{checksum} }\n") end - end -end -# TODO: Add tests for puppet:// URIs with multi-master/agent setups. -step "When using puppet apply" -agents.each do |agent| - step "Setup testing local file sources" - - # create one larger manifest with all the files so we don't have to run - # puppet apply per each checksum_type - localsource_testdir = agent.tmpdir('local_source_file_test') - source = "#{localsource_testdir}/source_mod/files/source" - on agent, "mkdir -p #{File.dirname(source)}" - # don't put a 'z' in this content - source_content = 'Yay, this is the local file. I have to be bigger than 512 bytes so that my masters. yadda yadda yadda not a nice thing. lorem ipsem. alice bob went to fetch a pail of water. 
Lorem ipsum dolor sit amet, pede ipsum nam wisi lectus eget, sociis sed, commodo vitae velit eleifend. Vestibulum orci feugiat erat etiam pellentesque sed, imperdiet a integer nulla, mi tincidunt suscipit. Nec sed, mi tortor, in a consequat mattis proin scelerisque eleifend. In lectus magna quam. Magna quam vitae sociosqu. Adipiscing laoreet.' - create_remote_file agent, source, source_content - - local_apply_manifest = "" - target = {} - checksums.each do |checksum_type| - target[checksum_type] = "#{localsource_testdir}/target#{checksum_type}" - checksum = if checksum_type then "checksum => #{checksum_type}," else "" end - local_apply_manifest.concat("file { '#{target[checksum_type]}': source => '#{source}', ensure => present, #{checksum} }\n") - end + apply_manifest_on agent, local_apply_manifest - apply_manifest_on agent, local_apply_manifest + checksums.each do |checksum_type| + step "Using a local file path. #{checksum_type}" + on(agent, "cat #{target[checksum_type]}") do |result| + assert_match(/Yay, this is the local file./, result.stdout, "FIRST: File contents not matched on #{agent}") + end + end - checksums.each do |checksum_type| - step "Using a local file path. #{checksum_type}" - on agent, "cat #{target[checksum_type]}" do - assert_match(/Yay, this is the local file./, stdout, "FIRST: File contents not matched on #{agent}") + step "second run should not update any files" + apply_manifest_on(agent, local_apply_manifest) do |result| + refute_match(/content changed/, result.stdout, "Shouldn't have overwrote any files") end - end - step "second run should not update any files" - apply_manifest_on agent, local_apply_manifest do - assert_no_match(/content changed/, stdout, "Shouldn't have overwrote any files") - end + # changes in source file producing updates is tested elsewhere + step "subsequent run should not update file using lite if only after byte 512 is changed" + byte_after_md5lite = 513 + source_content[byte_after_md5lite] = 'z' + create_remote_file agent, source, source_content - # changes in source file producing updates is tested elsewhere - step "subsequent run should not update file using lite if only after byte 512 is changed" - byte_after_md5lite = 513 - source_content[byte_after_md5lite] = 'z' - create_remote_file agent, source, source_content - apply_manifest_on agent, "file { '#{localsource_testdir}/targetmd5lite': source => '#{source}', ensure => present, checksum => md5lite } file { '#{localsource_testdir}/targetsha256lite': source => '#{source}', ensure => present, checksum => sha256lite }" do - assert_no_match(/(content changed|defined content)/, stdout, "Shouldn't have overwrote any files") - end + if fips_host_present + apply_manifest_on(agent, "file { '#{localsource_testdir}/targetsha256lite': source => '#{source}', ensure => present, checksum => sha256lite }") do |result| + refute_match(/(content changed|defined content)/, result.stdout, "Shouldn't have overwrote any files") + end + else + apply_manifest_on(agent, "file { '#{localsource_testdir}/targetmd5lite': source => '#{source}', ensure => present, checksum => md5lite } file { '#{localsource_testdir}/targetsha256lite': source => '#{source}', ensure => present, checksum => sha256lite }") do |result| + refute_match(/(content changed|defined content)/, result.stdout, "Shouldn't have overwrote any files") + end + end - local_module_manifest = "" - checksums.each do |checksum_type| - on agent, "rm -rf #{target[checksum_type]}" - checksum = if checksum_type then "checksum => #{checksum_type}," else "" end - 
local_module_manifest.concat("file { '#{target[checksum_type]}': source => 'puppet:///modules/source_mod/source', ensure => present, #{checksum} }\n") - end + local_module_manifest = "" + checksums.each do |checksum_type| + on agent, "rm -rf #{target[checksum_type]}" + checksum = if checksum_type then "checksum => #{checksum_type}," else "" end + local_module_manifest.concat("file { '#{target[checksum_type]}': source => 'puppet:///modules/source_mod/source', ensure => present, #{checksum} }\n") + end + + localsource_test_manifest = agent.tmpfile('local_source_test_manifest') + create_remote_file agent, localsource_test_manifest, local_module_manifest + on agent, puppet( %{apply --modulepath=#{localsource_testdir} #{localsource_test_manifest}} ) - localsource_test_manifest = agent.tmpfile('local_source_test_manifest') - create_remote_file agent, localsource_test_manifest, local_module_manifest - on agent, puppet( %{apply --modulepath=#{localsource_testdir} #{localsource_test_manifest}} ) + checksums.each do |checksum_type| + step "Using a puppet:/// URI with checksum type: #{checksum_type}" + on(agent, "cat #{target[checksum_type]}") do |result| + assert_match(/Yay, this is the local file./, result.stdout, "FIRST: File contents not matched on #{agent}") + end + end - checksums.each do |checksum_type| - step "Using a puppet:/// URI with checksum type: #{checksum_type}" - on agent, "cat #{target[checksum_type]}" do - assert_match(/Yay, this is the local file./, stdout, "FIRST: File contents not matched on #{agent}") + step "second run should not update any files using apply with puppet:/// URI source" + on(agent, puppet( %{apply --modulepath=#{localsource_testdir} #{localsource_test_manifest}} )) do |result| + refute_match(/content changed/, result.stdout, "Shouldn't have overwrote any files") end end - step "second run should not update any files using apply with puppet:/// URI source" - on agent, puppet( %{apply --modulepath=#{localsource_testdir} #{localsource_test_manifest}} ) do - assert_no_match(/content changed/, stdout, "Shouldn't have overwrote any files") - end end diff --git a/acceptance/tests/resource/file/symbolic_modes.rb b/acceptance/tests/resource/file/symbolic_modes.rb index b10e6607527..8c558a76a8b 100644 --- a/acceptance/tests/resource/file/symbolic_modes.rb +++ b/acceptance/tests/resource/file/symbolic_modes.rb @@ -1,128 +1,130 @@ -test_name "file resource: symbolic modes" - -module FileModeAssertions - include Beaker::DSL::Assertions - - def assert_create(agent, manifest, path, expected_mode) - testcase.apply_manifest_on(agent, manifest) do - assert_match(/File\[#{Regexp.escape(path)}\]\/ensure: created/, testcase.stdout, "Failed to create #{path}") +test_name 'file resource: symbolic modes' do + confine :except, :platform => /^windows/ + confine :to, {}, hosts.select {|host| !host[:roles].include?('master')} + + tag 'audit:high', + 'audit:acceptance' + + require 'puppet/acceptance/temp_file_utils' + extend Puppet::Acceptance::TempFileUtils + + class FileSymlink + attr_reader :mode, :path, :start_mode, :symbolic_mode + + def initialize(base_dir, file_type, symbolic_mode, mode, start_mode=nil) + @base_dir = base_dir + @file_type = file_type + @symbolic_mode = symbolic_mode + @mode = mode + @start_mode = start_mode + + if @start_mode.nil? 
+ @path= "#{@base_dir}/#{@file_type}_#{@symbolic_mode}_#{@mode.to_s(8)}" + else + @path= "#{@base_dir}/#{@file_type}_#{@symbolic_mode}_#{@start_mode.to_s(8)}_#{@mode.to_s(8)}" + end end - assert_mode(agent, path, expected_mode) - end - - def assert_mode(agent, path, expected_mode) - current_mode = testcase.on(agent, "stat --format '%a' #{path}").stdout.chomp.to_i(8) - assert_equal(expected_mode, current_mode, "current mode #{current_mode.to_s(8)} doesn't match expected mode #{expected_mode.to_s(8)}") - end - - def assert_mode_change(agent, manifest, path, symbolic_mode, start_mode, expected_mode) - testcase.apply_manifest_on(agent, manifest) do - assert_match(/mode changed '#{'%04o' % start_mode}'.* to '#{'%04o' % expected_mode}'/, testcase.stdout, - "couldn't set mode to #{symbolic_mode}") + # does the mode of the file/directory change from start_mode to puppet apply + def mode_changes? + ! @start_mode.nil? && @start_mode != @mode end - assert_mode(agent, path, expected_mode) - end - - def assert_no_mode_change(agent, manifest) - testcase.apply_manifest_on(agent, manifest) do - assert_no_match(/mode changed/, testcase.stdout, "reapplied the symbolic mode change") + def get_manifest + "file { #{@path.inspect}: ensure => '#{@file_type}', mode => '#{@symbolic_mode}' }" end end -end -class ActionModeTest - include FileModeAssertions + class BaseTest + include Beaker::DSL::Assertions - attr_reader :testcase + def initialize(testcase, agent, base_dir) + @testcase = testcase + @agent = agent + @base_dir = base_dir + @file_list = [] + @directory_list = [] + end - def initialize(testcase, agent, basedir, symbolic_mode) - @testcase = testcase - @agent = agent - @basedir = basedir - @symbolic_mode = symbolic_mode + def assert_mode(agent, path, expected_mode) + permissions = @testcase.stat(agent, path) + assert_equal(expected_mode, permissions[2], "'#{path}' current mode #{permissions[2].to_s(8)} doesn't match expected mode #{expected_mode.to_s(8)}") + end - @file = "#{basedir}/file" - @dir = "#{basedir}/dir" + def manifest + manifest_array = (@file_list + @directory_list).map {|x| x.get_manifest} + @testcase.step(manifest_array) + manifest_array.join("\n") + end - testcase.on(agent, "rm -rf #{@file} #{@dir}") + def puppet_reapply + @testcase.apply_manifest_on(@agent, manifest) do |apply_result| + refute_match(/mode changed/, apply_result.stdout, "reapplied the symbolic mode change") + (@file_list + @directory_list).each do |file| + refute_match(/#{Regexp.escape(file.path)}/, apply_result.stdout, "Expected to not see '#{file.path}' in 'puppet apply' output") + end + end + end end - def get_manifest(path, type, symbolic_mode) - "file { #{path.inspect}: ensure => #{type}, mode => '#{symbolic_mode}' }" - end -end + class CreateTest < BaseTest -class CreatesModeTest < ActionModeTest - def initialize(testcase, agent, basedir, symbolic_mode) - super(testcase, agent, basedir, symbolic_mode) - end + def symlink_file(symbolic_mode, mode) + @file_list << FileSymlink.new(@base_dir, 'file', symbolic_mode, mode) + end - def assert_file_mode(expected_mode) - manifest = get_manifest(@file, 'file', @symbolic_mode) - assert_create(@agent, manifest, @file, expected_mode) - assert_no_mode_change(@agent, manifest) - end + def symlink_directory(symbolic_mode, mode) + @directory_list << FileSymlink.new(@base_dir, 'directory', symbolic_mode, mode) + end - def assert_dir_mode(expected_mode) - manifest = get_manifest(@dir, 'directory', @symbolic_mode) - assert_create(@agent, manifest, @dir, expected_mode) - 
assert_no_mode_change(@agent, manifest) + def puppet_apply + apply_result = @testcase.apply_manifest_on(@agent, manifest).stdout + (@file_list + @directory_list).each do |file| + assert_match(/File\[#{Regexp.escape(file.path)}\]\/ensure: created/, apply_result, "Failed to create #{file.path}") + assert_mode(@agent, file.path, file.mode) + end + end end -end - -class ModifiesModeTest < ActionModeTest - def initialize(testcase, agent, basedir, symbolic_mode, start_mode) - super(testcase, agent, basedir, symbolic_mode) - - @start_mode = start_mode - user = 'symbolictestuser' - group = 'symbolictestgroup' - agent.user_present(user) - agent.group_present(group) + class ModifyTest < BaseTest - testcase.on(agent, "touch #{@file} && chown #{user}:#{group} #{@file} && chmod #{start_mode.to_s(8)} #{@file}") - testcase.on(agent, "mkdir -p #{@dir} && chown #{user}:#{group} #{@dir} && chmod #{start_mode.to_s(8)} #{@dir}") - end - - def assert_file_mode(expected_mode) - manifest = get_manifest(@file, 'file', @symbolic_mode) - if @start_mode != expected_mode - assert_mode_change(@agent, manifest, @file, @symbolic_mode, @start_mode, expected_mode) + def symlink_file(symbolic_mode, start_mode, mode) + @file_list << FileSymlink.new(@base_dir, 'file', symbolic_mode, mode, start_mode) end - assert_no_mode_change(@agent, manifest) - end - def assert_dir_mode(expected_mode) - manifest = get_manifest(@dir, 'directory', @symbolic_mode) - if @start_mode != expected_mode - assert_mode_change(@agent, manifest, @dir, @symbolic_mode, @start_mode, expected_mode) + def symlink_directory(symbolic_mode, start_mode, mode) + @directory_list << FileSymlink.new(@base_dir, 'directory', symbolic_mode, mode, start_mode) end - assert_no_mode_change(@agent, manifest) - end -end - -class ModeTest - def initialize(testcase, agent, basedir) - @testcase = testcase - @agent = agent - @basedir = basedir - end - def assert_creates(symbolic_mode, file_mode, dir_mode) - creates = CreatesModeTest.new(@testcase, @agent, @basedir, symbolic_mode) - creates.assert_file_mode(file_mode) - creates.assert_dir_mode(dir_mode) - end + def create_starting_state + files = @file_list.collect {|x| "'#{x.path}'" } + directories = @directory_list.collect {|x| "'#{x.path}'" } + + @testcase.on(@agent, "touch #{files.join(' ')}") + @testcase.on(@agent, "mkdir -p #{directories.join(' ')}") + @testcase.on(@agent, "chown symuser:symgroup #{files.join(' ')} #{directories.join(' ')}") + cmd_list = [] + (@file_list + @directory_list).each do |file| + cmd_list << "chmod #{file.start_mode.to_s(8)} '#{file.path}'" + end + @testcase.on(@agent, cmd_list.join(' && ')) + end - def assert_modifies(symbolic_mode, start_mode, file_mode, dir_mode) - modifies = ModifiesModeTest.new(@testcase, @agent, @basedir, symbolic_mode, start_mode) - modifies.assert_file_mode(file_mode) - modifies.assert_dir_mode(dir_mode) + def puppet_apply + @testcase.step(manifest) + apply_result = @testcase.apply_manifest_on(@agent, manifest).stdout + @testcase.step(apply_result) + (@file_list + @directory_list).each do |file| + if file.mode_changes? 
+ assert_match(/File\[#{Regexp.escape(file.path)}.* mode changed '#{'%04o' % file.start_mode}'.* to '#{'%04o' % file.mode}'/, + apply_result, "couldn't set mode to #{file.symbolic_mode}") + else + refute_match(/#{Regexp.escape(file.path)}.*mode changed/, apply_result, "reapplied the symbolic mode change for file #{file.path}") + end + assert_mode(@agent, file.path, file.mode) + end + end end -end # For your reference: # 4000 the set-user-ID-on-execution bit @@ -149,7 +151,7 @@ def assert_modifies(symbolic_mode, start_mode, file_mode, dir_mode) # For directories, the set-gid bit can # only be set or cleared by using symbolic mode. -# From http://www.gnu.org/software/coreutils/manual/html_node/Symbolic-Modes.html#Symbolic-Modes +# From https://www.gnu.org/software/coreutils/manual/html_node/Symbolic-Modes.html#Symbolic-Modes # Users # u the user who owns the file; # g other users who are in the file's group; @@ -176,85 +178,151 @@ def assert_modifies(symbolic_mode, start_mode, file_mode, dir_mode) # l mandatory file and record locking refers to a file's ability to have its reading or writing # permissions locked while a program is accessing that file. # -agents.each do |agent| - if agent['platform'].include?('windows') - Log.warn("Pending: this does not currently work on Windows") - next + agents.each do |agent| + is_solaris = agent['platform'].include?('solaris') + + on(agent, puppet('resource user symuser ensure=present')) + on(agent, puppet('resource group symgroup ensure=present')) + base_dir_create = agent.tmpdir('symbolic-modes-create_test') + base_dir_modify = agent.tmpdir('symbolic-modes-modify_test') + + teardown do + on(agent, puppet('resource user symuser ensure=absent')) + on(agent, puppet('resource group symgroup ensure=absent')) + on(agent, "rm -rf '#{base_dir_create}' '#{base_dir_modify}'") + end + + create_test = CreateTest.new(self, agent, base_dir_create) + create_test.symlink_file('u=r', 00444) + create_test.symlink_file('u=w', 00244) + create_test.symlink_file('u=x', 00144) + create_test.symlink_file('u=rw', 00644) + create_test.symlink_file('u=rwx', 00744) + create_test.symlink_file('u=rwxt', 01744) + create_test.symlink_file('u=rwxs', 04744) + create_test.symlink_file('u=rwxts', 05744) + + create_test.symlink_file('ug=r', 00444) + create_test.symlink_file('ug=rw', 00664) + create_test.symlink_file('ug=rwx', 00774) + create_test.symlink_file('ug=rwxt', 01774) + create_test.symlink_file('ug=rwxs', 06774) + create_test.symlink_file('ug=rwxts', 07774) + + create_test.symlink_file('ugo=r', 00444) + create_test.symlink_file('ugo=rw', 00666) + create_test.symlink_file('ugo=rwx', 00777) + create_test.symlink_file('ugo=rwxt', 01777) + #create_test.symlink_file('ugo=rwxs', 06777) ## BUG, puppet creates 07777 + create_test.symlink_file('ugo=rwxts', 07777) + + create_test.symlink_file('u=rwx,go=rx', 00755) + create_test.symlink_file('u=rwx,g=rx,o=r', 00754) + create_test.symlink_file('u=rwx,g=rx,o=', 00750) + create_test.symlink_file('a=rwx', 00777) + + create_test.symlink_file('u+r', 00644) + create_test.symlink_file('u+w', 00644) + create_test.symlink_file('u+x', 00744) + create_test.symlink_directory('u=r', 00455) + create_test.symlink_directory('u=w', 00255) + create_test.symlink_directory('u=x', 00155) + create_test.symlink_directory('u=rw', 00655) + create_test.symlink_directory('u=rwx', 00755) + create_test.symlink_directory('u=rwxt', 01755) + create_test.symlink_directory('u=rwxs', 04755) + create_test.symlink_directory('u=rwxts', 05755) + + 
create_test.symlink_directory('ug=r', 00445) + create_test.symlink_directory('ug=rw', 00665) + create_test.symlink_directory('ug=rwx', 00775) + create_test.symlink_directory('ug=rwxt', 01775) + create_test.symlink_directory('ug=rwxs', 06775) + create_test.symlink_directory('ug=rwxts', 07775) + + create_test.symlink_directory('ugo=r', 00444) + create_test.symlink_directory('ugo=rw', 00666) + create_test.symlink_directory('ugo=rwx', 00777) + create_test.symlink_directory('ugo=rwxt', 01777) + #create_test.symlink_directory('ugo=rwxs', 06777) ## BUG, puppet creates 07777 + create_test.symlink_directory('ugo=rwxts', 07777) + + create_test.symlink_directory('u=rwx,go=rx', 00755) + create_test.symlink_directory('u=rwx,g=rx,o=r', 00754) + create_test.symlink_directory('u=rwx,g=rx,o=', 00750) + create_test.symlink_directory('a=rwx', 00777) + + create_test.symlink_directory('u+r', 00755) + create_test.symlink_directory('u+w', 00755) + create_test.symlink_directory('u+x', 00755) + create_test.puppet_apply() + create_test.puppet_reapply() + + modify_test = ModifyTest.new(self, agent, base_dir_modify) + modify_test.symlink_file('u+r', 00200, 00600) + modify_test.symlink_file('u+r', 00600, 00600) + modify_test.symlink_file('u+w', 00500, 00700) + modify_test.symlink_file('u+w', 00400, 00600) + modify_test.symlink_file('u+x', 00700, 00700) + modify_test.symlink_file('u+x', 00600, 00700) + modify_test.symlink_file('u+X', 00100, 00100) + modify_test.symlink_file('u+X', 00200, 00200) + modify_test.symlink_file('u+X', 00410, 00510) + modify_test.symlink_file('a+X', 00600, 00600) + modify_test.symlink_file('a+X', 00700, 00711) + + modify_test.symlink_file('u+s', 00744, 04744) + modify_test.symlink_file('g+s', 00744, 02744) + modify_test.symlink_file('u+t', 00744, 01744) + + modify_test.symlink_file('u-r', 00200, 00200) + modify_test.symlink_file('u-r', 00600, 00200) + modify_test.symlink_file('u-w', 00500, 00500) + modify_test.symlink_file('u-w', 00600, 00400) + modify_test.symlink_file('u-x', 00700, 00600) + modify_test.symlink_file('u-x', 00600, 00600) + + modify_test.symlink_file('u-s', 04744, 00744) + modify_test.symlink_file('g-s', 02744, 00744) + modify_test.symlink_file('u-t', 01744, 00744) + + modify_test.symlink_directory('u+r', 00200, 00600) + modify_test.symlink_directory('u+r', 00600, 00600) + modify_test.symlink_directory('u+w', 00500, 00700) + modify_test.symlink_directory('u+w', 00400, 00600) + modify_test.symlink_directory('u+x', 00700, 00700) + modify_test.symlink_directory('u+x', 00600, 00700) + modify_test.symlink_directory('u+X', 00100, 00100) + modify_test.symlink_directory('u+X', 00200, 00300) + modify_test.symlink_directory('u+X', 00410, 00510) + modify_test.symlink_directory('a+X', 00600, 00711) + modify_test.symlink_directory('a+X', 00700, 00711) + + modify_test.symlink_directory('u+s', 00744, 04744) + modify_test.symlink_directory('g+s', 00744, 02744) + modify_test.symlink_directory('u+t', 00744, 01744) + + modify_test.symlink_directory('u-r', 00200, 00200) + modify_test.symlink_directory('u-r', 00600, 00200) + modify_test.symlink_directory('u-w', 00500, 00500) + modify_test.symlink_directory('u-w', 00600, 00400) + modify_test.symlink_directory('u-x', 00700, 00600) + modify_test.symlink_directory('u-x', 00600, 00600) + + modify_test.symlink_directory('u-s', 04744, 00744) + # using chmod 2744 on a directory to set the start_mode fails on Solaris + modify_test.symlink_directory('g-s', 02744, 00744) unless is_solaris + modify_test.symlink_directory('u-t', 01744, 00744) + 
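# Editorial aside, not part of the patch: the expected octal values registered above follow from the
# starting mode plus the symbolic clause, using the bit values in the coreutils reference quoted earlier
# (in the create tests, files apparently start from 0644 and directories from 0755 before the clause is
# applied). The sketch below is an illustration only: apply_clause is a hypothetical helper, it handles a
# single '=' clause (comma-separated lists would just apply clauses in sequence), and it does not model
# '+'/'-', 'X', umask handling, or the directory setgid corner cases the real provider has to deal with.
def apply_clause(start_mode, who, perms)
  who = 'ugo' if who == 'a'
  mask = who.chars.map { |w| { 'u' => 0o700, 'g' => 0o070, 'o' => 0o007 }[w] }.reduce(0, :|)
  bits = 0
  perms.each_char do |p|
    case p
    when 'r' then bits |= 0o444 & mask
    when 'w' then bits |= 0o222 & mask
    when 'x' then bits |= 0o111 & mask
    when 's'
      bits |= 0o4000 if who.include?('u')   # setuid
      bits |= 0o2000 if who.include?('g')   # setgid
    when 't' then bits |= 0o1000            # sticky
    end
  end
  (start_mode & ~mask) | bits               # '=' replaces the rwx bits for 'who'
end
apply_clause(0o644, 'u', 'rwxs')  #=> 0o4744, matching symlink_file('u=rwxs', 04744)
apply_clause(0o755, 'ug', 'rw')   #=> 0o665, matching symlink_directory('ug=rw', 00665)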
modify_test.create_starting_state + modify_test.puppet_apply + modify_test.puppet_reapply + + # these raise + # test.assert_raises('') + # test.assert_raises(' ') + # test.assert_raises('u=X') + # test.assert_raises('u-X') + # test.assert_raises('+l') + # test.assert_raises('-l') end - is_solaris = agent['platform'].include?('solaris') - - basedir = agent.tmpdir('symbolic-modes') - on(agent, "mkdir -p #{basedir}") - - test = ModeTest.new(self, agent, basedir) - test.assert_creates('u=r', 00444, 00455) - test.assert_creates('u=w', 00244, 00255) - test.assert_creates('u=x', 00144, 00155) - test.assert_creates('u=rw', 00644, 00655) - test.assert_creates('u=rwx', 00744, 00755) - test.assert_creates('u=rwxt', 01744, 01755) - test.assert_creates('u=rwxs', 04744, 04755) - test.assert_creates('u=rwxts', 05744, 05755) - - test.assert_creates('ug=r', 00444, 00445) - test.assert_creates('ug=rw', 00664, 00665) - test.assert_creates('ug=rwx', 00774, 00775) - test.assert_creates('ug=rwxt', 01774, 01775) - test.assert_creates('ug=rwxs', 06774, 06775) - test.assert_creates('ug=rwxts', 07774, 07775) - - test.assert_creates('ugo=r', 00444, 00444) - test.assert_creates('ugo=rw', 00666, 00666) - test.assert_creates('ugo=rwx', 00777, 00777) - test.assert_creates('ugo=rwxt', 01777, 01777) - # # test.assert_creates('ugo=rwxs', 06777, 06777) ## BUG, puppet creates 07777 - test.assert_creates('ugo=rwxts', 07777, 07777) - - test.assert_creates('u=rwx,go=rx', 00755, 00755) - test.assert_creates('u=rwx,g=rx,o=r', 00754, 00754) - test.assert_creates('u=rwx,g=rx,o=', 00750, 00750) - test.assert_creates('a=rwx', 00777, 00777) - - test.assert_creates('u+r', 00644, 00755) - test.assert_creates('u+w', 00644, 00755) - test.assert_creates('u+x', 00744, 00755) - - test.assert_modifies('u+r', 00200, 00600, 00600) - test.assert_modifies('u+r', 00600, 00600, 00600) - test.assert_modifies('u+w', 00500, 00700, 00700) - test.assert_modifies('u+w', 00400, 00600, 00600) - test.assert_modifies('u+x', 00700, 00700, 00700) - test.assert_modifies('u+x', 00600, 00700, 00700) - test.assert_modifies('u+X', 00100, 00100, 00100) - test.assert_modifies('u+X', 00200, 00200, 00300) - test.assert_modifies('u+X', 00410, 00510, 00510) - test.assert_modifies('a+X', 00600, 00600, 00711) - test.assert_modifies('a+X', 00700, 00711, 00711) - - test.assert_modifies('u+s', 00744, 04744, 04744) - test.assert_modifies('g+s', 00744, 02744, 02744) - test.assert_modifies('u+t', 00744, 01744, 01744) - - test.assert_modifies('u-r', 00200, 00200, 00200) - test.assert_modifies('u-r', 00600, 00200, 00200) - test.assert_modifies('u-w', 00500, 00500, 00500) - test.assert_modifies('u-w', 00600, 00400, 00400) - test.assert_modifies('u-x', 00700, 00600, 00600) - test.assert_modifies('u-x', 00600, 00600, 00600) - - test.assert_modifies('u-s', 04744, 00744, 00744) - # using chmod 2744 on a directory to set the startmode fails on Solaris - test.assert_modifies('g-s', 02744, 00744, 00744) unless is_solaris - test.assert_modifies('u-t', 01744, 00744, 00744) - - # these raise - # test.assert_raises('') - # test.assert_raises(' ') - # test.assert_raises('u=X') - # test.assert_raises('u-X') - # test.assert_raises('+l') - # test.assert_raises('-l') - - step "clean up old test things" - on agent, "rm -rf #{basedir}" end diff --git a/acceptance/tests/resource/file/ticket_6448_file_with_utf8_source.rb b/acceptance/tests/resource/file/ticket_6448_file_with_utf8_source.rb new file mode 100644 index 00000000000..a664310d1bb --- /dev/null +++ 
b/acceptance/tests/resource/file/ticket_6448_file_with_utf8_source.rb @@ -0,0 +1,91 @@ +test_name 'Ensure a file resource can have a UTF-8 source attribute, content, and path when served via a module' do + tag 'audit:high', + 'broken:images', + 'audit:acceptance', + 'server' + + require 'puppet/acceptance/environment_utils' + extend Puppet::Acceptance::EnvironmentUtils + + require 'puppet/acceptance/agent_fqdn_utils' + extend Puppet::Acceptance::AgentFqdnUtils + + tmp_environment = mk_tmp_environment_with_teardown(master, File.basename(__FILE__, '.*')) + agent_tmp_dirs = {} + + agents.each do |agent| + agent_tmp_dirs[agent_to_fqdn(agent)] = agent.tmpdir(tmp_environment) + end + + teardown do + # note - master teardown is registered by #mk_tmp_environment_with_teardown + step 'remove all test files on agents' do + agents.each do |agent| + on(agent, "rm -r '#{agent_tmp_dirs[agent_to_fqdn(agent)]}'", :accept_all_exit_codes => true) + on(agent, puppet('config print lastrunfile')) do |command_result| + agent.rm_rf(command_result.stdout) + end + end + end + end + + step 'create unicode source file served via module on master' do + # 静 \u9759 0xE9 0x9D 0x99 http://www.fileformat.info/info/unicode/char/9759/index.htm + # 的 \u7684 0xE7 0x9A 0x84 http://www.fileformat.info/info/unicode/char/7684/index.htm + # ☃ \u2603 0xE2 0x98 0x83 http://www.fileformat.info/info/unicode/char/2603/index.htm + setup_module_on_master = <<-MASTER_MANIFEST + File { + ensure => directory, + mode => "0755", + } + + file { + '#{environmentpath}/#{tmp_environment}/modules/utf8_file_module':; + '#{environmentpath}/#{tmp_environment}/modules/utf8_file_module/files':; + } + + file { '#{environmentpath}/#{tmp_environment}/modules/utf8_file_module/files/\u9759\u7684': + ensure => file, + content => "\u2603" + } + MASTER_MANIFEST + apply_manifest_on(master, setup_module_on_master, :expect_changes => true) + end + + step 'create a site.pp on master containing a unicode file resource' do + site_pp_contents = <<-SITE_PP + \$test_path = \$facts['networking']['fqdn'] ? 
#{agent_tmp_dirs} + file { "\$test_path/\uff72\uff67\u30d5\u30eb": + ensure => present, + source => "puppet:///modules/utf8_file_module/\u9759\u7684", + } + SITE_PP + + create_site_pp = <<-CREATE_SITE_PP + file { "#{environmentpath}/#{tmp_environment}/manifests/site.pp": + ensure => file, + content => @(UTF8) + #{site_pp_contents} + | UTF8 + } + CREATE_SITE_PP + apply_manifest_on(master, create_site_pp, :expect_changes => true) + end + + step 'ensure agent can manage unicode file resource' do + # イ \uff72 0xEF 0xBD 0xB2 http://www.fileformat.info/info/unicode/char/ff72/index.htm + # ァ \uff67 0xEF 0xBD 0xA7 http://www.fileformat.info/info/unicode/char/ff67/index.htm + # フ \u30d5 0xE3 0x83 0x95 http://www.fileformat.info/info/unicode/char/30d5/index.htm + # ル \u30eb 0xE3 0x83 0xAB http://www.fileformat.info/info/unicode/char/30eb/index.htm + + with_puppet_running_on(master, {}) do + agents.each do |agent| + on(agent, puppet("agent -t --environment '#{tmp_environment}'"), :acceptable_exit_codes => 2) + + on(agent, "cat '#{agent_tmp_dirs[agent_to_fqdn(agent)]}/\uff72\uff67\u30d5\u30eb'") do |result| + assert_match("\u2603", result.stdout, "managed UTF-8 file contents '#{result.stdout}' did not match expected value '\u2603'") + end + end + end + end +end diff --git a/acceptance/tests/resource/file/ticket_7680-follow-symlinks.rb b/acceptance/tests/resource/file/ticket_7680-follow-symlinks.rb index e4faed55b91..86d4fb94ee3 100644 --- a/acceptance/tests/resource/file/ticket_7680-follow-symlinks.rb +++ b/acceptance/tests/resource/file/ticket_7680-follow-symlinks.rb @@ -1,12 +1,10 @@ test_name "#7680: 'links => follow' should use the file source content" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' + agents.each do |agent| - confine_block :to, :platform => 'windows' do - # symlinks are supported only on Vista+ (version 6.0 and higher) - on agents, facter('kernelmajversion') do - skip_test "Test not supported on this plaform" if stdout.chomp.to_f < 6.0 - end - end step "Create file content" real_source = agent.tmpfile('follow_links_source') @@ -33,8 +31,8 @@ MANIFEST apply_manifest_on(agent, manifest, :trace => true) - on agent, "cat #{dest}" do - assert_match /This is the real content/, stdout + on(agent, "cat #{dest}") do |result| + assert_match(/This is the real content/, result.stdout) end step "Cleanup" diff --git a/acceptance/tests/resource/file/ticket_8740_should_not_enumerate_root_directory.rb b/acceptance/tests/resource/file/ticket_8740_should_not_enumerate_root_directory.rb index 54e41f87e57..36f61c07eb7 100644 --- a/acceptance/tests/resource/file/ticket_8740_should_not_enumerate_root_directory.rb +++ b/acceptance/tests/resource/file/ticket_8740_should_not_enumerate_root_directory.rb @@ -1,5 +1,11 @@ test_name "#8740: should not enumerate root directory" + confine :except, :platform => 'windows' +confine :except, :platform => 'osx' + +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' target = "/test-socket-#{$$}" @@ -14,14 +20,14 @@ on(agent, "#{ruby_command(agent)} -e \"require 'socket'; UNIXServer::new('#{target}').close\"") step "query for all files, which should return nothing" - on(agent, puppet_resource('file'), :acceptable_exit_codes => [1]) do - assert_match(%r{Listing all file instances is not supported. Please specify a file or directory, e.g. 
puppet resource file /etc}, stderr) + on(agent, puppet_resource('file'), :acceptable_exit_codes => [1]) do |result| + assert_match(%r{Listing all file instances is not supported. Please specify a file or directory, e.g. puppet resource file /etc}, result.stderr) end ["/", "/etc"].each do |file| step "query '#{file}' directory, which should return single entry" - on(agent, puppet_resource('file', file)) do - files = stdout.scan(/^file \{ '([^']+)'/).flatten + on(agent, puppet_resource('file', file)) do |result| + files = result.stdout.scan(/^file \{ '([^']+)'/).flatten assert_equal(1, files.size, "puppet returned multiple files: #{files.join(', ')}") assert_match(file, files[0], "puppet did not return file") @@ -29,8 +35,8 @@ end step "query file that does not exist, which should report the file is absent" - on(agent, puppet_resource('file', '/this/does/notexist')) do - assert_match(/ensure\s+=>\s+'absent'/, stdout) + on(agent, puppet_resource('file', '/this/does/notexist')) do |result| + assert_match(/ensure\s+=>\s+'absent'/, result.stdout) end step "remove UNIX domain socket" diff --git a/acceptance/tests/resource/group/should_create.rb b/acceptance/tests/resource/group/should_create.rb old mode 100755 new mode 100644 index 16a74c7ddb0..aed9ebea162 --- a/acceptance/tests/resource/group/should_create.rb +++ b/acceptance/tests/resource/group/should_create.rb @@ -1,5 +1,11 @@ test_name "should create a group" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + name = "pl#{rand(999999).to_i}" agents.each do |agent| diff --git a/acceptance/tests/resource/group/should_destroy.rb b/acceptance/tests/resource/group/should_destroy.rb old mode 100755 new mode 100644 index 6b887579fd1..c0dfcb26138 --- a/acceptance/tests/resource/group/should_destroy.rb +++ b/acceptance/tests/resource/group/should_destroy.rb @@ -1,5 +1,11 @@ test_name "should destroy a group" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. 
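# Editorial aside, not part of the patch: the group tests in this area assert against the change notices
# puppet prints rather than inspecting the system directly. Creating a group produces a report line
# roughly like the following (group name illustrative; the 'Notice:' prefix depends on how the run is
# invoked):
#
#   Notice: /Group[pl123456]/ensure: created
#
# and removal reports '/Group[pl123456]/ensure: removed', which is why the assertions in the following
# tests match result.stdout against those substrings.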
+ name = "pl#{rand(999999).to_i}" agents.each do |agent| diff --git a/acceptance/tests/resource/group/should_manage_attributes_aix.rb b/acceptance/tests/resource/group/should_manage_attributes_aix.rb new file mode 100644 index 00000000000..ddde18492e5 --- /dev/null +++ b/acceptance/tests/resource/group/should_manage_attributes_aix.rb @@ -0,0 +1,22 @@ +test_name "should correctly manage the attributes property for the Group (AIX only)" do + confine :to, :platform => /aix/ + + tag 'audit:high', + 'audit:acceptance' # Could be done as integration tests, but would + # require changing the system running the test + # in ways that might require special permissions + # or be harmful to the system running the test + + require 'puppet/acceptance/aix_util' + extend Puppet::Acceptance::AixUtil + + initial_attributes = { + 'admin' => true + } + changed_attributes = { + 'admin' => false + } + + run_attribute_management_tests('group', :gid, initial_attributes, changed_attributes) + +end diff --git a/acceptance/tests/resource/group/should_manage_members.rb b/acceptance/tests/resource/group/should_manage_members.rb new file mode 100644 index 00000000000..61cf21071ec --- /dev/null +++ b/acceptance/tests/resource/group/should_manage_members.rb @@ -0,0 +1,205 @@ +test_name "should correctly manage the members property for the Group resource" do + # These are the only platforms whose group providers manage the members + # property + confine :to, :platform => /windows|osx|aix|^el-|fedora/ + + tag 'audit:high', + 'audit:acceptance' # Could be done as integration tests, but would + # require changing the system running the test + # in ways that might require special permissions + # or be harmful to the system running the test + + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::BeakerUtils + + def random_name + "pl#{rand(999999).to_i}" + end + + def group_manifest(user, params) + params_str = params.map do |param, value| + value_str = value.to_s + value_str = "\"#{value_str}\"" if value.is_a?(String) + + " #{param} => #{value_str}" + end.join(",\n") + + <<-MANIFEST +group { '#{user}': + #{params_str} +} +MANIFEST + end + + def members_of(host, group) + case host['platform'] + when /windows/ + # More verbose than 'net localgroup ', but more programmatic + # because it does not require us to parse stdout + get_group_members = <<-PS1 +# Adapted from https://github.com/RamblingCookieMonster/PowerShell/blob/master/Get-ADGroupMembers.ps1 +function Get-Members([string] $group) { + $ErrorActionPreference = 'Stop' + + Add-Type -AssemblyName 'System.DirectoryServices.AccountManagement' -ErrorAction Stop + $contextType = [System.DirectoryServices.AccountManagement.ContextType]::Machine + $groupObject = [System.DirectoryServices.AccountManagement.GroupPrincipal]::FindByIdentity( + $contextType, + $group + ) + + if (-Not $groupObject) { + throw "Could not find the group '$group'!" + } + + $members = $groupObject.GetMembers($false) | ForEach-Object { "'$($_.Name)'" } + write-output "[$([string]::join(',', $members))]" +} + +Get-Members #{group} +PS1 + Kernel.eval( + execute_powershell_script_on(host, get_group_members).stdout.chomp + ) + else + # This reads the group members from the /etc/group file + get_group_members = <<-RUBY +require 'etc' + +group_struct = nil +Etc.group do |g| + if g.name == '#{group}' + group_struct = g + break + end +end + +unless group_struct + raise "Could not find the group '#{group}'!" 
+end + +puts(group_struct.mem.to_s) +RUBY + + script_path = "#{host.tmpfile("get_group_members")}.rb" + create_remote_file(host, script_path, get_group_members) + + # The setup step should have already set :privatebindir on the + # host. We only include the default here to make this routine + # work for local testing, which sometimes skips the setup step. + privatebindir = host.has_key?(:privatebindir) ? host[:privatebindir] : '/opt/puppetlabs/puppet/bin' + + result = on(host, "#{privatebindir}/ruby #{script_path}") + Kernel.eval(result.stdout.chomp) + end + end + + agents.each do |agent| + users = 6.times.collect { random_name } + users.each { |user| agent.user_absent(user) } + + group = random_name + agent.group_absent(group) + teardown { agent.group_absent(group) } + + step 'Creating the Users' do + users.each do |user| + agent.user_present(user) + teardown { agent.user_absent(user) } + end + end + + group_members = [users[0], users[1]] + + step 'Ensure that the group is created with the specified members' do + manifest = group_manifest(group, members: group_members) + apply_manifest_on(agent, manifest) + assert_matching_arrays(group_members, members_of(agent, group), "The group was not successfully created with the specified members!") + end + + step "Verify that Puppet errors when one of the members does not exist" do + manifest = group_manifest(group, members: ['nonexistent_member']) + apply_manifest_on(agent, manifest, :acceptable_exit_codes => [0, 1]) do |result| + assert_match(/Error:.*#{group}/, result.stderr, "Puppet fails to report an error when one of the members in the members property does not exist") + end + end + + step "Verify that Puppet noops when the group's members are already set after creating the group" do + manifest = group_manifest(group, members: group_members) + apply_manifest_on(agent, manifest, catch_changes: true) + assert_matching_arrays(group_members, members_of(agent, group), "The group's members somehow changed despite Puppet reporting a noop") + end + + step "Verify that Puppet enforces minimum user membership when auth_membership == false" do + new_members = [users[2], users[4]] + + manifest = group_manifest(group, members: new_members, auth_membership: false) + apply_manifest_on(agent, manifest) + + group_members += new_members + assert_matching_arrays(group_members, members_of(agent, group), "Puppet fails to enforce minimum user membership when auth_membership == false") + end + + step "Verify that Puppet noops when the group's members are already set after enforcing minimum user membership" do + manifest = group_manifest(group, members: group_members) + apply_manifest_on(agent, manifest, catch_changes: true) + assert_matching_arrays(group_members, members_of(agent, group), "The group's members somehow changed despite Puppet reporting a noop") + end + + # Run some special, platform-specific tests. If these get too large, then + # we should consider placing them in a separate file. 
+ case agent['platform'] + when /windows/ + domain = on(agent, 'hostname').stdout.chomp.upcase + + step "(Windows) Verify that Puppet prints each group member as DOMAIN\\" do + new_members = [users[3]] + + manifest = group_manifest(group, members: new_members, auth_membership: false) + apply_manifest_on(agent, manifest) do |result| + group_members += new_members + + stdout = result.stdout.chomp + + group_members.each do |user| + assert_match(/#{domain}\\#{user}/, stdout, "Puppet fails to print the group member #{user} as #{domain}\\#{user}") + end + end + end + + step "(Windows) Verify that `puppet resource` prints each group member as DOMAIN\\" do + on(agent, puppet('resource', 'group', group)) do |result| + stdout = result.stdout.chomp + + group_members.each do |user| + assert_match(/#{domain}\\#{user}/, stdout, "`puppet resource` fails to print the group member #{user} as #{domain}\\#{user}") + end + end + end + when /aix/ + step "(AIX) Verify that Puppet accepts a comma-separated list of members for backwards compatibility" do + new_members = [users[3], users[5]] + + manifest = group_manifest(group, members: new_members.join(','), auth_membership: false) + apply_manifest_on(agent, manifest) + + group_members += new_members + assert_matching_arrays(group_members, members_of(agent, group), "Puppet cannot manage the members property when the members are provided as a comma-separated list") + end + end + + step "Verify that Puppet enforces inclusive user membership when auth_membership == true" do + group_members = [users[0]] + + manifest = group_manifest(group, members: group_members, auth_membership: true) + apply_manifest_on(agent, manifest) + assert_matching_arrays(group_members, members_of(agent, group), "Puppet fails to enforce inclusive group membership when auth_membership == true") + end + + step "Verify that Puppet noops when the group's members are already set after enforcing inclusive user membership" do + manifest = group_manifest(group, members: group_members) + apply_manifest_on(agent, manifest, catch_changes: true) + assert_matching_arrays(group_members, members_of(agent, group), "The group's members somehow changed despite Puppet reporting a noop") + end + end +end diff --git a/acceptance/tests/resource/group/should_modify_gid.rb b/acceptance/tests/resource/group/should_modify_gid.rb old mode 100755 new mode 100644 index 43ba6fa4f48..a394449ec56 --- a/acceptance/tests/resource/group/should_modify_gid.rb +++ b/acceptance/tests/resource/group/should_modify_gid.rb @@ -1,26 +1,31 @@ test_name "should modify gid of existing group" confine :except, :platform => 'windows' +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. 
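# Editorial aside, not part of the patch: the gid changes just below swap rand(999999) for
# rand(989999) + 10000, presumably to keep the generated gids clear of low, system-reserved ids.
# rand(989999) yields 0..989998, so the sum always lands in 10000..999998; a minimal check
# (example_gid is hypothetical):
example_gid = rand(989999).to_i + 10000   # always within 10_000..999_998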
+ name = "pl#{rand(999999).to_i}" -gid1 = rand(999999).to_i -gid2 = rand(999999).to_i +gid1 = (rand(989999).to_i + 10000) +gid2 = (rand(989999).to_i + 10000) agents.each do |agent| + # AIX group provider returns quoted gids step "ensure that the group exists with gid #{gid1}" - on(agent, puppet_resource('group', name, 'ensure=present', "gid=#{gid1}")) do - fail_test "missing gid notice" unless stdout =~ /gid +=> +'#{gid1}'/ + on(agent, puppet_resource('group', name, 'ensure=present', "gid=#{gid1}")) do |result| + fail_test "missing gid notice" unless result.stdout =~ /gid +=> +'?#{gid1}'?/ end step "ensure that we can modify the GID of the group to #{gid2}" - on(agent, puppet_resource('group', name, 'ensure=present', "gid=#{gid2}")) do - fail_test "missing gid notice" unless stdout =~ /gid +=> +'#{gid2}'/ + on(agent, puppet_resource('group', name, 'ensure=present', "gid=#{gid2}")) do |result| + fail_test "missing gid notice" unless result.stdout =~ /gid +=> +'?#{gid2}'?/ end step "verify that the GID changed" - on(agent, "getent group #{name}") do - fail_test "gid is wrong through getent output" unless - stdout =~ /^#{name}:.*:#{gid2}:/ - end + gid_output = agent.group_gid(name).to_i + fail_test "gid #{gid_output} does not match expected value of: #{gid2}" unless gid_output == gid2 step "clean up the system after the test run" on(agent, puppet_resource('group', name, 'ensure=absent')) diff --git a/acceptance/tests/resource/group/should_not_create_existing.rb b/acceptance/tests/resource/group/should_not_create_existing.rb old mode 100755 new mode 100644 index 351ca0c74de..8105f21861a --- a/acceptance/tests/resource/group/should_not_create_existing.rb +++ b/acceptance/tests/resource/group/should_not_create_existing.rb @@ -1,15 +1,21 @@ test_name "group should not create existing group" -name = "test-group-#{Time.new.to_i}" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + +name = "gr#{rand(999999).to_i}" agents.each do |agent| step "ensure the group exists on the target node" agent.group_present(name) step "verify that we don't try and create the existing group" - on(agent, puppet_resource('group', name, 'ensure=present')) do + on(agent, puppet_resource('group', name, 'ensure=present')) do |result| fail_test "looks like we created the group" if - stdout.include? "/Group[#{name}]/ensure: created" + result.stdout.include? "/Group[#{name}]/ensure: created" end step "clean up the system after the test run" diff --git a/acceptance/tests/resource/group/should_not_destoy_unexisting.rb b/acceptance/tests/resource/group/should_not_destoy_unexisting.rb deleted file mode 100755 index 53147f08db5..00000000000 --- a/acceptance/tests/resource/group/should_not_destoy_unexisting.rb +++ /dev/null @@ -1,15 +0,0 @@ -test_name "should not destroy a group that doesn't exist" - -name = "test-group-#{Time.new.to_i}" - -step "verify the group does not already exist" -agents.each do |agent| - agent.group_absent(name) -end - -step "verify that we don't remove the group when it doesn't exist" -on(agents, puppet_resource('group', name, 'ensure=absent')) do - fail_test "it looks like we tried to remove the group" if - stdout.include? 
"/Group[#{name}]/ensure: removed" -end - diff --git a/acceptance/tests/resource/group/should_not_destroy_unexisting.rb b/acceptance/tests/resource/group/should_not_destroy_unexisting.rb new file mode 100644 index 00000000000..c734bdbfb2d --- /dev/null +++ b/acceptance/tests/resource/group/should_not_destroy_unexisting.rb @@ -0,0 +1,21 @@ +test_name "should not destroy a group that doesn't exist" + +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + +name = "test-group-#{Time.new.to_i}" + +step "verify the group does not already exist" +agents.each do |agent| + agent.group_absent(name) +end + +step "verify that we don't remove the group when it doesn't exist" +on(agents, puppet_resource('group', name, 'ensure=absent')) do |result| + fail_test "it looks like we tried to remove the group" if + result.stdout.include? "/Group[#{name}]/ensure: removed" +end + diff --git a/acceptance/tests/resource/group/should_query.rb b/acceptance/tests/resource/group/should_query.rb old mode 100755 new mode 100644 index 12da6ea1a0f..484122f825a --- a/acceptance/tests/resource/group/should_query.rb +++ b/acceptance/tests/resource/group/should_query.rb @@ -1,14 +1,22 @@ test_name "test that we can query and find a group that exists." +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + name = "pl#{rand(999999).to_i}" agents.each do |agent| + skip_test('this test fails on windows French due to Cygwin/UTF Issues - PUP-8319,IMAGES-492') if agent['platform'] =~ /windows/ && agent['locale'] == 'fr' + step "ensure that our test group exists" agent.group_present(name) step "query for the resource and verify it was found" - on(agent, puppet_resource('group', name)) do - fail_test "didn't find the group #{name}" unless stdout.include? 'present' + on(agent, puppet_resource('group', name)) do |result| + fail_test "didn't find the group #{name}" unless result.stdout.include? 
'present' end step "clean up the group we added" diff --git a/acceptance/tests/resource/group/should_query_all.rb b/acceptance/tests/resource/group/should_query_all.rb old mode 100755 new mode 100644 index 87ce781a8b2..ae1b36f9aed --- a/acceptance/tests/resource/group/should_query_all.rb +++ b/acceptance/tests/resource/group/should_query_all.rb @@ -1,14 +1,20 @@ test_name "should query all groups" +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:integration' # Does not modify system running test + agents.each do |agent| + skip_test('this test fails on windows French due to Cygwin/UTF Issues - PUP-8319,IMAGES-492') if agent['platform'] =~ /windows/ && agent['locale'] == 'fr' step "query natively" + groups = agent.group_list fail_test("No groups found") unless groups step "query with puppet" - on(agent, puppet_resource('group')) do - stdout.each_line do |line| + on(agent, puppet_resource('group')) do |result| + result.stdout.each_line do |line| name = ( line.match(/^group \{ '([^']+)'/) or next )[1] unless groups.delete(name) diff --git a/acceptance/tests/resource/host/should_create.rb b/acceptance/tests/resource/host/should_create.rb deleted file mode 100755 index 6ad807f4ead..00000000000 --- a/acceptance/tests/resource/host/should_create.rb +++ /dev/null @@ -1,17 +0,0 @@ -test_name "host should create" - -agents.each do |agent| - target = agent.tmpfile('host-create') - - step "clean up for the test" - on agent, "rm -f #{target}" - - step "create the host record" - on(agent, puppet_resource("host", "test", "ensure=present", - "ip=127.0.0.1", "target=#{target}")) - - step "verify that the record was created" - on(agent, "cat #{target} ; rm -f #{target}") do - fail_test "record was not present" unless stdout =~ /^127\.0\.0\.1[[:space:]]+test/ - end -end diff --git a/acceptance/tests/resource/host/should_create_aliases.rb b/acceptance/tests/resource/host/should_create_aliases.rb deleted file mode 100755 index 91031e6be7f..00000000000 --- a/acceptance/tests/resource/host/should_create_aliases.rb +++ /dev/null @@ -1,18 +0,0 @@ -test_name "host should create aliases" - -agents.each do |agent| - target = agent.tmpfile('host-create-aliases') - - step "clean up the system for testing" - on agent, "rm -f #{target}" - - step "create the record" - on(agent, puppet_resource('host', 'test', "ensure=present", - "ip=127.0.0.7", "target=#{target}", "host_aliases=alias")) - - step "verify that the aliases were added" - on(agent, "cat #{target} ; rm -f #{target}") do - fail_test "alias was missing" unless - stdout =~ /^127\.0\.0\.7[[:space:]]+test[[:space:]]alias/ - end -end diff --git a/acceptance/tests/resource/host/should_destroy.rb b/acceptance/tests/resource/host/should_destroy.rb deleted file mode 100755 index 75e8166b31a..00000000000 --- a/acceptance/tests/resource/host/should_destroy.rb +++ /dev/null @@ -1,21 +0,0 @@ -test_name "should be able to remove a host record" - -agents.each do |agent| - file = agent.tmpfile('host-destroy') - line = "127.0.0.7 test1" - - step "set up files for the test" - on agent, "printf '#{line}\n' > #{file}" - - step "delete the resource from the file" - on(agent, puppet_resource('host', 'test1', "target=#{file}", - 'ensure=absent', 'ip=127.0.0.7')) - - step "verify that the content was removed" - on(agent, "cat #{file}; rm -f #{file}") do - fail_test "the content was still present" if stdout.include? 
line - end - - step "clean up after the test" - on agent, "rm -f #{file}" -end diff --git a/acceptance/tests/resource/host/should_modify_alias.rb b/acceptance/tests/resource/host/should_modify_alias.rb deleted file mode 100755 index df3ceb8abc9..00000000000 --- a/acceptance/tests/resource/host/should_modify_alias.rb +++ /dev/null @@ -1,21 +0,0 @@ -test_name "should be able to modify a host alias" - -agents.each do |agent| - file = agent.tmpfile('host-modify-alias') - - step "set up files for the test" - on agent, "printf '127.0.0.8 test alias\n' > #{file}" - - step "modify the resource" - on(agent, puppet_resource('host', 'test', "target=#{file}", - 'ensure=present', 'ip=127.0.0.8', 'host_aliases=banzai')) - - step "verify that the content was updated" - on(agent, "cat #{file}; rm -f #{file}") do - fail_test "the alias was not updated" unless - stdout =~ /^127\.0\.0\.8[[:space:]]+test[[:space:]]+banzai[[:space:]]*$/ - end - - step "clean up after the test" - on agent, "rm -f #{file}" -end diff --git a/acceptance/tests/resource/host/should_modify_ip.rb b/acceptance/tests/resource/host/should_modify_ip.rb deleted file mode 100755 index 75eee018bff..00000000000 --- a/acceptance/tests/resource/host/should_modify_ip.rb +++ /dev/null @@ -1,21 +0,0 @@ -test_name "should be able to modify a host address" - -agents.each do |agent| - file = agent.tmpfile('host-modify-ip') - - step "set up files for the test" - on agent, "printf '127.0.0.9 test alias\n' > #{file}" - - step "modify the resource" - on(agent, puppet_resource('host', 'test', "target=#{file}", - 'ensure=present', 'ip=127.0.0.10', 'host_aliases=alias')) - - step "verify that the content was updated" - on(agent, "cat #{file}; rm -f #{file}") do - fail_test "the address was not updated" unless - stdout =~ /^127\.0\.0\.10[[:space:]]+test[[:space:]]+alias[[:space:]]*$/ - end - - step "clean up after the test" - on agent, "rm -f #{file}" -end diff --git a/acceptance/tests/resource/host/should_not_create_existing.rb b/acceptance/tests/resource/host/should_not_create_existing.rb deleted file mode 100755 index 89a052bfdd1..00000000000 --- a/acceptance/tests/resource/host/should_not_create_existing.rb +++ /dev/null @@ -1,18 +0,0 @@ -test_name "should not create host if it exists" - -agents.each do |agent| - file = agent.tmpfile('host-not-create-existing') - - step "set up the system for the test" - on agent, "printf '127.0.0.2 test alias\n' > #{file}" - - step "tell puppet to ensure the host exists" - on(agent, puppet_resource('host', 'test', "target=#{file}", - 'ensure=present', 'ip=127.0.0.2', 'host_aliases=alias')) do - fail_test "darn, we created the host record" if - stdout.include? 
'/Host[test1]/ensure: created' - end - - step "clean up after we created things" - on agent, "rm -f #{file}" -end diff --git a/acceptance/tests/resource/host/should_query_all.rb b/acceptance/tests/resource/host/should_query_all.rb deleted file mode 100755 index 4506fcca7cc..00000000000 --- a/acceptance/tests/resource/host/should_query_all.rb +++ /dev/null @@ -1,28 +0,0 @@ -test_name "should query all hosts from hosts file" - -content = %q{127.0.0.1 test1 test1.local -127.0.0.2 test2 test2.local -127.0.0.3 test3 test3.local -127.0.0.4 test4 test4.local -} - -agents.each do |agent| - backup = agent.tmpfile('host-query-all') - - step "configure the system for testing (including file backups)" - on agent, "cp /etc/hosts #{backup}" - on agent, "cat > /etc/hosts", :stdin => content - - step "query all host records using puppet" - on(agent, puppet_resource('host')) do - found = stdout.scan(/host \{ '([^']+)'/).flatten.sort - fail_test "the list of returned hosts was wrong: #{found.join(', ')}" unless - found == %w{test1 test2 test3 test4} - - count = stdout.scan(/ensure\s+=>\s+'present'/).length - fail_test "found #{count} records, wanted 4" unless count == 4 - end - - step "clean up the system afterwards" - on agent, "cat #{backup} > /etc/hosts && rm -f #{backup}" -end diff --git a/acceptance/tests/resource/host/ticket_4131_should_not_create_without_ip.rb b/acceptance/tests/resource/host/ticket_4131_should_not_create_without_ip.rb deleted file mode 100755 index 4c56ac0d205..00000000000 --- a/acceptance/tests/resource/host/ticket_4131_should_not_create_without_ip.rb +++ /dev/null @@ -1,23 +0,0 @@ -test_name "#4131: should not create host without IP attribute" - -agents.each do |agent| - file = agent.tmpfile('4131-require-ip') - - step "configure the target system for the test" - on agent, "rm -rf #{file} ; touch #{file}" - - step "try to create the host, which should fail" - # REVISIT: This step should properly need to handle the non-zero exit code, - # and #5668 has been filed to record that. When it is fixed this test will - # start to fail, and this comment will tell you why. --daniel 2010-12-24 - on(agent, puppet_resource('host', 'test', "target=#{file}", - "host_aliases=alias")) do - fail_test "puppet didn't complain about the missing attribute" unless - stderr.include? 'ip is a required attribute for hosts' - end - - step "verify that the host was not added to the file" - on(agent, "cat #{file} ; rm -f #{file}") do - fail_test "the host was apparently added to the file" if stdout.include? 
'test' - end -end diff --git a/acceptance/tests/resource/mailalias/create.rb b/acceptance/tests/resource/mailalias/create.rb deleted file mode 100644 index 718159149ca..00000000000 --- a/acceptance/tests/resource/mailalias/create.rb +++ /dev/null @@ -1,26 +0,0 @@ -test_name "should create an email alias" - -confine :except, :platform => 'windows' - -name = "pl#{rand(999999).to_i}" -agents.each do |agent| - teardown do - #(teardown) restore the alias file - on(agent, "mv /tmp/aliases /etc/aliases", :acceptable_exit_codes => [0,1]) - end - - #------- SETUP -------# - step "(setup) backup alias file" - on(agent, "cp /etc/aliases /tmp/aliases", :acceptable_exit_codes => [0,1]) - - #------- TESTS -------# - step "create a mailalias with puppet" - args = ['ensure=present', - 'recipient="foo,bar,baz"'] - on(agent, puppet_resource('mailalias', name, args)) - - step "verify the alias exists" - on(agent, "cat /etc/aliases") do |res| - assert_match(/#{name}:.*foo,bar,baz/, res.stdout, "mailalias not in aliases file") - end -end diff --git a/acceptance/tests/resource/mailalias/destroy.rb b/acceptance/tests/resource/mailalias/destroy.rb deleted file mode 100644 index 65ac5750724..00000000000 --- a/acceptance/tests/resource/mailalias/destroy.rb +++ /dev/null @@ -1,35 +0,0 @@ -test_name "should delete an email alias" - -confine :except, :platform => 'windows' - -name = "pl#{rand(999999).to_i}" -agents.each do |agent| - teardown do - #(teardown) restore the alias file - on(agent, "mv /tmp/aliases /etc/aliases", :acceptable_exit_codes => [0,1]) - end - - #------- SETUP -------# - step "(setup) backup alias file" - on(agent, "cp /etc/aliases /tmp/aliases", :acceptable_exit_codes => [0,1]) - - step "(setup) create a mailalias" - on(agent, "echo '#{name}: foo,bar,baz' >> /etc/aliases") - - step "(setup) verify the alias exists" - on(agent, "cat /etc/aliases") do |res| - assert_match(/#{name}:.*foo,bar,baz/, res.stdout, "mailalias not in aliases file") - end - - #------- TESTS -------# - step "delete the aliases database with puppet" - args = ['ensure=absent', - 'recipient="foo,bar,baz"'] - on(agent, puppet_resource('mailalias', name, args)) - - - step "verify the alias is absent" - on(agent, "cat /etc/aliases") do |res| - assert_no_match(/#{name}:.*foo,bar,baz/, res.stdout, "mailalias was not removed from aliases file") - end -end diff --git a/acceptance/tests/resource/mailalias/modify.rb b/acceptance/tests/resource/mailalias/modify.rb deleted file mode 100644 index 41c52f9db8d..00000000000 --- a/acceptance/tests/resource/mailalias/modify.rb +++ /dev/null @@ -1,35 +0,0 @@ -test_name "should modify an email alias" - -confine :except, :platform => 'windows' - -name = "pl#{rand(999999).to_i}" -agents.each do |agent| - teardown do - #(teardown) restore the alias file - on(agent, "mv /tmp/aliases /etc/aliases", :acceptable_exit_codes => [0,1]) - end - - #------- SETUP -------# - step "(setup) backup alias file" - on(agent, "cp /etc/aliases /tmp/aliases", :acceptable_exit_codes => [0,1]) - - step "(setup) create a mailalias" - on(agent, "echo '#{name}: foo,bar,baz' >> /etc/aliases") - - step "(setup) verify the alias exists" - on(agent, "cat /etc/aliases") do |res| - assert_match(/#{name}:.*foo,bar,baz/, res.stdout, "mailalias not in aliases file") - end - - #------- TESTS -------# - step "modify the aliases database with puppet" - args = ['ensure=present', - 'recipient="foo,bar,baz,blarvitz"'] - on(agent, puppet_resource('mailalias', name, args)) - - - step "verify the updated alias is present" - on(agent, "cat 
/etc/aliases") do |res| - assert_match(/#{name}:.*foo,bar,baz,blarvitz/, res.stdout, "updated mailalias not in aliases file") - end -end diff --git a/acceptance/tests/resource/mailalias/query.rb b/acceptance/tests/resource/mailalias/query.rb deleted file mode 100644 index afadc370a81..00000000000 --- a/acceptance/tests/resource/mailalias/query.rb +++ /dev/null @@ -1,29 +0,0 @@ -test_name "should be able to find an exisitng email alias" - -confine :except, :platform => 'windows' - -name = "pl#{rand(999999).to_i}" -agents.each do |agent| - teardown do - #(teardown) restore the alias file - on(agent, "mv /tmp/aliases /etc/aliases", :acceptable_exit_codes => [0,1]) - end - - #------- SETUP -------# - step "(setup) backup alias file" - on(agent, "cp /etc/aliases /tmp/aliases", :acceptable_exit_codes => [0,1]) - - step "(setup) create a mailalias" - on(agent, "echo '#{name}: foo,bar,baz' >> /etc/aliases") - - step "(setup) verify the alias exists" - on(agent, "cat /etc/aliases") do |res| - assert_match(/#{name}:.*foo,bar,baz/, res.stdout, "mailalias not in aliases file") - end - - #------- TESTS -------# - step "query for the mail alias with puppet" - on(agent, puppet_resource('mailalias', name)) do - fail_test "didn't find the scheduled_task #{name}" unless stdout.include? 'present' - end -end diff --git a/acceptance/tests/resource/package/common_package_name_in_different_providers.rb b/acceptance/tests/resource/package/common_package_name_in_different_providers.rb index d19eeb5c334..26270085a83 100644 --- a/acceptance/tests/resource/package/common_package_name_in_different_providers.rb +++ b/acceptance/tests/resource/package/common_package_name_in_different_providers.rb @@ -1,108 +1,147 @@ -test_name "ticket 1073: common package name in two different providers should be allowed" +test_name "ticket 1073: common package name in two different providers should be allowed" do -hosts_to_test = agents.select do |agent| - agent['platform'].match /(?:centos|el-|fedora)/ -end -skip_test "No suitable hosts found" if hosts_to_test.empty? + confine :to, {:platform => /(?:centos|el-|fedora)/}, agents + # Skipping tests if facter finds this is an ec2 host, PUP-7774 + agents.each do |agent| + skip_test('Skipping EC2 Hosts') if fact_on(agent, 'ec2_metadata') + end -require 'puppet/acceptance/rpm_util' -extend Puppet::Acceptance::RpmUtils + # Upgrade the AlmaLinux release package for newer keys until our image is updated (RE-16096) + agents.each do |agent| + on(agent, 'dnf -y upgrade almalinux-release') if fact_on(agent, 'os.name') == 'AlmaLinux' + end -rpm_options = {:pkg => 'guid', :version => '1.0'} + tag 'audit:high', + 'audit:acceptance' # Uses a provider that depends on AIO packaging -teardown do - step "cleanup" - agents.each do |agent| - clean_rpm agent, rpm_options + require 'puppet/acceptance/rpm_util' + extend Puppet::Acceptance::RpmUtils + require 'puppet/acceptance/common_utils' + extend Puppet::Acceptance::CommandUtils + + rpm_options = {:pkg => 'guid', :version => '1.0'} + + teardown do + step "cleanup" + agents.each do |agent| + clean_rpm agent, rpm_options + end end -end -def verify_state(hosts, pkg, state, match) - hosts.each do |agent| - # Note yum lists packages as . 
- on agent, 'yum list installed' do - method(match).call(/^#{pkg}\./, stdout) + step "Verify gem and ruby-devel on fedora-22 and above if not aio" do + if @options[:type] != 'aio' then + agents.each do |agent| + if agent[:platform] =~ /fedora-2[2-9]/ then + unless check_for_package agent, 'rubygems' + install_package agent, 'rubygems' + end + unless check_for_package agent, 'ruby-devel' + install_package agent, 'ruby-devel' + end + end + end end + end - on agent, 'gem list --local' do - method(match).call(/^#{pkg} /, stdout) + def gem_provider + if @options[:type] == 'aio' + 'puppet_gem' + else + 'gem' end end -end -def verify_present(hosts, pkg) - verify_state(hosts, pkg, '(?!purged|absent)[^\']+', :assert_match) -end + def verify_state(hosts, pkg, state, match) + hosts.each do |agent| + cmd = rpm_provider(agent) + # Note yum lists packages as . + on(agent, "#{cmd} list installed") do |result| + method(match).call(/^#{pkg}\./, result.stdout) + end + + on(agent, "#{gem_command(agent, @options[:type])} list --local") do |result| + method(match).call(/^#{pkg} /, result.stdout) + end + end + end -def verify_absent(hosts, pkg) - verify_state(hosts, pkg, '(?:purged|absent)', :assert_no_match) -end + def verify_present(hosts, pkg) + verify_state(hosts, pkg, '(?!purged|absent)[^\']+', :assert_match) + end -# Setup repo and package -hosts_to_test.each do |agent| - clean_rpm agent, rpm_options - setup_rpm agent, rpm_options - send_rpm agent, rpm_options -end + def verify_absent(hosts, pkg) + verify_state(hosts, pkg, '(?:purged|absent)', :refute_match) + end + + # Setup repo and package + agents.each do |agent| + clean_rpm agent, rpm_options + setup_rpm agent, rpm_options + send_rpm agent, rpm_options + end -verify_absent hosts_to_test, 'guid' + verify_absent agents, 'guid' -# Test error trying to install duplicate packages -collide1_manifest = < installed} - package {'other-guid': name => 'guid', ensure => present} -MANIFEST + # Test error trying to install duplicate packages + collide1_manifest = <<-MANIFEST + package {'guid': ensure => installed} + package {'other-guid': name => 'guid', ensure => present} + MANIFEST -apply_manifest_on(hosts_to_test, collide1_manifest, :acceptable_exit_codes => [1]).each do |result| - assert_match(/Error while evaluating a Resource Statement, Cannot alias Package\[other-guid\] to \["guid", nil\]/, "#{result.host}: #{result.stderr}") -end + apply_manifest_on(agents, collide1_manifest, :acceptable_exit_codes => [1]) do |result| + assert_match(/Error while evaluating a Resource Statement, Cannot alias Package\[other-guid\] to \[nil, "guid", nil\]/, "#{result.host}: #{result.stderr}") + end -verify_absent hosts_to_test, 'guid' + verify_absent agents, 'guid' -collide2_manifest = < '0.1.0', provider => gem} - package {'other-guid': name => 'guid', ensure => installed, provider => gem} -MANIFEST + gem_source = if ENV['GEM_SOURCE'] then "source => '#{ENV['GEM_SOURCE']}'," else '' end + collide2_manifest = <<-MANIFEST + package {'guid': ensure => '0.1.0', provider => #{gem_provider}, #{gem_source}} + package {'other-guid': name => 'guid', ensure => installed, provider => #{gem_provider}, #{gem_source}} + MANIFEST -apply_manifest_on(hosts_to_test, collide2_manifest, :acceptable_exit_codes => [1]).each do |result| - assert_match(/Error while evaluating a Resource Statement, Cannot alias Package\[other-guid\] to \["guid", "gem"\]/, "#{result.host}: #{result.stderr}") -end + apply_manifest_on(agents, collide2_manifest, :acceptable_exit_codes => [1]) do |result| + 
assert_match(/Error while evaluating a Resource Statement, Cannot alias Package\[other-guid\] to \[nil, "guid", "#{gem_provider}"\]/, "#{result.host}: #{result.stderr}") + end -verify_absent hosts_to_test, 'guid' + verify_absent agents, 'guid' -# Test successful parallel installation -install_manifest = < installed} + # Test successful parallel installation + install_manifest = <<-MANIFEST + package {'guid': ensure => installed} - package {'gem-guid': - provider => gem, - name => 'guid', - ensure => installed, - } -MANIFEST + package {'gem-guid': + provider => #{gem_provider}, + name => 'guid', + ensure => installed, + #{gem_source} + } + MANIFEST -apply_manifest_on(hosts_to_test, install_manifest).each do |result| - assert_match('Package[guid]/ensure: created', "#{result.host}: #{result.stdout}") - assert_match('Package[gem-guid]/ensure: created', "#{result.host}: #{result.stdout}") -end + apply_manifest_on(agents, install_manifest) do |result| + assert_match('Package[guid]/ensure: created', "#{result.host}: #{result.stdout}") + assert_match('Package[gem-guid]/ensure: created', "#{result.host}: #{result.stdout}") + end -verify_present hosts_to_test, 'guid' + verify_present agents, 'guid' -# Test removal -remove_manifest = < gem, - name => 'guid', - ensure => absent, - } + # Test removal + remove_manifest = <<-MANIFEST + package {'gem-guid': + provider => #{gem_provider}, + name => 'guid', + ensure => absent, + #{gem_source} + } - package {'guid': ensure => absent} -MANIFEST + package {'guid': ensure => absent} + MANIFEST -apply_manifest_on(hosts_to_test, remove_manifest).each do |result| - assert_match('Package[guid]/ensure: removed', "#{result.host}: #{result.stdout}") - assert_match('Package[gem-guid]/ensure: removed', "#{result.host}: #{result.stdout}") -end + apply_manifest_on(agents, remove_manifest) do |result| + assert_match('Package[guid]/ensure: removed', "#{result.host}: #{result.stdout}") + assert_match('Package[gem-guid]/ensure: removed', "#{result.host}: #{result.stdout}") + end -verify_absent hosts_to_test, 'guid' + verify_absent agents, 'guid' +end diff --git a/acceptance/tests/resource/package/does_not_exist.rb b/acceptance/tests/resource/package/does_not_exist.rb index c0b605a24bd..96fd9eee441 100644 --- a/acceptance/tests/resource/package/does_not_exist.rb +++ b/acceptance/tests/resource/package/does_not_exist.rb @@ -1,15 +1,16 @@ # Redmine (#22529) test_name "Puppet returns only resource package declaration when querying an uninstalled package" do - resource_declaration_regex = %r@package \{ 'not-installed-on-this-host': - ensure => '(?:purged|absent)', -\}@m + tag 'audit:high', + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. 
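# Editorial aside, not part of the patch: for a package that is not installed, `puppet resource package <name>`
# prints a resource declaration along the lines of
#
#   package { 'not-installed-on-this-host':
#     ensure   => 'absent',
#     provider => 'yum',
#   }
#
# where ensure may also be 'purged' and the provider name varies by platform; the loosened regex in the
# hunk below just checks for the name, an ensure line with absent/purged, and a following provider line.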
agents.each do |agent| step "test puppet resource package" do - on(agent, puppet('resource', 'package', 'not-installed-on-this-host')) do - assert_match(resource_declaration_regex, stdout) + on(agent, puppet('resource', 'package', 'not-installed-on-this-host')) do |result| + assert_match(/package.*not-installed-on-this-host.*\n.*ensure.*(?:absent|purged).*\n.*provider/, result.stdout) end end @@ -20,8 +21,8 @@ confine_block(:to, :platform => /debian|ubuntu/) do agents.each do |agent| step "test puppet apply" do - on(agent, puppet('apply', '-e', %Q|"package {'not-installed-on-this-host': ensure => purged }"|)) do - assert_no_match(/warning/i, stdout) + on(agent, puppet('apply', '-e', %Q|"package {'not-installed-on-this-host': ensure => purged }"|)) do |result| + refute_match(/warning/i, result.stdout) end end end diff --git a/acceptance/tests/resource/package/ips/basic_tests.rb b/acceptance/tests/resource/package/ips/basic_tests.rb index 84243fb0db5..e1126d35386 100644 --- a/acceptance/tests/resource/package/ips/basic_tests.rb +++ b/acceptance/tests/resource/package/ips/basic_tests.rb @@ -1,6 +1,12 @@ test_name "Package:IPS basic tests" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils @@ -25,7 +31,7 @@ step "IPS: basic ensure we are clean" apply_manifest_on(agent, 'package {mypkg : ensure=>absent}') on(agent, "pkg list -v mypkg", :acceptable_exit_codes => [1]) do - assert_no_match( /mypkg@0.0.1/, result.stdout, "err: #{agent}") + refute_match( /mypkg@0.0.1/, result.stdout, "err: #{agent}") end step "IPS: basic - it should create" @@ -35,18 +41,18 @@ step "IPS: check it was created" on(agent, puppet("resource package mypkg")) do - assert_match( /ensure => '0\.0\.1,.*'/, result.stdout, "err: #{agent}") + assert_match( /ensure\s+=> '0\.0\.1[,:]?.*'/, result.stdout, "err: #{agent}") end step "IPS: do not upgrade until latest is mentioned" send_pkg agent,:pkg => 'mypkg@0.0.2' apply_manifest_on(agent, 'package {mypkg : ensure=>present}') do - assert_no_match( /ensure: created/, result.stdout, "err: #{agent}") + refute_match( /ensure: created/, result.stdout, "err: #{agent}") end step "IPS: verify it was not upgraded" on(agent, puppet("resource package mypkg")) do - assert_match( /ensure => '0\.0\.1,.*'/, result.stdout, "err: #{agent}") + assert_match( /ensure\s+=> '0\.0\.1[,:]?.*'/, result.stdout, "err: #{agent}") end step "IPS: ask to be latest" @@ -54,7 +60,7 @@ step "IPS: ensure it was upgraded" on(agent, puppet("resource package mypkg")) do - assert_match( /ensure => '0\.0\.2,.*'/, result.stdout, "err: #{agent}") + assert_match( /ensure\s+=> '0\.0\.2[,:]?.*'/, result.stdout, "err: #{agent}") end step "IPS: when there are more than one option, choose latest." @@ -62,12 +68,12 @@ send_pkg agent,:pkg => 'mypkg@0.0.4' apply_manifest_on(agent, 'package {mypkg : ensure=>latest}') on(agent, puppet("resource package mypkg")) do - assert_match( /ensure => '0\.0\.4,.*'/, result.stdout, "err: #{agent}") + assert_match( /ensure\s+=> '0\.0\.4[,:]?.*'/, result.stdout, "err: #{agent}") end step "IPS: ensure removed." 
apply_manifest_on(agent, 'package {mypkg : ensure=>absent}') on(agent, "pkg list -v mypkg", :acceptable_exit_codes => [1]) do - assert_no_match( /mypkg@0.0.1/, result.stdout, "err: #{agent}") + refute_match( /mypkg@0.0.1/, result.stdout, "err: #{agent}") end end diff --git a/acceptance/tests/resource/package/ips/should_be_holdable.rb b/acceptance/tests/resource/package/ips/should_be_holdable.rb index ee8cb50f0c5..b23d2fd06b6 100644 --- a/acceptance/tests/resource/package/ips/should_be_holdable.rb +++ b/acceptance/tests/resource/package/ips/should_be_holdable.rb @@ -1,6 +1,12 @@ test_name "Package:IPS versionable" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils @@ -47,8 +53,8 @@ assert_match( /mypkg2@0.0.2/, result.stdout, "err: #{agent}") end - step "IPS: it should not upgrade current and dependent package if dependent package is held" - apply_manifest_on(agent, 'package {mypkg : ensure=>"held", provider=>"pkg"}') do + step "IPS: it should not upgrade current and dependent package if dependent package is hold" + apply_manifest_on(agent, 'package {mypkg : ensure=>"present", mark=>"hold", provider=>"pkg"}') do assert_match( //, result.stdout, "err: #{agent}") end setup_fakeroot agent @@ -56,7 +62,7 @@ setup_fakeroot2 agent send_pkg2 agent, :pkg => 'mypkg2@0.0.3', :pkgdep => 'mypkg@0.0.3' apply_manifest_on(agent, 'package {mypkg2 : ensure=>"0.0.2"}') do - assert_no_match( /changed/, result.stdout, "err: #{agent}") + refute_match( /changed/, result.stdout, "err: #{agent}") end on agent, "pkg list -v mypkg" do assert_match( /mypkg@0.0.2/, result.stdout, "err: #{agent}") diff --git a/acceptance/tests/resource/package/ips/should_be_idempotent.rb b/acceptance/tests/resource/package/ips/should_be_idempotent.rb index 68babaa766b..7f7c72cd9c5 100644 --- a/acceptance/tests/resource/package/ips/should_be_idempotent.rb +++ b/acceptance/tests/resource/package/ips/should_be_idempotent.rb @@ -1,6 +1,12 @@ test_name "Package:IPS idempotency" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. 
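# Editorial aside, not part of the patch: the should_be_holdable hunk above replaces the old
# `ensure => held` form with the `mark` property. A minimal sketch of the before/after manifests as they
# appear in these tests (package name and provider taken from the fixtures above):
apply_manifest_on(agent, 'package {mypkg : ensure=>"held", provider=>"pkg"}')                     # old form, removed by this diff
apply_manifest_on(agent, 'package {mypkg : ensure=>"present", mark=>"hold", provider=>"pkg"}')    # form the updated test applies
apply_manifest_on(agent, 'package {mypkg : ensure=>"0.0.2", mark=>"none"}')                       # the new test further below updates and releases the hold in one run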
+ require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils @@ -26,8 +32,8 @@ step "IPS: should be idempotent (present)" apply_manifest_on(agent, 'package {mypkg : ensure=>present}') do - assert_no_match( /created/, result.stdout, "err: #{agent}") - assert_no_match( /changed/, result.stdout, "err: #{agent}") + refute_match( /created/, result.stdout, "err: #{agent}") + refute_match( /changed/, result.stdout, "err: #{agent}") end send_pkg agent, :pkg => 'mypkg@0.0.2' @@ -36,7 +42,7 @@ step "IPS: ask for latest version again: should be idempotent (latest)" apply_manifest_on(agent, 'package {mypkg : ensure=>latest}') do - assert_no_match( /created/, result.stdout, "err: #{agent}") + refute_match( /created/, result.stdout, "err: #{agent}") end step "IPS: ask for specific version" @@ -47,14 +53,14 @@ step "IPS: ask for specific version again: should be idempotent (version)" apply_manifest_on(agent, 'package {mypkg : ensure=>"0.0.3"}') do - assert_no_match( /created/, result.stdout, "err: #{agent}") - assert_no_match( /changed/, result.stdout, "err: #{agent}") + refute_match( /created/, result.stdout, "err: #{agent}") + refute_match( /changed/, result.stdout, "err: #{agent}") end step "IPS: ensure removed." apply_manifest_on(agent, 'package {mypkg : ensure=>absent}') on(agent, "pkg list -v mypkg", :acceptable_exit_codes => [1]) do - assert_no_match( /mypkg/, result.stdout, "err: #{agent}") + refute_match( /mypkg/, result.stdout, "err: #{agent}") end end diff --git a/acceptance/tests/resource/package/ips/should_be_updatable.rb b/acceptance/tests/resource/package/ips/should_be_updatable.rb index 552429b6818..3c56cb0fbd1 100644 --- a/acceptance/tests/resource/package/ips/should_be_updatable.rb +++ b/acceptance/tests/resource/package/ips/should_be_updatable.rb @@ -1,6 +1,12 @@ test_name "Package:IPS test for updatable (update, latest)" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. 
+ require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils diff --git a/acceptance/tests/resource/package/ips/should_be_updateable_and_unholdable_at_same_time.rb b/acceptance/tests/resource/package/ips/should_be_updateable_and_unholdable_at_same_time.rb new file mode 100644 index 00000000000..247fc3b24b7 --- /dev/null +++ b/acceptance/tests/resource/package/ips/should_be_updateable_and_unholdable_at_same_time.rb @@ -0,0 +1,38 @@ +test_name "Package:IPS test for updatable holded package" do + confine :to, :platform => 'solaris-11' + + tag 'audit:high' + + require 'puppet/acceptance/solaris_util' + extend Puppet::Acceptance::IPSUtils + + agents.each do |agent| + teardown do + clean agent + end + + step "IPS: setup" do + setup agent + setup_fakeroot agent + send_pkg agent, :pkg => 'mypkg@0.0.1' + set_publisher agent + end + + step "IPS: it should create and hold in same manifest" do + apply_manifest_on(agent, 'package {mypkg : ensure=>"0.0.1", mark=>hold}') do |result| + assert_match( /ensure: created/, result.stdout, "err: #{agent}") + end + end + + step "IPS: it should update and unhold in same manifest" do + send_pkg agent, :pkg => 'mypkg@0.0.2' + apply_manifest_on(agent, 'package {mypkg : ensure=>"0.0.2", mark=>"none"}') + end + + step "IPS: ensure it was upgraded" do + on agent, "pkg list -v mypkg" do |result| + assert_match( /mypkg@0.0.2/, result.stdout, "err: #{agent}") + end + end + end +end diff --git a/acceptance/tests/resource/package/ips/should_be_versionable.rb b/acceptance/tests/resource/package/ips/should_be_versionable.rb index 80315e1a483..ce74efd11ed 100644 --- a/acceptance/tests/resource/package/ips/should_be_versionable.rb +++ b/acceptance/tests/resource/package/ips/should_be_versionable.rb @@ -1,6 +1,12 @@ test_name "Package:IPS versionable" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils @@ -32,7 +38,7 @@ assert_match( /ensure changed/, result.stdout, "err: #{agent}") end on agent, "pkg list mypkg" do - assert_no_match( /0.0.1/, result.stdout, "err: #{agent}") + refute_match( /0.0.1/, result.stdout, "err: #{agent}") assert_match( /0.0.2/, result.stdout, "err: #{agent}") end step "IPS: it should downpgrade if asked for previous version" @@ -40,7 +46,7 @@ assert_match( /ensure changed/, result.stdout, "err: #{agent}") end on agent, "pkg list mypkg" do - assert_no_match( /0.0.2/, result.stdout, "err: #{agent}") + refute_match( /0.0.2/, result.stdout, "err: #{agent}") assert_match( /0.0.1/, result.stdout, "err: #{agent}") end end diff --git a/acceptance/tests/resource/package/ips/should_create.rb b/acceptance/tests/resource/package/ips/should_create.rb index 6c339162a08..70cfabc84a9 100644 --- a/acceptance/tests/resource/package/ips/should_create.rb +++ b/acceptance/tests/resource/package/ips/should_create.rb @@ -1,6 +1,12 @@ test_name "Package:IPS basic tests" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. 
+ require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils @@ -26,7 +32,7 @@ end step "IPS: check it was created" on(agent, puppet("resource package mypkg")) do - assert_match( /ensure => '0\.0\.1,.*'/, result.stdout, "err: #{agent}") + assert_match( /ensure\s+=> '0\.0\.1[,:]?.*'/, result.stdout, "err: #{agent}") end on agent, "pkg list -v mypkg" do assert_match( /mypkg@0.0.1/, result.stdout, "err: #{agent}") diff --git a/acceptance/tests/resource/package/ips/should_query.rb b/acceptance/tests/resource/package/ips/should_query.rb index 048a3fe4f76..0fd33218e6e 100644 --- a/acceptance/tests/resource/package/ips/should_query.rb +++ b/acceptance/tests/resource/package/ips/should_query.rb @@ -1,6 +1,12 @@ test_name "Package:IPS query" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils diff --git a/acceptance/tests/resource/package/ips/should_remove.rb b/acceptance/tests/resource/package/ips/should_remove.rb index 921f95ff410..bbedcdba625 100644 --- a/acceptance/tests/resource/package/ips/should_remove.rb +++ b/acceptance/tests/resource/package/ips/should_remove.rb @@ -1,6 +1,12 @@ test_name "Package:IPS basic tests" confine :to, :platform => 'solaris-11' +tag 'audit:medium', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. 
+ require 'puppet/acceptance/solaris_util' extend Puppet::Acceptance::IPSUtils @@ -27,7 +33,7 @@ apply_manifest_on(agent, 'package {mypkg : ensure=>absent}') on(agent, "pkg list -v mypkg", :acceptable_exit_codes => [1]) do - assert_no_match( /mypkg@0.0.1/, result.stdout, "err: #{agent}") + refute_match( /mypkg@0.0.1/, result.stdout, "err: #{agent}") end end diff --git a/acceptance/tests/resource/package/windows.rb b/acceptance/tests/resource/package/windows.rb new file mode 100644 index 00000000000..0ffbae02301 --- /dev/null +++ b/acceptance/tests/resource/package/windows.rb @@ -0,0 +1,68 @@ +test_name "Windows Package Provider" do + confine :to, :platform => 'windows' + + tag 'audit:high', + 'audit:acceptance' + + require 'puppet/acceptance/windows_utils' + extend Puppet::Acceptance::WindowsUtils + + def package_manifest(name, params, installer_source) + params_str = params.map do |param, value| + value_str = value.to_s + value_str = "\"#{value_str}\"" if value.is_a?(String) + + " #{param} => #{value_str}" + end.join(",\n") + + <<-MANIFEST +package { '#{name}': + source => '#{installer_source}', + #{params_str} +} +MANIFEST + end + + mock_package = { + :name => "MockPackage" + } + + agents.each do |agent| + tmpdir = agent.tmpdir("mock_installer") + installer_location = create_mock_package(agent, tmpdir, mock_package) + + step 'Verify that ensure = present installs the package' do + apply_manifest_on(agent, package_manifest(mock_package[:name], {ensure: :present}, installer_location)) + assert(package_installed?(agent, mock_package[:name]), 'Package succesfully installed') + end + + step 'Verify that ensure = absent removes the package' do + apply_manifest_on(agent, package_manifest(mock_package[:name], {ensure: :absent}, installer_location)) + assert_equal(false, package_installed?(agent, mock_package[:name]), 'Package successfully Uninstalled') + end + + tmpdir = agent.tmpdir("mock_installer") + mock_package[:name] = "MockPackageWithFile" + mock_package[:install_commands] = 'System.IO.File.ReadAllLines("install.txt");' + installer_location = create_mock_package(agent, tmpdir, mock_package) + + # Since we didn't add the install.txt package the installation should fail with code 1004 + step 'Verify that ensure = present fails when an installer fails with a non-zero exit code' do + apply_manifest_on(agent, package_manifest(mock_package[:name], {ensure: :present}, installer_location)) do |result| + assert_match(/#{mock_package[:name]}/, result.stderr, 'Windows package provider did not fail when the package install failed') + end + end + + step 'Verify that ensure = present installs a package that requires additional resources' do + create_remote_file(agent, "#{tmpdir}/install.txt", 'foobar') + apply_manifest_on(agent, package_manifest(mock_package[:name], {ensure: :present}, installer_location)) + assert(package_installed?(agent, mock_package[:name]), 'Package succesfully installed') + end + + step 'Verify that ensure = absent removes the package that required additional resources' do + apply_manifest_on(agent, package_manifest(mock_package[:name], {ensure: :absent}, installer_location)) + assert_equal(false, package_installed?(agent, mock_package[:name]), 'Package successfully Uninstalled') + end + end + +end diff --git a/acceptance/tests/resource/package/yum.rb b/acceptance/tests/resource/package/yum.rb new file mode 100644 index 00000000000..3597ad54765 --- /dev/null +++ b/acceptance/tests/resource/package/yum.rb @@ -0,0 +1,203 @@ +test_name "test the yum package provider" do + + confine 
:to, {:platform => /(?:centos|el-|fedora)/}, agents + # Skipping tests if facter finds this is an ec2 host, PUP-7774 + agents.each do |agent| + skip_test('Skipping EC2 Hosts') if fact_on(agent, 'ec2_metadata') + end + + # Upgrade the AlmaLinux release package for newer keys until our image is updated (RE-16096) + agents.each do |agent| + on(agent, 'dnf -y upgrade almalinux-release') if fact_on(agent, 'os.name') == 'AlmaLinux' + end + + tag 'audit:high', + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + + require 'puppet/acceptance/rpm_util' + extend Puppet::Acceptance::RpmUtils + + epoch_rpm_options = {:pkg => 'epoch', :version => '1.1', :epoch => '1'} + no_epoch_rpm_options = {:pkg => 'guid', :version => '1.0'} + + teardown do + step "cleanup" + agents.each do |agent| + clean_rpm agent, epoch_rpm_options + clean_rpm agent, no_epoch_rpm_options + end + end + + def verify_state(hosts, pkg, state, match) + hosts.each do |agent| + cmd = rpm_provider(agent) + # Note yum and dnf list packages as <name>.<arch> + on(agent, "#{cmd} list installed") do |result| + method(match).call(/^#{pkg}\./, result.stdout) + end + end + end + + def verify_present(hosts, pkg) + verify_state(hosts, pkg, '(?!purged|absent)[^\']+', :assert_match) + end + + def verify_absent(hosts, pkg) + verify_state(hosts, pkg, '(?:purged|absent)', :refute_match) + end + + step "Managing a package which does not include an epoch in its version" do + step 'Setup repo and package' + agents.each do |agent| + clean_rpm agent, no_epoch_rpm_options + setup_rpm agent, no_epoch_rpm_options + send_rpm agent, no_epoch_rpm_options + end + + step 'Installing a known package succeeds' do + verify_absent agents, 'guid' + apply_manifest_on(agents, 'package {"guid": ensure => installed}') do |result| + assert_match('Package[guid]/ensure: created', "#{result.host}: #{result.stdout}") + end + end + + step 'Removing a known package succeeds' do + verify_present agents, 'guid' + apply_manifest_on(agents, 'package {"guid": ensure => absent}') do |result| + assert_match('Package[guid]/ensure: removed', "#{result.host}: #{result.stdout}") + end + end + + step 'Installing a specific version of a known package succeeds' do + verify_absent agents, 'guid' + apply_manifest_on(agents, 'package {"guid": ensure => "1.0"}') do |result| + assert_match('Package[guid]/ensure: created', "#{result.host}: #{result.stdout}") + end + end + + step 'Removing a specific version of a known package succeeds' do + verify_present agents, 'guid' + apply_manifest_on(agents, 'package {"guid": ensure => absent}') do |result| + assert_match('Package[guid]/ensure: removed', "#{result.host}: #{result.stdout}") + end + end + + step 'Installing a non-existent version of a known package fails' do + verify_absent agents, 'guid' + apply_manifest_on(agents, 'package {"guid": ensure => "1.1"}') do |result| + refute_match(/Package\[guid\]\/ensure: created/, "#{result.host}: #{result.stdout}") + assert_match("Package[guid]/ensure: change from 'purged' to '1.1' failed", "#{result.host}: #{result.stderr}") + end + verify_absent agents, 'guid' + end + + step 'Installing a non-existent package fails' do + verify_absent agents, 'not_a_package' + apply_manifest_on(agents, 'package {"not_a_package": ensure => present}') do |result| + refute_match(/Package\[not_a_package\]\/ensure: created/, "#{result.host}: #{result.stdout}") + 
assert_match("Package[not_a_package]/ensure: change from 'purged' to 'present' failed", "#{result.host}: #{result.stderr}") + end + verify_absent agents, 'not_a_package' + end + + step 'Removing a non-existent package succeeds' do + verify_absent agents, 'not_a_package' + apply_manifest_on(agents, 'package {"not_a_package": ensure => absent}') do |result| + refute_match(/Package\[not_a_package\]\/ensure/, "#{result.host}: #{result.stdout}") + assert_match('Applied catalog', "#{result.host}: #{result.stdout}") + end + verify_absent agents, 'not_a_package' + end + + step 'Installing a known package using source succeeds' do + verify_absent agents, 'guid' + apply_manifest_on(agents, "package { 'guid': ensure => installed, install_options => '--nogpgcheck', source=>'/tmp/rpmrepo/RPMS/noarch/guid-1.0-1.noarch.rpm' }") do |result| + assert_match('Package[guid]/ensure: created', "#{result.host}: #{result.stdout}") + end + end + end + + ### Epoch tests ### + agents.each do |agent| + step "Managing a package which includes an epoch in its version" do + step "Setup repo and package" do + clean_rpm agent, no_epoch_rpm_options + setup_rpm agent, epoch_rpm_options + send_rpm agent, epoch_rpm_options + end + + step 'Installing a known package with an epoch succeeds' do + verify_absent [agent], 'epoch' + apply_manifest_on(agent, 'package {"epoch": ensure => installed}') do |result| + assert_match('Package[epoch]/ensure: created', "#{result.host}: #{result.stdout}") + end + end + + step 'Removing a known package with an epoch succeeds' do + verify_present [agent], 'epoch' + apply_manifest_on(agent, 'package {"epoch": ensure => absent}') do |result| + assert_match('Package[epoch]/ensure: removed', "#{result.host}: #{result.stdout}") + end + end + + step "Installing a specific version of a known package with an epoch succeeds when epoch and arch are specified" do + verify_absent [agent], 'epoch' + apply_manifest_on(agent, "package {'epoch': ensure => '1:1.1-1.noarch'}") do |result| + assert_match('Package[epoch]/ensure: created', "#{result.host}: #{result.stdout}") + end + + apply_manifest_on(agent, "package {'epoch': ensure => '1:1.1-1.noarch'}") do |result| + refute_match(/epoch/, result.stdout) + end + end + + if rpm_provider(agent) == 'dnf' + # Yum requires the arch to be specified whenever epoch is specified. This step is only + # expected to work in DNF. + step "Installing a specific version of a known package with an epoch succeeds when epoch is specified and arch is not" do + step "Remove the package" do + apply_manifest_on(agent, 'package {"epoch": ensure => absent}') + verify_absent [agent], 'epoch' + end + + apply_manifest_on(agent, 'package {"epoch": ensure => "1:1.1-1"}') do |result| + assert_match('Package[epoch]/ensure: created', "#{result.host}: #{result.stdout}") + end + + apply_manifest_on(agent, 'package {"epoch": ensure => "1:1.1-1"}') do |result| + refute_match(/epoch/, result.stdout) + end + + apply_manifest_on(agent, "package {'epoch': ensure => '1:1.1-1.noarch'}") do |result| + refute_match(/epoch/, result.stdout) + end + end + end + + if rpm_provider(agent) == 'yum' + step "Installing a specified version of a known package with an epoch succeeds without epoch or arch provided" do + # Due to a bug in DNF, epoch is required. This step is only expected to work in Yum. 
+ # See https://bugzilla.redhat.com/show_bug.cgi?id=1286877 + step "Remove the package" do + apply_manifest_on(agent, 'package {"epoch": ensure => absent}') + verify_absent [agent], 'epoch' + end + + apply_manifest_on(agent, 'package {"epoch": ensure => "1.1-1"}') do |result| + assert_match('Package[epoch]/ensure: created', "#{result.host}: #{result.stdout}") + end + + apply_manifest_on(agent, 'package {"epoch": ensure => "1.1-1"}') do |result| + refute_match(/epoch/, result.stdout) + end + + apply_manifest_on(agent, "package {'epoch': ensure => '1:1.1-1.noarch'}") do |result| + refute_match(/epoch/, result.stdout) + end + end + end + end + end +end diff --git a/acceptance/tests/resource/scheduled_task/should_create.rb b/acceptance/tests/resource/scheduled_task/should_create.rb deleted file mode 100644 index 507273964e9..00000000000 --- a/acceptance/tests/resource/scheduled_task/should_create.rb +++ /dev/null @@ -1,33 +0,0 @@ -test_name "should create a scheduled task" - -name = "pl#{rand(999999).to_i}" -confine :to, :platform => 'windows' - -agents.each do |agent| - # query only supports /tn parameter on Vista and later - query_cmd = "schtasks.exe /query /v /fo list /tn #{name}" - on agents, facter('kernelmajversion') do - query_cmd = "schtasks.exe /query /v /fo list | grep -q #{name}" if stdout.chomp.to_f < 6.0 - end - - step "create the task" - args = ['ensure=present', - 'command=c:\\\\windows\\\\system32\\\\notepad.exe', - 'arguments="foo bar baz"', - 'working_dir=c:\\\\windows'] - on agent, puppet_resource('scheduled_task', name, args) - - step "verify the task exists" - on agent, query_cmd - - step "verify task properties" - on agent, puppet_resource('scheduled_task', name) do - assert_match(/command\s*=>\s*'c:\\windows\\system32\\notepad.exe'/, stdout) - assert_match(/arguments\s*=>\s*'foo bar baz'/, stdout) - assert_match(/enabled\s*=>\s*'true'/, stdout) - assert_match(/working_dir\s*=>\s*'c:\\windows'/, stdout) - end - - step "delete the task" - on agent, "schtasks.exe /delete /tn #{name} /f" -end diff --git a/acceptance/tests/resource/scheduled_task/should_destroy.rb b/acceptance/tests/resource/scheduled_task/should_destroy.rb deleted file mode 100644 index 8fdbb959c90..00000000000 --- a/acceptance/tests/resource/scheduled_task/should_destroy.rb +++ /dev/null @@ -1,35 +0,0 @@ -test_name "should delete a scheduled task" - -name = "pl#{rand(999999).to_i}" -confine :to, :platform => 'windows' - -agents.each do |agent| - # Have to use /v1 parameter for Vista and later, older versions - # don't accept the parameter - version = '/v1' - # query only supports /tn parameter on Vista and later - query_cmd = "schtasks.exe /query /v /fo list /tn #{name}" - on agent, facter('kernelmajversion') do - if stdout.chomp.to_f < 6.0 - version = '' - query_cmd = "schtasks.exe /query /v /fo list | grep #{name}" - end - end - - step "create the task" - on agent, "schtasks.exe /create #{version} /tn #{name} /tr c:\\\\windows\\\\system32\\\\notepad.exe /sc daily /ru system" - - step "delete the task" - on agent, puppet_resource('scheduled_task', name, 'ensure=absent') - - step "verify the task was deleted" - Timeout.timeout(5) do - loop do - step "Win32::TaskScheduler#delete call seems to be asynchronous, so we my need to test multiple times" - on agent, query_cmd, :acceptable_exit_codes => [0,1] - break if exit_code == 1 - sleep 1 - end - end - fail_test "Unable to verify that scheduled task was removed" unless exit_code == 1 -end diff --git a/acceptance/tests/resource/scheduled_task/should_modify.rb 
b/acceptance/tests/resource/scheduled_task/should_modify.rb deleted file mode 100644 index 35d9d3ef772..00000000000 --- a/acceptance/tests/resource/scheduled_task/should_modify.rb +++ /dev/null @@ -1,28 +0,0 @@ -test_name "should modify a scheduled task" - -name = "pl#{rand(999999).to_i}" -confine :to, :platform => 'windows' - -agents.each do |agent| - # Have to use /v1 parameter for Vista and later, older versions - # don't accept the parameter - version = '/v1' - on agents, facter('kernelmajversion') do - version = '' if stdout.chomp.to_f < 6.0 - end - - step "create the task" - on agent, "schtasks.exe /create #{version} /tn #{name} /tr c:\\\\windows\\\\system32\\\\notepad.exe /sc daily /ru system" - - step "modify the task" - on agent, puppet_resource('scheduled_task', name, ['ensure=present', 'command=c:\\\\windows\\\\system32\\\\notepad2.exe', "arguments=args-#{name}"]) - - step "verify the arguments were updated" - on agent, puppet_resource('scheduled_task', name) do - assert_match(/command\s*=>\s*'c:\\windows\\system32\\notepad2.exe'/, stdout) - assert_match(/arguments\s*=>\s*'args-#{name}'/, stdout) - end - - step "delete the task" - on agent, "schtasks.exe /delete /tn #{name} /f" -end diff --git a/acceptance/tests/resource/scheduled_task/should_query.rb b/acceptance/tests/resource/scheduled_task/should_query.rb deleted file mode 100644 index 2b9250395cd..00000000000 --- a/acceptance/tests/resource/scheduled_task/should_query.rb +++ /dev/null @@ -1,22 +0,0 @@ -test_name "test that we can query and find a scheduled task that exists." - -name = "pl#{rand(999999).to_i}" -confine :to, :platform => 'windows' - -agents.each do |agent| - # Have to use /v1 parameter for Vista and later, older versions - # don't accept the parameter - kernel_maj_version = on(agent, facter('kernelmajversion')).stdout.chomp.to_f - version = kernel_maj_version < 6.0 ? '' : '/v1' - - step "create the task" - on agent, "schtasks.exe /create #{version} /tn #{name} /tr c:\\\\windows\\\\system32\\\\notepad.exe /sc daily /ru system" - - step "query for the task and verify it was found" - on agent, puppet_resource('scheduled_task', name) do - fail_test "didn't find the scheduled_task #{name}" unless stdout.include? 'present' - end - - step "delete the task" - on agent, "schtasks.exe /delete /tn #{name} /f" -end diff --git a/acceptance/tests/resource/service/AIX_service_provider.rb b/acceptance/tests/resource/service/AIX_service_provider.rb index 403a8765427..42ead3e4fe6 100644 --- a/acceptance/tests/resource/service/AIX_service_provider.rb +++ b/acceptance/tests/resource/service/AIX_service_provider.rb @@ -1,34 +1,43 @@ test_name 'AIX Service Provider Testing' +tag 'audit:high', + 'audit:refactor', # Use block style `test_name` + 'audit:acceptance' # Could be done at the integration (or unit) layer though + # actual changing of resources could irreparably damage a + # host running this, or require special permissions. + confine :to, :platform => 'aix' +require 'puppet/acceptance/service_utils' +extend Puppet::Acceptance::ServiceUtils + sloth_daemon_script = <