diff --git a/.ci/docs b/.ci/docs index 24f6f469e3..a8ce2aada0 100644 --- a/.ci/docs +++ b/.ci/docs @@ -1,8 +1,11 @@ pipeline { - agent { label 'docs' } + agent { + label 'docs' + } options { timestamps() ansiColor('xterm') + timeout(time: 2, unit: 'HOURS') } environment { PYENV_ROOT = "/usr/local/pyenv" diff --git a/.ci/kitchen-centos7-py2 b/.ci/kitchen-centos7-py2 index 5cc9984cf0..ee7101aed5 100644 --- a/.ci/kitchen-centos7-py2 +++ b/.ci/kitchen-centos7-py2 @@ -1,73 +1,82 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py2" - TEST_PLATFORM = "centos-7" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py2', + 'TEST_PLATFORM=centos-7', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 
'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } catch (Exception e) { + currentBuild.result = 'FAILURE' + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-centos7-py3 b/.ci/kitchen-centos7-py3 index 120bd3bee9..7c90e8acb0 100644 --- a/.ci/kitchen-centos7-py3 +++ b/.ci/kitchen-centos7-py3 @@ -1,73 +1,82 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - 
SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py3" - TEST_PLATFORM = "centos-7" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py3', + 'TEST_PLATFORM=centos-7', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy 
$TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } catch (Exception e) { + currentBuild.result = 'FAILURE' + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 
'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-ubuntu1604-py2 b/.ci/kitchen-ubuntu1604-py2 index f4c8277cec..e477cf4bfe 100644 --- a/.ci/kitchen-ubuntu1604-py2 +++ b/.ci/kitchen-ubuntu1604-py2 @@ -1,73 +1,82 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py2" - TEST_PLATFORM = "ubuntu-1604" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 
'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py2', + 'TEST_PLATFORM=ubuntu-1604', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 
'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } catch (Exception e) { + currentBuild.result = 'FAILURE' + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if ( currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: 
"jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-ubuntu1604-py3 b/.ci/kitchen-ubuntu1604-py3 index c26f466efa..df713759c2 100644 --- a/.ci/kitchen-ubuntu1604-py3 +++ b/.ci/kitchen-ubuntu1604-py3 @@ -1,73 +1,82 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py3" - TEST_PLATFORM = "ubuntu-1604" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py3', + 'TEST_PLATFORM=ubuntu-1604', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 
'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } catch (Exception e) { + currentBuild.result = 'FAILURE' + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-windows2016-py2 b/.ci/kitchen-windows2016-py2 index d0e7320975..b9e5ff45b2 100644 --- a/.ci/kitchen-windows2016-py2 +++ b/.ci/kitchen-windows2016-py2 @@ -1,73 +1,82 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - 
SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py2" - TEST_PLATFORM = "windows-2016" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py2', + 'TEST_PLATFORM=windows-2016', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen 
destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } catch (Exception e) { + currentBuild.result = 'FAILURE' + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 
'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-windows2016-py3 b/.ci/kitchen-windows2016-py3 index 11c3661447..e25d8d0ac6 100644 --- a/.ci/kitchen-windows2016-py3 +++ b/.ci/kitchen-windows2016-py3 @@ -1,73 +1,82 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py3" - TEST_PLATFORM = "windows-2016" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 
'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py3', + 'TEST_PLATFORM=windows-2016', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 
'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } catch (Exception e) { + currentBuild.result = 'FAILURE' + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: 
"jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/lint b/.ci/lint index 885174b0bf..28cea02139 100644 --- a/.ci/lint +++ b/.ci/lint @@ -3,6 +3,7 @@ pipeline { options { timestamps() ansiColor('xterm') + timeout(time: 1, unit: 'HOURS') } environment { PYENV_ROOT = "/usr/local/pyenv" diff --git a/.github/stale.yml b/.github/stale.yml index 97d9fcbcdd..654de6560f 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -1,8 +1,8 @@ # Probot Stale configuration file # Number of days of inactivity before an issue becomes stale -# 550 is approximately 1 year and 6 months -daysUntilStale: 550 +# 540 is approximately 1 year and 6 months +daysUntilStale: 540 # Number of days of inactivity before a stale issue is closed daysUntilClose: 7 diff --git a/doc/man/salt.7 b/doc/man/salt.7 index 362566a6d3..8caabb18e9 100644 --- a/doc/man/salt.7 +++ b/doc/man/salt.7 @@ -5743,8 +5743,8 @@ Default: False .sp Turning on the master stats enables runtime throughput and statistics events to be fired from the master event bus. These events will report on what -functions have been run on the master and how long these runs have, on -average, taken over a given period of time. +functions have been run on the master along with their average latency and +duration, taken over a given period of time. .SS \fBmaster_stats_event_iter\fP .sp Default: 60 diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index 9b9a0e936b..9419ae4cc1 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -887,8 +887,8 @@ Default: False Turning on the master stats enables runtime throughput and statistics events to be fired from the master event bus. 
These events will report on what -functions have been run on the master and how long these runs have, on -average, taken over a given period of time. +functions have been run on the master along with their average latency and +duration, taken over a given period of time. .. conf_master:: master_stats_event_iter diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst index 98bb9293be..ac94cd087b 100644 --- a/doc/ref/configuration/minion.rst +++ b/doc/ref/configuration/minion.rst @@ -752,6 +752,30 @@ Statically assigns grains to the minion. cabinet: 13 cab_u: 14-15 +.. conf_minion:: grains_blacklist + +``grains_blacklist`` +-------------------- + +Default: ``[]`` + +Each grains key will be compared against each of the expressions in this list. +Any keys which match will be filtered from the grains. Exact matches, glob +matches, and regular expressions are supported. + +.. note:: + Some states and execution modules depend on grains. Filtering may cause + them to be unavailable or run unreliably. + +.. versionadded:: Neon + +.. code-block:: yaml + + grains_blacklist: + - cpu_flags + - zmq* + - ipv[46] + .. conf_minion:: grains_cache ``grains_cache`` diff --git a/doc/ref/states/writing.rst b/doc/ref/states/writing.rst index 481bec1fba..dce5853eaf 100644 --- a/doc/ref/states/writing.rst +++ b/doc/ref/states/writing.rst @@ -259,10 +259,6 @@ A State Module must return a dict containing the following keys/values: Prefer to keep line lengths short (use multiple lines as needed), and end with punctuation (e.g. a period) to delimit multiple comments. -The return data can also, include the **pchanges** key, this stands for -`predictive changes`. The **pchanges** key informs the State system what -changes are predicted to occur. - .. note:: States should not return data which cannot be serialized such as frozensets. 
@@ -448,7 +444,6 @@ Example state module 'changes': {}, 'result': False, 'comment': '', - 'pchanges': {}, } # Start with basic error-checking. Do all the passed parameters make sense @@ -469,7 +464,7 @@ Example state module # in ``test=true`` mode. if __opts__['test'] == True: ret['comment'] = 'The state of "{0}" will be changed.'.format(name) - ret['pchanges'] = { + ret['changes'] = { 'old': current_state, 'new': 'Description, diff, whatever of the new state', } diff --git a/doc/topics/matchers/index.rst b/doc/topics/matchers/index.rst index 92ed1f1cf3..7983846b44 100644 --- a/doc/topics/matchers/index.rst +++ b/doc/topics/matchers/index.rst @@ -4,7 +4,7 @@ Matchers ======== -.. versionadded:: Flourine +.. versionadded:: Neon Matchers are modules that provide Salt's targeting abilities. As of the Flourine release, matchers can be dynamically loaded. Currently new matchers diff --git a/doc/topics/releases/neon.rst b/doc/topics/releases/neon.rst index 0c13acb59b..0dedddf44b 100644 --- a/doc/topics/releases/neon.rst +++ b/doc/topics/releases/neon.rst @@ -165,6 +165,31 @@ New output: 0 +State Changes +============= + +- The :py:func:`file.rename ` state will now return a + ``True`` result (and make no changes) when the destination file already + exists, and ``Force`` is not set to ``True``. In previous releases, a + ``False`` result would be returned, but this meant that subsequent runs of + the state would fail due to the destination file being present. + +- The ``onchanges`` and ``prereq`` :ref:`requisites ` now behave + properly in test mode. + +Module Changes +============== + +- The :py:func:`debian_ip ` module used by the + :py:func:`network.managed ` state has been + heavily refactored. The order that options appear in inet/inet6 blocks may + produce cosmetic changes. Many options without an 'ipvX' prefix will now be + shared between inet and inet6 blocks. The options ``enable_ipv4`` and + ``enabled_ipv6`` will now fully remove relevant inet/inet6 blocks. 
Overriding + options by prefixing them with 'ipvX' will now work with most options (i.e. + ``dns`` can be overridden by ``ipv4dns`` or ``ipv6dns``). The ``proto`` option + is now required. + Salt Cloud Features =================== @@ -212,3 +237,13 @@ Module Deprecations - Support for the ``ssh.recv_known_host`` function has been removed. Please use the :py:func:`ssh.recv_known_host_entries ` function instead. + +State Deprecations +------------------ + +- The :py:mod:`win_servermanager ` state has been + changed as follows: + + - Support for the ``force`` kwarg has been removed from the + :py:func:`win_servermanager.installed ` + function. Please use ``recurse`` instead. diff --git a/doc/topics/releases/version_numbers.rst b/doc/topics/releases/version_numbers.rst index 59c2de04e8..2dbd010a59 100644 --- a/doc/topics/releases/version_numbers.rst +++ b/doc/topics/releases/version_numbers.rst @@ -23,7 +23,7 @@ Code Names To distinguish future releases from the current release, code names are used. The periodic table is used to derive the next codename. The first release in the date based system was code named ``Hydrogen``, each subsequent release will -go to the next `atomic number `. +go to the next `atomic number `_. Assigned codenames: @@ -36,6 +36,8 @@ Assigned codenames: - Nitrogen: ``2017.7.0`` - Oxygen: ``2018.3.0`` - Fluorine: ``TBD`` +- Neon: ``TBD`` +- Sodium: ``TBD`` Example ------- diff --git a/salt/cli/support/__init__.py b/salt/cli/support/__init__.py new file mode 100644 index 0000000000..6a98a2d656 --- /dev/null +++ b/salt/cli/support/__init__.py @@ -0,0 +1,65 @@ +# coding=utf-8 +''' +Get default scenario of the support. +''' +from __future__ import print_function, unicode_literals, absolute_import +import yaml +import os +import salt.exceptions +import jinja2 +import logging + +log = logging.getLogger(__name__) + + +def _render_profile(path, caller, runner): + ''' + Render profile as Jinja2. 
+ :param path: + :return: + ''' + env = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(path)), trim_blocks=False) + return env.get_template(os.path.basename(path)).render(salt=caller, runners=runner).strip() + + +def get_profile(profile, caller, runner): + ''' + Get profile. + + :param profile: + :return: + ''' + profiles = profile.split(',') + data = {} + for profile in profiles: + if os.path.basename(profile) == profile: + profile = profile.split('.')[0] # Trim extension if someone added it + profile_path = os.path.join(os.path.dirname(__file__), 'profiles', profile + '.yml') + else: + profile_path = profile + if os.path.exists(profile_path): + try: + rendered_template = _render_profile(profile_path, caller, runner) + log.trace('\n{d}\n{t}\n{d}\n'.format(d='-' * 80, t=rendered_template)) + data.update(yaml.load(rendered_template)) + except Exception as ex: + log.debug(ex, exc_info=True) + raise salt.exceptions.SaltException('Rendering profile failed: {}'.format(ex)) + else: + raise salt.exceptions.SaltException('Profile "{}" is not found.'.format(profile)) + + return data + + +def get_profiles(config): + ''' + Get available profiles. 
+ + :return: + ''' + profiles = [] + for profile_name in os.listdir(os.path.join(os.path.dirname(__file__), 'profiles')): + if profile_name.endswith('.yml'): + profiles.append(profile_name.split('.')[0]) + + return sorted(profiles) diff --git a/salt/cli/support/collector.py b/salt/cli/support/collector.py new file mode 100644 index 0000000000..478d07e13b --- /dev/null +++ b/salt/cli/support/collector.py @@ -0,0 +1,495 @@ +# coding=utf-8 +from __future__ import absolute_import, print_function, unicode_literals +import os +import sys +import copy +import yaml +import json +import logging +import tarfile +import time +import salt.ext.six as six + +if six.PY2: + import exceptions +else: + import builtins as exceptions + from io import IOBase as file + +from io import BytesIO + +import salt.utils.stringutils +import salt.utils.parsers +import salt.utils.verify +import salt.utils.platform +import salt.utils.process +import salt.exceptions +import salt.defaults.exitcodes +import salt.cli.caller +import salt.cli.support +import salt.cli.support.console +import salt.cli.support.intfunc +import salt.cli.support.localrunner +import salt.output.table_out +import salt.runner +import salt.utils.files + + +salt.output.table_out.__opts__ = {} +log = logging.getLogger(__name__) + + +class SupportDataCollector(object): + ''' + Data collector. It behaves just like another outputter, + except it grabs the data to the archive files. + ''' + def __init__(self, name, output): + ''' + constructor of the data collector + :param name: + :param path: + :param format: + ''' + self.archive_path = name + self.__default_outputter = output + self.__format = format + self.__arch = None + self.__current_section = None + self.__current_section_name = None + self.__default_root = time.strftime('%Y.%m.%d-%H.%M.%S-snapshot') + self.out = salt.cli.support.console.MessagesOutput() + + def open(self): + ''' + Opens archive. 
+ :return: + ''' + if self.__arch is not None: + raise salt.exceptions.SaltException('Archive already opened.') + self.__arch = tarfile.TarFile.bz2open(self.archive_path, 'w') + + def close(self): + ''' + Closes the archive. + :return: + ''' + if self.__arch is None: + raise salt.exceptions.SaltException('Archive already closed') + self._flush_content() + self.__arch.close() + self.__arch = None + + def _flush_content(self): + ''' + Flush content to the archive + :return: + ''' + if self.__current_section is not None: + buff = BytesIO() + buff._dirty = False + for action_return in self.__current_section: + for title, ret_data in action_return.items(): + if isinstance(ret_data, file): + self.out.put(ret_data.name, indent=4) + self.__arch.add(ret_data.name, arcname=ret_data.name) + else: + buff.write(salt.utils.stringutils.to_bytes(title + '\n')) + buff.write(salt.utils.stringutils.to_bytes(('-' * len(title)) + '\n\n')) + buff.write(salt.utils.stringutils.to_bytes(ret_data)) + buff.write(salt.utils.stringutils.to_bytes('\n\n\n')) + buff._dirty = True + if buff._dirty: + buff.seek(0) + tar_info = tarfile.TarInfo(name="{}/{}".format(self.__default_root, self.__current_section_name)) + if not hasattr(buff, 'getbuffer'): # Py2's BytesIO is older + buff.getbuffer = buff.getvalue + tar_info.size = len(buff.getbuffer()) + self.__arch.addfile(tarinfo=tar_info, fileobj=buff) + + def add(self, name): + ''' + Start a new section. + :param name: + :return: + ''' + if self.__current_section: + self._flush_content() + self.discard_current(name) + + def discard_current(self, name=None): + ''' + Discard current section + :return: + ''' + self.__current_section = [] + self.__current_section_name = name + + def write(self, title, data, output=None): + ''' + Add a data to the current opened section. 
+ :return: + ''' + if not isinstance(data, (dict, list, tuple)): + data = {'raw-content': str(data)} + output = output or self.__default_outputter + + if output != 'null': + try: + if isinstance(data, dict) and 'return' in data: + data = data['return'] + content = salt.output.try_printout(data, output, {'extension_modules': '', 'color': False}) + except Exception: # Fall-back to just raw YAML + content = None + else: + content = None + + if content is None: + data = json.loads(json.dumps(data)) + if isinstance(data, dict) and data.get('return'): + data = data.get('return') + content = yaml.safe_dump(data, default_flow_style=False, indent=4) + + self.__current_section.append({title: content}) + + def link(self, title, path): + ''' + Add a static file on the file system. + + :param title: + :param path: + :return: + ''' + # The filehandler needs to be explicitly passed here, so PyLint needs to accept that. + # pylint: disable=W8470 + if not isinstance(path, file): + path = salt.utils.files.fopen(path) + self.__current_section.append({title: path}) + # pylint: enable=W8470 + + +class SaltSupport(salt.utils.parsers.SaltSupportOptionParser): + ''' + Class to run Salt Support subsystem. + ''' + RUNNER_TYPE = 'run' + CALL_TYPE = 'call' + + def _setup_fun_config(self, fun_conf): + ''' + Setup function configuration. + + :param conf: + :return: + ''' + conf = copy.deepcopy(self.config) + conf['file_client'] = 'local' + conf['fun'] = '' + conf['arg'] = [] + conf['kwarg'] = {} + conf['cache_jobs'] = False + conf['print_metadata'] = False + conf.update(fun_conf) + conf['fun'] = conf['fun'].split(':')[-1] # Discard typing prefix + + return conf + + def _get_runner(self, conf): + ''' + Get & setup runner. 
+ + :param conf: + :return: + ''' + conf = self._setup_fun_config(copy.deepcopy(conf)) + if not getattr(self, '_runner', None): + self._runner = salt.cli.support.localrunner.LocalRunner(conf) + else: + self._runner.opts = conf + return self._runner + + def _get_caller(self, conf): + ''' + Get & setup caller from the factory. + + :param conf: + :return: + ''' + conf = self._setup_fun_config(copy.deepcopy(conf)) + if not getattr(self, '_caller', None): + self._caller = salt.cli.caller.Caller.factory(conf) + else: + self._caller.opts = conf + return self._caller + + def _local_call(self, call_conf): + ''' + Execute local call + ''' + try: + ret = self._get_caller(call_conf).call() + except SystemExit: + ret = 'Data is not available at this moment' + self.out.error(ret) + except Exception as ex: + ret = 'Unhandled exception occurred: {}'.format(ex) + log.debug(ex, exc_info=True) + self.out.error(ret) + + return ret + + def _local_run(self, run_conf): + ''' + Execute local runner + + :param run_conf: + :return: + ''' + try: + ret = self._get_runner(run_conf).run() + except SystemExit: + ret = 'Runner is not available at this moment' + self.out.error(ret) + except Exception as ex: + ret = 'Unhandled exception occurred: {}'.format(ex) + log.debug(ex, exc_info=True) + + return ret + + def _internal_function_call(self, call_conf): + ''' + Call internal function. + + :param call_conf: + :return: + ''' + def stub(*args, **kwargs): + message = 'Function {} is not available'.format(call_conf['fun']) + self.out.error(message) + log.debug('Attempt to run "{fun}" with {arg} arguments and {kwargs} parameters.'.format(**call_conf)) + return message + + return getattr(salt.cli.support.intfunc, + call_conf['fun'], stub)(self.collector, + *call_conf['arg'], + **call_conf['kwargs']) + + def _get_action(self, action_meta): + ''' + Parse action and turn into a calling point. 
+ :param action_meta: + :return: + ''' + conf = { + 'fun': list(action_meta.keys())[0], + 'arg': [], + 'kwargs': {}, + } + if not len(conf['fun'].split('.')) - 1: + conf['salt.int.intfunc'] = True + + action_meta = action_meta[conf['fun']] + info = action_meta.get('info', 'Action for {}'.format(conf['fun'])) + for arg in action_meta.get('args') or []: + if not isinstance(arg, dict): + conf['arg'].append(arg) + else: + conf['kwargs'].update(arg) + + return info, action_meta.get('output'), conf + + def collect_internal_data(self): + ''' + Dumps current running pillars, configuration etc. + :return: + ''' + section = 'configuration' + self.out.put(section) + self.collector.add(section) + self.out.put('Saving config', indent=2) + self.collector.write('General Configuration', self.config) + self.out.put('Saving pillars', indent=2) + self.collector.write('Active Pillars', self._local_call({'fun': 'pillar.items'})) + + section = 'highstate' + self.out.put(section) + self.collector.add(section) + self.out.put('Saving highstate', indent=2) + self.collector.write('Rendered highstate', self._local_call({'fun': 'state.show_highstate'})) + + def _extract_return(self, data): + ''' + Extracts return data from the results. + + :param data: + :return: + ''' + if isinstance(data, dict): + data = data.get('return', data) + + return data + + def collect_local_data(self): + ''' + Collects master system data. 
+ :return: + ''' + def call(func, *args, **kwargs): + ''' + Call wrapper for templates + :param func: + :return: + ''' + return self._extract_return(self._local_call({'fun': func, 'arg': args, 'kwarg': kwargs})) + + def run(func, *args, **kwargs): + ''' + Runner wrapper for templates + :param func: + :return: + ''' + return self._extract_return(self._local_run({'fun': func, 'arg': args, 'kwarg': kwargs})) + + scenario = salt.cli.support.get_profile(self.config['support_profile'], call, run) + for category_name in scenario: + self.out.put(category_name) + self.collector.add(category_name) + for action in scenario[category_name]: + if not action: + continue + action_name = next(iter(action)) + if not isinstance(action[action_name], six.string_types): + info, output, conf = self._get_action(action) + action_type = self._get_action_type(action) # run: for runners + if action_type == self.RUNNER_TYPE: + self.out.put('Running {}'.format(info.lower()), indent=2) + self.collector.write(info, self._local_run(conf), output=output) + elif action_type == self.CALL_TYPE: + if not conf.get('salt.int.intfunc'): + self.out.put('Collecting {}'.format(info.lower()), indent=2) + self.collector.write(info, self._local_call(conf), output=output) + else: + self.collector.discard_current() + self._internal_function_call(conf) + else: + self.out.error('Unknown action type "{}" for action: {}'.format(action_type, action)) + else: + # TODO: This needs to be moved then to the utils. + # But the code is not yet there (other PRs) + self.out.msg('\n'.join(salt.cli.support.console.wrap(action[action_name])), ident=2) + + def _get_action_type(self, action): + ''' + Get action type. 
+ :param action: + :return: + ''' + action_name = next(iter(action or {'': None})) + if ':' not in action_name: + action_name = '{}:{}'.format(self.CALL_TYPE, action_name) + + return action_name.split(':')[0] or None + + def collect_targets_data(self): + ''' + Collects minion targets data + :return: + ''' + # TODO: remote collector? + + def _cleanup(self): + ''' + Cleanup if crash/exception + :return: + ''' + if (hasattr(self, 'config') + and self.config.get('support_archive') + and os.path.exists(self.config['support_archive'])): + self.out.warning('Terminated earlier, cleaning up') + os.unlink(self.config['support_archive']) + + def _check_existing_archive(self): + ''' + Check if archive exists or not. If exists and --force was not specified, + bail out. Otherwise remove it and move on. + + :return: + ''' + if os.path.exists(self.config['support_archive']): + if self.config['support_archive_force_overwrite']: + self.out.warning('Overwriting existing archive: {}'.format(self.config['support_archive'])) + os.unlink(self.config['support_archive']) + ret = True + else: + self.out.warning('File {} already exists.'.format(self.config['support_archive'])) + ret = False + else: + ret = True + + return ret + + def run(self): + exit_code = salt.defaults.exitcodes.EX_OK + self.out = salt.cli.support.console.MessagesOutput() + try: + self.parse_args() + except (Exception, SystemExit) as ex: + if not isinstance(ex, exceptions.SystemExit): + exit_code = salt.defaults.exitcodes.EX_GENERIC + self.out.error(ex) + elif isinstance(ex, exceptions.SystemExit): + exit_code = ex.code + else: + exit_code = salt.defaults.exitcodes.EX_GENERIC + self.out.error(ex) + else: + if self.config['log_level'] not in ('quiet', ): + self.setup_logfile_logger() + salt.utils.verify.verify_log(self.config) + salt.cli.support.log = log # Pass update logger so trace is available + + if self.config['support_profile_list']: + self.out.put('List of available profiles:') + for idx, profile in 
enumerate(salt.cli.support.get_profiles(self.config)): + msg_template = ' {}. '.format(idx + 1) + '{}' + self.out.highlight(msg_template, profile) + exit_code = salt.defaults.exitcodes.EX_OK + elif self.config['support_show_units']: + self.out.put('List of available units:') + for idx, unit in enumerate(self.find_existing_configs(None)): + msg_template = ' {}. '.format(idx + 1) + '{}' + self.out.highlight(msg_template, unit) + exit_code = salt.defaults.exitcodes.EX_OK + else: + if not self.config['support_profile']: + self.print_help() + raise SystemExit() + + if self._check_existing_archive(): + try: + self.collector = SupportDataCollector(self.config['support_archive'], + output=self.config['support_output_format']) + except Exception as ex: + self.out.error(ex) + exit_code = salt.defaults.exitcodes.EX_GENERIC + log.debug(ex, exc_info=True) + else: + try: + self.collector.open() + self.collect_local_data() + self.collect_internal_data() + self.collect_targets_data() + self.collector.close() + + archive_path = self.collector.archive_path + self.out.highlight('\nSupport data has been written to "{}" file.\n', + archive_path, _main='YELLOW') + except Exception as ex: + self.out.error(ex) + log.debug(ex, exc_info=True) + exit_code = salt.defaults.exitcodes.EX_SOFTWARE + + if exit_code: + self._cleanup() + + sys.exit(exit_code) diff --git a/salt/cli/support/console.py b/salt/cli/support/console.py new file mode 100644 index 0000000000..fb6992d657 --- /dev/null +++ b/salt/cli/support/console.py @@ -0,0 +1,165 @@ +# coding=utf-8 +''' +Collection of tools to report messages to console. + +NOTE: This is subject to incorporate other formatting bits + from all around everywhere and then to be moved to utils. +''' + +from __future__ import absolute_import, print_function, unicode_literals + +import sys +import os +import salt.utils.color +import textwrap + + +class IndentOutput(object): + ''' + Paint different indents in different output. 
+ ''' + def __init__(self, conf=None, device=sys.stdout): + if conf is None: + conf = {0: 'CYAN', 2: 'GREEN', 4: 'LIGHT_BLUE', 6: 'BLUE'} + self._colors_conf = conf + self._device = device + self._colors = salt.utils.color.get_colors() + self._default_color = 'GREEN' + self._default_hl_color = 'LIGHT_GREEN' + + def put(self, message, indent=0): + ''' + Print message with an indent. + + :param message: + :param indent: + :return: + ''' + color = self._colors_conf.get(indent + indent % 2, self._colors_conf.get(0, self._default_color)) + + for chunk in [' ' * indent, self._colors[color], message, self._colors['ENDC']]: + self._device.write(str(chunk)) + self._device.write(os.linesep) + self._device.flush() + + +class MessagesOutput(IndentOutput): + ''' + Messages output to the CLI. + ''' + def msg(self, message, title=None, title_color=None, color='BLUE', ident=0): + ''' + Hint message. + + :param message: + :param title: + :param title_color: + :param color: + :param ident: + :return: + ''' + if title and not title_color: + title_color = color + if title_color and not title: + title_color = None + + self.__colored_output(title, message, title_color, color, ident=ident) + + def info(self, message, ident=0): + ''' + Write an info message to the CLI. + + :param message: + :param ident: + :return: + ''' + self.__colored_output('Info', message, 'GREEN', 'LIGHT_GREEN', ident=ident) + + def warning(self, message, ident=0): + ''' + Write a warning message to the CLI. + + :param message: + :param ident: + :return: + ''' + self.__colored_output('Warning', message, 'YELLOW', 'LIGHT_YELLOW', ident=ident) + + def error(self, message, ident=0): + ''' + Write an error message to the CLI. 
+ + :param message: + :param ident + :return: + ''' + self.__colored_output('Error', message, 'RED', 'LIGHT_RED', ident=ident) + + def __colored_output(self, title, message, title_color, message_color, ident=0): + if title and not title.endswith(':'): + _linesep = title.endswith(os.linesep) + title = '{}:{}'.format(title.strip(), _linesep and os.linesep or ' ') + + for chunk in [title_color and self._colors[title_color] or None, ' ' * ident, + title, self._colors[message_color], message, self._colors['ENDC']]: + if chunk: + self._device.write(str(chunk)) + self._device.write(os.linesep) + self._device.flush() + + def highlight(self, message, *values, **colors): + ''' + Highlighter works the way that message parameter is a template, + the "values" is a list of arguments going one after another as values there. + And so the "colors" should designate either highlight color or alternate for each. + + Example: + + highlight('Hello {}, there! It is {}.', 'user', 'daytime', _main='GREEN', _highlight='RED') + highlight('Hello {}, there! It is {}.', 'user', 'daytime', _main='GREEN', _highlight='RED', 'daytime'='YELLOW') + + First example will highlight all the values in the template with the red color. + Second example will highlight the second value with the yellow color. + + Usage: + + colors: + _main: Sets the main color (or default is used) + _highlight: Sets the alternative color for everything + 'any phrase' that is the same in the "values" can override color. 
+ + :param message: + :param formatted: + :param colors: + :return: + ''' + + m_color = colors.get('_main', self._default_color) + h_color = colors.get('_highlight', self._default_hl_color) + + _values = [] + for value in values: + _values.append('{p}{c}{r}'.format(p=self._colors[colors.get(value, h_color)], + c=value, r=self._colors[m_color])) + self._device.write('{s}{m}{e}'.format(s=self._colors[m_color], + m=message.format(*_values), e=self._colors['ENDC'])) + self._device.write(os.linesep) + self._device.flush() + + +def wrap(txt, width=80, ident=0): + ''' + Wrap text to the required dimensions and clean it up, prepare for display. + + :param txt: + :param width: + :return: + ''' + ident = ' ' * ident + txt = (txt or '').replace(os.linesep, ' ').strip() + + wrapper = textwrap.TextWrapper() + wrapper.fix_sentence_endings = False + wrapper.initial_indent = wrapper.subsequent_indent = ident + + return wrapper.wrap(txt) diff --git a/salt/cli/support/intfunc.py b/salt/cli/support/intfunc.py new file mode 100644 index 0000000000..2727cd6394 --- /dev/null +++ b/salt/cli/support/intfunc.py @@ -0,0 +1,42 @@ +# coding=utf-8 +''' +Internal functions. +''' +# Maybe this needs to be a modules in a future? + +from __future__ import absolute_import, print_function, unicode_literals +import os +from salt.cli.support.console import MessagesOutput +import salt.utils.files + + +out = MessagesOutput() + + +def filetree(collector, path): + ''' + Add all files in the tree. If the "path" is a file, + only that file will be added. + + :param path: File or directory + :return: + ''' + if not path: + out.error('Path not defined', ident=2) + else: + # The filehandler needs to be explicitly passed here, so PyLint needs to accept that. 
+ # pylint: disable=W8470 + if os.path.isfile(path): + filename = os.path.basename(path) + try: + file_ref = salt.utils.files.fopen(path) # pylint: disable=W + out.put('Add {}'.format(filename), indent=2) + collector.add(filename) + collector.link(title=path, path=file_ref) + except Exception as err: + out.error(err, ident=4) + # pylint: enable=W8470 + else: + for fname in os.listdir(path): + fname = os.path.join(path, fname) + filetree(collector, fname) diff --git a/salt/cli/support/localrunner.py b/salt/cli/support/localrunner.py new file mode 100644 index 0000000000..26deb883bc --- /dev/null +++ b/salt/cli/support/localrunner.py @@ -0,0 +1,34 @@ +# coding=utf-8 +''' +Local Runner +''' + +from __future__ import print_function, absolute_import, unicode_literals +import salt.runner +import salt.utils.platform +import salt.utils.process +import logging + +log = logging.getLogger(__name__) + + +class LocalRunner(salt.runner.Runner): + ''' + Runner class that changes its default behaviour. + ''' + + def _proc_function(self, fun, low, user, tag, jid, daemonize=True): + ''' + Same as original _proc_function in AsyncClientMixin, + except it calls "low" without firing a print event. + ''' + if daemonize and not salt.utils.platform.is_windows(): + salt.log.setup.shutdown_multiprocessing_logging() + salt.utils.process.daemonize() + salt.log.setup.setup_multiprocessing_logging() + + low['__jid__'] = jid + low['__user__'] = user + low['__tag__'] = tag + + return self.low(fun, low, print_event=False, full_return=False) diff --git a/salt/cli/support/profiles/default.yml b/salt/cli/support/profiles/default.yml new file mode 100644 index 0000000000..01d9a26193 --- /dev/null +++ b/salt/cli/support/profiles/default.yml @@ -0,0 +1,71 @@ +sysinfo: + - description: | + Get the Salt grains of the current system. + - grains.items: + info: System grains + +packages: + - description: | + Fetch list of all the installed packages. 
+ - pkg.list_pkgs: + info: Installed packages + +repositories: + - pkg.list_repos: + info: Available repositories + +upgrades: + - pkg.list_upgrades: + info: Possible upgrades + +## TODO: Some data here belongs elsewhere and also is duplicated +status: + - status.version: + info: Status version + - status.cpuinfo: + info: CPU information + - status.cpustats: + info: CPU stats + - status.diskstats: + info: Disk stats + - status.loadavg: + info: Average load of the current system + - status.uptime: + info: Uptime of the machine + - status.meminfo: + info: Information about memory + - status.vmstats: + info: Virtual memory stats + - status.netdev: + info: Network device stats + - status.nproc: + info: Number of processing units available on this system + - status.procs: + info: Process data + +general-health: + - ps.boot_time: + info: System Boot Time + - ps.swap_memory: + info: Swap Memory + output: txt + - ps.cpu_times: + info: CPU times + - ps.disk_io_counters: + info: Disk IO counters + - ps.disk_partition_usage: + info: Disk partition usage + output: table + - ps.disk_partitions: + info: Disk partitions + output: table + - ps.top: + info: Top CPU consuming processes + +system.log: + # This works on any file system object. 
+ - filetree: + info: Add system log + args: + - /var/log/syslog + diff --git a/salt/cli/support/profiles/jobs-active.yml b/salt/cli/support/profiles/jobs-active.yml new file mode 100644 index 0000000000..508c54ece7 --- /dev/null +++ b/salt/cli/support/profiles/jobs-active.yml @@ -0,0 +1,3 @@ +jobs-active: + - run:jobs.active: + info: List of all actively running jobs diff --git a/salt/cli/support/profiles/jobs-last.yml b/salt/cli/support/profiles/jobs-last.yml new file mode 100644 index 0000000000..e3b719f552 --- /dev/null +++ b/salt/cli/support/profiles/jobs-last.yml @@ -0,0 +1,3 @@ +jobs-last: + - run:jobs.last_run: + info: List all detectable jobs and associated functions diff --git a/salt/cli/support/profiles/jobs-trace.yml b/salt/cli/support/profiles/jobs-trace.yml new file mode 100644 index 0000000000..00b28e0502 --- /dev/null +++ b/salt/cli/support/profiles/jobs-trace.yml @@ -0,0 +1,7 @@ +jobs-details: + {% for job in runners('jobs.list_jobs') %} + - run:jobs.list_job: + info: Details on JID {{job}} + args: + - {{job}} + {% endfor %} diff --git a/salt/cli/support/profiles/network.yml b/salt/cli/support/profiles/network.yml new file mode 100644 index 0000000000..268f02e61f --- /dev/null +++ b/salt/cli/support/profiles/network.yml @@ -0,0 +1,27 @@ +network: + - network.get_hostname: + info: Hostname + output: txt + - network.get_fqdn: + info: FQDN + output: txt + - network.default_route: + info: Default route + output: table + - network.interfaces: + info: All the available interfaces + output: table + - network.subnets: + info: List of IPv4 subnets + - network.subnets6: + info: List of IPv6 subnets + - network.routes: + info: Network configured routes from routing tables + output: table + - network.netstat: + info: Information on open ports and states + output: table + - network.active_tcp: + info: All running TCP connections + - network.arp: + info: ARP table diff --git a/salt/cli/support/profiles/postgres.yml b/salt/cli/support/profiles/postgres.yml new 
file mode 100644 index 0000000000..2238752c7a --- /dev/null +++ b/salt/cli/support/profiles/postgres.yml @@ -0,0 +1,11 @@ +system.log: + - filetree: + info: Add system log + args: + - /var/log/syslog + +etc/postgres: + - filetree: + info: Pick entire /etc/postgresql + args: + - /etc/postgresql diff --git a/salt/cli/support/profiles/salt.yml b/salt/cli/support/profiles/salt.yml new file mode 100644 index 0000000000..4b18d98870 --- /dev/null +++ b/salt/cli/support/profiles/salt.yml @@ -0,0 +1,9 @@ +sysinfo: + - grains.items: + info: System grains + +logfile: + - filetree: + info: Add current logfile + args: + - {{salt('config.get', 'log_file')}} diff --git a/salt/cli/support/profiles/users.yml b/salt/cli/support/profiles/users.yml new file mode 100644 index 0000000000..391acdb606 --- /dev/null +++ b/salt/cli/support/profiles/users.yml @@ -0,0 +1,22 @@ +all-users: + {%for uname in salt('user.list_users') %} + - user.info: + info: Information about "{{uname}}" + args: + - {{uname}} + - user.list_groups: + info: List groups for user "{{uname}}" + args: + - {{uname}} + - shadow.info: + info: Shadow information about user "{{uname}}" + args: + - {{uname}} + - cron.raw_cron: + info: Cron for user "{{uname}}" + args: + - {{uname}} + {%endfor%} + - group.getent: + info: List of all available groups + output: table diff --git a/salt/client/ssh/wrapper/state.py b/salt/client/ssh/wrapper/state.py index b4272e4b55..3f1cead4b1 100644 --- a/salt/client/ssh/wrapper/state.py +++ b/salt/client/ssh/wrapper/state.py @@ -167,6 +167,16 @@ def _cleanup_slsmod_high_data(high_data): stateconf_data['slsmod'] = None +def _parse_mods(mods): + ''' + Parse modules. 
+ ''' + if isinstance(mods, six.string_types): + mods = [item.strip() for item in mods.split(',') if item.strip()] + + return mods + + def sls(mods, saltenv='base', test=None, exclude=None, **kwargs): ''' Create the seed file for a state.sls run @@ -181,8 +191,7 @@ def sls(mods, saltenv='base', test=None, exclude=None, **kwargs): __salt__, __context__['fileclient']) st_.push_active() - if isinstance(mods, six.string_types): - mods = mods.split(',') + mods = _parse_mods(mods) high_data, errors = st_.render_highstate({saltenv: mods}) if exclude: if isinstance(exclude, six.string_types): @@ -922,8 +931,7 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs): err += __pillar__['_errors'] return err - if isinstance(mods, six.string_types): - split_mods = mods.split(',') + split_mods = _parse_mods(mods) st_.push_active() high_, errors = st_.render_highstate({opts['saltenv']: split_mods}) errors += st_.state.verify_high(high_) @@ -980,8 +988,7 @@ def show_sls(mods, saltenv='base', test=None, **kwargs): __salt__, __context__['fileclient']) st_.push_active() - if isinstance(mods, six.string_types): - mods = mods.split(',') + mods = _parse_mods(mods) high_data, errors = st_.render_highstate({saltenv: mods}) high_data, ext_errors = st_.state.reconcile_extend(high_data) errors += ext_errors @@ -1025,8 +1032,7 @@ def show_low_sls(mods, saltenv='base', test=None, **kwargs): __salt__, __context__['fileclient']) st_.push_active() - if isinstance(mods, six.string_types): - mods = mods.split(',') + mods = _parse_mods(mods) high_data, errors = st_.render_highstate({saltenv: mods}) high_data, ext_errors = st_.state.reconcile_extend(high_data) errors += ext_errors diff --git a/salt/cloud/clouds/aliyun.py b/salt/cloud/clouds/aliyun.py index 9d81e12dcc..9e587d9486 100644 --- a/salt/cloud/clouds/aliyun.py +++ b/salt/cloud/clouds/aliyun.py @@ -50,6 +50,7 @@ from salt.exceptions import ( SaltCloudExecutionFailure, SaltCloudExecutionTimeout ) +from salt.utils.stringutils import to_bytes 
# Import 3rd-party libs from salt.ext import six @@ -770,7 +771,7 @@ def _compute_signature(parameters, access_key_secret): # All aliyun API only support GET method stringToSign = 'GET&%2F&' + percent_encode(canonicalizedQueryString[1:]) - h = hmac.new(access_key_secret + "&", stringToSign, sha1) + h = hmac.new(to_bytes(access_key_secret + "&"), stringToSign, sha1) signature = base64.encodestring(h.digest()).strip() return signature diff --git a/salt/config/__init__.py b/salt/config/__init__.py index b28cea5713..e8ede34800 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -918,6 +918,9 @@ VALID_OPTS = { # Set a hard limit for the amount of memory modules can consume on a minion. 'modules_max_memory': int, + # Blacklist specific core grains to be filtered + 'grains_blacklist': list, + # The number of minutes between the minion refreshing its cache of grains 'grains_refresh_every': int, @@ -1248,6 +1251,7 @@ DEFAULT_MINION_OPTS = { 'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'minion'), 'append_minionid_config_dirs': [], 'cache_jobs': False, + 'grains_blacklist': [], 'grains_cache': False, 'grains_cache_expiration': 300, 'grains_deep_merge': False, diff --git a/salt/grains/chronos.py b/salt/grains/chronos.py index df8eca32e0..3b5add6895 100644 --- a/salt/grains/chronos.py +++ b/salt/grains/chronos.py @@ -5,9 +5,11 @@ Generate chronos proxy minion grains. .. 
versionadded:: 2015.8.2 ''' +# Import Python libs from __future__ import absolute_import, print_function, unicode_literals +# Import Salt libs import salt.utils.http import salt.utils.platform __proxyenabled__ = ['chronos'] diff --git a/salt/grains/core.py b/salt/grains/core.py index 051678e7a3..95b84fef89 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -49,6 +49,7 @@ except ImportError: # Import salt libs import salt.exceptions import salt.log +import salt.utils.args import salt.utils.dns import salt.utils.files import salt.utils.network @@ -2775,3 +2776,24 @@ def default_gateway(): except Exception: continue return grains + + +def kernelparams(): + ''' + Return the kernel boot parameters + ''' + try: + with salt.utils.files.fopen('/proc/cmdline', 'r') as fhr: + cmdline = fhr.read() + grains = {'kernelparams': []} + for data in [item.split('=') for item in salt.utils.args.shlex_split(cmdline)]: + value = None + if len(data) == 2: + value = data[1].strip('"') + + grains['kernelparams'] += [(data[0], value)] + except IOError as exc: + grains = {} + log.debug('Failed to read /proc/cmdline: %s', exc) + + return grains diff --git a/salt/loader.py b/salt/loader.py index 7a09314091..2b028e0fcb 100644 --- a/salt/loader.py +++ b/salt/loader.py @@ -34,6 +34,7 @@ import salt.utils.lazy import salt.utils.odict import salt.utils.platform import salt.utils.versions +import salt.utils.stringutils from salt.exceptions import LoaderError from salt.template import check_render_pipe_str from salt.utils.decorators import Depends @@ -747,6 +748,7 @@ def grains(opts, force_refresh=False, proxy=None): opts['grains'] = {} grains_data = {} + blist = opts.get('grains_blacklist', []) funcs = grain_funcs(opts, proxy=proxy) if force_refresh: # if we refresh, lets reload grain modules funcs.clear() @@ -758,6 +760,14 @@ def grains(opts, force_refresh=False, proxy=None): ret = funcs[key]() if not isinstance(ret, dict): continue + if blist: + for key in list(ret): + for block in 
blist: + if salt.utils.stringutils.expr_match(key, block): + del ret[key] + log.trace('Filtering %s grain', key) + if not ret: + continue if grains_deep_merge: salt.utils.dictupdate.update(grains_data, ret) else: @@ -793,6 +803,14 @@ def grains(opts, force_refresh=False, proxy=None): continue if not isinstance(ret, dict): continue + if blist: + for key in list(ret): + for block in blist: + if salt.utils.stringutils.expr_match(key, block): + del ret[key] + log.trace('Filtering %s grain', key) + if not ret: + continue if grains_deep_merge: salt.utils.dictupdate.update(grains_data, ret) else: diff --git a/salt/log/handlers/sentry_mod.py b/salt/log/handlers/sentry_mod.py index 43ac5b8262..c126eb20de 100644 --- a/salt/log/handlers/sentry_mod.py +++ b/salt/log/handlers/sentry_mod.py @@ -113,13 +113,16 @@ __virtualname__ = 'sentry' def __virtual__(): if HAS_RAVEN is True: - __grains__ = salt.loader.grains(__opts__) - __salt__ = salt.loader.minion_mods(__opts__) return __virtualname__ return False def setup_handlers(): + ''' + sets up the sentry handler + ''' + __grains__ = salt.loader.grains(__opts__) + __salt__ = salt.loader.minion_mods(__opts__) if 'sentry_handler' not in __opts__: log.debug('No \'sentry_handler\' key was found in the configuration') return False @@ -133,7 +136,9 @@ def setup_handlers(): transport_registry = TransportRegistry(default_transports) url = urlparse(dsn) if not transport_registry.supported_scheme(url.scheme): - raise ValueError('Unsupported Sentry DSN scheme: {0}'.format(url.scheme)) + raise ValueError( + 'Unsupported Sentry DSN scheme: {0}'.format(url.scheme) + ) except ValueError as exc: log.info( 'Raven failed to parse the configuration provided DSN: %s', exc ) @@ -202,7 +207,11 @@ def setup_handlers(): context_dict = {} if context is not None: for tag in context: - tag_value = __salt__['grains.get'](tag) + try: + tag_value = __grains__[tag] + except KeyError: + log.debug('Sentry tag \'%s\' not found in grains.', tag) + continue if len(tag_value) > 
0: context_dict[tag] = tag_value if len(context_dict) > 0: @@ -229,4 +238,7 @@ def setup_handlers(): def get_config_value(name, default=None): + ''' + returns a configuration option for the sentry_handler + ''' return __opts__['sentry_handler'].get(name, default) diff --git a/salt/master.py b/salt/master.py index a68ce37502..e1c6774b9b 100644 --- a/salt/master.py +++ b/salt/master.py @@ -977,7 +977,7 @@ class MWorker(salt.utils.process.SignalHandlingMultiprocessingProcess): self.mkey = mkey self.key = key self.k_mtime = 0 - self.stats = collections.defaultdict(lambda: {'mean': 0, 'runs': 0}) + self.stats = collections.defaultdict(lambda: {'mean': 0, 'latency': 0, 'runs': 0}) self.stat_clock = time.time() # We need __setstate__ and __getstate__ to also pickle 'SMaster.secrets'. @@ -1059,18 +1059,16 @@ class MWorker(salt.utils.process.SignalHandlingMultiprocessingProcess): 'clear': self._handle_clear}[key](load) raise tornado.gen.Return(ret) - def _post_stats(self, start, cmd): + def _post_stats(self, stats): ''' - Calculate the master stats and fire events with stat info + Fire events with stat info if it's time ''' - end = time.time() - duration = end - start - self.stats[cmd]['mean'] = (self.stats[cmd]['mean'] * (self.stats[cmd]['runs'] - 1) + duration) / self.stats[cmd]['runs'] - if end - self.stat_clock > self.opts['master_stats_event_iter']: + end_time = time.time() + if end_time - self.stat_clock > self.opts['master_stats_event_iter']: # Fire the event with the stats and wipe the tracker - self.aes_funcs.event.fire_event({'time': end - self.stat_clock, 'worker': self.name, 'stats': self.stats}, tagify(self.name, 'stats')) - self.stats = collections.defaultdict(lambda: {'mean': 0, 'runs': 0}) - self.stat_clock = end + self.aes_funcs.event.fire_event({'time': end_time - self.stat_clock, 'worker': self.name, 'stats': stats}, tagify(self.name, 'stats')) + self.stats = collections.defaultdict(lambda: {'mean': 0, 'latency': 0, 'runs': 0}) + self.stat_clock = 
end_time def _handle_clear(self, load): ''' @@ -1086,10 +1084,10 @@ class MWorker(salt.utils.process.SignalHandlingMultiprocessingProcess): return False if self.opts['master_stats']: start = time.time() - self.stats[cmd]['runs'] += 1 ret = getattr(self.clear_funcs, cmd)(load), {'fun': 'send_clear'} if self.opts['master_stats']: - self._post_stats(start, cmd) + stats = salt.utils.event.update_stats(self.stats, start, load) + self._post_stats(stats) return ret def _handle_aes(self, data): @@ -1109,7 +1107,6 @@ class MWorker(salt.utils.process.SignalHandlingMultiprocessingProcess): return False if self.opts['master_stats']: start = time.time() - self.stats[cmd]['runs'] += 1 def run_func(data): return self.aes_funcs.run_func(data['cmd'], data) @@ -1120,7 +1117,8 @@ class MWorker(salt.utils.process.SignalHandlingMultiprocessingProcess): ret = run_func(data) if self.opts['master_stats']: - self._post_stats(start, cmd) + stats = salt.utils.event.update_stats(self.stats, start, data) + self._post_stats(stats) return ret def run(self): diff --git a/salt/minion.py b/salt/minion.py index d318aff7c1..36a22b2ba2 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -2393,7 +2393,8 @@ class Minion(MinionBase): else: # delete the scheduled job to don't interfere with the failover process if self.opts['transport'] != 'tcp': - self.schedule.delete_job(name=master_event(type='alive')) + self.schedule.delete_job(name=master_event(type='alive', master=self.opts['master']), + persist=True) log.info('Trying to tune in to next master from master-list') diff --git a/salt/modules/file.py b/salt/modules/file.py index 9b2a3ec6ff..701125efba 100644 --- a/salt/modules/file.py +++ b/salt/modules/file.py @@ -4511,7 +4511,7 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False) try: lattrs = lsattr(name) except SaltInvocationError: - lsattrs = None + lattrs = None if lattrs is not None: # List attributes on file perms['lattrs'] = ''.join(lattrs.get(name, '')) diff --git 
a/salt/modules/ldap3.py b/salt/modules/ldap3.py index 9b11745709..30452f55e7 100644 --- a/salt/modules/ldap3.py +++ b/salt/modules/ldap3.py @@ -402,18 +402,14 @@ def add(connect_spec, dn, attributes): # convert the "iterable of values" to lists in case that's what # addModlist() expects (also to ensure that the caller's objects # are not modified) - attributes = dict(((attr, list(vals)) + attributes = dict(((attr, salt.utils.data.encode(list(vals))) for attr, vals in six.iteritems(attributes))) log.info('adding entry: dn: %s attributes: %s', repr(dn), repr(attributes)) if 'unicodePwd' in attributes: attributes['unicodePwd'] = [_format_unicode_password(x) for x in attributes['unicodePwd']] - modlist = salt.utils.data.decode( - ldap.modlist.addModlist(attributes), - to_str=True, - preserve_tuples=True - ) + modlist = ldap.modlist.addModlist(attributes) try: l.c.add_s(dn, modlist) except ldap.LDAPError as e: @@ -572,19 +568,16 @@ def change(connect_spec, dn, before, after): # convert the "iterable of values" to lists in case that's what # modifyModlist() expects (also to ensure that the caller's dicts # are not modified) - before = dict(((attr, list(vals)) + before = dict(((attr, salt.utils.data.encode(list(vals))) for attr, vals in six.iteritems(before))) - after = dict(((attr, list(vals)) + after = dict(((attr, salt.utils.data.encode(list(vals))) for attr, vals in six.iteritems(after))) if 'unicodePwd' in after: after['unicodePwd'] = [_format_unicode_password(x) for x in after['unicodePwd']] - modlist = salt.utils.data.decode( - ldap.modlist.modifyModlist(before, after), - to_str=True, - preserve_tuples=True - ) + modlist = ldap.modlist.modifyModlist(before, after) + try: l.c.modify_s(dn, modlist) except ldap.LDAPError as e: diff --git a/salt/modules/salt_version.py b/salt/modules/salt_version.py new file mode 100644 index 0000000000..7e1b0287bd --- /dev/null +++ b/salt/modules/salt_version.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +''' +Access Salt's elemental 
release code-names. + +.. versionadded:: Neon + +Salt's feature release schedule is based on the Periodic Table, as described +in the :ref:`Version Numbers ` documentation. + +Since deprecation notices often use the elemental release code-name when warning +users about deprecated changes, it can be difficult to build out future-proof +functionality that are dependent on a naming scheme that moves. + +For example, a state syntax needs to change to support an option that will be +removed in the future, but there are many Minion versions in use across an +infrastructure. It would be handy to use some Jinja syntax to check for these +instances to perform one state syntax over another. + +A simple example might be something like the following: + +.. code-block:: jinja + + {# a boolean check #} + {% set option_deprecated = salt['salt_version.is_older']("Sodium") %} + + {% if option_deprecated %} + + {% else %} + + {% endif %} + +''' + +# Import Python libs +from __future__ import absolute_import, print_function, unicode_literals +import logging + +# Import Salt libs +from salt.ext import six +import salt.version +import salt.utils.versions + + +log = logging.getLogger(__name__) + +__virtualname__ = 'salt_version' + + +def __virtual__(): + ''' + Only work on POSIX-like systems + ''' + return __virtualname__ + + +def get_release_number(name): + ''' + Returns the release number of a given release code name in a + ``.`` context. + + If the release name has not been given an assigned release number, the + function returns a string. If the release cannot be found, it returns + ``None``. + + name + The release codename for which to find a release number. + + CLI Example: + + .. 
code-block:: bash + + salt '*' salt_version.get_release_number 'Oxygen' + ''' + name = name.lower() + version_map = salt.version.SaltStackVersion.LNAMES + version = version_map.get(name) + if version is None: + log.info('Version {} not found.'.format(name)) + return None + + if version[1] == 0: + log.info('Version {} found, but no release number has been assigned ' + 'yet.'.format(name)) + return 'No version assigned.' + + return '.'.join(str(item) for item in version) + + +def is_equal(name): + ''' + Returns a boolean if the named version matches the minion's current Salt + version. + + name + The release codename to check the version against. + + CLI Example: + + .. code-block:: bash + + salt '*' salt_version.is_equal 'Oxygen' + ''' + if _check_release_cmp(name) == 0: + log.info('Release codename \'{}\' equals the minion\'s ' + 'version.'.format(name)) + return True + + return False + + +def is_newer(name): + ''' + Returns a boolean if the named version is newer than the minion's current + Salt version. + + name + The release codename to check the version against. + + CLI Example: + + .. code-block:: bash + + salt '*' salt_version.is_newer 'Sodium' + ''' + if _check_release_cmp(name) == 1: + log.info('Release codename \'{}\' is newer than the minion\'s ' + 'version.'.format(name)) + return True + + return False + + +def is_older(name): + ''' + Returns a boolean if the named version is older than the minion's current + Salt version. + + name + The release codename to check the version against. + + CLI Example: + + .. code-block:: bash + + salt '*' salt_version.is_older 'Sodium' + ''' + if _check_release_cmp(name) == -1: + log.info('Release codename \'{}\' is older than the minion\'s ' + 'version.'.format(name)) + return True + + return False + + +def _check_release_cmp(name): + ''' + Helper function to compare release codename versions to the minion's current + Salt version. + + If release codename isn't found, the function returns None. 
Otherwise, it + returns the results of the version comparison as documented by the + ``versions_cmp`` function in ``salt.utils.versions.py``. + ''' + map_version = get_release_number(name) + if map_version is None: + log.info('Release codename {} was not found.'.format(name)) + return None + + current_version = six.text_type(salt.version.SaltStackVersion( + *salt.version.__version_info__)) + current_version = current_version.rsplit('.', 1)[0] + version_cmp = salt.utils.versions.version_cmp(map_version, current_version) + + return version_cmp diff --git a/salt/modules/saltcheck.py b/salt/modules/saltcheck.py index 94bedc36fd..81f1c4c20a 100644 --- a/salt/modules/saltcheck.py +++ b/salt/modules/saltcheck.py @@ -570,7 +570,7 @@ class SaltCheck(object): else: assertion = test_dict['assertion'] expected_return = test_dict.get('expected-return', None) - assert_print_result = test_dict.get('print_result', None) + assert_print_result = test_dict.get('print_result', True) actual_return = self._call_salt_command(mod_and_func, args, kwargs, assertion_section) if assertion not in ["assertIn", "assertNotIn", "assertEmpty", "assertNotEmpty", "assertTrue", "assertFalse"]: diff --git a/salt/netapi/rest_tornado/saltnado.py b/salt/netapi/rest_tornado/saltnado.py index 94f83041a7..18ff597260 100644 --- a/salt/netapi/rest_tornado/saltnado.py +++ b/salt/netapi/rest_tornado/saltnado.py @@ -1100,13 +1100,15 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223 ''' Disbatch runner client commands ''' + full_return = chunk.pop('full_return', False) pub_data = self.saltclients['runner'](chunk) tag = pub_data['tag'] + '/ret' try: event = yield self.application.event_listener.get_event(self, tag=tag) # only return the return data - raise tornado.gen.Return(event['data']['return']) + ret = event if full_return else event['data']['return'] + raise tornado.gen.Return(ret) except TimeoutException: raise tornado.gen.Return('Timeout waiting for runner to execute') diff --git 
a/salt/scripts.py b/salt/scripts.py index 1bbc83a300..d366128ed0 100644 --- a/salt/scripts.py +++ b/salt/scripts.py @@ -518,3 +518,17 @@ def salt_extend(extension, name, description, salt_dir, merge): description=description, salt_dir=salt_dir, merge=merge) + + +def salt_support(): + ''' + Run Salt Support that collects system data, logs etc for debug and support purposes. + :return: + ''' + + import salt.cli.support.collector + if '' in sys.path: + sys.path.remove('') + client = salt.cli.support.collector.SaltSupport() + _install_signal_handlers(client) + client.run() diff --git a/salt/states/archive.py b/salt/states/archive.py index 7dac0d2f32..de9ee3cf01 100644 --- a/salt/states/archive.py +++ b/salt/states/archive.py @@ -1431,25 +1431,19 @@ def extracted(name, dir_result = __states__['file.directory'](full_path, user=user, group=group, - recurse=recurse, - test=__opts__['test']) + recurse=recurse) log.debug('file.directory: %s', dir_result) - if __opts__['test']: - if dir_result.get('pchanges'): - ret['changes']['updated ownership'] = True - else: - try: - if dir_result['result']: - if dir_result['changes']: - ret['changes']['updated ownership'] = True - else: - enforce_failed.append(full_path) - except (KeyError, TypeError): - log.warning( - 'Bad state return %s for file.directory state on %s', - dir_result, dirname - ) + if dir_result.get('changes'): + ret['changes']['updated ownership'] = True + try: + if not dir_result['result']: + enforce_failed.append(full_path) + except (KeyError, TypeError): + log.warning( + 'Bad state return %s for file.directory state on %s', + dir_result, dirname + ) for filename in enforce_files + enforce_links: full_path = os.path.join(name, filename) diff --git a/salt/states/boto_cloudfront.py b/salt/states/boto_cloudfront.py index 27c6260e9d..d29d3df235 100644 --- a/salt/states/boto_cloudfront.py +++ b/salt/states/boto_cloudfront.py @@ -135,7 +135,7 @@ def present( if __opts__['test']: ret['result'] = None ret['comment'] = 
'Distribution {0} set for creation.'.format(name) - ret['pchanges'] = {'old': None, 'new': name} + ret['changes'] = {'old': None, 'new': name} return ret res = __salt__['boto_cloudfront.create_distribution']( @@ -203,7 +203,7 @@ def present( 'Distribution {0} set for new config:'.format(name), changes_diff, ]) - ret['pchanges'] = {'diff': changes_diff} + ret['changes'] = {'diff': changes_diff} return ret res = __salt__['boto_cloudfront.update_distribution']( diff --git a/salt/states/boto_s3.py b/salt/states/boto_s3.py index a75fe71afa..49e77510cf 100644 --- a/salt/states/boto_s3.py +++ b/salt/states/boto_s3.py @@ -282,7 +282,7 @@ def object_present( ret['result'] = None ret['comment'] = 'S3 object {0} set to be {1}d.'.format(name, action) ret['comment'] += '\nChanges:\n{0}'.format(changes_diff) - ret['pchanges'] = {'diff': changes_diff} + ret['changes'] = {'diff': changes_diff} return ret r = __salt__['boto_s3.upload_file']( diff --git a/salt/states/boto_sqs.py b/salt/states/boto_sqs.py index 9f42dedf09..964c6e863e 100644 --- a/salt/states/boto_sqs.py +++ b/salt/states/boto_sqs.py @@ -136,7 +136,7 @@ def present( ret['comment'].append( 'SQS queue {0} is set to be created.'.format(name), ) - ret['pchanges'] = {'old': None, 'new': name} + ret['changes'] = {'old': None, 'new': name} return ret r = __salt__['boto_sqs.create']( @@ -225,7 +225,7 @@ def present( attributes_diff, ) ) - ret['pchanges'] = {'attributes': {'diff': attributes_diff}} + ret['changes'] = {'attributes': {'diff': attributes_diff}} return ret r = __salt__['boto_sqs.set_attributes']( @@ -300,7 +300,7 @@ def absent( if __opts__['test']: ret['result'] = None ret['comment'] = 'SQS queue {0} is set to be removed.'.format(name) - ret['pchanges'] = {'old': name, 'new': None} + ret['changes'] = {'old': name, 'new': None} return ret r = __salt__['boto_sqs.delete']( diff --git a/salt/states/chocolatey.py b/salt/states/chocolatey.py index 5f2e6e9842..021e9ac68b 100644 --- a/salt/states/chocolatey.py +++ 
b/salt/states/chocolatey.py @@ -336,7 +336,6 @@ def upgraded(name, ret = {'name': name, 'result': True, 'changes': {}, - 'pchanges': {}, 'comment': ''} # Get list of currently installed packages @@ -346,12 +345,10 @@ def upgraded(name, # Package not installed if name.lower() not in [package.lower() for package in pre_install.keys()]: if version: - ret['pchanges'] = { - name: 'Version {0} will be installed'.format(version) - } + ret['changes'][name] = 'Version {0} will be installed'.format(version) ret['comment'] = 'Install version {0}'.format(version) else: - ret['pchanges'] = {name: 'Latest version will be installed'} + ret['changes'][name] = 'Latest version will be installed' ret['comment'] = 'Install latest version' # Package installed @@ -378,8 +375,7 @@ def upgraded(name, oper="==", ver2=version): if force: - ret['pchanges'] = { - name: 'Version {0} will be reinstalled'.format(version)} + ret['changes'][name] = 'Version {0} will be reinstalled'.format(version) ret['comment'] = 'Reinstall {0} {1}'.format(full_name, version) else: ret['comment'] = '{0} {1} is already installed'.format( @@ -389,11 +385,9 @@ def upgraded(name, # If installed version is older than new version if salt.utils.versions.compare( ver1=installed_version, oper="<", ver2=version): - ret['pchanges'] = { - name: 'Version {0} will be upgraded to Version {1}'.format( - installed_version, version - ) - } + ret['changes'][name] = 'Version {0} will be upgraded to Version {1}'.format( + installed_version, version + ) ret['comment'] = 'Upgrade {0} {1} to {2}'.format( full_name, installed_version, version ) @@ -409,13 +403,13 @@ def upgraded(name, else: ret['comment'] = 'No version found to install' - # Return if `test=True` - if __opts__['test']: - ret['result'] = None + # Return if there are no changes to be made + if not ret['changes']: return ret - # Return if there are no changes to be made - if not ret['pchanges']: + # Return if running in test mode + if __opts__['test']: + ret['result'] = None 
return ret # Install the package @@ -439,6 +433,9 @@ def upgraded(name, # Get list of installed packages after 'chocolatey.install' post_install = __salt__['chocolatey.list'](local_only=True) + # Prior to this, ret['changes'] would have contained expected changes, + # replace them with the actual changes now that we have completed the + # installation. ret['changes'] = salt.utils.data.compare_dicts(pre_install, post_install) return ret diff --git a/salt/states/dvs.py b/salt/states/dvs.py index 421254a327..1ff39cde00 100644 --- a/salt/states/dvs.py +++ b/salt/states/dvs.py @@ -401,13 +401,11 @@ def dvs_configured(name, dvs): ''.format(dvs_name, datacenter_name)), 'result': True}) else: - ret.update({'comment': '\n'.join(comments)}) - if __opts__['test']: - ret.update({'pchanges': changes, - 'result': None}) - else: - ret.update({'changes': changes, - 'result': True}) + ret.update({ + 'comment': '\n'.join(comments), + 'changes': changes, + 'result': None if __opts__['test'] else True, + }) return ret @@ -512,8 +510,10 @@ def portgroups_configured(name, dvs, portgroups): log.info('Running state {0} on DVS \'{1}\', datacenter ' '\'{2}\''.format(name, dvs, datacenter)) changes_required = False - ret = {'name': name, 'changes': {}, 'result': None, 'comment': None, - 'pchanges': {}} + ret = {'name': name, + 'changes': {}, + 'result': None, + 'comment': None} comments = [] changes = {} changes_required = False @@ -623,13 +623,11 @@ def portgroups_configured(name, dvs, portgroups): 'Nothing to be done.'.format(dvs, datacenter)), 'result': True}) else: - ret.update({'comment': '\n'.join(comments)}) - if __opts__['test']: - ret.update({'pchanges': changes, - 'result': None}) - else: - ret.update({'changes': changes, - 'result': True}) + ret.update({ + 'comment': '\n'.join(comments), + 'changes': changes, + 'result': None if __opts__['test'] else True, + }) return ret @@ -649,8 +647,10 @@ def uplink_portgroup_configured(name, dvs, uplink_portgroup): log.info('Running {0} on 
DVS \'{1}\', datacenter \'{2}\'' ''.format(name, dvs, datacenter)) changes_required = False - ret = {'name': name, 'changes': {}, 'result': None, 'comment': None, - 'pchanges': {}} + ret = {'name': name, + 'changes': {}, + 'result': None, + 'comment': None} comments = [] changes = {} changes_required = False @@ -708,11 +708,9 @@ def uplink_portgroup_configured(name, dvs, uplink_portgroup): 'Nothing to be done.'.format(dvs, datacenter)), 'result': True}) else: - ret.update({'comment': '\n'.join(comments)}) - if __opts__['test']: - ret.update({'pchanges': changes, - 'result': None}) - else: - ret.update({'changes': changes, - 'result': True}) + ret.update({ + 'comment': '\n'.join(comments), + 'changes': changes, + 'result': None if __opts__['test'] else True, + }) return ret diff --git a/salt/states/esxdatacenter.py b/salt/states/esxdatacenter.py index 09c69750ed..ae83b4d371 100644 --- a/salt/states/esxdatacenter.py +++ b/salt/states/esxdatacenter.py @@ -89,11 +89,11 @@ def datacenter_configured(name): dc_name = name log.info('Running datacenter_configured for datacenter \'{0}\'' ''.format(dc_name)) - ret = {'name': name, 'changes': {}, 'pchanges': {}, - 'result': None, 'comment': 'Default'} + ret = {'name': name, + 'changes': {}, + 'result': None, + 'comment': 'Default'} comments = [] - changes = {} - pchanges = {} si = None try: si = __salt__['vsphere.get_service_instance_via_proxy']() @@ -103,27 +103,19 @@ def datacenter_configured(name): if __opts__['test']: comments.append('State will create ' 'datacenter \'{0}\'.'.format(dc_name)) - log.info(comments[-1]) - pchanges.update({'new': {'name': dc_name}}) else: log.debug('Creating datacenter \'{0}\'. 
'.format(dc_name)) __salt__['vsphere.create_datacenter'](dc_name, si) comments.append('Created datacenter \'{0}\'.'.format(dc_name)) - log.info(comments[-1]) - changes.update({'new': {'name': dc_name}}) + log.info(comments[-1]) + ret['changes'].update({'new': {'name': dc_name}}) else: comments.append('Datacenter \'{0}\' already exists. Nothing to be ' 'done.'.format(dc_name)) log.info(comments[-1]) __salt__['vsphere.disconnect'](si) - if __opts__['test'] and pchanges: - ret_status = None - else: - ret_status = True - ret.update({'result': ret_status, - 'comment': '\n'.join(comments), - 'changes': changes, - 'pchanges': pchanges}) + ret['comment'] = '\n'.join(comments) + ret['result'] = None if __opts__['test'] and ret['changes'] else True return ret except salt.exceptions.CommandExecutionError as exc: log.error('Error: {}'.format(exc)) diff --git a/salt/states/esxi.py b/salt/states/esxi.py index 486d9df53e..8728224716 100644 --- a/salt/states/esxi.py +++ b/salt/states/esxi.py @@ -1070,8 +1070,10 @@ def diskgroups_configured(name, diskgroups, erase_disks=False): else proxy_details['esxi_host'] log.info('Running state {0} for host \'{1}\''.format(name, hostname)) # Variable used to return the result of the invocation - ret = {'name': name, 'result': None, 'changes': {}, - 'pchanges': {}, 'comments': None} + ret = {'name': name, + 'result': None, + 'changes': {}, + 'comments': None} # Signals if errors have been encountered errors = False # Signals if changes are required @@ -1294,12 +1296,8 @@ def diskgroups_configured(name, diskgroups, erase_disks=False): None if __opts__['test'] else # running in test mode False if errors else True) # found errors; defaults to True ret.update({'result': result, - 'comment': '\n'.join(comments)}) - if changes: - if __opts__['test']: - ret['pchanges'] = diskgroup_changes - elif changes: - ret['changes'] = diskgroup_changes + 'comment': '\n'.join(comments), + 'changes': diskgroup_changes}) return ret @@ -1387,8 +1385,10 @@ def 
host_cache_configured(name, enabled, datastore, swap_size='100%', else proxy_details['esxi_host'] log.trace('hostname = %s', hostname) log.info('Running host_cache_swap_configured for host \'%s\'', hostname) - ret = {'name': hostname, 'comment': 'Default comments', - 'result': None, 'changes': {}, 'pchanges': {}} + ret = {'name': hostname, + 'comment': 'Default comments', + 'result': None, + 'changes': {}} result = None if __opts__['test'] else True # We assume success needs_setting = False comments = [] @@ -1582,11 +1582,8 @@ def host_cache_configured(name, enabled, datastore, swap_size='100%', __salt__['vsphere.disconnect'](si) log.info(comments[-1]) ret.update({'comment': '\n'.join(comments), - 'result': result}) - if __opts__['test']: - ret['pchanges'] = changes - else: - ret['changes'] = changes + 'result': result, + 'changes': changes}) return ret except CommandExecutionError as err: log.error('Error: %s.', err) diff --git a/salt/states/file.py b/salt/states/file.py index 1a0b82fc37..6f2fbc2169 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -950,16 +950,25 @@ def _check_touch(name, atime, mtime): ''' Check to see if a file needs to be updated or created ''' + ret = { + 'result': None, + 'comment': '', + 'changes': {'new': name}, + } if not os.path.exists(name): - return None, 'File {0} is set to be created'.format(name) - stats = __salt__['file.stats'](name, follow_symlinks=False) - if atime is not None: - if six.text_type(atime) != six.text_type(stats['atime']): - return None, 'Times set to be updated on file {0}'.format(name) - if mtime is not None: - if six.text_type(mtime) != six.text_type(stats['mtime']): - return None, 'Times set to be updated on file {0}'.format(name) - return True, 'File {0} exists and has the correct times'.format(name) + ret['comment'] = 'File {0} is set to be created'.format(name) + else: + stats = __salt__['file.stats'](name, follow_symlinks=False) + if ((atime is not None + and six.text_type(atime) != 
six.text_type(stats['atime'])) or + (mtime is not None + and six.text_type(mtime) != six.text_type(stats['mtime']))): + ret['comment'] = 'Times set to be updated on file {0}'.format(name) + ret['changes'] = {'touched': name} + else: + ret['result'] = True + ret['comment'] = 'File {0} exists and has the correct times'.format(name) + return ret def _get_symlink_ownership(path): @@ -1006,36 +1015,36 @@ def _symlink_check(name, target, force, user, group, win_owner): ''' Check the symlink function ''' - pchanges = {} + changes = {} if not os.path.exists(name) and not __salt__['file.is_link'](name): - pchanges['new'] = name + changes['new'] = name return None, 'Symlink {0} to {1} is set for creation'.format( name, target - ), pchanges + ), changes if __salt__['file.is_link'](name): if __salt__['file.readlink'](name) != target: - pchanges['change'] = name + changes['change'] = name return None, 'Link {0} target is set to be changed to {1}'.format( name, target - ), pchanges + ), changes else: result = True msg = 'The symlink {0} is present'.format(name) if not _check_symlink_ownership(name, user, group, win_owner): result = None - pchanges['ownership'] = '{0}:{1}'.format(*_get_symlink_ownership(name)) + changes['ownership'] = '{0}:{1}'.format(*_get_symlink_ownership(name)) msg += ( ', but the ownership of the symlink would be changed ' 'from {2}:{3} to {0}:{1}' ).format(user, group, *_get_symlink_ownership(name)) - return result, msg, pchanges + return result, msg, changes else: if force: return None, ('The file or directory {0} is set for removal to ' 'make way for a new symlink targeting {1}' - .format(name, target)), pchanges + .format(name, target)), changes return False, ('File or directory exists where the symlink {0} ' - 'should be. Did you mean to use force?'.format(name)), pchanges + 'should be. 
Did you mean to use force?'.format(name)), changes def _test_owner(kwargs, user=None): @@ -1197,12 +1206,12 @@ def _shortcut_check(name, ''' Check the shortcut function ''' - pchanges = {} + changes = {} if not os.path.exists(name): - pchanges['new'] = name + changes['new'] = name return None, 'Shortcut "{0}" to "{1}" is set for creation'.format( name, target - ), pchanges + ), changes if os.path.isfile(name): shell = win32com.client.Dispatch("WScript.Shell") @@ -1222,28 +1231,28 @@ def _shortcut_check(name, ) if not all(state_checks): - pchanges['change'] = name + changes['change'] = name return None, 'Shortcut "{0}" target is set to be changed to "{1}"'.format( name, target - ), pchanges + ), changes else: result = True msg = 'The shortcut "{0}" is present'.format(name) if not _check_shortcut_ownership(name, user): result = None - pchanges['ownership'] = '{0}'.format(_get_shortcut_ownership(name)) + changes['ownership'] = '{0}'.format(_get_shortcut_ownership(name)) msg += ( ', but the ownership of the shortcut would be changed ' 'from {1} to {0}' ).format(user, _get_shortcut_ownership(name)) - return result, msg, pchanges + return result, msg, changes else: if force: return None, ('The link or directory "{0}" is set for removal to ' 'make way for a new shortcut targeting "{1}"' - .format(name, target)), pchanges + .format(name, target)), changes return False, ('Link or directory exists where the shortcut "{0}" ' - 'should be. Did you mean to use force?'.format(name)), pchanges + 'should be. Did you mean to use force?'.format(name)), changes def _makedirs(name, @@ -1473,12 +1482,12 @@ def symlink( msg += '.' 
return _error(ret, msg) - presult, pcomment, ret['pchanges'] = _symlink_check(name, - target, - force, - user, - group, - win_owner) + presult, pcomment, pchanges = _symlink_check(name, + target, + force, + user, + group, + win_owner) if not os.path.isdir(os.path.dirname(name)): if makedirs: @@ -1511,6 +1520,7 @@ def symlink( if __opts__['test']: ret['result'] = presult ret['comment'] = pcomment + ret['changes'] = pchanges return ret if __salt__['file.is_link'](name): @@ -1633,7 +1643,6 @@ def absent(name, ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': ''} if not name: @@ -1645,9 +1654,9 @@ def absent(name, if name == '/': return _error(ret, 'Refusing to make "/" absent') if os.path.isfile(name) or os.path.islink(name): - ret['pchanges']['removed'] = name if __opts__['test']: ret['result'] = None + ret['changes']['removed'] = name ret['comment'] = 'File {0} is set for removal'.format(name) return ret try: @@ -1662,9 +1671,9 @@ def absent(name, return _error(ret, '{0}'.format(exc)) elif os.path.isdir(name): - ret['pchanges']['removed'] = name if __opts__['test']: ret['result'] = None + ret['changes']['removed'] = name ret['comment'] = 'Directory {0} is set for removal'.format(name) return ret try: @@ -1726,7 +1735,6 @@ def tidied(name, ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': ''} @@ -1823,7 +1831,6 @@ def exists(name, ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': ''} if not name: @@ -1848,7 +1855,6 @@ def missing(name, ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': ''} if not name: @@ -2457,7 +2463,6 @@ def managed(name, name = os.path.expanduser(name) ret = {'changes': {}, - 'pchanges': {}, 'comment': '', 'name': name, 'result': True} @@ -2700,7 +2705,7 @@ def managed(name, try: if __opts__['test']: if 'file.check_managed_changes' in __salt__: - ret['pchanges'] = __salt__['file.check_managed_changes']( + ret['changes'] = 
__salt__['file.check_managed_changes']( name, source, source_hash, @@ -2731,15 +2736,15 @@ def managed(name, reset=win_perms_reset) except CommandExecutionError as exc: if exc.strerror.startswith('Path not found'): - ret['pchanges'] = '{0} will be created'.format(name) + ret['changes'] = '{0} will be created'.format(name) - if isinstance(ret['pchanges'], tuple): - ret['result'], ret['comment'] = ret['pchanges'] - elif ret['pchanges']: + if isinstance(ret['changes'], tuple): + ret['result'], ret['comment'] = ret['changes'] + elif ret['changes']: ret['result'] = None ret['comment'] = 'The file {0} is set to be changed'.format(name) - if 'diff' in ret['pchanges'] and not show_changes: - ret['pchanges']['diff'] = '' + if 'diff' in ret['changes'] and not show_changes: + ret['changes']['diff'] = '' else: ret['result'] = True ret['comment'] = 'The file {0} is in the correct state'.format(name) @@ -3181,7 +3186,6 @@ def directory(name, name = os.path.normcase(os.path.expanduser(name)) ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': ''} if not name: @@ -3255,19 +3259,19 @@ def directory(name, # Remove whatever is in the way if os.path.isfile(name): if __opts__['test']: - ret['pchanges']['forced'] = 'File was forcibly replaced' + ret['changes']['forced'] = 'File would be forcibly replaced' else: os.remove(name) ret['changes']['forced'] = 'File was forcibly replaced' elif __salt__['file.is_link'](name): if __opts__['test']: - ret['pchanges']['forced'] = 'Symlink was forcibly replaced' + ret['changes']['forced'] = 'Symlink would be forcibly replaced' else: __salt__['file.remove'](name) ret['changes']['forced'] = 'Symlink was forcibly replaced' else: if __opts__['test']: - ret['pchanges']['forced'] = 'Directory was forcibly replaced' + ret['changes']['forced'] = 'Directory would be forcibly replaced' else: __salt__['file.remove'](name) ret['changes']['forced'] = 'Directory was forcibly replaced' @@ -3296,11 +3300,11 @@ def directory(name, 
exclude_pat, max_depth, follow_symlinks) if pchanges: - ret['pchanges'].update(pchanges) + ret['changes'].update(pchanges) # Don't run through the reset of the function if there are no changes to be # made - if not ret['pchanges'] or __opts__['test']: + if __opts__['test'] or not ret['changes']: ret['result'] = presult ret['comment'] = pcomment return ret @@ -3415,7 +3419,7 @@ def directory(name, dir_mode = None if 'silent' in recurse_set: - ret['pchanges'] = 'Changes silenced' + ret['changes'] = 'Changes silenced' check_files = 'ignore_files' not in recurse_set check_dirs = 'ignore_dirs' not in recurse_set @@ -3743,7 +3747,6 @@ def recurse(name, ret = { 'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': {} # { path: [comment, ...] } } @@ -4042,7 +4045,6 @@ def retention_schedule(name, retain, strptime_format=None, timezone=None): name = os.path.expanduser(name) ret = {'name': name, 'changes': {'retained': [], 'deleted': [], 'ignored': []}, - 'pchanges': {'retained': [], 'deleted': [], 'ignored': []}, 'result': True, 'comment': ''} if not name: @@ -4152,7 +4154,7 @@ def retention_schedule(name, retain, strptime_format=None, timezone=None): 'deleted': deletable_files, 'ignored': sorted(list(ignored_files), reverse=True), } - ret['pchanges'] = changes + ret['changes'] = changes # TODO: track and report how much space was / would be reclaimed if __opts__['test']: @@ -4293,7 +4295,6 @@ def line(name, content=None, match=None, mode=None, location=None, name = os.path.expanduser(name) ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': ''} if not name: @@ -4327,14 +4328,13 @@ def line(name, content=None, match=None, mode=None, location=None, before=before, after=after, show_changes=show_changes, backup=backup, quiet=quiet, indent=indent) if changes: - ret['pchanges']['diff'] = changes + ret['changes']['diff'] = changes if __opts__['test']: ret['result'] = None - ret['comment'] = 'Changes would be 
made:\ndiff:\n{0}'.format(changes) + ret['comment'] = 'Changes would be made' else: ret['result'] = True ret['comment'] = 'Changes were made' - ret['changes'] = {'diff': changes} else: ret['result'] = True ret['comment'] = 'No changes needed to be made' @@ -4484,7 +4484,6 @@ def replace(name, ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': ''} if not name: @@ -4514,14 +4513,13 @@ def replace(name, backslash_literal=backslash_literal) if changes: - ret['pchanges']['diff'] = changes + ret['changes']['diff'] = changes if __opts__['test']: ret['result'] = None - ret['comment'] = 'Changes would have been made:\ndiff:\n{0}'.format(changes) + ret['comment'] = 'Changes would have been made' else: ret['result'] = True ret['comment'] = 'Changes were made' - ret['changes'] = {'diff': changes} else: ret['result'] = True ret['comment'] = 'No changes needed to be made' @@ -4757,7 +4755,6 @@ def blockreplace( ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': ''} if not name: @@ -4832,13 +4829,11 @@ def blockreplace( return ret if changes: - ret['pchanges'] = {'diff': changes} + ret['changes']['diff'] = changes if __opts__['test']: - ret['changes']['diff'] = ret['pchanges']['diff'] ret['result'] = None ret['comment'] = 'Changes would be made' else: - ret['changes']['diff'] = ret['pchanges']['diff'] ret['result'] = True ret['comment'] = 'Changes were made' else: @@ -4889,7 +4884,6 @@ def comment(name, regex, char='#', backup='.bak'): ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': ''} if not name: @@ -4919,8 +4913,8 @@ def comment(name, regex, char='#', backup='.bak'): else: return _error(ret, '{0}: Pattern not found'.format(unanchor_regex)) - ret['pchanges'][name] = 'updated' if __opts__['test']: + ret['changes'][name] = 'updated' ret['comment'] = 'File {0} is set to be updated'.format(name) ret['result'] = None return ret @@ -4999,7 +4993,6 @@ def uncomment(name, regex, char='#', 
backup='.bak'): ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': ''} if not name: @@ -5026,26 +5019,20 @@ def uncomment(name, regex, char='#', backup='.bak'): else: return _error(ret, '{0}: Pattern not found'.format(regex)) - ret['pchanges'][name] = 'updated' if __opts__['test']: + ret['changes'][name] = 'updated' ret['comment'] = 'File {0} is set to be updated'.format(name) ret['result'] = None return ret with salt.utils.files.fopen(name, 'rb') as fp_: - slines = fp_.read() - if six.PY3: - slines = slines.decode(__salt_system_encoding__) - slines = slines.splitlines(True) + slines = salt.utils.data.decode(fp_.readlines()) # Perform the edit __salt__['file.comment_line'](name, regex, char, False, backup) with salt.utils.files.fopen(name, 'rb') as fp_: - nlines = fp_.read() - if six.PY3: - nlines = nlines.decode(__salt_system_encoding__) - nlines = nlines.splitlines(True) + nlines = salt.utils.data.decode(fp_.readlines()) # Check the result ret['result'] = __salt__['file.search']( @@ -5209,10 +5196,9 @@ def append(name, .. 
versionadded:: 0.9.5 ''' ret = {'name': name, - 'changes': {}, - 'pchanges': {}, - 'result': False, - 'comment': ''} + 'changes': {}, + 'result': False, + 'comment': ''} if not name: return _error(ret, 'Must provide name to file.append') @@ -5243,18 +5229,20 @@ def append(name, except CommandExecutionError as exc: return _error(ret, 'Drive {0} is not mapped'.format(exc.message)) - if salt.utils.platform.is_windows(): - check_res, check_msg, ret['pchanges'] = _check_directory_win(dirname) - else: - check_res, check_msg, ret['pchanges'] = _check_directory(dirname) + check_res, check_msg, check_changes = _check_directory_win(dirname) \ + if salt.utils.platform.is_windows() \ + else _check_directory(dirname) if not check_res: + ret['changes'] = check_changes return _error(ret, check_msg) check_res, check_msg = _check_file(name) if not check_res: # Try to create the file - touch(name, makedirs=makedirs) + touch_ret = touch(name, makedirs=makedirs) + if __opts__['test']: + return touch_ret retry_res, retry_msg = _check_file(name) if not retry_res: return _error(ret, check_msg) @@ -5495,7 +5483,6 @@ def prepend(name, ret = {'name': name, 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': ''} if not name: @@ -5525,17 +5512,20 @@ def prepend(name, except CommandExecutionError as exc: return _error(ret, 'Drive {0} is not mapped'.format(exc.message)) - if salt.utils.platform.is_windows(): - check_res, check_msg, ret['pchanges'] = _check_directory_win(dirname) - else: - check_res, check_msg, ret['pchanges'] = _check_directory(dirname) + check_res, check_msg, check_changes = _check_directory_win(dirname) \ + if salt.utils.platform.is_windows() \ + else _check_directory(dirname) + if not check_res: + ret['changes'] = check_changes return _error(ret, check_msg) check_res, check_msg = _check_file(name) if not check_res: # Try to create the file - touch(name, makedirs=makedirs) + touch_ret = touch(name, makedirs=makedirs) + if __opts__['test']: + return touch_ret 
retry_res, retry_msg = _check_file(name) if not retry_res: return _error(ret, check_msg) @@ -6116,7 +6106,7 @@ def touch(name, atime=None, mtime=None, makedirs=False): ) if __opts__['test']: - ret['result'], ret['comment'] = _check_touch(name, atime, mtime) + ret.update(_check_touch(name, atime, mtime)) return ret if makedirs: @@ -6394,7 +6384,6 @@ def rename(name, source, force=False, makedirs=False): if not force: ret['comment'] = ('The target file "{0}" exists and will not be ' 'overwritten'.format(name)) - ret['result'] = False return ret elif not __opts__['test']: # Remove the destination to prevent problems later @@ -7386,17 +7375,18 @@ def shortcut( msg += '.' return _error(ret, msg) - presult, pcomment, ret['pchanges'] = _shortcut_check(name, - target, - arguments, - working_dir, - description, - icon_location, - force, - user) + presult, pcomment, pchanges = _shortcut_check(name, + target, + arguments, + working_dir, + description, + icon_location, + force, + user) if __opts__['test']: ret['result'] = presult ret['comment'] = pcomment + ret['changes'] = pchanges return ret if not os.path.isdir(os.path.dirname(name)): diff --git a/salt/states/glance_image.py b/salt/states/glance_image.py index aff285a48d..d9d9e971c3 100644 --- a/salt/states/glance_image.py +++ b/salt/states/glance_image.py @@ -52,15 +52,16 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['glanceng.setup_clouds'](auth) image = __salt__['glanceng.image_get'](name=name) if not image: - if __opts__['test'] is True: + if __opts__['test']: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Image {} will be created.'.format(name) return ret @@ -91,10 +92,9 @@ def absent(name, auth=None): image = __salt__['glanceng.image_get'](name=name) if image: - if __opts__['test'] is True: + if __opts__['test']: ret['result'] = None ret['changes'] = {'name': name} - 
ret['pchanges'] = ret['changes'] ret['comment'] = 'Image {} will be deleted.'.format(name) return ret diff --git a/salt/states/group.py b/salt/states/group.py index acf775134c..bc2ea53de1 100644 --- a/salt/states/group.py +++ b/salt/states/group.py @@ -83,8 +83,11 @@ def _changes(name, ret['comment'] = 'Invalid gid' return ret - if members: - # -- if new member list if different than the current + if members is not None and not members: + if set(lgrp['members']).symmetric_difference(members): + change['delusers'] = set(lgrp['members']) + elif members: + # if new member list if different than the current if set(lgrp['members']).symmetric_difference(members): change['members'] = members @@ -165,7 +168,7 @@ def present(name, 'result': True, 'comment': 'Group {0} is present and up to date'.format(name)} - if members and (addusers or delusers): + if members is not None and (addusers is not None or delusers is not None): ret['result'] = None ret['comment'] = ( 'Error: Conflicting options "members" with "addusers" and/or' diff --git a/salt/states/kernelpkg.py b/salt/states/kernelpkg.py index 1f28debd1e..a94c1ae454 100644 --- a/salt/states/kernelpkg.py +++ b/salt/states/kernelpkg.py @@ -144,8 +144,7 @@ def latest_active(name, at_time=None, **kwargs): # pylint: disable=unused-argum if __opts__['test']: ret['result'] = None - ret['changes'] = {} - ret['pchanges'] = {'kernel': { + ret['changes'] = {'kernel': { 'old': active, 'new': latest }} diff --git a/salt/states/keystone_domain.py b/salt/states/keystone_domain.py index 27d98657e7..095a181cc0 100644 --- a/salt/states/keystone_domain.py +++ b/salt/states/keystone_domain.py @@ -56,15 +56,16 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) domain = __salt__['keystoneng.domain_get'](name=name) if not domain: - if __opts__['test'] is True: + if __opts__['test']: ret['result'] = None ret['changes'] = kwargs - 
ret['pchanges'] = ret['changes'] ret['comment'] = 'Domain {} will be created.'.format(name) return ret @@ -76,10 +77,9 @@ def present(name, auth=None, **kwargs): changes = __salt__['keystoneng.compare_changes'](domain, **kwargs) if changes: - if __opts__['test'] is True: + if __opts__['test']: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Domain {} will be updated.'.format(name) return ret @@ -111,7 +111,6 @@ def absent(name, auth=None): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'name': name} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Domain {} will be deleted.'.format(name) return ret diff --git a/salt/states/keystone_endpoint.py b/salt/states/keystone_endpoint.py index fb6151519d..7b19913572 100644 --- a/salt/states/keystone_endpoint.py +++ b/salt/states/keystone_endpoint.py @@ -101,6 +101,8 @@ def present(name, service_name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) success, val = _, endpoint = _common(ret, name, service_name, kwargs) @@ -111,7 +113,6 @@ def present(name, service_name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Endpoint will be created.' return ret @@ -131,7 +132,6 @@ def present(name, service_name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Endpoint will be updated.' return ret @@ -174,7 +174,6 @@ def absent(name, service_name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': endpoint.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Endpoint will be deleted.' 
return ret diff --git a/salt/states/keystone_group.py b/salt/states/keystone_group.py index ac00b6a553..106097cba8 100644 --- a/salt/states/keystone_group.py +++ b/salt/states/keystone_group.py @@ -73,6 +73,8 @@ def present(name, auth=None, **kwargs): __salt__['keystoneng.setup_cloud'](auth) + kwargs = __utils__['args.clean_kwargs'](**kwargs) + kwargs['name'] = name group = _common(kwargs) @@ -80,7 +82,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Group will be created.' return ret @@ -94,7 +95,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Group will be updated.' return ret @@ -120,6 +120,8 @@ def absent(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_cloud'](auth) kwargs['name'] = name @@ -129,7 +131,6 @@ def absent(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': group.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Group will be deleted.' return ret diff --git a/salt/states/keystone_project.py b/salt/states/keystone_project.py index 94a6cc52ac..bb9327b5db 100644 --- a/salt/states/keystone_project.py +++ b/salt/states/keystone_project.py @@ -72,6 +72,8 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) kwargs['name'] = name @@ -81,7 +83,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Project will be created.' 
return ret @@ -95,7 +96,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Project will be updated.' return ret @@ -121,6 +121,8 @@ def absent(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) kwargs['name'] = name @@ -130,7 +132,6 @@ def absent(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': project.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Project will be deleted.' return ret diff --git a/salt/states/keystone_role.py b/salt/states/keystone_role.py index 394a51cfb7..d90d45f0a2 100644 --- a/salt/states/keystone_role.py +++ b/salt/states/keystone_role.py @@ -52,6 +52,8 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) kwargs['name'] = name @@ -61,7 +63,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Role will be created.' return ret @@ -95,7 +96,6 @@ def absent(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': role.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Role will be deleted.' 
return ret diff --git a/salt/states/keystone_service.py b/salt/states/keystone_service.py index ac62b59584..faca6d6235 100644 --- a/salt/states/keystone_service.py +++ b/salt/states/keystone_service.py @@ -61,6 +61,8 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) service = __salt__['keystoneng.service_get'](name=name) @@ -69,7 +71,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Service will be created.' return ret @@ -84,7 +85,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Service will be updated.' return ret @@ -117,7 +117,6 @@ def absent(name, auth=None): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': service.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Service will be deleted.' return ret diff --git a/salt/states/keystone_user.py b/salt/states/keystone_user.py index 23f95fd260..a1bfd8d85e 100644 --- a/salt/states/keystone_user.py +++ b/salt/states/keystone_user.py @@ -83,6 +83,8 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) kwargs['name'] = name @@ -92,7 +94,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'User will be created.' return ret @@ -106,7 +107,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'User will be updated.' 
return ret @@ -133,6 +133,8 @@ def absent(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['keystoneng.setup_clouds'](auth) kwargs['name'] = name @@ -142,7 +144,6 @@ def absent(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': user.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'User will be deleted.' return ret diff --git a/salt/states/linux_acl.py b/salt/states/linux_acl.py index 966e4b58b2..ee8b6a3642 100644 --- a/salt/states/linux_acl.py +++ b/salt/states/linux_acl.py @@ -104,7 +104,6 @@ def present(name, acl_type, acl_name='', perms='', recurse=False, force=False): ret = {'name': name, 'result': True, 'changes': {}, - 'pchanges': {}, 'comment': ''} _octal = {'r': 4, 'w': 2, 'x': 1, '-': 0} @@ -172,7 +171,7 @@ def present(name, acl_type, acl_name='', perms='', recurse=False, force=False): acl_name, six.text_type(user[_search_name]['octal']), perms), - 'result': None, 'pchanges': changes}) + 'result': None, 'changes': changes}) return ret try: if force: @@ -195,7 +194,7 @@ def present(name, acl_type, acl_name='', perms='', recurse=False, force=False): if __opts__['test']: ret.update({'comment': 'New permissions will be applied for ' '{0}: {1}'.format(acl_name, perms), - 'result': None, 'pchanges': changes}) + 'result': None, 'changes': changes}) ret['result'] = None return ret @@ -337,7 +336,6 @@ def list_present(name, acl_type, acl_names=None, perms='', recurse=False, force= ret = {'name': name, 'result': True, 'changes': {}, - 'pchanges': {}, 'comment': ''} _octal = {'r': 4, 'w': 2, 'x': 1, '-': 0} @@ -381,7 +379,6 @@ def list_present(name, acl_type, acl_names=None, perms='', recurse=False, force= ret = {'name': name, 'result': True, 'changes': {}, - 'pchanges': {}, 'comment': 'Permissions and {}s are in the desired state'.format(acl_type)} return ret # The getfacl execution module lists default with empty names as being @@ 
-425,7 +422,7 @@ def list_present(name, acl_type, acl_names=None, perms='', recurse=False, force= acl_names, six.text_type(users[search_name]['octal']), perms), - 'result': None, 'pchanges': changes}) + 'result': None, 'changes': changes}) return ret try: if force: @@ -449,7 +446,7 @@ def list_present(name, acl_type, acl_names=None, perms='', recurse=False, force= if __opts__['test']: ret.update({'comment': 'New permissions will be applied for ' '{0}: {1}'.format(acl_names, perms), - 'result': None, 'pchanges': changes}) + 'result': None, 'changes': changes}) ret['result'] = None return ret @@ -476,7 +473,7 @@ def list_present(name, acl_type, acl_names=None, perms='', recurse=False, force= if __opts__['test']: ret.update({'comment': 'New permissions will be applied for ' '{0}: {1}'.format(acl_names, perms), - 'result': None, 'pchanges': changes}) + 'result': None, 'changes': changes}) ret['result'] = None return ret diff --git a/salt/states/netyang.py b/salt/states/netyang.py index e5914768bd..8884dc8bfb 100644 --- a/salt/states/netyang.py +++ b/salt/states/netyang.py @@ -95,8 +95,6 @@ def managed(name, compliance_report: ``False`` Return the compliance report in the comment. - The compliance report structured object can be found however - in the ``pchanges`` field of the output (not displayed on the CLI). .. versionadded:: 2017.7.3 diff --git a/salt/states/neutron_network.py b/salt/states/neutron_network.py index e9f2b8a0d0..191207e826 100644 --- a/salt/states/neutron_network.py +++ b/salt/states/neutron_network.py @@ -72,6 +72,8 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['neutronng.setup_clouds'](auth) kwargs['name'] = name @@ -81,7 +83,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Network will be created.' 
return ret @@ -115,7 +116,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Project will be updated.' return ret @@ -140,6 +140,8 @@ def absent(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['neutronng.setup_clouds'](auth) kwargs['name'] = name @@ -149,7 +151,6 @@ def absent(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': network.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Network will be deleted.' return ret diff --git a/salt/states/neutron_secgroup.py b/salt/states/neutron_secgroup.py index 7859ac60df..1a62ecd671 100644 --- a/salt/states/neutron_secgroup.py +++ b/salt/states/neutron_secgroup.py @@ -74,6 +74,8 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['neutronng.setup_clouds'](auth) if 'project_name' in kwargs: @@ -95,7 +97,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Security Group will be created.' return ret @@ -109,7 +110,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Security Group will be updated.' 
return ret @@ -133,6 +133,8 @@ def absent(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['neutronng.setup_clouds'](auth) kwargs['project_id'] = __salt__['keystoneng.project_get']( @@ -147,7 +149,6 @@ def absent(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': secgroup.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Security group will be deleted.' return ret diff --git a/salt/states/neutron_secgroup_rule.py b/salt/states/neutron_secgroup_rule.py index 888969e90d..ccc6f2f064 100644 --- a/salt/states/neutron_secgroup_rule.py +++ b/salt/states/neutron_secgroup_rule.py @@ -77,6 +77,8 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['neutronng.setup_clouds'](auth) if 'project_name' in kwargs: @@ -112,7 +114,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Security Group rule will be created.' return ret @@ -166,10 +167,9 @@ def absent(name, auth=None, **kwargs): rule_exists = True if rule_exists: - if __opts__['test'] is True: + if __opts__['test']: ret['result'] = None ret['changes'] = {'id': kwargs['rule_id']} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Security group rule will be deleted.' 
return ret diff --git a/salt/states/neutron_subnet.py b/salt/states/neutron_subnet.py index 43e4ab3ccf..58219019ee 100644 --- a/salt/states/neutron_subnet.py +++ b/salt/states/neutron_subnet.py @@ -96,16 +96,17 @@ def present(name, auth=None, **kwargs): 'result': True, 'comment': ''} + kwargs = __utils__['args.clean_kwargs'](**kwargs) + __salt__['neutronng.setup_clouds'](auth) kwargs['subnet_name'] = name subnet = __salt__['neutronng.subnet_get'](name=name) if subnet is None: - if __opts__['test'] is True: + if __opts__['test']: ret['result'] = None ret['changes'] = kwargs - ret['pchanges'] = ret['changes'] ret['comment'] = 'Subnet will be created.' return ret @@ -119,7 +120,6 @@ def present(name, auth=None, **kwargs): if __opts__['test'] is True: ret['result'] = None ret['changes'] = changes - ret['pchanges'] = ret['changes'] ret['comment'] = 'Project will be updated.' return ret @@ -160,7 +160,6 @@ def absent(name, auth=None): if __opts__['test'] is True: ret['result'] = None ret['changes'] = {'id': subnet.id} - ret['pchanges'] = ret['changes'] ret['comment'] = 'Project will be deleted.' 
return ret diff --git a/salt/states/pbm.py b/salt/states/pbm.py index 00945fc65c..836c95b807 100644 --- a/salt/states/pbm.py +++ b/salt/states/pbm.py @@ -156,8 +156,10 @@ def default_vsan_policy_configured(name, policy): '\'{1}\''.format(name, vcenter)) log.trace('policy = {0}'.format(policy)) changes_required = False - ret = {'name': name, 'changes': {}, 'result': None, 'comment': None, - 'pchanges': {}} + ret = {'name': name, + 'changes': {}, + 'result': None, + 'comment': None} comments = [] changes = {} changes_required = False @@ -266,13 +268,11 @@ def default_vsan_policy_configured(name, policy): 'Nothing to be done.'.format(vcenter)), 'result': True}) else: - ret.update({'comment': '\n'.join(comments)}) - if __opts__['test']: - ret.update({'pchanges': changes, - 'result': None}) - else: - ret.update({'changes': changes, - 'result': True}) + ret.update({ + 'comment': '\n'.join(comments), + 'changes': changes, + 'result': None if __opts__['test'] else True, + }) return ret @@ -286,8 +286,10 @@ def storage_policies_configured(name, policies): comments = [] changes = [] changes_required = False - ret = {'name': name, 'changes': {}, 'result': None, 'comment': None, - 'pchanges': {}} + ret = {'name': name, + 'changes': {}, + 'result': None, + 'comment': None} log.trace('policies = {0}'.format(policies)) si = None try: @@ -430,13 +432,11 @@ def storage_policies_configured(name, policies): 'Nothing to be done.'.format(vcenter)), 'result': True}) else: - ret.update({'comment': '\n'.join(comments)}) - if __opts__['test']: - ret.update({'pchanges': {'storage_policies': changes}, - 'result': None}) - else: - ret.update({'changes': {'storage_policies': changes}, - 'result': True}) + ret.update({ + 'comment': '\n'.join(comments), + 'changes': {'storage_policies': changes}, + 'result': None if __opts__['test'] else True, + }) return ret @@ -454,8 +454,10 @@ def default_storage_policy_assigned(name, policy, datastore): ''.format(name, policy, datastore)) changes = {} 
changes_required = False - ret = {'name': name, 'changes': {}, 'result': None, 'comment': None, - 'pchanges': {}} + ret = {'name': name, + 'changes': {}, + 'result': None, + 'comment': None} si = None try: si = __salt__['vsphere.get_service_instance_via_proxy']() @@ -488,14 +490,13 @@ def default_storage_policy_assigned(name, policy, datastore): ret.update({'comment': exc.strerror, 'result': False if not __opts__['test'] else None}) return ret + ret['comment'] = comment if changes_required: - if __opts__['test']: - ret.update({'result': None, - 'pchanges': changes}) - else: - ret.update({'result': True, - 'changes': changes}) + ret.update({ + 'changes': changes, + 'result': None if __opts__['test'] else True, + }) else: ret['result'] = True return ret diff --git a/salt/states/reg.py b/salt/states/reg.py index d1faa5a08d..0c2c760dfa 100644 --- a/salt/states/reg.py +++ b/salt/states/reg.py @@ -385,7 +385,6 @@ def present(name, ret = {'name': name, 'result': True, 'changes': {}, - 'pchanges': {}, 'comment': ''} hive, key = _parse_key(name) diff --git a/salt/states/saltmod.py b/salt/states/saltmod.py index a3dbbc6941..4c57f8973f 100644 --- a/salt/states/saltmod.py +++ b/salt/states/saltmod.py @@ -444,7 +444,8 @@ def function( kwarg=None, timeout=None, batch=None, - subset=None): + subset=None, + **kwargs): # pylint: disable=unused-argument ''' Execute a single module function on a remote minion via salt or salt-ssh @@ -495,15 +496,15 @@ def function( ''' func_ret = {'name': name, - 'changes': {}, - 'comment': '', - 'result': True} + 'changes': {}, + 'comment': '', + 'result': True} if kwarg is None: kwarg = {} if isinstance(arg, six.string_types): - func_ret['warnings'] = ['Please specify \'arg\' as a list, not a string. ' - 'Modifying in place, but please update SLS file ' - 'to remove this warning.'] + func_ret['warnings'] = [ + 'Please specify \'arg\' as a list of arguments.' 
+ ] arg = arg.split() cmd_kw = {'arg': arg or [], 'kwarg': kwarg, 'ret': ret, 'timeout': timeout} @@ -526,9 +527,8 @@ def function( fun = name if __opts__['test'] is True: - func_ret['comment'] = ( - 'Function {0} will be executed on target {1} as test={2}' - ).format(fun, tgt, six.text_type(False)) + func_ret['comment'] = \ + 'Function {0} would be executed on target {1}'.format(fun, tgt) func_ret['result'] = None return func_ret try: @@ -768,7 +768,7 @@ def runner(name, **kwargs): return ret -def parallel_runners(name, runners): +def parallel_runners(name, runners, **kwargs): # pylint: disable=unused-argument ''' Executes multiple runner modules on the master in parallel. diff --git a/salt/states/snapper.py b/salt/states/snapper.py index 0b8eea5396..c49b114162 100644 --- a/salt/states/snapper.py +++ b/salt/states/snapper.py @@ -199,8 +199,7 @@ def baseline_snapshot(name, number=None, tag=None, include_diff=True, config='ro filename=file).get(file, {})) if __opts__['test'] and status: - ret['pchanges'] = status - ret['changes'] = ret['pchanges'] + ret['changes'] = status ret['comment'] = "{0} files changes are set to be undone".format(len(status.keys())) ret['result'] = None elif __opts__['test'] and not status: diff --git a/salt/states/solrcloud.py b/salt/states/solrcloud.py index 3a00b85715..4079be7a6a 100644 --- a/salt/states/solrcloud.py +++ b/salt/states/solrcloud.py @@ -34,10 +34,9 @@ def alias(name, collections, **kwargs): 'changes': {}, 'result': False, 'comment': '', - 'pchanges': {}, } - if __salt__["solrcloud.alias_exists"](name, **kwargs): + if __salt__['solrcloud.alias_exists'](name, **kwargs): alias_content = __salt__['solrcloud.alias_get_collections'](name, **kwargs) diff = set(alias_content).difference(set(collections)) @@ -48,38 +47,31 @@ def alias(name, collections, **kwargs): if __opts__['test']: ret['comment'] = 'The alias "{0}" will be updated.'.format(name) - ret['pchanges'] = { - 'old': ",".join(alias_content), - 'new': ",".join(collections) 
- } ret['result'] = None else: - __salt__["solrcloud.alias_set_collections"](name, collections, **kwargs) + __salt__['solrcloud.alias_set_collections'](name, collections, **kwargs) ret['comment'] = 'The alias "{0}" has been updated.'.format(name) - ret['changes'] = { - 'old': ",".join(alias_content), - 'new': ",".join(collections) - } - ret['result'] = True + + ret['changes'] = { + 'old': ','.join(alias_content), + 'new': ','.join(collections), + } + else: if __opts__['test']: ret['comment'] = 'The alias "{0}" will be created.'.format(name) - ret['pchanges'] = { - 'old': None, - 'new': ",".join(collections) - } ret['result'] = None else: - __salt__["solrcloud.alias_set_collections"](name, collections, **kwargs) + __salt__['solrcloud.alias_set_collections'](name, collections, **kwargs) ret['comment'] = 'The alias "{0}" has been created.'.format(name) - ret['changes'] = { - 'old': None, - 'new': ",".join(collections) - } - ret['result'] = True + ret['changes'] = { + 'old': None, + 'new': ','.join(collections), + } + return ret @@ -101,7 +93,6 @@ def collection(name, options=None, **kwargs): 'changes': {}, 'result': False, 'comment': '', - 'pchanges': {}, } if options is None: @@ -137,42 +128,32 @@ def collection(name, options=None, **kwargs): if __opts__['test']: ret['comment'] = 'Collection options "{0}" will be changed.'.format(name) - ret['pchanges'] = { - 'old': salt.utils.json.dumps(current_options, sort_keys=True, indent=4, separators=(',', ': ')), - 'new': salt.utils.json.dumps(options, sort_keys=True, indent=4, separators=(',', ': ')) - } ret['result'] = None - - return ret else: - __salt__["solrcloud.collection_set_options"](name, diff, **kwargs) - + __salt__['solrcloud.collection_set_options'](name, diff, **kwargs) ret['comment'] = 'Parameters were updated for collection "{0}".'.format(name) ret['result'] = True - ret['changes'] = { - 'old': salt.utils.json.dumps(current_options, sort_keys=True, indent=4, separators=(',', ': ')), - 'new': 
salt.utils.json.dumps(options, sort_keys=True, indent=4, separators=(',', ': ')) - } - return ret + ret['changes'] = { + 'old': salt.utils.json.dumps(current_options, sort_keys=True, indent=4, separators=(',', ': ')), + 'new': salt.utils.json.dumps(options, sort_keys=True, indent=4, separators=(',', ': ')) + } + return ret else: + new_changes = salt.utils.json.dumps(options, sort_keys=True, indent=4, separators=(',', ': ')) if __opts__['test']: ret['comment'] = 'The collection "{0}" will be created.'.format(name) - ret['pchanges'] = { - 'old': None, - 'new': str('options=') + new_changes # future lint: disable=blacklisted-function - } ret['result'] = None else: __salt__["solrcloud.collection_create"](name, options, **kwargs) ret['comment'] = 'The collection "{0}" has been created.'.format(name) - ret['changes'] = { - 'old': None, - 'new': str('options=') + new_changes # future lint: disable=blacklisted-function - } - ret['result'] = True + ret['changes'] = { + 'old': None, + 'new': str('options=') + new_changes # future lint: disable=blacklisted-function + } + return ret diff --git a/salt/states/test.py b/salt/states/test.py index 4e7c0975dc..944eee92b1 100644 --- a/salt/states/test.py +++ b/salt/states/test.py @@ -67,7 +67,7 @@ def nop(name, **kwargs): return succeed_without_changes(name) -def succeed_without_changes(name): +def succeed_without_changes(name, **kwargs): # pylint: disable=unused-argument ''' Returns successful. @@ -85,7 +85,7 @@ def succeed_without_changes(name): return ret -def fail_without_changes(name): +def fail_without_changes(name, **kwargs): # pylint: disable=unused-argument ''' Returns failure. 
@@ -108,7 +108,7 @@ def fail_without_changes(name): return ret -def succeed_with_changes(name): +def succeed_with_changes(name, **kwargs): # pylint: disable=unused-argument ''' Returns successful and changes is not empty @@ -141,7 +141,7 @@ def succeed_with_changes(name): return ret -def fail_with_changes(name): +def fail_with_changes(name, **kwargs): # pylint: disable=unused-argument ''' Returns failure and changes is not empty. diff --git a/salt/states/win_servermanager.py b/salt/states/win_servermanager.py index 9b2d247ed9..2836e6253f 100644 --- a/salt/states/win_servermanager.py +++ b/salt/states/win_servermanager.py @@ -27,8 +27,7 @@ def installed(name, recurse=False, restart=False, source=None, - exclude=None, - **kwargs): + exclude=None): ''' Install the windows feature. To install a single feature, use the ``name`` parameter. To install multiple features, use the ``features`` parameter. @@ -113,15 +112,6 @@ def installed(name, - exclude: - Web-Server ''' - if 'force' in kwargs: - salt.utils.versions.warn_until( - 'Neon', - 'Parameter \'force\' has been detected in the argument list. This' - 'parameter is no longer used and has been replaced by \'recurse\'' - 'as of Salt 2018.3.0. This warning will be removed in Salt Neon.' - ) - kwargs.pop('force') - ret = {'name': name, 'result': True, 'changes': {}, diff --git a/salt/utils/data.py b/salt/utils/data.py index b88d5c795d..0de3992e71 100644 --- a/salt/utils/data.py +++ b/salt/utils/data.py @@ -455,13 +455,14 @@ def traverse_dict(data, key, default=None, delimiter=DEFAULT_TARGET_DELIM): data['foo']['bar']['baz'] if this value exists, and will otherwise return the dict in the default argument. 
''' + ptr = data try: for each in key.split(delimiter): - data = data[each] + ptr = ptr[each] except (KeyError, IndexError, TypeError): # Encountered a non-indexable value in the middle of traversing return default - return data + return ptr @jinja_filter('traverse') @@ -476,16 +477,17 @@ def traverse_dict_and_list(data, key, default=None, delimiter=DEFAULT_TARGET_DEL {'foo':{'bar':['baz']}} , if data like {'foo':{'bar':{'0':'baz'}}} then return data['foo']['bar']['0'] ''' + ptr = data for each in key.split(delimiter): - if isinstance(data, list): + if isinstance(ptr, list): try: idx = int(each) except ValueError: embed_match = False # Index was not numeric, lets look at any embedded dicts - for embedded in (x for x in data if isinstance(x, dict)): + for embedded in (x for x in ptr if isinstance(x, dict)): try: - data = embedded[each] + ptr = embedded[each] embed_match = True break except KeyError: @@ -495,15 +497,15 @@ def traverse_dict_and_list(data, key, default=None, delimiter=DEFAULT_TARGET_DEL return default else: try: - data = data[idx] + ptr = ptr[idx] except IndexError: return default else: try: - data = data[each] + ptr = ptr[each] except (KeyError, TypeError): return default - return data + return ptr def subdict_match(data, @@ -519,16 +521,33 @@ def subdict_match(data, former, as more deeply-nested matches are tried first. ''' def _match(target, pattern, regex_match=False, exact_match=False): + # The reason for using six.text_type first and _then_ using + # to_unicode as a fallback is because we want to eventually have + # unicode types for comparison below. If either value is numeric then + # six.text_type will turn it into a unicode string. However, if the + # value is a PY2 str type with non-ascii chars, then the result will be + # a UnicodeDecodeError. In those cases, we simply use to_unicode to + # decode it to unicode. 
The reason we can't simply use to_unicode to + # begin with is that (by design) to_unicode will raise a TypeError if a + # non-string/bytestring/bytearray value is passed. + try: + target = six.text_type(target).lower() + except UnicodeDecodeError: + target = salt.utils.stringutils.to_unicode(target).lower() + try: + pattern = six.text_type(pattern).lower() + except UnicodeDecodeError: + pattern = salt.utils.stringutils.to_unicode(pattern).lower() + if regex_match: try: - return re.match(pattern.lower(), six.text_type(target).lower()) + return re.match(pattern, target) except Exception: log.error('Invalid regex \'%s\' in match', pattern) return False - elif exact_match: - return six.text_type(target).lower() == pattern.lower() else: - return fnmatch.fnmatch(six.text_type(target).lower(), pattern.lower()) + return target == pattern if exact_match \ + else fnmatch.fnmatch(target, pattern) def _dict_match(target, pattern, regex_match=False, exact_match=False): wildcard = pattern.startswith('*:') @@ -548,11 +567,6 @@ def subdict_match(data, return True if wildcard: for key in target: - if _match(key, - pattern, - regex_match=regex_match, - exact_match=exact_match): - return True if isinstance(target[key], dict): if _dict_match(target[key], pattern, @@ -566,6 +580,17 @@ def subdict_match(data, regex_match=regex_match, exact_match=exact_match): return True + elif _match(target[key], + pattern, + regex_match=regex_match, + exact_match=exact_match): + return True + return False + + splits = expr.split(delimiter) + num_splits = len(splits) + if num_splits == 1: + # Delimiter not present, this can't possibly be a match return False splits = expr.split(delimiter) @@ -578,10 +603,16 @@ def subdict_match(data, # want to use are 3, 2, and 1, in that order. 
for idx in range(num_splits - 1, 0, -1): key = delimiter.join(splits[:idx]) - matchstr = delimiter.join(splits[idx:]) + if key == '*': + # We are matching on everything under the top level, so we need to + # treat the match as the entire data being passed in + matchstr = expr + match = data + else: + matchstr = delimiter.join(splits[idx:]) + match = traverse_dict_and_list(data, key, {}, delimiter=delimiter) log.debug("Attempting to match '%s' in '%s' using delimiter '%s'", matchstr, key, delimiter) - match = traverse_dict_and_list(data, key, {}, delimiter=delimiter) if match == {}: continue if isinstance(match, dict): diff --git a/salt/utils/event.py b/salt/utils/event.py index 910504902f..c336db2439 100644 --- a/salt/utils/event.py +++ b/salt/utils/event.py @@ -212,6 +212,32 @@ def tagify(suffix='', prefix='', base=SALT): return TAGPARTER.join([part for part in parts if part]) +def update_stats(stats, start_time, data): + ''' + Calculate the master stats and return the updated stat info + ''' + end_time = time.time() + cmd = data['cmd'] + # the jid is used as the create time + try: + jid = data['jid'] + except KeyError: + try: + jid = data['data']['__pub_jid'] + except KeyError: + log.info('jid not found in data, stats not updated') + return stats + create_time = int(time.mktime(time.strptime(jid, '%Y%m%d%H%M%S%f'))) + latency = start_time - create_time + duration = end_time - start_time + + stats[cmd]['runs'] += 1 + stats[cmd]['latency'] = (stats[cmd]['latency'] * (stats[cmd]['runs'] - 1) + latency) / stats[cmd]['runs'] + stats[cmd]['mean'] = (stats[cmd]['mean'] * (stats[cmd]['runs'] - 1) + duration) / stats[cmd]['runs'] + + return stats + + class SaltEvent(object): ''' Warning! 
Use the get_event function or the code will not be diff --git a/salt/utils/mac_utils.py b/salt/utils/mac_utils.py index bdf7e65f2e..4c8742f685 100644 --- a/salt/utils/mac_utils.py +++ b/salt/utils/mac_utils.py @@ -350,7 +350,11 @@ def _available_services(refresh=False): try: # This assumes most of the plist files # will be already in XML format - plist = plistlib.readPlist(true_path) + if six.PY2: + plist = plistlib.readPlist(true_path) + else: + with salt.utils.files.fopen(true_path, 'rb') as plist_handle: + plist = plistlib.load(plist_handle) except Exception: # If plistlib is unable to read the file we'll need to use diff --git a/salt/utils/napalm.py b/salt/utils/napalm.py index b3377f7b96..243eaac654 100644 --- a/salt/utils/napalm.py +++ b/salt/utils/napalm.py @@ -492,7 +492,6 @@ def default_ret(name): ''' ret = { 'name': name, - 'pchanges': {}, 'changes': {}, 'result': False, 'comment': '' @@ -510,22 +509,16 @@ def loaded_ret(ret, loaded, test, debug, compliance_report=False, opts=None): ''' # Always get the comment changes = {} - pchanges = {} ret['comment'] = loaded['comment'] if 'diff' in loaded: changes['diff'] = loaded['diff'] - pchanges['diff'] = loaded['diff'] if 'commit_id' in loaded: changes['commit_id'] = loaded['commit_id'] - pchanges['commit_id'] = loaded['commit_id'] if 'compliance_report' in loaded: if compliance_report: changes['compliance_report'] = loaded['compliance_report'] - pchanges['compliance_report'] = loaded['compliance_report'] if debug and 'loaded_config' in loaded: changes['loaded_config'] = loaded['loaded_config'] - pchanges['loaded_config'] = loaded['loaded_config'] - ret['pchanges'] = pchanges if changes.get('diff'): ret['comment'] = '{comment_base}\n\nConfiguration diff:\n\n{diff}'.format(comment_base=ret['comment'], diff=changes['diff']) diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py index 7f0eebc17b..ac0bca1325 100644 --- a/salt/utils/parsers.py +++ b/salt/utils/parsers.py @@ -20,6 +20,7 @@ import getpass import 
logging import optparse import traceback +import tempfile from functools import partial @@ -34,6 +35,7 @@ import salt.utils.data import salt.utils.files import salt.utils.jid import salt.utils.kinds as kinds +import salt.utils.network import salt.utils.platform import salt.utils.process import salt.utils.stringutils @@ -1902,6 +1904,69 @@ class SyndicOptionParser(six.with_metaclass(OptionParserMeta, self.get_config_file_path('minion')) +class SaltSupportOptionParser(six.with_metaclass(OptionParserMeta, OptionParser, ConfigDirMixIn, + MergeConfigMixIn, LogLevelMixIn, TimeoutMixIn)): + default_timeout = 5 + description = 'Salt Support is a program to collect all support data: logs, system configuration etc.' + usage = '%prog [options] \'\' [arguments]' + # ConfigDirMixIn config filename attribute + _config_filename_ = 'master' + + # LogLevelMixIn attributes + _default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level'] + _default_logging_logfile_ = config.DEFAULT_MASTER_OPTS['log_file'] + + def _mixin_setup(self): + self.add_option('-P', '--show-profiles', default=False, action='store_true', + dest='support_profile_list', help='Show available profiles') + self.add_option('-p', '--profile', default='', dest='support_profile', + help='Specify support profile or comma-separated profiles, e.g.: "salt,network"') + support_archive = '{t}/{h}-support.tar.bz2'.format(t=tempfile.gettempdir(), + h=salt.utils.network.get_fqhostname()) + self.add_option('-a', '--archive', default=support_archive, dest='support_archive', + help=('Specify name of the resulting support archive. 
' + 'Default is "{f}".'.format(f=support_archive))) + self.add_option('-u', '--unit', default='', dest='support_unit', + help='Specify examined unit (default "master").') + self.add_option('-U', '--show-units', default=False, action='store_true', dest='support_show_units', + help='Show available units') + self.add_option('-f', '--force', default=False, action='store_true', dest='support_archive_force_overwrite', + help='Force overwrite existing archive, if exists') + self.add_option('-o', '--out', default='null', dest='support_output_format', + help=('Set the default output using the specified outputter, ' + 'unless profile does not overrides this. Default: "yaml".')) + + def find_existing_configs(self, default): + ''' + Find configuration files on the system. + :return: + ''' + configs = [] + for cfg in [default, self._config_filename_, 'minion', 'proxy', 'cloud', 'spm']: + if not cfg: + continue + config_path = self.get_config_file_path(cfg) + if os.path.exists(config_path): + configs.append(cfg) + + if default and default not in configs: + raise SystemExit('Unknown configuration unit: {}'.format(default)) + + return configs + + def setup_config(self, cfg=None): + ''' + Open suitable config file. 
+ :return: + ''' + _opts, _args = optparse.OptionParser.parse_args(self) + configs = self.find_existing_configs(_opts.support_unit) + if cfg not in configs: + cfg = configs[0] + + return config.master_config(self.get_config_file_path(cfg)) + + class SaltCMDOptionParser(six.with_metaclass(OptionParserMeta, OptionParser, ConfigDirMixIn, diff --git a/salt/utils/reactor.py b/salt/utils/reactor.py index b2ac5abfcc..00328675eb 100644 --- a/salt/utils/reactor.py +++ b/salt/utils/reactor.py @@ -6,9 +6,11 @@ Functions which implement running reactor jobs # Import python libs from __future__ import absolute_import, print_function, unicode_literals +import collections import fnmatch import glob import logging +import time # Import salt libs import salt.client @@ -23,6 +25,7 @@ import salt.utils.process import salt.utils.yaml import salt.wheel import salt.defaults.exitcodes +from salt.utils.event import tagify # Import 3rd-party libs from salt.ext import six @@ -56,6 +59,9 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat local_minion_opts['file_client'] = 'local' self.minion = salt.minion.MasterMinion(local_minion_opts) salt.state.Compiler.__init__(self, opts, self.minion.rend) + self.event = salt.utils.event.get_master_event(opts, opts['sock_dir'], listen=False) + self.stats = collections.defaultdict(lambda: {'mean': 0, 'latency': 0, 'runs': 0}) + self.stat_clock = time.time() # We need __setstate__ and __getstate__ to avoid pickling errors since # 'self.rend' (from salt.state.Compiler) contains a function reference @@ -77,6 +83,17 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat 'log_queue_level': self.log_queue_level } + def _post_stats(self, stats): + ''' + Fire events with stat info if it's time + ''' + end_time = time.time() + if end_time - self.stat_clock > self.opts['master_stats_event_iter']: + # Fire the event with the stats and wipe the tracker + self.event.fire_event({'time': end_time - 
self.stat_clock, 'worker': self.name, 'stats': stats}, tagify(self.name, 'stats')) + self.stats = collections.defaultdict(lambda: {'mean': 0, 'latency': 0, 'runs': 0}) + self.stat_clock = end_time + def render_reaction(self, glob_ref, tag, data): ''' Execute the render system against a single reaction file and return @@ -246,6 +263,7 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat # skip all events fired by ourselves if data['data'].get('user') == self.wrap.event_user: continue + if data['tag'].endswith('salt/reactors/manage/add'): _data = data['data'] res = self.add_reactor(_data['event'], _data['reactors']) @@ -267,11 +285,18 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat continue chunks = self.reactions(data['tag'], data['data'], reactors) if chunks: + if self.opts['master_stats']: + _data = data['data'] + start = time.time() try: self.call_reactions(chunks) except SystemExit: log.warning('Exit ignored by reactor') + if self.opts['master_stats']: + stats = salt.utils.event.update_stats(self.stats, start, _data) + self._post_stats(stats) + class ReactWrap(object): ''' diff --git a/salt/utils/state.py b/salt/utils/state.py index b90f36beaa..371f393a4a 100644 --- a/salt/utils/state.py +++ b/salt/utils/state.py @@ -212,10 +212,6 @@ def merge_subreturn(original_return, sub_return, subkey=None): original_return.setdefault('changes', {}) original_return['changes'][subkey] = sub_return['changes'] - if sub_return.get('pchanges'): # pchanges may or may not exist - original_return.setdefault('pchanges', {}) - original_return['pchanges'][subkey] = sub_return['pchanges'] - return original_return diff --git a/salt/utils/win_dacl.py b/salt/utils/win_dacl.py index 06498f7172..afdace29eb 100644 --- a/salt/utils/win_dacl.py +++ b/salt/utils/win_dacl.py @@ -1957,17 +1957,15 @@ def _check_perms(obj_name, obj_type, new_perms, cur_perms, access_mode, ret): changes[user]['applies_to'] = applies_to if changes: - 
if 'perms' not in ret['pchanges']: - ret['pchanges']['perms'] = {} if 'perms' not in ret['changes']: ret['changes']['perms'] = {} for user in changes: user_name = get_name(principal=user) if __opts__['test'] is True: - if user not in ret['pchanges']['perms']: - ret['pchanges']['perms'][user] = {} - ret['pchanges']['perms'][user][access_mode] = changes[user][access_mode] + if user not in ret['changes']['perms']: + ret['changes']['perms'][user] = {} + ret['changes']['perms'][user][access_mode] = changes[user][access_mode] else: # Get applies_to applies_to = None @@ -2123,7 +2121,6 @@ def check_perms(obj_name, if not ret: ret = {'name': obj_name, 'changes': {}, - 'pchanges': {}, 'comment': [], 'result': True} orig_comment = '' @@ -2137,7 +2134,7 @@ def check_perms(obj_name, current_owner = get_owner(obj_name=obj_name, obj_type=obj_type) if owner != current_owner: if __opts__['test'] is True: - ret['pchanges']['owner'] = owner + ret['changes']['owner'] = owner else: try: set_owner(obj_name=obj_name, @@ -2155,7 +2152,7 @@ def check_perms(obj_name, if not inheritance == get_inheritance(obj_name=obj_name, obj_type=obj_type): if __opts__['test'] is True: - ret['pchanges']['inheritance'] = inheritance + ret['changes']['inheritance'] = inheritance else: try: set_inheritance( @@ -2202,9 +2199,9 @@ def check_perms(obj_name, if user_name.lower() not in set(k.lower() for k in grant_perms): if 'grant' in cur_perms['Not Inherited'][user_name]: if __opts__['test'] is True: - if 'remove_perms' not in ret['pchanges']: - ret['pchanges']['remove_perms'] = {} - ret['pchanges']['remove_perms'].update( + if 'remove_perms' not in ret['changes']: + ret['changes']['remove_perms'] = {} + ret['changes']['remove_perms'].update( {user_name: cur_perms['Not Inherited'][user_name]}) else: if 'remove_perms' not in ret['changes']: @@ -2220,9 +2217,9 @@ def check_perms(obj_name, if user_name.lower() not in set(k.lower() for k in deny_perms): if 'deny' in cur_perms['Not Inherited'][user_name]: if 
__opts__['test'] is True: - if 'remove_perms' not in ret['pchanges']: - ret['pchanges']['remove_perms'] = {} - ret['pchanges']['remove_perms'].update( + if 'remove_perms' not in ret['changes']: + ret['changes']['remove_perms'] = {} + ret['changes']['remove_perms'].update( {user_name: cur_perms['Not Inherited'][user_name]}) else: if 'remove_perms' not in ret['changes']: @@ -2246,7 +2243,7 @@ def check_perms(obj_name, ret['comment'] = '\n'.join(ret['comment']) # Set result for test = True - if __opts__['test'] and (ret['changes'] or ret['pchanges']): + if __opts__['test'] and ret['changes']: ret['result'] = None return ret diff --git a/scripts/salt-support b/scripts/salt-support new file mode 100755 index 0000000000..48ce141c67 --- /dev/null +++ b/scripts/salt-support @@ -0,0 +1,11 @@ +#!/usr/bin/env python +''' +Salt support is to collect logs, +debug data and system information +for support purposes. +''' + +from salt.scripts import salt_support + +if __name__ == '__main__': + salt_support() diff --git a/tests/integration/files/conf/minion b/tests/integration/files/conf/minion index 29fdd43e6c..2af31bfd6e 100644 --- a/tests/integration/files/conf/minion +++ b/tests/integration/files/conf/minion @@ -25,6 +25,7 @@ integration.test: True # Grains addons grains: test_grain: cheese + grain_path: /tmp/salt-tests-tmpdir/file-grain-test script: grail alot: many planets: diff --git a/tests/integration/files/file/base/onchanges_prereq.sls b/tests/integration/files/file/base/onchanges_prereq.sls new file mode 100644 index 0000000000..9ab27b71e2 --- /dev/null +++ b/tests/integration/files/file/base/onchanges_prereq.sls @@ -0,0 +1,22 @@ +one: + file.managed: + - name: {{ pillar['file1'] }} + - source: {{ pillar['source'] }} + +# This should run because there were changes +two: + test.succeed_without_changes: + - {{ pillar['req'] }}: + - file: one + +# Run the same state as "one" again, this should not cause changes +three: + file.managed: + - name: {{ pillar['file2'] }} + - 
source: {{ pillar['source'] }} + +# This should not run because there should be no changes +four: + test.succeed_without_changes: + - {{ pillar['req'] }}: + - file: three diff --git a/tests/integration/files/file/base/orch/req_test.sls b/tests/integration/files/file/base/orch/req_test.sls new file mode 100644 index 0000000000..cb992de829 --- /dev/null +++ b/tests/integration/files/file/base/orch/req_test.sls @@ -0,0 +1,3 @@ +{{ salt['runtests_helpers.get_salt_temp_dir_for_path']('orch.req_test') }}: + file.managed: + - contents: 'Hello world!' diff --git a/tests/integration/files/pillar/base/top.sls b/tests/integration/files/pillar/base/top.sls index a81bb77154..0bcfdfb9bd 100644 --- a/tests/integration/files/pillar/base/top.sls +++ b/tests/integration/files/pillar/base/top.sls @@ -11,7 +11,3 @@ base: 'localhost': - generic - blackout - 'N@mins not L@minion': - - ng1 - 'N@missing_minion': - - ng2 diff --git a/tests/integration/minion/test_pillar.py b/tests/integration/minion/test_pillar.py index 4a6c9e8882..82c8c4781e 100644 --- a/tests/integration/minion/test_pillar.py +++ b/tests/integration/minion/test_pillar.py @@ -194,6 +194,35 @@ class BasePillarTest(ModuleCase): ''' Tests for pillar decryption ''' + @classmethod + def setUpClass(cls): + os.makedirs(PILLAR_BASE) + with salt.utils.files.fopen(TOP_SLS, 'w') as fp_: + fp_.write(textwrap.dedent('''\ + base: + 'N@mins not L@minion': + - ng1 + 'N@missing_minion': + - ng2 + ''')) + + with salt.utils.files.fopen(os.path.join(PILLAR_BASE, 'ng1.sls'), 'w') as fp_: + fp_.write('pillar_from_nodegroup: True') + + with salt.utils.files.fopen(os.path.join(PILLAR_BASE, 'ng2.sls'), 'w') as fp_: + fp_.write('pillar_from_nodegroup_with_ghost: True') + + @classmethod + def tearDownClass(cls): + shutil.rmtree(PILLAR_BASE) + + def _build_opts(self, opts): + ret = copy.deepcopy(DEFAULT_OPTS) + for item in ADDITIONAL_OPTS: + ret[item] = self.master_opts[item] + ret.update(opts) + return ret + def test_pillar_top_compound_match(self, 
grains=None): ''' Test that a compound match topfile that refers to a nodegroup via N@ works @@ -202,12 +231,21 @@ class BasePillarTest(ModuleCase): if not grains: grains = {} grains['os'] = 'Fedora' - pillar_obj = pillar.Pillar(self.get_config('master', from_scratch=True), grains, 'minion', 'base') + nodegroup_opts = salt.utils.yaml.safe_load(textwrap.dedent('''\ + nodegroups: + min: minion + sub_min: sub_minion + mins: N@min or N@sub_min + missing_minion: L@minion,ghostminion + ''')) + + opts = self._build_opts(nodegroup_opts) + pillar_obj = pillar.Pillar(opts, grains, 'minion', 'base') ret = pillar_obj.compile_pillar() self.assertEqual(ret.get('pillar_from_nodegroup_with_ghost'), True) self.assertEqual(ret.get('pillar_from_nodegroup'), None) - sub_pillar_obj = pillar.Pillar(self.get_config('master', from_scratch=True), grains, 'sub_minion', 'base') + sub_pillar_obj = pillar.Pillar(opts, grains, 'sub_minion', 'base') sub_ret = sub_pillar_obj.compile_pillar() self.assertEqual(sub_ret.get('pillar_from_nodegroup_with_ghost'), None) self.assertEqual(sub_ret.get('pillar_from_nodegroup'), True) diff --git a/tests/integration/modules/test_state.py b/tests/integration/modules/test_state.py index 9f6cc05669..fc4b08bc0d 100644 --- a/tests/integration/modules/test_state.py +++ b/tests/integration/modules/test_state.py @@ -71,9 +71,7 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin): def setUp(self): super(StateModuleTest, self).setUp() destpath = os.path.join(FILES, 'file', 'base', 'testappend', 'firstif') - reline(destpath, destpath, force=True) destpath = os.path.join(FILES, 'file', 'base', 'testappend', 'secondif') - reline(destpath, destpath, force=True) sls = self.run_function('saltutil.sync_modules') assert isinstance(sls, list) @@ -1874,7 +1872,7 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin): for key, val in ret.items(): self.assertEqual(val['comment'], comment) - self.assertEqual(val['changes'], {}) + self.assertEqual(val['changes'], 
{'newfile': testfile}) def test_state_sls_id_test_state_test_post_run(self): ''' @@ -1907,7 +1905,7 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin): self.assertEqual( val['comment'], 'The file {0} is set to be changed'.format(file_name)) - self.assertEqual(val['changes'], {}) + self.assertEqual(val['changes'], {'newfile': file_name}) def test_state_sls_id_test_true_post_run(self): ''' @@ -1965,7 +1963,6 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin): 'result': True}, 'file_|-unless_false_onlyif_true_|-{0}{1}test.txt_|-managed'.format(TMP, os.path.sep): {'comment': 'Empty file', - 'pchanges': {}, 'name': '{0}{1}test.txt'.format(TMP, os.path.sep), 'start_time': '18:10:20.341753', 'result': True, diff --git a/tests/integration/modules/test_vault.py b/tests/integration/modules/test_vault.py new file mode 100644 index 0000000000..2539fd574d --- /dev/null +++ b/tests/integration/modules/test_vault.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +''' +Integration tests for the vault execution module +''' + +# Import Python Libs +from __future__ import absolute_import, print_function, unicode_literals +import inspect +import time + +# Import Salt Testing Libs +from tests.support.unit import skipIf +from tests.support.case import ModuleCase, ShellCase +from tests.support.helpers import destructiveTest, flaky +from tests.support.paths import FILES + +# Import Salt Libs +import salt.utils.path + +import logging +log = logging.getLogger(__name__) + + +@destructiveTest +@skipIf(not salt.utils.path.which('dockerd'), 'Docker not installed') +@skipIf(not salt.utils.path.which('vault'), 'Vault not installed') +class VaultTestCase(ModuleCase, ShellCase): + ''' + Test vault module + ''' + count = 0 + + def setUp(self): + ''' + SetUp vault container + ''' + if self.count == 0: + config = '{"backend": {"file": {"path": "/vault/file"}}, "default_lease_ttl": "168h", "max_lease_ttl": "720h"}' + self.run_state('docker_image.present', name='vault', tag='0.9.6') + 
self.run_state( + 'docker_container.running', + name='vault', + image='vault:0.9.6', + port_bindings='8200:8200', + environment={ + 'VAULT_DEV_ROOT_TOKEN_ID': 'testsecret', + 'VAULT_LOCAL_CONFIG': config, + }, + cap_add='IPC_LOCK', + ) + time.sleep(5) + ret = self.run_function( + 'cmd.retcode', + cmd='/usr/local/bin/vault login token=testsecret', + env={'VAULT_ADDR': 'http://127.0.0.1:8200'}, + ) + if ret != 0: + self.skipTest('unable to login to vault') + ret = self.run_function( + 'cmd.retcode', + cmd='/usr/local/bin/vault policy write testpolicy {0}/vault.hcl'.format(FILES), + env={'VAULT_ADDR': 'http://127.0.0.1:8200'}, + ) + if ret != 0: + self.skipTest('unable to assign policy to vault') + self.count += 1 + + def tearDown(self): + ''' + TearDown vault container + ''' + def count_tests(funcobj): + return inspect.ismethod(funcobj) and funcobj.__name__.startswith('test_') + numtests = len(inspect.getmembers(VaultTestCase, predicate=count_tests)) + if self.count >= numtests: + self.run_state('docker_container.stopped', name='vault') + self.run_state('docker_container.absent', name='vault') + self.run_state('docker_image.absent', name='vault', force=True) + + @flaky + def test_write_read_secret(self): + assert self.run_function('vault.write_secret', path='secret/my/secret', user='foo', password='bar') is True + assert self.run_function('vault.read_secret', arg=['secret/my/secret']) == {'password': 'bar', 'user': 'foo'} + + @flaky + def test_write_raw_read_secret(self): + assert self.run_function('vault.write_raw', + path='secret/my/secret', + raw={"user": "foo", "password": "bar"}) is True + assert self.run_function('vault.read_secret', arg=['secret/my/secret']) == {'password': 'bar', 'user': 'foo'} + + @flaky + def test_delete_secret(self): + assert self.run_function('vault.write_secret', path='secret/my/secret', user='foo', password='bar') is True + assert self.run_function('vault.delete_secret', arg=['secret/my/secret']) is True + + @flaky + def 
test_list_secrets(self): + assert self.run_function('vault.write_secret', path='secret/my/secret', user='foo', password='bar') is True + assert self.run_function('vault.list_secrets', arg=['secret/my/']) == {'keys': ['secret']} diff --git a/tests/integration/runners/test_state.py b/tests/integration/runners/test_state.py index eb1dfa98c3..626d9927da 100644 --- a/tests/integration/runners/test_state.py +++ b/tests/integration/runners/test_state.py @@ -643,3 +643,119 @@ class OrchEventTest(ShellCase): self.assertTrue(received) del listener signal.alarm(0) + + def test_orchestration_onchanges_and_prereq(self): + ''' + Test to confirm that the parallel state requisite works in orch + we do this by running 10 test.sleep's of 10 seconds, and insure it only takes roughly 10s + ''' + self.write_conf({ + 'fileserver_backend': ['roots'], + 'file_roots': { + 'base': [self.base_env], + }, + }) + + orch_sls = os.path.join(self.base_env, 'orch.sls') + with salt.utils.files.fopen(orch_sls, 'w') as fp_: + fp_.write(textwrap.dedent(''' + manage_a_file: + salt.state: + - tgt: minion + - sls: + - orch.req_test + + do_onchanges: + salt.function: + - tgt: minion + - name: test.ping + - onchanges: + - salt: manage_a_file + + do_prereq: + salt.function: + - tgt: minion + - name: test.ping + - prereq: + - salt: manage_a_file + ''')) + + listener = salt.utils.event.get_event( + 'master', + sock_dir=self.master_opts['sock_dir'], + transport=self.master_opts['transport'], + opts=self.master_opts) + + try: + jid1 = self.run_run_plus( + 'state.orchestrate', + 'orch', + test=True, + __reload_config=True).get('jid') + + # Run for real to create the file + self.run_run_plus( + 'state.orchestrate', + 'orch', + __reload_config=True).get('jid') + + # Run again in test mode. Since there were no changes, the + # requisites should not fire. 
+ jid2 = self.run_run_plus( + 'state.orchestrate', + 'orch', + test=True, + __reload_config=True).get('jid') + finally: + try: + os.remove(os.path.join(TMP, 'orch.req_test')) + except OSError: + pass + + assert jid1 is not None + assert jid2 is not None + + tags = {'salt/run/{0}/ret'.format(x): x for x in (jid1, jid2)} + ret = {} + + signal.signal(signal.SIGALRM, self.alarm_handler) + signal.alarm(self.timeout) + try: + while True: + event = listener.get_event(full=True) + if event is None: + continue + + if event['tag'] in tags: + ret[tags.pop(event['tag'])] = self.repack_state_returns( + event['data']['return']['data']['master'] + ) + if not tags: + # If tags is empty, we've grabbed all the returns we + # wanted, so let's stop listening to the event bus. + break + finally: + del listener + signal.alarm(0) + + for sls_id in ('manage_a_file', 'do_onchanges', 'do_prereq'): + # The first time through, all three states should have a None + # result, while the second time through, they should all have a + # True result. + assert ret[jid1][sls_id]['result'] is None, \ + 'result of {0} ({1}) is not None'.format( + sls_id, + ret[jid1][sls_id]['result']) + assert ret[jid2][sls_id]['result'] is True, \ + 'result of {0} ({1}) is not True'.format( + sls_id, + ret[jid2][sls_id]['result']) + + # The file.managed state should have shown changes in the test mode + # return data. + assert ret[jid1]['manage_a_file']['changes'] + + # After the file was created, running again in test mode should have + # shown no changes. 
+ assert not ret[jid2]['manage_a_file']['changes'], \ + ret[jid2]['manage_a_file']['changes'] diff --git a/tests/integration/shell/test_call.py b/tests/integration/shell/test_call.py index a5d4867e8d..0f8d1e18be 100644 --- a/tests/integration/shell/test_call.py +++ b/tests/integration/shell/test_call.py @@ -77,6 +77,7 @@ class CallTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMixin self.assertIn('hello', ''.join(out)) self.assertIn('Succeeded: 1', ''.join(out)) + @skipIf(True, 'This test causes the test to hang. Skipping until further investigation can occur.') @destructiveTest @skip_if_not_root @skipIf(salt.utils.platform.is_windows(), 'This test does not apply on Windows') @@ -114,11 +115,14 @@ class CallTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMixin if target in cur_pkgs: self.fail('Target package \'{0}\' already installed'.format(target)) - out = ''.join(self.run_call('--local pkg.install {0}'.format(target))) - self.assertIn('local: ----------', out) - self.assertIn('{0}: ----------'.format(target), out) - self.assertIn('new:', out) - self.assertIn('old:', out) + try: + out = ''.join(self.run_call('--local pkg.install {0}'.format(target))) + self.assertIn('local: ----------', out) + self.assertIn('{0}: ----------'.format(target), out) + self.assertIn('new:', out) + self.assertIn('old:', out) + finally: + self.run_call('--local pkg.remove {0}'.format(target)) @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win') @flaky diff --git a/tests/integration/states/test_file.py b/tests/integration/states/test_file.py index 09e19756f4..daa605dd5a 100644 --- a/tests/integration/states/test_file.py +++ b/tests/integration/states/test_file.py @@ -357,7 +357,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): file. 
''' grain_path = os.path.join(TMP, 'file-grain-test') - self.run_function('grains.set', ['grain_path', grain_path]) state_file = 'file-grainget' self.run_function('state.sls', [state_file]) @@ -744,7 +743,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): source_hash=uppercase_hash ) assert ret[state_name]['result'] is True - assert ret[state_name]['pchanges'] == {} assert ret[state_name]['changes'] == {} # Test uppercase source_hash using test=true @@ -757,7 +755,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): test=True ) assert ret[state_name]['result'] is True - assert ret[state_name]['pchanges'] == {} assert ret[state_name]['changes'] == {} finally: @@ -811,6 +808,87 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): result = self.run_function('cp.is_cached', [source, saltenv]) assert result == '', 'File is still cached at {0}'.format(result) + @with_tempfile(create=False) + @with_tempfile(create=False) + def test_file_managed_onchanges(self, file1, file2): + ''' + Test file.managed state with onchanges + ''' + pillar = {'file1': file1, + 'file2': file2, + 'source': 'salt://testfile', + 'req': 'onchanges'} + + # Lay down the file used in the below SLS to ensure that when it is + # run, there are no changes. + self.run_state( + 'file.managed', + name=pillar['file2'], + source=pillar['source']) + + ret = self.repack_state_returns( + self.run_function( + 'state.apply', + mods='onchanges_prereq', + pillar=pillar, + test=True, + ) + ) + # The file states should both exit with None + assert ret['one']['result'] is None, ret['one']['result'] + assert ret['three']['result'] is True, ret['three']['result'] + # The first file state should have changes, since a new file was + # created. The other one should not, since we already created that file + # before applying the SLS file. 
+ assert ret['one']['changes'] + assert not ret['three']['changes'], ret['three']['changes'] + # The state watching 'one' should have been run due to changes + assert ret['two']['comment'] == 'Success!', ret['two']['comment'] + # The state watching 'three' should not have been run + assert ret['four']['comment'] == \ + 'State was not run because none of the onchanges reqs changed', \ + ret['four']['comment'] + + @with_tempfile(create=False) + @with_tempfile(create=False) + def test_file_managed_prereq(self, file1, file2): + ''' + Test file.managed state with prereq + ''' + pillar = {'file1': file1, + 'file2': file2, + 'source': 'salt://testfile', + 'req': 'prereq'} + + # Lay down the file used in the below SLS to ensure that when it is + # run, there are no changes. + self.run_state( + 'file.managed', + name=pillar['file2'], + source=pillar['source']) + + ret = self.repack_state_returns( + self.run_function( + 'state.apply', + mods='onchanges_prereq', + pillar=pillar, + test=True, + ) + ) + # The file states should both exit with None + assert ret['one']['result'] is None, ret['one']['result'] + assert ret['three']['result'] is True, ret['three']['result'] + # The first file state should have changes, since a new file was + # created. The other one should not, since we already created that file + # before applying the SLS file. 
+ assert ret['one']['changes'] + assert not ret['three']['changes'], ret['three']['changes'] + # The state watching 'one' should have been run due to changes + assert ret['two']['comment'] == 'Success!', ret['two']['comment'] + # The state watching 'three' should not have been run + assert ret['four']['comment'] == 'No changes detected', \ + ret['four']['comment'] + def test_directory(self): ''' file.directory diff --git a/tests/support/helpers.py b/tests/support/helpers.py index f3af86924b..fa2ee235f3 100644 --- a/tests/support/helpers.py +++ b/tests/support/helpers.py @@ -211,7 +211,10 @@ def flaky(caller=None, condition=True): if attempt >= 3: raise exc backoff_time = attempt ** 2 - log.info('Found Exception. Waiting %s seconds to retry.', backoff_time) + log.info( + 'Found Exception. Waiting %s seconds to retry.', + backoff_time + ) time.sleep(backoff_time) return cls return wrap diff --git a/tests/support/unit.py b/tests/support/unit.py index 70cd8b7891..7e862e919f 100644 --- a/tests/support/unit.py +++ b/tests/support/unit.py @@ -268,6 +268,19 @@ class TestCase(_TestCase): ) # return _TestCase.assertNotAlmostEquals(self, *args, **kwargs) + def repack_state_returns(self, state_ret): + ''' + Accepts a state return dict and returns it back with the top level key + names rewritten such that the ID declaration is the key instead of the + State's unique tag. For example: 'foo' instead of + 'file_|-foo_|-/etc/foo.conf|-managed' + + This makes it easier to work with state returns when crafting asserts + after running states. + ''' + assert isinstance(state_ret, dict), state_ret + return {x.split('_|-')[1]: y for x, y in six.iteritems(state_ret)} + def failUnlessEqual(self, *args, **kwargs): raise DeprecationWarning( 'The {0}() function is deprecated. 
Please start using {1}() ' diff --git a/tests/unit/cli/test_support.py b/tests/unit/cli/test_support.py new file mode 100644 index 0000000000..85ea957d79 --- /dev/null +++ b/tests/unit/cli/test_support.py @@ -0,0 +1,477 @@ +# -*- coding: utf-8 -*- +''' + :codeauthor: Bo Maryniuk +''' + +from __future__ import absolute_import, print_function, unicode_literals + +from tests.support.unit import skipIf, TestCase +from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON + +from salt.cli.support.console import IndentOutput +from salt.cli.support.collector import SupportDataCollector, SaltSupport +from salt.utils.color import get_colors +from salt.utils.stringutils import to_bytes +import salt.exceptions +import salt.cli.support.collector +import salt.utils.files +import os +import yaml +import jinja2 + +try: + import pytest +except ImportError: + pytest = None + + +@skipIf(not bool(pytest), 'Pytest needs to be installed') +@skipIf(NO_MOCK, NO_MOCK_REASON) +class SaltSupportIndentOutputTestCase(TestCase): + ''' + Unit Tests for the salt-support indent output. + ''' + + def setUp(self): + ''' + Setup test + :return: + ''' + + self.message = 'Stubborn processes on dumb terminal' + self.device = MagicMock() + self.iout = IndentOutput(device=self.device) + self.colors = get_colors() + + def tearDown(self): + ''' + Remove instances after test run + :return: + ''' + del self.message + del self.device + del self.iout + del self.colors + + def test_standard_output(self): + ''' + Test console standard output. + ''' + self.iout.put(self.message) + assert self.device.write.called + assert self.device.write.call_count == 5 + for idx, data in enumerate(['', str(self.colors['CYAN']), self.message, str(self.colors['ENDC']), '\n']): + assert self.device.write.call_args_list[idx][0][0] == data + + def test_indent_output(self): + ''' + Test indent distance. 
+ :return: + ''' + self.iout.put(self.message, indent=10) + for idx, data in enumerate([' ' * 10, str(self.colors['CYAN']), self.message, str(self.colors['ENDC']), '\n']): + assert self.device.write.call_args_list[idx][0][0] == data + + def test_color_config(self): + ''' + Test color config changes on each ident. + :return: + ''' + + conf = {0: 'MAGENTA', 2: 'RED', 4: 'WHITE', 6: 'YELLOW'} + self.iout = IndentOutput(conf=conf, device=self.device) + for indent in sorted(list(conf)): + self.iout.put(self.message, indent=indent) + + step = 1 + for ident_key in sorted(list(conf)): + assert str(self.device.write.call_args_list[step][0][0]) == str(self.colors[conf[ident_key]]) + step += 5 + + +@skipIf(not bool(pytest), 'Pytest needs to be installed') +@skipIf(NO_MOCK, NO_MOCK_REASON) +class SaltSupportCollectorTestCase(TestCase): + ''' + Collector tests. + ''' + def setUp(self): + ''' + Setup the test case + :return: + ''' + self.archive_path = '/highway/to/hell' + self.output_device = MagicMock() + self.collector = SupportDataCollector(self.archive_path, self.output_device) + + def tearDown(self): + ''' + Tear down the test case elements + :return: + ''' + del self.collector + del self.archive_path + del self.output_device + + @patch('salt.cli.support.collector.tarfile.TarFile', MagicMock()) + def test_archive_open(self): + ''' + Test archive is opened. + + :return: + ''' + self.collector.open() + assert self.collector.archive_path == self.archive_path + with pytest.raises(salt.exceptions.SaltException) as err: + self.collector.open() + assert 'Archive already opened' in str(err) + + @patch('salt.cli.support.collector.tarfile.TarFile', MagicMock()) + def test_archive_close(self): + ''' + Test archive is opened. 
+ + :return: + ''' + self.collector.open() + self.collector._flush_content = lambda: None + self.collector.close() + assert self.collector.archive_path == self.archive_path + with pytest.raises(salt.exceptions.SaltException) as err: + self.collector.close() + assert 'Archive already closed' in str(err) + + def test_archive_addwrite(self): + ''' + Test add to the archive a section and write to it. + + :return: + ''' + archive = MagicMock() + with patch('salt.cli.support.collector.tarfile.TarFile', archive): + self.collector.open() + self.collector.add('foo') + self.collector.write(title='title', data='data', output='null') + self.collector._flush_content() + + assert (archive.bz2open().addfile.call_args[1]['fileobj'].read() + == to_bytes('title\n-----\n\nraw-content: data\n\n\n\n')) + + @patch('salt.utils.files.fopen', MagicMock(return_value='path=/dev/null')) + def test_archive_addlink(self): + ''' + Test add to the archive a section and link an external file or directory to it. + + :return: + ''' + archive = MagicMock() + with patch('salt.cli.support.collector.tarfile.TarFile', archive): + self.collector.open() + self.collector.add('foo') + self.collector.link(title='Backup Path', path='/path/to/backup.config') + self.collector._flush_content() + + assert archive.bz2open().addfile.call_count == 1 + assert (archive.bz2open().addfile.call_args[1]['fileobj'].read() + == to_bytes('Backup Path\n-----------\n\npath=/dev/null\n\n\n')) + + @patch('salt.utils.files.fopen', MagicMock(return_value='path=/dev/null')) + def test_archive_discard_section(self): + ''' + Test discard a section from the archive. 
+ + :return: + ''' + archive = MagicMock() + with patch('salt.cli.support.collector.tarfile.TarFile', archive): + self.collector.open() + self.collector.add('solar-interference') + self.collector.link(title='Thermal anomaly', path='/path/to/another/great.config') + self.collector.add('foo') + self.collector.link(title='Backup Path', path='/path/to/backup.config') + self.collector._flush_content() + assert archive.bz2open().addfile.call_count == 2 + assert (archive.bz2open().addfile.mock_calls[0][2]['fileobj'].read() + == to_bytes('Thermal anomaly\n---------------\n\npath=/dev/null\n\n\n')) + self.collector.close() + + archive = MagicMock() + with patch('salt.cli.support.collector.tarfile.TarFile', archive): + self.collector.open() + self.collector.add('solar-interference') + self.collector.link(title='Thermal anomaly', path='/path/to/another/great.config') + self.collector.discard_current() + self.collector.add('foo') + self.collector.link(title='Backup Path', path='/path/to/backup.config') + self.collector._flush_content() + assert archive.bz2open().addfile.call_count == 2 + assert (archive.bz2open().addfile.mock_calls[0][2]['fileobj'].read() + == to_bytes('Backup Path\n-----------\n\npath=/dev/null\n\n\n')) + self.collector.close() + + +@skipIf(not bool(pytest), 'Pytest needs to be installed') +@skipIf(NO_MOCK, NO_MOCK_REASON) +class SaltSupportRunnerTestCase(TestCase): + ''' + Test runner class. + ''' + + def setUp(self): + ''' + Set up test suite. + :return: + ''' + self.archive_path = '/dev/null' + self.output_device = MagicMock() + self.runner = SaltSupport() + self.runner.collector = SupportDataCollector(self.archive_path, self.output_device) + + def tearDown(self): + ''' + Tear down. + + :return: + ''' + del self.archive_path + del self.output_device + del self.runner + + def test_function_config(self): + ''' + Test function config formation. 
+ + :return: + ''' + self.runner.config = {} + msg = 'Electromagnetic energy loss' + assert self.runner._setup_fun_config({'description': msg}) == {'print_metadata': False, + 'file_client': 'local', + 'fun': '', 'kwarg': {}, + 'description': msg, + 'cache_jobs': False, 'arg': []} + + def test_local_caller(self): + ''' + Test local caller. + + :return: + ''' + msg = 'Because of network lag due to too many people playing deathmatch' + caller = MagicMock() + caller().call = MagicMock(return_value=msg) + + self.runner._get_caller = caller + self.runner.out = MagicMock() + assert self.runner._local_call({}) == msg + + caller().call = MagicMock(side_effect=SystemExit) + assert self.runner._local_call({}) == 'Data is not available at this moment' + + err_msg = "The UPS doesn't have a battery backup." + caller().call = MagicMock(side_effect=Exception(err_msg)) + assert self.runner._local_call({}) == "Unhandled exception occurred: The UPS doesn't have a battery backup." + + def test_local_runner(self): + ''' + Test local runner. + + :return: + ''' + msg = 'Big to little endian conversion error' + runner = MagicMock() + runner().run = MagicMock(return_value=msg) + + self.runner._get_runner = runner + self.runner.out = MagicMock() + assert self.runner._local_run({}) == msg + + runner().run = MagicMock(side_effect=SystemExit) + assert self.runner._local_run({}) == 'Runner is not available at this moment' + + err_msg = 'Trojan horse ran out of hay' + runner().run = MagicMock(side_effect=Exception(err_msg)) + assert self.runner._local_run({}) == 'Unhandled exception occurred: Trojan horse ran out of hay' + + @patch('salt.cli.support.intfunc', MagicMock(spec=[])) + def test_internal_function_call_stub(self): + ''' + Test missing internal function call is handled accordingly. 
+ + :return: + ''' + self.runner.out = MagicMock() + out = self.runner._internal_function_call({'fun': 'everythingisawesome', + 'arg': [], 'kwargs': {}}) + assert out == 'Function everythingisawesome is not available' + + def test_internal_function_call(self): + ''' + Test missing internal function call is handled accordingly. + + :return: + ''' + msg = 'Internet outage' + intfunc = MagicMock() + intfunc.everythingisawesome = MagicMock(return_value=msg) + self.runner.out = MagicMock() + with patch('salt.cli.support.intfunc', intfunc): + out = self.runner._internal_function_call({'fun': 'everythingisawesome', + 'arg': [], 'kwargs': {}}) + assert out == msg + + def test_get_action(self): + ''' + Test action meta gets parsed. + + :return: + ''' + action_meta = {'run:jobs.list_jobs_filter': {'info': 'List jobs filter', 'args': [1]}} + assert self.runner._get_action(action_meta) == ('List jobs filter', None, + {'fun': 'run:jobs.list_jobs_filter', 'kwargs': {}, 'arg': [1]}) + action_meta = {'user.info': {'info': 'Information about "usbmux"', 'args': ['usbmux']}} + assert self.runner._get_action(action_meta) == ('Information about "usbmux"', None, + {'fun': 'user.info', 'kwargs': {}, 'arg': ['usbmux']}) + + def test_extract_return(self): + ''' + Test extract return from the output. + + :return: + ''' + out = {'key': 'value'} + assert self.runner._extract_return(out) == out + assert self.runner._extract_return({'return': out}) == out + + def test_get_action_type(self): + ''' + Test action meta determines action type. + + :return: + ''' + action_meta = {'run:jobs.list_jobs_filter': {'info': 'List jobs filter', 'args': [1]}} + assert self.runner._get_action_type(action_meta) == 'run' + + action_meta = {'user.info': {'info': 'Information about "usbmux"', 'args': ['usbmux']}} + assert self.runner._get_action_type(action_meta) == 'call' + + @patch('os.path.exists', MagicMock(return_value=True)) + def test_cleanup(self): + ''' + Test cleanup routine. 
+ + :return: + ''' + arch = '/tmp/killme.zip' + unlink = MagicMock() + with patch('os.unlink', unlink): + self.runner.config = {'support_archive': arch} + self.runner.out = MagicMock() + self.runner._cleanup() + + assert self.runner.out.warning.call_args[0][0] == 'Terminated earlier, cleaning up' + unlink.assert_called_once_with(arch) + + @patch('os.path.exists', MagicMock(return_value=True)) + def test_check_existing_archive(self): + ''' + Test check existing archive. + + :return: + ''' + arch = '/tmp/endothermal-recalibration.zip' + unlink = MagicMock() + with patch('os.unlink', unlink), patch('os.path.exists', MagicMock(return_value=False)): + self.runner.config = {'support_archive': '', + 'support_archive_force_overwrite': True} + self.runner.out = MagicMock() + assert self.runner._check_existing_archive() + assert self.runner.out.warning.call_count == 0 + + with patch('os.unlink', unlink): + self.runner.config = {'support_archive': arch, + 'support_archive_force_overwrite': False} + self.runner.out = MagicMock() + assert not self.runner._check_existing_archive() + assert self.runner.out.warning.call_args[0][0] == 'File {} already exists.'.format(arch) + + with patch('os.unlink', unlink): + self.runner.config = {'support_archive': arch, + 'support_archive_force_overwrite': True} + self.runner.out = MagicMock() + assert self.runner._check_existing_archive() + assert self.runner.out.warning.call_args[0][0] == 'Overwriting existing archive: {}'.format(arch) + + +@skipIf(not bool(pytest), 'Pytest needs to be installed') +@skipIf(NO_MOCK, NO_MOCK_REASON) +class ProfileIntegrityTestCase(TestCase): + ''' + Default profile integrity + ''' + def setUp(self): + ''' + Set up test suite. 
+ + :return: + ''' + self.profiles = {} + profiles = os.path.join(os.path.dirname(salt.cli.support.collector.__file__), 'profiles') + for profile in os.listdir(profiles): + self.profiles[profile.split('.')[0]] = os.path.join(profiles, profile) + + def tearDown(self): + ''' + Tear down test suite. + + :return: + ''' + del self.profiles + + def _render_template_to_yaml(self, name, *args, **kwargs): + ''' + Get template referene for rendering. + :return: + ''' + with salt.utils.files.fopen(self.profiles[name]) as t_fh: + template = t_fh.read() + return yaml.load(jinja2.Environment().from_string(template).render(*args, **kwargs)) + + def test_non_template_profiles_parseable(self): + ''' + Test shipped default profile is YAML parse-able. + + :return: + ''' + for t_name in ['default', 'jobs-active', 'jobs-last', 'network', 'postgres']: + with salt.utils.files.fopen(self.profiles[t_name]) as ref: + try: + yaml.load(ref) + parsed = True + except Exception: + parsed = False + assert parsed + + def test_users_template_profile(self): + ''' + Test users template profile. + + :return: + ''' + users_data = self._render_template_to_yaml('users', salt=MagicMock(return_value=['pokemon'])) + assert len(users_data['all-users']) == 5 + for user_data in users_data['all-users']: + for tgt in ['user.list_groups', 'shadow.info', 'cron.raw_cron']: + if tgt in user_data: + assert user_data[tgt]['args'] == ['pokemon'] + + def test_jobs_trace_template_profile(self): + ''' + Test jobs-trace template profile. 
+ + :return: + ''' + jobs_trace = self._render_template_to_yaml('jobs-trace', runners=MagicMock(return_value=['0000'])) + assert len(jobs_trace['jobs-details']) == 1 + assert jobs_trace['jobs-details'][0]['run:jobs.list_job']['info'] == 'Details on JID 0000' + assert jobs_trace['jobs-details'][0]['run:jobs.list_job']['args'] == [0] diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py index d477f3b692..4245d37b16 100644 --- a/tests/unit/grains/test_core.py +++ b/tests/unit/grains/test_core.py @@ -1001,3 +1001,32 @@ class CoreGrainsTestCase(TestCase, LoaderModuleMockMixin): assert ret[count]['model'] == device[2] assert ret[count]['vendor'] == device[3] count += 1 + + @skipIf(not salt.utils.platform.is_linux(), 'System is not Linux') + def test_kernelparams_return(self): + expectations = [ + ('BOOT_IMAGE=/vmlinuz-3.10.0-693.2.2.el7.x86_64', + {'kernelparams': [('BOOT_IMAGE', '/vmlinuz-3.10.0-693.2.2.el7.x86_64')]}), + ('root=/dev/mapper/centos_daemon-root', + {'kernelparams': [('root', '/dev/mapper/centos_daemon-root')]}), + ('rhgb quiet ro', + {'kernelparams': [('rhgb', None), ('quiet', None), ('ro', None)]}), + ('param="value1"', + {'kernelparams': [('param', 'value1')]}), + ('param="value1 value2 value3"', + {'kernelparams': [('param', 'value1 value2 value3')]}), + ('param="value1 value2 value3" LANG="pl" ro', + {'kernelparams': [('param', 'value1 value2 value3'), ('LANG', 'pl'), ('ro', None)]}), + ('ipv6.disable=1', + {'kernelparams': [('ipv6.disable', '1')]}), + ('param="value1:value2:value3"', + {'kernelparams': [('param', 'value1:value2:value3')]}), + ('param="value1,value2,value3"', + {'kernelparams': [('param', 'value1,value2,value3')]}), + ('param="value1" param="value2" param="value3"', + {'kernelparams': [('param', 'value1'), ('param', 'value2'), ('param', 'value3')]}), + ] + + for cmdline, expectation in expectations: + with patch('salt.utils.files.fopen', mock_open(read_data=cmdline)): + self.assertEqual(core.kernelparams(), 
expectation) diff --git a/tests/unit/modules/test_network.py b/tests/unit/modules/test_network.py index 9ed307f1dc..ea3026299d 100644 --- a/tests/unit/modules/test_network.py +++ b/tests/unit/modules/test_network.py @@ -267,7 +267,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin): self.assertDictEqual(network.connect('host', 'port'), {'comment': ret, 'result': True}) - @skipIf(bool(ipaddress) is False, 'unable to import \'ipaddress\'') + @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'') def test_is_private(self): ''' Test for Check if the given IP address is a private address @@ -279,7 +279,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin): return_value=True): self.assertTrue(network.is_private('::1')) - @skipIf(bool(ipaddress) is False, 'unable to import \'ipaddress\'') + @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'') def test_is_loopback(self): ''' Test for Check if the given IP address is a loopback address diff --git a/tests/unit/modules/test_salt_version.py b/tests/unit/modules/test_salt_version.py new file mode 100644 index 0000000000..b781d883d3 --- /dev/null +++ b/tests/unit/modules/test_salt_version.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +''' +Unit tests for salt/modules/salt_version.py +''' + +# Import Python libs +from __future__ import absolute_import, print_function, unicode_literals + +# Import Salt Testing libs +from tests.support.unit import TestCase, skipIf +from tests.support.mock import ( + MagicMock, + patch, + NO_MOCK, + NO_MOCK_REASON +) + +# Import Salt libs +import salt.modules.salt_version as salt_version +import salt.version + + +@skipIf(NO_MOCK, NO_MOCK_REASON) +class SaltVersionTestCase(TestCase): + ''' + Test cases for salt.modules.salt_version + ''' + + # get_release_number tests: 3 + + def test_get_release_number_no_version(self): + ''' + Test that None is returned when the codename isn't found. 
+ ''' + assert salt_version.get_release_number('foo') is None + + @patch('salt.version.SaltStackVersion.LNAMES', {'foo': (12345, 0)}) + def test_get_release_number_unassigned(self): + ''' + Test that a string is returned when a version is found, but unassigned. + ''' + mock_str = 'No version assigned.' + assert salt_version.get_release_number('foo') == mock_str + + def test_get_release_number_success(self): + ''' + Test that a version is returned for a released codename + ''' + assert salt_version.get_release_number('Oxygen') == '2018.3' + + # is_equal tests: 3 + + @patch('salt.version.SaltStackVersion.LNAMES', {'foo': (1900, 5)}) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='1900.5.0')) + def test_is_equal_success(self): + ''' + Test that the current version is equal to the codename + ''' + assert salt_version.is_equal('foo') is True + + @patch('salt.version.SaltStackVersion.LNAMES', {'Oxygen': (2018, 3), + 'Nitrogen': (2017, 7)}) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + def test_is_equal_older_version(self): + ''' + Test that when an older codename is passed in, the function returns False. 
+ ''' + assert salt_version.is_equal('Nitrogen') is False + + @patch('salt.version.SaltStackVersion.LNAMES', {'Fluorine': (salt.version.MAX_SIZE - 100, 0)}) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + def test_is_equal_newer_version(self): + ''' + Test that when a newer codename is passed in, the function returns False + ''' + assert salt_version.is_equal('Fluorine') is False + + # is_newer tests: 3 + + @patch('salt.modules.salt_version.get_release_number', MagicMock(return_value='No version assigned.')) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + def test_is_newer_success(self): + ''' + Test that the current version is newer than the codename + ''' + assert salt_version.is_newer('Fluorine') is True + + @patch('salt.version.SaltStackVersion.LNAMES', {'Oxygen': (2018, 3)}) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + def test_is_newer_with_equal_version(self): + ''' + Test that when an equal codename is passed in, the function returns False. + ''' + assert salt_version.is_newer('Oxygen') is False + + @patch('salt.version.SaltStackVersion.LNAMES', {'Oxygen': (2018, 3), + 'Nitrogen': (2017, 7)}) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + def test_is_newer_with_older_version(self): + ''' + Test that when an older codename is passed in, the function returns False. 
+ ''' + assert salt_version.is_newer('Nitrogen') is False + + # is_older tests: 3 + + @patch('salt.modules.salt_version.get_release_number', MagicMock(return_value='2017.7')) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + def test_is_older_success(self): + ''' + Test that the current version is older than the codename + ''' + assert salt_version.is_older('Nitrogen') is True + + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + @patch('salt.version.SaltStackVersion.LNAMES', {'Oxygen': (2018, 3)}) + def test_is_older_with_equal_version(self): + ''' + Test that when an equal codename is passed in, the function returns False. + ''' + assert salt_version.is_older('Oxygen') is False + + @patch('salt.modules.salt_version.get_release_number', MagicMock(return_value='No version assigned.')) + @patch('salt.version.SaltStackVersion', MagicMock(return_value='2018.3.2')) + def test_is_older_with_newer_version(self): + ''' + Test that when an newer codename is passed in, the function returns False. + ''' + assert salt_version.is_older('Fluorine') is False + + # _check_release_cmp tests: 2 + + def test_check_release_cmp_no_codename(self): + ''' + Test that None is returned when the codename isn't found. 
+ ''' + assert salt_version._check_release_cmp('foo') is None + + def test_check_release_cmp_success(self): + ''' + Test that an int is returned from the version compare + ''' + assert isinstance(salt_version._check_release_cmp('Oxygen'), int) diff --git a/tests/unit/states/test_boto_cloudfront.py b/tests/unit/states/test_boto_cloudfront.py index a9ab77505a..cb9b831a35 100644 --- a/tests/unit/states/test_boto_cloudfront.py +++ b/tests/unit/states/test_boto_cloudfront.py @@ -92,7 +92,7 @@ class BotoCloudfrontTestCase(TestCase, LoaderModuleMockMixin): self.base_ret_with({ 'result': None, 'comment': comment, - 'pchanges': {'old': None, 'new': self.name}, + 'changes': {'old': None, 'new': self.name}, }), ) @@ -192,7 +192,7 @@ class BotoCloudfrontTestCase(TestCase, LoaderModuleMockMixin): self.base_ret_with({ 'result': None, 'comment': '\n'.join([header, diff]), - 'pchanges': {'diff': diff}, + 'changes': {'diff': diff}, }), ) diff --git a/tests/unit/states/test_boto_sqs.py b/tests/unit/states/test_boto_sqs.py index b2d51706a3..7c49e88ab6 100644 --- a/tests/unit/states/test_boto_sqs.py +++ b/tests/unit/states/test_boto_sqs.py @@ -75,7 +75,7 @@ class BotoSqsTestCase(TestCase, LoaderModuleMockMixin): ret.update({ 'result': None, 'comment': comt, - 'pchanges': {'old': None, 'new': 'mysqs'}, + 'changes': {'old': None, 'new': 'mysqs'}, }) self.assertDictEqual(boto_sqs.present(name), ret) diff = textwrap.dedent('''\ @@ -102,7 +102,7 @@ class BotoSqsTestCase(TestCase, LoaderModuleMockMixin): ] ret.update({ 'comment': comt, - 'pchanges': {'attributes': {'diff': diff}}, + 'changes': {'attributes': {'diff': diff}}, }) self.assertDictEqual(boto_sqs.present(name, attributes), ret) @@ -134,6 +134,6 @@ class BotoSqsTestCase(TestCase, LoaderModuleMockMixin): ret.update({ 'result': None, 'comment': comt, - 'pchanges': {'old': name, 'new': None}, + 'changes': {'old': name, 'new': None}, }) self.assertDictEqual(boto_sqs.absent(name), ret) diff --git 
a/tests/unit/states/test_esxdatacenter.py b/tests/unit/states/test_esxdatacenter.py index a55dd0308a..38d6f9a86b 100644 --- a/tests/unit/states/test_esxdatacenter.py +++ b/tests/unit/states/test_esxdatacenter.py @@ -64,7 +64,6 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): res = esxdatacenter.datacenter_configured('fake_dc') self.assertDictEqual(res, {'name': 'fake_dc', 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': 'Datacenter \'fake_dc\' already ' 'exists. Nothing to be done.'}) @@ -78,7 +77,6 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): res = esxdatacenter.datacenter_configured('fake_dc') self.assertDictEqual(res, {'name': 'fake_dc', 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': 'Datacenter \'proxy_dc\' ' 'already exists. Nothing to be done.'}) @@ -112,7 +110,6 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): self.assertDictEqual(res, {'name': 'fake_dc', 'changes': {'new': {'name': 'fake_dc'}}, - 'pchanges': {}, 'result': True, 'comment': 'Created datacenter \'fake_dc\'.'}) @@ -124,8 +121,7 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): res = esxdatacenter.datacenter_configured('fake_dc') self.assertDictEqual(res, {'name': 'fake_dc', - 'changes': {}, - 'pchanges': {'new': {'name': 'fake_dc'}}, + 'changes': {'new': {'name': 'fake_dc'}}, 'result': None, 'comment': 'State will create ' 'datacenter \'fake_dc\'.'}) @@ -138,7 +134,6 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): res = esxdatacenter.datacenter_configured('fake_dc') self.assertDictEqual(res, {'name': 'fake_dc', 'changes': {}, - 'pchanges': {}, 'result': True, 'comment': 'Datacenter \'fake_dc\' already ' 'exists. 
Nothing to be done.'}) @@ -154,7 +149,6 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): self.assertEqual(mock_disconnect.call_count, 0) self.assertDictEqual(res, {'name': 'fake_dc', 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': 'Error'}) @@ -169,7 +163,6 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): mock_disconnect.assert_called_once_with(self.mock_si) self.assertDictEqual(res, {'name': 'fake_dc', 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': 'Error'}) @@ -182,6 +175,5 @@ class DatacenterConfiguredTestCase(TestCase, LoaderModuleMockMixin): res = esxdatacenter.datacenter_configured('fake_dc') self.assertDictEqual(res, {'name': 'fake_dc', 'changes': {}, - 'pchanges': {}, 'result': None, 'comment': 'Error'}) diff --git a/tests/unit/states/test_file.py b/tests/unit/states/test_file.py index 8929ba4d40..c313b2f8d9 100644 --- a/tests/unit/states/test_file.py +++ b/tests/unit/states/test_file.py @@ -230,7 +230,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ' is set for creation').format(name, target) ret = return_val({'comment': comt, 'result': None, - 'pchanges': {'new': name}}) + 'changes': {'new': name}}) self.assertDictEqual(filestate.symlink(name, target, user=user, group=group), ret) @@ -258,7 +258,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): comt = ('Directory {0} for symlink is not present').format(test_dir) ret = return_val({'comment': comt, 'result': False, - 'pchanges': {'new': name}}) + 'changes': {}}) self.assertDictEqual(filestate.symlink(name, target, user=user, group=group), ret) @@ -282,7 +282,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): '{1}:{2}'.format(name, user, group)) ret = return_val({'comment': comt, 'result': True, - 'pchanges': {}}) + 'changes': {}}) self.assertDictEqual(filestate.symlink(name, target, user=user, group=group), ret) @@ -307,7 +307,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ) 
ret.update({'comment': comt, 'result': False, - 'pchanges': {'new': name}}) + 'changes': {}}) self.assertDictEqual( filestate.symlink( name, target, user=user, @@ -329,7 +329,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): 'or a file name: {0}').format('tmp/SALT') ret.update({'comment': comt, 'result': False, - 'pchanges': {'new': name}}) + 'changes': {}}) self.assertDictEqual( filestate.symlink( name, target, user=user, @@ -353,7 +353,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): comt = ('File exists where the symlink {0} should be' .format(name)) ret = return_val({'comment': comt, - 'pchanges': {'new': name}, + 'changes': {}, 'result': False}) self.assertDictEqual(filestate.symlink (name, target, user=user, @@ -373,7 +373,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch.object(os.path, 'exists', mock_f): with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'): comt = ('File exists where the symlink {0} should be'.format(name)) - ret = return_val({'comment': comt, 'result': False, 'pchanges': {'new': '/tmp/testfile.txt'}}) + ret = return_val({'comment': comt, 'result': False, 'changes': {}}) self.assertDictEqual(filestate.symlink (name, target, user=user, group=group), ret) @@ -392,7 +392,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch.object(os.path, 'exists', mock_f): with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'): comt = ('Directory exists where the symlink {0} should be'.format(name)) - ret = return_val({'comment': comt, 'result': False, 'pchanges': {'new': '/tmp/testfile.txt'}}) + ret = return_val({'comment': comt, 'result': False, 'changes': {}}) self.assertDictEqual(filestate.symlink (name, target, user=user, group=group), ret) @@ -411,7 +411,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'): comt = ('Unable to create new symlink {0} -> ' 
'{1}: '.format(name, target)) - ret = return_val({'comment': comt, 'result': False, 'pchanges': {'new': '/tmp/testfile.txt'}}) + ret = return_val({'comment': comt, 'result': False, 'changes': {}}) self.assertDictEqual(filestate.symlink (name, target, user=user, group=group), ret) @@ -433,8 +433,8 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'): comt = 'Created new symlink {0} -> {1}'.format(name, target) ret = return_val({'comment': comt, - 'result': True, 'pchanges': {'new': '/tmp/testfile.txt'}, - 'changes': {'new': name}}) + 'result': True, + 'changes': {'new': name}}) self.assertDictEqual(filestate.symlink (name, target, user=user, group=group), ret) @@ -460,7 +460,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): '{2}:{3}'.format(name, target, user, group)) ret = return_val({'comment': comt, 'result': False, - 'pchanges': {'new': '/tmp/testfile.txt'}, 'changes': {'new': name}}) self.assertDictEqual(filestate.symlink (name, target, user=user, @@ -476,7 +475,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'result': False, 'comment': '', - 'pchanges': {}, 'changes': {}} mock_t = MagicMock(return_value=True) @@ -507,17 +505,15 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret.update({'comment': comt, 'name': name, 'result': None, - 'pchanges': {'removed': '/fake/file.conf'}}) + 'changes': {'removed': '/fake/file.conf'}}) self.assertDictEqual(filestate.absent(name), ret) - ret.update({'pchanges': {}}) with patch.dict(filestate.__opts__, {'test': False}): with patch.dict(filestate.__salt__, {'file.remove': mock_file}): comt = ('Removed file {0}'.format(name)) ret.update({'comment': comt, 'result': True, - 'changes': {'removed': name}, - 'pchanges': {'removed': name}}) + 'changes': {'removed': name}}) self.assertDictEqual(filestate.absent(name), ret) comt = ('Removed file {0}'.format(name)) @@ -525,7 +521,6 @@ class 
TestFileState(TestCase, LoaderModuleMockMixin): 'result': False, 'changes': {}}) self.assertDictEqual(filestate.absent(name), ret) - ret.update({'pchanges': {}}) with patch.object(os.path, 'isfile', mock_f): with patch.object(os.path, 'isdir', mock_t): @@ -533,7 +528,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): comt = \ 'Directory {0} is set for removal'.format(name) ret.update({'comment': comt, - 'pchanges': {'removed': name}, + 'changes': {'removed': name}, 'result': None}) self.assertDictEqual(filestate.absent(name), ret) @@ -550,7 +545,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret.update({'comment': comt, 'result': False, 'changes': {}}) self.assertDictEqual(filestate.absent(name), ret) - ret.update({'pchanges': {}}) with patch.object(os.path, 'isdir', mock_f): with patch.dict(filestate.__opts__, {'test': True}): @@ -569,8 +563,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'result': False, 'comment': '', - 'changes': {}, - 'pchanges': {}} + 'changes': {}} mock_t = MagicMock(return_value=True) mock_f = MagicMock(return_value=False) @@ -606,7 +599,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): mock_f = MagicMock(return_value=False) comt = ('Must provide name to file.missing') - ret.update({'comment': comt, 'name': '', 'pchanges': {}}) + ret.update({'comment': comt, 'name': '', 'changes': {}}) self.assertDictEqual(filestate.missing(''), ret) with patch.object(os.path, 'exists', mock_t): @@ -677,7 +670,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): 'file.manage_file': mock_ex, 'cmd.run_all': mock_cmd_fail}): comt = ('Destination file name is required') - ret.update({'comment': comt, 'name': '', 'pchanges': {}}) + ret.update({'comment': comt, 'name': '', 'changes': {}}) self.assertDictEqual(filestate.managed(''), ret) with patch.object(os.path, 'isfile', mock_f): @@ -782,13 +775,12 @@ class TestFileState(TestCase, LoaderModuleMockMixin): comt = ('check_cmd execution failed') 
ret.update({'comment': comt, 'result': False, 'skip_watch': True}) - ret.pop('pchanges') self.assertDictEqual(filestate.managed (name, user=user, group=group, check_cmd='A'), ret) comt = ('check_cmd execution failed') - ret.update({'comment': True, 'pchanges': {}}) + ret.update({'comment': True, 'changes': {}}) ret.pop('skip_watch', None) self.assertDictEqual(filestate.managed (name, user=user, group=group), @@ -818,7 +810,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'result': False, 'comment': '', - 'pchanges': {}, 'changes': {}} comt = ('Must provide name to file.directory') @@ -910,12 +901,10 @@ class TestFileState(TestCase, LoaderModuleMockMixin): else: comt = ('The following files will be changed:\n{0}:' ' directory - new\n'.format(name)) - p_chg = {name: {'directory': 'new'}} ret.update({ 'comment': comt, 'result': None, - 'pchanges': p_chg, - 'changes': {} + 'changes': {name: {'directory': 'new'}} }) self.assertDictEqual(filestate.directory(name, user=user, @@ -926,7 +915,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch.object(os.path, 'isdir', mock_f): comt = ('No directory to create {0} in' .format(name)) - ret.update({'comment': comt, 'result': False, 'changes': {}}) + ret.update({'comment': comt, 'result': False}) self.assertDictEqual(filestate.directory (name, user=user, group=group), ret) @@ -945,7 +934,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): 'options "ignore_files" and ' '"ignore_dirs" at the same ' 'time.', - 'pchanges': {}}) + 'changes': {}}) with patch.object(os.path, 'isdir', mock_t): self.assertDictEqual(filestate.directory (name, user=user, @@ -973,7 +962,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'result': False, 'comment': '', - 'pchanges': {}, 'changes': {}} comt = ("'mode' is not allowed in 'file.recurse'." 
@@ -1062,7 +1050,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): 'changes': {}} comt = ('Must provide name to file.replace') - ret.update({'comment': comt, 'name': '', 'pchanges': {}}) + ret.update({'comment': comt, 'name': '', 'changes': {}}) self.assertDictEqual(filestate.replace('', pattern, repl), ret) mock_t = MagicMock(return_value=True) @@ -1096,7 +1084,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'result': False, 'comment': '', - 'pchanges': {}, 'changes': {}} comt = ('Must provide name to file.blockreplace') @@ -1116,8 +1103,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch.dict(filestate.__opts__, {'test': True}): comt = ('Changes would be made') ret.update({'comment': comt, 'result': None, - 'changes': {'diff': True}, - 'pchanges': {'diff': True}}) + 'changes': {'diff': True}}) self.assertDictEqual(filestate.blockreplace(name), ret) # 'comment' function tests: 1 @@ -1133,7 +1119,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'result': False, 'comment': '', - 'pchanges': {}, 'changes': {}} comt = ('Must provide name to file.comment') @@ -1164,14 +1149,15 @@ class TestFileState(TestCase, LoaderModuleMockMixin): 'file.comment_line': mock_t}): with patch.dict(filestate.__opts__, {'test': True}): comt = ('File {0} is set to be updated'.format(name)) - ret.update({'comment': comt, 'result': None, 'pchanges': {name: 'updated'}}) + ret.update({'comment': comt, 'result': None, 'changes': {name: 'updated'}}) self.assertDictEqual(filestate.comment(name, regex), ret) with patch.dict(filestate.__opts__, {'test': False}): with patch.object(salt.utils.files, 'fopen', MagicMock(mock_open())): comt = ('Commented lines successfully') - ret.update({'comment': comt, 'result': True}) + ret.update({'comment': comt, 'result': True, + 'changes': {}}) self.assertDictEqual(filestate.comment(name, regex), ret) @@ -1186,7 +1172,6 @@ class TestFileState(TestCase, 
LoaderModuleMockMixin): regex = 'bind 127.0.0.1' ret = {'name': name, - 'pchanges': {}, 'result': False, 'comment': '', 'changes': {}} @@ -1219,14 +1204,16 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch.dict(filestate.__opts__, {'test': True}): comt = ('File {0} is set to be updated'.format(name)) - ret.update({'comment': comt, 'result': None, 'pchanges': {name: 'updated'}, }) + ret.update({'comment': comt, 'result': None, + 'changes': {name: 'updated'}}) self.assertDictEqual(filestate.uncomment(name, regex), ret) with patch.dict(filestate.__opts__, {'test': False}): with patch.object(salt.utils.files, 'fopen', MagicMock(mock_open())): comt = ('Uncommented lines successfully') - ret.update({'comment': comt, 'result': True}) + ret.update({'comment': comt, 'result': True, + 'changes': {}}) self.assertDictEqual(filestate.uncomment(name, regex), ret) # 'prepend' function tests: 1 @@ -1246,7 +1233,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'result': False, 'comment': '', - 'pchanges': {}, 'changes': {}} comt = ('Must provide name to file.prepend') @@ -1269,24 +1255,23 @@ class TestFileState(TestCase, LoaderModuleMockMixin): 'file.prepend': mock_t}): comt = ('The following files will be changed:\n/tmp/etc:' ' directory - new\n') - pchanges = {'/tmp/etc': {'directory': 'new'}} + changes = {'/tmp/etc': {'directory': 'new'}} if salt.utils.platform.is_windows(): comt = 'The directory "c:\\tmp\\etc" will be changed' - pchanges = {'c:\\tmp\\etc': {'directory': 'new'}} - ret.update({'comment': comt, 'name': name, 'pchanges': pchanges}) + changes = {'c:\\tmp\\etc': {'directory': 'new'}} + ret.update({'comment': comt, 'name': name, 'changes': changes}) self.assertDictEqual(filestate.prepend(name, makedirs=True), ret) with patch.object(os.path, 'isabs', mock_f): comt = ('Specified file {0} is not an absolute path' .format(name)) - ret.update({'comment': comt, 'pchanges': {}}) + ret.update({'comment': comt, 'changes': {}}) 
self.assertDictEqual(filestate.prepend(name), ret) with patch.object(os.path, 'isabs', mock_t): with patch.object(os.path, 'exists', mock_t): comt = ("Failed to load template file {0}".format(source)) - ret.pop('pchanges') ret.update({'comment': comt, 'name': source, 'data': []}) self.assertDictEqual(filestate.prepend(name, source=source), ret) @@ -1300,8 +1285,9 @@ class TestFileState(TestCase, LoaderModuleMockMixin): change = {'diff': 'Replace binary file'} comt = ('File {0} is set to be updated' .format(name)) - ret.update({'comment': comt, 'result': None, - 'changes': change, 'pchanges': {}}) + ret.update({'comment': comt, + 'result': None, + 'changes': change}) self.assertDictEqual(filestate.prepend (name, text=text), ret) @@ -1342,14 +1328,18 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch.object(os.path, 'exists', mock_f): with patch.dict(filestate.__opts__, {'test': True}): comt = ('File {0} is set to be created'.format(name)) - ret.update({'comment': comt, 'result': None}) + ret.update({'comment': comt, + 'result': None, + 'changes': {'new': name}}) self.assertDictEqual(filestate.touch(name), ret) with patch.dict(filestate.__opts__, {'test': False}): with patch.object(os.path, 'isdir', mock_f): comt = ('Directory not present to touch file {0}' .format(name)) - ret.update({'comment': comt, 'result': False}) + ret.update({'comment': comt, + 'result': False, + 'changes': {}}) self.assertDictEqual(filestate.touch(name), ret) with patch.object(os.path, 'isdir', mock_t): @@ -1500,7 +1490,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): with patch.object(os.path, 'lexists', mock_lex): comt = ('The target file "{0}" exists and will not be ' 'overwritten'.format(name)) - ret.update({'comment': comt, 'result': False}) + ret.update({'comment': comt, 'result': True}) self.assertDictEqual(filestate.rename(name, source), ret) mock_lex = MagicMock(side_effect=[True, True, True]) @@ -1819,7 +1809,6 @@ class TestFileState(TestCase, 
LoaderModuleMockMixin): expected_ret = { 'name': fake_name, 'changes': {'retained': [], 'deleted': [], 'ignored': []}, - 'pchanges': {'retained': [], 'deleted': [], 'ignored': []}, 'result': True, 'comment': 'Name provided to file.retention must be a directory', } @@ -1865,8 +1854,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin): deleted_files = sorted(list(set(fake_file_list) - retained_files - set(ignored_files)), reverse=True) retained_files = sorted(list(retained_files), reverse=True) - changes = {'retained': retained_files, 'deleted': deleted_files, 'ignored': ignored_files} - expected_ret['pchanges'] = changes + expected_ret['changes'] = {'retained': retained_files, 'deleted': deleted_files, 'ignored': ignored_files} if test: expected_ret['result'] = None expected_ret['comment'] = ('{0} backups would have been removed from {1}.\n' @@ -1874,7 +1862,6 @@ class TestFileState(TestCase, LoaderModuleMockMixin): else: expected_ret['comment'] = ('{0} backups were removed from {1}.\n' ''.format(len(deleted_files), fake_name)) - expected_ret['changes'] = changes mock_remove.assert_has_calls( [call(os.path.join(fake_name, x)) for x in deleted_files], any_order=True @@ -1957,7 +1944,6 @@ class TestFileTidied(TestCase): 'test/file3' ], }, - 'pchanges': {}, 'result': True, 'comment': 'Removed 3 files or directories from directory /test/', } @@ -1985,7 +1971,6 @@ class TestFileTidied(TestCase): 'test/test2' ] }, - 'pchanges': {}, 'result': True, 'comment': 'Removed 6 files or directories from directory /test/', } @@ -1996,7 +1981,6 @@ class TestFileTidied(TestCase): exp = { 'name': 'test/', 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': 'Specified file test/ is not an absolute path', } @@ -2004,7 +1988,6 @@ class TestFileTidied(TestCase): exp = { 'name': '/bad-directory-name/', 'changes': {}, - 'pchanges': {}, 'result': False, 'comment': '/bad-directory-name/ does not exist or is not a directory.', } diff --git a/tests/unit/states/test_kernelpkg.py 
b/tests/unit/states/test_kernelpkg.py index f2ba87ecee..4a81aacbf4 100644 --- a/tests/unit/states/test_kernelpkg.py +++ b/tests/unit/states/test_kernelpkg.py @@ -114,22 +114,28 @@ class KernelPkgTestCase(TestCase, LoaderModuleMockMixin): Test - latest_active when a new kernel is available ''' reboot = MagicMock(return_value=True) - with patch.dict(kernelpkg.__salt__, {'kernelpkg.needs_reboot': reboot}): - with patch.dict(kernelpkg.__opts__, {'test': False}): - kernelpkg.__salt__['system.reboot'].reset_mock() - ret = kernelpkg.latest_active(name=STATE_NAME) - self.assertEqual(ret['name'], STATE_NAME) - self.assertTrue(ret['result']) - self.assertIsInstance(ret['changes'], dict) - self.assertIsInstance(ret['comment'], six.text_type) - self.assert_called_once(kernelpkg.__salt__['system.reboot']) + latest = MagicMock(return_value=1) + with patch.dict( + kernelpkg.__salt__, {'kernelpkg.needs_reboot': reboot, + 'kernelpkg.latest_installed': latest}), \ + patch.dict(kernelpkg.__opts__, {'test': False}): + kernelpkg.__salt__['system.reboot'].reset_mock() + ret = kernelpkg.latest_active(name=STATE_NAME) + self.assertEqual(ret['name'], STATE_NAME) + self.assertTrue(ret['result']) + self.assertIsInstance(ret['changes'], dict) + self.assertIsInstance(ret['comment'], six.text_type) + self.assert_called_once(kernelpkg.__salt__['system.reboot']) with patch.dict(kernelpkg.__opts__, {'test': True}): kernelpkg.__salt__['system.reboot'].reset_mock() ret = kernelpkg.latest_active(name=STATE_NAME) self.assertEqual(ret['name'], STATE_NAME) self.assertIsNone(ret['result']) - self.assertDictEqual(ret['changes'], {}) + self.assertDictEqual( + ret['changes'], + {'kernel': {'new': 1, 'old': 0}} + ) self.assertIsInstance(ret['comment'], six.text_type) kernelpkg.__salt__['system.reboot'].assert_not_called() diff --git a/tests/unit/states/test_linux_acl.py b/tests/unit/states/test_linux_acl.py index 309b09ab34..bae747f4b0 100644 --- a/tests/unit/states/test_linux_acl.py +++ 
b/tests/unit/states/test_linux_acl.py @@ -69,13 +69,12 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): ''.format(acl_name, perms)) ret = {'name': name, 'comment': comt, - 'changes': {}, - 'pchanges': {'new': {'acl_name': acl_name, - 'acl_type': acl_type, - 'perms': perms}, - 'old': {'acl_name': acl_name, - 'acl_type': acl_type, - 'perms': 'A'}}, + 'changes': {'new': {'acl_name': acl_name, + 'acl_type': acl_type, + 'perms': perms}, + 'old': {'acl_name': acl_name, + 'acl_type': acl_type, + 'perms': 'A'}}, 'result': None} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, @@ -92,7 +91,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): 'old': {'acl_name': acl_name, 'acl_type': acl_type, 'perms': 'A'}}, - 'pchanges': {}, 'result': True} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, perms), @@ -106,7 +104,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'comment': comt, 'changes': {}, - 'pchanges': {}, 'result': False} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, perms), @@ -118,10 +115,9 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): 'for {0}: {1}'.format(acl_name, perms)) ret = {'name': name, 'comment': comt, - 'changes': {}, - 'pchanges': {'new': {'acl_name': acl_name, - 'acl_type': acl_type, - 'perms': perms}}, + 'changes': {'new': {'acl_name': acl_name, + 'acl_type': acl_type, + 'perms': perms}}, 'result': None} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, perms), @@ -135,7 +131,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): 'changes': {'new': {'acl_name': acl_name, 'acl_type': acl_type, 'perms': perms}}, - 'pchanges': {}, 'result': True} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, perms), @@ -149,7 +144,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'comment': comt, 'changes': {}, - 'pchanges': {}, 'result': False} 
self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, perms), @@ -163,13 +157,12 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): ''.format(acl_name, perms)) ret = {'name': name, 'comment': comt, - 'changes': {}, - 'pchanges': {'new': {'acl_name': acl_name, - 'acl_type': acl_type, - 'perms': perms}, - 'old': {'acl_name': acl_name, - 'acl_type': acl_type, - 'perms': '7'}}, + 'changes': {'new': {'acl_name': acl_name, + 'acl_type': acl_type, + 'perms': perms}, + 'old': {'acl_name': acl_name, + 'acl_type': acl_type, + 'perms': '7'}}, 'result': None} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, @@ -183,7 +176,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): ret = {'name': name, 'comment': comt, 'changes': {}, - 'pchanges': {}, 'result': True} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, @@ -191,8 +183,7 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): # No acl type comt = ('ACL Type does not exist') - ret = {'name': name, 'comment': comt, 'result': False, - 'changes': {}, 'pchanges': {}} + ret = {'name': name, 'comment': comt, 'result': False, 'changes': {}} self.assertDictEqual(linux_acl.present(name, acl_type, acl_name, perms), ret) @@ -268,13 +259,12 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): ''.format(acl_names, perms)) expected = {'name': name, 'comment': comt, - 'changes': {}, - 'pchanges': {'new': {'acl_name': ', '.join(acl_names), - 'acl_type': acl_type, - 'perms': 7}, - 'old': {'acl_name': ', '.join(acl_names), - 'acl_type': acl_type, - 'perms': 'A'}}, + 'changes': {'new': {'acl_name': ', '.join(acl_names), + 'acl_type': acl_type, + 'perms': 7}, + 'old': {'acl_name': ', '.join(acl_names), + 'acl_type': acl_type, + 'perms': 'A'}}, 'result': None} ret = linux_acl.list_present(name, acl_type, acl_names, perms) @@ -289,7 +279,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): 'changes': {'new': {'acl_name': ', '.join(acl_names), 'acl_type': acl_type, 
'perms': 'rwx'}}, - 'pchanges': {}, 'result': True} ret = linux_acl.list_present(name, acl_type, acl_names, perms) @@ -304,7 +293,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): expected = {'name': name, 'comment': comt, 'changes': {}, - 'pchanges': {}, 'result': False} ret = linux_acl.list_present(name, acl_type, acl_names, perms) @@ -317,10 +305,9 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): 'for {0}: {1}'.format(acl_names, perms)) expected = {'name': name, 'comment': comt, - 'changes': {}, - 'pchanges': {'new': {'acl_name': ', '.join(acl_names), - 'acl_type': acl_type, - 'perms': perms}}, + 'changes': {'new': {'acl_name': ', '.join(acl_names), + 'acl_type': acl_type, + 'perms': perms}}, 'result': None} ret = linux_acl.list_present(name, acl_type, acl_names, perms) @@ -335,7 +322,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): 'changes': {'new': {'acl_name': ', '.join(acl_names), 'acl_type': acl_type, 'perms': perms}}, - 'pchanges': {}, 'result': True} ret = linux_acl.list_present(name, acl_type, acl_names, perms) self.assertDictEqual(expected, ret) @@ -349,7 +335,6 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): expected = {'name': name, 'comment': comt, 'changes': {}, - 'pchanges': {}, 'result': False} ret = linux_acl.list_present(name, acl_type, acl_names, perms) @@ -357,8 +342,7 @@ class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): # No acl type comt = ('ACL Type does not exist') - expected = {'name': name, 'comment': comt, 'result': False, - 'changes': {}, 'pchanges': {}} + expected = {'name': name, 'comment': comt, 'result': False, 'changes': {}} ret = linux_acl.list_present(name, acl_type, acl_names, perms) self.assertDictEqual(expected, ret) diff --git a/tests/unit/states/test_saltmod.py b/tests/unit/states/test_saltmod.py index 3b214d6fe6..bb2d93a0bb 100644 --- a/tests/unit/states/test_saltmod.py +++ b/tests/unit/states/test_saltmod.py @@ -175,13 +175,11 @@ class SaltmodTestCase(TestCase, 
LoaderModuleMockMixin): name = 'state' tgt = 'larry' - comt = ('Function state will be executed' - ' on target {0} as test=False'.format(tgt)) - ret = {'name': name, 'changes': {}, 'result': None, - 'comment': comt} + 'comment': 'Function state would be executed ' + 'on target {0}'.format(tgt)} with patch.dict(saltmod.__opts__, {'test': True}): self.assertDictEqual(saltmod.function(name, tgt), ret) diff --git a/tests/unit/test_loader.py b/tests/unit/test_loader.py index 30c8bcaba0..4f2aca8499 100644 --- a/tests/unit/test_loader.py +++ b/tests/unit/test_loader.py @@ -283,6 +283,31 @@ class LazyLoaderWhitelistTest(TestCase): self.assertNotIn('grains.get', self.loader) +class LazyLoaderGrainsBlacklistTest(TestCase): + ''' + Test the loader of grains with a blacklist + ''' + def setUp(self): + self.opts = salt.config.minion_config(None) + + def tearDown(self): + del self.opts + + def test_whitelist(self): + opts = copy.deepcopy(self.opts) + opts['grains_blacklist'] = [ + 'master', + 'os*', + 'ipv[46]' + ] + + grains = salt.loader.grains(opts) + self.assertNotIn('master', grains) + self.assertNotIn('os', set([g[:2] for g in list(grains)])) + self.assertNotIn('ipv4', grains) + self.assertNotIn('ipv6', grains) + + class LazyLoaderSingleItem(TestCase): ''' Test loading a single item via the _load() function diff --git a/tests/unit/utils/test_data.py b/tests/unit/utils/test_data.py index 78f4144f9f..030f22b202 100644 --- a/tests/unit/utils/test_data.py +++ b/tests/unit/utils/test_data.py @@ -144,6 +144,36 @@ class DataTestCase(TestCase): ) ) + def test_subdict_match_with_wildcards(self): + ''' + Tests subdict matching when wildcards are used in the expression + ''' + data = { + 'a': { + 'b': { + 'ç': 'd', + 'é': ['eff', 'gee', '8ch'], + 'ĩ': {'j': 'k'} + } + } + } + assert salt.utils.data.subdict_match(data, '*:*:*:*') + assert salt.utils.data.subdict_match(data, 'a:*:*:*') + assert salt.utils.data.subdict_match(data, 'a:b:*:*') + assert salt.utils.data.subdict_match(data, 
'a:b:ç:*') + assert salt.utils.data.subdict_match(data, 'a:b:*:d') + assert salt.utils.data.subdict_match(data, 'a:*:ç:d') + assert salt.utils.data.subdict_match(data, '*:b:ç:d') + assert salt.utils.data.subdict_match(data, '*:*:ç:d') + assert salt.utils.data.subdict_match(data, '*:*:*:d') + assert salt.utils.data.subdict_match(data, 'a:*:*:d') + assert salt.utils.data.subdict_match(data, 'a:b:*:ef*') + assert salt.utils.data.subdict_match(data, 'a:b:*:g*') + assert salt.utils.data.subdict_match(data, 'a:b:*:j:*') + assert salt.utils.data.subdict_match(data, 'a:b:*:j:k') + assert salt.utils.data.subdict_match(data, 'a:b:*:*:k') + assert salt.utils.data.subdict_match(data, 'a:b:*:*:*') + def test_traverse_dict(self): test_two_level_dict = {'foo': {'bar': 'baz'}} diff --git a/tests/unit/utils/test_jinja.py b/tests/unit/utils/test_jinja.py index 1748197936..e4be1f9ce0 100644 --- a/tests/unit/utils/test_jinja.py +++ b/tests/unit/utils/test_jinja.py @@ -984,6 +984,10 @@ class TestCustomExtensions(TestCase): dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) self.assertEqual(rendered, 'False') + rendered = render_jinja_tmpl("{{ 'fe80::20d:b9ff:fe01:ea8%eth0' | is_ipv6 }}", + dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) + self.assertEqual(rendered, 'True') + rendered = render_jinja_tmpl("{{ 'FE80::' | is_ipv6 }}", dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) self.assertEqual(rendered, 'True') diff --git a/tests/unit/utils/test_mac_utils.py b/tests/unit/utils/test_mac_utils.py index 445a63b777..c1dcee6625 100644 --- a/tests/unit/utils/test_mac_utils.py +++ b/tests/unit/utils/test_mac_utils.py @@ -9,7 +9,7 @@ import os # Import Salt Testing Libs from tests.support.unit import TestCase, skipIf -from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON, call +from tests.support.mock import MagicMock, patch, NO_MOCK, NO_MOCK_REASON, call, mock_open from tests.support.mixins import LoaderModuleMockMixin # 
Import Salt libs @@ -215,18 +215,22 @@ class MacUtilsTestCase(TestCase, LoaderModuleMockMixin): @patch('salt.utils.path.os_walk') @patch('os.path.exists') - @patch('plistlib.readPlist') + @patch('plistlib.readPlist' if six.PY2 else 'plistlib.load') def test_available_services(self, mock_read_plist, mock_exists, mock_os_walk): ''' test available_services ''' - mock_os_walk.side_effect = [ - [('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])], - [('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])], - [('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])], - [('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])], - ] + def walk_side_effect(*args, **kwargs): + path = args[0] + results = { + '/Library/LaunchAgents': ['com.apple.lla1.plist', 'com.apple.lla2.plist'], + '/Library/LaunchDaemons': ['com.apple.lld1.plist', 'com.apple.lld2.plist'], + '/System/Library/LaunchAgents': ['com.apple.slla1.plist', 'com.apple.slla2.plist'], + '/System/Library/LaunchDaemons': ['com.apple.slld1.plist', 'com.apple.slld2.plist']} + files = results.get(path, []) + return [(path, [], files)] + mock_os_walk.side_effect = walk_side_effect mock_read_plist.side_effect = [ MagicMock(Label='com.apple.lla1'), MagicMock(Label='com.apple.lla2'), @@ -239,7 +243,16 @@ class MacUtilsTestCase(TestCase, LoaderModuleMockMixin): ] mock_exists.return_value = True - ret = mac_utils._available_services() + + if six.PY3: + # Py3's plistlib.load does not handle opening and closing a + # file, unlike py2's plistlib.readPlist. Therefore, we have + # to patch open for py3 since we're using it in addition + # to the plistlib.load call. 
+ with patch('salt.utils.files.fopen', mock_open()): + ret = mac_utils._available_services() + else: + ret = mac_utils._available_services() # Make sure it's a dict with 8 items self.assertTrue(isinstance(ret, dict)) @@ -265,17 +278,22 @@ class MacUtilsTestCase(TestCase, LoaderModuleMockMixin): @patch('salt.utils.path.os_walk') @patch('os.path.exists') - @patch('plistlib.readPlist') + @patch('plistlib.readPlist' if six.PY2 else 'plistlib.load') def test_available_services_broken_symlink(self, mock_read_plist, mock_exists, mock_os_walk): ''' test available_services ''' - mock_os_walk.side_effect = [ - [('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])], - [('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])], - [('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])], - [('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])], - ] + def walk_side_effect(*args, **kwargs): + path = args[0] + results = { + '/Library/LaunchAgents': ['com.apple.lla1.plist', 'com.apple.lla2.plist'], + '/Library/LaunchDaemons': ['com.apple.lld1.plist', 'com.apple.lld2.plist'], + '/System/Library/LaunchAgents': ['com.apple.slla1.plist', 'com.apple.slla2.plist'], + '/System/Library/LaunchDaemons': ['com.apple.slld1.plist', 'com.apple.slld2.plist']} + files = results.get(path, []) + return [(path, [], files)] + + mock_os_walk.side_effect = walk_side_effect mock_read_plist.side_effect = [ MagicMock(Label='com.apple.lla1'), @@ -287,7 +305,15 @@ class MacUtilsTestCase(TestCase, LoaderModuleMockMixin): ] mock_exists.side_effect = [True, True, True, True, False, False, True, True] - ret = mac_utils._available_services() + if six.PY3: + # Py3's plistlib.load does not handle opening and closing a + # file, unlike py2's plistlib.readPlist. Therefore, we have + # to patch open for py3 since we're using it in addition + # to the plistlib.load call. 
+ with patch('salt.utils.files.fopen', mock_open()): + ret = mac_utils._available_services() + else: + ret = mac_utils._available_services() # Make sure it's a dict with 6 items self.assertTrue(isinstance(ret, dict)) @@ -325,12 +351,17 @@ class MacUtilsTestCase(TestCase, LoaderModuleMockMixin): ''' test available_services ''' - mock_os_walk.side_effect = [ - [('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])], - [('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])], - [('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])], - [('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])], - ] + def walk_side_effect(*args, **kwargs): + path = args[0] + results = { + '/Library/LaunchAgents': ['com.apple.lla1.plist', 'com.apple.lla2.plist'], + '/Library/LaunchDaemons': ['com.apple.lld1.plist', 'com.apple.lld2.plist'], + '/System/Library/LaunchAgents': ['com.apple.slla1.plist', 'com.apple.slla2.plist'], + '/System/Library/LaunchDaemons': ['com.apple.slld1.plist', 'com.apple.slld2.plist']} + files = results.get(path, []) + return [(path, [], files)] + + mock_os_walk.side_effect = walk_side_effect attrs = {'cmd.run': MagicMock(return_value='')} def getitem(name): @@ -410,12 +441,17 @@ class MacUtilsTestCase(TestCase, LoaderModuleMockMixin): ''' test available_services ''' - mock_os_walk.side_effect = [ - [('/Library/LaunchAgents', [], ['com.apple.lla1.plist', 'com.apple.lla2.plist'])], - [('/Library/LaunchDaemons', [], ['com.apple.lld1.plist', 'com.apple.lld2.plist'])], - [('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])], - [('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])], - ] + def walk_side_effect(*args, **kwargs): + path = args[0] + results = { + '/Library/LaunchAgents': ['com.apple.lla1.plist', 'com.apple.lla2.plist'], + '/Library/LaunchDaemons': 
['com.apple.lld1.plist', 'com.apple.lld2.plist'], + '/System/Library/LaunchAgents': ['com.apple.slla1.plist', 'com.apple.slla2.plist'], + '/System/Library/LaunchDaemons': ['com.apple.slld1.plist', 'com.apple.slld2.plist']} + files = results.get(path, []) + return [(path, [], files)] + + mock_os_walk.side_effect = walk_side_effect attrs = {'cmd.run': MagicMock(return_value='')} def getitem(name): diff --git a/tests/unit/utils/test_state.py b/tests/unit/utils/test_state.py index d076e7d004..0f356c59e7 100644 --- a/tests/unit/utils/test_state.py +++ b/tests/unit/utils/test_state.py @@ -527,56 +527,6 @@ class UtilStateMergeSubreturnTestcase(TestCase): 'alarms': secondary_changes, }) - def test_merge_pchanges(self): - primary_pchanges = {'old': None, 'new': 'my_resource'} - secondary_pchanges = {'old': None, 'new': ['alarm-1', 'alarm-2']} - - # Neither main nor sub pchanges case - m = copy.deepcopy(self.main_ret) - s = copy.deepcopy(self.sub_ret) - res = salt.utils.state.merge_subreturn(m, s) - self.assertNotIn('pchanges', res) - - # No main pchanges, sub pchanges - m = copy.deepcopy(self.main_ret) - s = copy.deepcopy(self.sub_ret) - s['pchanges'] = copy.deepcopy(secondary_pchanges) - res = salt.utils.state.merge_subreturn(m, s) - self.assertDictEqual(res['pchanges'], { - 'secondary': secondary_pchanges - }) - - # Main pchanges, no sub pchanges - m = copy.deepcopy(self.main_ret) - m['pchanges'] = copy.deepcopy(primary_pchanges) - s = copy.deepcopy(self.sub_ret) - res = salt.utils.state.merge_subreturn(m, s) - self.assertDictEqual(res['pchanges'], primary_pchanges) - - # Both main and sub pchanges, new pchanges don't affect existing ones - m = copy.deepcopy(self.main_ret) - m['pchanges'] = copy.deepcopy(primary_pchanges) - s = copy.deepcopy(self.sub_ret) - s['pchanges'] = copy.deepcopy(secondary_pchanges) - res = salt.utils.state.merge_subreturn(m, s) - self.assertDictEqual(res['pchanges'], { - 'old': None, - 'new': 'my_resource', - 'secondary': secondary_pchanges, - }) 
- - # The subkey parameter is respected - m = copy.deepcopy(self.main_ret) - m['pchanges'] = copy.deepcopy(primary_pchanges) - s = copy.deepcopy(self.sub_ret) - s['pchanges'] = copy.deepcopy(secondary_pchanges) - res = salt.utils.state.merge_subreturn(m, s, subkey='alarms') - self.assertDictEqual(res['pchanges'], { - 'old': None, - 'new': 'my_resource', - 'alarms': secondary_pchanges, - }) - def test_merge_comments(self): main_comment_1 = 'First primary comment.' main_comment_2 = 'Second primary comment.' diff --git a/tests/unit/utils/test_win_dacl.py b/tests/unit/utils/test_win_dacl.py index 0cee578021..eb080c7b81 100644 --- a/tests/unit/utils/test_win_dacl.py +++ b/tests/unit/utils/test_win_dacl.py @@ -271,7 +271,6 @@ class WinDaclRegTestCase(TestCase, LoaderModuleMockMixin): 'write_owner']}}}, 'comment': '', 'name': self.obj_name, - 'pchanges': {'perms': {}}, 'result': True} self.assertDictEqual(result, expected) @@ -318,7 +317,7 @@ class WinDaclRegTestCase(TestCase, LoaderModuleMockMixin): ''' Test the check_perms function ''' - with patch.dict(win_dacl.__opts__, {"test": True}): + with patch.dict(win_dacl.__opts__, {'test': True}): result = win_dacl.check_perms( obj_name=self.obj_name, obj_type=self.obj_type, @@ -334,16 +333,15 @@ class WinDaclRegTestCase(TestCase, LoaderModuleMockMixin): reset=False) expected = { - 'changes': {'perms': {}}, 'comment': '', 'name': self.obj_name, - 'pchanges': {'owner': 'Users', - 'perms': {'Backup Operators': {'grant': 'read', - 'deny': ['delete']}, - 'NETWORK SERVICE': {'deny': ['delete', - 'set_value', - 'write_dac', - 'write_owner']}}}, + 'changes': {'owner': 'Users', + 'perms': {'Backup Operators': {'grant': 'read', + 'deny': ['delete']}, + 'NETWORK SERVICE': {'deny': ['delete', + 'set_value', + 'write_dac', + 'write_owner']}}}, 'result': None} self.assertDictEqual(result, expected) @@ -573,7 +571,6 @@ class WinDaclFileTestCase(TestCase, LoaderModuleMockMixin): 'write_data']}}}, 'comment': '', 'name': self.obj_name, - 
'pchanges': {'perms': {}}, 'result': True} self.assertDictEqual(result, expected) @@ -636,16 +633,15 @@ class WinDaclFileTestCase(TestCase, LoaderModuleMockMixin): reset=False) expected = { - 'changes': {'perms': {}}, 'comment': '', 'name': self.obj_name, - 'pchanges': {'owner': 'Users', - 'perms': {'Backup Operators': {'grant': 'read', - 'deny': ['delete']}, - 'NETWORK SERVICE': {'deny': ['delete', - 'set_value', - 'write_dac', - 'write_owner']}}}, + 'changes': {'owner': 'Users', + 'perms': {'Backup Operators': {'grant': 'read', + 'deny': ['delete']}, + 'NETWORK SERVICE': {'deny': ['delete', + 'set_value', + 'write_dac', + 'write_owner']}}}, 'result': None} self.assertDictEqual(result, expected)