diff --git a/.ci/docs b/.ci/docs index 24f6f469e3..a8ce2aada0 100644 --- a/.ci/docs +++ b/.ci/docs @@ -1,8 +1,11 @@ pipeline { - agent { label 'docs' } + agent { + label 'docs' + } options { timestamps() ansiColor('xterm') + timeout(time: 2, unit: 'HOURS') } environment { PYENV_ROOT = "/usr/local/pyenv" diff --git a/.ci/kitchen-centos7-py2 b/.ci/kitchen-centos7-py2 index 5cc9984cf0..44fbf8b87f 100644 --- a/.ci/kitchen-centos7-py2 +++ b/.ci/kitchen-centos7-py2 @@ -1,73 +1,80 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py2" - TEST_PLATFORM = "centos-7" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py2', + 'TEST_PLATFORM=centos-7', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle 
exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-centos7-py3 b/.ci/kitchen-centos7-py3 index 120bd3bee9..de5e344a97 100644 --- a/.ci/kitchen-centos7-py3 +++ b/.ci/kitchen-centos7-py3 @@ -1,73 +1,80 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py3" - TEST_PLATFORM = "centos-7" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + 
withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py3', + 'TEST_PLATFORM=centos-7', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 
'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-ubuntu1604-py2 b/.ci/kitchen-ubuntu1604-py2 index f4c8277cec..3ba95ebf5d 100644 --- a/.ci/kitchen-ubuntu1604-py2 +++ b/.ci/kitchen-ubuntu1604-py2 @@ -1,73 +1,80 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py2" - TEST_PLATFORM = "ubuntu-1604" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py2', + 'TEST_PLATFORM=ubuntu-1604', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + 
stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if ( currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-ubuntu1604-py3 b/.ci/kitchen-ubuntu1604-py3 index c26f466efa..84c127de9b 100644 --- a/.ci/kitchen-ubuntu1604-py3 +++ b/.ci/kitchen-ubuntu1604-py3 @@ -1,73 +1,80 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py3" - TEST_PLATFORM = "ubuntu-1604" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 
'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py3', + 'TEST_PLATFORM=ubuntu-1604', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-windows2016-py2 
b/.ci/kitchen-windows2016-py2 index d0e7320975..886fa4cbd9 100644 --- a/.ci/kitchen-windows2016-py2 +++ b/.ci/kitchen-windows2016-py2 @@ -1,73 +1,80 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py2" - TEST_PLATFORM = "windows-2016" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py2', + 'TEST_PLATFORM=windows-2016', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', 
accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/kitchen-windows2016-py3 b/.ci/kitchen-windows2016-py3 index 11c3661447..a85b57b399 100644 --- a/.ci/kitchen-windows2016-py3 +++ b/.ci/kitchen-windows2016-py3 @@ -1,73 +1,80 @@ -pipeline { - agent { label 'kitchen-slave' } - options { - timestamps() - ansiColor('xterm') - } - environment { - SALT_KITCHEN_PLATFORMS = "/var/jenkins/workspace/platforms.yml" - SALT_KITCHEN_DRIVER = "/var/jenkins/workspace/driver.yml" - PATH = "/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin" - RBENV_VERSION = "2.4.2" - TEST_SUITE = "py3" - TEST_PLATFORM = "windows-2016" - PY_COLORS = 1 - } - stages { - stage('github-pending') { - steps { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", - status: 'PENDING', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } - stage('setup') { - steps { - sh 'bundle install --with ec2 windows --without opennebula docker' - } - } - stage('run kitchen') { - steps { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' - sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' +timeout(time: 6, unit: 'HOURS') { + node('kitchen-slave') { + timestamps { + ansiColor('xterm') { + withEnv([ + 'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml', + 'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml', + 'PATH=/usr/local/rbenv/shims/:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin', + 'RBENV_VERSION=2.4.2', + 'TEST_SUITE=py3', + 
'TEST_PLATFORM=windows-2016', + 'PY_COLORS=1', + ]) { + stage('checkout-scm') { + cleanWs notFailBuild: true + checkout scm } - }} - } - post { - always { - script { withCredentials([[$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY']]) { - sshagent(credentials: ['jenkins-testing-ssh-key']) { - sh 'ssh-add ~/.ssh/jenkins-testing.pem' - sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + try { + stage('github-pending') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "running ${TEST_SUITE}-${TEST_PLATFORM}...", + status: 'PENDING', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" } - }} - archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' - archiveArtifacts artifacts: 'artifacts/logs/minion' - archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + stage('setup-bundle') { + sh 'bundle install --with ec2 windows --without opennebula docker' + } + try { + stage('run kitchen') { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM || bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM' + sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM' + } + } + } + } finally { + stage('cleanup kitchen') { + script { + withCredentials([ + [$class: 'AmazonWebServicesCredentialsBinding', accessKeyVariable: 'AWS_ACCESS_KEY_ID', credentialsId: 'AWS_ACCESS_KEY_ID', secretKeyVariable: 'AWS_SECRET_ACCESS_KEY'] + ]) { + sshagent(credentials: ['jenkins-testing-ssh-key']) { + sh 'ssh-add ~/.ssh/jenkins-testing.pem' + sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM' + } + } + } + archiveArtifacts artifacts: 'artifacts/xml-unittests-output/*.xml' + archiveArtifacts artifacts: 'artifacts/logs/minion' + archiveArtifacts artifacts: 'artifacts/logs/salt-runtests.log' + } + } + } finally { + try { + junit 'artifacts/xml-unittests-output/*.xml' + } finally { + cleanWs notFailBuild: true + def currentResult = currentBuild.result ?: 'SUCCESS' + if (currentResult == 'SUCCESS') { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", + status: 'SUCCESS', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } else { + githubNotify credentialsId: 'test-jenkins-credentials', + description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", + status: 'FAILURE', + context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" + } + } + } } } } } - post { - always { - junit 'artifacts/xml-unittests-output/*.xml' - cleanWs() - } - success { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has passed", - status: 'SUCCESS', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - failure { - githubNotify credentialsId: 'test-jenkins-credentials', - description: "The ${TEST_SUITE}-${TEST_PLATFORM} job has failed", - status: 'FAILURE', - context: "jenkins/pr/${TEST_SUITE}-${TEST_PLATFORM}" - } - } } diff --git a/.ci/lint b/.ci/lint index 885174b0bf..28cea02139 100644 --- a/.ci/lint +++ b/.ci/lint @@ -3,6 +3,7 @@ pipeline { options { timestamps() ansiColor('xterm') + timeout(time: 1, unit: 'HOURS') } 
environment { PYENV_ROOT = "/usr/local/pyenv" diff --git a/salt/_compat.py b/salt/_compat.py index 9b10646ace..0576210afc 100644 --- a/salt/_compat.py +++ b/salt/_compat.py @@ -2,18 +2,21 @@ ''' Salt compatibility code ''' -# pylint: disable=import-error,unused-import,invalid-name +# pylint: disable=import-error,unused-import,invalid-name,W0231,W0233 # Import python libs -from __future__ import absolute_import +from __future__ import absolute_import, unicode_literals, print_function import sys import types +import logging # Import 3rd-party libs -from salt.ext.six import binary_type, string_types, text_type +from salt.exceptions import SaltException +from salt.ext.six import binary_type, string_types, text_type, integer_types from salt.ext.six.moves import cStringIO, StringIO -HAS_XML = True +log = logging.getLogger(__name__) + try: # Python >2.5 import xml.etree.cElementTree as ElementTree @@ -31,11 +34,10 @@ except Exception: import elementtree.ElementTree as ElementTree except Exception: ElementTree = None - HAS_XML = False # True if we are running on Python 3. -PY3 = sys.version_info[0] == 3 +PY3 = sys.version_info.major == 3 if PY3: @@ -45,13 +47,12 @@ else: import exceptions -if HAS_XML: +if ElementTree is not None: if not hasattr(ElementTree, 'ParseError'): class ParseError(Exception): ''' older versions of ElementTree do not have ParseError ''' - pass ElementTree.ParseError = ParseError @@ -61,9 +62,7 @@ def text_(s, encoding='latin-1', errors='strict'): If ``s`` is an instance of ``binary_type``, return ``s.decode(encoding, errors)``, otherwise return ``s`` ''' - if isinstance(s, binary_type): - return s.decode(encoding, errors) - return s + return s.decode(encoding, errors) if isinstance(s, binary_type) else s def bytes_(s, encoding='latin-1', errors='strict'): @@ -71,57 +70,37 @@ def bytes_(s, encoding='latin-1', errors='strict'): If ``s`` is an instance of ``text_type``, return ``s.encode(encoding, errors)``, otherwise return ``s`` ''' + return s.encode(encoding, errors) if isinstance(s, text_type) else s + + +def ascii_native_(s): + ''' + Python 3: If ``s`` is an instance of ``text_type``, return + ``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')`` + + Python 2: If ``s`` is an instance of ``text_type``, return + ``s.encode('ascii')``, otherwise return ``str(s)`` + ''' if isinstance(s, text_type): - return s.encode(encoding, errors) - return s + s = s.encode('ascii') + + return str(s, 'ascii', 'strict') if PY3 else s -if PY3: - def ascii_native_(s): - if isinstance(s, text_type): - s = s.encode('ascii') - return str(s, 'ascii', 'strict') -else: - def ascii_native_(s): - if isinstance(s, text_type): - s = s.encode('ascii') - return str(s) +def native_(s, encoding='latin-1', errors='strict'): + ''' + Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise + return ``str(s, encoding, errors)`` -ascii_native_.__doc__ = ''' -Python 3: If ``s`` is an instance of ``text_type``, return -``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')`` + Python 2: If ``s`` is an instance of ``text_type``, return + ``s.encode(encoding, errors)``, otherwise return ``str(s)`` + ''' + if PY3: + out = s if isinstance(s, text_type) else str(s, encoding, errors) + else: + out = s.encode(encoding, errors) if isinstance(s, text_type) else str(s) -Python 2: If ``s`` is an instance of ``text_type``, return -``s.encode('ascii')``, otherwise return ``str(s)`` -''' - - -if PY3: - def native_(s, encoding='latin-1', errors='strict'): - ''' - If ``s`` is an 
instance of ``text_type``, return - ``s``, otherwise return ``str(s, encoding, errors)`` - ''' - if isinstance(s, text_type): - return s - return str(s, encoding, errors) -else: - def native_(s, encoding='latin-1', errors='strict'): - ''' - If ``s`` is an instance of ``text_type``, return - ``s.encode(encoding, errors)``, otherwise return ``str(s)`` - ''' - if isinstance(s, text_type): - return s.encode(encoding, errors) - return str(s) - -native_.__doc__ = ''' -Python 3: If ``s`` is an instance of ``text_type``, return ``s``, otherwise -return ``str(s, encoding, errors)`` - -Python 2: If ``s`` is an instance of ``text_type``, return -``s.encode(encoding, errors)``, otherwise return ``str(s)`` -''' + return out def string_io(data=None): # cStringIO can't handle unicode @@ -133,7 +112,199 @@ def string_io(data=None): # cStringIO can't handle unicode except (UnicodeEncodeError, TypeError): return StringIO(data) -if PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress + +try: + if PY3: + import ipaddress + else: + import salt.ext.ipaddress as ipaddress +except ImportError: + ipaddress = None + + +class IPv6AddressScoped(ipaddress.IPv6Address): + ''' + Represent and manipulate single IPv6 Addresses. + Scope-aware version + ''' + def __init__(self, address): + ''' + Instantiate a new IPv6 address object. Scope is moved to an attribute 'scope'. + + Args: + address: A string or integer representing the IP + + Additionally, an integer can be passed, so + IPv6Address('2001:db8::') == IPv6Address(42540766411282592856903984951653826560) + or, more generally + IPv6Address(int(IPv6Address('2001:db8::'))) == IPv6Address('2001:db8::') + + Raises: + AddressValueError: If address isn't a valid IPv6 address. + + :param address: + ''' + # pylint: disable-all + if not hasattr(self, '_is_packed_binary'): + # This method (below) won't be around for some Python 3 versions + # and we need check this differently anyway + self._is_packed_binary = lambda p: isinstance(p, bytes) + # pylint: enable-all + + if isinstance(address, string_types) and '%' in address: + buff = address.split('%') + if len(buff) != 2: + raise SaltException('Invalid IPv6 address: "{}"'.format(address)) + address, self.__scope = buff + else: + self.__scope = None + + if sys.version_info.major == 2: + ipaddress._BaseAddress.__init__(self, address) + ipaddress._BaseV6.__init__(self, address) + else: + # Python 3.4 fix. Versions higher are simply not affected + # https://github.com/python/cpython/blob/3.4/Lib/ipaddress.py#L543-L544 + self._version = 6 + self._max_prefixlen = ipaddress.IPV6LENGTH + + # Efficient constructor from integer. + if isinstance(address, integer_types): + self._check_int_address(address) + self._ip = address + elif self._is_packed_binary(address): + self._check_packed_address(address, 16) + self._ip = ipaddress._int_from_bytes(address, 'big') + else: + address = str(address) + if '/' in address: + raise ipaddress.AddressValueError("Unexpected '/' in {}".format(address)) + self._ip = self._ip_int_from_string(address) + + def _is_packed_binary(self, data): + ''' + Check if data is hexadecimal packed + + :param data: + :return: + ''' + packed = False + if len(data) == 16 and ':' not in data: + try: + packed = bool(int(str(bytearray(data)).encode('hex'), 16)) + except ValueError: + pass + + return packed + + @property + def scope(self): + ''' + Return scope of IPv6 address. 
+ + :return: + ''' + return self.__scope + + def __str__(self): + return text_type(self._string_from_ip_int(self._ip) + + ('%' + self.scope if self.scope is not None else '')) + + +class IPv6InterfaceScoped(ipaddress.IPv6Interface, IPv6AddressScoped): + ''' + Update + ''' + def __init__(self, address): + if isinstance(address, (bytes, int)): + IPv6AddressScoped.__init__(self, address) + self.network = ipaddress.IPv6Network(self._ip) + self._prefixlen = self._max_prefixlen + return + + addr = ipaddress._split_optional_netmask(address) + IPv6AddressScoped.__init__(self, addr[0]) + self.network = ipaddress.IPv6Network(address, strict=False) + self.netmask = self.network.netmask + self._prefixlen = self.network._prefixlen + self.hostmask = self.network.hostmask + + +def ip_address(address): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Address or IPv6Address object. + + Raises: + ValueError: if the *address* passed isn't either a v4 or a v6 + address + + """ + try: + return ipaddress.IPv4Address(address) + except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err: + log.debug('Error while parsing IPv4 address: %s', address) + log.debug(err) + + try: + return IPv6AddressScoped(address) + except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err: + log.debug('Error while parsing IPv6 address: %s', address) + log.debug(err) + + if isinstance(address, bytes): + raise ipaddress.AddressValueError('{} does not appear to be an IPv4 or IPv6 address. ' + 'Did you pass in a bytes (str in Python 2) instead ' + 'of a unicode object?'.format(repr(address))) + + raise ValueError('{} does not appear to be an IPv4 or IPv6 address'.format(repr(address))) + + +def ip_interface(address): + """Take an IP string/int and return an object of the correct type. + + Args: + address: A string or integer, the IP address. Either IPv4 or + IPv6 addresses may be supplied; integers less than 2**32 will + be considered to be IPv4 by default. + + Returns: + An IPv4Interface or IPv6Interface object. + + Raises: + ValueError: if the string passed isn't either a v4 or a v6 + address. + + Notes: + The IPv?Interface classes describe an Address on a particular + Network, so they're basically a combination of both the Address + and Network classes. 
+ + """ + try: + return ipaddress.IPv4Interface(address) + except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err: + log.debug('Error while getting IPv4 interface for address %s', address) + log.debug(err) + + try: + return ipaddress.IPv6Interface(address) + except (ipaddress.AddressValueError, ipaddress.NetmaskValueError) as err: + log.debug('Error while getting IPv6 interface for address %s', address) + log.debug(err) + + raise ValueError('{} does not appear to be an IPv4 or IPv6 interface'.format(address)) + + +if ipaddress: + ipaddress.IPv6Address = IPv6AddressScoped + if sys.version_info.major == 2: + ipaddress.IPv6Interface = IPv6InterfaceScoped + ipaddress.ip_address = ip_address + ipaddress.ip_interface = ip_interface diff --git a/salt/cloud/clouds/saltify.py b/salt/cloud/clouds/saltify.py index c9cc281b42..e0e56349a0 100644 --- a/salt/cloud/clouds/saltify.py +++ b/salt/cloud/clouds/saltify.py @@ -27,10 +27,7 @@ import salt.utils.cloud import salt.config as config import salt.client import salt.ext.six as six -if six.PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress +from salt._compat import ipaddress from salt.exceptions import SaltCloudException, SaltCloudSystemExit diff --git a/salt/cloud/clouds/vagrant.py b/salt/cloud/clouds/vagrant.py index a24170c78a..0fe410eb91 100644 --- a/salt/cloud/clouds/vagrant.py +++ b/salt/cloud/clouds/vagrant.py @@ -25,13 +25,8 @@ import tempfile import salt.utils import salt.config as config import salt.client -import salt.ext.six as six -if six.PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress -from salt.exceptions import SaltCloudException, SaltCloudSystemExit, \ - SaltInvocationError +from salt._compat import ipaddress +from salt.exceptions import SaltCloudException, SaltCloudSystemExit, SaltInvocationError # Get logging started log = logging.getLogger(__name__) diff --git a/salt/ext/win_inet_pton.py b/salt/ext/win_inet_pton.py index 1204bede10..89aba14ce9 100644 --- a/salt/ext/win_inet_pton.py +++ b/salt/ext/win_inet_pton.py @@ -9,7 +9,7 @@ from __future__ import absolute_import import socket import ctypes import os -import ipaddress +from salt._compat import ipaddress import salt.ext.six as six diff --git a/salt/grains/chronos.py b/salt/grains/chronos.py index df8eca32e0..3b5add6895 100644 --- a/salt/grains/chronos.py +++ b/salt/grains/chronos.py @@ -5,9 +5,11 @@ Generate chronos proxy minion grains. .. 
versionadded:: 2015.8.2 ''' +# Import Python libs from __future__ import absolute_import, print_function, unicode_literals +# Import Salt libs import salt.utils.http import salt.utils.platform __proxyenabled__ = ['chronos'] diff --git a/salt/log/handlers/sentry_mod.py b/salt/log/handlers/sentry_mod.py index 43ac5b8262..c126eb20de 100644 --- a/salt/log/handlers/sentry_mod.py +++ b/salt/log/handlers/sentry_mod.py @@ -113,13 +113,16 @@ __virtualname__ = 'sentry' def __virtual__(): if HAS_RAVEN is True: - __grains__ = salt.loader.grains(__opts__) - __salt__ = salt.loader.minion_mods(__opts__) return __virtualname__ return False def setup_handlers(): + ''' + sets up the sentry handler + ''' + __grains__ = salt.loader.grains(__opts__) + __salt__ = salt.loader.minion_mods(__opts__) if 'sentry_handler' not in __opts__: log.debug('No \'sentry_handler\' key was found in the configuration') return False @@ -133,7 +136,9 @@ def setup_handlers(): transport_registry = TransportRegistry(default_transports) url = urlparse(dsn) if not transport_registry.supported_scheme(url.scheme): - raise ValueError('Unsupported Sentry DSN scheme: {0}'.format(url.scheme)) + raise ValueError( + 'Unsupported Sentry DSN scheme: %s', url.scheme + ) except ValueError as exc: log.info( 'Raven failed to parse the configuration provided DSN: %s', exc @@ -202,7 +207,11 @@ def setup_handlers(): context_dict = {} if context is not None: for tag in context: - tag_value = __salt__['grains.get'](tag) + try: + tag_value = __grains__[tag] + except KeyError: + log.debug('Sentry tag \'%s\' not found in grains.', tag) + continue if len(tag_value) > 0: context_dict[tag] = tag_value if len(context_dict) > 0: @@ -229,4 +238,7 @@ def setup_handlers(): def get_config_value(name, default=None): + ''' + returns a configuration option for the sentry_handler + ''' return __opts__['sentry_handler'].get(name, default) diff --git a/salt/modules/ipset.py b/salt/modules/ipset.py index 7047e84c29..1a0fa0044d 100644 --- a/salt/modules/ipset.py +++ b/salt/modules/ipset.py @@ -13,10 +13,7 @@ from salt.ext.six.moves import map, range import salt.utils.path # Import third-party libs -if six.PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress +from salt._compat import ipaddress # Set up logging log = logging.getLogger(__name__) diff --git a/salt/modules/network.py b/salt/modules/network.py index bdf73dfc89..73677fca67 100644 --- a/salt/modules/network.py +++ b/salt/modules/network.py @@ -26,10 +26,7 @@ from salt.exceptions import CommandExecutionError # Import 3rd-party libs from salt.ext import six from salt.ext.six.moves import range # pylint: disable=import-error,no-name-in-module,redefined-builtin -if six.PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress +from salt._compat import ipaddress log = logging.getLogger(__name__) diff --git a/salt/modules/vagrant.py b/salt/modules/vagrant.py index 0592dede55..0f518c2602 100644 --- a/salt/modules/vagrant.py +++ b/salt/modules/vagrant.py @@ -39,11 +39,7 @@ import salt.utils.path import salt.utils.stringutils from salt.exceptions import CommandExecutionError, SaltInvocationError import salt.ext.six as six - -if six.PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress +from salt._compat import ipaddress log = logging.getLogger(__name__) diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py index a1a5c8a0be..c9632e15a1 100644 --- a/salt/modules/zypper.py +++ b/salt/modules/zypper.py @@ -76,7 +76,25 @@ class _Zypper(object): Allows serial zypper calls 
(first came, first won). ''' - SUCCESS_EXIT_CODES = [0, 100, 101, 102, 103] + SUCCESS_EXIT_CODES = { + 0: 'Successful run of zypper with no special info.', + 100: 'Patches are available for installation.', + 101: 'Security patches are available for installation.', + 102: 'Installation successful, reboot required.', + 103: 'Installation succesful, restart of the package manager itself required.', + } + + WARNING_EXIT_CODES = { + 6: 'No repositories are defined.', + 7: 'The ZYPP library is locked.', + 106: 'Some repository had to be disabled temporarily because it failed to refresh. ' + 'You should check your repository configuration (e.g. zypper ref -f).', + 107: 'Installation basically succeeded, but some of the packages %post install scripts returned an error. ' + 'These packages were successfully unpacked to disk and are registered in the rpm database, ' + 'but due to the failed install script they may not work as expected. The failed scripts output might ' + 'reveal what actually went wrong. Any scripts output is also logged to /var/log/zypp/history.' + } + LOCK_EXIT_CODE = 7 XML_DIRECTIVES = ['-x', '--xmlout'] ZYPPER_LOCK = '/var/run/zypp.pid' @@ -189,7 +207,15 @@ class _Zypper(object): :return: ''' - return self.exit_code not in self.SUCCESS_EXIT_CODES + if self.exit_code: + msg = self.SUCCESS_EXIT_CODES.get(self.exit_code) + if msg: + log.info(msg) + msg = self.WARNING_EXIT_CODES.get(self.exit_code) + if msg: + log.warning(msg) + + return self.exit_code not in self.SUCCESS_EXIT_CODES and self.exit_code not in self.WARNING_EXIT_CODES def _is_lock(self): ''' diff --git a/salt/runners/state.py b/salt/runners/state.py index d866dc8ef1..f9a5cc0dbd 100644 --- a/salt/runners/state.py +++ b/salt/runners/state.py @@ -10,6 +10,7 @@ import logging import salt.loader import salt.utils.event import salt.utils.functools +import salt.utils.jid from salt.exceptions import SaltInvocationError LOGGER = logging.getLogger(__name__) @@ -110,6 +111,8 @@ def orchestrate(mods, pillarenv = __opts__['pillarenv'] if saltenv is None and 'saltenv' in __opts__: saltenv = __opts__['saltenv'] + if orchestration_jid is None: + orchestration_jid = salt.utils.jid.gen_jid(__opts__) running = minion.functions['state.sls']( mods, diff --git a/salt/states/file.py b/salt/states/file.py index 355a8d5a08..9ac07e1461 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -5215,7 +5215,9 @@ def append(name, check_res, check_msg = _check_file(name) if not check_res: # Try to create the file - touch(name, makedirs=makedirs) + touch_ret = touch(name, makedirs=makedirs) + if __opts__['test']: + return touch_ret retry_res, retry_msg = _check_file(name) if not retry_res: return _error(ret, check_msg) @@ -5496,7 +5498,9 @@ def prepend(name, check_res, check_msg = _check_file(name) if not check_res: # Try to create the file - touch(name, makedirs=makedirs) + touch_ret = touch(name, makedirs=makedirs) + if __opts__['test']: + return touch_ret retry_res, retry_msg = _check_file(name) if not retry_res: return _error(ret, check_msg) diff --git a/salt/utils/data.py b/salt/utils/data.py index b88d5c795d..0de3992e71 100644 --- a/salt/utils/data.py +++ b/salt/utils/data.py @@ -455,13 +455,14 @@ def traverse_dict(data, key, default=None, delimiter=DEFAULT_TARGET_DELIM): data['foo']['bar']['baz'] if this value exists, and will otherwise return the dict in the default argument. 
''' + ptr = data try: for each in key.split(delimiter): - data = data[each] + ptr = ptr[each] except (KeyError, IndexError, TypeError): # Encountered a non-indexable value in the middle of traversing return default - return data + return ptr @jinja_filter('traverse') @@ -476,16 +477,17 @@ def traverse_dict_and_list(data, key, default=None, delimiter=DEFAULT_TARGET_DEL {'foo':{'bar':['baz']}} , if data like {'foo':{'bar':{'0':'baz'}}} then return data['foo']['bar']['0'] ''' + ptr = data for each in key.split(delimiter): - if isinstance(data, list): + if isinstance(ptr, list): try: idx = int(each) except ValueError: embed_match = False # Index was not numeric, lets look at any embedded dicts - for embedded in (x for x in data if isinstance(x, dict)): + for embedded in (x for x in ptr if isinstance(x, dict)): try: - data = embedded[each] + ptr = embedded[each] embed_match = True break except KeyError: @@ -495,15 +497,15 @@ def traverse_dict_and_list(data, key, default=None, delimiter=DEFAULT_TARGET_DEL return default else: try: - data = data[idx] + ptr = ptr[idx] except IndexError: return default else: try: - data = data[each] + ptr = ptr[each] except (KeyError, TypeError): return default - return data + return ptr def subdict_match(data, @@ -519,16 +521,33 @@ def subdict_match(data, former, as more deeply-nested matches are tried first. ''' def _match(target, pattern, regex_match=False, exact_match=False): + # The reason for using six.text_type first and _then_ using + # to_unicode as a fallback is because we want to eventually have + # unicode types for comparison below. If either value is numeric then + # six.text_type will turn it into a unicode string. However, if the + # value is a PY2 str type with non-ascii chars, then the result will be + # a UnicodeDecodeError. In those cases, we simply use to_unicode to + # decode it to unicode. The reason we can't simply use to_unicode to + # begin with is that (by design) to_unicode will raise a TypeError if a + # non-string/bytestring/bytearray value is passed. 
+ try: + target = six.text_type(target).lower() + except UnicodeDecodeError: + target = salt.utils.stringutils.to_unicode(target).lower() + try: + pattern = six.text_type(pattern).lower() + except UnicodeDecodeError: + pattern = salt.utils.stringutils.to_unicode(pattern).lower() + if regex_match: try: - return re.match(pattern.lower(), six.text_type(target).lower()) + return re.match(pattern, target) except Exception: log.error('Invalid regex \'%s\' in match', pattern) return False - elif exact_match: - return six.text_type(target).lower() == pattern.lower() else: - return fnmatch.fnmatch(six.text_type(target).lower(), pattern.lower()) + return target == pattern if exact_match \ + else fnmatch.fnmatch(target, pattern) def _dict_match(target, pattern, regex_match=False, exact_match=False): wildcard = pattern.startswith('*:') @@ -548,11 +567,6 @@ def subdict_match(data, return True if wildcard: for key in target: - if _match(key, - pattern, - regex_match=regex_match, - exact_match=exact_match): - return True if isinstance(target[key], dict): if _dict_match(target[key], pattern, @@ -566,6 +580,17 @@ def subdict_match(data, regex_match=regex_match, exact_match=exact_match): return True + elif _match(target[key], + pattern, + regex_match=regex_match, + exact_match=exact_match): + return True + return False + + splits = expr.split(delimiter) + num_splits = len(splits) + if num_splits == 1: + # Delimiter not present, this can't possibly be a match return False splits = expr.split(delimiter) @@ -578,10 +603,16 @@ def subdict_match(data, # want to use are 3, 2, and 1, in that order. for idx in range(num_splits - 1, 0, -1): key = delimiter.join(splits[:idx]) - matchstr = delimiter.join(splits[idx:]) + if key == '*': + # We are matching on everything under the top level, so we need to + # treat the match as the entire data being passed in + matchstr = expr + match = data + else: + matchstr = delimiter.join(splits[idx:]) + match = traverse_dict_and_list(data, key, {}, delimiter=delimiter) log.debug("Attempting to match '%s' in '%s' using delimiter '%s'", matchstr, key, delimiter) - match = traverse_dict_and_list(data, key, {}, delimiter=delimiter) if match == {}: continue if isinstance(match, dict): diff --git a/salt/utils/dns.py b/salt/utils/dns.py index 3687491545..687b9ac4c1 100644 --- a/salt/utils/dns.py +++ b/salt/utils/dns.py @@ -1139,18 +1139,13 @@ def parse_resolv(src='/etc/resolv.conf'): try: (directive, arg) = (line[0].lower(), line[1:]) # Drop everything after # or ; (comments) - arg = list(itertools.takewhile( - lambda x: x[0] not in ('#', ';'), arg)) - + arg = list(itertools.takewhile(lambda x: x[0] not in ('#', ';'), arg)) if directive == 'nameserver': - # Split the scope (interface) if it is present - addr, scope = arg[0].split('%', 1) if '%' in arg[0] else (arg[0], '') + addr = arg[0] try: ip_addr = ipaddress.ip_address(addr) version = ip_addr.version - # Rejoin scope after address validation - if scope: - ip_addr = '%'.join((str(ip_addr), scope)) + ip_addr = str(ip_addr) if ip_addr not in nameservers: nameservers.append(ip_addr) if version == 4 and ip_addr not in ip4_nameservers: diff --git a/salt/utils/minions.py b/salt/utils/minions.py index 260544c101..d598386b8c 100644 --- a/salt/utils/minions.py +++ b/salt/utils/minions.py @@ -26,10 +26,7 @@ import salt.cache from salt.ext import six # Import 3rd-party libs -if six.PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress +from salt._compat import ipaddress HAS_RANGE = False try: import seco.range # pylint: 
disable=import-error diff --git a/tests/integration/files/conf/minion b/tests/integration/files/conf/minion index 29fdd43e6c..2af31bfd6e 100644 --- a/tests/integration/files/conf/minion +++ b/tests/integration/files/conf/minion @@ -25,6 +25,7 @@ integration.test: True # Grains addons grains: test_grain: cheese + grain_path: /tmp/salt-tests-tmpdir/file-grain-test script: grail alot: many planets: diff --git a/tests/integration/states/test_file.py b/tests/integration/states/test_file.py index 09e19756f4..d40efacda8 100644 --- a/tests/integration/states/test_file.py +++ b/tests/integration/states/test_file.py @@ -357,7 +357,6 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): file. ''' grain_path = os.path.join(TMP, 'file-grain-test') - self.run_function('grains.set', ['grain_path', grain_path]) state_file = 'file-grainget' self.run_function('state.sls', [state_file]) diff --git a/tests/unit/grains/test_core.py b/tests/unit/grains/test_core.py index 8539d5e9df..9ff3d7a603 100644 --- a/tests/unit/grains/test_core.py +++ b/tests/unit/grains/test_core.py @@ -36,10 +36,7 @@ import salt.grains.core as core # Import 3rd-party libs from salt.ext import six -if six.PY3: - import ipaddress -else: - import salt.ext.ipaddress as ipaddress +from salt._compat import ipaddress log = logging.getLogger(__name__) diff --git a/tests/unit/modules/test_network.py b/tests/unit/modules/test_network.py index d616d8b2ba..ea3026299d 100644 --- a/tests/unit/modules/test_network.py +++ b/tests/unit/modules/test_network.py @@ -20,20 +20,11 @@ from tests.support.mock import ( ) # Import Salt Libs -from salt.ext import six import salt.utils.network import salt.utils.path import salt.modules.network as network from salt.exceptions import CommandExecutionError -if six.PY2: - import salt.ext.ipaddress as ipaddress - HAS_IPADDRESS = True -else: - try: - import ipaddress - HAS_IPADDRESS = True - except ImportError: - HAS_IPADDRESS = False +from salt._compat import ipaddress @skipIf(NO_MOCK, NO_MOCK_REASON) @@ -276,7 +267,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin): self.assertDictEqual(network.connect('host', 'port'), {'comment': ret, 'result': True}) - @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'') + @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'') def test_is_private(self): ''' Test for Check if the given IP address is a private address @@ -288,7 +279,7 @@ class NetworkTestCase(TestCase, LoaderModuleMockMixin): return_value=True): self.assertTrue(network.is_private('::1')) - @skipIf(HAS_IPADDRESS is False, 'unable to import \'ipaddress\'') + @skipIf(not bool(ipaddress), 'unable to import \'ipaddress\'') def test_is_loopback(self): ''' Test for Check if the given IP address is a loopback address diff --git a/tests/unit/utils/test_data.py b/tests/unit/utils/test_data.py index 78f4144f9f..030f22b202 100644 --- a/tests/unit/utils/test_data.py +++ b/tests/unit/utils/test_data.py @@ -144,6 +144,36 @@ class DataTestCase(TestCase): ) ) + def test_subdict_match_with_wildcards(self): + ''' + Tests subdict matching when wildcards are used in the expression + ''' + data = { + 'a': { + 'b': { + 'ç': 'd', + 'é': ['eff', 'gee', '8ch'], + 'ĩ': {'j': 'k'} + } + } + } + assert salt.utils.data.subdict_match(data, '*:*:*:*') + assert salt.utils.data.subdict_match(data, 'a:*:*:*') + assert salt.utils.data.subdict_match(data, 'a:b:*:*') + assert salt.utils.data.subdict_match(data, 'a:b:ç:*') + assert salt.utils.data.subdict_match(data, 'a:b:*:d') + assert salt.utils.data.subdict_match(data, 
'a:*:ç:d') + assert salt.utils.data.subdict_match(data, '*:b:ç:d') + assert salt.utils.data.subdict_match(data, '*:*:ç:d') + assert salt.utils.data.subdict_match(data, '*:*:*:d') + assert salt.utils.data.subdict_match(data, 'a:*:*:d') + assert salt.utils.data.subdict_match(data, 'a:b:*:ef*') + assert salt.utils.data.subdict_match(data, 'a:b:*:g*') + assert salt.utils.data.subdict_match(data, 'a:b:*:j:*') + assert salt.utils.data.subdict_match(data, 'a:b:*:j:k') + assert salt.utils.data.subdict_match(data, 'a:b:*:*:k') + assert salt.utils.data.subdict_match(data, 'a:b:*:*:*') + def test_traverse_dict(self): test_two_level_dict = {'foo': {'bar': 'baz'}} diff --git a/tests/unit/utils/test_jinja.py b/tests/unit/utils/test_jinja.py index fea6a2c0b1..e4be1f9ce0 100644 --- a/tests/unit/utils/test_jinja.py +++ b/tests/unit/utils/test_jinja.py @@ -984,6 +984,10 @@ class TestCustomExtensions(TestCase): dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) self.assertEqual(rendered, 'False') + rendered = render_jinja_tmpl("{{ 'fe80::20d:b9ff:fe01:ea8%eth0' | is_ipv6 }}", + dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) + self.assertEqual(rendered, 'True') + rendered = render_jinja_tmpl("{{ 'FE80::' | is_ipv6 }}", dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) self.assertEqual(rendered, 'True') @@ -996,6 +1000,10 @@ class TestCustomExtensions(TestCase): ''' Test the `ipaddr` Jinja filter. ''' + rendered = render_jinja_tmpl("{{ '::' | ipaddr }}", + dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) + self.assertEqual(rendered, '::') + rendered = render_jinja_tmpl("{{ '192.168.0.1' | ipaddr }}", dict(opts=self.local_opts, saltenv='test', salt=self.local_salt)) self.assertEqual(rendered, '192.168.0.1')
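
Reviewer note (not part of the patch): the zone-ID handling added in salt/_compat.py above is easy to lose inside the diff. The standalone sketch below is an illustration only, assuming Python 3 and the stdlib ipaddress module; the helper names split_scope and parse_scoped_ipv6 are hypothetical and do not exist in Salt. It mirrors the split-then-validate approach IPv6AddressScoped takes: strip the "%zone" suffix, validate the bare address, and keep the scope alongside it for later re-attachment.

# Illustrative sketch only -- mirrors the scope handling in IPv6AddressScoped,
# using the stdlib ipaddress module directly (assumes Python 3).
import ipaddress

def split_scope(address):
    '''Split an RFC 4007 zone ID ("%eth0") off an IPv6 literal, if present.'''
    if isinstance(address, str) and '%' in address:
        buff = address.split('%')
        if len(buff) != 2:
            raise ValueError('Invalid IPv6 address: "{0}"'.format(address))
        return buff[0], buff[1]
    return address, None

def parse_scoped_ipv6(address):
    '''Validate the bare address and return (IPv6Address, scope-or-None).'''
    addr, scope = split_scope(address)
    return ipaddress.IPv6Address(addr), scope

# Usage: the scoped literal from the new is_ipv6 jinja test parses cleanly,
# and the scope is preserved so it can be re-joined as str(ip) + '%' + scope.
ip, scope = parse_scoped_ipv6('fe80::20d:b9ff:fe01:ea8%eth0')
assert str(ip) == 'fe80::20d:b9ff:fe01:ea8' and scope == 'eth0'
assert parse_scoped_ipv6('2001:db8::') == (ipaddress.IPv6Address('2001:db8::'), None)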