Mirror of https://github.com/valitydev/salt.git (synced 2024-11-06 08:35:21 +00:00)

Merge branch 'master' into 2019_2_1_port_53229

This commit is contained in: commit 21d8e5cd1c

Changed file: .ci/docs (15)
@@ -41,13 +41,22 @@ wrappedNode('docs', global_timeout, '#jenkins-prod-pr') {
         '''
     }
 
-    stage('Build') {
+    stage('Build HTML Docs') {
         sh shell_header + '''
         eval "$(pyenv init -)"
         pyenv shell 3.6.8
-        nox -e docs
+        nox -e 'docs-html(compress=True)'
         '''
-        archiveArtifacts artifacts: 'doc/doc-archive.tar.gz'
+        archiveArtifacts artifacts: 'doc/html-archive.tar.gz'
     }
+
+    stage('Build Man Pages') {
+        sh shell_header + '''
+        eval "$(pyenv init -)"
+        pyenv shell 3.6.8
+        nox -e 'docs-man(compress=True, update=False)'
+        '''
+        archiveArtifacts artifacts: 'doc/man-archive.tar.gz'
+    }
 }
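One reading note for the sh steps throughout this commit: Groovy only interpolates ${...} inside double-quoted strings, which is why the hunks below mix '''...''' and """...""" blocks. A minimal sketch (the variable and values here are illustrative, not from the diff):

    // '''...''' is a plain Groovy string: $TEST_SUITE is passed through to the
    // shell untouched and resolved from the environment at run time.
    sh '''
    echo "suite is $TEST_SUITE"
    '''

    // """...""" is a GString: Groovy substitutes ${python_version} before the
    // script ever reaches the shell.
    def python_version = 'py2'
    sh """
    echo "python is ${python_version}"
    """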
@@ -1,18 +1,18 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.2') _
 
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
 // Now define a global pipeline timeout. This is the test run timeout with one(1) additional
 // hour to allow for artifacts to be downloaded, if possible.
 def global_timeout = testrun_timeout + 1;
 
 def distro_name = 'amazon'
 def distro_version = '1'
 def python_version = 'py2'
 def test_transport = 'ZeroMQ'
 def nox_env_name = 'runtests-zeromq'
 def salt_target_branch = 'master'
 def golden_images_branch = '2019.2'
 def nox_passthrough_opts = '--ssh-tests'
 def concurrent_builds = 1
 def use_spot_instances = true
 def jenkins_slave_label = 'kitchen-slave'
 
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([
 
 // Be sure to cancel any previously running builds
 def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
+if (buildNumber > concurrent_builds) {
     // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
+    milestone(buildNumber - concurrent_builds)
 }
 // Define a milestone for this build so that, if another build starts, this one will be aborted
 milestone(buildNumber)
 
-wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
-    withEnv([
-        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
-        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
-        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
-        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
-        'NOX_ENABLE_FROM_FILENAMES=true',
-        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
-        "SALT_TARGET_BRANCH=${salt_target_branch}",
-        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
-        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
-        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
-        'RBENV_VERSION=2.6.3',
-        "TEST_SUITE=${python_version}",
-        "TEST_PLATFORM=${distro_name}-${distro_version}",
-        "TEST_TRANSPORT=${test_transport}",
-        "FORCE_FULL=${params.runFull}",
-    ]) {
-        // Checkout the repo
-        stage('Clone') {
-            cleanWs notFailBuild: true
-            checkout scm
-            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
-        }
-
-        // Setup the kitchen required bundle
-        stage('Setup') {
-            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
-        }
-
-        stage('Create VM') {
-            retry(3) {
-                sh '''
-                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
-                cp -f ~/workspace/spot.yml .kitchen.local.yml
-                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
-                '''
-                sh """
-                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
-                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
-                fi
-                if [ -s ".kitchen/logs/kitchen.log" ]; then
-                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
-                fi
-                """
-            }
-            sh '''
-            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
-            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
-            '''
-        }
-
-        try {
-            timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
-                stage('Converge VM') {
-                    sh '''
-                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
-                    '''
-                    sh """
-                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
-                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
-                    fi
-                    if [ -s ".kitchen/logs/kitchen.log" ]; then
-                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
-                    fi
-                    """
-                }
-                stage('Run Tests') {
-                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
-                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
-                    }
-                }
-            }
-        } finally {
-            try {
-                sh """
-                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
-                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
-                fi
-                if [ -s ".kitchen/logs/kitchen.log" ]; then
-                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
-                fi
-                """
-                stage('Download Artefacts') {
-                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
-                        sh '''
-                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
-                        '''
-                    }
-                    sh """
-                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
-                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
-                    fi
-                    if [ -s ".kitchen/logs/kitchen.log" ]; then
-                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
-                    fi
-                    """
-                }
-                archiveArtifacts(
-                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
-                    allowEmptyArchive: true
-                )
-                junit 'artifacts/xml-unittests-output/*.xml'
-            } finally {
-                stage('Cleanup') {
-                    sh '''
-                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
-                    '''
-                }
-                stage('Upload Coverage') {
-                    script {
-                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
-                            sh '''
-                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
-                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
-                            fi
-                            '''
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
+runTests(
+    env: env,
+    distro_name: distro_name,
+    distro_version: distro_version,
+    python_version: python_version,
+    salt_target_branch: salt_target_branch,
+    golden_images_branch: golden_images_branch,
+    nox_env_name: nox_env_name,
+    nox_passthrough_opts: nox_passthrough_opts,
+    testrun_timeout: testrun_timeout,
+    run_full: params.runFull,
+    use_spot_instances: use_spot_instances,
+    jenkins_slave_label: jenkins_slave_label)
 
 // vim: ft=groovy
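The buildNumber/milestone lines in the hunk above are this commit's generalisation of Jenkins' usual cancel-the-previous-build idiom from one to concurrent_builds simultaneous runs. A minimal standalone sketch of the idiom (it relies on the Pipeline Milestone Step plugin being installed):

    // With concurrent_builds = 1 this reduces to the classic
    // milestone(buildNumber - 1); milestone(buildNumber) pattern.
    def concurrent_builds = 1
    def buildNumber = env.BUILD_NUMBER as int
    if (buildNumber > concurrent_builds) {
        // Passing a milestone that an older running build also defined causes
        // Jenkins to abort that older build.
        milestone(buildNumber - concurrent_builds)
    }
    // Register this build's own milestone so a future build can cancel it.
    milestone(buildNumber)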
@@ -1,18 +1,18 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.2') _
 
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
 // Now define a global pipeline timeout. This is the test run timeout with one(1) additional
 // hour to allow for artifacts to be downloaded, if possible.
 def global_timeout = testrun_timeout + 1;
 
 def distro_name = 'amazon'
 def distro_version = '2'
 def python_version = 'py2'
 def test_transport = 'ZeroMQ'
 def nox_env_name = 'runtests-zeromq'
 def salt_target_branch = 'master'
 def golden_images_branch = '2019.2'
 def nox_passthrough_opts = '--ssh-tests'
 def concurrent_builds = 1
 def use_spot_instances = true
 def jenkins_slave_label = 'kitchen-slave'
 
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([
 
 // Be sure to cancel any previously running builds
 def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
+if (buildNumber > concurrent_builds) {
     // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
+    milestone(buildNumber - concurrent_builds)
 }
 // Define a milestone for this build so that, if another build starts, this one will be aborted
 milestone(buildNumber)
 
    [remainder identical to the first Jenkinsfile hunk above: the inline wrappedNode('kitchen-slave', ...) pipeline is removed and the same runTests(...) call is added]
 
 // vim: ft=groovy
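The bare runTests(...) call works because @Library('salt@master-1.2') loads a Jenkins shared library that exports runTests as a global variable. The library itself is not part of this diff; the skeleton below is a hypothetical sketch of its shape, with only the parameter names taken from the call sites, to show where the removed inline pipeline went:

    // Hypothetical vars/runTests.groovy in the salt@master-1.2 shared library.
    def call(Map options) {
        // Same timeout arithmetic the inline pipelines used.
        def global_timeout = options.testrun_timeout + 1
        node(options.jenkins_slave_label) {
            timeout(time: global_timeout, unit: 'HOURS') {
                // ...clone, bundle install, kitchen create/converge/verify,
                // artefact download, kitchen destroy, coverage upload: the
                // steps the removed wrappedNode pipelines performed inline.
            }
        }
    }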
@@ -1,18 +1,18 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.2') _
 
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
 // Now define a global pipeline timeout. This is the test run timeout with one(1) additional
 // hour to allow for artifacts to be downloaded, if possible.
 def global_timeout = testrun_timeout + 1;
 
 def distro_name = 'amazon'
 def distro_version = '2'
 def python_version = 'py3'
 def test_transport = 'ZeroMQ'
 def nox_env_name = 'runtests-zeromq'
 def salt_target_branch = 'master'
 def golden_images_branch = '2019.2'
 def nox_passthrough_opts = '--ssh-tests'
 def concurrent_builds = 1
 def use_spot_instances = true
 def jenkins_slave_label = 'kitchen-slave'
 
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([
 
 // Be sure to cancel any previously running builds
 def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
+if (buildNumber > concurrent_builds) {
     // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
+    milestone(buildNumber - concurrent_builds)
 }
 // Define a milestone for this build so that, if another build starts, this one will be aborted
 milestone(buildNumber)
 
    [remainder identical to the first Jenkinsfile hunk above: the inline wrappedNode('kitchen-slave', ...) pipeline is removed and the same runTests(...) call is added]
 
 // vim: ft=groovy
@@ -1,18 +1,18 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.2') _
 
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
 // Now define a global pipeline timeout. This is the test run timeout with one(1) additional
 // hour to allow for artifacts to be downloaded, if possible.
 def global_timeout = testrun_timeout + 1;
 
 def distro_name = 'arch'
 def distro_version = 'lts'
 def python_version = 'py2'
 def test_transport = 'ZeroMQ'
 def nox_env_name = 'runtests-zeromq'
 def salt_target_branch = 'master'
 def golden_images_branch = '2019.2'
 def nox_passthrough_opts = '-n integration.modules.test_pkg'
 def concurrent_builds = 1
 def use_spot_instances = true
 def jenkins_slave_label = 'kitchen-slave'
 
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([
 
 // Be sure to cancel any previously running builds
 def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
+if (buildNumber > concurrent_builds) {
     // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
+    milestone(buildNumber - concurrent_builds)
 }
 // Define a milestone for this build so that, if another build starts, this one will be aborted
 milestone(buildNumber)
 
    [remainder identical to the first Jenkinsfile hunk above: the inline wrappedNode('kitchen-slave', ...) pipeline is removed and the same runTests(...) call is added]
 
 // vim: ft=groovy
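This hunk and the next one (the two arch files, py2 and py3) are the only ones that set nox_passthrough_opts to a test selection rather than --ssh-tests: they restrict the run to the pkg integration module. Presumably (an assumption, the mechanism is not shown in this diff) the option string lands after nox's -- separator on the test VM, roughly:

    // Hedged sketch: nox forwards everything after "--" to the underlying
    // test runner, so on the arch VMs the effective invocation would be
    // approximately:
    sh 'nox -e "$NOX_ENV_NAME" -- -n integration.modules.test_pkg'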
@@ -1,18 +1,18 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.2') _
 
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
 // Now define a global pipeline timeout. This is the test run timeout with one(1) additional
 // hour to allow for artifacts to be downloaded, if possible.
 def global_timeout = testrun_timeout + 1;
 
 def distro_name = 'arch'
 def distro_version = 'lts'
 def python_version = 'py3'
 def test_transport = 'ZeroMQ'
 def nox_env_name = 'runtests-zeromq'
 def salt_target_branch = 'master'
 def golden_images_branch = '2019.2'
 def nox_passthrough_opts = '-n integration.modules.test_pkg'
 def concurrent_builds = 1
 def use_spot_instances = true
 def jenkins_slave_label = 'kitchen-slave'
 
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([
 
 // Be sure to cancel any previously running builds
 def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
+if (buildNumber > concurrent_builds) {
     // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
+    milestone(buildNumber - concurrent_builds)
 }
 // Define a milestone for this build so that, if another build starts, this one will be aborted
 milestone(buildNumber)
 
    [remainder identical to the first Jenkinsfile hunk above: the inline wrappedNode('kitchen-slave', ...) pipeline is removed and the same runTests(...) call is added]
 
 // vim: ft=groovy
@@ -1,18 +1,18 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.2') _
 
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
 // Now define a global pipeline timeout. This is the test run timeout with one(1) additional
 // hour to allow for artifacts to be downloaded, if possible.
 def global_timeout = testrun_timeout + 1;
 
 def distro_name = 'centos'
 def distro_version = '6'
 def python_version = 'py2'
 def test_transport = 'ZeroMQ'
 def nox_env_name = 'runtests-zeromq'
 def salt_target_branch = 'master'
 def golden_images_branch = '2019.2'
 def nox_passthrough_opts = '--ssh-tests'
 def concurrent_builds = 1
 def use_spot_instances = true
 def jenkins_slave_label = 'kitchen-slave'
 
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([
 
 // Be sure to cancel any previously running builds
 def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
+if (buildNumber > concurrent_builds) {
     // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
+    milestone(buildNumber - concurrent_builds)
 }
 // Define a milestone for this build so that, if another build starts, this one will be aborted
 milestone(buildNumber)
 
    [remainder identical to the first Jenkinsfile hunk above: the inline wrappedNode('kitchen-slave', ...) pipeline is removed and the same runTests(...) call is added]
 
 // vim: ft=groovy
@@ -1,18 +1,18 @@
-@Library('salt@1.1') _
+@Library('salt@master-1.2') _
 
 // Define the maximum time, in hours, that a test run should run for
 def testrun_timeout = 6
 // Now define a global pipeline timeout. This is the test run timeout with one(1) additional
 // hour to allow for artifacts to be downloaded, if possible.
 def global_timeout = testrun_timeout + 1;
 
 def distro_name = 'centos'
 def distro_version = '7'
 def python_version = 'py2'
 def test_transport = 'ZeroMQ'
 def nox_env_name = 'runtests-zeromq'
 def salt_target_branch = 'master'
 def golden_images_branch = '2019.2'
 def nox_passthrough_opts = '--ssh-tests'
 def concurrent_builds = 1
 def use_spot_instances = true
 def jenkins_slave_label = 'kitchen-slave'
 
 properties([
     buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([
 
 // Be sure to cancel any previously running builds
 def buildNumber = env.BUILD_NUMBER as int
-if (buildNumber > 1) {
+if (buildNumber > concurrent_builds) {
     // This will cancel the previous build which also defined a matching milestone
-    milestone(buildNumber - 1)
+    milestone(buildNumber - concurrent_builds)
 }
 // Define a milestone for this build so that, if another build starts, this one will be aborted
 milestone(buildNumber)
 
    [remainder identical to the first Jenkinsfile hunk above: the inline wrappedNode('kitchen-slave', ...) pipeline is removed and the same runTests(...) call is added]
 
 // vim: ft=groovy
@ -1,18 +1,18 @@
|
||||
@Library('salt@1.1') _
|
||||
@Library('salt@master-1.2') _
|
||||
|
||||
// Define the maximum time, in hours, that a test run should run for
|
||||
def testrun_timeout = 6
|
||||
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
|
||||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'centos'
|
||||
def distro_version = '7'
|
||||
def python_version = 'py2'
|
||||
def test_transport = 'ZeroMQ-M2Crypto'
|
||||
def nox_env_name = 'runtests-zeromq-m2crypto'
|
||||
def salt_target_branch = 'master'
|
||||
def golden_images_branch = '2019.2'
|
||||
def nox_passthrough_opts = '--ssh-tests'
|
||||
def concurrent_builds = 1
|
||||
def use_spot_instances = true
|
||||
def jenkins_slave_label = 'kitchen-slave'
|
||||
|
||||
properties([
|
||||
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
|
||||
@ -23,137 +23,25 @@ properties([
|
||||
|
||||
// Be sure to cancel any previously running builds
|
||||
def buildNumber = env.BUILD_NUMBER as int
|
||||
if (buildNumber > 1) {
|
||||
if (buildNumber > concurrent_builds) {
|
||||
// This will cancel the previous build which also defined a matching milestone
|
||||
milestone(buildNumber - 1)
|
||||
milestone(buildNumber - concurrent_builds)
|
||||
}
|
||||
// Define a milestone for this build so that, if another build starts, this one will be aborted
|
||||
milestone(buildNumber)
|
||||
|
||||
|
||||
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
|
||||
withEnv([
|
||||
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
|
||||
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
|
||||
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
|
||||
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
|
||||
'NOX_ENABLE_FROM_FILENAMES=true',
|
||||
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
|
||||
"SALT_TARGET_BRANCH=${salt_target_branch}",
|
||||
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
|
||||
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
|
||||
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
|
||||
'RBENV_VERSION=2.6.3',
|
||||
"TEST_SUITE=${python_version}",
|
||||
"TEST_PLATFORM=${distro_name}-${distro_version}",
|
||||
"TEST_TRANSPORT=${test_transport}",
|
||||
"FORCE_FULL=${params.runFull}",
|
||||
]) {
|
||||
// Checkout the repo
|
||||
stage('Clone') {
|
||||
cleanWs notFailBuild: true
|
||||
checkout scm
|
||||
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
|
||||
}
|
||||
|
||||
// Setup the kitchen required bundle
|
||||
stage('Setup') {
|
||||
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
|
||||
}
|
||||
|
||||
stage('Create VM') {
|
||||
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
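// The converge/verify budget is cut 15 minutes short of testrun_timeout,
// leaving headroom (an assumption of intent) for the log handling and
// artifact download in the finally blocks before the global timeout fires.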
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
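// kitchen verify is invoked a second time purely as a transport: with
// ONLY_DOWNLOAD_ARTEFACTS=1 the verifier just pulls the test artifacts off
// the VM, and "|| exit 0" keeps a failed download from failing the build.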
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
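// Note that the verify step above swallowed kitchen's exit code (the sh
// string ends in an echo), so it is this junit step that actually marks the
// build unstable or failed from the recorded test results.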
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
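// Coverage only goes to Codecov when a full run was forced (FORCE_FULL=true)
// and the run actually produced artifacts/coverage/coverage.xml; the
// trailing "|| true" keeps an upload hiccup from breaking the build.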
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,26 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
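// The proxy suite additionally tags its Codecov upload with a "proxy" flag
// via extra_codecov_flags, so its coverage can be filtered separately.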

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ-Pycryptodomex'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'Tornado'
def nox_env_name = 'runtests-tornado'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ-M2Crypto'
def nox_env_name = 'runtests-zeromq-m2crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,26 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ-Pycryptodomex'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'debian'
def distro_version = '10'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
|
||||
@Library('salt@1.1') _
|
||||
@Library('salt@master-1.2') _
|
||||
|
||||
// Define the maximum time, in hours, that a test run should run for
|
||||
def testrun_timeout = 6
|
||||
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
|
||||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'fedora'
|
def distro_version = '29'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

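Editorial note on the `concurrent_builds` change above: Jenkins' `milestone` step lets a newer build abort an older one that is stuck at an earlier milestone, so the variable widens the window of builds allowed to run side by side. A minimal standalone sketch, with a hypothetical value (every file in this commit uses 1, which reduces to the old `buildNumber > 1` check):

// Sketch: build N aborts the build that is now more than
// `concurrent_builds` behind it, then records its own milestone.
def concurrent_builds = 2  // hypothetical; the files above all use 1
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
milestone(buildNumber - concurrent_builds)
}
milestone(buildNumber)  // a future build can abort this one the same way
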
@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

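A note on the two timeouts these files define; a sketch of the arithmetic, assuming the values above:

def testrun_timeout = 6                    // hours allotted to converge + verify
def global_timeout = testrun_timeout + 1   // hours the wrappedNode lease lasts
int innerMinutes = testrun_timeout * 60 - 15
assert innerMinutes == 345
// The try block gives up 15 minutes before the test-run budget, so those
// 15 minutes plus the extra hour of the node lease remain for the finally
// blocks: log renaming, artifact download, kitchen destroy, coverage upload.
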
@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

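The per-stage `mv` blocks repeated above exist so the later `archiveArtifacts` globs (`.kitchen/logs/*-create.log`, `*-converge.log`, `*-verify.log`, `*-download.log`) can tell the stages apart. A hypothetical helper that is not in this commit, sketched once to show the pattern:

// Hypothetical: rename both kitchen logs with a stage suffix, if present.
def renameKitchenLogs(String suffix, String pythonVersion, String distroName, String distroVersion) {
sh """
for base in ".kitchen/logs/${pythonVersion}-${distroName}-${distroVersion}" ".kitchen/logs/kitchen"; do
    if [ -s "\${base}.log" ]; then
        mv "\${base}.log" "\${base}-${suffix}.log"
    fi
done
"""
}
// e.g. renameKitchenLogs('create', python_version, distro_name, distro_version)
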
@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@ -9,10 +9,12 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,29 +25,27 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@ -88,7 +88,7 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@ -128,15 +128,18 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}

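Besides the shared-library bump, the macOS file above also changes the inner timeout unit. A side-by-side sketch, assuming testrun_timeout = 6 as defined in the file:

timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') { /* old: 345 min, a 15-minute reserve */ }
timeout(time: testrun_timeout, unit: 'HOURS') { /* new: the full 6 h; only global_timeout's extra hour remains as slack */ }
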
@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@ -9,10 +9,12 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,29 +25,27 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)

wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@ -88,7 +88,7 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@ -128,15 +128,18 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}

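`uploadCodeCoverage` comes from the `salt@master-1.2` shared library; its report name and flags are assembled from the distro and nox env strings. A sketch evaluating the same expressions for this file (macosx, mojave, py3, runtests-zeromq):

def distro_strings = ['macosx', 'mojave']
def report_strings = (['py3'] + 'runtests-zeromq'.split('-')).flatten()
assert report_strings == ['py3', 'runtests', 'zeromq']
assert "${distro_strings.join('-')}-${report_strings.join('-')}" == 'macosx-mojave-py3-runtests-zeromq'
assert ([distro_strings.join('')] + report_strings).flatten() == ['macosxmojave', 'py3', 'runtests', 'zeromq']
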
@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

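`use_spot_instances` is consumed by the Create VM stage shown above: a spot-instance kitchen override is copied into place first, and on failure the half-created instance is destroyed, the override removed, and the create retried on demand. The same shell, re-sketched with comments:

stage('Create VM') {
retry(3) {
sh '''
# 30-120s jitter so parallel builds do not hit the EC2 API at once
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
# request a spot instance via the local kitchen override...
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (
    # ...and fall back to on-demand when the spot request fails
    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM
    rm .kitchen.local.yml
    bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM
); echo "ExitCode: $?";
'''
}
}
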
@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

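The Clone stage's explicit `git fetch` of `${SALT_TARGET_BRANCH}` pairs with `NOX_ENABLE_FROM_FILENAMES=true`: selecting tests from changed filenames needs the target branch available locally as a ref to diff against. A sketch of the idea; the diff command is an assumption about downstream use, not part of this commit:

sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
// hypothetical consumer: list the files a PR touches relative to the target branch
sh 'git diff --name-only origin/${SALT_TARGET_BRANCH}...HEAD'
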
@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

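The Upload Coverage stage repeated in each file only fires on full runs: a partial, changed-files-only run would upload a misleading coverage.xml. The same guard, re-sketched with comments (POSIX `test` with `-a`, as in the source):

sh '''
# Upload only when a full test run was forced and a report actually exists.
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
    # `|| true`: a codecov.io outage must not fail an otherwise green build
    (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
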
@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ-M2Crypto'
def nox_env_name = 'runtests-zeromq-m2crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

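This m2crypto variant shows why the Linux files can keep deriving `NOX_ENV_NAME` from the transport: lower-casing the transport string reproduces the explicit `nox_env_name` literal in every file of this commit. A sketch:

// Derived name matches the literal for both transports used here
['ZeroMQ', 'ZeroMQ-M2Crypto'].each { transport ->
echo "runtests-${transport.toLowerCase()}"
// prints 'runtests-zeromq' and 'runtests-zeromq-m2crypto'
}
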
@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,26 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ-Pycryptodomex'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'Tornado'
def nox_env_name = 'runtests-tornado'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ-M2Crypto'
def nox_env_name = 'runtests-zeromq-m2crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,26 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}

// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}

stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}

try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])

// vim: ft=groovy

@ -1,18 +1,18 @@
|
||||
@Library('salt@1.1') _
|
||||
@Library('salt@master-1.2') _
|
||||
|
||||
// Define the maximum time, in hours, that a test run should run for
|
||||
def testrun_timeout = 6
|
||||
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
|
||||
// hour to allow for artifacts to be downloaded, if possible.
|
||||
def global_timeout = testrun_timeout + 1;
|
||||
|
||||
def distro_name = 'ubuntu'
|
||||
def distro_version = '1604'
|
||||
def python_version = 'py3'
|
||||
def test_transport = 'ZeroMQ-Pycryptodomex'
|
||||
def nox_env_name = 'runtests-zeromq-pycryptodomex'
|
||||
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                cp -f ~/workspace/spot.yml .kitchen.local.yml
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
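
The `milestone` bookkeeping above is what enforces the `concurrent_builds` cap: every build registers its own build number as a milestone, and clearing the milestone `concurrent_builds` positions back makes Jenkins abort the superseded run. A minimal sketch of the pattern, generalized over the cap (variable names follow the diff; the surrounding pipeline context is assumed):

    // Sketch: milestone-based superseding of older in-flight builds.
    def concurrent_builds = 1                  // how many runs may overlap
    def buildNumber = env.BUILD_NUMBER as int

    if (buildNumber > concurrent_builds) {
        // Passing an older ordinal aborts any still-running build that
        // already registered it via milestone(buildNumber) below.
        milestone(buildNumber - concurrent_builds)
    }
    // Register this build's own ordinal so a newer build can abort us.
    milestone(buildNumber)
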
@@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                cp -f ~/workspace/spot.yml .kitchen.local.yml
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
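
On the Linux pipelines, `cp -f ~/workspace/spot.yml .kitchen.local.yml` overlays a Test Kitchen config that requests EC2 spot capacity; when the spot create fails, the subshell tears the instance down, drops the overlay, and retries on demand, all under `retry(3)`. A sketch of that fallback, spelled out step by step (paths and retry count mirror the diff; the multi-line layout is illustrative):

    // Sketch: spot-first create with an on-demand fallback.
    retry(3) {
        sh '''
        # Stagger parallel creates so simultaneous PR builds do not
        # hit EC2 at the same moment.
        t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
        # Overlay a kitchen config that requests spot instances ...
        cp -f ~/workspace/spot.yml .kitchen.local.yml
        # ... and fall back to on-demand pricing if the spot create fails.
        bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (
            bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM
            rm .kitchen.local.yml
            bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM
        )
        '''
    }
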
@@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                cp -f ~/workspace/spot.yml .kitchen.local.yml
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
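
The Clone stage fetches the merge target from the upstream repo so that `origin/${SALT_TARGET_BRANCH}` exists in the freshly cleaned workspace; with `NOX_ENABLE_FROM_FILENAMES=true`, test selection can then be narrowed to the files changed relative to that ref. A sketch of how such a changed-file list could be derived from the fetched ref (the `changedFiles` helper is illustrative and not part of this commit):

    // Sketch: the fetched target ref enables changed-file detection.
    def changedFiles() {
        // Diff the PR head against the fetched target branch; single
        // quotes defer ${SALT_TARGET_BRANCH} expansion to the shell.
        return sh(
            script: 'git diff --name-only origin/${SALT_TARGET_BRANCH}...HEAD',
            returnStdout: true
        ).trim().split('\n')
    }
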
@@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,137 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                cp -f ~/workspace/spot.yml .kitchen.local.yml
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
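
Test Kitchen appends everything to `.kitchen/logs/kitchen.log` and to the per-instance log, so each stage above ends by renaming both with a stage suffix; that is what the final `archiveArtifacts` pattern (`*-create.log`, `*-converge.log`, `*-verify.log`, `*-download.log`) relies on. The repeated shell could be factored into a helper like the sketch below (`rotateKitchenLogs` is illustrative only; the diff inlines the rename in every stage):

    // Sketch: one helper for the per-stage kitchen log rotation.
    // $TEST_SUITE-$TEST_PLATFORM matches the
    // ${python_version}-${distro_name}-${distro_version} log name above.
    def rotateKitchenLogs(String stageName) {
        sh """
        if [ -s ".kitchen/logs/\${TEST_SUITE}-\${TEST_PLATFORM}.log" ]; then
            mv ".kitchen/logs/\${TEST_SUITE}-\${TEST_PLATFORM}.log" ".kitchen/logs/\${TEST_SUITE}-\${TEST_PLATFORM}-${stageName}.log"
        fi
        if [ -s ".kitchen/logs/kitchen.log" ]; then
            mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-${stageName}.log"
        fi
        """
    }
    // Usage after a stage, e.g.: rotateKitchenLogs('converge')
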
@@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,136 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout, unit: 'HOURS') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
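
Note the timeout-unit difference against the Linux jobs: Linux wraps converge/verify in `timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES')`, reserving a 15-minute tail for the `finally` block, while the Windows jobs spend the whole `testrun_timeout` in hours; both stay inside the `global_timeout` (one extra hour) passed to `wrappedNode`. A sketch of the arithmetic, with the values from the Windows jobs:

    // Sketch: how the nested timeout budgets relate.
    def testrun_timeout = 8                    // hours, as in the Windows jobs
    def global_timeout  = testrun_timeout + 1  // node lease: +1h for artifacts

    // Linux variant: stop the test phase 15 minutes early so the finally{}
    // block can rotate logs and download artifacts within the node lease:
    //     timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') { ... }
    // Windows variant: spend the whole-hour budget on the test phase:
    //     timeout(time: testrun_timeout, unit: 'HOURS') { ... }
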
@@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,136 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout, unit: 'HOURS') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
@@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,136 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout, unit: 'HOURS') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
@@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _

// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;

def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'

properties([
    buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -23,136 +23,25 @@ properties([

// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
    // This will cancel the previous build which also defined a matching milestone
    milestone(buildNumber - 1)
    milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)


wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
    withEnv([
        'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
        'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
        'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
        "NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
        'NOX_ENABLE_FROM_FILENAMES=true',
        "NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
        "SALT_TARGET_BRANCH=${salt_target_branch}",
        "GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
        "CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
        'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
        'RBENV_VERSION=2.6.3',
        "TEST_SUITE=${python_version}",
        "TEST_PLATFORM=${distro_name}-${distro_version}",
        "TEST_TRANSPORT=${test_transport}",
        "FORCE_FULL=${params.runFull}",
    ]) {
        // Checkout the repo
        stage('Clone') {
            cleanWs notFailBuild: true
            checkout scm
            sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
        }

        // Setup the kitchen required bundle
        stage('Setup') {
            sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
        }

        stage('Create VM') {
            retry(3) {
                sh '''
                t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
                bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                '''
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
                fi
                """
            }
            sh '''
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
            bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
            '''
        }

        try {
            timeout(time: testrun_timeout, unit: 'HOURS') {
                stage('Converge VM') {
                    sh '''
                    ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
                    '''
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
                    fi
                    """
                }
                stage('Run Tests') {
                    withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
                    }
                }
            }
        } finally {
            try {
                sh """
                if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                    mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
                fi
                if [ -s ".kitchen/logs/kitchen.log" ]; then
                    mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
                fi
                """
                stage('Download Artefacts') {
                    withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]) {
                        sh '''
                        bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
                        '''
                    }
                    sh """
                    if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
                        mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
                    fi
                    if [ -s ".kitchen/logs/kitchen.log" ]; then
                        mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
                    fi
                    """
                }
                archiveArtifacts(
                    artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
                    allowEmptyArchive: true
                )
                junit 'artifacts/xml-unittests-output/*.xml'
            } finally {
                stage('Cleanup') {
                    sh '''
                    bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
                    '''
                }
                stage('Upload Coverage') {
                    script {
                        withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
                            sh '''
                            if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
                                (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
                            fi
                            '''
                        }
                    }
                }
            }
        }
    }
}
runTests(
    env: env,
    distro_name: distro_name,
    distro_version: distro_version,
    python_version: python_version,
    salt_target_branch: salt_target_branch,
    golden_images_branch: golden_images_branch,
    nox_env_name: nox_env_name,
    nox_passthrough_opts: nox_passthrough_opts,
    testrun_timeout: testrun_timeout,
    run_full: params.runFull,
    use_spot_instances: use_spot_instances,
    jenkins_slave_label: jenkins_slave_label)

// vim: ft=groovy
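
Across all of these pipelines, the Upload Coverage stage only contacts Codecov when the run was forced full and a combined report exists, and the trailing `|| true` keeps a Codecov outage from ever failing the build; the token comes from a Jenkins string credential. A sketch of the guard on its own (credential id and paths as in the diffs; the POSIX `&&` form stands in for the deprecated `-a` test operator used above):

    // Sketch: guarded, non-fatal Codecov upload.
    withCredentials([[$class: 'StringBinding',
                      credentialsId: 'codecov-upload-token-salt',
                      variable: 'CODECOV_TOKEN']]) {
        sh '''
        # Upload only full runs that produced a combined report, and never
        # let an upload failure break the build.
        if [ "${FORCE_FULL}" = "true" ] && [ -f artifacts/coverage/coverage.xml ]; then
            (curl -L https://codecov.io/bash | /bin/sh -s -- -R "$(pwd)" -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
        fi
        '''
    }
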
64
.codecov.yml
64
.codecov.yml
@ -6,12 +6,11 @@ codecov:
  branch: master

  notify:
    require_ci_to_pass: no
    require_ci_to_pass: yes  # Less spammy. Only notify on passing builds.

ignore:
  - ^*.py$
  - doc/.*
  - tests/.*
  - ^*.py$   # python files at the repo root, ie, setup.py
  - doc/.*   # ignore any code under doc/

coverage:
  round: up
@ -20,30 +19,61 @@ coverage:

  status:
    project:                   # measuring the overall project coverage
      default:
      default: false           # disable the default status that measures entire project
      salt:                    # declare a new status context "salt"
        enabled: yes           # must be yes|true to enable this status
        if_no_uploads: error   # will post commit status of "error" if no coverage reports we uploaded
        paths: "!tests/"       # remove all files in "tests/"
        target: auto           # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
        base: auto             # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
        if_no_uploads: error   # will post commit status of "error" if no coverage reports were uploaded
                               # options: success, error, failure
        if_not_found: success  # if parent is not found report status as success, error, or failure
        if_ci_failed: success  # if ci fails report status as success, error, or failure
        if_ci_failed: error    # if ci fails report status as success, error, or failure
      tests:                   # declare a new status context "tests"
        enabled: yes           # must be yes|true to enable this status
        #target: 100%          # we always want 100% coverage here
        target: auto           # auto while we get this going
        base: auto             # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
        paths: "!salt/"        # only include coverage in "tests/" folder
        if_no_uploads: error   # will post commit status of "error" if no coverage reports were uploaded
                               # options: success, error, failure
        if_not_found: success  # if parent is not found report status as success, error, or failure
        if_ci_failed: error    # if ci fails report status as success, error, or failure

    patch:                     # pull requests only: this commit status will measure the
                               # entire pull requests Coverage Diff. Checking if the lines
                               # adjusted are covered at least X%.
      default:
        enabled: no            # must be yes|true to enable this status
        target: 80%            # specify the target "X%" coverage to hit
        if_no_uploads: error   # will post commit status of "error" if no coverage reports we uploaded
                               # options: success, error, failure
        enabled: yes           # must be yes|true to enable this status
        target: 100%           # Newly added lines must have 100% coverage
        if_no_uploads: error   # will post commit status of "error" if no coverage reports were uploaded
                               # options: success, error, failure
        if_not_found: success
        if_ci_failed: success
        if_ci_failed: error

    changes:                   # if there are any unexpected changes in coverage
      default:
        enabled: no            # must be yes|true to enable this status
        if_no_uploads: success
        enabled: yes           # must be yes|true to enable this status
        if_no_uploads: error
        if_not_found: success
        if_ci_failed: success
        if_ci_failed: error

# No comments because we're not yet running the full test suite on PRs
comment: off
flags:
  salt:
    paths:
      - salt/
  tests:
    paths:
      - tests/

comment:
  layout: "reach, diff, flags, files"
  after_n_builds: 46  # Only comment on PRs after N builds
                      # This value is the output of:
                      # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'

  behavior: new       # Comment posting behaviour
                      #   default: update, if exists. Otherwise post new.
                      #   once: update, if exists. Otherwise post new. Skip if deleted.
                      #   new: delete old and post new.
                      #   spammy: post new (do not delete old comments).
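A side note on the after_n_builds value above: it has to track the number of kitchen pipeline files under .ci/, and the file documents the shell one-liner that produces it. As a minimal sketch (not part of this commit; count_kitchen_builds is a hypothetical helper), the same count can be recomputed in Python when pipelines are added or removed:

    # Mirrors the documented one-liner:
    #     sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
    import os

    def count_kitchen_builds(ci_dir='.ci'):
        # Count the kitchen pipeline definitions checked into .ci/.
        return sum(1 for name in os.listdir(ci_dir) if 'kitchen' in name)

    print(count_kitchen_builds())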
@ -1,14 +1,11 @@
[run]
branch = True
cover_pylib = False
source =
    salt
parallel = True
concurrency = multiprocessing
omit =
    tests/*.py
    setup.py
    salt/daemons/test/*
    .nox/*

[report]
# Regexes for lines to exclude from consideration
@ -30,7 +27,3 @@ exclude_lines =


ignore_errors = True

[paths]
source =
    salt
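A note on the [run] settings above: with parallel = True and concurrency = multiprocessing, each test process writes its own .coverage.* data file, and those files have to be combined before a single report can be generated. A minimal sketch of that flow (assumed invocation, not taken from this commit):

    # Combine the per-process coverage data files, then emit one XML report.
    # The output path follows the artifacts/ layout used elsewhere in this CI setup.
    import subprocess

    subprocess.check_call(['coverage', 'combine'])
    subprocess.check_call(['coverage', 'xml', '-o', 'artifacts/coverage/coverage.xml'])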
@ -7,7 +7,7 @@ repos:
      alias: compile-linux-py2.7-zmq-requirements
      name: Linux Py2.7 ZeroMQ Requirements
      files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
      exclude: ^requirements/static/(lint|cloud|docs|osx|windows)\.in$
      exclude: ^requirements/static/(lint|cloud|docs|darwin|windows)\.in$
      args:
        - -v
        - --py-version=2.7
@ -17,9 +17,9 @@ repos:
        - --include=requirements/pytest.txt
        - --remove-line=^pycrypto==(.*)$
    - id: pip-tools-compile
      alias: compile-osx-py2.7-zmq-requirements
      name: OSX Py2.7 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
      alias: compile-darwin-py2.7-zmq-requirements
      name: Darwin Py2.7 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
      args:
        - -v
        - --py-version=2.7
@ -68,7 +68,7 @@ repos:
      alias: compile-linux-py3.4-zmq-requirements
      name: Linux Py3.4 ZeroMQ Requirements
      files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
      args:
        - -v
        - --py-version=3.4
@ -90,7 +90,7 @@ repos:
      alias: compile-linux-py3.5-zmq-requirements
      name: Linux Py3.5 ZeroMQ Requirements
      files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
      args:
        - -v
        - --py-version=3.5
@ -100,9 +100,9 @@ repos:
        - --include=requirements/pytest.txt
        - --remove-line=^pycrypto==(.*)$
    - id: pip-tools-compile
      alias: compile-osx-py3.5-zmq-requirements
      name: OSX Py3.5 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
      alias: compile-darwin-py3.5-zmq-requirements
      name: Darwin Py3.5 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
      args:
        - -v
        - --py-version=3.5
@ -150,7 +150,7 @@ repos:
      alias: compile-linux-py3.6-zmq-requirements
      name: Linux Py3.6 ZeroMQ Requirements
      files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
      args:
        - -v
        - --py-version=3.6
@ -160,9 +160,9 @@ repos:
        - --include=requirements/pytest.txt
        - --remove-line=^pycrypto==(.*)$
    - id: pip-tools-compile
      alias: compile-osx-py3.6-zmq-requirements
      name: OSX Py3.6 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
      alias: compile-darwin-py3.6-zmq-requirements
      name: Darwin Py3.6 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
      args:
        - -v
        - --py-version=3.6
@ -210,7 +210,7 @@ repos:
      alias: compile-linux-py3.7-zmq-requirements
      name: Linux Py3.7 ZeroMQ Requirements
      files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
      exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
      args:
        - -v
        - --py-version=3.7
@ -220,9 +220,9 @@ repos:
        - --include=requirements/pytest.txt
        - --remove-line=^pycrypto==(.*)$
    - id: pip-tools-compile
      alias: compile-osx-py3.7-zmq-requirements
      name: OSX Py3.7 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
      alias: compile-darwin-py3.7-zmq-requirements
      name: Darwin Py3.7 ZeroMQ Requirements
      files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
      args:
        - -v
        - --py-version=3.7
@ -391,7 +391,7 @@
#mine_interval: 60

# Windows platforms lack posix IPC and must rely on slower TCP based inter-
# process communications. Set ipc_mode to 'tcp' on such systems
# process communications. ipc_mode is set to 'tcp' on such systems.
#ipc_mode: ipc

# Overwrite the default tcp ports used by the minion when ipc_mode is set to 'tcp'
@ -1,79 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)


compile-translation-catalogs
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Compile the existing translation catalogs.
'''

# Import python libs
import os
import sys
import fnmatch

# Import 3rd-party libs
HAS_BABEL = False
try:
    from babel.messages import mofile, pofile
    HAS_BABEL = True
except ImportError:
    try:
        import polib
    except ImportError:
        print(
            'You need to install either babel or pofile in order to compile '
            'the message catalogs. One of:\n'
            '    pip install babel\n'
            '    pip install polib'
        )
        sys.exit(1)

DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')


def main():
    '''
    Run the compile code
    '''

    print('Gathering the translation catalogs to compile...'),
    sys.stdout.flush()
    entries = {}
    for locale in os.listdir(os.path.join(LOCALES_DIR)):
        if locale == 'pot':
            continue

        locale_path = os.path.join(LOCALES_DIR, locale)
        entries[locale] = []

        for dirpath, _, filenames in os.walk(locale_path):
            for filename in fnmatch.filter(filenames, '*.po'):
                entries[locale].append(os.path.join(dirpath, filename))
    print('DONE')

    for locale, po_files in sorted(entries.items()):
        lc_messages_path = os.path.join(LOCALES_DIR, locale, 'LC_MESSAGES')
        print('\nCompiling the \'{0}\' locale:'.format(locale))
        for po_file in sorted(po_files):
            relpath = os.path.relpath(po_file, lc_messages_path)
            print ' {0}.po -> {0}.mo'.format(relpath.split('.po', 1)[0])
            if HAS_BABEL:
                catalog = pofile.read_po(open(po_file))
                mofile.write_mo(
                    open(po_file.replace('.po', '.mo'), 'wb'), catalog
                )
                continue

            catalog = polib.pofile(po_file)
            catalog.save_as_mofile(fpath=po_file.replace('.po', '.mo'))

    print('Done')


if __name__ == '__main__':
    main()
@ -1,53 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)


download-translation-catalog
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Download a translation catalog from Transifex.
'''

# Import python libs
import os
import sys

# Import 3rd-party libs
try:
    import txclib.utils
except ImportError:
    print(
        'The \'transifex-client\' library needs to be installed. '
        'Please execute one of \'pip install transifex-client\' or '
        '\'easy_install transifex-client\''
    )
    sys.exit(1)

DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')


def main():
    '''
    Run the compile code
    '''

    os.chdir(DOC_DIR)
    tx_root = txclib.utils.find_dot_tx()

    if len(sys.argv) < 2:
        print('You need to pass a locale to this script. For example: '
              'pt_PT, zh_CN, ru, etc...')
        sys.exit(1)

    for locale in sys.argv[1:]:
        print('Download \'{0}\' translations catalog...'.format(locale))
        txclib.utils.exec_command('pull', ['-l', locale], tx_root)

    print('Done')


if __name__ == '__main__':
    main()
@ -1,223 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)


update-transifex-source-translations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Update the transifex sources configuration file and push the source
'''

# Import python libs
import os
import sys
import time
import logging
import subprocess
import ConfigParser

try:
    import txclib.utils
except ImportError:
    sys.stdout.write(
        'The \'transifex-client\' library needs to be installed. '
        'Please execute one of \'pip install transifex-client\' or '
        '\'easy_install transifex-client\'\n'
    )
    sys.exit(1)

DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))


def main():
    '''
    Run the update code
    '''
    os.chdir(DOC_DIR)

    sys.stdout.write('Extracting translatable strings....\n')
    try:
        subprocess.check_call(['make', 'gettext'])
    except subprocess.CalledProcessError as exc:
        sys.stdout.write('An error occurred while extracting the translation '
                         'strings: {0}\n'.format(exc))
        sys.exit(1)

    locale_dir = os.path.join(DOC_DIR, 'locale')
    pot_dir = os.path.join(DOC_DIR, '_build', 'locale')
    tx_root = txclib.utils.find_dot_tx()
    tx_config = os.path.join(tx_root, '.tx', 'config')

    if not tx_root:
        sys.stdout.write(
            'Unable to find the \'.tx/\' directory. Unable to continue\n'
        )
        sys.exit(1)

    # We do not want the txclib INFO or WARNING logging
    logging.getLogger('txclib').setLevel(logging.ERROR)

    sys.stdout.write('Gathering the translation template files...')
    sys.stdout.flush()
    entries = []
    for dirpath, dirnames, filenames in os.walk(pot_dir):
        for filename in filenames:
            pot_file = os.path.join(dirpath, filename)
            base, ext = os.path.splitext(pot_file)
            if ext != '.pot':
                continue
            resource_path = os.path.relpath(base, pot_dir)
            try:
                import babel.messages.pofile
                if not len(babel.messages.pofile.read_po(open(pot_file))):
                    # Empty pot file, continue
                    continue
            except ImportError:
                # No babel package, let's keep on going
                pass

            resource_name = resource_path.replace(
                '\\', '/').replace('/', '--').replace('.', '_')
            entries.append((resource_path, resource_name))
    sys.stdout.write('Done\n')

    # Let's load the resources already present in the configuration file
    cfg = ConfigParser.SafeConfigParser()
    cfg.read([tx_config])
    handled_resources = set(
        section for section in
        cfg.sections() if section.startswith('salt.')
    )

    sys.stdout.write('Updating the entries in \'.tx/config\'...\n')
    sys.stdout.flush()
    total_entries = len(entries)
    for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
        sys.stdout.write(
            '[{0:>{pad}}/{1}] Updating resource for '
            '{resource_path}.pot ({resource_name})'.format(
                idx + 1,
                total_entries,
                pad=len(str(total_entries)),
                locale_dir=locale_dir,
                resource_name=resource_name,
                resource_path=resource_path
            )
        )
        sys.stdout.flush()
        try:
            txclib.utils.exec_command(
                'set',
                '--auto-local -r salt.{resource_name} '
                '{locale_dir}/<lang>/LC_MESSAGES/{resource_path}.po '
                '--source-lang en '
                '--source-file {pot_dir}/{resource_path}.pot '
                '--source-name {resource_path}.rst '
                '--execute'.format(
                    resource_name=resource_name,
                    resource_path=resource_path,
                    locale_dir=locale_dir,
                    pot_dir=pot_dir.rstrip('/')
                ).split(),
                tx_root
            )
            sys.stdout.write('\n')
            if 'salt.{0}'.format(resource_name) in handled_resources:
                handled_resources.remove('salt.{0}'.format(resource_name))
        except Exception as err:
            sys.stdout.write('An error occurred: {0}\n'.format(err))
        except KeyboardInterrupt:
            sys.stdout.write('\n')
            sys.exit(1)
        time.sleep(0.025)

    if handled_resources:
        non_handled_resources = len(handled_resources)
        sys.stdout.write(
            'Removing old resources from configuration and upstream'
            '(if possible)\n'
        )
        for idx, resource_name in enumerate(sorted(handled_resources)):
            sys.stdout.write(
                '[{0:>{pad}}/{1}] Removing resource \'{resource_name}\''.format(
                    idx + 1,
                    non_handled_resources,
                    pad=len(str(non_handled_resources)),
                    resource_name=resource_name,
                )
            )
            sys.stdout.flush()
            try:
                txclib.utils.exec_command(
                    'delete',
                    ['-r', resource_name],
                    tx_root
                )
                handled_resources.remove(resource_name)
            except Exception as err:
                sys.stdout.write('An error occurred: {0}\n'.format(err))
            finally:
                if cfg.has_section(resource_name):
                    cfg.remove_section(resource_name)
            sys.stdout.write('\n')
            time.sleep(0.025)
        cfg.write(open(tx_config, 'w'))
        sys.stdout.write('\n')

    # Set the translations file type we're using
    txclib.utils.exec_command('set', ['-t', 'PO'], tx_root)
    time.sleep(0.025)

    if 'TRANSIFEX_NO_PUSH' not in os.environ:

        sys.stdout.write('\n')
        sys.stdout.write('Pushing translation template files...\n')
        for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
            sys.stdout.write(
                '[{0:>{pad}}/{1}] Pushing resource for '
                '{resource_path}.pot ({resource_name})'.format(
                    idx + 1,
                    total_entries,
                    pad=len(str(total_entries)),
                    locale_dir=locale_dir,
                    resource_name=resource_name,
                    resource_path=resource_path
                )
            )
            sys.stdout.flush()
            try:
                txclib.utils.exec_command(
                    'push',
                    '--resource salt.{resource_name} '
                    '--source '
                    '--skip '
                    '--no-interactive'.format(
                        resource_name=resource_name,
                        resource_path=resource_path,
                        locale_dir=locale_dir
                    ).split(),
                    tx_root
                )
                sys.stdout.write('\n')
            except Exception as err:
                sys.stdout.write('An error occurred: {0}\n'.format(err))
            except KeyboardInterrupt:
                sys.stdout.write('\n')
                sys.exit(1)
            time.sleep(0.025)

    if handled_resources:
        sys.stdout.write('=' * 80)
        sys.stdout.write(
            '\nDon\'t forget to delete the following remote resources:\n')
        for resource_name in sorted(handled_resources):
            sys.stdout.write('    {0}\n'.format(resource_name))
        sys.stdout.write('=' * 80)

    sys.stdout.write('\nDONE\n')


if __name__ == '__main__':
    main()
4834
doc/.tx/config
File diff suppressed because it is too large
82
doc/Makefile
@ -9,29 +9,14 @@ BUILDDIR = _build
SPHINXLANG =
XELATEX = xelatex

# ----- Translations Support ------------------------------------------------>
# If language is set, also set translation options
ifeq ($(shell [ "x$(SPHINXLANG)" != "x" ] && echo 0 || echo 1), 0)
TRANSLATIONOPTS = -D language='$(SPHINXLANG)'
else
TRANSLATIONOPTS =
endif

# Reset settings if sphinx-intl is not available
ifeq ($(shell which sphinx-intl >/dev/null 2>&1; echo $$?), 1)
SPHINXLANG =
TRANSLATIONOPTS =
endif
# <---- Translations Support -------------------------------------------------

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(TRANSLATIONOPTS) $(SPHINXOPTS) .
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations
.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest

help:
    @echo "Please use \`make <target>' where <target> is one of"
@ -53,7 +38,6 @@ help:
    @echo "  man        to make manual pages"
    @echo "  texinfo    to make Texinfo files"
    @echo "  info       to make Texinfo files and run them through makeinfo"
    @echo "  gettext    to make PO message catalogs"
    @echo "  changes    to make an overview of all changed/added/deprecated items"
    @echo "  xml        to make Docutils-native XML files"
    @echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
@ -68,38 +52,38 @@ clean:
check_sphinx-build:
    @which $(SPHINXBUILD) >/dev/null 2>&1 || (echo "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://www.sphinx-doc.org/en/master/)" >&2; false)

html: check_sphinx-build translations
html: check_sphinx-build
    $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
    @echo
    @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml: check_sphinx-build translations
dirhtml: check_sphinx-build
    $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
    @echo
    @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml: check_sphinx-build translations
singlehtml: check_sphinx-build
    $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
    @echo
    @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle: check_sphinx-build translations
pickle: check_sphinx-build
    $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
    @echo
    @echo "Build finished; now you can process the pickle files."

json: check_sphinx-build translations
json: check_sphinx-build
    $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
    @echo
    @echo "Build finished; now you can process the JSON files."

htmlhelp: check_sphinx-build translations
htmlhelp: check_sphinx-build
    $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
    @echo
    @echo "Build finished; now you can run HTML Help Workshop with the" \
          ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp: check_sphinx-build translations
qthelp: check_sphinx-build
    $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
    @echo
    @echo "Build finished; now you can run "qcollectiongenerator" with the" \
@ -108,7 +92,7 @@ qthelp: check_sphinx-build translations
    @echo "To view the help file:"
    @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Salt.qhc"

devhelp: check_sphinx-build translations
devhelp: check_sphinx-build
    $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
    @echo
    @echo "Build finished."
@ -117,31 +101,31 @@ devhelp: check_sphinx-build translations
    @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt"
    @echo "# devhelp"

epub: check_sphinx-build translations
epub: check_sphinx-build
    $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
    @echo
    @echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex: check_sphinx-build translations
latex: check_sphinx-build
    $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
    @echo
    @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
    @echo "Run \`make' in that directory to run these through (pdf)latex" \
          "(use \`make latexpdf' here to do that automatically)."

latexpdf: check_sphinx-build translations
latexpdf: check_sphinx-build
    $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
    @echo "Running LaTeX files through pdflatex..."
    $(MAKE) -C $(BUILDDIR)/latex all-pdf
    @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja: check_sphinx-build translations
latexpdfja: check_sphinx-build
    $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
    @echo "Running LaTeX files through platex and dvipdfmx..."
    $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
    @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

pdf: check_sphinx-build translations
pdf: check_sphinx-build
    @if [ "$(XELATEX)" = "xelatex" ] || [ "x$(XELATEX)" = "x" ]; then \
        echo "The '$(XELATEX)' command was not found."; \
    fi
@ -150,40 +134,35 @@ pdf: check_sphinx-build translations
    $(MAKE) -C $(BUILDDIR)/latex -i "PDFLATEX=latexmk" "LATEXMKOPTS=-xelatex -interaction=nonstopmode -f -quiet"
    @echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex."

cheatsheet: translations
cheatsheet:
    @echo "Running cheatsheet/salt.tex file through xelatex..."
    cd cheatsheet && xelatex salt.tex && cp salt.pdf ../salt-cheatsheet.pdf
    @echo "./salt-cheatsheet.pdf created."

text: check_sphinx-build translations
text: check_sphinx-build
    $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
    @echo
    @echo "Build finished. The text files are in $(BUILDDIR)/text."

man: check_sphinx-build translations
man: check_sphinx-build
    $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
    @echo
    @echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo: check_sphinx-build translations
texinfo: check_sphinx-build
    $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
    @echo
    @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
    @echo "Run \`make' in that directory to run these through makeinfo" \
          "(use \`make info' here to do that automatically)."

info: check_sphinx-build translations
info: check_sphinx-build
    $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
    @echo "Running Texinfo files through makeinfo..."
    make -C $(BUILDDIR)/texinfo info
    @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext: check_sphinx-build
    $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
    @echo
    @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale"

changes: check_sphinx-build translations
changes: check_sphinx-build
    $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
    @echo
    @echo "The overview file is in $(BUILDDIR)/changes."
@ -205,27 +184,12 @@ doctest: check_sphinx-build
    @echo "Testing of doctests in the sources finished, look at the " \
          "results in $(BUILDDIR)/doctest/output.txt."

xml: check_sphinx-build translations
xml: check_sphinx-build
    $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
    @echo
    @echo "Build finished. The XML files are in $(BUILDDIR)/xml."

pseudoxml: check_sphinx-build translations
pseudoxml: check_sphinx-build
    $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
    @echo
    @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

translations:
    @if [ "$(SPHINXLANG)" = "en" ] || [ "x$(SPHINXLANG)" = "x" ]; then \
        echo "No need to update translations. Skipping..."; \
    elif [ ! -d locale/$(SPHINXLANG) ]; then \
        echo "The locale directory for $(SPHINXLANG) does not exist"; \
        exit 1; \
    else \
        echo "Compiling exising message catalog for '$(SPHINXLANG)'"; \
        .scripts/compile-translation-catalogs; \
    fi

download-translations:
    @echo "Downloading $(SPHINXLANG) translations"
    .scripts/download-translation-catalog $(SPHINXLANG)
14
doc/_themes/saltstack/layout.html
vendored
@ -264,20 +264,6 @@
    <script type="text/javascript" language="javascript">llactid=23943</script>
    <script type="text/javascript" language="javascript" src="http://t6.trackalyzer.com/trackalyze.js"></script>

    <script>
      var _gaq = _gaq || [];
      var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
      _gaq.push(['_require', 'inpage_linkid', pluginUrl]);
      _gaq.push(['_setAccount', 'UA-26984928-1']);
      _gaq.push(['_setDomainName', 'saltstack.com']);
      _gaq.push(['_trackPageview']);

      (function() {
        var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
        ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
        var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
      })();
    </script>
{% endif %}
</body>
</html>
30
doc/_themes/saltstack2/layout.html
vendored
@ -52,7 +52,8 @@
        SEARCH_CX: '{{ search_cx }}',
        COLLAPSE_INDEX: false,
        FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}',
        HAS_SOURCE: '{{ has_source|lower }}'
        HAS_SOURCE: '{{ has_source|lower }}',
        REPO_PRIMARY_BRANCH_TAB_NAME: '{{ repo_primary_branch | capitalize }}'
      };
    </script>
    {%- for scriptfile in script_files %}
@ -184,9 +185,12 @@
    <ul class="nav navbar-nav">
      {%- block relbar_small %}{{ relbar() }}{% endblock %}

      {% if not (build_type == "{{ repo_primary_branch }}" or build_type == "next") and on_saltstack %}
      {% if not (build_type == repo_primary_branch or build_type == "next") and on_saltstack %}
      <li><a class="icon-dl" href="/en/pdf/Salt-{{ release }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
      <li><a class="icon-dl" href="/en/epub/Salt-{{ release }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
      {% elif build_type == repo_primary_branch and on_saltstack %}
      <li><a class="icon-dl" href="/en/pdf/Salt-{{ repo_primary_branch }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
      <li><a class="icon-dl" href="/en/epub/Salt-{{ repo_primary_branch }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
      {% endif %}
    </ul>
  </nav>
@ -285,12 +289,12 @@
  <!-- <a class="ss-logo" href="http://saltstack.com"><img width="250" height="63" class="nolightbox sidebar-logo" src="{{ pathto('_static/images/saltstack_logo.svg', 1) }}"></a>

  {% if on_saltstack %}

  {#
  {% if [True, False]|random %}
  <a href="http://saltconf.com/register" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
  {% else %}
  <a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
  {% endif %}-->
  {% endif %} #}-->
  <a href="https://saltconf.com/menu-ad" target="_blank"><img class="nolightbox sidebar-banner center" src="https://get.saltstack.com/rs/304-PHQ-615/images/Salt-docs-menu-ad-250x63.jpg"/></a>


@ -304,11 +308,11 @@

  <div class="releaselinks versions {{ build_type }}">

    <a class="btn btn-secondary{% if build_type == "previous" or build_type == "inactive" %} active{% endif %}" id="previous"{% if build_type == "previous" or build_type == "inactive" %} title="View release notes"{% else %} title="Switch to docs for the previous stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ previous_release_dir }}/">{{ previous_release }}{% if build_type == "previous" or build_type == "inactive" %} <i class="glyphicon glyphicon-ok"></i>{%- endif %}</a>
    <a class="btn btn-secondary{% if build_type == "previous" or build_type == "inactive" %} active{% endif %}" id="previous"{% if build_type == "previous" or build_type == "inactive" %} title="View release notes"{% else %} title="Switch to docs for the previous stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ previous_release_dir }}/">{{ previous_release }}{% if build_type == "previous" or build_type == "inactive" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>

    <a class="btn btn-secondary{% if build_type == "latest" %} active{% endif %}" id="latest"{% if build_type == "latest" %} title="View release notes"{% else %} title="Switch to docs for the latest stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/latest/">{{ latest_release }}{% if build_type == "latest" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>

    <a class="btn btn-secondary{% if build_type == "{{ repo_primary_branch }}" %} active{% endif %}" id="{{ repo_primary_branch }}"{% if build_type == "{{ repo_primary_branch }}" %} title="View all release notes"{% endif %} title="Switch to docs built recently from the {{ repo_primary_branch }} branch" data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch | capitalize }}{% if build_type == "{{ repo_primary_branch }}" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
    <a class="btn btn-secondary{% if build_type == repo_primary_branch %} active{% endif %}" id="{{ repo_primary_branch }}"{% if build_type == repo_primary_branch %} title="View all release notes"{% endif %} title="Switch to docs built recently from the {{ repo_primary_branch }} branch" data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch | capitalize }}{% if build_type == repo_primary_branch %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>

  </div>

@ -376,20 +380,6 @@
    <script type="text/javascript" language="javascript">llactid=23943</script>
    <script type="text/javascript" language="javascript" src="https://trackalyzer.com/trackalyze_secure.js"></script>

    <script>
      var _gaq = _gaq || [];
      var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
      _gaq.push(['_require', 'inpage_linkid', pluginUrl]);
      _gaq.push(['_setAccount', 'UA-26984928-1']);
      _gaq.push(['_setDomainName', 'saltstack.com']);
      _gaq.push(['_trackPageview']);

      (function() {
        var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
        ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
        var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
      })();
    </script>
{% endif %}
</body>
</html>
@ -125,7 +125,7 @@ $( document ).ready(function() {
            window.location.href = window.location.href.replace($currentVer.attr("href"), clickedVer);
        }
        else {
            if ($currentVer.text().indexOf("Develop") == -1) {
            if ($currentVer.text().indexOf(DOCUMENTATION_OPTIONS.REPO_PRIMARY_BRANCH_TAB_NAME) == -1) {
                window.location.href = clickedVer + "topics/releases/" + $currentVer.text().trim() + ".html";
            }
            else window.location.href = clickedVer + "topics/releases/";
33
doc/conf.py
@ -268,22 +268,31 @@ if on_saltstack:
    copyright = time.strftime("%Y")

# < --- START do not merge these settings to other branches START ---> #
build_type = 'latest'  # latest, previous, develop, next
release = latest_release
build_type = repo_primary_branch  # latest, previous, master, next
# < --- END do not merge these settings to other branches END ---> #

# Set google custom search engine

if release == latest_release:
if build_type == repo_primary_branch:
    release = latest_release
    search_cx = '011515552685726825874:v1had6i279q'  # master
    #search_cx = '011515552685726825874:x17j5zl74g8'  # develop
elif build_type == 'next':
    release = next_release
    search_cx = '011515552685726825874:ht0p8miksrm'  # latest
elif build_type == 'previous':
    release = previous_release
    if release.startswith('2018.3'):
        search_cx = '011515552685726825874:vadptdpvyyu'  # 2018.3
    elif release.startswith('2017.7'):
        search_cx = '011515552685726825874:w-hxmnbcpou'  # 2017.7
    elif release.startswith('2016.11'):
        search_cx = '011515552685726825874:dlsj745pvhq'  # 2016.11
    else:
        search_cx = '011515552685726825874:ht0p8miksrm'  # latest
else:  # latest or something else
    release = latest_release
    search_cx = '011515552685726825874:ht0p8miksrm'  # latest
elif release.startswith('2018.3'):
    search_cx = '011515552685726825874:vadptdpvyyu'  # 2018.3
elif release.startswith('2017.7'):
    search_cx = '011515552685726825874:w-hxmnbcpou'  # 2017.7
elif release.startswith('2016.11'):
    search_cx = '011515552685726825874:dlsj745pvhq'  # 2016.11
else:
    search_cx = '011515552685726825874:x17j5zl74g8'  # develop

needs_sphinx = '1.3'

@ -365,7 +374,7 @@ rst_prolog = """\

# A shortcut for linking to tickets on the GitHub issue tracker
extlinks = {
    'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % 'develop', None),
    'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % repo_primary_branch, None),
    'issue': ('https://github.com/saltstack/salt/issues/%s', 'issue #'),
    'pull': ('https://github.com/saltstack/salt/pull/%s', 'PR #'),
    'formula_url': ('https://github.com/saltstack-formulas/%s', ''),
@ -769,6 +769,30 @@ Statically assigns grains to the minion.
      cabinet: 13
      cab_u: 14-15

.. conf_minion:: grains_blacklist

``grains_blacklist``
--------------------

Default: ``[]``

Each grains key will be compared against each of the expressions in this list.
Any keys which match will be filtered from the grains. Exact matches, glob
matches, and regular expressions are supported.

.. note::
    Some states and execution modules depend on grains. Filtering may cause
    them to be unavailable or run unreliably.

.. versionadded:: Neon

.. code-block:: yaml

    grains_blacklist:
        - cpu_flags
        - zmq*
        - ipv[46]

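As an illustration of the matching rules described above, the following sketch
shows how such a blacklist could be applied to a grains dictionary. This is
only a minimal sketch, not Salt's actual implementation; ``filter_grains`` is
a hypothetical helper.

.. code-block:: python

    import fnmatch
    import re

    def filter_grains(grains, blacklist):
        # A key is dropped when it matches a blacklist entry exactly,
        # as a glob, or as a regular expression.
        def blacklisted(key):
            for expr in blacklist:
                if key == expr or fnmatch.fnmatch(key, expr):
                    return True
                try:
                    if re.match(expr, key):
                        return True
                except re.error:
                    pass
            return False
        return {key: value for key, value in grains.items()
                if not blacklisted(key)}

    # filter_grains({'cpu_flags': ['sse'], 'zmq_version': '4.3.1', 'ipv4': []},
    #               ['cpu_flags', 'zmq*', 'ipv[46]'])
    # drops all three keys and returns {}
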
.. conf_minion:: grains_cache

``grains_cache``
@ -893,6 +917,20 @@ minion. Since this grain is expensive, it is disabled by default.

    iscsi_grains: True

.. conf_minion:: nvme_grains

``nvme_grains``
------------------------

Default: ``False``

The ``nvme_grains`` setting will enable the ``nvme_nqn`` grain on the
minion. Since this grain is expensive, it is disabled by default.

.. code-block:: yaml

    nvme_grains: True

.. conf_minion:: mine_enabled

``mine_enabled``
@ -1326,7 +1364,7 @@ creates a new connection for every return to the master.

Default: ``ipc``

Windows platforms lack POSIX IPC and must rely on slower TCP based inter-
process communications. Set ipc_mode to ``tcp`` on such systems.
process communications. ``ipc_mode`` is set to ``tcp`` on such systems.

.. code-block:: yaml

@ -1,97 +0,0 @@
Translating Documentation
=========================

If you wish to help translate the Salt documentation to your language, please
head over to the `Transifex`_ website and `signup`__ for an account.

Once registered, head over to the `Salt Translation Project`__, and either
click on **Request Language** if you can't find yours, or, select the language
for which you wish to contribute and click **Join Team**.

`Transifex`_ provides some useful reading resources on their `support
domain`__, namely, some useful articles `directed to translators`__.


.. __: https://www.transifex.com/signup/
.. __: https://www.transifex.com/projects/p/salt/
.. __: http://support.transifex.com/
.. __: http://support.transifex.com/customer/portal/topics/414107-translators/articles


Building A Localized Version of the Documentation
-------------------------------------------------

While you're working on your translation on `Transifex`_, you might want to
have a look at how it's rendering.


Install The Transifex Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To interact with the `Transifex`_ web service you will need to install the
`transifex-client`__:

.. code-block:: bash

    pip install transifex-client


.. __: https://github.com/transifex/transifex-client



Configure The Transifex Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Once installed, you will need to set it up on your computer. We created a
script to help you with that:

.. code-block:: bash

    .scripts/setup-transifex-config



Download Remote Translations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

There's a little script which simplifies the download process of the
translations (which isn't that complicated in the first place).
So, let's assume you're translating ``pt_PT``, Portuguese (Portugal). To
download the translations, execute from the ``doc/`` directory of your Salt
checkout:

.. code-block:: bash

    make download-translations SPHINXLANG=pt_PT


To download ``pt_PT``, Portuguese (Portugal), and ``nl``, Dutch, you can use
the helper script directly:

.. code-block:: bash

    .scripts/download-translation-catalog pt_PT nl


Build Localized Documentation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

After the download process finishes, which might take a while, the next step is
to build a localized version of the documentation.
Following the ``pt_PT`` example above:

.. code-block:: bash

    make html SPHINXLANG=pt_PT


View Localized Documentation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Open your browser, point it to the local documentation path and check the
localized output you've just built.


.. _`Transifex`: https://www.transifex.com
@ -929,11 +929,12 @@ you can now configure which type numbers indicate a login and logout.
See the :py:mod:`wtmp beacon documentation <salt.beacons.wtmp>` for more
information.

Deprecations
============

API Deprecations
----------------
Deprecated and Removed Options
==============================

API Removed Arguments
---------------------

Support for :ref:`LocalClient <local-client>`'s ``expr_form`` argument has
been removed. Please use ``tgt_type`` instead. This change was made due to
@ -952,14 +953,14 @@ their code to use ``tgt_type``.
    >>> local.cmd('*', 'cmd.run', ['whoami'], tgt_type='glob')
    {'jerry': 'root'}

Minion Configuration Deprecations
---------------------------------
Minion Configuration Deprecated Option
--------------------------------------

The :conf_minion:`master_shuffle` configuration option is deprecated as of the
``2019.2.0`` release. Please use the :conf_minion:`random_master` option instead.

Module Deprecations
-------------------
Module Removed Options
----------------------

- The :py:mod:`napalm_network <salt.modules.napalm_network>` module has been
  changed as follows:
@ -1011,8 +1012,8 @@ Module Deprecations
  functions have been removed. Please use :py:func:`win_wua.list
  <salt.modules.win_wua.list_>` instead.

Pillar Deprecations
-------------------
Pillar Removed Option
---------------------

- The :py:mod:`vault <salt.pillar.vault>` external pillar has been changed as
  follows:
@ -1020,8 +1021,8 @@ Pillar Deprecations

  - Support for the ``profile`` argument was removed. Any options passed up
    until and following the first ``path=`` are discarded.

Roster Deprecations
-------------------
Roster Removed Option
---------------------

- The :py:mod:`cache <salt.roster.cache>` roster has been changed as follows:

@ -1032,8 +1033,8 @@ Roster Deprecations
    ``private``, ``public``, ``global`` or ``local`` settings. The syntax for
    these settings has changed to ``ipv4-*`` or ``ipv6-*``, respectively.

State Deprecations
------------------
State Removed Modules and Options
---------------------------------

- The ``docker`` state module has been removed

@ -1126,8 +1127,8 @@ State Deprecations
  - Support for virtual packages has been removed from the
    py:mod:`pkg state <salt.states.pkg>`.

Utils Deprecations
------------------
Utils Removed Options
---------------------

The ``cloud`` utils module had the following changes:

@ -1151,7 +1152,7 @@ been deprecated in favor of ``pypsexec``.
Salt-Cloud has deprecated the use of ``impacket`` in favor of ``smbprotocol``.
This change was made because ``impacket`` is not compatible with Python 3.

SaltSSH major updates
SaltSSH Major Updates
=====================

SaltSSH now works across different major Python versions. Python 2.7 ~ Python 3.x
208
noxfile.py
@ -15,6 +15,7 @@ import json
|
||||
import pprint
|
||||
import shutil
|
||||
import tempfile
|
||||
import datetime
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.stderr.write('Do not execute this file directly. Use nox instead, it will know how to handle this file\n')
|
||||
@ -34,8 +35,8 @@ PIP_INSTALL_SILENT = (os.environ.get('JENKINS_URL') or os.environ.get('CI') or o
|
||||
# Global Path Definitions
|
||||
REPO_ROOT = os.path.abspath(os.path.dirname(__file__))
|
||||
SITECUSTOMIZE_DIR = os.path.join(REPO_ROOT, 'tests', 'support', 'coverage')
|
||||
IS_DARWIN = sys.platform.lower().startswith('darwin')
|
||||
IS_WINDOWS = sys.platform.lower().startswith('win')
|
||||
|
||||
# Python versions to run against
|
||||
_PYTHON_VERSIONS = ('2', '2.7', '3', '3.4', '3.5', '3.6', '3.7')
|
||||
|
||||
@ -45,10 +46,18 @@ nox.options.reuse_existing_virtualenvs = True
|
||||
# Don't fail on missing interpreters
|
||||
nox.options.error_on_missing_interpreters = False
|
||||
|
||||
# Change current directory to REPO_ROOT
|
||||
os.chdir(REPO_ROOT)
|
||||
|
||||
RUNTESTS_LOGFILE = os.path.join(
|
||||
'artifacts', 'logs',
|
||||
'runtests-{}.log'.format(datetime.datetime.now().strftime('%Y%m%d%H%M%S.%f'))
|
||||
)
|
||||
|
||||
|
||||
def _create_ci_directories():
|
||||
for dirname in ('logs', 'coverage', 'xml-unittests-output'):
|
||||
path = os.path.join(REPO_ROOT, 'artifacts', dirname)
|
||||
path = os.path.join('artifacts', dirname)
|
||||
if not os.path.exists(path):
|
||||
os.makedirs(path)
|
||||
|
||||
@ -186,20 +195,43 @@ def _get_distro_pip_constraints(session, transport):
|
||||
pydir = _get_pydir(session)
|
||||
|
||||
if IS_WINDOWS:
|
||||
_distro_constraints = os.path.join(REPO_ROOT,
|
||||
'requirements',
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'{}-windows.txt'.format(transport))
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
_distro_constraints = os.path.join(REPO_ROOT,
|
||||
'requirements',
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'windows.txt')
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'windows-crypto.txt')
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
elif IS_DARWIN:
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'{}-darwin.txt'.format(transport))
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'darwin.txt')
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'darwin-crypto.txt')
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
else:
|
||||
_install_system_packages(session)
|
||||
distro = _get_distro_info(session)
|
||||
@ -210,20 +242,31 @@ def _get_distro_pip_constraints(session, transport):
|
||||
'{id}-{version_parts[major]}'.format(**distro)
|
||||
]
|
||||
for distro_key in distro_keys:
|
||||
_distro_constraints = os.path.join(REPO_ROOT,
|
||||
'requirements',
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'{}.txt'.format(distro_key))
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
_distro_constraints = os.path.join(REPO_ROOT,
|
||||
'requirements',
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'{}-crypto.txt'.format(distro_key))
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'{}-{}.txt'.format(transport, distro_key))
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
distro_constraints.append(_distro_constraints)
|
||||
_distro_constraints = os.path.join('requirements',
|
||||
'static',
|
||||
pydir,
|
||||
'{}-{}-crypto.txt'.format(transport, distro_key))
|
||||
if os.path.exists(_distro_constraints):
|
||||
distro_constraints.append(_distro_constraints)
|
||||
return distro_constraints
|
||||
|
||||
|
||||
@ -232,24 +275,24 @@ def _install_requirements(session, transport, *extra_requirements):
|
||||
distro_constraints = _get_distro_pip_constraints(session, transport)
|
||||
|
||||
_requirements_files = [
|
||||
os.path.join(REPO_ROOT, 'requirements', 'base.txt'),
|
||||
os.path.join(REPO_ROOT, 'requirements', 'zeromq.txt'),
|
||||
os.path.join(REPO_ROOT, 'requirements', 'pytest.txt')
|
||||
os.path.join('requirements', 'base.txt'),
|
||||
os.path.join('requirements', 'zeromq.txt'),
|
||||
os.path.join('requirements', 'pytest.txt')
|
||||
]
|
||||
if sys.platform.startswith('linux'):
|
||||
requirements_files = [
|
||||
os.path.join(REPO_ROOT, 'requirements', 'static', 'linux.in')
|
||||
os.path.join('requirements', 'static', 'linux.in')
|
||||
]
|
||||
elif sys.platform.startswith('win'):
|
||||
requirements_files = [
|
||||
os.path.join(REPO_ROOT, 'pkg', 'windows', 'req.txt'),
|
||||
os.path.join(REPO_ROOT, 'requirements', 'static', 'windows.in')
|
||||
os.path.join('pkg', 'windows', 'req.txt'),
|
||||
os.path.join('requirements', 'static', 'windows.in')
|
||||
]
|
||||
elif sys.platform.startswith('darwin'):
|
||||
requirements_files = [
|
||||
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req.txt'),
|
||||
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req_ext.txt'),
|
||||
os.path.join(REPO_ROOT, 'requirements', 'static', 'osx.in')
|
||||
os.path.join('pkg', 'osx', 'req.txt'),
|
||||
os.path.join('pkg', 'osx', 'req_ext.txt'),
|
||||
os.path.join('requirements', 'static', 'osx.in')
|
||||
]
|
||||
|
||||
while True:
|
||||
@ -328,7 +371,20 @@ def _run_with_coverage(session, *test_cmd):
|
||||
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
|
||||
# exception
|
||||
pass
|
||||
session.run('coverage', 'xml', '-o', os.path.join(REPO_ROOT, 'artifacts', 'coverage', 'coverage.xml'))
|
||||
# Generate report for salt code coverage
|
||||
session.run(
|
||||
'coverage', 'xml',
|
||||
'-o', os.path.join('artifacts', 'coverage', 'salt.xml'),
|
||||
'--omit=tests/*',
|
||||
'--include=salt/*'
|
||||
)
|
||||
# Generate report for tests code coverage
|
||||
session.run(
|
||||
'coverage', 'xml',
|
||||
'-o', os.path.join('artifacts', 'coverage', 'tests.xml'),
|
||||
'--omit=salt/*',
|
||||
'--include=tests/*'
|
||||
)
|
||||
|
||||
|
||||
def _runtests(session, coverage, cmd_args):
|
||||
@ -418,9 +474,7 @@ def runtests_parametrized(session, coverage, transport, crypto):
|
||||
session.install(*install_command, silent=PIP_INSTALL_SILENT)
|
||||
|
||||
cmd_args = [
|
||||
'--tests-logfile={}'.format(
|
||||
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
|
||||
),
|
||||
'--tests-logfile={}'.format(RUNTESTS_LOGFILE),
|
||||
'--transport={}'.format(transport)
|
||||
] + session.posargs
|
||||
_runtests(session, coverage, cmd_args)
|
||||
@ -559,14 +613,12 @@ def runtests_cloud(session, coverage):
_install_requirements(session, 'zeromq', 'unittest-xml-reporting==2.2.1')

pydir = _get_pydir(session)
cloud_requirements = os.path.join(REPO_ROOT, 'requirements', 'static', pydir, 'cloud.txt')
cloud_requirements = os.path.join('requirements', 'static', pydir, 'cloud.txt')

session.install('--progress-bar=off', '-r', cloud_requirements, silent=PIP_INSTALL_SILENT)

cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE),
'--cloud-provider-tests'
] + session.posargs
_runtests(session, coverage, cmd_args)
@ -581,9 +633,7 @@ def runtests_tornado(session, coverage):
session.install('--progress-bar=off', 'pyzmq==17.0.0', silent=PIP_INSTALL_SILENT)

cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE)
] + session.posargs
_runtests(session, coverage, cmd_args)

@ -614,9 +664,8 @@ def pytest_parametrized(session, coverage, transport, crypto):

cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
@ -757,19 +806,18 @@ def pytest_cloud(session, coverage):
# Install requirements
_install_requirements(session, 'zeromq')
pydir = _get_pydir(session)
cloud_requirements = os.path.join(REPO_ROOT, 'requirements', 'static', pydir, 'cloud.txt')
cloud_requirements = os.path.join('requirements', 'static', pydir, 'cloud.txt')

session.install('--progress-bar=off', '-r', cloud_requirements, silent=PIP_INSTALL_SILENT)

cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
os.path.join(REPO_ROOT, 'tests', 'integration', 'cloud', 'providers')
os.path.join('tests', 'integration', 'cloud', 'providers')
] + session.posargs
_pytest(session, coverage, cmd_args)

@ -784,9 +832,8 @@ def pytest_tornado(session, coverage):

cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
@ -815,7 +862,18 @@ def _pytest(session, coverage, cmd_args):

def _lint(session, rcfile, flags, paths):
_install_requirements(session, 'zeromq')
session.install('--progress-bar=off', '-r', 'requirements/static/{}/lint.txt'.format(_get_pydir(session)), silent=PIP_INSTALL_SILENT)
requirements_file = 'requirements/static/lint.in'
distro_constraints = [
'requirements/static/{}/lint.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
session.run('pylint', '--version')
pylint_report_path = os.environ.get('PYLINT_REPORT')

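The new pattern, repeated in the docs sessions below, installs from a loose .in requirements file while pinning versions through a per-Python constraints file. For a hypothetical py3.6 session the loop assembles this pip argument list (illustration only; 'py3.6' stands in for whatever _get_pydir() returns):

requirements_file = 'requirements/static/lint.in'
distro_constraints = ['requirements/static/py3.6/lint.txt']
install_command = ['--progress-bar=off', '-r', requirements_file]
for distro_constraint in distro_constraints:
    install_command.extend(['--constraint', distro_constraint])
print(install_command)
# ['--progress-bar=off', '-r', 'requirements/static/lint.in',
#  '--constraint', 'requirements/static/py3.6/lint.txt']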
@ -889,19 +947,73 @@ def lint_tests(session):


@nox.session(python='3')
def docs(session):
@nox.parametrize('update', [False, True])
@nox.parametrize('compress', [False, True])
def docs(session, compress, update):
'''
Build Salt's Documentation
'''
session.notify('docs-html(compress={})'.format(compress))
session.notify('docs-man(compress={}, update={})'.format(compress, update))


@nox.session(name='docs-html', python='3')
@nox.parametrize('compress', [False, True])
def docs_html(session, compress):
'''
Build Salt's HTML Documentation
'''
pydir = _get_pydir(session)
if pydir == 'py3.4':
session.error('Sphinx only runs on Python >= 3.5')
session.install(
'--progress-bar=off',
'-r', 'requirements/static/{}/docs.txt'.format(pydir),
silent=PIP_INSTALL_SILENT)
requirements_file = 'requirements/static/docs.in'
distro_constraints = [
'requirements/static/{}/docs.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
os.chdir('doc/')
session.run('make', 'clean', external=True)
session.run('make', 'html', 'SPHINXOPTS=-W', external=True)
session.run('tar', '-czvf', 'doc-archive.tar.gz', '_build/html')
if compress:
session.run('tar', '-czvf', 'html-archive.tar.gz', '_build/html', external=True)
os.chdir('..')


@nox.session(name='docs-man', python='3')
@nox.parametrize('update', [False, True])
@nox.parametrize('compress', [False, True])
def docs_man(session, compress, update):
'''
Build Salt's Manpages Documentation
'''
pydir = _get_pydir(session)
if pydir == 'py3.4':
session.error('Sphinx only runs on Python >= 3.5')
requirements_file = 'requirements/static/docs.in'
distro_constraints = [
'requirements/static/{}/docs.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
os.chdir('doc/')
session.run('make', 'clean', external=True)
session.run('make', 'man', 'SPHINXOPTS=-W', external=True)
if update:
session.run('rm', '-rf', 'man/', external=True)
session.run('cp', '-Rp', '_build/man', 'man/', external=True)
if compress:
session.run('tar', '-czvf', 'man-archive.tar.gz', '_build/man', external=True)
os.chdir('..')

2
requirements/static/crypto.in
Normal file
@ -0,0 +1,2 @@
m2crypto
pycryptodomex
10
requirements/static/py2.7/darwin-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py2.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog
10
requirements/static/py2.7/linux-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
10
requirements/static/py2.7/windows-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
10
requirements/static/py3.5/darwin-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.5/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog
10
requirements/static/py3.5/windows-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
10
requirements/static/py3.6/darwin-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.6/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog
10
requirements/static/py3.6/windows-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
10
requirements/static/py3.7/darwin-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog
10
requirements/static/py3.7/windows-crypto.txt
Normal file
@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto
@ -925,6 +925,9 @@ VALID_OPTS = {
# Set a hard limit for the amount of memory modules can consume on a minion.
'modules_max_memory': int,

# Blacklist specific core grains to be filtered
'grains_blacklist': list,

# The number of minutes between the minion refreshing its cache of grains
'grains_refresh_every': int,

@ -1222,6 +1225,7 @@ DEFAULT_MINION_OPTS = {
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'minion'),
'append_minionid_config_dirs': [],
'cache_jobs': False,
'grains_blacklist': [],
'grains_cache': False,
'grains_cache_expiration': 300,
'grains_deep_merge': False,

64
salt/grains/nvme.py
Normal file
@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
'''
Grains for NVMe Qualified Names (NQN).

.. versionadded:: Fluorine

To enable these grains set `nvme_grains: True`.

.. code-block:: yaml

nvme_grains: True
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals

import errno
import logging

# Import Salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.platform

__virtualname__ = 'nvme'

# Get logging started
log = logging.getLogger(__name__)


def __virtual__():
if __opts__.get('nvme_grains', False) is False:
return False
return __virtualname__


def nvme_nqn():
'''
Return NVMe NQN
'''
grains = {}
grains['nvme_nqn'] = False
if salt.utils.platform.is_linux():
grains['nvme_nqn'] = _linux_nqn()
return grains


def _linux_nqn():
'''
Return NVMe NQN from a Linux host.
'''
ret = []

initiator = '/etc/nvme/hostnqn'
try:
with salt.utils.files.fopen(initiator, 'r') as _nvme:
for line in _nvme:
line = line.strip()
if line.startswith('nqn.'):
ret.append(line)
except IOError as ex:
if ex.errno != errno.ENOENT:
log.debug("Error while accessing '%s': %s", initiator, ex)

return ret
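For readers outside of Salt, _linux_nqn() boils down to the following standalone sketch; plain open() stands in for salt.utils.files.fopen, and the /etc/nvme/hostnqn path is the same one used above:

import errno

def read_nqn(path='/etc/nvme/hostnqn'):
    nqns = []
    try:
        with open(path) as handle:
            for line in handle:
                line = line.strip()
                if line.startswith('nqn.'):
                    nqns.append(line)
    except IOError as exc:
        # A missing file simply means no NVMe initiator is configured.
        if exc.errno != errno.ENOENT:
            print("Error while accessing '{0}': {1}".format(path, exc))
    return nqns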
@ -34,6 +34,7 @@ import salt.utils.lazy
import salt.utils.odict
import salt.utils.platform
import salt.utils.versions
import salt.utils.stringutils
from salt.exceptions import LoaderError
from salt.template import check_render_pipe_str
from salt.utils.decorators import Depends
@ -773,6 +774,7 @@ def grains(opts, force_refresh=False, proxy=None):
opts['grains'] = {}

grains_data = {}
blist = opts.get('grains_blacklist', [])
funcs = grain_funcs(opts, proxy=proxy)
if force_refresh:  # if we refresh, lets reload grain modules
funcs.clear()
@ -784,6 +786,14 @@ def grains(opts, force_refresh=False, proxy=None):
ret = funcs[key]()
if not isinstance(ret, dict):
continue
if blist:
for key in list(ret):
for block in blist:
if salt.utils.stringutils.expr_match(key, block):
del ret[key]
log.trace('Filtering %s grain', key)
if not ret:
continue
if grains_deep_merge:
salt.utils.dictupdate.update(grains_data, ret)
else:
@ -819,6 +829,14 @@ def grains(opts, force_refresh=False, proxy=None):
continue
if not isinstance(ret, dict):
continue
if blist:
for key in list(ret):
for block in blist:
if salt.utils.stringutils.expr_match(key, block):
del ret[key]
log.trace('Filtering %s grain', key)
if not ret:
continue
if grains_deep_merge:
salt.utils.dictupdate.update(grains_data, ret)
else:

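The grains_blacklist filtering added above relies on salt.utils.stringutils.expr_match, which accepts both glob and regex patterns. A minimal approximation of the effect, using fnmatch to cover the glob half only:

# Approximation of the blacklist filtering; fnmatch only covers the glob
# half of what expr_match accepts.
import fnmatch

def filter_grains(grains, blacklist):
    return {key: value for key, value in grains.items()
            if not any(fnmatch.fnmatch(key, pattern) for pattern in blacklist)}

print(filter_grains({'ipv4': ['127.0.0.1'], 'os': 'Debian'}, ['ipv*']))
# {'os': 'Debian'}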
@ -1156,7 +1156,7 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs):  # pylint: disable=W06
salt '*' pkg.unhold <package name>

pkgs
A list of packages to hold. Must be passed as a python list.
A list of packages to unhold. Must be passed as a python list.

CLI Example:

@ -13,12 +13,14 @@ Microsoft IIS site management via WebAdministration powershell module
from __future__ import absolute_import, print_function, unicode_literals
import decimal
import logging
import re
import os
import yaml

# Import salt libs
import salt.utils.json
import salt.utils.platform
from salt.ext.six.moves import range
from salt.ext.six.moves import range, map
from salt.exceptions import SaltInvocationError, CommandExecutionError
from salt.ext import six

@ -160,6 +162,45 @@ def _srvmgr(cmd, return_json=False):
return ret


def _collection_match_to_index(pspath, colfilter, name, match):
'''
Returns index of collection item matching the match dictionary.
'''
collection = get_webconfiguration_settings(pspath, [{'name': name, 'filter': colfilter}])[0]['value']
for idx, collect_dict in enumerate(collection):
if all(item in collect_dict.items() for item in match.items()):
return idx
return -1


def _prepare_settings(pspath, settings):
'''
Prepare settings before execution with get or set functions.
Removes settings with a match parameter when index is not found.
'''
prepared_settings = []
for setting in settings:
if setting.get('name', None) is None:
log.warning('win_iis: Setting has no name: {}'.format(setting))
continue
if setting.get('filter', None) is None:
log.warning('win_iis: Setting has no filter: {}'.format(setting))
continue
match = re.search(r'Collection\[(\{.*\})\]', setting['name'])
if match:
name = setting['name'][:match.start(1)-1]
match_dict = yaml.load(match.group(1))
index = _collection_match_to_index(pspath, setting['filter'], name, match_dict)
if index == -1:
log.warning('win_iis: No match found for setting: {}'.format(setting))
else:
setting['name'] = setting['name'].replace(match.group(1), str(index))
prepared_settings.append(setting)
else:
prepared_settings.append(setting)
return prepared_settings

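Taken together, the two helpers above let a setting name address a collection item by its attributes rather than by position. A hypothetical walk-through of the rewrite, assuming _collection_match_to_index() resolved the match dictionary to index 3:

import re

name = "Collection[{name: site0}].logFile.directory"
match = re.search(r'Collection\[(\{.*\})\]', name)
index = 3  # pretend _collection_match_to_index() returned 3
print(name.replace(match.group(1), str(index)))
# Collection[3].logFile.directory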
def list_sites():
'''
List all the currently deployed websites.
@ -1985,3 +2026,167 @@ def set_webapp_settings(name, site, settings):

log.debug('Settings configured successfully: {0}'.format(settings.keys()))
return True


def get_webconfiguration_settings(name, settings):
r'''
Get the webconfiguration settings for the IIS PSPath.

Args:
name (str): The PSPath of the IIS webconfiguration settings.
settings (list): A list of dictionaries containing setting name and filter.

Returns:
list: A list of dictionaries containing setting name, filter and value.

CLI Example:

.. code-block:: bash

salt '*' win_iis.get_webconfiguration_settings name='IIS:\' settings="[{'name': 'enabled', 'filter': 'system.webServer/security/authentication/anonymousAuthentication'}]"
'''
ret = {}
ps_cmd = [r'$Settings = New-Object System.Collections.ArrayList;']
ps_cmd_validate = []
settings = _prepare_settings(name, settings)

if not settings:
log.warning('No settings provided')
return ret

for setting in settings:

# Build the commands to verify that the property names are valid.

ps_cmd_validate.extend(['Get-WebConfigurationProperty',
'-PSPath', "'{0}'".format(name),
'-Filter', "'{0}'".format(setting['filter']),
'-Name', "'{0}'".format(setting['name']),
'-ErrorAction', 'Stop',
'|', 'Out-Null;'])

# Some ItemProperties are Strings and others are ConfigurationAttributes.
# Since the former doesn't have a Value property, we need to account
# for this.
ps_cmd.append("$Property = Get-WebConfigurationProperty -PSPath '{0}'".format(name))
ps_cmd.append("-Name '{0}' -Filter '{1}' -ErrorAction Stop;".format(setting['name'], setting['filter']))
if setting['name'].split('.')[-1] == 'Collection':
if 'value' in setting:
ps_cmd.append("$Property = $Property | select -Property {0} ;"
.format(",".join(list(setting['value'][0].keys()))))
ps_cmd.append("$Settings.add(@{{filter='{0}';name='{1}';value=[System.Collections.ArrayList] @($Property)}})| Out-Null;"
.format(setting['filter'], setting['name']))
else:
ps_cmd.append(r'if (([String]::IsNullOrEmpty($Property) -eq $False) -and')
ps_cmd.append(r"($Property.GetType()).Name -eq 'ConfigurationAttribute') {")
ps_cmd.append(r'$Property = $Property | Select-Object')
ps_cmd.append(r'-ExpandProperty Value };')
ps_cmd.append("$Settings.add(@{{filter='{0}';name='{1}';value=[String] $Property}})| Out-Null;"
.format(setting['filter'], setting['name']))
ps_cmd.append(r'$Property = $Null;')

# Validate the setting names that were passed in.
cmd_ret = _srvmgr(cmd=ps_cmd_validate, return_json=True)

if cmd_ret['retcode'] != 0:
message = 'One or more invalid property names were specified for the provided container.'
raise SaltInvocationError(message)

ps_cmd.append('$Settings')
cmd_ret = _srvmgr(cmd=ps_cmd, return_json=True)

try:
ret = salt.utils.json.loads(cmd_ret['stdout'], strict=False)

except ValueError:
raise CommandExecutionError('Unable to parse return data as Json.')

return ret


def set_webconfiguration_settings(name, settings):
r'''
Set the value of the setting for an IIS container.

Args:
name (str): The PSPath of the IIS webconfiguration settings.
settings (list): A list of dictionaries containing setting name, filter and value.

Returns:
bool: True if successful, otherwise False

CLI Example:

.. code-block:: bash

salt '*' win_iis.set_webconfiguration_settings name='IIS:\' settings="[{'name': 'enabled', 'filter': 'system.webServer/security/authentication/anonymousAuthentication', 'value': False}]"
'''
ps_cmd = []
settings = _prepare_settings(name, settings)

if not settings:
log.warning('No settings provided')
return False

# Treat all values as strings for the purpose of comparing them to existing values.
for idx, setting in enumerate(settings):
if setting['name'].split('.')[-1] != 'Collection':
settings[idx]['value'] = six.text_type(setting['value'])

current_settings = get_webconfiguration_settings(
name=name, settings=settings)

if settings == current_settings:
log.debug('Settings already contain the provided values.')
return True

for setting in settings:
# If the value is numeric, don't treat it as a string in PowerShell.
if setting['name'].split('.')[-1] != 'Collection':
try:
complex(setting['value'])
value = setting['value']
except ValueError:
value = "'{0}'".format(setting['value'])
else:
configelement_list = []
for value_item in setting['value']:
configelement_construct = []
for key, value in value_item.items():
configelement_construct.append("{0}='{1}'".format(key, value))
configelement_list.append('@{' + ';'.join(configelement_construct) + '}')
value = ','.join(configelement_list)

ps_cmd.extend(['Set-WebConfigurationProperty',
'-PSPath', "'{0}'".format(name),
'-Filter', "'{0}'".format(setting['filter']),
'-Name', "'{0}'".format(setting['name']),
'-Value', '{0};'.format(value)])

cmd_ret = _srvmgr(ps_cmd)

if cmd_ret['retcode'] != 0:
msg = 'Unable to set settings for {0}'.format(name)
raise CommandExecutionError(msg)

# Get the fields post-change so that we can verify that all values
# were modified successfully. Track the ones that weren't.
new_settings = get_webconfiguration_settings(
name=name, settings=settings)

failed_settings = []

for idx, setting in enumerate(settings):

is_collection = setting['name'].split('.')[-1] == 'Collection'

if ((not is_collection and six.text_type(setting['value']) != six.text_type(new_settings[idx]['value']))
or (is_collection and list(map(dict, setting['value'])) != list(map(dict, new_settings[idx]['value'])))):
failed_settings.append(setting)

if failed_settings:
log.error('Failed to change settings: %s', failed_settings)
return False

log.debug('Settings configured successfully: %s', settings)
return True

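The complex() probe above is how the function decides whether a value needs quoting for PowerShell: numeric-looking strings pass through bare, everything else is single-quoted. Isolated, the rule looks like this:

# Sketch of the quoting rule used above (string inputs assumed):
def ps_value(value):
    try:
        complex(value)
        return value
    except ValueError:
        return "'{0}'".format(value)

print(ps_value('10'), ps_value('Integrated'))
# 10 'Integrated'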
@ -193,5 +193,7 @@ def _checkblk(name):
Check if the blk exists and return its fstype if ok
'''

blk = __salt__['cmd.run']('blkid -o value -s TYPE {0}'.format(name))
blk = __salt__['cmd.run'](
['blkid', '-o', 'value', '-s', 'TYPE', name],
ignore_retcode=True)
return '' if not blk else blk

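Passing the command as a list, as the new call does, sidesteps shell quoting issues with unusual device names. A rough subprocess equivalent of the fixed call:

# Rough equivalent of the list-form cmd.run call above; the empty-string
# fallback mirrors ignore_retcode=True plus the final ternary.
import subprocess

def blk_type(device):
    try:
        out = subprocess.check_output(
            ['blkid', '-o', 'value', '-s', 'TYPE', device])
        return out.decode().strip()
    except subprocess.CalledProcessError:
        return ''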
@ -11,6 +11,7 @@ from Microsoft IIS.

# Import python libs
from __future__ import absolute_import, unicode_literals, print_function
from salt.ext.six.moves import map


# Define the module's virtual name
@ -865,3 +866,125 @@ def set_app(name, site, settings=None):
ret['result'] = True

return ret


def webconfiguration_settings(name, settings=None):
r'''
Set the value of webconfiguration settings.

:param str name: The name of the IIS PSPath containing the settings.
Possible PSPaths are:
MACHINE, MACHINE/WEBROOT, IIS:\, IIS:\Sites\sitename, ...
:param dict settings: Dictionaries of dictionaries.
You can match a specific item in a collection with this syntax inside a key:
'Collection[{name: site0}].logFile.directory'

Example of usage for the ``MACHINE/WEBROOT`` PSPath:

.. code-block:: yaml

MACHINE-WEBROOT-level-security:
win_iis.webconfiguration_settings:
- name: 'MACHINE/WEBROOT'
- settings:
system.web/authentication/forms:
requireSSL: True
protection: "All"
credentials.passwordFormat: "SHA1"
system.web/httpCookies:
httpOnlyCookies: True

Example of usage for the ``IIS:\Sites\site0`` PSPath:

.. code-block:: yaml

site0-IIS-Sites-level-security:
win_iis.webconfiguration_settings:
- name: 'IIS:\Sites\site0'
- settings:
system.webServer/httpErrors:
errorMode: "DetailedLocalOnly"
system.webServer/security/requestFiltering:
allowDoubleEscaping: False
verbs.Collection:
- verb: TRACE
allowed: False
fileExtensions.allowUnlisted: False

Example of usage for the ``IIS:\`` PSPath with a collection matching:

.. code-block:: yaml

site0-IIS-level-security:
win_iis.webconfiguration_settings:
- name: 'IIS:\'
- settings:
system.applicationHost/sites:
'Collection[{name: site0}].logFile.directory': 'C:\logs\iis\site0'

'''

ret = {'name': name,
'changes': {},
'comment': str(),
'result': None}

if not settings:
ret['comment'] = 'No settings to change provided.'
ret['result'] = True
return ret

ret_settings = {
'changes': {},
'failures': {},
}

settings_list = list()

for filter, filter_settings in settings.items():
for setting_name, value in filter_settings.items():
settings_list.append({'filter': filter, 'name': setting_name, 'value': value})

current_settings_list = __salt__['win_iis.get_webconfiguration_settings'](name=name, settings=settings_list)
for idx, setting in enumerate(settings_list):

is_collection = setting['name'].split('.')[-1] == 'Collection'
# If this is a new setting and not an update to an existing setting
if len(current_settings_list) <= idx:
ret_settings['changes'][setting['filter'] + '.' + setting['name']] = {'old': {},
'new': settings_list[idx]['value']}
elif ((is_collection and list(map(dict, setting['value'])) != list(map(dict, current_settings_list[idx]['value'])))
or (not is_collection and str(setting['value']) != str(current_settings_list[idx]['value']))):
ret_settings['changes'][setting['filter'] + '.' + setting['name']] = {'old': current_settings_list[idx]['value'],
'new': settings_list[idx]['value']}
if not ret_settings['changes']:
ret['comment'] = 'Settings already contain the provided values.'
ret['result'] = True
return ret
elif __opts__['test']:
ret['comment'] = 'Settings will be changed.'
ret['changes'] = ret_settings
return ret

success = __salt__['win_iis.set_webconfiguration_settings'](name=name, settings=settings_list)

new_settings_list = __salt__['win_iis.get_webconfiguration_settings'](name=name, settings=settings_list)
for idx, setting in enumerate(settings_list):

is_collection = setting['name'].split('.')[-1] == 'Collection'
if ((is_collection and setting['value'] != new_settings_list[idx]['value'])
or (not is_collection and str(setting['value']) != str(new_settings_list[idx]['value']))):
ret_settings['failures'][setting['filter'] + '.' + setting['name']] = {'old': current_settings_list[idx]['value'],
'new': new_settings_list[idx]['value']}
ret_settings['changes'].pop(setting['filter'] + '.' + setting['name'], None)

if ret_settings['failures']:
ret['comment'] = 'Some settings failed to change.'
ret['changes'] = ret_settings
ret['result'] = False
else:
ret['comment'] = 'Set settings to contain the provided values.'
ret['changes'] = ret_settings['changes']
ret['result'] = success

return ret

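The state flattens its nested settings mapping into the list-of-dicts shape the execution module expects; in isolation, with a hypothetical input:

settings = {'system.web/httpCookies': {'httpOnlyCookies': True}}
settings_list = [
    {'filter': section, 'name': key, 'value': value}
    for section, entries in settings.items()
    for key, value in entries.items()
]
print(settings_list)
# [{'filter': 'system.web/httpCookies', 'name': 'httpOnlyCookies', 'value': True}]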
@ -15,6 +15,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
import types
import signal
import getpass
import logging
@ -56,6 +57,17 @@ def _sorted(mixins_or_funcs):
)


class MixinFuncsContainer(list):

def append(self, func):
if isinstance(func, types.MethodType):
# We only care about unbound methods
func = func.__func__
if func not in self:
# And no duplicates please
list.append(self, func)


class MixInMeta(type):
# This attribute here won't actually do anything. But, if you need to
# specify an order or a dependency within the mix-ins, please define the
@ -79,13 +91,13 @@ class OptionParserMeta(MixInMeta):
bases,
attrs)
if not hasattr(instance, '_mixin_setup_funcs'):
instance._mixin_setup_funcs = []
instance._mixin_setup_funcs = MixinFuncsContainer()
if not hasattr(instance, '_mixin_process_funcs'):
instance._mixin_process_funcs = []
instance._mixin_process_funcs = MixinFuncsContainer()
if not hasattr(instance, '_mixin_after_parsed_funcs'):
instance._mixin_after_parsed_funcs = []
instance._mixin_after_parsed_funcs = MixinFuncsContainer()
if not hasattr(instance, '_mixin_before_exit_funcs'):
instance._mixin_before_exit_funcs = []
instance._mixin_before_exit_funcs = MixinFuncsContainer()

for base in _sorted(bases + (instance,)):
func = getattr(base, '_mixin_setup', None)
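MixinFuncsContainer exists so the same mixin function cannot be queued twice when the metaclass re-walks the base classes; a quick standalone demonstration of the dedup behaviour:

# Standalone demonstration; mirrors the class introduced above.
import types

class MixinFuncsContainer(list):
    def append(self, func):
        if isinstance(func, types.MethodType):
            func = func.__func__
        if func not in self:
            list.append(self, func)

def _mixin_setup():
    pass

funcs = MixinFuncsContainer()
funcs.append(_mixin_setup)
funcs.append(_mixin_setup)  # silently ignored: already queued
print(len(funcs))  # 1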
@ -304,7 +316,7 @@ class MergeConfigMixIn(six.with_metaclass(MixInMeta, object)):
# the config options and if needed override them
self._mixin_after_parsed_funcs.append(self.__merge_config_with_cli)

def __merge_config_with_cli(self, *args):  # pylint: disable=unused-argument
def __merge_config_with_cli(self):
# Merge parser options
for option in self.option_list:
if option.dest is None:
@ -687,7 +699,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
# Remove it from config so it inherits from log_level_logfile
self.config.pop(self._logfile_loglevel_config_setting_name_)

def __setup_logfile_logger_config(self, *args):  # pylint: disable=unused-argument
def __setup_logfile_logger_config(self):
if self._logfile_loglevel_config_setting_name_ in self.config and not \
self.config.get(self._logfile_loglevel_config_setting_name_):
# Remove it from config so it inherits from log_level
@ -852,7 +864,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
for name, level in six.iteritems(self.config.get('log_granular_levels', {})):
log.set_logger_level(name, level)

def __setup_extended_logging(self, *args):  # pylint: disable=unused-argument
def __setup_extended_logging(self):
if salt.utils.platform.is_windows() and self._setup_mp_logging_listener_:
# On Windows when using a logging listener, all extended logging
# will go through the logging listener.
@ -862,14 +874,14 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
def _get_mp_logging_listener_queue(self):
return log.get_multiprocessing_logging_queue()

def _setup_mp_logging_listener(self, *args):  # pylint: disable=unused-argument
def _setup_mp_logging_listener(self):
if self._setup_mp_logging_listener_:
log.setup_multiprocessing_logging_listener(
self.config,
self._get_mp_logging_listener_queue()
)

def _setup_mp_logging_client(self, *args):  # pylint: disable=unused-argument
def _setup_mp_logging_client(self):
if self._setup_mp_logging_listener_:
# Set multiprocessing logging level even in non-Windows
# environments. In non-Windows environments, this setting will
@ -895,7 +907,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
log.shutdown_console_logging()
log.shutdown_logfile_logging()

def __setup_console_logger_config(self, *args):  # pylint: disable=unused-argument
def __setup_console_logger_config(self):
# Since we're not going to be a daemon, setup the console logger
logfmt = self.config.get(
'log_fmt_console',
@ -921,7 +933,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
self.config['log_fmt_console'] = logfmt
self.config['log_datefmt_console'] = datefmt

def __setup_console_logger(self, *args):  # pylint: disable=unused-argument
def __setup_console_logger(self):
# If daemon is set force console logger to quiet
if getattr(self.options, 'daemon', False) is True:
return
@ -2580,7 +2592,7 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
# info or error.
self.config['loglevel'] = 'info'

def __create_keys_dir(self, *args):  # pylint: disable=unused-argument
def __create_keys_dir(self):
if not os.path.isdir(self.config['gen_keys_dir']):
os.makedirs(self.config['gen_keys_dir'])

@ -14,6 +14,7 @@ import errno
import types
import signal
import logging
import functools
import threading
import contextlib
import subprocess
@ -21,7 +22,6 @@ import multiprocessing
import multiprocessing.util
import socket


# Import salt libs
import salt.defaults.exitcodes
import salt.utils.files
@ -716,6 +716,15 @@ class Process(multiprocessing.Process, NewStyleClassMixIn):
(salt.log.setup.shutdown_multiprocessing_logging, [], {})
]

# Because we need to enforce our after fork and finalize routines,
# we must wrap this class run method to allow for these extra steps
# to be executed pre and post calling the actual run method,
# having subclasses call super would just not work.
#
# We use setattr here to fool pylint not to complain that we're
# overriding run from the subclass here
setattr(self, 'run', self.__decorate_run(self.run))

# __setstate__ and __getstate__ are only used on Windows.
def __setstate__(self, state):
args = state['args']
@ -741,25 +750,30 @@ class Process(multiprocessing.Process, NewStyleClassMixIn):
def _setup_process_logging(self):
salt.log.setup.setup_multiprocessing_logging(self.log_queue)

def run(self):
for method, args, kwargs in self._after_fork_methods:
method(*args, **kwargs)
try:
return super(Process, self).run()
except SystemExit:
# These are handled by multiprocessing.Process._bootstrap()
raise
except Exception as exc:
log.error(
'An un-handled exception from the multiprocessing process '
'\'%s\' was caught:\n', self.name, exc_info=True)
# Re-raise the exception. multiprocessing.Process will write it to
# sys.stderr and set the proper exitcode and we have already logged
# it above.
raise
finally:
for method, args, kwargs in self._finalize_methods:
def __decorate_run(self, run_func):

@functools.wraps(run_func)
def wrapped_run_func():
for method, args, kwargs in self._after_fork_methods:
method(*args, **kwargs)
try:
return run_func()
except SystemExit:
# These are handled by multiprocessing.Process._bootstrap()
six.reraise(*sys.exc_info())
except Exception as exc:  # pylint: disable=broad-except
log.error(
'An un-handled exception from the multiprocessing process '
'\'%s\' was caught:\n', self.name, exc_info=True)
# Re-raise the exception. multiprocessing.Process will write it to
# sys.stderr and set the proper exitcode and we have already logged
# it above.
six.reraise(*sys.exc_info())
finally:
for method, args, kwargs in self._finalize_methods:
method(*args, **kwargs)

return wrapped_run_func


class MultiprocessingProcess(Process):

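__decorate_run() replaces the old hard-coded run() override with a wrapper applied in __init__, so subclasses that define their own run() still get the after-fork and finalize hooks. Reduced to its skeleton:

# Skeleton of the wrapping pattern used by __decorate_run() above.
import functools

def decorate_run(run_func, after_fork_hooks, finalize_hooks):
    @functools.wraps(run_func)
    def wrapped_run_func():
        for hook in after_fork_hooks:
            hook()
        try:
            return run_func()
        finally:
            for hook in finalize_hooks:
                hook()
    return wrapped_run_func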
@ -68,7 +68,22 @@ class PipModuleTest(ModuleCase):
if salt.utils.is_windows():
python = os.path.join(sys.real_prefix, os.path.basename(sys.executable))
else:
python = os.path.join(sys.real_prefix, 'bin', os.path.basename(sys.executable))
python_binary_names = [
'python{}.{}'.format(*sys.version_info),
'python{}'.format(*sys.version_info),
'python'
]
for binary_name in python_binary_names:
python = os.path.join(sys.real_prefix, 'bin', binary_name)
if os.path.exists(python):
break
else:
self.fail(
'Couldn\'t find a python binary name under \'{}\' matching: {}'.format(
os.path.join(sys.real_prefix, 'bin'),
python_binary_names
)
)
# We're running off a virtualenv, and we don't want to create a virtualenv off of
# a virtualenv
kwargs = {'python': python}

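The candidate loop above uses Python's for/else: the else branch only runs when the loop completes without hitting break, i.e. when no matching binary exists. Compact illustration with a hypothetical bin directory:

import os

candidates = ['python3.7', 'python3', 'python']
for name in candidates:
    path = os.path.join('/usr/bin', name)
    if os.path.exists(path):
        break
else:
    raise RuntimeError('no python binary found among {}'.format(candidates))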
@ -87,7 +87,22 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
if salt.utils.is_windows():
python = os.path.join(sys.real_prefix, os.path.basename(sys.executable))
else:
python = os.path.join(sys.real_prefix, 'bin', os.path.basename(sys.executable))
python_binary_names = [
'python{}.{}'.format(*sys.version_info),
'python{}'.format(*sys.version_info),
'python'
]
for binary_name in python_binary_names:
python = os.path.join(sys.real_prefix, 'bin', binary_name)
if os.path.exists(python):
break
else:
self.fail(
'Couldn\'t find a python binary name under \'{}\' matching: {}'.format(
os.path.join(sys.real_prefix, 'bin'),
python_binary_names
)
)
# We're running off a virtualenv, and we don't want to create a virtualenv off of
# a virtualenv, let's point to the actual python that created the virtualenv
kwargs = {'python': python}

@ -65,7 +65,8 @@ class TestHandleEvents(MultimasterModuleCase, MultiMasterTestShellCase, AdaptedC
# Since minion could be not responsive now use `salt-call --local` for this.
res = self.run_call(
"iptables.delete filter INPUT rule='{0}'".format(disconnect_master_rule),
local=True)
local=True,
timeout=30)
self.assertEqual(res, ['local:'])
# Ensure the master is back.
res = self.run_function(

@ -907,11 +907,11 @@ class SaltTestsuiteParser(SaltCoverageTestingParser):

try:
print_header(
' * Setting up Salt daemons to execute tests',
' * Setting up multimaster Salt daemons to execute tests',
top=False, width=getattr(self.options, 'output_columns', PNUM)
)
except TypeError:
print_header(' * Setting up Salt daemons to execute tests', top=False)
print_header(' * Setting up multimaster Salt daemons to execute tests', top=False)

status = []

@ -233,11 +233,15 @@ class ShellTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
arg_str = '--config-dir {0} {1}'.format(self.config_dir, arg_str)
return self.run_script('salt-cp', arg_str, with_retcode=with_retcode, catch_stderr=catch_stderr)

def run_call(self, arg_str, with_retcode=False, catch_stderr=False, local=False):
def run_call(self, arg_str, with_retcode=False, catch_stderr=False, local=False, timeout=15):
arg_str = '{0} --config-dir {1} {2}'.format('--local' if local else '',
self.config_dir, arg_str)

return self.run_script('salt-call', arg_str, with_retcode=with_retcode, catch_stderr=catch_stderr)
return self.run_script('salt-call',
arg_str,
with_retcode=with_retcode,
catch_stderr=catch_stderr,
timeout=timeout)

def run_cloud(self, arg_str, catch_stderr=False, timeout=None):
'''

67
tests/unit/grains/test_nvme.py
Normal file
@ -0,0 +1,67 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Simon Dodsley <simon@purestorage.com>`
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import errno
import textwrap

# Import Salt Testing Libs
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
mock_open,
MagicMock,
NO_MOCK,
NO_MOCK_REASON
)

# Import Salt Libs
import salt.grains.nvme as nvme


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NvmeGrainsTestCase(TestCase):
'''
Test cases for nvme grains
'''

def test_linux_nvme_nqn_grains(self):
_nvme_file = textwrap.dedent('''\
nqn.2014-08.org.nvmexpress:fc_lif:uuid:2cd61a74-17f9-4c22-b350-3020020c458d
''')

with patch('salt.utils.files.fopen', mock_open(read_data=_nvme_file)):
nqn = nvme._linux_nqn()

assert isinstance(nqn, list)
assert len(nqn) == 1
assert nqn == ['nqn.2014-08.org.nvmexpress:fc_lif:uuid:2cd61a74-17f9-4c22-b350-3020020c458d']

@patch('salt.utils.files.fopen', MagicMock(side_effect=IOError(errno.EPERM,
'The cables are not the same length.')))
@patch('salt.grains.nvme.log', MagicMock())
def test_linux_nqn_non_root(self):
'''
Test linux_nqn when running on the salt-master as non-root,
and that access denial is handled properly.
:return:
'''
assert nvme._linux_nqn() == []
nvme.log.debug.assert_called()
assert 'Error while accessing' in nvme.log.debug.call_args[0][0]
assert 'cables are not the same' in nvme.log.debug.call_args[0][2].strerror
assert nvme.log.debug.call_args[0][2].errno == errno.EPERM
assert nvme.log.debug.call_args[0][1] == '/etc/nvme/hostnqn'

@patch('salt.utils.files.fopen', MagicMock(side_effect=IOError(errno.ENOENT, '')))
@patch('salt.grains.nvme.log', MagicMock())
def test_linux_nqn_no_nvme_initiator(self):
'''
Test linux_nqn when running on the salt-master as root.
The NVMe initiator is not accessible or is not supported.
:return:
'''
assert nvme._linux_nqn() == []
nvme.log.debug.assert_not_called()
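The assertions above index into mock.call_args: element [0] is the positional-argument tuple of the last call, so [0][0] is the log format string, [0][1] the path, and [0][2] the IOError instance. Self-contained:

# How the call_args indexing in the test maps onto the logging call:
import errno
from unittest import mock

log = mock.MagicMock()
log.debug("Error while accessing '%s': %s", '/etc/nvme/hostnqn',
          IOError(errno.EPERM, 'denied'))
args = log.debug.call_args[0]
assert args[1] == '/etc/nvme/hostnqn'
assert args[2].errno == errno.EPERM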
@ -131,9 +131,9 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
'''
|
||||
with patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value=dict())), \
|
||||
patch.dict(win_iis.__salt__):
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value=dict())), \
|
||||
patch.dict(win_iis.__salt__):
|
||||
self.assertTrue(win_iis.create_apppool('MyTestPool'))
|
||||
|
||||
def test_list_apppools(self):
|
||||
@ -141,8 +141,8 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
Test - List all configured IIS application pools.
|
||||
'''
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value=LIST_APPPOOLS_SRVMGR)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value=LIST_APPPOOLS_SRVMGR)):
|
||||
self.assertEqual(win_iis.list_apppools(), APPPOOL_LIST)
|
||||
|
||||
def test_remove_apppool(self):
|
||||
@ -150,12 +150,12 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
Test - Remove an IIS application pool.
|
||||
'''
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value={'MyTestPool': {
|
||||
'applications': list(),
|
||||
'state': 'Started'}})):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value={'MyTestPool': {
|
||||
'applications': list(),
|
||||
'state': 'Started'}})):
|
||||
self.assertTrue(win_iis.remove_apppool('MyTestPool'))
|
||||
|
||||
def test_restart_apppool(self):
|
||||
@ -163,8 +163,8 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
Test - Restart an IIS application pool.
|
||||
'''
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})):
|
||||
self.assertTrue(win_iis.restart_apppool('MyTestPool'))
|
||||
|
||||
def test_create_site(self):
|
||||
@ -175,12 +175,12 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
'apppool': 'MyTestPool', 'hostheader': 'mytestsite.local',
|
||||
'ipaddress': '*', 'port': 80, 'protocol': 'http'}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=dict())), \
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value=dict())):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=dict())), \
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value=dict())):
|
||||
self.assertTrue(win_iis.create_site(**kwargs))
|
||||
|
||||
def test_create_site_failed(self):
|
||||
@ -191,12 +191,12 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
'apppool': 'MyTestPool', 'hostheader': 'mytestsite.local',
|
||||
'ipaddress': '*', 'port': 80, 'protocol': 'invalid-protocol-name'}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=dict())), \
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value=dict())):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=dict())), \
|
||||
patch('salt.modules.win_iis.list_apppools',
|
||||
MagicMock(return_value=dict())):
|
||||
self.assertRaises(SaltInvocationError, win_iis.create_site, **kwargs)
|
||||
|
||||
def test_remove_site(self):
|
||||
@ -204,10 +204,10 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
Test - Delete a website from IIS.
|
||||
'''
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=SITE_LIST)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=SITE_LIST)):
|
||||
self.assertTrue(win_iis.remove_site('MyTestSite'))
|
||||
|
||||
def test_create_app(self):
|
||||
@ -217,11 +217,11 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
kwargs = {'name': 'testApp', 'site': 'MyTestSite',
|
||||
'sourcepath': r'C:\inetpub\apps\testApp', 'apppool': 'MyTestPool'}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('os.path.isdir', MagicMock(return_value=True)), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_apps',
|
||||
MagicMock(return_value=APP_LIST)):
|
||||
patch('os.path.isdir', MagicMock(return_value=True)), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_apps',
|
||||
MagicMock(return_value=APP_LIST)):
|
||||
self.assertTrue(win_iis.create_app(**kwargs))
|
||||
|
||||
def test_list_apps(self):
|
||||
@ -229,8 +229,8 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
Test - Get all configured IIS applications for the specified site.
|
||||
'''
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value=LIST_APPS_SRVMGR)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value=LIST_APPS_SRVMGR)):
|
||||
self.assertEqual(win_iis.list_apps('MyTestSite'), APP_LIST)
|
||||
|
||||
def test_remove_app(self):
|
||||
@ -239,10 +239,10 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
'''
|
||||
kwargs = {'name': 'otherApp', 'site': 'MyTestSite'}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_apps',
|
||||
MagicMock(return_value=APP_LIST)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_apps',
|
||||
MagicMock(return_value=APP_LIST)):
|
||||
self.assertTrue(win_iis.remove_app(**kwargs))
|
||||
|
||||
def test_create_binding(self):
|
||||
@ -252,10 +252,10 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
kwargs = {'site': 'MyTestSite', 'hostheader': '', 'ipaddress': '*',
|
||||
'port': 80, 'protocol': 'http', 'sslflags': 0}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_bindings',
|
||||
MagicMock(return_value=BINDING_LIST)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_bindings',
|
||||
MagicMock(return_value=BINDING_LIST)):
|
||||
self.assertTrue(win_iis.create_binding(**kwargs))
|
||||
|
||||
def test_create_binding_failed(self):
|
||||
@ -265,10 +265,10 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
kwargs = {'site': 'MyTestSite', 'hostheader': '', 'ipaddress': '*',
|
||||
'port': 80, 'protocol': 'invalid-protocol-name', 'sslflags': 999}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_bindings',
|
||||
MagicMock(return_value=BINDING_LIST)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_bindings',
|
||||
MagicMock(return_value=BINDING_LIST)):
|
||||
self.assertRaises(SaltInvocationError, win_iis.create_binding, **kwargs)
|
||||
|
||||
def test_list_bindings(self):
|
||||
@ -276,8 +276,8 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
Test - Get all configured IIS bindings for the specified site.
|
||||
'''
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=SITE_LIST)):
|
||||
patch('salt.modules.win_iis.list_sites',
|
||||
MagicMock(return_value=SITE_LIST)):
|
||||
self.assertEqual(win_iis.list_bindings('MyTestSite'), BINDING_LIST)
|
||||
|
||||
def test_remove_binding(self):
|
||||
@ -287,10 +287,10 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
kwargs = {'site': 'MyTestSite', 'hostheader': 'myothertestsite.local',
|
||||
'ipaddress': '*', 'port': 443}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_bindings',
|
||||
MagicMock(return_value=BINDING_LIST)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_bindings',
|
||||
MagicMock(return_value=BINDING_LIST)):
|
||||
self.assertTrue(win_iis.remove_binding(**kwargs))
|
||||
|
||||
def test_create_vdir(self):
|
||||
@ -300,12 +300,12 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
kwargs = {'name': 'TestVdir', 'site': 'MyTestSite',
|
||||
'sourcepath': r'C:\inetpub\vdirs\TestVdir'}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('os.path.isdir',
|
||||
MagicMock(return_value=True)), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_vdirs',
|
||||
MagicMock(return_value=VDIR_LIST)):
|
||||
patch('os.path.isdir',
|
||||
MagicMock(return_value=True)), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_vdirs',
|
||||
MagicMock(return_value=VDIR_LIST)):
|
||||
self.assertTrue(win_iis.create_vdir(**kwargs))
|
||||
|
||||
def test_list_vdirs(self):
|
||||
@ -318,8 +318,8 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
}
|
||||
}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value=LIST_VDIRS_SRVMGR)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value=LIST_VDIRS_SRVMGR)):
|
||||
self.assertEqual(win_iis.list_vdirs('MyTestSite'), vdirs)
|
||||
|
||||
def test_remove_vdir(self):
|
||||
@ -328,10 +328,10 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
|
||||
'''
|
||||
kwargs = {'name': 'TestOtherVdir', 'site': 'MyTestSite'}
|
||||
with patch.dict(win_iis.__salt__), \
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_vdirs',
|
||||
MagicMock(return_value=VDIR_LIST)):
|
||||
patch('salt.modules.win_iis._srvmgr',
|
||||
MagicMock(return_value={'retcode': 0})), \
|
||||
patch('salt.modules.win_iis.list_vdirs',
|
||||
MagicMock(return_value=VDIR_LIST)):
|
||||
self.assertTrue(win_iis.remove_vdir(**kwargs))
|
||||
|
||||
def test_create_cert_binding(self):
|
||||
@ -342,15 +342,15 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
                  'site': 'MyTestSite', 'hostheader': 'mytestsite.local',
                  'ipaddress': '*', 'port': 443}
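        # Every Windows-facing call is mocked here: _list_certs supplies the
        # certificate thumbprint, _srvmgr fakes the PowerShell results, and
        # the binding lookups return canned data.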
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._list_certs',
                      MagicMock(return_value={'9988776655443322111000AAABBBCCCDDDEEEFFF': None})), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value={'retcode': 0, 'stdout': 10})), \
                patch('salt.utils.json.loads', MagicMock(return_value=[{'MajorVersion': 10, 'MinorVersion': 0}])), \
                patch('salt.modules.win_iis.list_bindings',
                      MagicMock(return_value=BINDING_LIST)), \
                patch('salt.modules.win_iis.list_cert_bindings',
                      MagicMock(return_value={CERT_BINDING_INFO: BINDING_LIST[CERT_BINDING_INFO]})):
            self.assertTrue(win_iis.create_cert_binding(**kwargs))

    def test_list_cert_bindings(self):
@ -359,8 +359,8 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
        '''
        key = '*:443:mytestsite.local'
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis.list_sites',
                      MagicMock(return_value=SITE_LIST)):
            self.assertEqual(win_iis.list_cert_bindings('MyTestSite'),
                             {key: BINDING_LIST[key]})

@ -372,10 +372,10 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
                  'site': 'MyOtherTestSite', 'hostheader': 'myothertestsite.local',
                  'ipaddress': '*', 'port': 443}
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value={'retcode': 0})), \
                patch('salt.modules.win_iis.list_cert_bindings',
                      MagicMock(return_value={CERT_BINDING_INFO: BINDING_LIST[CERT_BINDING_INFO]})):
            self.assertTrue(win_iis.remove_cert_binding(**kwargs))

    def test_get_container_setting(self):
@ -385,8 +385,8 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
        kwargs = {'name': 'MyTestSite', 'container': 'AppPools',
                  'settings': ['managedPipelineMode']}
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value=CONTAINER_SETTING)):
            self.assertEqual(win_iis.get_container_setting(**kwargs),
                             {'managedPipelineMode': 'Integrated'})

@ -397,8 +397,159 @@ class WinIisTestCase(TestCase, LoaderModuleMockMixin):
        kwargs = {'name': 'MyTestSite', 'container': 'AppPools',
                  'settings': {'managedPipelineMode': 'Integrated'}}
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value={'retcode': 0})), \
                patch('salt.modules.win_iis.get_container_setting',
                      MagicMock(return_value={'managedPipelineMode': 'Integrated'})):
            self.assertTrue(win_iis.set_container_setting(**kwargs))

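    # _collection_match_to_index returns the position of the first matching
    # collection item, or -1 when nothing matches.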
    def test__collection_match_to_index(self):
        bad_match = {'key_0': 'value'}
        first_match = {'key_1': 'value'}
        second_match = {'key_2': 'value'}
        collection = [first_match, second_match]
        settings = [{'name': 'enabled', 'value': collection}]
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis.get_webconfiguration_settings',
                      MagicMock(return_value=settings)):
            ret = win_iis._collection_match_to_index('pspath', 'colfilter', 'name', bad_match)
            self.assertEqual(ret, -1)
            ret = win_iis._collection_match_to_index('pspath', 'colfilter', 'name', first_match)
            self.assertEqual(ret, 0)
            ret = win_iis._collection_match_to_index('pspath', 'colfilter', 'name', second_match)
            self.assertEqual(ret, 1)

    def test__prepare_settings(self):
        simple_setting = {'name': 'value', 'filter': 'value'}
        collection_setting = {'name': 'Collection[{yaml:\n\tdata}]', 'filter': 'value'}
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._collection_match_to_index',
                      MagicMock(return_value=0)):
            ret = win_iis._prepare_settings('pspath', [
                simple_setting, collection_setting, {'invalid': 'setting'}, {'name': 'filter-less_setting'}
            ])
        self.assertEqual(ret, [simple_setting, collection_setting])

    @patch('salt.modules.win_iis.log')
    def test_get_webconfiguration_settings_empty(self, mock_log):
        ret = win_iis.get_webconfiguration_settings('name', settings=[])
        mock_log.warning.assert_called_once_with('No settings provided')
        self.assertEqual(ret, {})

    def test_get_webconfiguration_settings(self):
        # Setup
        name = 'IIS'
        collection_setting = {'name': 'Collection[{yaml:\n\tdata}]', 'filter': 'value'}
        filter_setting = {'name': 'enabled',
                          'filter': 'system.webServer / security / authentication / anonymousAuthentication'}
        settings = [collection_setting, filter_setting]

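        # Build the exact PowerShell command list the module is expected to
        # pass to _srvmgr: one Get-WebConfigurationProperty block per setting.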
        ps_cmd = ['$Settings = New-Object System.Collections.ArrayList;', ]
        for setting in settings:
            ps_cmd.extend([
                "$Property = Get-WebConfigurationProperty -PSPath '{}'".format(name),
                "-Name '{name}' -Filter '{filter}' -ErrorAction Stop;".format(
                    filter=setting['filter'], name=setting['name']),
                'if (([String]::IsNullOrEmpty($Property) -eq $False) -and',
                "($Property.GetType()).Name -eq 'ConfigurationAttribute') {",
                '$Property = $Property | Select-Object',
                '-ExpandProperty Value };',
                "$Settings.add(@{{filter='{filter}';name='{name}';value=[String] $Property}})| Out-Null;".format(
                    filter=setting['filter'], name=setting['name']),
                '$Property = $Null;',
            ])
        ps_cmd.append('$Settings')

        # Execute
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._prepare_settings',
                      MagicMock(return_value=settings)), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value={'retcode': 0, 'stdout': '{}'})):
            ret = win_iis.get_webconfiguration_settings(name, settings=settings)

        # Verify
        win_iis._srvmgr.assert_called_with(cmd=ps_cmd, return_json=True)
        self.assertEqual(ret, {})

    @patch('salt.modules.win_iis.log')
    def test_set_webconfiguration_settings_empty(self, mock_log):
        ret = win_iis.set_webconfiguration_settings('name', settings=[])
        mock_log.warning.assert_called_once_with('No settings provided')
        self.assertEqual(ret, False)

    @patch('salt.modules.win_iis.log')
    def test_set_webconfiguration_settings_no_changes(self, mock_log):
        # Setup
        name = 'IIS'
        setting = {
            'name': 'Collection[{yaml:\n\tdata}]',
            'filter': 'system.webServer / security / authentication / anonymousAuthentication',
            'value': []
        }
        settings = [setting]

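        # get_webconfiguration_settings is mocked to already return the
        # desired values, so the call should short-circuit without writing.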
        # Execute
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._prepare_settings',
                      MagicMock(return_value=settings)), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value={'retcode': 0, 'stdout': '{}'})), \
                patch('salt.modules.win_iis.get_webconfiguration_settings',
                      MagicMock(return_value=settings)):
            ret = win_iis.set_webconfiguration_settings(name, settings=settings)

        # Verify
        mock_log.debug.assert_called_with('Settings already contain the provided values.')
        self.assertEqual(ret, True)

    @patch('salt.modules.win_iis.log')
    def test_set_webconfiguration_settings_failed(self, mock_log):
        # Setup
        name = 'IIS'
        setting = {
            'name': 'Collection[{yaml:\n\tdata}]',
            'filter': 'system.webServer / security / authentication / anonymousAuthentication',
            'value': []
        }
        settings = [setting]

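        # side_effect models two reads: the first shows nothing configured,
        # the read-back after the write shows an unexpected value, so the
        # change must be reported as failed.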
        # Execute
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._prepare_settings',
                      MagicMock(return_value=settings)), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value={'retcode': 0, 'stdout': '{}'})), \
                patch('salt.modules.win_iis.get_webconfiguration_settings',
                      MagicMock(side_effect=[[], [{'value': 'unexpected_change!'}]])):
            ret = win_iis.set_webconfiguration_settings(name, settings=settings)

        # Verify
        self.assertEqual(ret, False)
        mock_log.error.assert_called_with('Failed to change settings: %s', settings)

    @patch('salt.modules.win_iis.log')
    def test_set_webconfiguration_settings(self, mock_log):
        # Setup
        name = 'IIS'
        setting = {
            'name': 'Collection[{yaml:\n\tdata}]',
            'filter': 'system.webServer / security / authentication / anonymousAuthentication',
            'value': []
        }
        settings = [setting]

        # Execute
        with patch.dict(win_iis.__salt__), \
                patch('salt.modules.win_iis._prepare_settings',
                      MagicMock(return_value=settings)), \
                patch('salt.modules.win_iis._srvmgr',
                      MagicMock(return_value={'retcode': 0, 'stdout': '{}'})), \
                patch('salt.modules.win_iis.get_webconfiguration_settings',
                      MagicMock(side_effect=[[], settings])):
            ret = win_iis.set_webconfiguration_settings(name, settings=settings)

        # Verify
        self.assertEqual(ret, True)
        mock_log.debug.assert_called_with('Settings configured successfully: %s', settings)

@ -12,6 +12,7 @@ from tests.support.unit import skipIf, TestCase
from tests.support.mock import (
    NO_MOCK,
    NO_MOCK_REASON,
    Mock,
    MagicMock,
    patch)

@ -107,3 +108,15 @@ class BlockdevTestCase(TestCase, LoaderModuleMockMixin):
                              MagicMock(return_value=True)):
            with patch.dict(blockdev.__opts__, {'test': False}):
                self.assertDictEqual(blockdev.formatted(name), ret)

    def test__checkblk(self):
        '''
        Confirm that we call cmd.run with ignore_retcode=True
        '''
        cmd_mock = Mock()
        with patch.dict(blockdev.__salt__, {'cmd.run': cmd_mock}):
            blockdev._checkblk('/dev/foo')

        cmd_mock.assert_called_once_with(
            ['blkid', '-o', 'value', '-s', 'TYPE', '/dev/foo'],
            ignore_retcode=True)
114
tests/unit/states/test_win_iis.py
Normal file
@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
'''
:synopsis: Unit Tests for Windows iis Module 'state.win_iis'
:platform: Windows
.. versionadded:: 2019.2.2
'''

# Import Python Libs
from __future__ import absolute_import, unicode_literals, print_function

# Import Salt Libs
import salt.states.win_iis as win_iis

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    patch,
    NO_MOCK,
    NO_MOCK_REASON,
)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class WinIisTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.states.win_iis
    '''

    def setup_loader_modules(self):
        return {win_iis: {}}

    def __base_webconfiguration_ret(self, comment='', changes=None, name='', result=None):
        return {
            'name': name,
            'changes': changes if changes else {},
            'comment': comment,
            'result': result,
        }

    def test_webconfiguration_settings_no_settings(self):
        name = 'IIS'
        settings = {}
        expected_ret = self.__base_webconfiguration_ret(name=name, comment='No settings to change provided.',
                                                        result=True)
        actual_ret = win_iis.webconfiguration_settings(name, settings)
        self.assertEqual(expected_ret, actual_ret)

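    # The three side_effect values below model the initial read, the pre-write
    # check, and the post-write read-back; since the read-back still shows the
    # old value, the state must report a failure.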
    def test_webconfiguration_settings_collection_failure(self):
        name = 'IIS:\\'
        settings = {
            'system.applicationHost/sites': {
                'Collection[{name: site0}].logFile.directory': 'C:\\logs\\iis\\site0',
            },
        }
        old_settings = [
            {'filter': 'system.applicationHost/sites', 'name': 'Collection[{name: site0}].logFile.directory',
             'value': 'C:\\logs\\iis\\old_site'}]
        current_settings = old_settings
        new_settings = old_settings
        expected_ret = self.__base_webconfiguration_ret(
            name=name,
            result=False,
            changes={
                'changes': {old_settings[0]['filter'] + '.' + old_settings[0]['name']: {
                    'old': old_settings[0]['value'],
                    'new': settings[old_settings[0]['filter']][old_settings[0]['name']],
                }},
                'failures': {old_settings[0]['filter'] + '.' + old_settings[0]['name']: {
                    'old': old_settings[0]['value'],
                    'new': new_settings[0]['value'],
                }},
            },
            comment='Some settings failed to change.'
        )
        with patch.dict(win_iis.__salt__, {
            'win_iis.get_webconfiguration_settings': MagicMock(
                side_effect=[old_settings, current_settings, new_settings]),
            'win_iis.set_webconfiguration_settings': MagicMock(return_value=True),
        }), patch.dict(win_iis.__opts__, {'test': False}):
            actual_ret = win_iis.webconfiguration_settings(name, settings)
        self.assertEqual(expected_ret, actual_ret)

    def test_webconfiguration_settings_collection(self):
        name = 'IIS:\\'
        settings = {
            'system.applicationHost/sites': {
                'Collection[{name: site0}].logFile.directory': 'C:\\logs\\iis\\site0',
            },
        }
        old_settings = [
            {'filter': 'system.applicationHost/sites', 'name': 'Collection[{name: site0}].logFile.directory',
             'value': 'C:\\logs\\iis\\old_site'}]
        current_settings = [
            {'filter': 'system.applicationHost/sites', 'name': 'Collection[{name: site0}].logFile.directory',
             'value': 'C:\\logs\\iis\\site0'}]
        new_settings = current_settings
        expected_ret = self.__base_webconfiguration_ret(
            name=name,
            result=True,
            changes={old_settings[0]['filter'] + '.' + old_settings[0]['name']: {
                'old': old_settings[0]['value'],
                'new': new_settings[0]['value'],
            }},
            comment='Set settings to contain the provided values.'
        )
        with patch.dict(win_iis.__salt__, {
            'win_iis.get_webconfiguration_settings': MagicMock(
                side_effect=[old_settings, current_settings, new_settings]),
            'win_iis.set_webconfiguration_settings': MagicMock(return_value=True),
        }), patch.dict(win_iis.__opts__, {'test': False}):
            actual_ret = win_iis.webconfiguration_settings(name, settings)
        self.assertEqual(expected_ret, actual_ret)
@ -283,6 +283,31 @@ class LazyLoaderWhitelistTest(TestCase):
        self.assertNotIn('grains.get', self.loader)


class LazyLoaderGrainsBlacklistTest(TestCase):
    '''
    Test the loader of grains with a blacklist
    '''
    def setUp(self):
        self.opts = salt.config.minion_config(None)

    def tearDown(self):
        del self.opts

    def test_grains_blacklist(self):
        opts = copy.deepcopy(self.opts)
        opts['grains_blacklist'] = [
            'master',
            'os*',
            'ipv[46]'
        ]

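        # The blacklist entries are treated as glob-style patterns: every
        # grain whose name starts with 'os', plus ipv4 and ipv6, should be
        # filtered out of the result.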
        grains = salt.loader.grains(opts)
        self.assertNotIn('master', grains)
        self.assertNotIn('os', set([g[:2] for g in list(grains)]))
        self.assertNotIn('ipv4', grains)
        self.assertNotIn('ipv6', grains)


class LazyLoaderSingleItem(TestCase):
    '''
    Test loading a single item via the _load() function
@ -6,10 +6,11 @@
# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import shutil
import tempfile

# Import Salt Testing Libs
from tests.support.unit import skipIf, TestCase
from tests.support.helpers import destructiveTest, skip_if_not_root
from tests.support.runtests import RUNTIME_VARS
from tests.support.mock import (
    MagicMock,
    patch,
@ -109,9 +110,7 @@ class ObjectView(object):  # pylint: disable=too-few-public-methods
        self.__dict__ = d


@destructiveTest
@skip_if_not_root
class LogSettingsParserTests(TestCase):
class ParserBase(object):
    '''
    Unit Tests for Log Level Mixin with Salt parsers
    '''
@ -126,10 +125,25 @@ class LogSettingsParserTests(TestCase):
    logfile_config_setting_name = 'log_file'
    logfile_loglevel_config_setting_name = 'log_level_logfile'  # pylint: disable=invalid-name

    @classmethod
    def setUpClass(cls):
        cls.root_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.root_dir, ignore_errors=True)

    def setup_log(self):
        '''
        Mock logger functions
        '''
        testing_config = self.default_config.copy()
        testing_config['root_dir'] = self.root_dir
        for name in ('pki_dir', 'cachedir'):
            testing_config[name] = name
        testing_config[self.logfile_config_setting_name] = getattr(self, self.logfile_config_setting_name, self.log_file)
        self.testing_config = testing_config
        self.addCleanup(setattr, self, 'testing_config', None)
        self.log_setup = LogSetupMock()
        patcher = patch.multiple(
            log,
@ -151,14 +165,14 @@ class LogSettingsParserTests(TestCase):
        Tests that log level matches the command-line specified value
        '''
        # Set defaults
        default_log_level = self.default_config[self.loglevel_config_setting_name]
        default_log_level = self.testing_config[self.loglevel_config_setting_name]

        # Set log level in CLI
        log_level = 'critical'
        args = ['--log-level', log_level] + self.args

        parser = self.parser()
        with patch(self.config_func, MagicMock(return_value=self.default_config)):
        with patch(self.config_func, MagicMock(return_value=self.testing_config)):
            parser.parse_args(args)
            with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
                parser.setup_logfile_logger()
@ -183,7 +197,7 @@ class LogSettingsParserTests(TestCase):

        # Set log level in config
        log_level = 'info'
        opts = self.default_config.copy()
        opts = self.testing_config.copy()
        opts.update({self.loglevel_config_setting_name: log_level})

        parser = self.parser()
@ -209,12 +223,12 @@ class LogSettingsParserTests(TestCase):
        Tests that log level matches the default value
        '''
        # Set defaults
        log_level = default_log_level = self.default_config[self.loglevel_config_setting_name]
        log_level = default_log_level = self.testing_config[self.loglevel_config_setting_name]

        args = self.args

        parser = self.parser()
        with patch(self.config_func, MagicMock(return_value=self.default_config)):
        with patch(self.config_func, MagicMock(return_value=self.testing_config)):
            parser.parse_args(args)
            with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
                parser.setup_logfile_logger()
@ -242,14 +256,14 @@ class LogSettingsParserTests(TestCase):
        Tests that log file matches the command-line specified value
        '''
        # Set defaults
        log_level = self.default_config[self.loglevel_config_setting_name]
        log_level = self.testing_config[self.loglevel_config_setting_name]

        # Set log file in CLI
        log_file = '{0}_cli.log'.format(self.log_file)
        args = ['--log-file', log_file] + self.args

        parser = self.parser()
        with patch(self.config_func, MagicMock(return_value=self.default_config)):
        with patch(self.config_func, MagicMock(return_value=self.testing_config)):
            parser.parse_args(args)
            with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
                parser.setup_logfile_logger()
@ -276,13 +290,13 @@ class LogSettingsParserTests(TestCase):
        Tests that log file matches the configured value
        '''
        # Set defaults
        log_level = self.default_config[self.loglevel_config_setting_name]
        log_level = self.testing_config[self.loglevel_config_setting_name]

        args = self.args

        # Set log file in config
        log_file = '{0}_config.log'.format(self.log_file)
        opts = self.default_config.copy()
        opts = self.testing_config.copy()
        opts.update({self.logfile_config_setting_name: log_file})

        parser = self.parser()
@ -313,13 +327,14 @@ class LogSettingsParserTests(TestCase):
        Tests that log file matches the default value
        '''
        # Set defaults
        log_level = self.default_config[self.loglevel_config_setting_name]
        log_file = default_log_file = self.default_config[self.logfile_config_setting_name]
        log_level = self.testing_config[self.loglevel_config_setting_name]
        log_file = self.testing_config[self.logfile_config_setting_name]
        default_log_file = self.default_config[self.logfile_config_setting_name]

        args = self.args

        parser = self.parser()
        with patch(self.config_func, MagicMock(return_value=self.default_config)):
        with patch(self.config_func, MagicMock(return_value=self.testing_config)):
            parser.parse_args(args)
            with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
                parser.setup_logfile_logger()
@ -351,14 +366,14 @@ class LogSettingsParserTests(TestCase):
        Tests that file log level matches the command-line specified value
        '''
        # Set defaults
        default_log_level = self.default_config[self.loglevel_config_setting_name]
        default_log_level = self.testing_config[self.loglevel_config_setting_name]

        # Set log file level in CLI
        log_level_logfile = 'error'
        args = ['--log-file-level', log_level_logfile] + self.args

        parser = self.parser()
        with patch(self.config_func, MagicMock(return_value=self.default_config)):
        with patch(self.config_func, MagicMock(return_value=self.testing_config)):
            parser.parse_args(args)
            with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
                parser.setup_logfile_logger()
@ -386,13 +401,13 @@ class LogSettingsParserTests(TestCase):
        Tests that log file level matches the configured value
        '''
        # Set defaults
        log_level = self.default_config[self.loglevel_config_setting_name]
        log_level = self.testing_config[self.loglevel_config_setting_name]

        args = self.args

        # Set log file level in config
        log_level_logfile = 'info'
        opts = self.default_config.copy()
        opts = self.testing_config.copy()
        opts.update({self.logfile_loglevel_config_setting_name: log_level_logfile})

        parser = self.parser()
@ -424,7 +439,7 @@ class LogSettingsParserTests(TestCase):
        Tests that log file level matches the default value
        '''
        # Set defaults
        default_log_level = self.default_config[self.loglevel_config_setting_name]
        default_log_level = self.testing_config[self.loglevel_config_setting_name]

        log_level = default_log_level
        log_level_logfile = default_log_level
@ -432,7 +447,7 @@ class LogSettingsParserTests(TestCase):
        args = self.args

        parser = self.parser()
        with patch(self.config_func, MagicMock(return_value=self.default_config)):
        with patch(self.config_func, MagicMock(return_value=self.testing_config)):
            parser.parse_args(args)
            with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
                parser.setup_logfile_logger()
@ -467,7 +482,7 @@ class LogSettingsParserTests(TestCase):

        args = ['--log-file-level', log_level_logfile] + self.args

        opts = self.default_config.copy()
        opts = self.testing_config.copy()
        opts.update({self.loglevel_config_setting_name: log_level})

        parser = self.parser()
@ -502,7 +517,7 @@ class LogSettingsParserTests(TestCase):
        args = self.args
        log_file = self.log_file
        log_file_name = self.logfile_config_setting_name
        opts = self.default_config.copy()
        opts = self.testing_config.copy()
        opts.update({'log_file': log_file})
        if log_file_name != 'log_file':
            opts.update({log_file_name: getattr(self, log_file_name)})
@ -519,10 +534,33 @@ class LogSettingsParserTests(TestCase):
        else:
            self.assertEqual(os.path.getsize(getattr(self, log_file_name)), 0)

    def test_callbacks_uniqueness(self):
        '''
        Test that the callbacks are only added once, no matter
        how many instances of the parser we create
        '''
        mixin_container_names = ('_mixin_setup_funcs',
                                 '_mixin_process_funcs',
                                 '_mixin_after_parsed_funcs',
                                 '_mixin_before_exit_funcs')
        parser = self.parser()
        nums_1 = {}
        for cb_container in mixin_container_names:
            obj = getattr(parser, cb_container)
            nums_1[cb_container] = len(obj)

        # The next time we instantiate the parser, the counts should be equal
        parser = self.parser()
        nums_2 = {}
        for cb_container in mixin_container_names:
            obj = getattr(parser, cb_container)
            nums_2[cb_container] = len(obj)
        self.assertDictEqual(nums_1, nums_2)


@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(salt.utils.platform.is_windows(), 'Windows uses a logging listener')
class MasterOptionParserTestCase(LogSettingsParserTests):
class MasterOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing Salt Master options
    '''
@ -546,10 +584,14 @@ class MasterOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.MasterOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(salt.utils.platform.is_windows(), 'Windows uses a logging listener')
class MinionOptionParserTestCase(LogSettingsParserTests):
class MinionOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing Salt Minion options
    '''
@ -573,9 +615,13 @@ class MinionOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.MinionOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class ProxyMinionOptionParserTestCase(LogSettingsParserTests):
class ProxyMinionOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing Salt Proxy Minion options
    '''
@ -600,10 +646,14 @@ class ProxyMinionOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.ProxyMinionOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(salt.utils.platform.is_windows(), 'Windows uses a logging listener')
class SyndicOptionParserTestCase(LogSettingsParserTests):
class SyndicOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing Salt Syndic options
    '''
@ -631,9 +681,15 @@ class SyndicOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SyndicOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)
        if os.path.exists(self.syndic_log_file):
            os.unlink(self.syndic_log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCMDOptionParserTestCase(LogSettingsParserTests):
class SaltCMDOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing Salt CLI options
    '''
@ -660,9 +716,13 @@ class SaltCMDOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SaltCMDOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCPOptionParserTestCase(LogSettingsParserTests):
class SaltCPOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing salt-cp options
    '''
@ -689,9 +749,13 @@ class SaltCPOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SaltCPOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltKeyOptionParserTestCase(LogSettingsParserTests):
class SaltKeyOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing salt-key options
    '''
@ -785,7 +849,7 @@ class SaltKeyOptionParserTestCase(LogSettingsParserTests):
        Tests that log level default value is ignored
        '''
        # Set defaults
        default_log_level = self.default_config[self.loglevel_config_setting_name]
        default_log_level = self.testing_config[self.loglevel_config_setting_name]

        log_level = None
        args = self.args
@ -806,9 +870,15 @@ class SaltKeyOptionParserTestCase(LogSettingsParserTests):
        # Check log file logger log level
        self.assertEqual(self.log_setup.log_level_logfile, default_log_level)

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)
        if os.path.exists(self.key_logfile):
            os.unlink(self.key_logfile)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCallOptionParserTestCase(LogSettingsParserTests):
class SaltCallOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing salt-call options
    '''
@ -835,9 +905,13 @@ class SaltCallOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SaltCallOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltRunOptionParserTestCase(LogSettingsParserTests):
class SaltRunOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing salt-run options
    '''
@ -864,9 +938,13 @@ class SaltRunOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SaltRunOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSSHOptionParserTestCase(LogSettingsParserTests):
class SaltSSHOptionParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing salt-ssh options
    '''
@ -897,9 +975,15 @@ class SaltSSHOptionParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SaltSSHOptionParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)
        if os.path.exists(self.ssh_log_file):
            os.unlink(self.ssh_log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCloudParserTestCase(LogSettingsParserTests):
class SaltCloudParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing Salt Cloud options
    '''
@ -930,9 +1014,13 @@ class SaltCloudParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SaltCloudParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SPMParserTestCase(LogSettingsParserTests):
class SPMParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing SPM options
    '''
@ -964,9 +1052,15 @@ class SPMParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SPMParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)
        if os.path.exists(self.spm_logfile):
            os.unlink(self.spm_logfile)


@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltAPIParserTestCase(LogSettingsParserTests):
class SaltAPIParserTestCase(ParserBase, TestCase):
    '''
    Tests parsing salt-api options
    '''
@ -998,6 +1092,12 @@ class SaltAPIParserTestCase(LogSettingsParserTests):
        self.parser = salt.utils.parsers.SaltAPIParser
        self.addCleanup(delattr, self, 'parser')

    def tearDown(self):
        if os.path.exists(self.log_file):
            os.unlink(self.log_file)
        if os.path.exists(self.api_logfile):
            os.unlink(self.api_logfile)


@skipIf(not pytest, False)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@ -1074,7 +1174,3 @@ class DaemonMixInTestCase(TestCase):
        assert salt.utils.parsers.os.unlink.call_count == 1
        salt.utils.parsers.logger.info.assert_not_called()
        salt.utils.parsers.logger.debug.assert_not_called()


# Hide the class from unittest framework when it searches for TestCase classes in the module
del LogSettingsParserTests
@ -43,7 +43,9 @@ def die(func):

        def _die():
            salt.utils.process.appendproctitle('test_{0}'.format(name))
        setattr(self, 'die_' + name, _die)
        attrname = 'die_' + name
        setattr(self, attrname, _die)
        self.addCleanup(delattr, self, attrname)

    return wrapper
@ -61,7 +63,9 @@ def incr(func):
            salt.utils.process.appendproctitle('test_{0}'.format(name))
            for _ in range(0, num):
                counter.value += 1
        setattr(self, 'incr_' + name, _incr)
        attrname = 'incr_' + name
        setattr(self, attrname, _incr)
        self.addCleanup(delattr, self, attrname)

    return wrapper
@ -79,7 +83,9 @@ def spin(func):
            salt.utils.process.appendproctitle('test_{0}'.format(name))
            while True:
                time.sleep(1)
        setattr(self, 'spin_' + name, _spin)
        attrname = 'spin_' + name
        setattr(self, attrname, _spin)
        self.addCleanup(delattr, self, attrname)

    return wrapper
@ -241,6 +247,48 @@ class TestProcess(TestCase):
    # pylint: enable=assignment-from-none


class TestProcessCallbacks(TestCase):

    @staticmethod
    def process_target(evt):
        evt.set()

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_callbacks(self):
        'Validate Process call after fork and finalize methods'
        teardown_to_mock = 'salt.log.setup.shutdown_multiprocessing_logging'
        log_to_mock = 'salt.utils.process.Process._setup_process_logging'
        with patch(teardown_to_mock) as ma, patch(log_to_mock) as mb:
            evt = multiprocessing.Event()
            proc = salt.utils.process.Process(target=self.process_target, args=(evt,))
            proc.run()
            assert evt.is_set()
        mb.assert_called()
        ma.assert_called()

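    # run() is invoked directly instead of via start(), so the mocked logging
    # setup and teardown hooks fire in this process and can be asserted on.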
    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_callbacks_called_when_run_overriden(self):
        'Validate Process sub classes call after fork and finalize methods when run is overridden'

        class MyProcess(salt.utils.process.Process):

            def __init__(self):
                super(MyProcess, self).__init__()
                self.evt = multiprocessing.Event()

            def run(self):
                self.evt.set()

        teardown_to_mock = 'salt.log.setup.shutdown_multiprocessing_logging'
        log_to_mock = 'salt.utils.process.Process._setup_process_logging'
        with patch(teardown_to_mock) as ma, patch(log_to_mock) as mb:
            proc = MyProcess()
            proc.run()
            assert proc.evt.is_set()
        ma.assert_called()
        mb.assert_called()


class TestSignalHandlingProcess(TestCase):

    @classmethod
@ -323,33 +371,6 @@ class TestSignalHandlingProcess(TestCase):
    def no_op_target():
        pass

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_signal_processing_test_after_fork_called(self):
        'Validate Process and sub classes call after fork methods'
        evt = multiprocessing.Event()
        sig_to_mock = 'salt.utils.process.SignalHandlingProcess._setup_signals'
        log_to_mock = 'salt.utils.process.Process._setup_process_logging'
        with patch(sig_to_mock) as ma, patch(log_to_mock) as mb:
            self.sh_proc = salt.utils.process.SignalHandlingProcess(target=self.no_op_target)
            self.sh_proc.run()
            ma.assert_called()
            mb.assert_called()

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_signal_processing_test_final_methods_called(self):
        'Validate Process and sub classes call finalize methods'
        evt = multiprocessing.Event()
        teardown_to_mock = 'salt.log.setup.shutdown_multiprocessing_logging'
        log_to_mock = 'salt.utils.process.Process._setup_process_logging'
        sig_to_mock = 'salt.utils.process.SignalHandlingProcess._setup_signals'
        # Mock _setup_signals so we do not register one for this process.
        with patch(sig_to_mock):
            with patch(teardown_to_mock) as ma, patch(log_to_mock) as mb:
                self.sh_proc = salt.utils.process.SignalHandlingProcess(target=self.no_op_target)
                self.sh_proc.run()
                ma.assert_called()
                mb.assert_called()

    @staticmethod
    def pid_setting_target(sub_target, val, evt):
        val.value = os.getpid()
@ -406,6 +427,58 @@ class TestSignalHandlingProcess(TestCase):
            proc.join(30)


class TestSignalHandlingProcessCallbacks(TestCase):

    @staticmethod
    def process_target(evt):
        evt.set()

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_callbacks(self):
        'Validate SignalHandlingProcess call after fork and finalize methods'

        teardown_to_mock = 'salt.log.setup.shutdown_multiprocessing_logging'
        log_to_mock = 'salt.utils.process.Process._setup_process_logging'
        sig_to_mock = 'salt.utils.process.SignalHandlingProcess._setup_signals'
        # Mock _setup_signals so we do not register one for this process.
        evt = multiprocessing.Event()
        with patch(sig_to_mock):
            with patch(teardown_to_mock) as ma, patch(log_to_mock) as mb:
                sh_proc = salt.utils.process.SignalHandlingProcess(
                    target=self.process_target,
                    args=(evt,)
                )
                sh_proc.run()
                assert evt.is_set()
        ma.assert_called()
        mb.assert_called()

    @skipIf(NO_MOCK, NO_MOCK_REASON)
    def test_callbacks_called_when_run_overriden(self):
        'Validate SignalHandlingProcess sub classes call after fork and finalize methods when run is overridden'

        class MyProcess(salt.utils.process.SignalHandlingProcess):

            def __init__(self):
                super(MyProcess, self).__init__()
                self.evt = multiprocessing.Event()

            def run(self):
                self.evt.set()

        teardown_to_mock = 'salt.log.setup.shutdown_multiprocessing_logging'
        log_to_mock = 'salt.utils.process.Process._setup_process_logging'
        sig_to_mock = 'salt.utils.process.SignalHandlingProcess._setup_signals'
        # Mock _setup_signals so we do not register one for this process.
        with patch(sig_to_mock):
            with patch(teardown_to_mock) as ma, patch(log_to_mock) as mb:
                sh_proc = MyProcess()
                sh_proc.run()
                assert sh_proc.evt.is_set()
        ma.assert_called()
        mb.assert_called()


class TestDup2(TestCase):

    def test_dup2_no_fileno(self):
Some files were not shown because too many files have changed in this diff