Merge branch 'master' into fix/file.managed_binary_file

Joe Eacott 2019-11-13 17:29:13 -07:00 committed by GitHub
commit aa2829791b
132 changed files with 4788 additions and 12018 deletions

@@ -2,12 +2,13 @@
// Define the maximum time, in hours, that a test run should run for
def global_timeout = 2
def salt_target_branch = 'master'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '10')),
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
@@ -16,6 +17,7 @@ if (buildNumber > 1) {
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
def shell_header
@@ -41,13 +43,22 @@ wrappedNode('docs', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Build') {
stage('Build HTML Docs') {
sh shell_header + '''
eval "$(pyenv init -)"
pyenv shell 3.6.8
nox -e docs
nox -e 'docs-html(compress=True)'
'''
archiveArtifacts artifacts: 'doc/doc-archive.tar.gz'
archiveArtifacts artifacts: 'doc/html-archive.tar.gz'
}
stage('Build Man Pages') {
sh shell_header + '''
eval "$(pyenv init -)"
pyenv shell 3.6.8
nox -e 'docs-man(compress=True, update=False)'
'''
archiveArtifacts artifacts: 'doc/man-archive.tar.gz'
}
}
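
For context, the PR-only block this commit adds to the docs pipeline above (and, parameterized by a concurrent_builds variable, to every test pipeline below) relies on the standard Jenkins Pipeline milestone step to abort superseded PR builds. A minimal consolidated sketch of that pattern, using only names and comments that appear in these files, looks roughly like this:

    // Sketch of the shared milestone pattern; concurrent_builds is 1 in the pipelines below.
    def concurrent_builds = 1
    // Only set milestones on PR builds
    if (env.CHANGE_ID) {
        def buildNumber = env.BUILD_NUMBER as int
        if (buildNumber > concurrent_builds) {
            // This cancels the previous build, which defined a matching milestone
            milestone(buildNumber - concurrent_builds)
        }
        // Define a milestone for this build so that, if another build starts, this one is aborted
        milestone(buildNumber)
    }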

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '1'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
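
The bulk of each test-pipeline change above is a large deletion: the inline kitchen create/converge/verify/download/cleanup logic is replaced by a single call to runTests(...), a step provided by the salt@master-1.3 shared library referenced in the new @Library line. That library is not part of this diff, so the following is only a hypothetical skeleton of how such a step is conventionally structured (a vars/runTests.groovy file whose call method receives the named options seen above), not the actual implementation:

    // Hypothetical skeleton only -- the real step ships in the salt@master-1.3
    // shared library and is not shown in this commit.
    def call(Map options) {
        def platform = "${options.distro_name}-${options.distro_version}"
        node(options.jenkins_slave_label) {
            timeout(time: options.testrun_timeout, unit: 'HOURS') {
                stage('Run Tests') {
                    echo "Running ${options.nox_env_name} on ${platform} (${options.python_version})"
                    // ...the kitchen create/converge/verify/destroy steps that were previously inlined...
                }
            }
        }
    }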

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'arch'
def distro_version = 'lts'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '-n integration.modules.test_pkg'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'arch'
def distro_version = 'lts'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '-n integration.modules.test_pkg'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '6'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -0,0 +1,35 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def nox_env_name = 'runtests-cloud'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ-M2Crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,30 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy
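
Each of the inline pipelines removed above guards PR builds with the milestone step; the only functional change in that block is that the hard-coded offset of 1 becomes the new concurrent_builds variable. A hedged reading of that arithmetic, restated with comments (the interpretation of concurrent_builds greater than 1 is an assumption, not something this diff exercises, since every file here sets it to 1):

// With concurrent_builds = 1 this is exactly the old behaviour:
// passing milestone(buildNumber - 1) matches the milestone the previous
// PR build defined for itself, so that build is cancelled, and
// milestone(buildNumber) lets the next build cancel this one in turn.
// Raising concurrent_builds would shift the cancelled milestone further
// back, apparently to let that many PR builds run side by side.
def concurrent_builds = 1
def buildNumber = env.BUILD_NUMBER as int
if (env.CHANGE_ID) {
    if (buildNumber > concurrent_builds) {
        milestone(buildNumber - concurrent_builds)
    }
    milestone(buildNumber)
}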

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ-Pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'Tornado'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tornado'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -0,0 +1,35 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def nox_env_name = 'runtests-cloud'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
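
The new Jenkinsfiles above reduce to a block of job parameters plus a single runTests(...) call; the step itself ships with the salt@master-1.3 shared library and is not shown in this diff. Purely as an illustration of the shared-library pattern, a global-variable step (vars/runTests.groovy) accepting these named arguments could be shaped roughly like the sketch below -- the body is a placeholder summarising the stages the removed inline pipelines ran, not the library's actual code:

// Hypothetical sketch only; the real implementation lives in the
// salt@master-1.3 shared library referenced by @Library above.
def call(Map options = [:]) {
    def testrunTimeout = options.get('testrun_timeout', 6)
    // one extra hour so artifacts can still be pulled after a test timeout
    def globalTimeout = testrunTimeout + 1

    wrappedNode(options.jenkins_slave_label, globalTimeout, '#jenkins-prod-pr') {
        withEnv([
            "NOX_ENV_NAME=${options.nox_env_name}",
            "NOX_PASSTHROUGH_OPTS=${options.nox_passthrough_opts}",
            "GOLDEN_IMAGES_CI_BRANCH=${options.golden_images_branch}",
            "TEST_SUITE=${options.python_version}",
            "TEST_PLATFORM=${options.distro_name}-${options.distro_version}",
            "FORCE_FULL=${options.run_full}",
        ]) {
            stage('Create VM')   { /* kitchen create, spot instances with on-demand fallback */ }
            stage('Converge VM') { /* kitchen converge via ssh-agent */ }
            stage('Run Tests')   { /* kitchen verify, then artifact download */ }
            stage('Cleanup')     { /* kitchen destroy, junit/archive, coverage upload */ }
        }
    }
}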

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ-M2Crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,30 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy
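
The two proxy-test files (py2 near the top of this set and the py3 one just above) are the only ones passing extra_codecov_flags: ["proxy"]. In the removed inline code the flag string was assembled directly in the CODECOV_FLAGS export as distro plus version, python version and transport; with the transport no longer part of the consolidated call, the extra flags presumably get appended by the shared step. A small sketch of that composition, assuming list-valued arguments (illustrative only, not the library's code):

// Assumed composition of the Codecov flag string inside the shared step;
// mirrors the old CODECOV_FLAGS=<distro><version>,<python_version>,... export.
def flags = ["${options.distro_name}${options.distro_version}", options.python_version]
flags += (options.extra_codecov_flags ?: [])
env.CODECOV_FLAGS = flags.join(',')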

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ-Pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '10'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
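
The concurrent_builds arithmetic above is easiest to read with concrete numbers. With concurrent_builds = 1 (the value used in these pipelines), each PR build first passes the milestone of the build before it and then declares its own, and Jenkins' milestone step aborts any older build that can no longer reach the newer milestone. A worked example with a hypothetical build number:

// Hypothetical PR build #13, concurrent_builds = 1 -- numbers are for illustration only.
def concurrent_builds = 1
def buildNumber = 13            // env.BUILD_NUMBER in the real pipeline

if (buildNumber > concurrent_builds) {
    milestone(buildNumber - concurrent_builds)   // passes milestone 12
}
milestone(buildNumber)                           // declares milestone 13

// Build #12 declared milestones 11 and 12 when it started, but it will never
// declare milestone 13. As soon as build #13 passes milestone 13, Jenkins
// aborts the still-running build #12, so with a value of 1 only the newest
// PR build for a change keeps running.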


@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
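
One pattern in the pipeline above that is easy to miss: kitchen verify runs twice. The Run Tests stage runs it with DONT_DOWNLOAD_ARTEFACTS=1 so the suite executes without pulling results back, and the Download Artefacts stage in the finally block runs the same verify again with ONLY_DOWNLOAD_ARTEFACTS=1 so coverage and JUnit XML still land in the workspace even when the tests failed. How those variables are honoured is decided by the nox-based verifier configured through SALT_KITCHEN_VERIFIER, which is not part of this diff, so the condensed sketch below is an assumption about that behaviour rather than a definition of it:

stage('Run Tests') {
    withEnv(['DONT_DOWNLOAD_ARTEFACTS=1']) {
        // First pass: run the test suite only; artifact download is skipped
        // (assuming the kitchen verifier checks this variable).
        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
    }
}
// ... later, inside the finally block ...
stage('Download Artefacts') {
    withEnv(['ONLY_DOWNLOAD_ARTEFACTS=1']) {
        // Second pass: re-run verify purely to copy artifacts back; the
        // "|| exit 0" keeps a download hiccup from masking the real test result.
        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0'
    }
}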


@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
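
The Create VM stage above packs a spot-instance strategy into a single shell line: a random sleep staggers concurrent jobs, spot.yml is copied over .kitchen.local.yml so kitchen requests spot capacity first, and if that create fails the instance is destroyed, the local override is removed, and the create is retried (presumably at on-demand pricing; spot.yml itself is not in this diff), with retry(3) wrapped around the whole thing. Roughly the same flow, unpacked and commented, under that reading of spot.yml:

stage('Create VM') {
    retry(3) {
        sh '''
        # Stagger parallel jobs so they do not all hit EC2 at the same instant.
        t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
        # Ask for spot capacity first; .kitchen.local.yml overrides the shared
        # kitchen driver settings for this run only.
        cp -f ~/workspace/spot.yml .kitchen.local.yml
        if ! bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; then
            # Spot request failed or timed out: clean up, drop the override,
            # and fall back to a regular instance before retry() tries again.
            bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM
            rm .kitchen.local.yml
            bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM
        fi
        echo "ExitCode: $?"
        '''
    }
}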


@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
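
wrappedNode, used by every pipeline in this commit, also comes from the shared library and is not shown in this diff. From its call sites, wrappedNode(label, hours, slack_channel) { ... }, it evidently allocates an agent with the given label, applies the global timeout, and reports to the named Slack channel. A minimal sketch under those assumptions; the notification step and any extra bookkeeping in the real helper may well differ:

// vars/wrappedNode.groovy -- assumption-laden sketch, not the library's code.
def call(String label, hours, String notify_channel, Closure body) {
    node(label) {
        timeout(time: hours, unit: 'HOURS') {
            try {
                body()
            } catch (e) {
                // Assumed: failures are surfaced to the given Slack channel.
                slackSend(channel: notify_channel, color: 'danger',
                          message: "${env.JOB_NAME} #${env.BUILD_NUMBER} failed (${env.BUILD_URL})")
                throw e
            }
        }
    }
}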


@ -1,159 +0,0 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy


@ -1,159 +0,0 @@
@Library('salt@1.1') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// vim: ft=groovy


@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy


@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
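
In the Linux pipelines above that still call the Codecov bash uploader directly, the upload is gated on FORCE_FULL, i.e. the runFull parameter: coverage only goes up when the full suite ran and artifacts/coverage/coverage.xml actually exists. The [ -n ... -a ... ] compound test works, but the -a operator inside [ ] is marked obsolescent by POSIX; the same guard split into two tests reads a little more plainly (a readability variant, not what the commit ships):

sh '''
if [ "${FORCE_FULL}" = "true" ] && [ -f artifacts/coverage/coverage.xml ]; then
    (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''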


@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'highsierra'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
        # don't delete VMs that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy
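
Working through the string assembly in the Upload Coverage stage above for this job: distro_strings is ['macosx', 'highsierra'] and report_strings is ['py2', 'runtests', 'zeromq'] (python_version plus nox_env_name split on '-'), so report_name evaluates to macosx-highsierra-py2-runtests-zeromq and report_flags to ['macosxhighsierra', 'py2', 'runtests', 'zeromq']. uploadCodeCoverage itself is another shared-library step this diff does not include; a plausible minimal shape, assuming it wraps the same Codecov bash uploader the older pipelines call directly:

// vars/uploadCodeCoverage.groovy -- sketch based only on how it is called above;
// the real step in the salt@master-1.3 library may differ.
def call(Map options) {
    def report_path  = options.report_path               // e.g. artifacts/coverage/coverage.xml
    def report_name  = options.report_name               // e.g. macosx-highsierra-py2-runtests-zeromq
    def report_flags = options.report_flags.join(',')    // e.g. macosxhighsierra,py2,runtests,zeromq
    withCredentials([string(credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN')]) {
        sh """
        if [ -f '${report_path}' ]; then
            (curl -L https://codecov.io/bash | /bin/sh -s -- -R \$(pwd) -f '${report_path}' -n '${report_name}' -F '${report_flags}') || true
        fi
        """
    }
}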


@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'highsierra'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
        # don't delete VMs that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy

View File

@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@ -9,10 +9,11 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,31 +22,31 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@ -65,7 +66,6 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
@ -88,7 +88,7 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@ -128,15 +128,18 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
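For context, a minimal sketch of what a shared-library step like uploadCodeCoverage presumably wraps, reconstructed from the inline codecov upload it replaces here; the parameter names and behaviour below are assumptions, not the actual salt@master-1.3 implementation:
// vars/uploadCodeCoverage.groovy (hypothetical sketch; names assumed)
def call(Map kwargs = [:]) {
    def reportPath  = kwargs.report_path
    def reportName  = kwargs.report_name
    def reportFlags = (kwargs.report_flags ?: []).join(',')
    withCredentials([string(credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN')]) {
        // Same pattern as the inline upload this call replaces: never fail the
        // build just because the coverage upload failed.
        sh """
        if [ -f '${reportPath}' ]; then
            (curl -L https://codecov.io/bash | /bin/sh -s -- -R \$(pwd) -f '${reportPath}' -n '${reportName}' -F '${reportFlags}') || true
        fi
        """
    }
}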

View File

@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@ -9,10 +9,11 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,31 +22,31 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@ -65,7 +66,6 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
@ -88,7 +88,7 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@ -128,15 +128,18 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}

View File

@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'sierra'
def python_version = 'py2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete VMs that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy

View File

@ -0,0 +1,149 @@
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'sierra'
def python_version = 'py3'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
parameters([
booleanParam(defaultValue: true, description: 'Run full test suite', name: 'runFull')
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
sh '''
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 86400) and (.State == "running"))|.ID'`
do
prlctl stop $i --kill
done
# don't delete VMs that haven't started yet ((.State == "stopped") and (.Uptime == "0"))
for i in `prlctl list -aij|jq -r '.[]|select((.Uptime|tonumber > 0) and (.State != "running"))|.ID'`
do
prlctl delete $i
done
'''
}
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with vagrant macos --without ec2 windows opennebula docker'
}
stage('Create VM') {
sh '''
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}
}
}
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
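# Sleep a random 30-120 seconds so concurrent jobs stagger their EC2 API calls.
# spot.yml is presumably a kitchen override requesting EC2 spot instances; if the
# spot create fails, destroy the half-created instance, drop the override and
# retry with the default on-demand configuration.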
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
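# Log which AMI and EC2 instance type kitchen resolved for this run.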
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
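For orientation, a rough sketch of the shape of the runTests step these Jenkinsfiles now delegate to; this is an assumption reconstructed from the inline pipeline it replaces, not the actual salt@master-1.3 shared-library code:
// vars/runTests.groovy (hypothetical outline; names and structure are assumptions)
def call(Map options = [:]) {
    def global_timeout = options.testrun_timeout + 1
    wrappedNode(options.jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
        withEnv([
            "NOX_ENV_NAME=${options.nox_env_name}",
            "NOX_PASSTHROUGH_OPTS=${options.nox_passthrough_opts}",
            "GOLDEN_IMAGES_CI_BRANCH=${options.golden_images_branch}",
            "TEST_SUITE=${options.python_version}",
            "TEST_PLATFORM=${options.distro_name}-${options.distro_version}",
            "FORCE_FULL=${options.run_full}",
        ]) {
            stage('Create VM') {
                sh 'bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM'
            }
            try {
                timeout(time: options.testrun_timeout, unit: 'HOURS') {
                    stage('Converge VM') {
                        sh 'bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM'
                    }
                    stage('Run Tests') {
                        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM'
                    }
                }
            } finally {
                // Always collect artefacts and tear the instance down, even on failure.
                archiveArtifacts artifacts: 'artifacts/*,artifacts/**/*', allowEmptyArchive: true
                junit 'artifacts/xml-unittests-output/*.xml'
                stage('Cleanup') {
                    sh 'bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM'
                }
            }
        }
    }
}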

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ-M2Crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,30 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ-Pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
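For reference, the values exported into the environment above are derived directly from the per-job variables; a self-contained restatement, illustrative only and using this file's values:

// Illustrative restatement of how NOX_ENV_NAME and CODECOV_FLAGS are built above.
def distro_name    = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'TCP'
def nox_env_name   = "runtests-${test_transport.toLowerCase()}"    // 'runtests-tcp'
def codecov_flags  = "${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}"
// 'ubuntu1604,py2,tcp', matching the CODECOV_FLAGS value passed to the codecov uploader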

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'Tornado'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tornado'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
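Every stage above repeats the same two-file log rotation: if the per-instance log or kitchen.log is non-empty, it is moved aside with a per-stage suffix so the next stage starts with fresh logs and each phase's output is archived separately. A small helper like the hypothetical one below could express that once; it is only a sketch, not something the shared library is known to provide.

// Hypothetical helper, shown only to illustrate the log-rotation pattern repeated above.
def renameKitchenLogs(String suffix, String pythonVersion, String distroName, String distroVersion) {
    sh """
    if [ -s ".kitchen/logs/${pythonVersion}-${distroName}-${distroVersion}.log" ]; then
      mv ".kitchen/logs/${pythonVersion}-${distroName}-${distroVersion}.log" ".kitchen/logs/${pythonVersion}-${distroName}-${distroVersion}-${suffix}.log"
    fi
    if [ -s ".kitchen/logs/kitchen.log" ]; then
      mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-${suffix}.log"
    fi
    """
}

// Usage after, e.g., the converge stage:
// renameKitchenLogs('converge', python_version, distro_name, distro_version)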

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
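The nested timeouts above are intentional: the node-level budget is the test run timeout plus one hour for artifact download, while the inner timeout stops converge/verify fifteen minutes early so the finally block still has time to shuffle logs and destroy the instance. The arithmetic, spelled out for reference (illustrative only):

// Illustrative arithmetic for the nested timeouts used above.
def testrun_timeout = 6                          // hours allotted to converge + verify
def global_timeout  = testrun_timeout + 1        // node budget: one extra hour for artifact download
def inner_minutes   = testrun_timeout * 60 - 15  // 345 minutes: leave 15 minutes of headroom
                                                 // for the finally{} log handling and cleanup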

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ-M2Crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-m2crypto'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
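The Create VM stage above first requests a spot instance by copying a spot.yml override into .kitchen.local.yml; if that create fails it destroys the half-created instance, removes the override, and retries on demand, all inside retry(3) with a randomized sleep to stagger concurrent jobs. A condensed restatement of the same logic, for readability only and not a change to the pipeline:

// Condensed, illustrative restatement of the spot-then-on-demand create fallback above.
retry(3) {
    sh '''
    t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t   # stagger concurrent creates
    cp -f ~/workspace/spot.yml .kitchen.local.yml                    # prefer a spot instance
    if ! bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; then
        bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM       # clean up the failed attempt
        rm .kitchen.local.yml                                        # drop the override: fall back to on-demand
        bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM
    fi
    '''
}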

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,30 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy
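Coverage is only uploaded when the build was forced to run the full suite and a coverage.xml was actually produced, and any uploader failure is swallowed so it cannot change the build result. A commented restatement of that guard, illustrative only and not a change to the pipeline:

// Commented, illustrative restatement of the coverage-upload guard above.
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
    sh '''
    # Upload only for forced full runs that actually produced a coverage report.
    if [ "${FORCE_FULL}" = "true" ] && [ -f artifacts/coverage/coverage.xml ]; then
        # The trailing "|| true" keeps an uploader failure from failing the build.
        (curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
    fi
    '''
}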

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ-Pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
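Note that kitchen verify runs twice in each of these pipelines: once in the Run Tests stage with DONT_DOWNLOAD_ARTEFACTS=1 to execute the test suite, and once in the finally block with ONLY_DOWNLOAD_ARTEFACTS=1 purely to pull artifacts off the instance, where the trailing '|| exit 0' keeps that best-effort download from masking the real test result. Sketched as a pair of helpers with hypothetical names, illustrative only:

// Hypothetical helpers illustrating the two-pass verify used above; these names are not from the library.
def runKitchenTests() {
    withEnv(['DONT_DOWNLOAD_ARTEFACTS=1']) {
        // Run the test suite on the instance without copying artifacts back yet.
        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
    }
}

def downloadKitchenArtefacts() {
    withEnv(['ONLY_DOWNLOAD_ARTEFACTS=1']) {
        // Re-run verify only to fetch artifacts; never let this step fail the build.
        sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0'
    }
}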

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'TCP'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-tcp'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy
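The Clone stage fetches the target branch into refs/remotes/origin/${SALT_TARGET_BRANCH}, presumably so that tooling driven by NOX_ENABLE_FROM_FILENAMES can diff the PR against it and select tests from the changed files. Under that assumption, a changed-file list could be derived roughly as follows; the git diff line is an illustration and is not taken from this pipeline:

// Illustrative only: deriving a changed-file list after the fetch done in the Clone stage.
sh '''
# Assumption: the changed-file list would feed test selection (NOX_ENABLE_FROM_FILENAMES).
git diff --name-only "origin/${SALT_TARGET_BRANCH}...HEAD" > changed-files.txt
'''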

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one (1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@@ -21,139 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,138 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,138 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,138 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,17 @@
@Library('salt@1.1') _
@Library('salt@master-1.3') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_env_name = 'runtests-zeromq'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -21,138 +20,29 @@ properties([
])
])
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -10,6 +10,8 @@ properties([
def shell_header
// Only set milestones on PR builds
if (env.CHANGE_ID) {
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
@ -18,6 +20,7 @@ if (buildNumber > 1) {
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
}
def lint_report_issues = []
@ -30,7 +33,6 @@ wrappedNode('lint', global_timeout, '#jenkins-prod-pr') {
stage('checkout-scm') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle

View File

@ -6,12 +6,11 @@ codecov:
branch: master
notify:
require_ci_to_pass: no
require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
ignore:
- ^*.py$
- doc/.*
- tests/.*
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
coverage:
round: up
@ -20,30 +19,61 @@ coverage:
status:
project: # measuring the overall project coverage
default:
default: false # disable the default status that measures entire project
salt: # declare a new status context "salt"
enabled: yes # must be yes|true to enable this status
if_no_uploads: error # will post commit status of "error" if no coverage reports we uploaded
paths: "!tests/" # remove all files in "tests/"
target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: success # if ci fails report status as success, error, or failure
if_ci_failed: error # if ci fails report status as success, error, or failure
tests: # declare a new status context "tests"
enabled: yes # must be yes|true to enable this status
#target: 100% # we always want 100% coverage here
target: auto # auto while we get this going
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
paths: "!salt/" # only include coverage in "tests/" folder
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: error # if ci fails report status as success, error, or failure
patch: # pull requests only: this commit status will measure the
# entire pull requests Coverage Diff. Checking if the lines
# adjusted are covered at least X%.
default:
enabled: no # must be yes|true to enable this status
target: 80% # specify the target "X%" coverage to hit
if_no_uploads: error # will post commit status of "error" if no coverage reports we uploaded
enabled: yes # must be yes|true to enable this status
target: 100% # Newly added lines must have 100% coverage
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
if_not_found: success
if_ci_failed: success
if_ci_failed: error
changes: # if there are any unexpected changes in coverage
default:
enabled: no # must be yes|true to enable this status
if_no_uploads: success
enabled: yes # must be yes|true to enable this status
if_no_uploads: error
if_not_found: success
if_ci_failed: success
if_ci_failed: error
# No comments because we're not yet running the full test suite on PRs
comment: off
flags:
salt:
paths:
- salt/
tests:
paths:
- tests/
comment:
layout: "reach, diff, flags, files"
after_n_builds: 46 # Only comment on PRs after N builds
# This value is the output of:
# sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
behavior: new # Comment posting behaviour
# default: update, if exists. Otherwise post new.
# once: update, if exists. Otherwise post new. Skip if deleted.
# new: delete old and post new.
# spammy: post new (do not delete old comments).

View File

@ -1,14 +1,11 @@
[run]
branch = True
cover_pylib = False
source =
salt
parallel = True
concurrency = multiprocessing
omit =
tests/*.py
setup.py
salt/daemons/test/*
.nox/*
[report]
# Regexes for lines to exclude from consideration
@ -30,7 +27,3 @@ exclude_lines =
ignore_errors = True
[paths]
source =
salt

View File

@ -7,7 +7,7 @@ repos:
alias: compile-linux-py2.7-zmq-requirements
name: Linux Py2.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=2.7
@ -17,9 +17,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py2.7-zmq-requirements
name: OSX Py2.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py2.7-zmq-requirements
name: Darwin Py2.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=2.7
@ -68,7 +68,7 @@ repos:
alias: compile-linux-py3.4-zmq-requirements
name: Linux Py3.4 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.4
@ -90,7 +90,7 @@ repos:
alias: compile-linux-py3.5-zmq-requirements
name: Linux Py3.5 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.5
@ -100,9 +100,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.5-zmq-requirements
name: OSX Py3.5 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py3.5-zmq-requirements
name: Darwin Py3.5 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.5
@ -150,7 +150,7 @@ repos:
alias: compile-linux-py3.6-zmq-requirements
name: Linux Py3.6 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.6
@ -160,9 +160,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.6-zmq-requirements
name: OSX Py3.6 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py3.6-zmq-requirements
name: Darwin Py3.6 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.6
@ -210,7 +210,7 @@ repos:
alias: compile-linux-py3.7-zmq-requirements
name: Linux Py3.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.7
@ -220,9 +220,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.7-zmq-requirements
name: OSX Py3.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py3.7-zmq-requirements
name: Darwin Py3.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.7

View File

@ -23,6 +23,8 @@ Versions are `MAJOR.PATCH`.
### Added
- [#54917](https://github.com/saltstack/salt/pull/54917) - Added get_settings, put_settings and flush_synced methods for Elasticsearch module. - [@Oloremo](https://github.com/Oloremo)
---
## [2019.2.2]

View File

@ -1,79 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
compile-translation-catalogs
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Compile the existing translation catalogs.
'''
# Import python libs
import os
import sys
import fnmatch
# Import 3rd-party libs
HAS_BABEL = False
try:
from babel.messages import mofile, pofile
HAS_BABEL = True
except ImportError:
try:
import polib
except ImportError:
print(
'You need to install either babel or pofile in order to compile '
'the message catalogs. One of:\n'
' pip install babel\n'
' pip install polib'
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')
def main():
'''
Run the compile code
'''
print('Gathering the translation catalogs to compile...'),
sys.stdout.flush()
entries = {}
for locale in os.listdir(os.path.join(LOCALES_DIR)):
if locale == 'pot':
continue
locale_path = os.path.join(LOCALES_DIR, locale)
entries[locale] = []
for dirpath, _, filenames in os.walk(locale_path):
for filename in fnmatch.filter(filenames, '*.po'):
entries[locale].append(os.path.join(dirpath, filename))
print('DONE')
for locale, po_files in sorted(entries.items()):
lc_messages_path = os.path.join(LOCALES_DIR, locale, 'LC_MESSAGES')
print('\nCompiling the \'{0}\' locale:'.format(locale))
for po_file in sorted(po_files):
relpath = os.path.relpath(po_file, lc_messages_path)
print ' {0}.po -> {0}.mo'.format(relpath.split('.po', 1)[0])
if HAS_BABEL:
catalog = pofile.read_po(open(po_file))
mofile.write_mo(
open(po_file.replace('.po', '.mo'), 'wb'), catalog
)
continue
catalog = polib.pofile(po_file)
catalog.save_as_mofile(fpath=po_file.replace('.po', '.mo'))
print('Done')
if __name__ == '__main__':
main()

View File

@ -1,53 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
download-translation-catalog
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Download a translation catalog from Transifex.
'''
# Import python libs
import os
import sys
# Import 3rd-party libs
try:
import txclib.utils
except ImportError:
print(
'The \'transifex-client\' library needs to be installed. '
'Please execute one of \'pip install transifex-client\' or '
'\'easy_install transifex-client\''
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')
def main():
'''
Run the compile code
'''
os.chdir(DOC_DIR)
tx_root = txclib.utils.find_dot_tx()
if len(sys.argv) < 2:
print('You need to pass a locale to this script. For example: '
'pt_PT, zh_CN, ru, etc...')
sys.exit(1)
for locale in sys.argv[1:]:
print('Download \'{0}\' translations catalog...'.format(locale))
txclib.utils.exec_command('pull', ['-l', locale], tx_root)
print('Done')
if __name__ == '__main__':
main()

View File

@ -1,223 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
update-transifex-source-translations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Update the transifex sources configuration file and push the source
'''
# Import python libs
import os
import sys
import time
import logging
import subprocess
import ConfigParser
try:
import txclib.utils
except ImportError:
sys.stdout.write(
'The \'transifex-client\' library needs to be installed. '
'Please execute one of \'pip install transifex-client\' or '
'\'easy_install transifex-client\'\n'
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
'''
Run the update code
'''
os.chdir(DOC_DIR)
sys.stdout.write('Extracting translatable strings....\n')
try:
subprocess.check_call(['make', 'gettext'])
except subprocess.CalledProcessError as exc:
sys.stdout.write('An error occurred while extracting the translation '
'strings: {0}\n'.format(exc))
sys.exit(1)
locale_dir = os.path.join(DOC_DIR, 'locale')
pot_dir = os.path.join(DOC_DIR, '_build', 'locale')
tx_root = txclib.utils.find_dot_tx()
tx_config = os.path.join(tx_root, '.tx', 'config')
if not tx_root:
sys.stdout.write(
'Unable to find the \'.tx/\' directory. Unable to continue\n'
)
sys.exit(1)
# We do not want the txclib INFO or WARNING logging
logging.getLogger('txclib').setLevel(logging.ERROR)
sys.stdout.write('Gathering the translation template files...')
sys.stdout.flush()
entries = []
for dirpath, dirnames, filenames in os.walk(pot_dir):
for filename in filenames:
pot_file = os.path.join(dirpath, filename)
base, ext = os.path.splitext(pot_file)
if ext != '.pot':
continue
resource_path = os.path.relpath(base, pot_dir)
try:
import babel.messages.pofile
if not len(babel.messages.pofile.read_po(open(pot_file))):
# Empty pot file, continue
continue
except ImportError:
# No babel package, let's keep on going
pass
resource_name = resource_path.replace(
'\\', '/').replace('/', '--').replace('.', '_')
entries.append((resource_path, resource_name))
sys.stdout.write('Done\n')
# Let's load the resources already present in the configuration file
cfg = ConfigParser.SafeConfigParser()
cfg.read([tx_config])
handled_resources = set(
section for section in
cfg.sections() if section.startswith('salt.')
)
sys.stdout.write('Updating the entries in \'.tx/config\'...\n')
sys.stdout.flush()
total_entries = len(entries)
for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
sys.stdout.write(
'[{0:>{pad}}/{1}] Updating resource for '
'{resource_path}.pot ({resource_name})'.format(
idx + 1,
total_entries,
pad=len(str(total_entries)),
locale_dir=locale_dir,
resource_name=resource_name,
resource_path=resource_path
)
)
sys.stdout.flush()
try:
txclib.utils.exec_command(
'set',
'--auto-local -r salt.{resource_name} '
'{locale_dir}/<lang>/LC_MESSAGES/{resource_path}.po '
'--source-lang en '
'--source-file {pot_dir}/{resource_path}.pot '
'--source-name {resource_path}.rst '
'--execute'.format(
resource_name=resource_name,
resource_path=resource_path,
locale_dir=locale_dir,
pot_dir=pot_dir.rstrip('/')
).split(),
tx_root
)
sys.stdout.write('\n')
if 'salt.{0}'.format(resource_name) in handled_resources:
handled_resources.remove('salt.{0}'.format(resource_name))
except Exception as err:
sys.stdout.write('An error occurred: {0}\n'.format(err))
except KeyboardInterrupt:
sys.stdout.write('\n')
sys.exit(1)
time.sleep(0.025)
if handled_resources:
non_handled_resources = len(handled_resources)
sys.stdout.write(
'Removing old resources from configuration and upstream'
'(if possible)\n'
)
for idx, resource_name in enumerate(sorted(handled_resources)):
sys.stdout.write(
'[{0:>{pad}}/{1}] Removing resource \'{resource_name}\''.format(
idx + 1,
non_handled_resources,
pad=len(str(non_handled_resources)),
resource_name=resource_name,
)
)
sys.stdout.flush()
try:
txclib.utils.exec_command(
'delete',
['-r', resource_name],
tx_root
)
handled_resources.remove(resource_name)
except Exception as err:
sys.stdout.write('An error occurred: {0}\n'.format(err))
finally:
if cfg.has_section(resource_name):
cfg.remove_section(resource_name)
sys.stdout.write('\n')
time.sleep(0.025)
cfg.write(open(tx_config, 'w'))
sys.stdout.write('\n')
# Set the translations file type we're using
txclib.utils.exec_command('set', ['-t', 'PO'], tx_root)
time.sleep(0.025)
if 'TRANSIFEX_NO_PUSH' not in os.environ:
sys.stdout.write('\n')
sys.stdout.write('Pushing translation template files...\n')
for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
sys.stdout.write(
'[{0:>{pad}}/{1}] Pushing resource for '
'{resource_path}.pot ({resource_name})'.format(
idx + 1,
total_entries,
pad=len(str(total_entries)),
locale_dir=locale_dir,
resource_name=resource_name,
resource_path=resource_path
)
)
sys.stdout.flush()
try:
txclib.utils.exec_command(
'push',
'--resource salt.{resource_name} '
'--source '
'--skip '
'--no-interactive'.format(
resource_name=resource_name,
resource_path=resource_path,
locale_dir=locale_dir
).split(),
tx_root
)
sys.stdout.write('\n')
except Exception as err:
sys.stdout.write('An error occurred: {0}\n'.format(err))
except KeyboardInterrupt:
sys.stdout.write('\n')
sys.exit(1)
time.sleep(0.025)
if handled_resources:
sys.stdout.write('=' * 80)
sys.stdout.write(
'\nDon\'t forget to delete the following remote resources:\n')
for resource_name in sorted(handled_resources):
sys.stdout.write(' {0}\n'.format(resource_name))
sys.stdout.write('=' * 80)
sys.stdout.write('\nDONE\n')
if __name__ == '__main__':
main()

File diff suppressed because it is too large

View File

@ -9,29 +9,14 @@ BUILDDIR = _build
SPHINXLANG =
XELATEX = xelatex
# ----- Translations Support ------------------------------------------------>
# If language is set, also set translation options
ifeq ($(shell [ "x$(SPHINXLANG)" != "x" ] && echo 0 || echo 1), 0)
TRANSLATIONOPTS = -D language='$(SPHINXLANG)'
else
TRANSLATIONOPTS =
endif
# Reset settings if sphinx-intl is not available
ifeq ($(shell which sphinx-intl >/dev/null 2>&1; echo $$?), 1)
SPHINXLANG =
TRANSLATIONOPTS =
endif
# <---- Translations Support -------------------------------------------------
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(TRANSLATIONOPTS) $(SPHINXOPTS) .
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations
.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
help:
@echo "Please use \`make <target>' where <target> is one of"
@ -53,7 +38,6 @@ help:
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@ -68,38 +52,38 @@ clean:
check_sphinx-build:
@which $(SPHINXBUILD) >/dev/null 2>&1 || (echo "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://www.sphinx-doc.org/en/master/)" >&2; false)
html: check_sphinx-build translations
html: check_sphinx-build
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml: check_sphinx-build translations
dirhtml: check_sphinx-build
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml: check_sphinx-build translations
singlehtml: check_sphinx-build
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle: check_sphinx-build translations
pickle: check_sphinx-build
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json: check_sphinx-build translations
json: check_sphinx-build
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp: check_sphinx-build translations
htmlhelp: check_sphinx-build
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp: check_sphinx-build translations
qthelp: check_sphinx-build
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
@ -108,7 +92,7 @@ qthelp: check_sphinx-build translations
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Salt.qhc"
devhelp: check_sphinx-build translations
devhelp: check_sphinx-build
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@ -117,31 +101,31 @@ devhelp: check_sphinx-build translations
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt"
@echo "# devhelp"
epub: check_sphinx-build translations
epub: check_sphinx-build
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex: check_sphinx-build translations
latex: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf: check_sphinx-build translations
latexpdf: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja: check_sphinx-build translations
latexpdfja: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
pdf: check_sphinx-build translations
pdf: check_sphinx-build
@if [ "$(XELATEX)" = "xelatex" ] || [ "x$(XELATEX)" = "x" ]; then \
echo "The '$(XELATEX)' command was not found."; \
fi
@ -150,40 +134,35 @@ pdf: check_sphinx-build translations
$(MAKE) -C $(BUILDDIR)/latex -i "PDFLATEX=latexmk" "LATEXMKOPTS=-xelatex -interaction=nonstopmode -f -quiet"
@echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex."
cheatsheet: translations
cheatsheet:
@echo "Running cheatsheet/salt.tex file through xelatex..."
cd cheatsheet && xelatex salt.tex && cp salt.pdf ../salt-cheatsheet.pdf
@echo "./salt-cheatsheet.pdf created."
text: check_sphinx-build translations
text: check_sphinx-build
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man: check_sphinx-build translations
man: check_sphinx-build
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo: check_sphinx-build translations
texinfo: check_sphinx-build
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info: check_sphinx-build translations
info: check_sphinx-build
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext: check_sphinx-build
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale"
changes: check_sphinx-build translations
changes: check_sphinx-build
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
@ -205,27 +184,12 @@ doctest: check_sphinx-build
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml: check_sphinx-build translations
xml: check_sphinx-build
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml: check_sphinx-build translations
pseudoxml: check_sphinx-build
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
translations:
@if [ "$(SPHINXLANG)" = "en" ] || [ "x$(SPHINXLANG)" = "x" ]; then \
echo "No need to update translations. Skipping..."; \
elif [ ! -d locale/$(SPHINXLANG) ]; then \
echo "The locale directory for $(SPHINXLANG) does not exist"; \
exit 1; \
else \
echo "Compiling exising message catalog for '$(SPHINXLANG)'"; \
.scripts/compile-translation-catalogs; \
fi
download-translations:
@echo "Downloading $(SPHINXLANG) translations"
.scripts/download-translation-catalog $(SPHINXLANG)

View File

@ -264,20 +264,6 @@
<script type="text/javascript" language="javascript">llactid=23943</script>
<script type="text/javascript" language="javascript" src="http://t6.trackalyzer.com/trackalyze.js"></script>
<script>
var _gaq = _gaq || [];
var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
_gaq.push(['_require', 'inpage_linkid', pluginUrl]);
_gaq.push(['_setAccount', 'UA-26984928-1']);
_gaq.push(['_setDomainName', 'saltstack.com']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
{% endif %}
</body>
</html>

View File

@ -380,20 +380,6 @@
<script type="text/javascript" language="javascript">llactid=23943</script>
<script type="text/javascript" language="javascript" src="https://trackalyzer.com/trackalyze_secure.js"></script>
<script>
var _gaq = _gaq || [];
var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
_gaq.push(['_require', 'inpage_linkid', pluginUrl]);
_gaq.push(['_setAccount', 'UA-26984928-1']);
_gaq.push(['_setDomainName', 'saltstack.com']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
{% endif %}
</body>
</html>

View File

@ -769,6 +769,30 @@ Statically assigns grains to the minion.
cabinet: 13
cab_u: 14-15
.. conf_minion:: grains_blacklist
``grains_blacklist``
--------------------
Default: ``[]``
Each grains key will be compared against each of the expressions in this list.
Any keys which match will be filtered from the grains. Exact matches, glob
matches, and regular expressions are supported.
.. note::
Some states and execution modules depend on grains. Filtering may cause
them to be unavailable or run unreliably.
.. versionadded:: Neon
.. code-block:: yaml
grains_blacklist:
- cpu_flags
- zmq*
- ipv[46]
.. conf_minion:: grains_cache
``grains_cache``
@ -893,6 +917,20 @@ minion. Since this grain is expensive, it is disabled by default.
iscsi_grains: True
.. conf_minion:: nvme_grains
``nvme_grains``
------------------------
Default: ``False``
The ``nvme_grains`` setting will enable the ``nvme_nqn`` grain on the
minion. Since this grain is expensive, it is disabled by default.
.. code-block:: yaml
nvme_grains: True
.. conf_minion:: mine_enabled
``mine_enabled``

View File

@ -255,6 +255,7 @@ state modules
rvm
salt_proxy
saltmod
saltutil
schedule
selinux
serverdensity_device

View File

@ -0,0 +1,6 @@
====================
salt.states.saltutil
====================
.. automodule:: salt.states.saltutil
:members:

View File

@ -69,6 +69,16 @@ dynamic modules when states are run. To disable this behavior set
When dynamic modules are autoloaded via states, only the modules defined in the
same saltenvs as the states currently being run are synced.
It is also possible to use the explicit ``saltutil.sync_*`` :py:mod:`state functions <salt.states.saltutil>`
to sync the modules (previously this required the ``module.run`` state):
.. code-block:: yaml
synchronize_modules:
saltutil.sync_modules:
- refresh: True
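For comparison, a minimal sketch of the older ``module.run`` form that this replaces (a sketch only, assuming the classic ``module.run`` syntax):

.. code-block:: yaml

    synchronize_modules:
      module.run:
        - name: saltutil.sync_modules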
Sync Via the saltutil Module
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

View File

@ -1,97 +0,0 @@
Translating Documentation
=========================
If you wish to help translate the Salt documentation to your language, please
head over to the `Transifex`_ website and `signup`__ for an account.
Once registered, head over to the `Salt Translation Project`__, and either
click on **Request Language** if you can't find yours, or, select the language
for which you wish to contribute and click **Join Team**.
`Transifex`_ provides some useful reading resources on their `support
domain`__, namely, some useful articles `directed to translators`__.
.. __: https://www.transifex.com/signup/
.. __: https://www.transifex.com/projects/p/salt/
.. __: http://support.transifex.com/
.. __: http://support.transifex.com/customer/portal/topics/414107-translators/articles
Building A Localized Version of the Documentation
-------------------------------------------------
While you're working on your translation on `Transifex`_, you might want to
have a look at how it's rendering.
Install The Transifex Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To interact with the `Transifex`_ web service you will need to install the
`transifex-client`__:
.. code-block:: bash
pip install transifex-client
.. __: https://github.com/transifex/transifex-client
Configure The Transifex Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Once installed, you will need to set it up on your computer. We created a
script to help you with that:
.. code-block:: bash
.scripts/setup-transifex-config
Download Remote Translations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There's a little script which simplifies the download process of the
translations(which isn't that complicated in the first place).
So, let's assume you're translating ``pt_PT``, Portuguese(Portugal). To
download the translations, execute from the ``doc/`` directory of your Salt
checkout:
.. code-block:: bash
make download-translations SPHINXLANG=pt_PT
To download ``pt_PT``, Portuguese(Portugal), and ``nl``, Dutch, you can use the
helper script directly:
.. code-block:: bash
.scripts/download-translation-catalog pt_PT nl
Build Localized Documentation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After the download process finishes, which might take a while, the next step is
to build a localized version of the documentation.
Following the ``pt_PT`` example above:
.. code-block:: bash
make html SPHINXLANG=pt_PT
View Localized Documentation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Open your browser, point it to the local documentation path and check the
localized output you've just build.
.. _`Transifex`: https://www.transifex.com

View File

@ -929,11 +929,12 @@ you can now configure which type numbers indicate a login and logout.
See the :py:mod:`wtmp beacon documentation <salt.beacons.wtmp>` for more
information.
Deprecations
============
API Deprecations
----------------
Deprecated and Removed Options
==============================
API Removed Arguments
---------------------
Support for :ref:`LocalClient <local-client>`'s ``expr_form`` argument has
been removed. Please use ``tgt_type`` instead. This change was made due to
@ -952,14 +953,14 @@ their code to use ``tgt_type``.
>>> local.cmd('*', 'cmd.run', ['whoami'], tgt_type='glob')
{'jerry': 'root'}
Minion Configuration Deprecations
---------------------------------
Minion Configuration Deprecated Option
--------------------------------------
The :conf_minion:`master_shuffle` configuration option is deprecated as of the
``2019.2.0`` release. Please use the :conf_minion:`random_master` option instead.
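For example, a minion that previously set ``master_shuffle: True`` can switch to the replacement option with a one-line change (a minimal sketch of the minion config):

.. code-block:: yaml

    random_master: True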
Module Deprecations
-------------------
Module Removed Options
----------------------
- The :py:mod:`napalm_network <salt.modules.napalm_network>` module has been
changed as follows:
@ -1011,8 +1012,8 @@ Module Deprecations
functions have been removed. Please use :py:func:`win_wua.list
<salt.modules.win_wua.list_>` instead.
Pillar Deprecations
-------------------
Pillar Removed Option
---------------------
- The :py:mod:`vault <salt.pillar.vault>` external pillar has been changed as
follows:
@ -1020,8 +1021,8 @@ Pillar Deprecations
- Support for the ``profile`` argument was removed. Any options passed up
until and following the first ``path=`` are discarded.
Roster Deprecations
-------------------
Roster Removed Option
---------------------
- The :py:mod:`cache <salt.roster.cache>` roster has been changed as follows:
@ -1032,8 +1033,8 @@ Roster Deprecations
``private``, ``public``, ``global`` or ``local`` settings. The syntax for
these settings has changed to ``ipv4-*`` or ``ipv6-*``, respectively.
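A master config sketch of the new value style (the ``roster_order`` layout shown here is assumed from the cache roster documentation and is only illustrative):

.. code-block:: yaml

    roster_order:
      host:
        - ipv6-global
        - ipv4-public
        - ipv4-private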
State Deprecations
------------------
State Removed Modules and Options
---------------------------------
- The ``docker`` state module has been removed
@ -1126,8 +1127,8 @@ State Deprecations
- Support for virtual packages has been removed from the
:py:mod:`pkg state <salt.states.pkg>`.
Utils Deprecations
------------------
Utils Removed Options
---------------------
The ``cloud`` utils module had the following changes:
@ -1151,7 +1152,7 @@ been deprecated in favor of ``pypsexec``.
Salt-Cloud has deprecated the use of ``impacket`` in favor of ``smbprotocol``.
This change was made because ``impacket`` is not compatible with Python 3.
SaltSSH major updates
SaltSSH Major Updates
=====================
SaltSSH now works across different major Python versions (Python 2.7 through Python 3.x).

View File

@ -3,3 +3,30 @@
==================================
Salt Release Notes - Codename Neon
==================================
Slot Syntax Updates
===================
The slot syntax has been updated to support parsing dictionary responses and to append text.
.. code-block:: yaml
    demo dict parsing and append:
      test.configurable_test_state:
        - name: slot example
        - changes: False
        - comment: __slot__:salt:test.arg(shell="/bin/bash").kwargs.shell ~ /appended
.. code-block:: none
    local:
      ----------
              ID: demo dict parsing and append
        Function: test.configurable_test_state
            Name: slot example
          Result: True
         Comment: /bin/bash/appended
         Started: 09:59:58.623575
        Duration: 1.229 ms
         Changes:

View File

@ -5,6 +5,7 @@ Slots
=====
.. versionadded:: 2018.3.0
.. versionchanged:: Neon
.. note:: This functionality is under development and could change in
future releases.
@ -33,7 +34,14 @@ Slot syntax looks close to the simple python function call.
__slot__:salt:<module>.<function>(<args>, ..., <kwargs...>, ...)
Also there are some specifics in the syntax coming from the execution functions
For the Neon release, this syntax has been updated to support parsing functions
which return dictionaries and for appending text to the slot result.
.. code-block:: text
__slot__:salt:<module>.<function>(<args>..., <kwargs...>, ...).dictionary ~ append
There are some specifics in the syntax coming from the nature of the execution
functions and a desire to simplify the user experience. The first is that you
don't need to quote the strings passed to the slot functions. The second is
that all arguments are handled as strings.
@ -51,3 +59,12 @@ This will execute the :py:func:`test.echo <salt.modules.test.echo>` execution
functions right before calling the state. The functions in the example will
return the `/tmp/some_file` and `/etc/hosts` strings, which will be used as the
target and source arguments in the state function `file.copy`.
Here is an example of result parsing and appending:
.. code-block:: yaml
    file-in-user-home:
      file.copy:
        - name: __slot__:salt:user.info(someuser).home ~ /subdirectory
        - source: salt://somefile

View File

@ -15,6 +15,7 @@ import json
import pprint
import shutil
import tempfile
import datetime
if __name__ == '__main__':
sys.stderr.write('Do not execute this file directly. Use nox instead, it will know how to handle this file\n')
@ -34,8 +35,8 @@ PIP_INSTALL_SILENT = (os.environ.get('JENKINS_URL') or os.environ.get('CI') or o
# Global Path Definitions
REPO_ROOT = os.path.abspath(os.path.dirname(__file__))
SITECUSTOMIZE_DIR = os.path.join(REPO_ROOT, 'tests', 'support', 'coverage')
IS_DARWIN = sys.platform.lower().startswith('darwin')
IS_WINDOWS = sys.platform.lower().startswith('win')
# Python versions to run against
_PYTHON_VERSIONS = ('2', '2.7', '3', '3.4', '3.5', '3.6', '3.7')
@ -45,10 +46,18 @@ nox.options.reuse_existing_virtualenvs = True
# Don't fail on missing interpreters
nox.options.error_on_missing_interpreters = False
# Change current directory to REPO_ROOT
os.chdir(REPO_ROOT)
RUNTESTS_LOGFILE = os.path.join(
'artifacts', 'logs',
'runtests-{}.log'.format(datetime.datetime.now().strftime('%Y%m%d%H%M%S.%f'))
)
def _create_ci_directories():
for dirname in ('logs', 'coverage', 'xml-unittests-output'):
path = os.path.join(REPO_ROOT, 'artifacts', dirname)
path = os.path.join('artifacts', dirname)
if not os.path.exists(path):
os.makedirs(path)
@ -186,20 +195,43 @@ def _get_distro_pip_constraints(session, transport):
pydir = _get_pydir(session)
if IS_WINDOWS:
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-windows.txt'.format(transport))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'windows.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'windows-crypto.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
elif IS_DARWIN:
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-darwin.txt'.format(transport))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'darwin.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'darwin-crypto.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
else:
_install_system_packages(session)
distro = _get_distro_info(session)
@ -210,20 +242,31 @@ def _get_distro_pip_constraints(session, transport):
'{id}-{version_parts[major]}'.format(**distro)
]
for distro_key in distro_keys:
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}.txt'.format(distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-crypto.txt'.format(distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-{}.txt'.format(transport, distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-{}-crypto.txt'.format(transport, distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
return distro_constraints
@ -232,24 +275,24 @@ def _install_requirements(session, transport, *extra_requirements):
distro_constraints = _get_distro_pip_constraints(session, transport)
_requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'base.txt'),
os.path.join(REPO_ROOT, 'requirements', 'zeromq.txt'),
os.path.join(REPO_ROOT, 'requirements', 'pytest.txt')
os.path.join('requirements', 'base.txt'),
os.path.join('requirements', 'zeromq.txt'),
os.path.join('requirements', 'pytest.txt')
]
if sys.platform.startswith('linux'):
requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'static', 'linux.in')
os.path.join('requirements', 'static', 'linux.in')
]
elif sys.platform.startswith('win'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'windows', 'req.txt'),
os.path.join(REPO_ROOT, 'requirements', 'static', 'windows.in')
os.path.join('pkg', 'windows', 'req.txt'),
os.path.join('requirements', 'static', 'windows.in')
]
elif sys.platform.startswith('darwin'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req.txt'),
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req_ext.txt'),
os.path.join(REPO_ROOT, 'requirements', 'static', 'osx.in')
os.path.join('pkg', 'osx', 'req.txt'),
os.path.join('pkg', 'osx', 'req_ext.txt'),
os.path.join('requirements', 'static', 'darwin.in')
]
while True:
@ -328,7 +371,20 @@ def _run_with_coverage(session, *test_cmd):
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
session.run('coverage', 'xml', '-o', os.path.join(REPO_ROOT, 'artifacts', 'coverage', 'coverage.xml'))
# Generate report for salt code coverage
session.run(
'coverage', 'xml',
'-o', os.path.join('artifacts', 'coverage', 'salt.xml'),
'--omit=tests/*',
'--include=salt/*'
)
# Generate report for tests code coverage
session.run(
'coverage', 'xml',
'-o', os.path.join('artifacts', 'coverage', 'tests.xml'),
'--omit=salt/*',
'--include=tests/*'
)
def _runtests(session, coverage, cmd_args):
@ -418,9 +474,7 @@ def runtests_parametrized(session, coverage, transport, crypto):
session.install(*install_command, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE),
'--transport={}'.format(transport)
] + session.posargs
_runtests(session, coverage, cmd_args)
@ -559,14 +613,12 @@ def runtests_cloud(session, coverage):
_install_requirements(session, 'zeromq', 'unittest-xml-reporting==2.2.1')
pydir = _get_pydir(session)
cloud_requirements = os.path.join(REPO_ROOT, 'requirements', 'static', pydir, 'cloud.txt')
cloud_requirements = os.path.join('requirements', 'static', pydir, 'cloud.txt')
session.install('--progress-bar=off', '-r', cloud_requirements, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE),
'--cloud-provider-tests'
] + session.posargs
_runtests(session, coverage, cmd_args)
@ -581,9 +633,7 @@ def runtests_tornado(session, coverage):
session.install('--progress-bar=off', 'pyzmq==17.0.0', silent=PIP_INSTALL_SILENT)
cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE)
] + session.posargs
_runtests(session, coverage, cmd_args)
@ -614,9 +664,8 @@ def pytest_parametrized(session, coverage, transport, crypto):
cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
@ -757,19 +806,18 @@ def pytest_cloud(session, coverage):
# Install requirements
_install_requirements(session, 'zeromq')
pydir = _get_pydir(session)
cloud_requirements = os.path.join(REPO_ROOT, 'requirements', 'static', pydir, 'cloud.txt')
cloud_requirements = os.path.join('requirements', 'static', pydir, 'cloud.txt')
session.install('--progress-bar=off', '-r', cloud_requirements, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
os.path.join(REPO_ROOT, 'tests', 'integration', 'cloud', 'providers')
os.path.join('tests', 'integration', 'cloud', 'providers')
] + session.posargs
_pytest(session, coverage, cmd_args)
@ -784,9 +832,8 @@ def pytest_tornado(session, coverage):
cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
@ -815,7 +862,18 @@ def _pytest(session, coverage, cmd_args):
def _lint(session, rcfile, flags, paths):
_install_requirements(session, 'zeromq')
session.install('--progress-bar=off', '-r', 'requirements/static/{}/lint.txt'.format(_get_pydir(session)), silent=PIP_INSTALL_SILENT)
requirements_file = 'requirements/static/lint.in'
distro_constraints = [
'requirements/static/{}/lint.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
session.run('pylint', '--version')
pylint_report_path = os.environ.get('PYLINT_REPORT')
@ -889,19 +947,73 @@ def lint_tests(session):
@nox.session(python='3')
def docs(session):
@nox.parametrize('update', [False, True])
@nox.parametrize('compress', [False, True])
def docs(session, compress, update):
'''
Build Salt's Documentation
'''
session.notify('docs-html(compress={})'.format(compress))
session.notify('docs-man(compress={}, update={})'.format(compress, update))
@nox.session(name='docs-html', python='3')
@nox.parametrize('compress', [False, True])
def docs_html(session, compress):
'''
Build Salt's HTML Documentation
'''
pydir = _get_pydir(session)
if pydir == 'py3.4':
session.error('Sphinx only runs on Python >= 3.5')
session.install(
'--progress-bar=off',
'-r', 'requirements/static/{}/docs.txt'.format(pydir),
silent=PIP_INSTALL_SILENT)
requirements_file = 'requirements/static/docs.in'
distro_constraints = [
'requirements/static/{}/docs.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
os.chdir('doc/')
session.run('make', 'clean', external=True)
session.run('make', 'html', 'SPHINXOPTS=-W', external=True)
session.run('tar', '-czvf', 'doc-archive.tar.gz', '_build/html')
if compress:
session.run('tar', '-czvf', 'html-archive.tar.gz', '_build/html', external=True)
os.chdir('..')
@nox.session(name='docs-man', python='3')
@nox.parametrize('update', [False, True])
@nox.parametrize('compress', [False, True])
def docs_man(session, compress, update):
'''
Build Salt's Manpages Documentation
'''
pydir = _get_pydir(session)
if pydir == 'py3.4':
session.error('Sphinx only runs on Python >= 3.5')
requirements_file = 'requirements/static/docs.in'
distro_constraints = [
'requirements/static/{}/docs.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
os.chdir('doc/')
session.run('make', 'clean', external=True)
session.run('make', 'man', 'SPHINXOPTS=-W', external=True)
if update:
session.run('rm', '-rf', 'man/', external=True)
session.run('cp', '-Rp', '_build/man', 'man/', external=True)
if compress:
session.run('tar', '-czvf', 'man-archive.tar.gz', '_build/man', external=True)
os.chdir('..')

View File

@ -0,0 +1,2 @@
m2crypto
pycryptodomex

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py2.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.5/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.6/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -925,6 +925,9 @@ VALID_OPTS = {
# Set a hard limit for the amount of memory modules can consume on a minion.
'modules_max_memory': int,
# Blacklist specific core grains to be filtered
'grains_blacklist': list,
# The number of minutes between the minion refreshing its cache of grains
'grains_refresh_every': int,
@ -1222,6 +1225,7 @@ DEFAULT_MINION_OPTS = {
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'minion'),
'append_minionid_config_dirs': [],
'cache_jobs': False,
'grains_blacklist': [],
'grains_cache': False,
'grains_cache_expiration': 300,
'grains_deep_merge': False,
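As a usage sketch for the new ``grains_blacklist`` option (the glob patterns below are illustrative, not part of this change), a minion config can filter matching core grains like so:

.. code-block:: yaml

    grains_blacklist:
      - cpu_flags
      - gpus
      - cwd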

salt/grains/nvme.py Normal file
View File

@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
'''
Grains for NVMe Qualified Names (NQN).

.. versionadded:: Flourine

To enable these grains set `nvme_grains: True`.

.. code-block:: yaml

    nvme_grains: True
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import errno
import logging

# Import Salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.platform

__virtualname__ = 'nvme'

# Get logging started
log = logging.getLogger(__name__)


def __virtual__():
    if __opts__.get('nvme_grains', False) is False:
        return False
    return __virtualname__


def nvme_nqn():
    '''
    Return NVMe NQN
    '''
    grains = {}
    grains['nvme_nqn'] = False
    if salt.utils.platform.is_linux():
        grains['nvme_nqn'] = _linux_nqn()
    return grains


def _linux_nqn():
    '''
    Return NVMe NQN from a Linux host.
    '''
    ret = []

    initiator = '/etc/nvme/hostnqn'
    try:
        with salt.utils.files.fopen(initiator, 'r') as _nvme:
            for line in _nvme:
                line = line.strip()
                if line.startswith('nqn.'):
                    ret.append(line)
    except IOError as ex:
        if ex.errno != errno.ENOENT:
            log.debug("Error while accessing '%s': %s", initiator, ex)

    return ret

View File

@ -34,6 +34,7 @@ import salt.utils.lazy
import salt.utils.odict
import salt.utils.platform
import salt.utils.versions
import salt.utils.stringutils
from salt.exceptions import LoaderError
from salt.template import check_render_pipe_str
from salt.utils.decorators import Depends
@ -773,6 +774,7 @@ def grains(opts, force_refresh=False, proxy=None):
opts['grains'] = {}
grains_data = {}
blist = opts.get('grains_blacklist', [])
funcs = grain_funcs(opts, proxy=proxy)
if force_refresh: # if we refresh, lets reload grain modules
funcs.clear()
@ -784,6 +786,14 @@ def grains(opts, force_refresh=False, proxy=None):
ret = funcs[key]()
if not isinstance(ret, dict):
continue
if blist:
for key in list(ret):
for block in blist:
if salt.utils.stringutils.expr_match(key, block):
del ret[key]
log.trace('Filtering %s grain', key)
if not ret:
continue
if grains_deep_merge:
salt.utils.dictupdate.update(grains_data, ret)
else:
@ -819,6 +829,14 @@ def grains(opts, force_refresh=False, proxy=None):
continue
if not isinstance(ret, dict):
continue
if blist:
for key in list(ret):
for block in blist:
if salt.utils.stringutils.expr_match(key, block):
del ret[key]
log.trace('Filtering %s grain', key)
if not ret:
continue
if grains_deep_merge:
salt.utils.dictupdate.update(grains_data, ret)
else:
@ -1883,20 +1901,6 @@ class LazyLoader(salt.utils.lazy.LazyDict):
# with the new name
log.trace('Loaded %s as virtual %s', module_name, virtual)
if not hasattr(mod, '__virtualname__'):
salt.utils.versions.warn_until(
'Hydrogen',
'The \'{0}\' module is renaming itself in its '
'__virtual__() function ({1} => {2}). Please '
'set it\'s virtual name as the '
'\'__virtualname__\' module attribute. '
'Example: "__virtualname__ = \'{2}\'"'.format(
mod.__name__,
module_name,
virtual
)
)
if virtualname != virtual:
# The __virtualname__ attribute does not match what's
# being returned by the __virtual__() function. This

View File

@ -1156,7 +1156,7 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06
salt '*' pkg.unhold <package name>
pkgs
A list of packages to hold. Must be passed as a python list.
A list of packages to unhold. Must be passed as a python list.
CLI Example:

View File

@ -158,7 +158,7 @@ def _error_msg_iface(iface, option, expected):
a list of expected values.
'''
msg = 'Invalid option -- Interface: {0}, Option: {1}, Expected: [{2}]'
return msg.format(iface, option, '|'.join(expected))
return msg.format(iface, option, '|'.join(str(e) for e in expected))
def _error_msg_routes(iface, option, expected):
@ -181,7 +181,7 @@ def _error_msg_network(option, expected):
a list of expected values.
'''
msg = 'Invalid network setting -- Setting: {0}, Expected: [{1}]'
return msg.format(option, '|'.join(expected))
return msg.format(option, '|'.join(str(e) for e in expected))
def _log_default_network(opt, value):

View File

@ -264,6 +264,59 @@ def cluster_stats(nodes=None, hosts=None, profile=None):
raise CommandExecutionError("Cannot retrieve cluster stats, server returned code {0} with message {1}".format(e.status_code, e.error))
def cluster_get_settings(flat_settings=False, include_defaults=False, hosts=None, profile=None):
'''
.. versionadded:: Neon
Return Elasticsearch cluster settings.
flat_settings
Return settings in flat format.
include_defaults
Whether to return all default cluster settings.
CLI example::
salt myminion elasticsearch.cluster_get_settings
'''
es = _get_instance(hosts, profile)
try:
return es.cluster.get_settings(flat_settings=flat_settings, include_defaults=include_defaults)
except elasticsearch.TransportError as e:
raise CommandExecutionError("Cannot retrieve cluster settings, server returned code {0} with message {1}".format(e.status_code, e.error))
def cluster_put_settings(body=None, flat_settings=False, hosts=None, profile=None):
'''
.. versionadded:: Neon
Set Elasticsearch cluster settings.
body
The settings to be updated. Can be either 'transient' or 'persistent' (survives cluster restart)
http://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html
flat_settings
Return settings in flat format.
CLI example::
salt myminion elasticsearch.cluster_put_settings '{"persistent": {"indices.recovery.max_bytes_per_sec": "50mb"}}'
salt myminion elasticsearch.cluster_put_settings '{"transient": {"indices.recovery.max_bytes_per_sec": "50mb"}}'
'''
if not body:
message = 'You must provide a body with settings'
raise SaltInvocationError(message)
es = _get_instance(hosts, profile)
try:
return es.cluster.put_settings(body=body, flat_settings=flat_settings)
except elasticsearch.TransportError as e:
raise CommandExecutionError("Cannot update cluster settings, server returned code {0} with message {1}".format(e.status_code, e.error))
def alias_create(indices, alias, hosts=None, body=None, profile=None, source=None):
'''
Create an alias for a specific index/indices
@ -1214,3 +1267,44 @@ def snapshot_delete(repository, snapshot, hosts=None, profile=None):
return True
except elasticsearch.TransportError as e:
raise CommandExecutionError("Cannot delete snapshot {0} from repository {1}, server returned code {2} with message {3}".format(snapshot, repository, e.status_code, e.error))
def flush_synced(hosts=None, profile=None, **kwargs):
'''
.. versionadded:: Neon
Perform a normal flush, then add a generated unique marker (sync_id) to all shards.
http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-synced-flush.html
index
(Optional, string) A comma-separated list of index names; use _all or empty string for all indices. Defaults to '_all'.
ignore_unavailable
(Optional, boolean) If true, missing or closed indices are not included in the response. Defaults to false.
allow_no_indices
(Optional, boolean) If true, the request does not return an error if a wildcard expression or _all value retrieves only missing or closed indices.
This parameter also applies to index aliases that point to a missing or closed index.
expand_wildcards
(Optional, string) Controls what kind of indices that wildcard expressions can expand to.
Valid values are::
all - Expand to open and closed indices.
open - Expand only to open indices.
closed - Expand only to closed indices.
none - Wildcard expressions are not accepted.
The default settings for the above parameters depend on the API being used.
CLI example::
salt myminion elasticsearch.flush_synced index='index1,index2' ignore_unavailable=True allow_no_indices=True expand_wildcards='all'
'''
es = _get_instance(hosts, profile)
try:
return es.indices.flush_synced(kwargs)
except elasticsearch.TransportError as e:
raise CommandExecutionError("Cannot flush synced, server returned code {} with message {}".format(e.status_code, e.error))

View File

@ -269,6 +269,14 @@ def refresh_db(failhard=False, **kwargs): # pylint: disable=unused-argument
return ret
def _append_noaction_if_testmode(cmd, **kwargs):
'''
Adds the --noaction flag to the command when running in test mode.
'''
if bool(kwargs.get('test') or __opts__.get('test')):
cmd.append('--noaction')
def install(name=None,
refresh=False,
pkgs=None,
@ -366,6 +374,7 @@ def install(name=None,
to_reinstall = []
to_downgrade = []
_append_noaction_if_testmode(cmd_prefix, **kwargs)
if pkg_params is None or len(pkg_params) == 0:
return {}
elif pkg_type == 'file':
@ -540,6 +549,7 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=unused-argument
if not targets:
return {}
cmd = ['opkg', 'remove']
_append_noaction_if_testmode(cmd, **kwargs)
if kwargs.get('remove_dependencies', False):
cmd.append('--force-removal-of-dependent-packages')
if kwargs.get('auto_remove_deps', False):

View File

@ -44,7 +44,7 @@ Installation Prerequisites
:maintainer: Simon Dodsley (simon@purestorage.com)
:maturity: new
:requires: purestorage
:requires: purity_fb
:platform: all
.. versionadded:: 2019.2.0

View File

@ -81,7 +81,7 @@ def _error_msg_iface(iface, option, expected):
a list of expected values.
'''
msg = 'Invalid option -- Interface: {0}, Option: {1}, Expected: [{2}]'
return msg.format(iface, option, '|'.join(expected))
return msg.format(iface, option, '|'.join(str(e) for e in expected))
def _error_msg_routes(iface, option, expected):
@ -104,7 +104,7 @@ def _error_msg_network(option, expected):
a list of expected values.
'''
msg = 'Invalid network setting -- Setting: {0}, Expected: [{1}]'
return msg.format(option, '|'.join(expected))
return msg.format(option, '|'.join(str(e) for e in expected))
def _log_default_network(opt, value):

View File

@ -159,7 +159,7 @@ def target(key, full=True):
key = os.path.realpath(key)
if not os.path.exists(key):
log.debug('Unkown SysFS key %s', key)
log.debug('Unknown SysFS key %s', key)
return False
elif full:
return key

View File

@ -6,7 +6,10 @@ Module for notifications via Twilio
:depends: - twilio python module
:configuration: Configure this module by specifying the name of a configuration
profile in the minion config, minion pillar, or master config.
profile in the minion config, minion pillar, or master config (with :conf_master:`pillar_opts` set to True).
.. warning:: Setting pillar_opts to True in the master config may be considered
unsafe as it copies the master config to pillar
For example:
@ -73,7 +76,7 @@ def send_sms(profile, body, to, from_):
CLI Example:
twilio.send_sms twilio-account 'Test sms' '+18019999999' '+18011111111'
twilio.send_sms my-twilio-account 'Test sms' '+18019999999' '+18011111111'
'''
ret = {}
ret['message'] = {}

Some files were not shown because too many files have changed in this diff