Merge branch 'master' into 2019_2_1_port_54154

This commit is contained in:
Daniel Wozniak 2019-11-07 08:57:29 -07:00 committed by GitHub
commit aac7f408e3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
113 changed files with 2774 additions and 11677 deletions

View File

@ -41,13 +41,22 @@ wrappedNode('docs', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Build') {
stage('Build HTML Docs') {
sh shell_header + '''
eval "$(pyenv init -)"
pyenv shell 3.6.8
nox -e docs
nox -e 'docs-html(compress=True)'
'''
archiveArtifacts artifacts: 'doc/doc-archive.tar.gz'
archiveArtifacts artifacts: 'doc/html-archive.tar.gz'
}
stage('Build Man Pages') {
sh shell_header + '''
eval "$(pyenv init -)"
pyenv shell 3.6.8
nox -e 'docs-man(compress=True, update=False)'
'''
archiveArtifacts artifacts: 'doc/man-archive.tar.gz'
}
}

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '1'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'amazon'
def distro_version = '2'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'arch'
def distro_version = 'lts'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '-n integration.modules.test_pkg'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'arch'
def distro_version = 'lts'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '-n integration.modules.test_pkg'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '6'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ-M2Crypto'
def nox_env_name = 'runtests-zeromq-m2crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,26 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'ZeroMQ-Pycryptodomex'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py2'
def test_transport = 'Tornado'
def nox_env_name = 'runtests-tornado'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ-M2Crypto'
def nox_env_name = 'runtests-zeromq-m2crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,26 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'ZeroMQ-Pycryptodomex'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'centos'
def distro_version = '7'
def python_version = 'py3'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '10'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def golden_images_branch = 'master'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '8'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'debian'
def distro_version = '9'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '29'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'fedora'
def distro_version = '30'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@ -9,10 +9,12 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,29 +25,27 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@ -88,7 +88,7 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@ -128,15 +128,18 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}

View File

@ -1,4 +1,4 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
@ -9,10 +9,12 @@ def global_timeout = testrun_timeout + 1;
def distro_name = 'macosx'
def distro_version = 'mojave'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = ''
def concurrent_builds = 1
def jenkins_slave_label = 'kitchen-slave-mac'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,29 +25,27 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
wrappedNode(jenkins_slave_label, global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
"NOX_ENV_NAME=${nox_env_name}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
"PATH=/Users/parallels/.rbenv/shims:/Users/parallels/.rbenv/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin:/opt/salt/bin:/usr/local/sbin",
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
stage('VM Cleanup') {
@ -88,7 +88,7 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.vagrant.d/insecure_private_key; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
@ -128,15 +128,18 @@ wrappedNode('kitchen-slave-mac', global_timeout, '#jenkins-prod-pr') {
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
def distro_strings = [
distro_name,
distro_version
]
def report_strings = (
[python_version] + nox_env_name.split('-')
).flatten()
uploadCodeCoverage(
report_path: 'artifacts/coverage/coverage.xml',
report_name: "${distro_strings.join('-')}-${report_strings.join('-')}",
report_flags: ([distro_strings.join('')] + report_strings).flatten()
)
}
}
}

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'opensuse'
def distro_version = '15'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ-M2Crypto'
def nox_env_name = 'runtests-zeromq-m2crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,26 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'ZeroMQ-Pycryptodomex'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py2'
def test_transport = 'Tornado'
def nox_env_name = 'runtests-tornado'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ-M2Crypto'
def nox_env_name = 'runtests-zeromq-m2crypto'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
// NOTE(review): this span is a rendered git diff of a Jenkins scripted
// pipeline (Groovy), not a clean source file — diff hunk headers and
// old/new line pairs are interleaved below. Code is left byte-identical;
// only comments were added.
// NOTE(review): adjacent old/new diff pair — shared-library pin bumped
// from 'salt@1.1' to 'salt@master-1.2'.
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Test-matrix configuration for this pipeline: Ubuntu 16.04, Python 3,
// ZeroMQ transport, proxy-minion tests (--proxy).
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--proxy'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
// NOTE(review): diff hunk header residue — not Groovy code.
@ -23,137 +23,26 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
// NOTE(review): adjacent old/new diff pair — hard-coded limit replaced by
// the concurrent_builds variable.
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
// NOTE(review): adjacent old/new diff pair (constant vs. variable).
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// NOTE(review): the wrappedNode block below appears to be the REMOVED
// (pre-commit) inline pipeline body; the commit replaces it with the
// runTests(...) call at the bottom — confirm against the original diff.
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
// Create the test VM with test-kitchen: tries a spot instance first
// (spot.yml as .kitchen.local.yml) and falls back to the default
// config when spot creation fails.
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
// Reserve 15 minutes of the global budget for the finally-block work.
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
// Re-run `kitchen verify` in download-only mode to pull artifacts
// off the VM; failures here are ignored (|| exit 0).
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
// Upload coverage only on full runs that actually produced a report.
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// NOTE(review): new (post-commit) body — the whole run is delegated to the
// shared-library runTests step.
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label,
extra_codecov_flags: ["proxy"])
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
// NOTE(review): this span is a rendered git diff of a Jenkins scripted
// pipeline (Groovy), not a clean source file — diff hunk headers and
// old/new line pairs are interleaved below. Code is left byte-identical;
// only comments were added.
// NOTE(review): adjacent old/new diff pair — shared-library pin bumped
// from 'salt@1.1' to 'salt@master-1.2'.
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Test-matrix configuration for this pipeline: Ubuntu 16.04, Python 3,
// ZeroMQ transport with Pycryptodomex, salt-ssh tests (--ssh-tests).
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'ZeroMQ-Pycryptodomex'
def nox_env_name = 'runtests-zeromq-pycryptodomex'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
// NOTE(review): diff hunk header residue — not Groovy code.
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
// NOTE(review): adjacent old/new diff pair — hard-coded limit replaced by
// the concurrent_builds variable.
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
// NOTE(review): adjacent old/new diff pair (constant vs. variable).
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// NOTE(review): the wrappedNode block below appears to be the REMOVED
// (pre-commit) inline pipeline body; the commit replaces it with the
// runTests(...) call at the bottom — confirm against the original diff.
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
// Create the test VM with test-kitchen: tries a spot instance first
// (spot.yml as .kitchen.local.yml) and falls back to the default
// config when spot creation fails.
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
// Reserve 15 minutes of the global budget for the finally-block work.
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
// Re-run `kitchen verify` in download-only mode to pull artifacts
// off the VM; failures here are ignored (|| exit 0).
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
// Upload coverage only on full runs that actually produced a report.
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// NOTE(review): new (post-commit) body — the whole run is delegated to the
// shared-library runTests step.
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
// NOTE(review): this span is a rendered git diff of a Jenkins scripted
// pipeline (Groovy), not a clean source file — diff hunk headers and
// old/new line pairs are interleaved below. Code is left byte-identical;
// only comments were added.
// NOTE(review): adjacent old/new diff pair — shared-library pin bumped
// from 'salt@1.1' to 'salt@master-1.2'.
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Test-matrix configuration for this pipeline: Ubuntu 16.04, Python 3,
// TCP transport, salt-ssh tests (--ssh-tests).
def distro_name = 'ubuntu'
def distro_version = '1604'
def python_version = 'py3'
def test_transport = 'TCP'
def nox_env_name = 'runtests-tcp'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
// NOTE(review): diff hunk header residue — not Groovy code.
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
// NOTE(review): adjacent old/new diff pair — hard-coded limit replaced by
// the concurrent_builds variable.
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
// NOTE(review): adjacent old/new diff pair (constant vs. variable).
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// NOTE(review): the wrappedNode block below appears to be the REMOVED
// (pre-commit) inline pipeline body; the commit replaces it with the
// runTests(...) call at the bottom — confirm against the original diff.
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
// Create the test VM with test-kitchen: tries a spot instance first
// (spot.yml as .kitchen.local.yml) and falls back to the default
// config when spot creation fails.
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
// Reserve 15 minutes of the global budget for the finally-block work.
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
// Re-run `kitchen verify` in download-only mode to pull artifacts
// off the VM; failures here are ignored (|| exit 0).
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
// Upload coverage only on full runs that actually produced a report.
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// NOTE(review): new (post-commit) body — the whole run is delegated to the
// shared-library runTests step.
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
// NOTE(review): this span is a rendered git diff of a Jenkins scripted
// pipeline (Groovy), not a clean source file — diff hunk headers and
// old/new line pairs are interleaved below. Code is left byte-identical;
// only comments were added.
// NOTE(review): adjacent old/new diff pair — shared-library pin bumped
// from 'salt@1.1' to 'salt@master-1.2'.
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
// Test-matrix configuration for this pipeline: Ubuntu 18.04, Python 2,
// ZeroMQ transport, salt-ssh tests (--ssh-tests).
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
// NOTE(review): diff hunk header residue — not Groovy code.
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
// NOTE(review): adjacent old/new diff pair — hard-coded limit replaced by
// the concurrent_builds variable.
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
// NOTE(review): adjacent old/new diff pair (constant vs. variable).
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
// NOTE(review): the wrappedNode block below appears to be the REMOVED
// (pre-commit) inline pipeline body; the commit replaces it with the
// runTests(...) call at the bottom — confirm against the original diff.
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
// Create the test VM with test-kitchen: tries a spot instance first
// (spot.yml as .kitchen.local.yml) and falls back to the default
// config when spot creation fails.
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
// Reserve 15 minutes of the global budget for the finally-block work.
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
// Re-run `kitchen verify` in download-only mode to pull artifacts
// off the VM; failures here are ignored (|| exit 0).
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
// Upload coverage only on full runs that actually produced a report.
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
// NOTE(review): new (post-commit) body — the whole run is delegated to the
// shared-library runTests step.
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 6
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'ubuntu'
def distro_version = '1804'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--ssh-tests'
def concurrent_builds = 1
def use_spot_instances = true
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,137 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
cp -f ~/workspace/spot.yml .kitchen.local.yml
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM || (bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; rm .kitchen.local.yml; bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM); echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout * 60 - 15, unit: 'MINUTES') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,136 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2016'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,136 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py2'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,136 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -1,18 +1,18 @@
@Library('salt@1.1') _
@Library('salt@master-1.2') _
// Define the maximum time, in hours, that a test run should run for
def testrun_timeout = 8
// Now define a global pipeline timeout. This is the test run timeout with one(1) additional
// hour to allow for artifacts to be downloaded, if possible.
def global_timeout = testrun_timeout + 1;
def distro_name = 'windows'
def distro_version = '2019'
def python_version = 'py3'
def test_transport = 'ZeroMQ'
def nox_env_name = 'runtests-zeromq'
def salt_target_branch = 'master'
def golden_images_branch = '2019.2'
def nox_passthrough_opts = '--unit'
def concurrent_builds = 1
def use_spot_instances = false
def jenkins_slave_label = 'kitchen-slave'
properties([
buildDiscarder(logRotator(artifactDaysToKeepStr: '', artifactNumToKeepStr: '', daysToKeepStr: '', numToKeepStr: '30')),
@ -23,136 +23,25 @@ properties([
// Be sure to cancel any previously running builds
def buildNumber = env.BUILD_NUMBER as int
if (buildNumber > 1) {
if (buildNumber > concurrent_builds) {
// This will cancel the previous build which also defined a matching milestone
milestone(buildNumber - 1)
milestone(buildNumber - concurrent_builds)
}
// Define a milestone for this build so that, if another build starts, this one will be aborted
milestone(buildNumber)
wrappedNode('kitchen-slave', global_timeout, '#jenkins-prod-pr') {
withEnv([
'SALT_KITCHEN_PLATFORMS=/var/jenkins/workspace/nox-platforms.yml',
'SALT_KITCHEN_VERIFIER=/var/jenkins/workspace/nox-verifier.yml',
'SALT_KITCHEN_DRIVER=/var/jenkins/workspace/driver.yml',
"NOX_ENV_NAME=runtests-${test_transport.toLowerCase()}",
'NOX_ENABLE_FROM_FILENAMES=true',
"NOX_PASSTHROUGH_OPTS=${nox_passthrough_opts}",
"SALT_TARGET_BRANCH=${salt_target_branch}",
"GOLDEN_IMAGES_CI_BRANCH=${golden_images_branch}",
"CODECOV_FLAGS=${distro_name}${distro_version},${python_version},${test_transport.toLowerCase()}",
'PATH=~/.rbenv/shims:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin',
'RBENV_VERSION=2.6.3',
"TEST_SUITE=${python_version}",
"TEST_PLATFORM=${distro_name}-${distro_version}",
"TEST_TRANSPORT=${test_transport}",
"FORCE_FULL=${params.runFull}",
]) {
// Checkout the repo
stage('Clone') {
cleanWs notFailBuild: true
checkout scm
sh 'git fetch --no-tags https://github.com/saltstack/salt.git +refs/heads/${SALT_TARGET_BRANCH}:refs/remotes/origin/${SALT_TARGET_BRANCH}'
}
// Setup the kitchen required bundle
stage('Setup') {
sh 'bundle install --with ec2 windows --without docker macos opennebula vagrant'
}
stage('Create VM') {
retry(3) {
sh '''
t=$(shuf -i 30-120 -n 1); echo "Sleeping $t seconds"; sleep $t
bundle exec kitchen create $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-create.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-create.log"
fi
"""
}
sh '''
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'image_id:'
bundle exec kitchen diagnose $TEST_SUITE-$TEST_PLATFORM | grep 'instance_type:' -A5
'''
}
try {
timeout(time: testrun_timeout, unit: 'HOURS') {
stage('Converge VM') {
sh '''
ssh-agent /bin/bash -c 'ssh-add ~/.ssh/kitchen.pem; bundle exec kitchen converge $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?"'
'''
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-converge.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-converge.log"
fi
"""
}
stage('Run Tests') {
withEnv(["DONT_DOWNLOAD_ARTEFACTS=1"]) {
sh 'bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";'
}
}
}
} finally {
try {
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-verify.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-verify.log"
fi
"""
stage('Download Artefacts') {
withEnv(["ONLY_DOWNLOAD_ARTEFACTS=1"]){
sh '''
bundle exec kitchen verify $TEST_SUITE-$TEST_PLATFORM || exit 0
'''
}
sh """
if [ -s ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ]; then
mv ".kitchen/logs/${python_version}-${distro_name}-${distro_version}.log" ".kitchen/logs/${python_version}-${distro_name}-${distro_version}-download.log"
fi
if [ -s ".kitchen/logs/kitchen.log" ]; then
mv ".kitchen/logs/kitchen.log" ".kitchen/logs/kitchen-download.log"
fi
"""
}
archiveArtifacts(
artifacts: "artifacts/*,artifacts/**/*,.kitchen/logs/*-create.log,.kitchen/logs/*-converge.log,.kitchen/logs/*-verify.log,.kitchen/logs/*-download.log,artifacts/xml-unittests-output/*.xml",
allowEmptyArchive: true
)
junit 'artifacts/xml-unittests-output/*.xml'
} finally {
stage('Cleanup') {
sh '''
bundle exec kitchen destroy $TEST_SUITE-$TEST_PLATFORM; echo "ExitCode: $?";
'''
}
stage('Upload Coverage') {
script {
withCredentials([[$class: 'StringBinding', credentialsId: 'codecov-upload-token-salt', variable: 'CODECOV_TOKEN']]) {
sh '''
if [ -n "${FORCE_FULL}" -a "${FORCE_FULL}" = "true" -a -f artifacts/coverage/coverage.xml ]; then
(curl -L https://codecov.io/bash | /bin/sh -s -- -R $(pwd) -s artifacts/coverage/ -F "${CODECOV_FLAGS}") || true
fi
'''
}
}
}
}
}
}
}
runTests(
env: env,
distro_name: distro_name,
distro_version: distro_version,
python_version: python_version,
salt_target_branch: salt_target_branch,
golden_images_branch: golden_images_branch,
nox_env_name: nox_env_name,
nox_passthrough_opts: nox_passthrough_opts,
testrun_timeout: testrun_timeout,
run_full: params.runFull,
use_spot_instances: use_spot_instances,
jenkins_slave_label: jenkins_slave_label)
// vim: ft=groovy

View File

@ -6,12 +6,11 @@ codecov:
branch: master
notify:
require_ci_to_pass: no
require_ci_to_pass: yes # Less spammy. Only notify on passing builds.
ignore:
- ^*.py$
- doc/.*
- tests/.*
- ^*.py$ # python files at the repo root, ie, setup.py
- doc/.* # ignore any code under doc/
coverage:
round: up
@ -20,30 +19,61 @@ coverage:
status:
project: # measuring the overall project coverage
default:
default: false # disable the default status that measures entire project
salt: # declare a new status context "salt"
enabled: yes # must be yes|true to enable this status
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
paths: "!tests/" # remove all files in "tests/"
target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against.
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: success # if ci fails report status as success, error, or failure
if_ci_failed: error # if ci fails report status as success, error, or failure
tests: # declare a new status context "tests"
enabled: yes # must be yes|true to enable this status
#target: 100% # we always want 100% coverage here
target: auto # auto while we get this going
base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used.
paths: "!salt/" # only include coverage in "tests/" folder
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
if_not_found: success # if parent is not found report status as success, error, or failure
if_ci_failed: error # if ci fails report status as success, error, or failure
patch: # pull requests only: this commit status will measure the
# entire pull request's Coverage Diff. Checking if the lines
# adjusted are covered at least X%.
default:
enabled: no # must be yes|true to enable this status
target: 80% # specify the target "X%" coverage to hit
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
enabled: yes # must be yes|true to enable this status
target: 100% # Newly added lines must have 100% coverage
if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded
# options: success, error, failure
if_not_found: success
if_ci_failed: success
if_ci_failed: error
changes: # if there are any unexpected changes in coverage
default:
enabled: no # must be yes|true to enable this status
if_no_uploads: success
enabled: yes # must be yes|true to enable this status
if_no_uploads: error
if_not_found: success
if_ci_failed: success
if_ci_failed: error
# No comments because we're not yet running the full test suite on PRs
comment: off
flags:
salt:
paths:
- salt/
tests:
paths:
- tests/
comment:
layout: "reach, diff, flags, files"
after_n_builds: 46 # Only comment on PRs after N builds
# This value is the output of:
# sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"'
behavior: new # Comment posting behaviour
# default: update, if exists. Otherwise post new.
# once: update, if exists. Otherwise post new. Skip if deleted.
# new: delete old and post new.
# spammy: post new (do not delete old comments).

View File

@ -1,14 +1,11 @@
[run]
branch = True
cover_pylib = False
source =
salt
parallel = True
concurrency = multiprocessing
omit =
tests/*.py
setup.py
salt/daemons/test/*
.nox/*
[report]
# Regexes for lines to exclude from consideration
@ -30,7 +27,3 @@ exclude_lines =
ignore_errors = True
[paths]
source =
salt

View File

@ -7,7 +7,7 @@ repos:
alias: compile-linux-py2.7-zmq-requirements
name: Linux Py2.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=2.7
@ -17,9 +17,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py2.7-zmq-requirements
name: OSX Py2.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py2.7-zmq-requirements
name: Darwin Py2.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=2.7
@ -68,7 +68,7 @@ repos:
alias: compile-linux-py3.4-zmq-requirements
name: Linux Py3.4 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.4
@ -90,7 +90,7 @@ repos:
alias: compile-linux-py3.5-zmq-requirements
name: Linux Py3.5 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.5
@ -100,9 +100,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.5-zmq-requirements
name: OSX Py3.5 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py3.5-zmq-requirements
name: Darwin Py3.5 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.5
@ -150,7 +150,7 @@ repos:
alias: compile-linux-py3.6-zmq-requirements
name: Linux Py3.6 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.6
@ -160,9 +160,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.6-zmq-requirements
name: OSX Py3.6 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py3.6-zmq-requirements
name: Darwin Py3.6 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.6
@ -210,7 +210,7 @@ repos:
alias: compile-linux-py3.7-zmq-requirements
name: Linux Py3.7 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/linux\.in)$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|osx|windows)\.in$
exclude: ^requirements/static/(centos-6|amzn-2018\.03|lint|cloud|docs|darwin|windows)\.in$
args:
- -v
- --py-version=3.7
@ -220,9 +220,9 @@ repos:
- --include=requirements/pytest.txt
- --remove-line=^pycrypto==(.*)$
- id: pip-tools-compile
alias: compile-osx-py3.7-zmq-requirements
name: OSX Py3.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/osx\.in))$
alias: compile-darwin-py3.7-zmq-requirements
name: Darwin Py3.7 ZeroMQ Requirements
files: ^(pkg/osx/(req|req_ext)\.txt|requirements/((base|zeromq|pytest)\.txt|static/darwin\.in))$
args:
- -v
- --py-version=3.7

View File

@ -391,7 +391,7 @@
#mine_interval: 60
# Windows platforms lack posix IPC and must rely on slower TCP based inter-
# process communications. Set ipc_mode to 'tcp' on such systems
# process communications. ipc_mode is set to 'tcp' on such systems.
#ipc_mode: ipc
# Overwrite the default tcp ports used by the minion when ipc_mode is set to 'tcp'

View File

@ -1,79 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
compile-translation-catalogs
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Compile the existing translation catalogs.
'''
# Import python libs
import os
import sys
import fnmatch
# Import 3rd-party libs
HAS_BABEL = False
try:
from babel.messages import mofile, pofile
HAS_BABEL = True
except ImportError:
try:
import polib
except ImportError:
print(
'You need to install either babel or pofile in order to compile '
'the message catalogs. One of:\n'
' pip install babel\n'
' pip install polib'
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')
def main():
    '''
    Compile every ``*.po`` translation catalog found under LOCALES_DIR
    into the matching ``*.mo`` file, one locale at a time.

    Uses babel when it is importable (HAS_BABEL set by the import block
    above) and falls back to polib otherwise; the import block guarantees
    one of the two is present.
    '''
    # Trailing comma is the Python 2 "suppress newline" idiom; kept as-is.
    print('Gathering the translation catalogs to compile...'),
    sys.stdout.flush()
    # Map of locale name -> list of .po file paths for that locale.
    entries = {}
    for locale in os.listdir(os.path.join(LOCALES_DIR)):
        if locale == 'pot':
            # 'pot' holds the untranslated templates, not a locale.
            continue
        locale_path = os.path.join(LOCALES_DIR, locale)
        entries[locale] = []
        for dirpath, _, filenames in os.walk(locale_path):
            for filename in fnmatch.filter(filenames, '*.po'):
                entries[locale].append(os.path.join(dirpath, filename))
    print('DONE')
    for locale, po_files in sorted(entries.items()):
        lc_messages_path = os.path.join(LOCALES_DIR, locale, 'LC_MESSAGES')
        print('\nCompiling the \'{0}\' locale:'.format(locale))
        for po_file in sorted(po_files):
            relpath = os.path.relpath(po_file, lc_messages_path)
            # FIX: this was a bare Python 2 ``print`` statement, which is a
            # syntax error under Python 3 and inconsistent with the
            # ``print(...)`` calls used everywhere else in this script.
            print(' {0}.po -> {0}.mo'.format(relpath.split('.po', 1)[0]))
            if HAS_BABEL:
                # FIX: close the file handles deterministically instead of
                # leaking them until garbage collection.
                with open(po_file) as po_fh:
                    catalog = pofile.read_po(po_fh)
                with open(po_file.replace('.po', '.mo'), 'wb') as mo_fh:
                    mofile.write_mo(mo_fh, catalog)
                continue
            catalog = polib.pofile(po_file)
            catalog.save_as_mofile(fpath=po_file.replace('.po', '.mo'))
    print('Done')


if __name__ == '__main__':
    main()

View File

@ -1,53 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
download-translation-catalog
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Download a translation catalog from Transifex.
'''
# Import python libs
import os
import sys
# Import 3rd-party libs
try:
import txclib.utils
except ImportError:
print(
'The \'transifex-client\' library needs to be installed. '
'Please execute one of \'pip install transifex-client\' or '
'\'easy_install transifex-client\''
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
LOCALES_DIR = os.path.join(DOC_DIR, 'locale')
def main():
    '''
    Pull translation catalogs from Transifex.

    Every command-line argument is treated as a locale code (pt_PT,
    zh_CN, ru, ...) and fetched with the transifex client.
    '''
    os.chdir(DOC_DIR)
    tx_root = txclib.utils.find_dot_tx()
    requested_locales = sys.argv[1:]
    if not requested_locales:
        print('You need to pass a locale to this script. For example: '
              'pt_PT, zh_CN, ru, etc...')
        sys.exit(1)
    for locale in requested_locales:
        print('Download \'{0}\' translations catalog...'.format(locale))
        txclib.utils.exec_command('pull', ['-l', locale], tx_root)
    print('Done')


if __name__ == '__main__':
    main()

View File

@ -1,223 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)
update-transifex-source-translations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Update the transifex sources configuration file and push the source
'''
# Import python libs
import os
import sys
import time
import logging
import subprocess
import ConfigParser
try:
import txclib.utils
except ImportError:
sys.stdout.write(
'The \'transifex-client\' library needs to be installed. '
'Please execute one of \'pip install transifex-client\' or '
'\'easy_install transifex-client\'\n'
)
sys.exit(1)
DOC_DIR = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
    '''
    Synchronise the Transifex configuration with the gettext templates.

    Runs ``make gettext`` to extract translatable strings, registers every
    resulting ``.pot`` file as a resource in ``.tx/config``, removes stale
    resources, and (unless ``TRANSIFEX_NO_PUSH`` is set in the environment)
    pushes the source templates to Transifex.
    '''
    os.chdir(DOC_DIR)
    sys.stdout.write('Extracting translatable strings....\n')
    try:
        subprocess.check_call(['make', 'gettext'])
    except subprocess.CalledProcessError as exc:
        sys.stdout.write('An error occurred while extracting the translation '
                         'strings: {0}\n'.format(exc))
        sys.exit(1)
    locale_dir = os.path.join(DOC_DIR, 'locale')
    pot_dir = os.path.join(DOC_DIR, '_build', 'locale')
    tx_root = txclib.utils.find_dot_tx()
    # NOTE(review): if find_dot_tx() returned None, this join would raise a
    # TypeError before the explicit ``if not tx_root`` check below fires --
    # consider moving the check above this line.
    tx_config = os.path.join(tx_root, '.tx', 'config')
    if not tx_root:
        sys.stdout.write(
            'Unable to find the \'.tx/\' directory. Unable to continue\n'
        )
        sys.exit(1)
    # We do not want the txclib INFO or WARNING logging
    logging.getLogger('txclib').setLevel(logging.ERROR)
    sys.stdout.write('Gathering the translation template files...')
    sys.stdout.flush()
    # entries: list of (resource_path, resource_name) tuples, one per
    # (non-empty, when babel can check) .pot template found under pot_dir.
    entries = []
    for dirpath, dirnames, filenames in os.walk(pot_dir):
        for filename in filenames:
            pot_file = os.path.join(dirpath, filename)
            base, ext = os.path.splitext(pot_file)
            if ext != '.pot':
                continue
            resource_path = os.path.relpath(base, pot_dir)
            try:
                # babel is an optional dependency here, used only to skip
                # templates that contain no messages.
                import babel.messages.pofile
                if not len(babel.messages.pofile.read_po(open(pot_file))):
                    # Empty pot file, continue
                    continue
            except ImportError:
                # No babel package, let's keep on going
                pass
            # Flatten path separators and dots into a Transifex-safe
            # resource name (e.g. ref/modules/index -> ref--modules--index).
            resource_name = resource_path.replace(
                '\\', '/').replace('/', '--').replace('.', '_')
            entries.append((resource_path, resource_name))
    sys.stdout.write('Done\n')
    # Let's load the resources already present in the configuration file
    cfg = ConfigParser.SafeConfigParser()
    cfg.read([tx_config])
    # Any section left in handled_resources after the update loop below is
    # stale and gets removed in the cleanup pass.
    handled_resources = set(
        section for section in
        cfg.sections() if section.startswith('salt.')
    )
    sys.stdout.write('Updating the entries in \'.tx/config\'...\n')
    sys.stdout.flush()
    total_entries = len(entries)
    for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
        sys.stdout.write(
            '[{0:>{pad}}/{1}] Updating resource for '
            '{resource_path}.pot ({resource_name})'.format(
                idx + 1,
                total_entries,
                pad=len(str(total_entries)),
                locale_dir=locale_dir,
                resource_name=resource_name,
                resource_path=resource_path
            )
        )
        sys.stdout.flush()
        try:
            txclib.utils.exec_command(
                'set',
                '--auto-local -r salt.{resource_name} '
                '{locale_dir}/<lang>/LC_MESSAGES/{resource_path}.po '
                '--source-lang en '
                '--source-file {pot_dir}/{resource_path}.pot '
                '--source-name {resource_path}.rst '
                '--execute'.format(
                    resource_name=resource_name,
                    resource_path=resource_path,
                    locale_dir=locale_dir,
                    pot_dir=pot_dir.rstrip('/')
                ).split(),
                tx_root
            )
            sys.stdout.write('\n')
            # Resource still exists; do not treat it as stale below.
            if 'salt.{0}'.format(resource_name) in handled_resources:
                handled_resources.remove('salt.{0}'.format(resource_name))
        except Exception as err:
            sys.stdout.write('An error occurred: {0}\n'.format(err))
        except KeyboardInterrupt:
            # Reachable because KeyboardInterrupt is not a subclass of
            # Exception, despite following the broader clause.
            sys.stdout.write('\n')
            sys.exit(1)
        # Small pause between commands -- presumably to avoid hammering the
        # Transifex API; TODO confirm.
        time.sleep(0.025)
    if handled_resources:
        non_handled_resources = len(handled_resources)
        # NOTE(review): missing space between 'upstream' and '(if possible)'
        # in the message below.
        sys.stdout.write(
            'Removing old resources from configuration and upstream'
            '(if possible)\n'
        )
        for idx, resource_name in enumerate(sorted(handled_resources)):
            sys.stdout.write(
                '[{0:>{pad}}/{1}] Removing resource \'{resource_name}\''.format(
                    idx + 1,
                    non_handled_resources,
                    pad=len(str(non_handled_resources)),
                    resource_name=resource_name,
                )
            )
            sys.stdout.flush()
            try:
                txclib.utils.exec_command(
                    'delete',
                    ['-r', resource_name],
                    tx_root
                )
                # Safe removal: we iterate over the sorted() copy, not the
                # set being mutated.
                handled_resources.remove(resource_name)
            except Exception as err:
                sys.stdout.write('An error occurred: {0}\n'.format(err))
            finally:
                if cfg.has_section(resource_name):
                    cfg.remove_section(resource_name)
            sys.stdout.write('\n')
            time.sleep(0.025)
        # NOTE(review): the file handle passed to cfg.write() is never
        # closed explicitly.
        cfg.write(open(tx_config, 'w'))
    sys.stdout.write('\n')
    # Set the translations file type we're using
    txclib.utils.exec_command('set', ['-t', 'PO'], tx_root)
    time.sleep(0.025)
    if 'TRANSIFEX_NO_PUSH' not in os.environ:
        sys.stdout.write('\n')
        sys.stdout.write('Pushing translation template files...\n')
        for idx, (resource_path, resource_name) in enumerate(sorted(entries)):
            sys.stdout.write(
                '[{0:>{pad}}/{1}] Pushing resource for '
                '{resource_path}.pot ({resource_name})'.format(
                    idx + 1,
                    total_entries,
                    pad=len(str(total_entries)),
                    locale_dir=locale_dir,
                    resource_name=resource_name,
                    resource_path=resource_path
                )
            )
            sys.stdout.flush()
            try:
                txclib.utils.exec_command(
                    'push',
                    '--resource salt.{resource_name} '
                    '--source '
                    '--skip '
                    '--no-interactive'.format(
                        resource_name=resource_name,
                        resource_path=resource_path,
                        locale_dir=locale_dir
                    ).split(),
                    tx_root
                )
                sys.stdout.write('\n')
            except Exception as err:
                sys.stdout.write('An error occurred: {0}\n'.format(err))
            except KeyboardInterrupt:
                sys.stdout.write('\n')
                sys.exit(1)
            time.sleep(0.025)
    if handled_resources:
        # Resources whose deletion failed above still need manual cleanup
        # on the Transifex side; remind the operator.
        sys.stdout.write('=' * 80)
        sys.stdout.write(
            '\nDon\'t forget to delete the following remote resources:\n')
        for resource_name in sorted(handled_resources):
            sys.stdout.write(' {0}\n'.format(resource_name))
        sys.stdout.write('=' * 80)
        sys.stdout.write('\nDONE\n')
if __name__ == '__main__':
    main()

File diff suppressed because it is too large Load Diff

View File

@ -9,29 +9,14 @@ BUILDDIR = _build
SPHINXLANG =
XELATEX = xelatex
# ----- Translations Support ------------------------------------------------>
# If language is set, also set translation options
ifeq ($(shell [ "x$(SPHINXLANG)" != "x" ] && echo 0 || echo 1), 0)
TRANSLATIONOPTS = -D language='$(SPHINXLANG)'
else
TRANSLATIONOPTS =
endif
# Reset settings if sphinx-intl is not available
ifeq ($(shell which sphinx-intl >/dev/null 2>&1; echo $$?), 1)
SPHINXLANG =
TRANSLATIONOPTS =
endif
# <---- Translations Support -------------------------------------------------
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(TRANSLATIONOPTS) $(SPHINXOPTS) .
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations
.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
help:
@echo "Please use \`make <target>' where <target> is one of"
@ -53,7 +38,6 @@ help:
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@ -68,38 +52,38 @@ clean:
check_sphinx-build:
@which $(SPHINXBUILD) >/dev/null 2>&1 || (echo "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://www.sphinx-doc.org/en/master/)" >&2; false)
html: check_sphinx-build translations
html: check_sphinx-build
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml: check_sphinx-build translations
dirhtml: check_sphinx-build
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml: check_sphinx-build translations
singlehtml: check_sphinx-build
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle: check_sphinx-build translations
pickle: check_sphinx-build
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json: check_sphinx-build translations
json: check_sphinx-build
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp: check_sphinx-build translations
htmlhelp: check_sphinx-build
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp: check_sphinx-build translations
qthelp: check_sphinx-build
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
@ -108,7 +92,7 @@ qthelp: check_sphinx-build translations
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Salt.qhc"
devhelp: check_sphinx-build translations
devhelp: check_sphinx-build
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@ -117,31 +101,31 @@ devhelp: check_sphinx-build translations
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt"
@echo "# devhelp"
epub: check_sphinx-build translations
epub: check_sphinx-build
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex: check_sphinx-build translations
latex: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf: check_sphinx-build translations
latexpdf: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja: check_sphinx-build translations
latexpdfja: check_sphinx-build
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
pdf: check_sphinx-build translations
pdf: check_sphinx-build
@if [ "$(XELATEX)" = "xelatex" ] || [ "x$(XELATEX)" = "x" ]; then \
echo "The '$(XELATEX)' command was not found."; \
fi
@ -150,40 +134,35 @@ pdf: check_sphinx-build translations
$(MAKE) -C $(BUILDDIR)/latex -i "PDFLATEX=latexmk" "LATEXMKOPTS=-xelatex -interaction=nonstopmode -f -quiet"
@echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex."
cheatsheet: translations
cheatsheet:
@echo "Running cheatsheet/salt.tex file through xelatex..."
cd cheatsheet && xelatex salt.tex && cp salt.pdf ../salt-cheatsheet.pdf
@echo "./salt-cheatsheet.pdf created."
text: check_sphinx-build translations
text: check_sphinx-build
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man: check_sphinx-build translations
man: check_sphinx-build
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo: check_sphinx-build translations
texinfo: check_sphinx-build
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info: check_sphinx-build translations
info: check_sphinx-build
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext: check_sphinx-build
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale"
changes: check_sphinx-build translations
changes: check_sphinx-build
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
@ -205,27 +184,12 @@ doctest: check_sphinx-build
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml: check_sphinx-build translations
xml: check_sphinx-build
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml: check_sphinx-build translations
pseudoxml: check_sphinx-build
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
translations:
@if [ "$(SPHINXLANG)" = "en" ] || [ "x$(SPHINXLANG)" = "x" ]; then \
echo "No need to update translations. Skipping..."; \
elif [ ! -d locale/$(SPHINXLANG) ]; then \
echo "The locale directory for $(SPHINXLANG) does not exist"; \
exit 1; \
else \
echo "Compiling exising message catalog for '$(SPHINXLANG)'"; \
.scripts/compile-translation-catalogs; \
fi
download-translations:
@echo "Downloading $(SPHINXLANG) translations"
.scripts/download-translation-catalog $(SPHINXLANG)

24
doc/_ext/saltrepo.py Normal file
View File

@ -0,0 +1,24 @@
# -*- coding: utf-8 -*-
'''
saltrepo
~~~~~~~~
SaltStack Repository Sphinx directives
'''
def source_read_handler(app, docname, source):
    '''
    Replace every ``|repo_primary_branch|`` marker in a document's source
    with the branch name configured in ``html_context``.

    ``source`` is a one-element mutable list, per the Sphinx
    ``source-read`` event contract; the substitution happens in place.
    '''
    placeholder = '|repo_primary_branch|'
    text = source[0]
    if placeholder not in text:
        return
    branch = app.config.html_context['repo_primary_branch']
    source[0] = text.replace(placeholder, branch)
def setup(app):
    '''
    Sphinx extension entry point.

    Hooks :func:`source_read_handler` into the ``source-read`` event and
    returns the extension metadata Sphinx expects.
    '''
    app.connect('source-read', source_read_handler)
    metadata = {
        'version': 'builtin',
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
    return metadata

View File

@ -264,20 +264,6 @@
<script type="text/javascript" language="javascript">llactid=23943</script>
<script type="text/javascript" language="javascript" src="http://t6.trackalyzer.com/trackalyze.js"></script>
<script>
var _gaq = _gaq || [];
var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
_gaq.push(['_require', 'inpage_linkid', pluginUrl]);
_gaq.push(['_setAccount', 'UA-26984928-1']);
_gaq.push(['_setDomainName', 'saltstack.com']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
{% endif %}
</body>
</html>

View File

@ -52,7 +52,8 @@
SEARCH_CX: '{{ search_cx }}',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '{{ '' if no_search_suffix else file_suffix }}',
HAS_SOURCE: '{{ has_source|lower }}'
HAS_SOURCE: '{{ has_source|lower }}',
REPO_PRIMARY_BRANCH_TAB_NAME: '{{ repo_primary_branch | capitalize }}'
};
</script>
{%- for scriptfile in script_files %}
@ -178,15 +179,18 @@
<div class="row">
<div class="col-sm-12 col-md-11 col-md-offset-1 col-lg-10 col-lg-offset-1">
<nav class="pull-left text-muted">
<a href="https://github.com/saltstack/salt/edit/develop/doc/{{pagename}}.rst">Edit on GitHub</a>
<a href="https://github.com/saltstack/salt/edit/{{ repo_primary_branch }}/doc/{{pagename}}.rst">Edit on GitHub</a>
</nav>
<nav id="localnav">
<ul class="nav navbar-nav">
{%- block relbar_small %}{{ relbar() }}{% endblock %}
{% if not (build_type == "develop" or build_type == "next") and on_saltstack %}
{% if not (build_type == repo_primary_branch or build_type == "next") and on_saltstack %}
<li><a class="icon-dl" href="/en/pdf/Salt-{{ release }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
<li><a class="icon-dl" href="/en/epub/Salt-{{ release }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
{% elif build_type == repo_primary_branch and on_saltstack %}
<li><a class="icon-dl" href="/en/pdf/Salt-{{ repo_primary_branch }}.pdf"><img height="25" width="25" class="nolightbox" src="{{ pathto('_static/images/pdf_icon.svg', 1) }}"></a></li>
<li><a class="icon-dl" href="/en/epub/Salt-{{ repo_primary_branch }}.epub"><img height="25" width="18" class="nolightbox" src="{{ pathto('_static/images/epub_icon.svg', 1) }}"></a></li>
{% endif %}
</ul>
</nav>
@ -202,9 +206,9 @@
<div class="alert alert-warning dev-notification-text" role="alert"><i class="glyphicon glyphicon-warning-sign"></i> These release notes are for an old release of Salt. This release might contain known security and other issues that are fixed in the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Release notes for the latest release" href="{{ release }}.html">latest release</a>.</div>
{% elif build_type == "develop" and on_saltstack %}
{% elif build_type == repo_primary_branch and on_saltstack %}
<div class="alert alert-warning dev-notification-text" role="alert"><i class="glyphicon glyphicon-warning-sign"></i> You are viewing docs from the develop branch, some of these features are not yet released.</div>
<div class="alert alert-warning dev-notification-text" role="alert"><i class="glyphicon glyphicon-warning-sign"></i> You are viewing docs from the {{ repo_primary_branch }} branch, some of these features are not yet released.</div>
{% elif build_type == "inactive" and on_saltstack %}
@ -244,16 +248,16 @@
<p><i>{{ today }}</i></p>
{% if build_type == "latest" %}
<p>You are viewing docs for the latest stable release, {{ latest_release }}. Switch to docs for the previous stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the previous stable release" href="/en/{{ previous_release_dir }}/">{{ previous_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the develop branch" href="/en/develop/">develop</a> branch.</p>
<p>You are viewing docs for the latest stable release, {{ latest_release }}. Switch to docs for the previous stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the previous stable release" href="/en/{{ previous_release_dir }}/">{{ previous_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the {{ repo_primary_branch }} branch" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch }}</a> branch.</p>
{% elif build_type == "previous" %}
<p>You are viewing docs for the previous stable release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the develop branch" href="/en/develop/">develop</a> branch.</p>
<p>You are viewing docs for the previous stable release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the {{ repo_primary_branch }} branch" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch }}</a> branch.</p>
{% elif build_type == "inactive" %}
<p>You are viewing docs for an inactive release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the develop branch" href="/en/develop/">develop</a> branch.</p>
<p>You are viewing docs for an inactive release, {{ previous_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>, or to a recent doc build from the <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Latest docs from the {{ repo_primary_branch }} branch" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch }}</a> branch.</p>
{% elif build_type == "develop" %}
<p>You are viewing docs built from a recent snapshot of the develop branch. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p>
{% elif build_type == repo_primary_branch %}
<p>You are viewing docs built from a recent snapshot of the {{ repo_primary_branch }} branch. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p>
{% elif build_type == "next" %}
<p>You are viewing preview docs for the next major release, {{ next_release }}. Switch to docs for the latest stable release, <a data-container="body" data-toggle="tooltip" data-placement="bottom" title="Docs for the latest stable release" href="/en/latest/">{{ latest_release }}</a>.</p>
@ -285,12 +289,12 @@
<!-- <a class="ss-logo" href="http://saltstack.com"><img width="250" height="63" class="nolightbox sidebar-logo" src="{{ pathto('_static/images/saltstack_logo.svg', 1) }}"></a>
{% if on_saltstack %}
{#
{% if [True, False]|random %}
<a href="http://saltconf.com/register" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
{% else %}
<a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
{% endif %}-->
{% endif %} #}-->
<a href="https://saltconf.com/menu-ad" target="_blank"><img class="nolightbox sidebar-banner center" src="https://get.saltstack.com/rs/304-PHQ-615/images/Salt-docs-menu-ad-250x63.jpg"/></a>
@ -304,11 +308,11 @@
<div class="releaselinks versions {{ build_type }}">
<a class="btn btn-secondary{% if build_type == "previous" or build_type == "inactive" %} active{% endif %}" id="previous"{% if build_type == "previous" or build_type == "inactive" %} title="View release notes"{% else %} title="Switch to docs for the previous stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ previous_release_dir }}/">{{ previous_release }}{% if build_type == "previous" or build_type == "inactive" %} <i class="glyphicon glyphicon-ok"></i>{%- endif %}</a>
<a class="btn btn-secondary{% if build_type == "previous" or build_type == "inactive" %} active{% endif %}" id="previous"{% if build_type == "previous" or build_type == "inactive" %} title="View release notes"{% else %} title="Switch to docs for the previous stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ previous_release_dir }}/">{{ previous_release }}{% if build_type == "previous" or build_type == "inactive" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
<a class="btn btn-secondary{% if build_type == "latest" %} active{% endif %}" id="latest"{% if build_type == "latest" %} title="View release notes"{% else %} title="Switch to docs for the latest stable release"{% endif %} data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/latest/">{{ latest_release }}{% if build_type == "latest" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
<a class="btn btn-secondary{% if build_type == "develop" %} active{% endif %}" id="develop"{% if build_type == "develop" %} title="View all release notes"{% endif %} title="Switch to docs built recently from the develop branch" data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/develop/">Develop{% if build_type == "develop" %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
<a class="btn btn-secondary{% if build_type == repo_primary_branch %} active{% endif %}" id="{{ repo_primary_branch }}"{% if build_type == repo_primary_branch %} title="View all release notes"{% endif %} title="Switch to docs built recently from the {{ repo_primary_branch }} branch" data-container="body" data-toggle="tooltip" data-placement="bottom" href="/en/{{ repo_primary_branch }}/">{{ repo_primary_branch | capitalize }}{% if build_type == repo_primary_branch %} <i class="glyphicon glyphicon-ok"></i>{% endif %}</a>
</div>
@ -376,20 +380,6 @@
<script type="text/javascript" language="javascript">llactid=23943</script>
<script type="text/javascript" language="javascript" src="https://trackalyzer.com/trackalyze_secure.js"></script>
<script>
var _gaq = _gaq || [];
var pluginUrl = '//www.google-analytics.com/plugins/ga/inpage_linkid.js';
_gaq.push(['_require', 'inpage_linkid', pluginUrl]);
_gaq.push(['_setAccount', 'UA-26984928-1']);
_gaq.push(['_setDomainName', 'saltstack.com']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
{% endif %}
</body>
</html>

View File

@ -125,7 +125,7 @@ $( document ).ready(function() {
window.location.href = window.location.href.replace($currentVer.attr("href"), clickedVer);
}
else {
if ($currentVer.text().indexOf("Develop") == -1) {
if ($currentVer.text().indexOf(DOCUMENTATION_OPTIONS.REPO_PRIMARY_BRANCH_TAB_NAME) == -1) {
window.location.href = clickedVer + "topics/releases/" + $currentVer.text().trim() + ".html";
}
else window.location.href = clickedVer + "topics/releases/";

View File

@ -253,7 +253,7 @@ intersphinx_mapping = {
on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
project = 'Salt'
repo_primary_branch = 'master' # This is the default branch on GitHub for the Salt project
version = salt.version.__version__
latest_release = '2019.2.2' # latest release
previous_release = '2018.3.4' # latest release from previous branch
@ -268,22 +268,31 @@ if on_saltstack:
copyright = time.strftime("%Y")
# < --- START do not merge these settings to other branches START ---> #
build_type = 'latest' # latest, previous, develop, next
release = latest_release
build_type = repo_primary_branch # latest, previous, master, next
# < --- END do not merge these settings to other branches END ---> #
# Set google custom search engine
if release == latest_release:
if build_type == repo_primary_branch:
release = latest_release
search_cx = '011515552685726825874:v1had6i279q' # master
#search_cx = '011515552685726825874:x17j5zl74g8' # develop
elif build_type == 'next':
release = next_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest
elif build_type == 'previous':
release = previous_release
if release.startswith('2018.3'):
search_cx = '011515552685726825874:vadptdpvyyu' # 2018.3
elif release.startswith('2017.7'):
search_cx = '011515552685726825874:w-hxmnbcpou' # 2017.7
elif release.startswith('2016.11'):
search_cx = '011515552685726825874:dlsj745pvhq' # 2016.11
else:
search_cx = '011515552685726825874:ht0p8miksrm' # latest
else: # latest or something else
release = latest_release
search_cx = '011515552685726825874:ht0p8miksrm' # latest
elif release.startswith('2018.3'):
search_cx = '011515552685726825874:vadptdpvyyu' # 2018.3
elif release.startswith('2017.7'):
search_cx = '011515552685726825874:w-hxmnbcpou' # 2017.7
elif release.startswith('2016.11'):
search_cx = '011515552685726825874:dlsj745pvhq' # 2016.11
else:
search_cx = '011515552685726825874:x17j5zl74g8' # develop
needs_sphinx = '1.3'
@ -306,6 +315,7 @@ extensions = [
'sphinx.ext.intersphinx',
'httpdomain',
'youtube',
'saltrepo'
#'saltautodoc', # Must be AFTER autodoc
#'shorturls',
]
@ -364,7 +374,7 @@ rst_prolog = """\
# A shortcut for linking to tickets on the GitHub issue tracker
extlinks = {
'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % 'develop', None),
'blob': ('https://github.com/saltstack/salt/blob/%s/%%s' % repo_primary_branch, None),
'issue': ('https://github.com/saltstack/salt/issues/%s', 'issue #'),
'pull': ('https://github.com/saltstack/salt/pull/%s', 'PR #'),
'formula_url': ('https://github.com/saltstack-formulas/%s', ''),
@ -436,6 +446,7 @@ html_context = {
'build_type': build_type,
'today': today,
'copyright': copyright,
'repo_primary_branch': repo_primary_branch
}
html_use_index = True

View File

@ -769,6 +769,30 @@ Statically assigns grains to the minion.
cabinet: 13
cab_u: 14-15
.. conf_minion:: grains_blacklist
``grains_blacklist``
--------------------
Default: ``[]``
Each grains key will be compared against each of the expressions in this list.
Any keys which match will be filtered from the grains. Exact matches, glob
matches, and regular expressions are supported.
.. note::
Some states and execution modules depend on grains. Filtering may cause
them to be unavailable or run unreliably.
.. versionadded:: Neon
.. code-block:: yaml
grains_blacklist:
- cpu_flags
- zmq*
- ipv[46]
.. conf_minion:: grains_cache
``grains_cache``
@ -893,6 +917,20 @@ minion. Since this grain is expensive, it is disabled by default.
iscsi_grains: True
.. conf_minion:: nvme_grains
``nvme_grains``
------------------------
Default: ``False``
The ``nvme_grains`` setting will enable the ``nvme_nqn`` grain on the
minion. Since this grain is expensive, it is disabled by default.
.. code-block:: yaml
nvme_grains: True
.. conf_minion:: mine_enabled
``mine_enabled``
@ -1326,7 +1364,7 @@ creates a new connection for every return to the master.
Default: ``ipc``
Windows platforms lack POSIX IPC and must rely on slower TCP based inter-
process communications. Set ipc_mode to ``tcp`` on such systems.
process communications. ``ipc_mode`` is set to ``tcp`` on such systems.
.. code-block:: yaml

View File

@ -44,8 +44,8 @@ will be skipped if the ``update`` function is run on the fileserver.
Backends for the fileserver are located in `salt/fileserver/`_ (the files not
named ``__init__.py``).
.. _`salt/fileserver/__init__.py`: https://github.com/saltstack/salt/tree/develop/salt/fileserver/__init__.py
.. _`salt/fileserver/`: https://github.com/saltstack/salt/tree/develop/salt/fileserver
.. _`salt/fileserver/__init__.py`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/fileserver/__init__.py
.. _`salt/fileserver/`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/fileserver
Fileclient
----------

View File

@ -383,4 +383,4 @@ Full List of Returners
.. toctree::
all/index
.. _`redis`: https://github.com/saltstack/salt/tree/develop/salt/returners/redis_return.py
.. _`redis`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/returners/redis_return.py

View File

@ -3,7 +3,7 @@ salt.runners.mattermost
**Note for 2017.7 releases!**
Due to the `salt.runners.config <https://github.com/saltstack/salt/blob/develop/salt/runners/config.py>`_ module not being available in this release series, importing the `salt.runners.config <https://github.com/saltstack/salt/blob/develop/salt/runners/config.py>`_ module from the develop branch is required to make this module work.
Due to the `salt.runners.config <https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/runners/config.py>`_ module not being available in this release series, importing the `salt.runners.config <https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/runners/config.py>`_ module from the |repo_primary_branch| branch is required to make this module work.
Ref: `Mattermost runner failing to retrieve config values due to unavailable config runner #43479 <https://github.com/saltstack/salt/issues/43479>`_

View File

@ -21,7 +21,7 @@ illustrate:
.. code-block:: yaml
/etc/salt/master: # maps to "name", unless a "name" argument is specified below
file.managed: # maps to <filename>.<function> - e.g. "managed" in https://github.com/saltstack/salt/tree/develop/salt/states/file.py
file.managed: # maps to <filename>.<function> - e.g. "managed" in https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/states/file.py
- user: root # one of many options passed to the manage function
- group: root
- mode: 644
@ -59,7 +59,7 @@ A well-written state function will follow these steps:
This is an extremely simplified example. Feel free to browse the `source
code`_ for Salt's state modules to see other examples.
.. _`source code`: https://github.com/saltstack/salt/tree/develop/salt/states
.. _`source code`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/states
1. Set up the return dictionary and perform any necessary input validation
(type checking, looking for use of mutually-exclusive arguments, etc.).

View File

@ -31,7 +31,7 @@ imports, to decide whether to load the module. In most cases, it will return a
filename, then that name should be returned instead of ``True``. An example of
this may be seen in the Azure module:
https://github.com/saltstack/salt/tree/develop/salt/cloud/clouds/msazure.py
https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/cloud/clouds/msazure.py
The get_configured_provider() Function
--------------------------------------
@ -57,7 +57,7 @@ created by the cloud host, wait for it to become available, and then
A good example to follow for writing a cloud driver module based on libcloud
is the module provided for Linode:
https://github.com/saltstack/salt/tree/develop/salt/cloud/clouds/linode.py
https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/cloud/clouds/linode.py
The basic flow of a ``create()`` function is as follows:
@ -183,7 +183,7 @@ imports should be absent from the Salt Cloud module.
A good example of a non-libcloud driver is the DigitalOcean driver:
https://github.com/saltstack/salt/tree/develop/salt/cloud/clouds/digitalocean.py
https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/cloud/clouds/digitalocean.py
The ``create()`` Function
-------------------------

View File

@ -89,7 +89,7 @@ functions include:
A good, well commented example of this process is the Fedora deployment
script:
https://github.com/saltstack/salt/blob/develop/salt/cloud/deploy/Fedora.sh
https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/cloud/deploy/Fedora.sh
A number of legacy deploy scripts are included with the release tarball. None
of them are as functional or complete as Salt Bootstrap, and are still included

View File

@ -78,12 +78,12 @@ Fork a Repo Guide_>`_ and is well worth reading.
git fetch upstream
git checkout -b fix-broken-thing upstream/2016.11
If you're working on a feature, create your branch from the develop branch.
If you're working on a feature, create your branch from the |repo_primary_branch| branch.
.. code-block:: bash
git fetch upstream
git checkout -b add-cool-feature upstream/develop
git checkout -b add-cool-feature upstream/|repo_primary_branch|
#. Edit and commit changes to your branch.
@ -149,7 +149,7 @@ Fork a Repo Guide_>`_ and is well worth reading.
.. code-block:: bash
git fetch upstream
git rebase upstream/develop add-cool-feature
git rebase upstream/|repo_primary_branch| add-cool-feature
git push -u origin add-cool-feature
If you do rebase, and the push is rejected with a
@ -185,9 +185,9 @@ Fork a Repo Guide_>`_ and is well worth reading.
https://github.com/my-account/salt/compare/saltstack:2016.11...fix-broken-thing
If your branch is a feature, choose ``develop`` as the base branch,
If your branch is a feature, choose ``|repo_primary_branch|`` as the base branch,
https://github.com/my-account/salt/compare/saltstack:develop...add-cool-feature
https://github.com/my-account/salt/compare/saltstack:|repo_primary_branch|...add-cool-feature
#. Review that the proposed changes are what you expect.
#. Write a descriptive comment. Include links to related issues (e.g.
@ -219,10 +219,10 @@ Fork a Repo Guide_>`_ and is well worth reading.
Salt's Branch Topology
----------------------
There are three different kinds of branches in use: develop, main release
There are three different kinds of branches in use: |repo_primary_branch|, main release
branches, and dot release branches.
- All feature work should go into the ``develop`` branch.
- All feature work should go into the ``|repo_primary_branch|`` branch.
- Bug fixes and documentation changes should go into the oldest **supported
main** release branch affected by the bug or documentation change (you
can use the blame button in github to figure out when the bug was introduced).
@ -236,22 +236,22 @@ branches, and dot release branches.
.. note::
GitHub will open pull requests against Salt's main branch, ``develop``,
GitHub will open pull requests against Salt's main branch, ``|repo_primary_branch|``,
by default. Be sure to check which branch is selected when creating the
pull request.
The Develop Branch
==================
The |repo_primary_branch| Branch
================================
The ``develop`` branch is unstable and bleeding-edge. Pull requests containing
feature additions or non-bug-fix changes should be made against the ``develop``
The ``|repo_primary_branch|`` branch is unstable and bleeding-edge. Pull requests containing
feature additions or non-bug-fix changes should be made against the ``|repo_primary_branch|``
branch.
.. note::
If you have a bug fix or documentation change and have already forked your
working branch from ``develop`` and do not know how to rebase your commits
against another branch, then submit it to ``develop`` anyway. SaltStack's
working branch from ``|repo_primary_branch|`` and do not know how to rebase your commits
against another branch, then submit it to ``|repo_primary_branch|`` anyway. SaltStack's
development team will be happy to back-port it to the correct branch.
**Please make sure you let the maintainers know that the pull request needs
@ -298,7 +298,7 @@ automatically be "merged-forward" into the newer branches.
For example, a pull request is merged into ``2017.7``. Then, the entire
``2017.7`` branch is merged-forward into the ``2018.3`` branch, and the
``2018.3`` branch is merged-forward into the ``develop`` branch.
``2018.3`` branch is merged-forward into the ``|repo_primary_branch|`` branch.
This process makes it easy for contributors to make only one pull-request
against an older branch, but allows the change to propagate to all **main**
@ -338,7 +338,7 @@ message text directly. Only including the text in a pull request will not
close the issue.
GitHub will close the referenced issue once the *commit* containing the
magic text is merged into the default branch (``develop``). Any magic text
magic text is merged into the default branch (``|repo_primary_branch|``). Any magic text
input only into the pull request description will not be seen at the
Git-level when those commits are merged-forward. In other words, only the
commits are merged-forward and not the pull request text.
@ -348,7 +348,7 @@ commits are merged-forward and not the pull request text.
Backporting Pull Requests
=========================
If a bug is fixed on ``develop`` and the bug is also present on a
If a bug is fixed on ``|repo_primary_branch|`` and the bug is also present on a
currently-supported release branch, it will need to be back-ported to an
applicable branch.
@ -363,11 +363,11 @@ applicable branch.
changes themselves.
It is often easiest to fix a bug on the oldest supported release branch and
then merge that branch forward into ``develop`` (as described earlier in this
then merge that branch forward into ``|repo_primary_branch|`` (as described earlier in this
document). When that is not possible the fix must be back-ported, or copied,
into any other affected branches.
These steps assume a pull request ``#1234`` has been merged into ``develop``.
These steps assume a pull request ``#1234`` has been merged into ``|repo_primary_branch|``.
And ``upstream`` is the name of the remote pointing to the main Salt repo.
#. Identify the oldest supported release branch that is affected by the bug.
@ -450,20 +450,20 @@ the name of the main `saltstack/salt`_ repository.
git fetch upstream
#. Update your copy of the ``develop`` branch.
#. Update your copy of the ``|repo_primary_branch|`` branch.
.. code-block:: bash
git checkout develop
git merge --ff-only upstream/develop
git checkout |repo_primary_branch|
git merge --ff-only upstream/|repo_primary_branch|
If Git complains that a fast-forward merge is not possible, you have local
commits.
* Run ``git pull --rebase origin develop`` to rebase your changes on top of
* Run ``git pull --rebase origin |repo_primary_branch|`` to rebase your changes on top of
the upstream changes.
* Or, run ``git branch <branch-name>`` to create a new branch with your
commits. You will then need to reset your ``develop`` branch before
commits. You will then need to reset your ``|repo_primary_branch|`` branch before
updating it with the changes from upstream.
If Git complains that local files will be overwritten, you have changes to
@ -474,7 +474,7 @@ the name of the main `saltstack/salt`_ repository.
.. code-block:: bash
git push origin develop
git push origin |repo_primary_branch|
#. Repeat the previous two steps for any other branches you work with, such as
the current release branch.
@ -551,6 +551,6 @@ Script, see the Bootstrap Script's `Contributing Guidelines`_.
.. _GPG Probot: https://probot.github.io/apps/gpg/
.. _help articles: https://help.github.com/articles/signing-commits-with-gpg/
.. _GPG Signature Verification feature announcement: https://github.com/blog/2144-gpg-signature-verification
.. _bootstrap-salt.sh: https://github.com/saltstack/salt/blob/develop/salt/cloud/deploy/bootstrap-salt.sh
.. _bootstrap-salt.sh: https://github.com/saltstack/salt/blob/|repo_primary_branch|/salt/cloud/deploy/bootstrap-salt.sh
.. _salt-bootstrap repo: https://github.com/saltstack/salt-bootstrap
.. _Contributing Guidelines: https://github.com/saltstack/salt-bootstrap/blob/develop/CONTRIBUTING.md

View File

@ -69,7 +69,7 @@ test files for Salt execution modules.
in the ``test_module_name_source_match`` function. This unit test
ensures that we maintain the naming convention for test files.
.. __: https://github.com/saltstack/salt/blob/develop/tests/filename_map.yml
.. __: https://github.com/saltstack/salt/blob/|repo_primary_branch|/tests/filename_map.yml
Integration Tests
@ -416,13 +416,13 @@ Tests for New Features
If you are adding new functionality to Salt, please write the tests for this new
feature in the same pull request as the new feature. New features should always be
submitted to the ``develop`` branch.
submitted to the ``|repo_primary_branch|`` branch.
If you have already submitted the new feature, but did not write tests in the original
pull request that has already been merged, please feel free to submit a new pull
request containing tests. If the feature was recently added to Salt's ``develop``
request containing tests. If the feature was recently added to Salt's ``|repo_primary_branch|``
branch, then the tests should be added there as well. However, if the feature was
added to ``develop`` some time ago and is already present in one or more release
added to ``|repo_primary_branch|`` some time ago and is already present in one or more release
branches, please refer to the `Tests for Entire Files or Functions`_ section below
for more details about where to submit tests for functions or files that do not
already have tests.
@ -448,7 +448,7 @@ earliest supported release branch that contains the file or function you're test
Once your tests are submitted in a pull request and is merged into the branch in
question, the tests you wrote will be merged-forward by SaltStack core engineers and
the new tests will propagate to the newer release branches. That way the tests you
wrote will apply to all current and relevant release branches, and not just the ``develop``
wrote will apply to all current and relevant release branches, and not just the ``|repo_primary_branch|``
branch, for example. This methodology will help protect against regressions on older
files in Salt's codebase.

View File

@ -1,97 +0,0 @@
Translating Documentation
=========================
If you wish to help translate the Salt documentation to your language, please
head over to the `Transifex`_ website and `signup`__ for an account.
Once registered, head over to the `Salt Translation Project`__, and either
click on **Request Language** if you can't find yours, or, select the language
for which you wish to contribute and click **Join Team**.
`Transifex`_ provides some useful reading resources on their `support
domain`__, namely, some useful articles `directed to translators`__.
.. __: https://www.transifex.com/signup/
.. __: https://www.transifex.com/projects/p/salt/
.. __: http://support.transifex.com/
.. __: http://support.transifex.com/customer/portal/topics/414107-translators/articles
Building A Localized Version of the Documentation
-------------------------------------------------
While you're working on your translation on `Transifex`_, you might want to
have a look at how it's rendering.
Install The Transifex Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To interact with the `Transifex`_ web service you will need to install the
`transifex-client`__:
.. code-block:: bash
pip install transifex-client
.. __: https://github.com/transifex/transifex-client
Configure The Transifex Client
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Once installed, you will need to set it up on your computer. We created a
script to help you with that:
.. code-block:: bash
.scripts/setup-transifex-config
Download Remote Translations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There's a little script which simplifies the download process of the
translations(which isn't that complicated in the first place).
So, let's assume you're translating ``pt_PT``, Portuguese(Portugal). To
download the translations, execute from the ``doc/`` directory of your Salt
checkout:
.. code-block:: bash
make download-translations SPHINXLANG=pt_PT
To download ``pt_PT``, Portuguese(Portugal), and ``nl``, Dutch, you can use the
helper script directly:
.. code-block:: bash
.scripts/download-translation-catalog pt_PT nl
Build Localized Documentation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
After the download process finishes, which might take a while, the next step is
to build a localized version of the documentation.
Following the ``pt_PT`` example above:
.. code-block:: bash
make html SPHINXLANG=pt_PT
View Localized Documentation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Open your browser, point it to the local documentation path and check the
localized output you've just build.
.. _`Transifex`: https://www.transifex.com

View File

@ -48,21 +48,21 @@ completed, add SaltStack as a second remote and fetch any changes from
$ git remote add upstream https://github.com/saltstack/salt.git
$ git fetch upstream
For this tutorial, we will be working off from the ``develop`` branch, which is
For this tutorial, we will be working off from the ``|repo_primary_branch|`` branch, which is
the default branch for the SaltStack GitHub project. This branch needs to
track ``upstream/develop`` so that we will get all upstream changes when they
track ``upstream/|repo_primary_branch|`` so that we will get all upstream changes when they
happen.
.. code-block:: bash
$ git checkout develop
$ git branch --set-upstream-to upstream/develop
$ git checkout |repo_primary_branch|
$ git branch --set-upstream-to upstream/|repo_primary_branch|
-----
Fetch
-----
Fetch any ``upstream`` changes on the ``develop`` branch and sync them to your
Fetch any ``upstream`` changes on the ``|repo_primary_branch|`` branch and sync them to your
local copy of the branch with a single command:
.. code-block:: bash
@ -84,7 +84,7 @@ Now we are ready to get to work. Consult the `sprint beginner bug list
and select an execution module whose ``__virtual__`` function needs to be
updated. I'll select the ``alternatives`` module.
Create a new branch off from ``develop``. Be sure to name it something short
Create a new branch off from ``|repo_primary_branch|``. Be sure to name it something short
and descriptive.
.. code-block:: bash
@ -173,7 +173,7 @@ In your browser, navigate to the `new pull request
<https://github.com/saltstack/salt/compare>`_ page on the ``saltstack/salt``
GitHub repository and click on ``compare across forks``. Select
``<my_account>`` from the list of head forks and the branch you are wanting to
merge into ``develop`` (``virt_ret`` in this case).
merge into ``|repo_primary_branch|`` (``virt_ret`` in this case).
When you have finished reviewing the changes, click ``Create pull request``.
@ -194,7 +194,7 @@ request``.
* I find it easiest to edit the following URL:
``https://github.com/saltstack/salt/compare/develop...<my_account>:virt_ret``
``https://github.com/saltstack/salt/compare/|repo_primary_branch|...<my_account>:virt_ret``
---------
Resources

View File

@ -265,7 +265,7 @@ customize or fix bugs. It will also allow you to build your own installation.
There are several scripts to automate creating a Windows installer as well as
setting up an environment that facilitates developing and troubleshooting Salt
code. They are located in the ``pkg\windows`` directory in the Salt repo
`(here) <https://github.com/saltstack/salt/tree/develop/pkg/windows>`_.
`(here) <https://github.com/saltstack/salt/tree/|repo_primary_branch|/pkg/windows>`_.
Scripts:
--------

View File

@ -741,11 +741,11 @@ done:
option in the `master config template`_ should be updated to show the correct
new default value.
.. _`salt/renderers/`: https://github.com/saltstack/salt/tree/develop/salt/renderers/
.. _`salt/config/__init__.py`: https://github.com/saltstack/salt/tree/develop/salt/config/__init__.py
.. _`master config file`: https://github.com/saltstack/salt/tree/develop/doc/ref/configuration/master.rst
.. _`minion config file`: https://github.com/saltstack/salt/tree/develop/doc/ref/configuration/minion.rst
.. _`master config template`: https://github.com/saltstack/salt/tree/develop/conf/master
.. _`salt/renderers/`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/renderers/
.. _`salt/config/__init__.py`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/salt/config/__init__.py
.. _`master config file`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/doc/ref/configuration/master.rst
.. _`minion config file`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/doc/ref/configuration/minion.rst
.. _`master config template`: https://github.com/saltstack/salt/tree/|repo_primary_branch|/conf/master
Binary Data in the Pillar
=========================

View File

@ -929,11 +929,12 @@ you can now configure which type numbers indicate a login and logout.
See the :py:mod:`wtmp beacon documentation <salt.beacons.wtmp>` for more
information.
Deprecations
============
API Deprecations
----------------
Deprecated and Removed Options
==============================
API Removed Arguments
---------------------
Support for :ref:`LocalClient <local-client>`'s ``expr_form`` argument has
been removed. Please use ``tgt_type`` instead. This change was made due to
@ -952,14 +953,14 @@ their code to use ``tgt_type``.
>>> local.cmd('*', 'cmd.run', ['whoami'], tgt_type='glob')
{'jerry': 'root'}
Minion Configuration Deprecations
---------------------------------
Minion Configuration Deprecated Option
--------------------------------------
The :conf_minion:`master_shuffle` configuration option is deprecated as of the
``2019.2.0`` release. Please use the :conf_minion:`random_master` option instead.
Module Deprecations
-------------------
Module Removed Options
----------------------
- The :py:mod:`napalm_network <salt.modules.napalm_network>` module has been
changed as follows:
@ -1011,8 +1012,8 @@ Module Deprecations
functions have been removed. Please use :py:func:`win_wua.list
<salt.modules.win_wua.list_>` instead.
Pillar Deprecations
-------------------
Pillar Removed Option
---------------------
- The :py:mod:`vault <salt.pillar.vault>` external pillar has been changed as
follows:
@ -1020,8 +1021,8 @@ Pillar Deprecations
- Support for the ``profile`` argument was removed. Any options passed up
until and following the first ``path=`` are discarded.
Roster Deprecations
-------------------
Roster Removed Option
---------------------
- The :py:mod:`cache <salt.roster.cache>` roster has been changed as follows:
@ -1032,8 +1033,8 @@ Roster Deprecations
``private``, ``public``, ``global`` or ``local`` settings. The syntax for
these settings has changed to ``ipv4-*`` or ``ipv6-*``, respectively.
State Deprecations
------------------
State Removed Modules and Options
---------------------------------
- The ``docker`` state module has been removed
@ -1126,8 +1127,8 @@ State Deprecations
- Support for virtual packages has been removed from the
py:mod:`pkg state <salt.states.pkg>`.
Utils Deprecations
------------------
Utils Removed Options
---------------------
The ``cloud`` utils module had the following changes:
@ -1151,7 +1152,7 @@ been deprecated in favor of ``pypsexec``.
Salt-Cloud has deprecated the use of ``impacket`` in favor of ``smbprotocol``.
This change was made because ``impacket`` is not compatible with Python 3.
SaltSSH major updates
SaltSSH Major Updates
=====================
SaltSSH now works across different major Python versions. Python 2.7 ~ Python 3.x

View File

@ -15,6 +15,7 @@ import json
import pprint
import shutil
import tempfile
import datetime
if __name__ == '__main__':
sys.stderr.write('Do not execute this file directly. Use nox instead, it will know how to handle this file\n')
@ -34,8 +35,8 @@ PIP_INSTALL_SILENT = (os.environ.get('JENKINS_URL') or os.environ.get('CI') or o
# Global Path Definitions
REPO_ROOT = os.path.abspath(os.path.dirname(__file__))
SITECUSTOMIZE_DIR = os.path.join(REPO_ROOT, 'tests', 'support', 'coverage')
IS_DARWIN = sys.platform.lower().startswith('darwin')
IS_WINDOWS = sys.platform.lower().startswith('win')
# Python versions to run against
_PYTHON_VERSIONS = ('2', '2.7', '3', '3.4', '3.5', '3.6', '3.7')
@ -45,10 +46,18 @@ nox.options.reuse_existing_virtualenvs = True
# Don't fail on missing interpreters
nox.options.error_on_missing_interpreters = False
# Change current directory to REPO_ROOT
os.chdir(REPO_ROOT)
RUNTESTS_LOGFILE = os.path.join(
'artifacts', 'logs',
'runtests-{}.log'.format(datetime.datetime.now().strftime('%Y%m%d%H%M%S.%f'))
)
def _create_ci_directories():
for dirname in ('logs', 'coverage', 'xml-unittests-output'):
path = os.path.join(REPO_ROOT, 'artifacts', dirname)
path = os.path.join('artifacts', dirname)
if not os.path.exists(path):
os.makedirs(path)
@ -186,20 +195,43 @@ def _get_distro_pip_constraints(session, transport):
pydir = _get_pydir(session)
if IS_WINDOWS:
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-windows.txt'.format(transport))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'windows.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'windows-crypto.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
elif IS_DARWIN:
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-darwin.txt'.format(transport))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'darwin.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'darwin-crypto.txt')
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
else:
_install_system_packages(session)
distro = _get_distro_info(session)
@ -210,20 +242,31 @@ def _get_distro_pip_constraints(session, transport):
'{id}-{version_parts[major]}'.format(**distro)
]
for distro_key in distro_keys:
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}.txt'.format(distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join(REPO_ROOT,
'requirements',
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-crypto.txt'.format(distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-{}.txt'.format(transport, distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
distro_constraints.append(_distro_constraints)
_distro_constraints = os.path.join('requirements',
'static',
pydir,
'{}-{}-crypto.txt'.format(transport, distro_key))
if os.path.exists(_distro_constraints):
distro_constraints.append(_distro_constraints)
return distro_constraints
@ -232,24 +275,24 @@ def _install_requirements(session, transport, *extra_requirements):
distro_constraints = _get_distro_pip_constraints(session, transport)
_requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'base.txt'),
os.path.join(REPO_ROOT, 'requirements', 'zeromq.txt'),
os.path.join(REPO_ROOT, 'requirements', 'pytest.txt')
os.path.join('requirements', 'base.txt'),
os.path.join('requirements', 'zeromq.txt'),
os.path.join('requirements', 'pytest.txt')
]
if sys.platform.startswith('linux'):
requirements_files = [
os.path.join(REPO_ROOT, 'requirements', 'static', 'linux.in')
os.path.join('requirements', 'static', 'linux.in')
]
elif sys.platform.startswith('win'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'windows', 'req.txt'),
os.path.join(REPO_ROOT, 'requirements', 'static', 'windows.in')
os.path.join('pkg', 'windows', 'req.txt'),
os.path.join('requirements', 'static', 'windows.in')
]
elif sys.platform.startswith('darwin'):
requirements_files = [
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req.txt'),
os.path.join(REPO_ROOT, 'pkg', 'osx', 'req_ext.txt'),
os.path.join(REPO_ROOT, 'requirements', 'static', 'osx.in')
os.path.join('pkg', 'osx', 'req.txt'),
os.path.join('pkg', 'osx', 'req_ext.txt'),
os.path.join('requirements', 'static', 'osx.in')
]
while True:
@ -328,7 +371,20 @@ def _run_with_coverage(session, *test_cmd):
# Sometimes some of the coverage files are corrupt which would trigger a CommandFailed
# exception
pass
session.run('coverage', 'xml', '-o', os.path.join(REPO_ROOT, 'artifacts', 'coverage', 'coverage.xml'))
# Generate report for salt code coverage
session.run(
'coverage', 'xml',
'-o', os.path.join('artifacts', 'coverage', 'salt.xml'),
'--omit=tests/*',
'--include=salt/*'
)
# Generate report for tests code coverage
session.run(
'coverage', 'xml',
'-o', os.path.join('artifacts', 'coverage', 'tests.xml'),
'--omit=salt/*',
'--include=tests/*'
)
def _runtests(session, coverage, cmd_args):
@ -418,9 +474,7 @@ def runtests_parametrized(session, coverage, transport, crypto):
session.install(*install_command, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE),
'--transport={}'.format(transport)
] + session.posargs
_runtests(session, coverage, cmd_args)
@ -559,14 +613,12 @@ def runtests_cloud(session, coverage):
_install_requirements(session, 'zeromq', 'unittest-xml-reporting==2.2.1')
pydir = _get_pydir(session)
cloud_requirements = os.path.join(REPO_ROOT, 'requirements', 'static', pydir, 'cloud.txt')
cloud_requirements = os.path.join('requirements', 'static', pydir, 'cloud.txt')
session.install('--progress-bar=off', '-r', cloud_requirements, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE),
'--cloud-provider-tests'
] + session.posargs
_runtests(session, coverage, cmd_args)
@ -581,9 +633,7 @@ def runtests_tornado(session, coverage):
session.install('--progress-bar=off', 'pyzmq==17.0.0', silent=PIP_INSTALL_SILENT)
cmd_args = [
'--tests-logfile={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--tests-logfile={}'.format(RUNTESTS_LOGFILE)
] + session.posargs
_runtests(session, coverage, cmd_args)
@ -614,9 +664,8 @@ def pytest_parametrized(session, coverage, transport, crypto):
cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
@ -757,19 +806,18 @@ def pytest_cloud(session, coverage):
# Install requirements
_install_requirements(session, 'zeromq')
pydir = _get_pydir(session)
cloud_requirements = os.path.join(REPO_ROOT, 'requirements', 'static', pydir, 'cloud.txt')
cloud_requirements = os.path.join('requirements', 'static', pydir, 'cloud.txt')
session.install('--progress-bar=off', '-r', cloud_requirements, silent=PIP_INSTALL_SILENT)
cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
os.path.join(REPO_ROOT, 'tests', 'integration', 'cloud', 'providers')
os.path.join('tests', 'integration', 'cloud', 'providers')
] + session.posargs
_pytest(session, coverage, cmd_args)
@ -784,9 +832,8 @@ def pytest_tornado(session, coverage):
cmd_args = [
'--rootdir', REPO_ROOT,
'--log-file={}'.format(
os.path.join(REPO_ROOT, 'artifacts', 'logs', 'runtests.log')
),
'--log-file={}'.format(RUNTESTS_LOGFILE),
'--log-file-level=debug',
'--no-print-logs',
'-ra',
'-s',
@ -815,7 +862,18 @@ def _pytest(session, coverage, cmd_args):
def _lint(session, rcfile, flags, paths):
_install_requirements(session, 'zeromq')
session.install('--progress-bar=off', '-r', 'requirements/static/{}/lint.txt'.format(_get_pydir(session)), silent=PIP_INSTALL_SILENT)
requirements_file = 'requirements/static/lint.in'
distro_constraints = [
'requirements/static/{}/lint.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
session.run('pylint', '--version')
pylint_report_path = os.environ.get('PYLINT_REPORT')
@ -889,19 +947,73 @@ def lint_tests(session):
@nox.session(python='3')
def docs(session):
@nox.parametrize('update', [False, True])
@nox.parametrize('compress', [False, True])
def docs(session, compress, update):
'''
Build Salt's Documentation
'''
session.notify('docs-html(compress={})'.format(compress))
session.notify('docs-man(compress={}, update={})'.format(compress, update))
@nox.session(name='docs-html', python='3')
@nox.parametrize('compress', [False, True])
def docs_html(session, compress):
'''
Build Salt's HTML Documentation
'''
pydir = _get_pydir(session)
if pydir == 'py3.4':
session.error('Sphinx only runs on Python >= 3.5')
session.install(
'--progress-bar=off',
'-r', 'requirements/static/{}/docs.txt'.format(pydir),
silent=PIP_INSTALL_SILENT)
requirements_file = 'requirements/static/docs.in'
distro_constraints = [
'requirements/static/{}/docs.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
os.chdir('doc/')
session.run('make', 'clean', external=True)
session.run('make', 'html', 'SPHINXOPTS=-W', external=True)
session.run('tar', '-czvf', 'doc-archive.tar.gz', '_build/html')
if compress:
session.run('tar', '-czvf', 'html-archive.tar.gz', '_build/html', external=True)
os.chdir('..')
@nox.session(name='docs-man', python='3')
@nox.parametrize('update', [False, True])
@nox.parametrize('compress', [False, True])
def docs_man(session, compress, update):
'''
Build Salt's Manpages Documentation
'''
pydir = _get_pydir(session)
if pydir == 'py3.4':
session.error('Sphinx only runs on Python >= 3.5')
requirements_file = 'requirements/static/docs.in'
distro_constraints = [
'requirements/static/{}/docs.txt'.format(_get_pydir(session))
]
install_command = [
'--progress-bar=off', '-r', requirements_file
]
for distro_constraint in distro_constraints:
install_command.extend([
'--constraint', distro_constraint
])
session.install(*install_command, silent=PIP_INSTALL_SILENT)
os.chdir('doc/')
session.run('make', 'clean', external=True)
session.run('make', 'man', 'SPHINXOPTS=-W', external=True)
if update:
session.run('rm', '-rf', 'man/', external=True)
session.run('cp', '-Rp', '_build/man', 'man/', external=True)
if compress:
session.run('tar', '-czvf', 'man-archive.tar.gz', '_build/man', external=True)
os.chdir('..')

View File

@ -0,0 +1,2 @@
m2crypto
pycryptodomex

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py2.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/linux-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py2.7/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.5/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.5/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.6/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.6/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/darwin-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/osx.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/osx.in
# pip-compile -o requirements/static/py3.7/darwin.txt -v pkg/osx/req.txt pkg/osx/req_ext.txt requirements/base.txt requirements/zeromq.txt requirements/pytest.txt requirements/static/darwin.in
#
apache-libcloud==2.4.0
argh==0.26.2 # via watchdog

View File

@ -0,0 +1,10 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile -o requirements/static/py3.7/windows-crypto.txt -v requirements/static/crypto.in
#
m2crypto==0.35.2
pycryptodome==3.9.0
pycryptodomex==3.9.0
typing==3.7.4.1 # via m2crypto

View File

@ -925,6 +925,9 @@ VALID_OPTS = {
# Set a hard limit for the amount of memory modules can consume on a minion.
'modules_max_memory': int,
# Blacklist specific core grains to be filtered
'grains_blacklist': list,
# The number of minutes between the minion refreshing its cache of grains
'grains_refresh_every': int,
@ -1222,6 +1225,7 @@ DEFAULT_MINION_OPTS = {
'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'minion'),
'append_minionid_config_dirs': [],
'cache_jobs': False,
'grains_blacklist': [],
'grains_cache': False,
'grains_cache_expiration': 300,
'grains_deep_merge': False,

64
salt/grains/nvme.py Normal file
View File

@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
'''
Grains for NVMe Qualified Names (NQN).
.. versionadded:: Flourine
To enable these grains set `nvme_grains: True`.
.. code-block:: yaml
nvme_grains: True
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import errno
import logging
# Import Salt libs
import salt.utils.files
import salt.utils.path
import salt.utils.platform
__virtualname__ = 'nvme'
# Get logging started
log = logging.getLogger(__name__)
def __virtual__():
    '''
    Only load the module when the ``nvme_grains`` minion option is enabled.
    '''
    enabled = __opts__.get('nvme_grains', False)
    # Preserve the original identity check: anything other than a literal
    # ``False`` (e.g. ``None`` or ``0``) still loads the module.
    if enabled is False:
        return False
    return __virtualname__
def nvme_nqn():
    '''
    Return the NVMe NQN grains.

    The ``nvme_nqn`` grain is a list of NQNs on Linux and ``False`` on every
    other platform.
    '''
    if salt.utils.platform.is_linux():
        return {'nvme_nqn': _linux_nqn()}
    return {'nvme_nqn': False}
def _linux_nqn():
    '''
    Return NVMe NQN from a Linux host.

    Reads ``/etc/nvme/hostnqn`` and collects every line starting with
    ``nqn.``. A missing file yields an empty list silently; any other I/O
    error is logged at debug level.
    '''
    nqns = []
    hostnqn_path = '/etc/nvme/hostnqn'
    try:
        with salt.utils.files.fopen(hostnqn_path, 'r') as hostnqn_file:
            for raw_line in hostnqn_file:
                stripped = raw_line.strip()
                if stripped.startswith('nqn.'):
                    nqns.append(stripped)
    except IOError as exc:
        # ENOENT just means NVMe is not configured on this host.
        if exc.errno != errno.ENOENT:
            log.debug("Error while accessing '%s': %s", hostnqn_path, exc)
    return nqns

View File

@ -34,6 +34,7 @@ import salt.utils.lazy
import salt.utils.odict
import salt.utils.platform
import salt.utils.versions
import salt.utils.stringutils
from salt.exceptions import LoaderError
from salt.template import check_render_pipe_str
from salt.utils.decorators import Depends
@ -773,6 +774,7 @@ def grains(opts, force_refresh=False, proxy=None):
opts['grains'] = {}
grains_data = {}
blist = opts.get('grains_blacklist', [])
funcs = grain_funcs(opts, proxy=proxy)
if force_refresh: # if we refresh, lets reload grain modules
funcs.clear()
@ -784,6 +786,14 @@ def grains(opts, force_refresh=False, proxy=None):
ret = funcs[key]()
if not isinstance(ret, dict):
continue
if blist:
for key in list(ret):
for block in blist:
if salt.utils.stringutils.expr_match(key, block):
del ret[key]
log.trace('Filtering %s grain', key)
if not ret:
continue
if grains_deep_merge:
salt.utils.dictupdate.update(grains_data, ret)
else:
@ -819,6 +829,14 @@ def grains(opts, force_refresh=False, proxy=None):
continue
if not isinstance(ret, dict):
continue
if blist:
for key in list(ret):
for block in blist:
if salt.utils.stringutils.expr_match(key, block):
del ret[key]
log.trace('Filtering %s grain', key)
if not ret:
continue
if grains_deep_merge:
salt.utils.dictupdate.update(grains_data, ret)
else:

View File

@ -1156,7 +1156,7 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06
salt '*' pkg.unhold <package name>
pkgs
A list of packages to hold. Must be passed as a python list.
A list of packages to unhold. Must be passed as a python list.
CLI Example:

View File

@ -13,12 +13,14 @@ Microsoft IIS site management via WebAdministration powershell module
from __future__ import absolute_import, print_function, unicode_literals
import decimal
import logging
import re
import os
import yaml
# Import salt libs
import salt.utils.json
import salt.utils.platform
from salt.ext.six.moves import range
from salt.ext.six.moves import range, map
from salt.exceptions import SaltInvocationError, CommandExecutionError
from salt.ext import six
@ -160,6 +162,45 @@ def _srvmgr(cmd, return_json=False):
return ret
def _collection_match_to_index(pspath, colfilter, name, match):
    '''
    Return the index of the collection item matching the ``match`` dictionary,
    or -1 when no item contains every key/value pair of ``match``.
    '''
    collection = get_webconfiguration_settings(
        pspath, [{'name': name, 'filter': colfilter}])[0]['value']
    wanted = match.items()
    for index, candidate in enumerate(collection):
        if all(pair in candidate.items() for pair in wanted):
            return index
    return -1
def _prepare_settings(pspath, settings):
'''
Prepare settings before execution with get or set functions.
Removes settings with a match parameter when index is not found.
'''
prepared_settings = []
for setting in settings:
if setting.get('name', None) is None:
log.warning('win_iis: Setting has no name: {}'.format(setting))
continue
if setting.get('filter', None) is None:
log.warning('win_iis: Setting has no filter: {}'.format(setting))
continue
match = re.search(r'Collection\[(\{.*\})\]', setting['name'])
if match:
name = setting['name'][:match.start(1)-1]
match_dict = yaml.load(match.group(1))
index = _collection_match_to_index(pspath, setting['filter'], name, match_dict)
if index == -1:
log.warning('win_iis: No match found for setting: {}'.format(setting))
else:
setting['name'] = setting['name'].replace(match.group(1), str(index))
prepared_settings.append(setting)
else:
prepared_settings.append(setting)
return prepared_settings
def list_sites():
'''
List all the currently deployed websites.
@ -1985,3 +2026,167 @@ def set_webapp_settings(name, site, settings):
log.debug('Settings configured successfully: {0}'.format(settings.keys()))
return True
def get_webconfiguration_settings(name, settings):
    r'''
    Get the webconfiguration settings for the IIS PSPath.

    Builds a single PowerShell pipeline that collects every requested
    property into an ArrayList, validates the property names first, then
    runs the pipeline through ``_srvmgr`` and decodes its JSON output.

    Args:
        name (str): The PSPath of the IIS webconfiguration settings.
        settings (list): A list of dictionaries containing setting name and filter.

    Returns:
        list: A list of dictionaries containing setting name, filter and value
            (an empty dict when no valid settings were provided).

    Raises:
        SaltInvocationError: If a property name is rejected by
            ``Get-WebConfigurationProperty``.
        CommandExecutionError: If the PowerShell output cannot be parsed as JSON.

    CLI Example:

    .. code-block:: bash

        salt '*' win_iis.get_webconfiguration_settings name='IIS:\' settings="[{'name': 'enabled', 'filter': 'system.webServer/security/authentication/anonymousAuthentication'}]"
    '''
    ret = {}
    # Accumulate all results in one PowerShell ArrayList so a single remote
    # invocation returns every requested value as JSON.
    ps_cmd = [r'$Settings = New-Object System.Collections.ArrayList;']
    ps_cmd_validate = []
    # Resolve Collection[{...}] matchers and drop malformed settings first.
    settings = _prepare_settings(name, settings)

    if not settings:
        log.warning('No settings provided')
        return ret

    for setting in settings:

        # Build the commands to verify that the property names are valid.
        ps_cmd_validate.extend(['Get-WebConfigurationProperty',
                                '-PSPath', "'{0}'".format(name),
                                '-Filter', "'{0}'".format(setting['filter']),
                                '-Name', "'{0}'".format(setting['name']),
                                '-ErrorAction', 'Stop',
                                '|', 'Out-Null;'])

        # Some ItemProperties are Strings and others are ConfigurationAttributes.
        # Since the former doesn't have a Value property, we need to account
        # for this.
        ps_cmd.append("$Property = Get-WebConfigurationProperty -PSPath '{0}'".format(name))
        ps_cmd.append("-Name '{0}' -Filter '{1}' -ErrorAction Stop;".format(setting['name'], setting['filter']))
        if setting['name'].split('.')[-1] == 'Collection':
            # For collections, optionally project only the keys the caller
            # asked for, and store the items as a nested ArrayList.
            if 'value' in setting:
                ps_cmd.append("$Property = $Property | select -Property {0} ;"
                              .format(",".join(list(setting['value'][0].keys()))))
            ps_cmd.append("$Settings.add(@{{filter='{0}';name='{1}';value=[System.Collections.ArrayList] @($Property)}})| Out-Null;"
                          .format(setting['filter'], setting['name']))
        else:
            # Unwrap ConfigurationAttribute objects to their Value, then store
            # the result as a plain string.
            ps_cmd.append(r'if (([String]::IsNullOrEmpty($Property) -eq $False) -and')
            ps_cmd.append(r"($Property.GetType()).Name -eq 'ConfigurationAttribute') {")
            ps_cmd.append(r'$Property = $Property | Select-Object')
            ps_cmd.append(r'-ExpandProperty Value };')
            ps_cmd.append("$Settings.add(@{{filter='{0}';name='{1}';value=[String] $Property}})| Out-Null;"
                          .format(setting['filter'], setting['name']))
        ps_cmd.append(r'$Property = $Null;')

    # Validate the setting names that were passed in.
    cmd_ret = _srvmgr(cmd=ps_cmd_validate, return_json=True)

    if cmd_ret['retcode'] != 0:
        message = 'One or more invalid property names were specified for the provided container.'
        raise SaltInvocationError(message)

    ps_cmd.append('$Settings')
    cmd_ret = _srvmgr(cmd=ps_cmd, return_json=True)

    try:
        # strict=False: PowerShell output may contain control characters.
        ret = salt.utils.json.loads(cmd_ret['stdout'], strict=False)
    except ValueError:
        raise CommandExecutionError('Unable to parse return data as Json.')

    return ret
def set_webconfiguration_settings(name, settings):
    r'''
    Set the value of the setting for an IIS container.

    Scalar values are normalized to text before comparison (PowerShell
    returns strings), the changes are applied with
    ``Set-WebConfigurationProperty``, and the settings are read back to
    verify that every value was actually modified.

    Args:
        name (str): The PSPath of the IIS webconfiguration settings.
        settings (list): A list of dictionaries containing setting name, filter and value.

    Returns:
        bool: True if successful, otherwise False

    Raises:
        CommandExecutionError: If the PowerShell command applying the settings
            exits with a non-zero return code.

    CLI Example:

    .. code-block:: bash

        salt '*' win_iis.set_webconfiguration_settings name='IIS:\' settings="[{'name': 'enabled', 'filter': 'system.webServer/security/authentication/anonymousAuthentication', 'value': False}]"
    '''
    ps_cmd = []
    # Resolve Collection[{...}] matchers and drop malformed settings first.
    settings = _prepare_settings(name, settings)

    if not settings:
        log.warning('No settings provided')
        return False

    # Treat all values as strings for the purpose of comparing them to existing values.
    for idx, setting in enumerate(settings):
        if setting['name'].split('.')[-1] != 'Collection':
            settings[idx]['value'] = six.text_type(setting['value'])

    current_settings = get_webconfiguration_settings(
        name=name, settings=settings)

    if settings == current_settings:
        log.debug('Settings already contain the provided values.')
        return True

    for setting in settings:
        # If the value is numeric, don't treat it as a string in PowerShell.
        if setting['name'].split('.')[-1] != 'Collection':
            try:
                complex(setting['value'])
                value = setting['value']
            except ValueError:
                value = "'{0}'".format(setting['value'])
        else:
            # Render a collection value as a comma-separated list of
            # PowerShell hashtables: @{k1='v1';k2='v2'},@{...}
            configelement_list = []
            for value_item in setting['value']:
                configelement_construct = []
                for key, value in value_item.items():
                    configelement_construct.append("{0}='{1}'".format(key, value))
                configelement_list.append('@{' + ';'.join(configelement_construct) + '}')
            value = ','.join(configelement_list)

        ps_cmd.extend(['Set-WebConfigurationProperty',
                       '-PSPath', "'{0}'".format(name),
                       '-Filter', "'{0}'".format(setting['filter']),
                       '-Name', "'{0}'".format(setting['name']),
                       '-Value', '{0};'.format(value)])

    cmd_ret = _srvmgr(ps_cmd)

    if cmd_ret['retcode'] != 0:
        msg = 'Unable to set settings for {0}'.format(name)
        raise CommandExecutionError(msg)

    # Get the fields post-change so that we can verify that all values
    # were modified successfully. Track the ones that weren't.
    new_settings = get_webconfiguration_settings(
        name=name, settings=settings)

    failed_settings = []

    for idx, setting in enumerate(settings):

        is_collection = setting['name'].split('.')[-1] == 'Collection'

        # Collections are compared element-wise as plain dicts; scalars are
        # compared as text.
        if ((not is_collection and six.text_type(setting['value']) != six.text_type(new_settings[idx]['value']))
            or (is_collection and list(map(dict, setting['value'])) != list(map(dict, new_settings[idx]['value'])))):
            failed_settings.append(setting)

    if failed_settings:
        log.error('Failed to change settings: %s', failed_settings)
        return False

    log.debug('Settings configured successfully: %s', settings)
    return True

View File

@ -193,5 +193,7 @@ def _checkblk(name):
Check if the blk exists and return its fstype if ok
'''
blk = __salt__['cmd.run']('blkid -o value -s TYPE {0}'.format(name))
blk = __salt__['cmd.run'](
['blkid', '-o', 'value', '-s', 'TYPE', name],
ignore_retcode=True)
return '' if not blk else blk

View File

@ -11,6 +11,7 @@ from Microsoft IIS.
# Import python libs
from __future__ import absolute_import, unicode_literals, print_function
from salt.ext.six.moves import map
# Define the module's virtual name
@ -865,3 +866,125 @@ def set_app(name, site, settings=None):
ret['result'] = True
return ret
def webconfiguration_settings(name, settings=None):
    r'''
    Set the value of webconfiguration settings.

    :param str name: The name of the IIS PSPath containing the settings.
        Possible PSPaths are :
        MACHINE, MACHINE/WEBROOT, IIS:\, IIS:\Sites\sitename, ...
    :param dict settings: Dictionaries of dictionaries.
        You can match a specific item in a collection with this syntax inside a key:
        'Collection[{name: site0}].logFile.directory'

    Example of usage for the ``MACHINE/WEBROOT`` PSPath:

    .. code-block:: yaml

        MACHINE-WEBROOT-level-security:
          win_iis.webconfiguration_settings:
            - name: 'MACHINE/WEBROOT'
            - settings:
                system.web/authentication/forms:
                  requireSSL: True
                  protection: "All"
                  credentials.passwordFormat: "SHA1"
                system.web/httpCookies:
                  httpOnlyCookies: True

    Example of usage for the ``IIS:\Sites\site0`` PSPath:

    .. code-block:: yaml

        site0-IIS-Sites-level-security:
          win_iis.webconfiguration_settings:
            - name: 'IIS:\Sites\site0'
            - settings:
                system.webServer/httpErrors:
                  errorMode: "DetailedLocalOnly"
                system.webServer/security/requestFiltering:
                  allowDoubleEscaping: False
                  verbs.Collection:
                    - verb: TRACE
                      allowed: False
                  fileExtensions.allowUnlisted: False

    Example of usage for the ``IIS:\`` PSPath with a collection matching:

    .. code-block:: yaml

        site0-IIS-level-security:
          win_iis.webconfiguration_settings:
            - name: 'IIS:\'
            - settings:
                system.applicationHost/sites:
                  'Collection[{name: site0}].logFile.directory': 'C:\logs\iis\site0'
    '''
    ret = {'name': name,
           'changes': {},
           'comment': str(),
           'result': None}

    if not settings:
        ret['comment'] = 'No settings to change provided.'
        ret['result'] = True
        return ret

    ret_settings = {
        'changes': {},
        'failures': {},
    }

    # Flatten the nested {filter: {setting: value}} mapping into the list of
    # {'filter': ..., 'name': ..., 'value': ...} dicts the execution module
    # expects. ``setting_filter`` avoids shadowing the ``filter`` builtin.
    settings_list = list()
    for setting_filter, filter_settings in settings.items():
        for setting_name, value in filter_settings.items():
            settings_list.append({'filter': setting_filter, 'name': setting_name, 'value': value})

    current_settings_list = __salt__['win_iis.get_webconfiguration_settings'](name=name, settings=settings_list)
    for idx, setting in enumerate(settings_list):

        is_collection = setting['name'].split('.')[-1] == 'Collection'
        setting_key = setting['filter'] + '.' + setting['name']
        # If this is a new setting and not an update to an existing setting
        if len(current_settings_list) <= idx:
            ret_settings['changes'][setting_key] = {'old': {},
                                                    'new': settings_list[idx]['value']}
        # Collections are compared element-wise as plain dicts; scalar values
        # are compared as text, mirroring the execution module's comparison.
        elif ((is_collection and list(map(dict, setting['value'])) != list(map(dict, current_settings_list[idx]['value'])))
              or (not is_collection and str(setting['value']) != str(current_settings_list[idx]['value']))):
            ret_settings['changes'][setting_key] = {'old': current_settings_list[idx]['value'],
                                                    'new': settings_list[idx]['value']}
    if not ret_settings['changes']:
        ret['comment'] = 'Settings already contain the provided values.'
        ret['result'] = True
        return ret
    elif __opts__['test']:
        ret['comment'] = 'Settings will be changed.'
        ret['changes'] = ret_settings
        return ret

    success = __salt__['win_iis.set_webconfiguration_settings'](name=name, settings=settings_list)

    # Re-read the settings to verify each change actually took effect and
    # track the ones that did not.
    new_settings_list = __salt__['win_iis.get_webconfiguration_settings'](name=name, settings=settings_list)
    for idx, setting in enumerate(settings_list):

        is_collection = setting['name'].split('.')[-1] == 'Collection'

        if ((is_collection and setting['value'] != new_settings_list[idx]['value'])
                or (not is_collection and str(setting['value']) != str(new_settings_list[idx]['value']))):
            setting_key = setting['filter'] + '.' + setting['name']
            ret_settings['failures'][setting_key] = {'old': current_settings_list[idx]['value'],
                                                     'new': new_settings_list[idx]['value']}
            # Bug fix: the original called dict.get() here and discarded the
            # result (a no-op). A setting that failed to apply is reported
            # under 'failures', so remove it from the pending 'changes'.
            ret_settings['changes'].pop(setting_key, None)

    if ret_settings['failures']:
        ret['comment'] = 'Some settings failed to change.'
        ret['changes'] = ret_settings
        ret['result'] = False
    else:
        ret['comment'] = 'Set settings to contain the provided values.'
        ret['changes'] = ret_settings['changes']
        ret['result'] = success

    return ret

View File

@ -15,6 +15,7 @@
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
import types
import signal
import getpass
import logging
@ -56,6 +57,17 @@ def _sorted(mixins_or_funcs):
)
class MixinFuncsContainer(list):
    '''
    A list of mix-in callables that stores each function at most once.

    Bound methods are unwrapped to their underlying function before the
    duplicate check, so the same method contributed several times through a
    class hierarchy is only registered a single time.
    '''

    def append(self, func):
        # Unwrap bound methods; only the underlying function matters.
        target = func.__func__ if isinstance(func, types.MethodType) else func
        # Silently skip anything already registered.
        if target not in self:
            super(MixinFuncsContainer, self).append(target)
class MixInMeta(type):
# This attribute here won't actually do anything. But, if you need to
# specify an order or a dependency within the mix-ins, please define the
@ -79,13 +91,13 @@ class OptionParserMeta(MixInMeta):
bases,
attrs)
if not hasattr(instance, '_mixin_setup_funcs'):
instance._mixin_setup_funcs = []
instance._mixin_setup_funcs = MixinFuncsContainer()
if not hasattr(instance, '_mixin_process_funcs'):
instance._mixin_process_funcs = []
instance._mixin_process_funcs = MixinFuncsContainer()
if not hasattr(instance, '_mixin_after_parsed_funcs'):
instance._mixin_after_parsed_funcs = []
instance._mixin_after_parsed_funcs = MixinFuncsContainer()
if not hasattr(instance, '_mixin_before_exit_funcs'):
instance._mixin_before_exit_funcs = []
instance._mixin_before_exit_funcs = MixinFuncsContainer()
for base in _sorted(bases + (instance,)):
func = getattr(base, '_mixin_setup', None)
@ -304,7 +316,7 @@ class MergeConfigMixIn(six.with_metaclass(MixInMeta, object)):
# the config options and if needed override them
self._mixin_after_parsed_funcs.append(self.__merge_config_with_cli)
def __merge_config_with_cli(self, *args): # pylint: disable=unused-argument
def __merge_config_with_cli(self):
# Merge parser options
for option in self.option_list:
if option.dest is None:
@ -687,7 +699,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
# Remove it from config so it inherits from log_level_logfile
self.config.pop(self._logfile_loglevel_config_setting_name_)
def __setup_logfile_logger_config(self, *args): # pylint: disable=unused-argument
def __setup_logfile_logger_config(self):
if self._logfile_loglevel_config_setting_name_ in self.config and not \
self.config.get(self._logfile_loglevel_config_setting_name_):
# Remove it from config so it inherits from log_level
@ -852,7 +864,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
for name, level in six.iteritems(self.config.get('log_granular_levels', {})):
log.set_logger_level(name, level)
def __setup_extended_logging(self, *args): # pylint: disable=unused-argument
def __setup_extended_logging(self):
if salt.utils.platform.is_windows() and self._setup_mp_logging_listener_:
# On Windows when using a logging listener, all extended logging
# will go through the logging listener.
@ -862,14 +874,14 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
def _get_mp_logging_listener_queue(self):
return log.get_multiprocessing_logging_queue()
def _setup_mp_logging_listener(self, *args): # pylint: disable=unused-argument
def _setup_mp_logging_listener(self):
if self._setup_mp_logging_listener_:
log.setup_multiprocessing_logging_listener(
self.config,
self._get_mp_logging_listener_queue()
)
def _setup_mp_logging_client(self, *args): # pylint: disable=unused-argument
def _setup_mp_logging_client(self):
if self._setup_mp_logging_listener_:
# Set multiprocessing logging level even in non-Windows
# environments. In non-Windows environments, this setting will
@ -895,7 +907,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
log.shutdown_console_logging()
log.shutdown_logfile_logging()
def __setup_console_logger_config(self, *args): # pylint: disable=unused-argument
def __setup_console_logger_config(self):
# Since we're not going to be a daemon, setup the console logger
logfmt = self.config.get(
'log_fmt_console',
@ -921,7 +933,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
self.config['log_fmt_console'] = logfmt
self.config['log_datefmt_console'] = datefmt
def __setup_console_logger(self, *args): # pylint: disable=unused-argument
def __setup_console_logger(self):
# If daemon is set force console logger to quiet
if getattr(self.options, 'daemon', False) is True:
return
@ -2580,7 +2592,7 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
# info or error.
self.config['loglevel'] = 'info'
def __create_keys_dir(self, *args): # pylint: disable=unused-argument
def __create_keys_dir(self):
if not os.path.isdir(self.config['gen_keys_dir']):
os.makedirs(self.config['gen_keys_dir'])

View File

@ -14,6 +14,7 @@ import errno
import types
import signal
import logging
import functools
import threading
import contextlib
import subprocess
@ -21,7 +22,6 @@ import multiprocessing
import multiprocessing.util
import socket
# Import salt libs
import salt.defaults.exitcodes
import salt.utils.files
@ -716,6 +716,15 @@ class Process(multiprocessing.Process, NewStyleClassMixIn):
(salt.log.setup.shutdown_multiprocessing_logging, [], {})
]
# Because we need to enforce our after fork and finalize routines,
# we must wrap this class run method to allow for these extra steps
# to be executed pre and post calling the actual run method,
# having subclasses call super would just not work.
#
# We use setattr here to fool pylint not to complain that we're
# overriding run from the subclass here
setattr(self, 'run', self.__decorate_run(self.run))
# __setstate__ and __getstate__ are only used on Windows.
def __setstate__(self, state):
args = state['args']
@ -741,25 +750,30 @@ class Process(multiprocessing.Process, NewStyleClassMixIn):
def _setup_process_logging(self):
salt.log.setup.setup_multiprocessing_logging(self.log_queue)
def run(self):
for method, args, kwargs in self._after_fork_methods:
method(*args, **kwargs)
try:
return super(Process, self).run()
except SystemExit:
# These are handled by multiprocessing.Process._bootstrap()
raise
except Exception as exc:
log.error(
'An un-handled exception from the multiprocessing process '
'\'%s\' was caught:\n', self.name, exc_info=True)
# Re-raise the exception. multiprocessing.Process will write it to
# sys.stderr and set the proper exitcode and we have already logged
# it above.
raise
finally:
for method, args, kwargs in self._finalize_methods:
def __decorate_run(self, run_func):
@functools.wraps(run_func)
def wrapped_run_func():
for method, args, kwargs in self._after_fork_methods:
method(*args, **kwargs)
try:
return run_func()
except SystemExit:
# These are handled by multiprocessing.Process._bootstrap()
six.reraise(*sys.exc_info())
except Exception as exc: # pylint: disable=broad-except
log.error(
'An un-handled exception from the multiprocessing process '
'\'%s\' was caught:\n', self.name, exc_info=True)
# Re-raise the exception. multiprocessing.Process will write it to
# sys.stderr and set the proper exitcode and we have already logged
# it above.
six.reraise(*sys.exc_info())
finally:
for method, args, kwargs in self._finalize_methods:
method(*args, **kwargs)
return wrapped_run_func
class MultiprocessingProcess(Process):

Some files were not shown because too many files have changed in this diff Show More