Merge branch '2015.8' into '2016.3'

Conflicts:
  - salt/cloud/deploy/bootstrap-salt.sh
rallytime 2016-08-16 11:02:05 -06:00
commit c6c82be1de
12 changed files with 239 additions and 105 deletions

View File

@@ -134,8 +134,11 @@ SUDO=""
if [ -n "{{SUDO}}" ]
then SUDO="sudo "
fi
if [ "$SUDO" ]
SUDO_USER="{{SUDO_USER}}"
if [ "$SUDO" ] && [ "$SUDO_USER" ]
then SUDO="sudo -u {{SUDO_USER}}"
elif [ "$SUDO" ] && [ -n "$SUDO_USER" ]
then SUDO="sudo "
fi
EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
PYTHON_CMDS="python3 python27 python2.7 python26 python2.6 python2 python"
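The rendered shim above decides between plain sudo and sudo -u: salt-ssh substitutes {{SUDO}} and {{SUDO_USER}} before the script ever runs, so the string tests compare literal rendered values. A minimal Python sketch of the resulting decision table (build_sudo_prefix is a hypothetical helper, not part of Salt):

    def build_sudo_prefix(want_sudo, sudo_user):
        # Mirror the shim: no prefix without sudo, "sudo -u <user> "
        # when a target user was rendered in, plain "sudo " otherwise.
        if not want_sudo:
            return ''
        if sudo_user:
            return 'sudo -u {0} '.format(sudo_user)
        return 'sudo '

    assert build_sudo_prefix(False, 'anyone') == ''
    assert build_sudo_prefix(True, '') == 'sudo '
    assert build_sudo_prefix(True, 'svc') == 'sudo -u svc '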

View File

@@ -231,7 +231,7 @@ def main(argv): # pylint: disable=W0613
subprocess.call(salt_argv)
shutil.rmtree(OPTIONS.saltdir)
else:
os.execv(sys.executable, salt_argv)
subprocess.call(salt_argv)
if OPTIONS.cmd_umask is not None:
os.umask(old_umask)
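The replacement of os.execv() with subprocess.call() is more than cosmetic: execv replaces the current process image and never returns, so the umask restore below (and any other cleanup after it) could never run on that branch. subprocess.call() executes the child and returns its exit status. A small demonstration, assuming a POSIX Python:

    import subprocess
    import sys

    # subprocess.call() returns control, so cleanup code still runs.
    rc = subprocess.call([sys.executable, '-c', 'print("child ran")'])
    print('cleanup still runs; child exited with', rc)

    # By contrast, os.execv(sys.executable, [sys.executable, '-c', '...'])
    # would replace this process outright; no line after it would execute.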

View File

@@ -18,7 +18,7 @@
#======================================================================================================================
set -o nounset # Treat unset variables as an error
__ScriptVersion="2016.08.15"
__ScriptVersion="2016.08.16"
__ScriptName="bootstrap-salt.sh"
__ScriptFullName="$0"
@@ -2270,23 +2270,8 @@ __enable_universe_repository() {
}
install_ubuntu_deps() {
if ([ "${_SLEEP}" -eq "${__DEFAULT_SLEEP}" ] && [ "$DISTRO_MAJOR_VERSION" -lt 15 ]); then
# The user did not pass a custom sleep value as an argument, let's increase the default value
echodebug "On Ubuntu systems we increase the default sleep value to 10."
echodebug "See https://github.com/saltstack/salt/issues/12248 for more info."
_SLEEP=10
fi
if [ $_START_DAEMONS -eq $BS_FALSE ]; then
echowarn "Not starting daemons on Debian based distributions is not working mostly because starting them is the default behaviour."
fi
# No user interaction, libc6 restart services for example
export DEBIAN_FRONTEND=noninteractive
apt-get update
# Install Keys
__apt_get_install_noinput debian-archive-keyring && apt-get update
if [ "$DISTRO_MAJOR_VERSION" -gt 12 ] || ([ "$DISTRO_MAJOR_VERSION" -eq 12 ] && [ "$DISTRO_MINOR_VERSION" -eq 10 ]); then
# Above Ubuntu 12.04 add-apt-repository is in a different package
__apt_get_install_noinput software-properties-common || return 1
@@ -2317,9 +2302,9 @@ install_ubuntu_deps() {
fi
fi
fi
__PIP_PACKAGES=""
apt-get update
fi
# Minimal systems might not have upstart installed, install it
__PACKAGES="upstart"
@@ -2339,22 +2324,13 @@ install_ubuntu_deps() {
# Additionally install procps and pciutils which allows for Docker bootstraps. See 366#issuecomment-39666813
__PACKAGES="${__PACKAGES} procps pciutils"
apt-get update
# shellcheck disable=SC2086,SC2090
__apt_get_install_noinput ${__PACKAGES} || return 1
if [ "${__PIP_PACKAGES}" != "" ]; then
# shellcheck disable=SC2086,SC2090
if [ "$_VIRTUALENV_DIR" != "null" ]; then
__activate_virtualenv
fi
pip install -U "${__PIP_PACKAGES}"
fi
if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
__apt_get_upgrade_noinput || return 1
fi
# shellcheck disable=SC2086,SC2090
__apt_get_install_noinput ${__PACKAGES} || return 1
if [ "${_EXTRA_PACKAGES}" != "" ]; then
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
# shellcheck disable=SC2086
@@ -2365,7 +2341,24 @@ install_ubuntu_deps() {
}
install_ubuntu_stable_deps() {
install_ubuntu_deps || return 1
if ([ "${_SLEEP}" -eq "${__DEFAULT_SLEEP}" ] && [ "$DISTRO_MAJOR_VERSION" -lt 15 ]); then
# The user did not pass a custom sleep value as an argument, let's increase the default value
echodebug "On Ubuntu systems we increase the default sleep value to 10."
echodebug "See https://github.com/saltstack/salt/issues/12248 for more info."
_SLEEP=10
fi
if [ $_START_DAEMONS -eq $BS_FALSE ]; then
echowarn "Not starting daemons on Debian based distributions is not working mostly because starting them is the default behaviour."
fi
# No user interaction, libc6 restart services for example
export DEBIAN_FRONTEND=noninteractive
apt-get update
# Install Keys
__apt_get_install_noinput debian-archive-keyring && apt-get update
if [ $_DISABLE_REPOS -eq $BS_FALSE ]; then
__get_dpkg_architecture || return 1
@@ -2385,7 +2378,7 @@ install_ubuntu_stable_deps() {
fi
fi
# Versions starting with 2015.5.6 and 2015.8.1 are hosted at repo.saltstack.com
# Versions starting with 2015.5.6, 2015.8.1 and 2016.3.0 are hosted at repo.saltstack.com
if [ "$(echo "$STABLE_REV" | egrep '^(2015\.5|2015\.8|2016\.3|latest|archive\/)')" != "" ]; then
# Workaround for latest non-LTS ubuntu
if [ "$DISTRO_MAJOR_VERSION" -eq 15 ]; then
@@ -2400,11 +2393,7 @@ install_ubuntu_stable_deps() {
# SaltStack's stable Ubuntu repository:
SALTSTACK_UBUNTU_URL="${HTTP_VAL}://repo.saltstack.com/apt/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${STABLE_REV}"
if [ "$(grep -ER 'latest .+ main' /etc/apt)" = "" ]; then
set +o nounset
echo "deb $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > "/etc/apt/sources.list.d/saltstack.list"
set -o nounset
fi
apt-get update
# Make sure https transport is available
if [ "$HTTP_VAL" = "https" ] ; then
@@ -2414,6 +2403,12 @@ install_ubuntu_stable_deps() {
# Make sure wget is available
__apt_get_install_noinput wget
if [ "$(grep -ER 'latest .+ main' /etc/apt)" = "" ]; then
set +o nounset
echo "deb $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > "/etc/apt/sources.list.d/saltstack.list"
set -o nounset
fi
# shellcheck disable=SC2086
wget $_WGET_ARGS -q $SALTSTACK_UBUNTU_URL/SALTSTACK-GPG-KEY.pub -O - | apt-key add - || return 1
@@ -2436,11 +2431,11 @@ install_ubuntu_stable_deps() {
fi
fi
apt-get update
install_ubuntu_deps || return 1
}
install_ubuntu_daily_deps() {
install_ubuntu_deps || return 1
install_ubuntu_stable_deps || return 1
if [ "$DISTRO_MAJOR_VERSION" -ge 12 ]; then
# Above Ubuntu 11.10 add-apt-repository is in a different package
@@ -2476,10 +2471,9 @@ install_ubuntu_git_deps() {
__git_clone_and_checkout || return 1
__PACKAGES=""
__PIP_PACKAGES=""
# See how we are installing packages
if [ ${_PIP_ALL} -eq $BS_TRUE ]; then
if [ "${_PIP_ALL}" -eq $BS_TRUE ]; then
__PACKAGES="${__PACKAGES} python-dev swig libssl-dev libzmq3 libzmq3-dev"
if ! __check_command_exists pip; then
@@ -2492,37 +2486,18 @@ install_ubuntu_git_deps() {
# Install the pythons from requirements (only zmq for now)
__install_pip_deps "${_SALT_GIT_CHECKOUT_DIR}/requirements/zeromq.txt" || return 1
else
install_ubuntu_deps || return 1
__PACKAGES="${__PACKAGES} python-yaml python-m2crypto python-crypto msgpack-python python-zmq python-jinja2"
install_ubuntu_stable_deps || return 1
__PACKAGES="${__PACKAGES} python-crypto python-jinja2 python-m2crypto python-msgpack python-requests"
__PACKAGES="${__PACKAGES} python-tornado python-yaml python-zmq"
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
# Install python-libcloud if asked to
__PACKAGES="${__PACKAGES} python-libcloud"
fi
if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
# We're on the develop branch, install whichever tornado is on the requirements file
__REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
if [ "${__REQUIRED_TORNADO}" != "" ]; then
__check_pip_allowed "You need to allow pip based installations (-P) in order to install the python package '${__REQUIRED_TORNADO}'"
__PACKAGES="${__PACKAGES} python-dev"
__PIP_PACKAGES="${__PIP_PACKAGES} ${__REQUIRED_TORNADO}"
if ! __check_command_exists pip; then
__PACKAGES="${__PACKAGES} python-setuptools python-pip"
fi
fi
fi
# shellcheck disable=SC2086
__apt_get_install_noinput ${__PACKAGES} || return 1
if [ "${__PIP_PACKAGES}" != "" ]; then
# shellcheck disable=SC2086,SC2090
pip install -U ${__PIP_PACKAGES} || return 1
fi
fi
# Let's trigger config_salt()
@@ -2578,7 +2553,7 @@ install_ubuntu_stable_post() {
# Workaround for latest LTS packages on latest ubuntu. Normally packages on
# debian-based systems will automatically start the corresponding daemons
if [ "$DISTRO_MAJOR_VERSION" -lt 15 ]; then
return 0
return 0
fi
for fname in minion master syndic api; do
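For the repo.saltstack.com hunks above: the apt source line is written only when STABLE_REV matches one of the hosted trees (2015.5, 2015.8, 2016.3, latest, or an archive/ path), and the write now happens after the https transport and wget are confirmed available. A rough Python rendering of that gating logic (the helper name is invented; the URL layout is taken from the script):

    import re

    def saltstack_apt_line(http_val, ubuntu_version, repo_arch, stable_rev,
                           codename):
        # Same pattern the script feeds to egrep.
        if not re.match(r'^(2015\.5|2015\.8|2016\.3|latest|archive/)',
                        stable_rev):
            return None
        url = '{0}://repo.saltstack.com/apt/ubuntu/{1}/{2}/{3}'.format(
            http_val, ubuntu_version, repo_arch, stable_rev)
        return 'deb {0} {1} main'.format(url, codename)

    print(saltstack_apt_line('https', '14.04', 'amd64', '2016.3', 'trusty'))
    # -> deb https://repo.saltstack.com/apt/ubuntu/14.04/amd64/2016.3 trusty main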

View File

@@ -1189,10 +1189,9 @@ class AESFuncs(object):
'''
if any(key not in load for key in ('id', 'path', 'loc')):
return False
if not self.opts['file_recv'] or os.path.isabs(load['path']):
if not isinstance(load['path'], list):
return False
if os.path.isabs(load['path']) or '../' in load['path']:
# Can overwrite master files!!
if not self.opts['file_recv']:
return False
if not salt.utils.verify.valid_id(self.opts, load['id']):
return False
@@ -1229,18 +1228,28 @@ class AESFuncs(object):
)
return {}
load.pop('tok')
# Normalize Windows paths
normpath = load['path']
if ':' in normpath:
# make sure double backslashes are normalized
normpath = normpath.replace('\\', '/')
normpath = os.path.normpath(normpath)
# Path normalization should have been done by the sending
# minion but we can't guarantee it. Re-do it here.
normpath = os.path.normpath(os.path.join(*load['path']))
# Ensure that this safety check is done after the path
# have been normalized.
if os.path.isabs(normpath) or '../' in load['path']:
# Can overwrite master files!!
return False
cpath = os.path.join(
self.opts['cachedir'],
'minions',
load['id'],
'files',
normpath)
# One last safety check here
if not os.path.normpath(cpath).startswith(self.opts['cachedir']):
log.warning('Attempt to write received file outside of master cache '
'directory! Requested file write: {0}. Access denied.'.format(cpath))
return False
cdir = os.path.dirname(cpath)
if not os.path.isdir(cdir):
try:
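The hardened _file_recv path handling layers several checks: require load['path'] to be a list of segments, reject absolute or parent-directory components, normalize, re-check after normalization, and finally confirm the resolved target still sits under the master's cachedir. A simplified standalone sketch of the same idea (not the AESFuncs code itself):

    import os

    def safe_cache_path(cachedir, minion_id, path_list):
        if not path_list:
            return None
        # Reject absolute segments and '..' components before normalizing.
        if any(os.path.isabs(seg) or '..' in seg.split('/')
               for seg in path_list):
            return None
        normpath = os.path.normpath(os.path.join(*path_list))
        if os.path.isabs(normpath) or normpath.startswith('..'):
            return None
        cpath = os.path.join(cachedir, 'minions', minion_id, 'files', normpath)
        # Last line of defense: the result must stay inside the cache dir.
        if not os.path.normpath(cpath).startswith(cachedir):
            return None
        return cpath

    assert safe_cache_path('/var/cache/salt', 'web1', ['etc', 'hosts'])
    assert safe_cache_path('/var/cache/salt', 'web1', ['..', 'shadow']) is None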

View File

@@ -788,9 +788,19 @@ def push(path, keep_symlinks=False, upload_path=None, remove_source=False):
load_path = upload_path.lstrip(os.sep)
else:
load_path = path.lstrip(os.sep)
# Normalize the path. This does not eliminate
# the possibility that relative entries will still be present
load_path_normal = os.path.normpath(load_path)
# If this is Windows and a drive letter is present, remove it
load_path_split_drive = os.path.splitdrive(load_path_normal)[1:]
# Finally, split the remaining path into a list for delivery to the master
load_path_list = os.path.split(load_path_split_drive)
load = {'cmd': '_file_recv',
'id': __opts__['id'],
'path': load_path,
'path': load_path_list,
'tok': auth.gen_token('salt')}
channel = salt.transport.Channel.factory(__opts__)
with salt.utils.fopen(path, 'rb') as fp_:
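With this change the minion sends load['path'] as a list of pieces so the master can vet each one. Two stdlib subtleties are worth noting: os.path.splitdrive() returns a (drive, tail) pair, so indexing it with [1:] yields a one-element tuple rather than a string, and os.path.split() returns only a (head, tail) pair, not a full segment list. A hedged sketch of one way to produce a clean segment list (not necessarily the exact shape Salt settled on):

    import os

    def path_to_segments(path):
        # Strip the leading separator, normalize away '.' entries,
        # drop any Windows drive letter, then split into segments.
        rel = os.path.normpath(path.lstrip(os.sep))
        rel = os.path.splitdrive(rel)[1]
        return [seg for seg in rel.split(os.sep) if seg]

    print(path_to_segments('/var/log/salt/minion'))
    # -> ['var', 'log', 'salt', 'minion']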

View File

@@ -87,9 +87,18 @@ def latest_version(*names, **kwargs):
# get latest installed version of package
if name in installed_pkgs:
log.trace('Sorting out the latest available version of {0}'.format(name))
latest_installed = sorted(installed_pkgs[name], cmp=_reverse_cmp_pkg_versions).pop()
log.debug('Latest installed version of package {0} is {1}'.format(name, latest_installed))
log.trace('Determining latest installed version of %s', name)
try:
latest_installed = sorted(
installed_pkgs[name], cmp=_reverse_cmp_pkg_versions).pop()
except IndexError:
log.warning(
'%s was empty in pkg.list_pkgs return data, this is '
'probably a bug in list_pkgs', name
)
else:
log.debug('Latest installed version of %s is %s',
name, latest_installed)
# get latest available (from winrepo_dir) version of package
pkg_info = _get_package_info(name)
@@ -1100,7 +1109,10 @@ def _reverse_cmp_pkg_versions(pkg1, pkg2):
def _get_latest_pkg_version(pkginfo):
if len(pkginfo) == 1:
return next(six.iterkeys(pkginfo))
return sorted(pkginfo, cmp=_reverse_cmp_pkg_versions).pop()
try:
return sorted(pkginfo, cmp=_reverse_cmp_pkg_versions).pop()
except IndexError:
return ''
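Both hunks here guard the same failure mode: sorted(...).pop() raises IndexError on an empty sequence, so the empty case is trapped instead of crashing the module. A minimal sketch of the pattern; the cmp= keyword used above is Python 2 only, so this version routes a comparator through functools.cmp_to_key, and _cmp_versions is a toy stand-in for _reverse_cmp_pkg_versions:

    import functools

    def _cmp_versions(v1, v2):
        # Positive if v1 sorts after v2 (toy string comparison).
        return (v1 > v2) - (v1 < v2)

    def latest_or_empty(versions):
        try:
            return sorted(versions,
                          key=functools.cmp_to_key(_cmp_versions)).pop()
        except IndexError:
            return ''

    assert latest_or_empty(['1.0', '1.2', '1.1']) == '1.2'
    assert latest_or_empty([]) == ''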
def compare_versions(ver1='', oper='==', ver2=''):

View File

@@ -100,6 +100,18 @@ class _Zypper(object):
self.__no_lock = False
self.__no_raise = False
self.__refresh = False
self.__ignore_repo_failure = False
def __call__(self, *args, **kwargs):
'''
:param args:
:param kwargs:
:return:
'''
# Ignore exit code for 106 (repo is not available)
if 'no_repo_failure' in kwargs:
self.__ignore_repo_failure = kwargs['no_repo_failure']
return self
def __getattr__(self, item):
'''
@@ -275,7 +287,7 @@ class _Zypper(object):
__salt__['event.fire_master']({'success': not len(self.error_msg),
'info': self.error_msg or 'Zypper has been released'},
self.TAG_RELEASED)
if self.error_msg and not self.__no_raise:
if self.error_msg and not self.__no_raise and not self.__ignore_repo_failure:
raise CommandExecutionError('Zypper command failure: {0}'.format(self.error_msg))
return self._is_xml_mode() and dom.parseString(self.__call_result['stdout']) or self.__call_result['stdout']
@@ -811,6 +823,7 @@ def mod_repo(repo, **kwargs):
cmd_opt = global_cmd_opt + ['mr'] + cmd_opt + [repo]
__zypper__.refreshable.xml.call(*cmd_opt)
comment = None
if call_refresh:
# when used with "zypper ar --refresh" or "zypper mr --refresh"
# --gpg-auto-import-keys is not doing anything
@@ -818,11 +831,13 @@ def mod_repo(repo, **kwargs):
refresh_opts = global_cmd_opt + ['refresh'] + [repo]
__zypper__.xml.call(*refresh_opts)
elif not added and not cmd_opt:
raise CommandExecutionError(
'Specified arguments did not result in modification of repo'
)
comment = 'Specified arguments did not result in modification of repo'
return get_repo(repo)
repo = get_repo(repo)
if comment:
repo['comment'] = comment
return repo
def refresh_db():
@@ -862,6 +877,7 @@ def install(name=None,
downloadonly=None,
skip_verify=False,
version=None,
ignore_repo_failure=False,
**kwargs):
'''
Install the passed package(s), add refresh=True to force a 'zypper refresh'
@@ -928,6 +944,10 @@ def install(name=None,
salt '*' pkg.install sources='[{"foo": "salt://foo.rpm"},{"bar": "salt://bar.rpm"}]'
ignore_repo_failure
Zypper returns error code 106 if one of the repositories are not available for various reasons.
In case to set strict check, this parameter needs to be set to True. Default: False.
Returns a dict containing the new package names and versions::
@@ -999,7 +1019,7 @@ def install(name=None,
while targets:
cmd = cmd_install + targets[:500]
targets = targets[500:]
for line in __zypper__.call(*cmd).splitlines():
for line in __zypper__(no_repo_failure=ignore_repo_failure).call(*cmd).splitlines():
match = re.match(r"^The selected package '([^']+)'.+has lower version", line)
if match:
downgrades.append(match.group(1))
@@ -1007,7 +1027,7 @@ def install(name=None,
while downgrades:
cmd = cmd_install + ['--force'] + downgrades[:500]
downgrades = downgrades[500:]
__zypper__.call(*cmd)
__zypper__(no_repo_failure=ignore_repo_failure).call(*cmd)
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
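The new __call__ on _Zypper returns self after stashing per-invocation flags, which is what makes the chained form __zypper__(no_repo_failure=ignore_repo_failure).call(*cmd) work: passing ignore_repo_failure=True lets install() tolerate zypper's exit code 106 (repository not available) instead of raising. A stripped-down sketch of the pattern (class and names invented, not the _Zypper implementation):

    class Runner(object):
        def __init__(self):
            self._ignore_repo_failure = False

        def __call__(self, **kwargs):
            # Stash per-invocation flags, then return self so the
            # caller can chain straight into .call().
            self._ignore_repo_failure = kwargs.get('no_repo_failure', False)
            return self

        def call(self, *args):
            rc = 106  # pretend the backend reported "repo not available"
            if rc == 106 and self._ignore_repo_failure:
                return ''  # tolerated: treat the run as empty output
            raise RuntimeError('command failed with exit code {0}'.format(rc))

    runner = Runner()
    print(runner(no_repo_failure=True).call('install', 'vim'))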

View File

@@ -89,7 +89,18 @@ class CacheDisk(CacheDict):
def __init__(self, ttl, path, *args, **kwargs):
super(CacheDisk, self).__init__(ttl, *args, **kwargs)
self._path = path
self._dict = self._read()
self._dict = {}
self._read()
def _enforce_ttl_key(self, key):
'''
Enforce the TTL to a specific key, delete if its past TTL
'''
if key not in self._key_cache_time:
return
if time.time() - self._key_cache_time[key] > self._ttl:
del self._key_cache_time[key]
self._dict.__delitem__(key)
def __contains__(self, key):
self._enforce_ttl_key(key)
@@ -111,16 +122,33 @@ class CacheDisk(CacheDict):
# Do the same as the parent but also persist
self._write()
def __delitem__(self, key):
'''
Make sure to remove the key cache time
'''
del self._key_cache_time[key]
self._dict.__delitem__(key)
# Do the same as the parent but also persist
self._write()
def _read(self):
'''
Read in from disk
'''
if not HAS_MSGPACK or not os.path.exists(self._path):
return {}
return
with salt.utils.fopen(self._path, 'r') as fp_:
cache = msgpack.load(fp_)
log.debug('Disk cache retrieved: {0}'.format(cache))
return cache
if "CacheDisk_cachetime" in cache: # new format
self._dict = cache["CacheDisk_data"]
self._key_cache_time = cache["CacheDisk_cachetime"]
else: # old format
self._dict = cache
timestamp = os.path.getmtime(self._path)
for key in self._dict.keys():
self._key_cache_time[key] = timestamp
if log.isEnabledFor(logging.DEBUG):
log.debug('Disk cache retrieved: {0}'.format(cache))
def _write(self):
'''
@@ -131,7 +159,11 @@ class CacheDisk(CacheDict):
# TODO Add check into preflight to ensure dir exists
# TODO Dir hashing?
with salt.utils.fopen(self._path, 'w+') as fp_:
msgpack.dump(self._dict, fp_)
cache = {
"CacheDisk_data": self._dict,
"CacheDisk_cachetime": self._key_cache_time
}
msgpack.dump(cache, fp_)
class CacheCli(object):
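The on-disk payload changes from a bare data dict to an envelope carrying both the data and the per-key timestamps, and _read() accepts either shape so caches written by older releases still load (old-format keys inherit the file's mtime). A rough sketch of that read path, assuming the msgpack package (raw=False decodes keys as str on Python 3):

    import os

    import msgpack

    def read_cache(path):
        # Returns (data, key_times); tolerates the pre-envelope format.
        if not os.path.exists(path):
            return {}, {}
        with open(path, 'rb') as fp_:
            cache = msgpack.load(fp_, raw=False)
        if 'CacheDisk_cachetime' in cache:  # new envelope format
            return cache['CacheDisk_data'], cache['CacheDisk_cachetime']
        # old format: every key inherits the file's modification time
        mtime = os.path.getmtime(path)
        return cache, {key: mtime for key in cache}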

View File

@@ -10,6 +10,7 @@ sock_dir: .salt-unix
open_mode: True
syndic_master: localhost
fileserver_list_cache_time: 0
file_recv: True
pillar_opts: True
peer:
'.*':

View File

@@ -4,6 +4,7 @@
from __future__ import absolute_import
import os
import hashlib
import tempfile
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
@@ -322,6 +323,22 @@ class CPModuleTest(integration.ModuleCase):
finally:
os.unlink(tgt)
def test_push(self):
log_to_xfer = os.path.join(tempfile.gettempdir(), 'salt-runtests.log')
try:
self.run_function('cp.push', log_to_xfer)
tgt_cache_file = os.path.join(
integration.TMP,
'master-minion-root',
'cache',
'minions',
'minion',
'files',
tempfile.gettempdir(),
'salt-runtests.log')
self.assertTrue(os.path.isfile(tgt_cache_file), 'File was not cached on the master')
finally:
os.unlink(tgt_cache_file)
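One subtlety in the expected-path construction above: os.path.join() discards everything before an absolute component, so joining tempfile.gettempdir() (an absolute path) into the middle of the expected cache path does not nest it the way the argument list suggests. A quick demonstration:

    import os
    import tempfile

    # An absolute component resets the join:
    print(os.path.join('/srv/cache', 'files', tempfile.gettempdir(), 'x.log'))
    # Typically prints '/tmp/x.log', not '/srv/cache/files/tmp/x.log'.

    # Stripping the leading separator first keeps the nesting:
    print(os.path.join('/srv/cache', 'files',
                       tempfile.gettempdir().lstrip(os.sep), 'x.log'))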
if __name__ == '__main__':
from integration import run_tests

View File

@@ -73,7 +73,7 @@ class ZypperTestCase(TestCase):
self.zypper_patcher_config = {
'_get_configured_repos': Mock(side_effect=side_effect),
'__zypper__': Mock(),
'get_repo': Mock()
'get_repo': Mock(return_value={})
}
def test_list_upgrades(self):
@@ -493,17 +493,8 @@
'salt.modules.zypper', **self.zypper_patcher_config)
with zypper_patcher:
with self.assertRaisesRegexp(
Exception,
'Specified arguments did not result in modification of repo'
):
zypper.mod_repo(name, **{'url': url})
with self.assertRaisesRegexp(
Exception,
'Specified arguments did not result in modification of repo'
):
zypper.mod_repo(name, **{'url': url, 'gpgautoimport': 'a'})
self.assertEqual(zypper.mod_repo(name, **{'url': url}),
{'comment': 'Specified arguments did not result in modification of repo'})
zypper.__zypper__.xml.call.assert_not_called()
zypper.__zypper__.refreshable.xml.call.assert_not_called()
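This test tracks the API change in mod_repo: a call that modifies nothing now reports via a 'comment' key on the returned repo dict instead of raising CommandExecutionError, so the assertRaisesRegexp blocks collapse into a plain equality check. A minimal illustration of testing both styles (toy function, not the Salt module; assertRaisesRegex is the Python 3 spelling of assertRaisesRegexp):

    import unittest

    def mod_repo(strict=False):
        # Toy stand-in: the old behaviour raised, the new one annotates.
        if strict:
            raise ValueError('did not result in modification of repo')
        return {'comment': 'Specified arguments did not result in '
                           'modification of repo'}

    class ModRepoTest(unittest.TestCase):
        def test_old_style_raises(self):
            with self.assertRaisesRegex(ValueError, 'modification of repo'):
                mod_repo(strict=True)

        def test_new_style_comment(self):
            self.assertIn('did not result in modification',
                          mod_repo()['comment'])

    if __name__ == '__main__':
        unittest.main()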

View File

@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
'''
    tests.unit.utils.disk_cache_test
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Test the salt disk cache objects
'''

# Import python libs
from __future__ import absolute_import
import os.path
import shutil
import tempfile
import time

# Import Salt Testing libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import salt libs
from salt.utils import cache


class CacheDiskTestCase(TestCase):

    def test_everything(self):
        '''
        Make sure you can instantiate, add, update, remove, expire
        '''
        try:
            tmpdir = tempfile.mkdtemp()
            path = os.path.join(tmpdir, 'CacheDisk_test')

            # test instantiation
            cd = cache.CacheDisk(0.1, path)
            self.assertIsInstance(cd, cache.CacheDisk)

            # test to make sure it looks like a dict
            self.assertNotIn('foo', cd)
            cd['foo'] = 'bar'
            self.assertIn('foo', cd)
            self.assertEqual(cd['foo'], 'bar')
            del cd['foo']
            self.assertNotIn('foo', cd)

            # test persistence
            cd['foo'] = 'bar'
            cd2 = cache.CacheDisk(0.1, path)
            self.assertIn('foo', cd2)
            self.assertEqual(cd2['foo'], 'bar')

            # test ttl
            time.sleep(0.2)
            self.assertNotIn('foo', cd)
            self.assertNotIn('foo', cd2)
        finally:
            shutil.rmtree(tmpdir, ignore_errors=True)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(CacheDiskTestCase, needs_daemon=False)