Merge pull request #43011 from rallytime/merge-develop
[develop] Merge forward from 2017.7 to develop
Commit: c35706fa3f
@@ -4,6 +4,13 @@ Salt 2017.7.1 Release Notes

Version 2017.7.1 is a bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.

Security Fix
============

CVE-2017-12791 Maliciously crafted minion IDs can cause unwanted directory traversals on the Salt-master

Correct a flaw in minion id validation which could allow certain minions to authenticate to a master despite not having the correct credentials. To exploit the vulnerability, an attacker must create a salt-minion with an ID containing characters that will cause a directory traversal. Credit for discovering the security flaw goes to: Vernhk@qq.com

Changes for v2017.7.0..v2017.7.1
--------------------------------
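To make the traversal risk concrete, here is a small, self-contained Python sketch; the pki path and minion ID below are made up for illustration and are not taken from the fix itself. It shows how an ID containing ".." components resolves to a location outside the directory the master intends to use:

    import os

    # Hypothetical master-side layout; the real master stores accepted minion
    # keys under its pki_dir. A crafted ID with ".." components escapes it.
    pki_dir = '/etc/salt/pki/master/minions'
    crafted_id = '../../../../../tmp/evil'

    resolved = os.path.normpath(os.path.join(pki_dir, crafted_id))
    print(resolved)  # /tmp/evil -- outside /etc/salt/pki/master/minions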
@@ -383,8 +383,8 @@ Section -Post
    nsExec::Exec "nssm.exe set salt-minion Description Salt Minion from saltstack.com"
    nsExec::Exec "nssm.exe set salt-minion Start SERVICE_AUTO_START"
    nsExec::Exec "nssm.exe set salt-minion AppNoConsole 1"

    RMDir /R "$INSTDIR\var\cache\salt" ; removing cache from old version
    nsExec::Exec "nssm.exe set salt-minion AppStopMethodConsole 24000"
    nsExec::Exec "nssm.exe set salt-minion AppStopMethodWindow 2000"

    Call updateMinionConfig
salt/cache/redis_cache.py (vendored, 7 changed lines)
@@ -162,7 +162,7 @@ from salt.exceptions import SaltCacheError

__virtualname__ = 'redis'
__func_alias__ = {
    'list_': 'list'
    'ls': 'list'
}

log = logging.getLogger(__file__)
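For context, __func_alias__ is the Salt loader convention for publishing a module function under a name that would otherwise shadow a Python builtin (here, exposing the bank listing as cache.list). A simplified, hypothetical sketch of the idea, not the actual loader code:

    # apply_func_alias() is an illustrative helper, not part of Salt.
    def apply_func_alias(module_funcs, func_alias):
        # Functions named list_ or ls (to avoid shadowing the builtin)
        # are exposed under their aliased public name.
        return {func_alias.get(name, name): func for name, func in module_funcs.items()}

    def list_(bank):
        return ['key1', 'key2']

    exposed = apply_func_alias({'list_': list_}, {'list_': 'list'})
    print(sorted(exposed))  # ['list']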
@@ -196,6 +196,9 @@ def __virtual__():
# helper functions -- will not be exported
# -----------------------------------------------------------------------------

def init_kwargs(kwargs):
    return {}


def _get_redis_cache_opts():
    '''
@@ -475,7 +478,7 @@ def flush(bank, key=None):
    return True


def list_(bank):
def ls(bank):
    '''
    Lists entries stored in the specified bank.
    '''
@@ -1663,7 +1663,8 @@ DEFAULT_PROXY_MINION_OPTS = {
    'log_file': os.path.join(salt.syspaths.LOGS_DIR, 'proxy'),
    'add_proxymodule_to_opts': False,
    'proxy_merge_grains_in_module': True,
    'append_minionid_config_dirs': ['cachedir', 'pidfile', 'default_include'],
    'extension_modules': os.path.join(salt.syspaths.CACHE_DIR, 'proxy', 'extmods'),
    'append_minionid_config_dirs': ['cachedir', 'pidfile', 'default_include', 'extension_modules'],
    'default_include': 'proxy.d/*.conf',

    # By default, proxies will preserve the connection.
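The point of adding 'extension_modules' to append_minionid_config_dirs is that each proxy minion gets its own extension-modules cache path. A hedged sketch of the behaviour; the helper name and paths are illustrative and not Salt's actual implementation:

    import os

    def append_minion_id(opts, minion_id):
        # For every option listed in append_minionid_config_dirs, append the
        # minion ID so proxy minions sharing a host do not collide on disk.
        for key in opts.get('append_minionid_config_dirs', []):
            if isinstance(opts.get(key), str):
                opts[key] = os.path.join(opts[key], minion_id)
        return opts

    opts = {
        'cachedir': '/var/cache/salt/proxy',
        'extension_modules': '/var/cache/salt/proxy/extmods',
        'append_minionid_config_dirs': ['cachedir', 'extension_modules'],
    }
    print(append_minion_id(dict(opts), 'edge-router-01')['extension_modules'])
    # /var/cache/salt/proxy/extmods/edge-router-01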
@@ -1275,7 +1275,7 @@ class Minion(MinionBase):
        ret = yield channel.send(load, timeout=timeout)
        raise tornado.gen.Return(ret)

    def _fire_master(self, data=None, tag=None, events=None, pretag=None, timeout=60, sync=True):
    def _fire_master(self, data=None, tag=None, events=None, pretag=None, timeout=60, sync=True, timeout_handler=None):
        '''
        Fire an event on the master, or drop message if unable to send.
        '''
@@ -1294,10 +1294,6 @@ class Minion(MinionBase):
        else:
            return

        def timeout_handler(*_):
            log.info(u'fire_master failed: master could not be contacted. Request timed out.')
            return True

        if sync:
            try:
                self._send_req_sync(load, timeout)
@@ -1308,6 +1304,12 @@ class Minion(MinionBase):
                log.info(u'fire_master failed: %s', traceback.format_exc())
                return False
        else:
            if timeout_handler is None:
                def handle_timeout(*_):
                    log.info(u'fire_master failed: master could not be contacted. Request timed out.')
                    return True
                timeout_handler = handle_timeout

            with tornado.stack_context.ExceptionStackContext(timeout_handler):
                self._send_req_async(load, timeout, callback=lambda f: None)  # pylint: disable=unexpected-keyword-arg
        return True
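The shape of this change: _fire_master now accepts an optional timeout_handler and only builds its default logging handler when none is supplied, so callers such as the ping loop further down can pass their own. A framework-free Python sketch of that pattern; the names are illustrative, and the real code wires the handler into tornado's ExceptionStackContext rather than a try/except:

    import logging

    log = logging.getLogger(__name__)

    def fire_event(send, timeout_handler=None):
        # If the caller did not supply a handler, fall back to one that just
        # logs the timeout and reports it as handled.
        if timeout_handler is None:
            def handle_timeout(*_):
                log.info('fire_master failed: master could not be contacted. Request timed out.')
                return True
            timeout_handler = handle_timeout
        try:
            send()
        except TimeoutError as exc:
            if not timeout_handler(exc):
                raise
        return True

    def flaky_send():
        raise TimeoutError('no response from master')

    print(fire_event(flaky_send))  # True -- timeout swallowed by the default handler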
@@ -2027,8 +2029,9 @@ class Minion(MinionBase):
        elif tag.startswith(u'_minion_mine'):
            self._mine_send(tag, data)
        elif tag.startswith(u'fire_master'):
            log.debug(u'Forwarding master event tag=%s', data[u'tag'])
            self._fire_master(data[u'data'], data[u'tag'], data[u'events'], data[u'pretag'])
            if self.connected:
                log.debug(u'Forwarding master event tag=%s', data[u'tag'])
                self._fire_master(data[u'data'], data[u'tag'], data[u'events'], data[u'pretag'])
        elif tag.startswith(master_event(type=u'disconnected')) or tag.startswith(master_event(type=u'failback')):
            # if the master disconnect event is for a different master, raise an exception
            if tag.startswith(master_event(type=u'disconnected')) and data[u'master'] != self.opts[u'master']:
@@ -2249,13 +2252,15 @@ class Minion(MinionBase):
        if ping_interval > 0 and self.connected:
            def ping_master():
                try:
                    if not self._fire_master(u'ping', u'minion_ping'):
                    def ping_timeout_handler(*_):
                        if not self.opts.get(u'auth_safemode', True):
                            log.error(u'** Master Ping failed. Attempting to restart minion**')
                            delay = self.opts.get(u'random_reauth_delay', 5)
                            log.info(u'delaying random_reauth_delay %ss', delay)
                            # regular sys.exit raises an exception -- which isn't sufficient in a thread
                            os._exit(salt.defaults.exitcodes.SALT_KEEPALIVE)

                    self._fire_master('ping', 'minion_ping', sync=False, timeout_handler=ping_timeout_handler)
                except Exception:
                    log.warning(u'Attempt to ping master failed.', exc_on_loglevel=logging.DEBUG)
            self.periodic_callbacks[u'ping'] = tornado.ioloop.PeriodicCallback(ping_master, ping_interval * 1000, io_loop=self.io_loop)
@@ -2270,7 +2275,7 @@ class Minion(MinionBase):
                except Exception:
                    log.critical(u'The beacon errored: ', exc_info=True)
                if beacons and self.connected:
                    self._fire_master(events=beacons)
                    self._fire_master(events=beacons, sync=False)

            self.periodic_callbacks[u'beacons'] = tornado.ioloop.PeriodicCallback(handle_beacons, loop_interval * 1000, io_loop=self.io_loop)
@@ -302,6 +302,11 @@ def get_community_names():
    # Windows SNMP service GUI.
    if isinstance(current_values, list):
        for current_value in current_values:

            # Ignore error values
            if not isinstance(current_value, dict):
                continue

            permissions = str()
            for permission_name in _PERMISSION_TYPES:
                if current_value['vdata'] == _PERMISSION_TYPES[permission_name]:
@@ -67,6 +67,17 @@ provider: ``napalm_base``

    .. versionadded:: 2017.7.1

multiprocessing: ``False``
    Overrides the :conf_minion:`multiprocessing` option, per proxy minion.
    The ``multiprocessing`` option must be turned off for SSH-based proxies.
    However, some NAPALM drivers (e.g. Arista, NX-OS) are not SSH-based.
    Because multiple proxy minions may share the same configuration file,
    this option allows ``multiprocessing`` to be configured per proxy minion.

    .. versionadded:: 2017.7.2


.. _`NAPALM Read the Docs page`: https://napalm.readthedocs.io/en/latest/#supported-network-operating-systems
.. _`optional arguments`: http://napalm.readthedocs.io/en/latest/support/index.html#list-of-supported-optional-arguments
@@ -17,16 +17,28 @@ import salt.netapi


def mk_token(**load):
    '''
    r'''
    Create an eauth token using provided credentials

    Non-root users may specify an expiration date -- if allowed via the
    :conf_master:`token_expire_user_override` setting -- by passing an
    additional ``token_expire`` param. This overrides the
    :conf_master:`token_expire` setting of the same name in the Master config
    and is how long a token should live in seconds.

    CLI Example:

    .. code-block:: shell

        salt-run auth.mk_token username=saltdev password=saltdev eauth=auto
        salt-run auth.mk_token username=saltdev password=saltdev eauth=auto \\

        # Create a token valid for three years.
        salt-run auth.mk_token username=saltdev password=saltdev eauth=auto \
            token_expire=94670856

        # Calculate the number of seconds using expr.
        salt-run auth.mk_token username=saltdev password=saltdev eauth=auto \
            token_expire=$(expr \( 365 \* 24 \* 60 \* 60 \) \* 3)
    '''
    # This will hang if the master daemon is not running.
    netapi = salt.netapi.NetapiClient(__opts__)
@@ -78,7 +78,7 @@ def recursive_copy(source, dest):
    (identical to cp -r on a unix machine)
    '''
    for root, _, files in os.walk(source):
        path_from_source = root.replace(source, '').lstrip('/')
        path_from_source = root.replace(source, '').lstrip(os.sep)
        target_directory = os.path.join(dest, path_from_source)
        if not os.path.exists(target_directory):
            os.makedirs(target_directory)
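Why lstrip(os.sep) instead of lstrip('/'): on Windows the leftover separator is a backslash, and a fragment that still starts with one makes os.path.join discard the destination prefix. A small demonstration, using ntpath to emulate Windows path handling on any platform; the paths are made up:

    import ntpath

    # What root.replace(source, '') can leave behind on Windows.
    leftover = '\\subdir'

    print(ntpath.join('C:\\dest', leftover))               # C:\subdir  (escapes dest)
    print(ntpath.join('C:\\dest', leftover.lstrip('\\')))  # C:\dest\subdir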
@@ -243,6 +243,11 @@ def get_device_opts(opts, salt_obj=None):
    network_device = {}
    # by default, look in the proxy config details
    device_dict = opts.get('proxy', {}) or opts.get('napalm', {})
    if opts.get('proxy') or opts.get('napalm'):
        opts['multiprocessing'] = device_dict.get('multiprocessing', False)
        # Most NAPALM drivers are SSH-based, so multiprocessing should default to False.
        # But the user is allowed to set a different value for multiprocessing, which will
        # override the opts.
    if salt_obj and not device_dict:
        # get the connection details from the opts
        device_dict = salt_obj['config.merge']('napalm')
@@ -134,8 +134,10 @@ def get_pidfile(pidfile):
    '''
    with salt.utils.files.fopen(pidfile) as pdf:
        pid = pdf.read()

    return int(pid)
    if pid:
        return int(pid)
    else:
        return


def clean_proc(proc, wait_for_kill=10):
@@ -482,12 +482,21 @@ def clean_path(root, path, subdir=False):
    return ''


def clean_id(id_):
    '''
    Returns if the passed id is clean.
    '''
    if re.search(r'\.\.\{sep}'.format(sep=os.sep), id_):
        return False
    return True


def valid_id(opts, id_):
    '''
    Returns if the passed id is valid
    '''
    try:
        return bool(clean_path(opts['pki_dir'], id_))
        return bool(clean_path(opts['pki_dir'], id_)) and clean_id(id_)
    except (AttributeError, KeyError, TypeError) as e:
        return False
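Assuming these helpers are importable from salt.utils.verify (the module this hunk patches) on a POSIX master, the new behaviour can be exercised directly; the pki_dir value below is only an example:

    from salt.utils.verify import clean_id, valid_id

    opts = {'pki_dir': '/etc/salt/pki/master'}

    print(clean_id('web01.example.com'))           # True  -- ordinary minion ID
    print(clean_id('../../../../tmp/evil'))        # False -- contains a ".." traversal
    print(valid_id(opts, '../../../../tmp/evil'))  # False -- now rejected by clean_id as well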
@@ -6,6 +6,7 @@

# Import Python libs
from __future__ import absolute_import
import logging
import pwd
import grp
import random
@@ -21,6 +22,8 @@ from salt.utils.pycrypto import gen_hash
# Import 3rd-party libs
from salt.ext.six.moves import range  # pylint: disable=import-error,redefined-builtin

log = logging.getLogger(__name__)


def gen_password():
    '''
@@ -99,6 +102,7 @@ class AuthTest(ShellCase):
        cmd = ('-a pam "*" test.ping '
               '--username {0} --password {1}'.format(self.userA, password))
        resp = self.run_salt(cmd)
        log.debug('resp = %s', resp)
        self.assertTrue(
            'minion:' in resp
        )
@@ -6,6 +6,7 @@
# Import Python libs
from __future__ import absolute_import
import errno
import os

# Import Salt Testing libs
from tests.support.mock import patch, Mock
@@ -38,7 +39,7 @@ class FileclientTestCase(TestCase):
        for exists in range(2):
            with patch('os.makedirs', self._fake_makedir()):
                with Client(self.opts)._cache_loc('testfile') as c_ref_itr:
                    assert c_ref_itr == '/__test__/files/base/testfile'
                    assert c_ref_itr == os.sep + os.sep.join(['__test__', 'files', 'base', 'testfile'])

    def test_cache_raises_exception_on_non_eexist_ioerror(self):
        '''
@@ -5,6 +5,7 @@

# Import python libs
from __future__ import absolute_import
import textwrap

# Import Salt Libs
from yaml.constructor import ConstructorError
@@ -36,12 +37,11 @@ class YamlLoaderTestCase(TestCase):
        '''
        Test parsing an ordinary path
        '''

        self.assertEqual(
            self._render_yaml(b'''
p1:
  - alpha
  - beta'''),
            self._render_yaml(textwrap.dedent('''\
                p1:
                  - alpha
                  - beta''')),
            {'p1': ['alpha', 'beta']}
        )
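For reference, the reason these tests switch from flush-left byte literals to textwrap.dedent: the YAML can be written at the natural source indentation and still parse flush-left. A quick standalone check, using PyYAML's safe_load rather than Salt's own loader for brevity:

    import textwrap
    import yaml  # PyYAML, standing in for salt.utils.yamlloader here

    doc = textwrap.dedent('''\
        p1:
          - alpha
          - beta''')

    print(yaml.safe_load(doc))  # {'p1': ['alpha', 'beta']}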
@@ -49,38 +49,37 @@ p1:
        '''
        Test YAML anchors
        '''

        # Simple merge test
        self.assertEqual(
            self._render_yaml(b'''
p1: &p1
  v1: alpha
p2:
  <<: *p1
  v2: beta'''),
            self._render_yaml(textwrap.dedent('''\
                p1: &p1
                  v1: alpha
                p2:
                  <<: *p1
                  v2: beta''')),
            {'p1': {'v1': 'alpha'}, 'p2': {'v1': 'alpha', 'v2': 'beta'}}
        )

        # Test that keys/nodes are overwritten
        self.assertEqual(
            self._render_yaml(b'''
p1: &p1
  v1: alpha
p2:
  <<: *p1
  v1: new_alpha'''),
            self._render_yaml(textwrap.dedent('''\
                p1: &p1
                  v1: alpha
                p2:
                  <<: *p1
                  v1: new_alpha''')),
            {'p1': {'v1': 'alpha'}, 'p2': {'v1': 'new_alpha'}}
        )

        # Test merging of lists
        self.assertEqual(
            self._render_yaml(b'''
p1: &p1
  v1: &v1
    - t1
    - t2
p2:
  v2: *v1'''),
            self._render_yaml(textwrap.dedent('''\
                p1: &p1
                  v1: &v1
                    - t1
                    - t2
                p2:
                  v2: *v1''')),
            {"p2": {"v2": ["t1", "t2"]}, "p1": {"v1": ["t1", "t2"]}}
        )
@@ -89,15 +88,27 @@ p2:
        Test that duplicates still throw an error
        '''
        with self.assertRaises(ConstructorError):
            self._render_yaml(b'''
p1: alpha
p1: beta''')
            self._render_yaml(textwrap.dedent('''\
                p1: alpha
                p1: beta'''))

        with self.assertRaises(ConstructorError):
            self._render_yaml(b'''
p1: &p1
  v1: alpha
p2:
  <<: *p1
  v2: beta
  v2: betabeta''')
            self._render_yaml(textwrap.dedent('''\
                p1: &p1
                  v1: alpha
                p2:
                  <<: *p1
                  v2: beta
                  v2: betabeta'''))

    def test_yaml_with_unicode_literals(self):
        '''
        Test proper loading of unicode literals
        '''
        self.assertEqual(
            self._render_yaml(textwrap.dedent('''\
                foo:
                  a: Д
                  b: {'a': u'\\u0414'}''')),
            {'foo': {'a': u'\u0414', 'b': {'a': u'\u0414'}}}
        )