Merge pull request #47123 from rallytime/merge-develop

[develop] Merge forward from 2018.3 to develop
Nicole Thomas 2018-04-17 16:46:37 -04:00 committed by GitHub
commit ac0656967c
23 changed files with 411 additions and 123 deletions

View File

@@ -139,7 +139,7 @@ blacklist, can be found below:
- web*
- 'mail\d+\.domain\.tld'
minionfs_whitelist:
minionfs_blacklist:
- web21
Potential Concerns

View File

@@ -3686,6 +3686,8 @@ def apply_minion_config(overrides=None,
'''
if defaults is None:
defaults = DEFAULT_MINION_OPTS
if overrides is None:
overrides = {}
opts = defaults.copy()
opts['__role'] = 'minion'
@@ -3694,7 +3696,7 @@ def apply_minion_config(overrides=None,
opts.update(overrides)
if 'environment' in opts:
if 'saltenv' in opts:
if opts['saltenv'] is not None:
log.warning(
'The \'saltenv\' and \'environment\' minion config options '
'cannot both be used. Ignoring \'environment\' in favor of '
@@ -3794,7 +3796,7 @@ def apply_minion_config(overrides=None,
if 'beacons' not in opts:
opts['beacons'] = {}
if (overrides or {}).get('ipc_write_buffer', '') == 'dynamic':
if overrides.get('ipc_write_buffer', '') == 'dynamic':
opts['ipc_write_buffer'] = _DFLT_IPC_WBUFFER
if 'ipc_write_buffer' not in overrides:
opts['ipc_write_buffer'] = 0
@@ -3883,6 +3885,8 @@ def apply_master_config(overrides=None, defaults=None):
'''
if defaults is None:
defaults = DEFAULT_MASTER_OPTS
if overrides is None:
overrides = {}
opts = defaults.copy()
opts['__role'] = 'master'
@@ -3891,7 +3895,7 @@ def apply_master_config(overrides=None, defaults=None):
opts.update(overrides)
if 'environment' in opts:
if 'saltenv' in opts:
if opts['saltenv'] is not None:
log.warning(
'The \'saltenv\' and \'environment\' master config options '
'cannot both be used. Ignoring \'environment\' in favor of '
@@ -3941,7 +3945,7 @@ def apply_master_config(overrides=None, defaults=None):
# Insert all 'utils_dirs' directories to the system path
insert_system_path(opts, opts['utils_dirs'])
if (overrides or {}).get('ipc_write_buffer', '') == 'dynamic':
if overrides.get('ipc_write_buffer', '') == 'dynamic':
opts['ipc_write_buffer'] = _DFLT_IPC_WBUFFER
if 'ipc_write_buffer' not in overrides:
opts['ipc_write_buffer'] = 0
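The two apply_*_config hunks above add a None guard for overrides, which is what lets the later ipc_write_buffer checks drop the defensive (overrides or {}) wrapper. A trimmed-down sketch of the pattern (illustrative only, not the real Salt function):

def apply_config(overrides=None, defaults=None):
    # Hypothetical, simplified sketch of the guard pattern added above.
    if defaults is None:
        defaults = {}
    if overrides is None:
        overrides = {}          # guard makes overrides.get(...) safe below
    opts = defaults.copy()
    opts.update(overrides)
    if overrides.get('ipc_write_buffer', '') == 'dynamic':
        opts['ipc_write_buffer'] = 'dynamic'  # stand-in for _DFLT_IPC_WBUFFER
    if 'ipc_write_buffer' not in overrides:
        opts['ipc_write_buffer'] = 0
    return opts

# With the guard in place, calling apply_config() with no overrides at all
# no longer risks an AttributeError on None.
print(apply_config())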

View File

@@ -28,7 +28,7 @@ from salt.ext.six.moves import zip # pylint: disable=import-error,redefined-bui
from salt.ext import six
try:
from M2Crypto import RSA, EVP
from M2Crypto import RSA, EVP, BIO
HAS_M2 = True
except ImportError:
HAS_M2 = False
@@ -206,7 +206,10 @@ def get_rsa_pub_key(path):
'''
log.debug('salt.crypt.get_rsa_pub_key: Loading public key')
if HAS_M2:
key = RSA.load_pub_key(path)
with salt.utils.files.fopen(path) as f:
data = f.read().replace(b'RSA ', '')
bio = BIO.MemoryBuffer(data)
key = RSA.load_pub_key_bio(bio)
else:
with salt.utils.files.fopen(path) as f:
key = RSA.importKey(f.read())
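For context on the replace() above: public keys exported by pycryptodome <= 3.4.6 carry a 'BEGIN RSA PUBLIC KEY' banner that M2Crypto's plain loader rejects, so stripping the literal 'RSA ' rewrites the banner into the generic form that RSA.load_pub_key_bio() accepts. An illustrative sketch (the PEM text below is made up):

pem = b'-----BEGIN RSA PUBLIC KEY-----\n...base64 key material...\n-----END RSA PUBLIC KEY-----\n'
normalized = pem.replace(b'RSA ', b'')
assert normalized.startswith(b'-----BEGIN PUBLIC KEY-----')
assert normalized.endswith(b'-----END PUBLIC KEY-----\n')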

View File

@@ -219,7 +219,7 @@ class SlackClient(object):
ret_groups[name]['aliases'].update(config.get('aliases', {}))
ret_groups[name]['default_target'].update(config.get('default_target', {}))
ret_groups[name]['targets'].update(config.get('targets', {}))
except IndexError:
except (IndexError, AttributeError):
log.warn("Couldn't use group %s. Check that targets is a dict and not a list", name)
log.debug('Got the groups: %s', ret_groups)
@@ -236,16 +236,16 @@ class SlackClient(object):
XXX: instead of using Caller, make the minion to use configurable so there could be some
restrictions placed on what pillars can be used.
'''
if pillar_name:
caller = salt.client.Caller()
pillar_groups = caller.cmd('pillar.get', pillar_name)
# pillar_groups = __salt__['pillar.get'](pillar_name, {})
log.debug('Got pillar groups %s from pillar %s', pillar_groups, pillar_name)
log.debug('pillar groups is %s', pillar_groups)
log.debug('pillar groups type is %s', type(pillar_groups))
if pillar_groups:
return pillar_groups
else:
return {}
pillar_groups = {}
return pillar_groups
def fire(self, tag, msg):
'''
@@ -351,7 +351,7 @@ class SlackClient(object):
# maybe there are aliases, so check on that
if cmdline[0] in permitted_group[1].get('aliases', {}).keys():
use_cmdline = self.commandline_to_list(permitted_group[1]['aliases'][cmdline[0]], '')
use_cmdline = self.commandline_to_list(permitted_group[1]['aliases'][cmdline[0]].get('cmd', ''), '')
else:
use_cmdline = cmdline
target = self.get_target(permitted_group, cmdline, use_cmdline)
@@ -416,6 +416,11 @@ class SlackClient(object):
'so we look for user in '
'the original message.')
user_id = m_data['message']['user']
elif 'comment' in m_data and 'user' in m_data['comment']:
log.debug('Comment was added, '
'so we look for user in '
'the comment.')
user_id = m_data['comment']['user']
else:
user_id = m_data.get('user')
channel_id = m_data.get('channel')
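The alias handling above now pulls the real command from a 'cmd' key, which implies each alias value is a mapping rather than a bare string. A hedged sketch of the group shape that lookup expects (all names below are invented for illustration):

groups = {
    'admins': {                                     # hypothetical group name
        'aliases': {
            'listjobs': {'cmd': 'jobs.list_jobs'},  # alias -> dict with a 'cmd' key
        },
        'default_target': {},
        'targets': {},
    },
}
cmd = groups['admins']['aliases']['listjobs'].get('cmd', '')  # 'jobs.list_jobs'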

View File

@@ -2456,9 +2456,8 @@ def get_server_id():
if py_ver >= (3, 3):
# Python 3.3 enabled hash randomization, so we need to shell out to get
# a reliable hash.
py_bin = 'python{0}.{1}'.format(*py_ver)
id_hash = __salt__['cmd.run'](
[py_bin, '-c', 'print(hash("{0}"))'.format(id_)],
[sys.executable, '-c', 'print(hash("{0}"))'.format(id_)],
env={'PYTHONHASHSEED': '0'}
)
try:
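Replacing the reconstructed 'pythonX.Y' name with sys.executable means the hash is computed by the same interpreter that is already running Salt, and PYTHONHASHSEED=0 keeps hash() of the id stable across runs. A standalone sketch of the idea (not the Salt module itself; the id string is made up):

import subprocess
import sys

out = subprocess.check_output(
    [sys.executable, '-c', 'print(hash("some-minion-id"))'],
    env={'PYTHONHASHSEED': '0'},
)
print(int(out))  # reproducible because the hash seed is pinned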

View File

@@ -90,6 +90,13 @@ def _p_to_cp(p):
except portage.exception.InvalidAtom:
pass
try:
ret = _porttree().dbapi.xmatch("match-all", p)
if ret:
return portage.cpv_getkey(ret[0])
except portage.exception.InvalidAtom:
pass
return None
@@ -110,6 +117,13 @@ def _cpv_to_cp(cpv):
except portage.exception.InvalidAtom:
pass
try:
ret = portage.cpv_getkey(cpv)
if ret:
return ret
except portage.exception.InvalidAtom:
pass
return cpv

View File

@@ -4815,7 +4815,7 @@ def check_file_meta(
if sfn:
try:
changes['diff'] = get_diff(
sfn, name, template=True, show_filenames=False)
name, sfn, template=True, show_filenames=False)
except CommandExecutionError as exc:
changes['diff'] = exc.strerror
else:

View File

@@ -12,6 +12,8 @@ This is an alternative to the ``ldap`` interface provided by the
'''
from __future__ import absolute_import, print_function, unicode_literals
import logging
import sys
available_backends = set()
try:
@@ -22,9 +24,9 @@ try:
available_backends.add('ldap')
except ImportError:
pass
import logging
import salt.utils.data
from salt.ext import six
import sys
log = logging.getLogger(__name__)
@@ -407,7 +409,10 @@ def add(connect_spec, dn, attributes):
if 'unicodePwd' in attributes:
attributes['unicodePwd'] = [_format_unicode_password(x) for x in attributes['unicodePwd']]
modlist = ldap.modlist.addModlist(attributes)
modlist = salt.utils.data.decode(
ldap.modlist.addModlist(attributes),
to_str=True
)
try:
l.c.add_s(dn, modlist)
except ldap.LDAPError as e:
@@ -507,6 +512,7 @@ def modify(connect_spec, dn, directives):
modlist[idx] = (mod[0], mod[1],
[_format_unicode_password(x) for x in mod[2]])
modlist = salt.utils.data.decode(modlist, to_str=True)
try:
l.c.modify_s(dn, modlist)
except ldap.LDAPError as e:
@@ -573,7 +579,10 @@ def change(connect_spec, dn, before, after):
if 'unicodePwd' in after:
after['unicodePwd'] = [_format_unicode_password(x) for x in after['unicodePwd']]
modlist = ldap.modlist.modifyModlist(before, after)
modlist = salt.utils.data.decode(
ldap.modlist.modifyModlist(before, after),
to_str=True
)
try:
l.c.modify_s(dn, modlist)
except ldap.LDAPError as e:
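The decode(..., to_str=True) wrappers added above convert every text value in the modlist to the native str type before it reaches python-ldap, which on Python 2 does not accept unicode values. A rough usage sketch with hypothetical attribute data:

import salt.utils.data

# Hypothetical modlist entries in python-ldap's (mod_op, attr, values) form.
modlist = [(0, u'description', [u'new value'])]
modlist = salt.utils.data.decode(modlist, to_str=True)
# Every string in the structure is now a native 'str' for the running Python.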

View File

@@ -46,6 +46,7 @@ import logging
import time
# Import Salt libs
import salt.utils.data
from salt.ext import six
from salt.exceptions import CommandExecutionError
@@ -140,7 +141,7 @@ def search(filter, # pylint: disable=C0103
if attrs == '': # Allow command line 'return all' attr override
attrs = None
elif attrs is None:
attrs = _config('attrs')
attrs = salt.utils.data.decode(_config('attrs'), to_str=True)
_ldap = _connect(**kwargs)
start = time.time()
log.debug(

View File

@@ -31,7 +31,7 @@ def __virtual__():
'''
if not salt.utils.platform.is_darwin():
return False, 'Must be run on macOS'
if not _LooseVersion(__grains__['osrelease']) >= salt.utils.stringutils.to_str('10.9'):
if _LooseVersion(__grains__['osrelease']) < salt.utils.stringutils.to_str('10.9'):
return False, 'Must be run on macOS 10.9 or newer'
return __virtualname__

View File

@@ -130,7 +130,8 @@ def _get_pip_bin(bin_env):
'pip{0}'.format(sys.version_info[0]),
'pip', 'pip-python']
)
if salt.utils.platform.is_windows() and six.PY2:
if salt.utils.platform.is_windows() and six.PY2 \
and isinstance(which_result, str):
which_result.encode('string-escape')
if which_result is None:
raise CommandNotFoundError('Could not find a `pip` binary')

View File

@@ -111,6 +111,13 @@ def _p_to_cp(p):
except portage.exception.InvalidAtom:
pass
try:
ret = _porttree().dbapi.xmatch("match-all", p)
if ret:
return portage.cpv_getkey(ret[0])
except portage.exception.InvalidAtom:
pass
return None

View File

@@ -49,6 +49,7 @@ import time
# Import Salt libs
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.serializers.configparser import deserialize
import salt.utils.dictupdate as dictupdate
import salt.utils.files
import salt.utils.path
@@ -4652,37 +4653,34 @@ def _writeAdminTemplateRegPolFile(admtemplate_data,
def _getScriptSettingsFromIniFile(policy_info):
'''
helper function to parse/read a GPO Startup/Shutdown script file
psscript.ini and script.ini file definitions are here
https://msdn.microsoft.com/en-us/library/ff842529.aspx
https://msdn.microsoft.com/en-us/library/dd303238.aspx
'''
_existingData = _read_regpol_file(policy_info['ScriptIni']['IniPath'])
_existingData = None
if os.path.isfile(policy_info['ScriptIni']['IniPath']):
with salt.utils.files.fopen(policy_info['ScriptIni']['IniPath'], 'rb') as fhr:
_existingData = fhr.read()
if _existingData:
_existingData = _existingData.split('\r\n')
script_settings = {}
this_section = None
for eLine in _existingData:
if eLine.startswith('[') and eLine.endswith(']'):
this_section = eLine.replace('[', '').replace(']', '')
log.debug('adding section %s', this_section)
if this_section:
script_settings[this_section] = {}
else:
if '=' in eLine:
log.debug('working with config line %s', eLine)
eLine = eLine.split('=')
if this_section in script_settings:
script_settings[this_section][eLine[0]] = eLine[1]
try:
_existingData = deserialize(_existingData.decode('utf-16-le').lstrip('\ufeff'))
log.debug('Have deserialized data %s', _existingData)
except Exception as error:
log.error('An error occurred attempting to deserialize data for %s', policy_info['Policy'])
raise CommandExecutionError(error)
if 'Section' in policy_info['ScriptIni'] and policy_info['ScriptIni']['Section'].lower() in [z.lower() for z in _existingData.keys()]:
if 'SettingName' in policy_info['ScriptIni']:
log.debug('Setting Name is in policy_info')
if policy_info['ScriptIni']['SettingName'] in script_settings[policy_info['ScriptIni']['Section']]:
log.debug('the value is set in the file')
return script_settings[policy_info['ScriptIni']['Section']][policy_info['ScriptIni']['SettingName']]
log.debug('Need to look for %s', policy_info['ScriptIni']['SettingName'])
if policy_info['ScriptIni']['SettingName'].lower() in [z.lower() for z in _existingData[policy_info['ScriptIni']['Section']].keys()]:
return _existingData[policy_info['ScriptIni']['Section']][policy_info['ScriptIni']['SettingName'].lower()]
else:
return None
elif policy_info['ScriptIni']['Section'] in script_settings:
log.debug('no setting name')
return script_settings[policy_info['ScriptIni']['Section']]
else:
log.debug('broad else')
return _existingData[policy_info['ScriptIni']['Section']]
else:
return None
return None
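The rewritten helper above reads the script .ini file as raw bytes, decodes it as UTF-16-LE, strips the BOM, and hands the text to the configparser-based deserializer instead of splitting lines by hand. A small sketch of that decode step (sample bytes are invented for illustration):

raw = b'\xff\xfe' + u'[Startup]\r\n0CmdLine=script.ps1\r\n'.encode('utf-16-le')
text = raw.decode('utf-16-le').lstrip(u'\ufeff')
assert text.startswith(u'[Startup]')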

View File

@@ -163,7 +163,7 @@ def present(name,
datasource[key] = None
if data == datasource:
ret['changes'] = None
ret['changes'] = {}
ret['comment'] = 'Data source {0} already up-to-date'.format(name)
return ret

View File

@@ -187,7 +187,7 @@ def present(name,
if ret['changes']:
ret['comment'] = 'Org {0} updated'.format(name)
else:
ret['changes'] = None
ret['changes'] = {}
ret['comment'] = 'Org {0} already up-to-date'.format(name)
return ret

View File

@@ -124,7 +124,7 @@ def present(name,
if ret['changes']:
ret['comment'] = 'User {0} updated'.format(name)
else:
ret['changes'] = None
ret['changes'] = {}
ret['comment'] = 'User {0} already up-to-date'.format(name)
return ret
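All three present() fixes above swap ret['changes'] = None for an empty dict: state returns are expected to carry a dict under 'changes', and an empty dict is the conventional "nothing changed" value. A minimal sketch of the expected return shape (values are hypothetical):

ret = {
    'name': 'example_user',           # hypothetical state name
    'result': True,
    'comment': 'User example_user already up-to-date',
    'changes': {},                    # empty dict, never None
}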

View File

@@ -68,9 +68,12 @@ def compare_lists(old=None, new=None):
def decode(data, encoding=None, errors='strict', keep=False,
normalize=False, preserve_dict_class=False, preserve_tuples=False):
normalize=False, preserve_dict_class=False, preserve_tuples=False,
to_str=False):
'''
Generic function which will decode whichever type is passed, if necessary
Generic function which will decode whichever type is passed, if necessary.
Optionally use to_str=True to ensure strings are str types and not unicode
on Python 2.
If `strict` is True, and `keep` is False, and we fail to decode, a
UnicodeDecodeError will be raised. Passing `keep` as True allows for the
@@ -94,22 +97,24 @@ def decode(data, encoding=None, errors='strict', keep=False,
for the base character, and one for the breve mark). Normalizing allows for
a more reliable test case.
'''
_decode_func = salt.utils.stringutils.to_unicode \
if not to_str \
else salt.utils.stringutils.to_str
if isinstance(data, collections.Mapping):
return decode_dict(data, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(data, list):
return decode_list(data, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(data, tuple):
return decode_tuple(data, encoding, errors, keep, normalize,
preserve_dict_class) \
preserve_dict_class, to_str) \
if preserve_tuples \
else decode_list(data, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
else:
try:
data = salt.utils.stringutils.to_unicode(
data, encoding, errors, normalize)
data = _decode_func(data, encoding, errors, normalize)
except TypeError:
# to_unicode raises a TypeError when input is not a
# string/bytestring/bytearray. This is expected and simply means we
@@ -123,23 +128,26 @@ def decode(data, encoding=None, errors='strict', keep=False,
def decode_dict(data, encoding=None, errors='strict', keep=False,
normalize=False, preserve_dict_class=False,
preserve_tuples=False):
preserve_tuples=False, to_str=False):
'''
Decode all string values to Unicode
Decode all string values to Unicode. Optionally use to_str=True to ensure
strings are str types and not unicode on Python 2.
'''
_decode_func = salt.utils.stringutils.to_unicode \
if not to_str \
else salt.utils.stringutils.to_str
# Make sure we preserve OrderedDicts
rv = data.__class__() if preserve_dict_class else {}
for key, value in six.iteritems(data):
if isinstance(key, tuple):
key = decode_tuple(key, encoding, errors, keep, normalize,
preserve_dict_class) \
preserve_dict_class, to_str) \
if preserve_tuples \
else decode_list(key, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
else:
try:
key = salt.utils.stringutils.to_unicode(
key, encoding, errors, normalize)
key = _decode_func(key, encoding, errors, normalize)
except TypeError:
# to_unicode raises a TypeError when input is not a
# string/bytestring/bytearray. This is expected and simply
@@ -151,20 +159,19 @@ def decode_dict(data, encoding=None, errors='strict', keep=False,
if isinstance(value, list):
value = decode_list(value, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(value, tuple):
value = decode_tuple(value, encoding, errors, keep, normalize,
preserve_dict_class) \
preserve_dict_class, to_str) \
if preserve_tuples \
else decode_list(value, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(value, collections.Mapping):
value = decode_dict(value, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
else:
try:
value = salt.utils.stringutils.to_unicode(
value, encoding, errors, normalize)
value = _decode_func(value, encoding, errors, normalize)
except TypeError:
# to_unicode raises a TypeError when input is not a
# string/bytestring/bytearray. This is expected and simply
@@ -180,28 +187,31 @@ def decode_dict(data, encoding=None, errors='strict', keep=False,
def decode_list(data, encoding=None, errors='strict', keep=False,
normalize=False, preserve_dict_class=False,
preserve_tuples=False):
preserve_tuples=False, to_str=False):
'''
Decode all string values to Unicode
Decode all string values to Unicode. Optionally use to_str=True to ensure
strings are str types and not unicode on Python 2.
'''
_decode_func = salt.utils.stringutils.to_unicode \
if not to_str \
else salt.utils.stringutils.to_str
rv = []
for item in data:
if isinstance(item, list):
item = decode_list(item, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(item, tuple):
item = decode_tuple(item, encoding, errors, keep, normalize,
preserve_dict_class) \
preserve_dict_class, to_str) \
if preserve_tuples \
else decode_list(item, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(item, collections.Mapping):
item = decode_dict(item, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples)
preserve_dict_class, preserve_tuples, to_str)
else:
try:
item = salt.utils.stringutils.to_unicode(
item, encoding, errors, normalize)
item = _decode_func(item, encoding, errors, normalize)
except TypeError:
# to_unicode raises a TypeError when input is not a
# string/bytestring/bytearray. This is expected and simply
@@ -216,13 +226,14 @@ def decode_list(data, encoding=None, errors='strict', keep=False,
def decode_tuple(data, encoding=None, errors='strict', keep=False,
normalize=False, preserve_dict_class=False):
normalize=False, preserve_dict_class=False, to_str=False):
'''
Decode all string values to Unicode
Decode all string values to Unicode. Optionally use to_str=True to ensure
strings are str types and not unicode on Python 2.
'''
return tuple(
decode_list(data, encoding, errors, keep, normalize,
preserve_dict_class, True)
preserve_dict_class, True, to_str)
)
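Taken together, these hunks thread a new to_str flag through decode(), decode_dict(), decode_list(), and decode_tuple() so callers can get native str values back (byte strings on Python 2, text on Python 3) instead of always unicode. A short usage sketch with made-up data:

import salt.utils.data

data = {u'key': [u'яйца', b'raw-bytes']}
decoded = salt.utils.data.decode(data, to_str=True)
# Every decoded string in the nested structure is now the native 'str' type.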

View File

@@ -51,39 +51,45 @@ def to_bytes(s, encoding=None, errors='strict'):
return to_str(s, encoding, errors)
def to_str(s, encoding=None, errors='strict'):
def to_str(s, encoding=None, errors='strict', normalize=False):
'''
Given str, bytes, bytearray, or unicode (py2), return str
'''
def _normalize(s):
try:
return unicodedata.normalize('NFC', s) if normalize else s
except TypeError:
return s
# This shouldn't be six.string_types because if we're on PY2 and we already
# have a string, we should just return it.
if isinstance(s, str):
return s
return _normalize(s)
if six.PY3:
if isinstance(s, (bytes, bytearray)):
if encoding:
return s.decode(encoding, errors)
return _normalize(s.decode(encoding, errors))
else:
try:
# Try UTF-8 first
return s.decode('utf-8', errors)
return _normalize(s.decode('utf-8', errors))
except UnicodeDecodeError:
# Fall back to detected encoding
return s.decode(__salt_system_encoding__, errors)
return _normalize(s.decode(__salt_system_encoding__, errors))
raise TypeError('expected str, bytes, or bytearray not {}'.format(type(s)))
else:
if isinstance(s, bytearray):
return str(s) # future lint: disable=blacklisted-function
if isinstance(s, unicode): # pylint: disable=incompatible-py3-code,undefined-variable
if encoding:
return s.encode(encoding, errors)
return _normalize(s).encode(encoding, errors)
else:
try:
# Try UTF-8 first
return s.encode('utf-8', errors)
return _normalize(s).encode('utf-8', errors)
except UnicodeEncodeError:
# Fall back to detected encoding
return s.encode(__salt_system_encoding__, errors)
return _normalize(s).encode(__salt_system_encoding__, errors)
raise TypeError('expected str, bytearray, or unicode')
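The new normalize flag applies NFC normalization before encoding or decoding, collapsing decomposed character sequences into their composed forms so comparisons behave predictably. A quick illustration of what NFC does to the two-codepoint 'й' mentioned in the tests further down:

import unicodedata

decomposed = u'\u0438\u0306'   # CYRILLIC 'и' followed by COMBINING BREVE
composed = u'\u0439'           # single code point CYRILLIC SMALL LETTER SHORT I
assert unicodedata.normalize('NFC', decomposed) == composed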

View File

@@ -205,7 +205,8 @@ def _get_jinja_error_slug(tb_data):
return [
x
for x in tb_data if x[2] in ('top-level template code',
'template')
'template',
'<module>')
][-1]
except IndexError:
pass
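Adding '<module>' to the accepted frame names matters because tracebacks raised at a template's top level can report that generic name instead of 'top-level template code'. A tiny standalone illustration (not Salt code) of where the '<module>' string comes from in an extracted traceback:

import sys
import traceback

try:
    raise ValueError('boom')
except ValueError:
    tb_data = traceback.extract_tb(sys.exc_info()[2])
# When this runs at module level, the frame's function-name slot (index 2)
# holds the literal string '<module>'.
print([frame[2] for frame in tb_data])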

View File

@@ -546,7 +546,7 @@ def thin_sum(cachedir, form='sha1'):
thintar = gen_thin(cachedir)
code_checksum_path = os.path.join(cachedir, 'thin', 'code-checksum')
if os.path.isfile(code_checksum_path):
with salt.utils.fopen(code_checksum_path, 'r') as fh:
with salt.utils.files.fopen(code_checksum_path, 'r') as fh:
code_checksum = "'{0}'".format(fh.read().strip())
else:
code_checksum = "'0'"

View File

@@ -19,7 +19,7 @@ import textwrap
from tests.support.mixins import AdaptedConfigurationTestCaseMixin
from tests.support.paths import TMP
from tests.support.unit import skipIf, TestCase
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, Mock, MagicMock, patch
# Import Salt libs
import salt.minion
@@ -1318,3 +1318,92 @@ class ConfigTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
config_path,
verbose=False,
exit_on_config_errors=True)
@staticmethod
def _get_defaults(**kwargs):
ret = {
'saltenv': kwargs.pop('saltenv', None),
'id': 'test',
'cachedir': '/A',
'sock_dir': '/B',
'root_dir': '/C',
'fileserver_backend': 'roots',
'open_mode': False,
'auto_accept': False,
'file_roots': {},
'pillar_roots': {},
'file_ignore_glob': [],
'file_ignore_regex': [],
'worker_threads': 5,
'hash_type': 'sha256',
'log_file': 'foo.log',
}
ret.update(kwargs)
return ret
@skipIf(NO_MOCK, NO_MOCK_REASON)
def test_apply_config(self):
'''
Ensure that the environment and saltenv options work properly
'''
with patch.object(sconfig, '_adjust_log_file_override', Mock()), \
patch.object(sconfig, '_update_ssl_config', Mock()), \
patch.object(sconfig, '_update_discovery_config', Mock()):
# MASTER CONFIG
# Ensure that environment overrides saltenv when saltenv not
# explicitly passed.
defaults = self._get_defaults(environment='foo')
ret = sconfig.apply_master_config(defaults=defaults)
self.assertEqual(ret['environment'], 'foo')
self.assertEqual(ret['saltenv'], 'foo')
# Ensure that environment overrides saltenv when saltenv not
# explicitly passed.
defaults = self._get_defaults(environment='foo', saltenv='bar')
ret = sconfig.apply_master_config(defaults=defaults)
self.assertEqual(ret['environment'], 'bar')
self.assertEqual(ret['saltenv'], 'bar')
# If environment was not explicitly set, it should not be in the
# opts at all.
defaults = self._get_defaults()
ret = sconfig.apply_master_config(defaults=defaults)
self.assertNotIn('environment', ret)
self.assertEqual(ret['saltenv'], None)
# Same test as above but with saltenv explicitly set
defaults = self._get_defaults(saltenv='foo')
ret = sconfig.apply_master_config(defaults=defaults)
self.assertNotIn('environment', ret)
self.assertEqual(ret['saltenv'], 'foo')
# MINION CONFIG
# Ensure that environment overrides saltenv when saltenv not
# explicitly passed.
defaults = self._get_defaults(environment='foo')
ret = sconfig.apply_minion_config(defaults=defaults)
self.assertEqual(ret['environment'], 'foo')
self.assertEqual(ret['saltenv'], 'foo')
# Ensure that environment overrides saltenv when saltenv not
# explicitly passed.
defaults = self._get_defaults(environment='foo', saltenv='bar')
ret = sconfig.apply_minion_config(defaults=defaults)
self.assertEqual(ret['environment'], 'bar')
self.assertEqual(ret['saltenv'], 'bar')
# If environment was not explicitly set, it should not be in the
# opts at all.
defaults = self._get_defaults()
ret = sconfig.apply_minion_config(defaults=defaults)
self.assertNotIn('environment', ret)
self.assertEqual(ret['saltenv'], None)
# Same test as above but with saltenv explicitly set
defaults = self._get_defaults(saltenv='foo')
ret = sconfig.apply_minion_config(defaults=defaults)
self.assertNotIn('environment', ret)
self.assertEqual(ret['saltenv'], 'foo')

View File

@@ -3,6 +3,8 @@
# python libs
from __future__ import absolute_import
import os
import tempfile
import shutil
# salt testing libs
from tests.support.unit import TestCase, skipIf
@@ -196,8 +198,7 @@ class M2CryptTestCase(TestCase):
self.assertEqual(SIG, crypt.sign_message('/keydir/keyname.pem', MSG, passphrase='password'))
def test_verify_signature(self):
key = M2Crypto.RSA.load_pub_key_bio(M2Crypto.BIO.MemoryBuffer(six.b(PUBKEY_DATA)))
with patch('M2Crypto.RSA.load_pub_key', return_value=key):
with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)):
self.assertTrue(crypt.verify_signature('/keydir/keyname.pub', MSG, SIG))
def test_encrypt_decrypt_bin(self):
@@ -206,3 +207,46 @@ class M2CryptTestCase(TestCase):
encrypted = salt.crypt.private_encrypt(priv_key, b'salt')
decrypted = salt.crypt.public_decrypt(pub_key, encrypted)
self.assertEqual(b'salt', decrypted)
class TestBadCryptodomePubKey(TestCase):
'''
Test that we can load public keys exported by pycrpytodome<=3.4.6
'''
TEST_KEY = (
'-----BEGIN RSA PUBLIC KEY-----\n'
'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzLtFhsvfbFDFaUgulSEX\n'
'Gl12XriL1DT78Ef2/u8HHaSMmPie37BLWas/zaHwI6066bIyYQJ/nUCahTaoHM7L\n'
'GlWc0wOU6zyfpihCRQHil05Y6F+olFBoZuYbFPvtp7/hJx/D7I/0n2o/c7M5i3Y2\n'
'3sBxAYNooIQHXHUmPQW6C9iu95ylZDW8JQzYy/EI4vCC8yQMdTK8jK1FQV0Sbwny\n'
'qcMxSyAWDoFbnhh2P2TnO8HOWuUOaXR8ZHOJzVcDl+a6ew+medW090x3K5O1f80D\n'
'+WjgnG6b2HG7VQpOCfM2GALD/FrxicPilvZ38X1aLhJuwjmVE4LAAv8DVNJXohaO\n'
'WQIDAQAB\n'
'-----END RSA PUBLIC KEY-----\n'
)
def setUp(self):
self.test_dir = tempfile.mkdtemp()
self.key_path = os.path.join(self.test_dir, 'cryptodom-3.4.6.pub')
with salt.utils.files.fopen(self.key_path, 'wb') as fd:
fd.write(self.TEST_KEY.encode())
def tearDown(self):
shutil.rmtree(self.test_dir)
@skipIf(not HAS_M2, "Skip when m2crypto is not installed")
def test_m2_bad_key(self):
'''
Load public key with an invalid header using m2crypto and validate it
'''
key = salt.crypt.get_rsa_pub_key(self.key_path)
assert key.check_key() == 1
@skipIf(HAS_M2, "Skip when m2crypto is installed")
def test_crypto_bad_key(self):
'''
Load public key with an invalid header and validate it without m2crypto
'''
key = salt.crypt.get_rsa_pub_key(self.key_path)
assert key.can_encrypt()

View File

@@ -9,14 +9,16 @@ import logging
# Import Salt libs
import salt.utils.data
import salt.utils.data
import salt.utils.stringutils
from salt.utils.odict import OrderedDict
from tests.support.unit import TestCase, skipIf, LOREM_IPSUM
from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON
from salt.ext.six.moves import builtins # pylint: disable=import-error,redefined-builtin
from salt.ext import six
log = logging.getLogger(__name__)
_b = lambda x: x.encode('utf-8')
_s = lambda x: salt.utils.stringutils.to_str(x, normalize=True)
# Some randomized data that will not decode
BYTES = b'\x9c\xb1\xf7\xa3'
# This is an example of a unicode string with й constructed using two separate
@@ -213,6 +215,9 @@ class DataTestCase(TestCase):
def test_decode(self):
'''
Companion to test_decode_to_str, they should both be kept up-to-date
with one another.
NOTE: This uses the lambda "_b" defined above in the global scope,
which encodes a string to a bytestring, assuming utf-8.
'''
@@ -291,6 +296,97 @@ class DataTestCase(TestCase):
BYTES,
keep=False)
def test_decode_to_str(self):
'''
Companion to test_decode, they should both be kept up-to-date with one
another.
NOTE: This uses the lambda "_s" defined above in the global scope,
which converts the string/bytestring to a str type.
'''
expected = [
_s('unicode_str'),
_s('питон'),
123,
456.789,
True,
False,
None,
_s('яйца'),
BYTES,
[123, 456.789, _s('спам'), True, False, None, _s('яйца'), BYTES],
(987, 654.321, _s('яйца'), _s('яйца'), None, (True, _s('яйца'), BYTES)),
{_s('str_key'): _s('str_val'),
None: True,
123: 456.789,
_s('яйца'): BYTES,
_s('subdict'): {
_s('unicode_key'): _s('яйца'),
_s('tuple'): (123, _s('hello'), _s('world'), True, _s('яйца'), BYTES),
_s('list'): [456, _s('спам'), False, _s('яйца'), BYTES]}},
OrderedDict([(_s('foo'), _s('bar')), (123, 456), (_s('яйца'), BYTES)])
]
ret = salt.utils.data.decode(
self.test_data,
keep=True,
normalize=True,
preserve_dict_class=True,
preserve_tuples=True,
to_str=True)
self.assertEqual(ret, expected)
if six.PY3:
# The binary data in the data structure should fail to decode, even
# using the fallback, and raise an exception.
self.assertRaises(
UnicodeDecodeError,
salt.utils.data.decode,
self.test_data,
keep=False,
normalize=True,
preserve_dict_class=True,
preserve_tuples=True,
to_str=True)
# Now munge the expected data so that we get what we would expect if we
# disable preservation of dict class and tuples
expected[10] = [987, 654.321, _s('яйца'), _s('яйца'), None, [True, _s('яйца'), BYTES]]
expected[11][_s('subdict')][_s('tuple')] = [123, _s('hello'), _s('world'), True, _s('яйца'), BYTES]
expected[12] = {_s('foo'): _s('bar'), 123: 456, _s('яйца'): BYTES}
ret = salt.utils.data.decode(
self.test_data,
keep=True,
normalize=True,
preserve_dict_class=False,
preserve_tuples=False,
to_str=True)
self.assertEqual(ret, expected)
# Now test single non-string, non-data-structure items, these should
# return the same value when passed to this function
for item in (123, 4.56, True, False, None):
log.debug('Testing decode of %s', item)
self.assertEqual(salt.utils.data.decode(item, to_str=True), item)
# Test single strings (not in a data structure)
self.assertEqual(salt.utils.data.decode('foo', to_str=True), _s('foo'))
self.assertEqual(salt.utils.data.decode(_b('bar'), to_str=True), _s('bar'))
# Test binary blob
self.assertEqual(
salt.utils.data.decode(BYTES, keep=True, to_str=True),
BYTES
)
if six.PY3:
self.assertRaises(
UnicodeDecodeError,
salt.utils.data.decode,
BYTES,
keep=False,
to_str=True)
@skipIf(NO_MOCK, NO_MOCK_REASON)
def test_decode_fallback(self):
'''