Merge pull request #38297 from rallytime/merge-develop
[develop] Merge forward from 2016.11 to develop
Commit 5ece8fb244

doc/topics/releases/2016.11.2.rst (new file, 6 lines)
@@ -0,0 +1,6 @@
+============================
+Salt 2016.11.2 Release Notes
+============================
+
+Version 2016.11.2 is a bugfix release for :doc:`2016.11.0
+</topics/releases/2016.11.0>`.
@@ -244,7 +244,7 @@ class KeyCLI(object):
         if not ret:
             self._print_no_match(cmd, self.opts['match'])
             return
-        print('The following keys are going to be {0}ed:'.format(cmd))
+        print('The following keys are going to be {0}ed:'.format(cmd.rstrip('e')))
         salt.output.display_output(ret, 'key', opts=self.opts)
 
         if not self.opts.get('yes', False):
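The fix above trims a trailing 'e' before appending 'ed', so a command such as 'delete' is reported as 'deleted' instead of 'deleteed'. A minimal standalone sketch of the string behaviour (illustrative, not part of the commit):

    # How rstrip('e') + 'ed' forms the past tense of the key commands
    # without doubling a final 'e'.
    for cmd in ('accept', 'reject', 'delete'):
        before = '{0}ed'.format(cmd)               # 'deleteed' for 'delete'
        after = '{0}ed'.format(cmd.rstrip('e'))    # 'deleted'
        print(cmd, before, after)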
@@ -40,6 +40,7 @@ log = logging.getLogger(__name__)
 def list_(name,
           archive_format=None,
           options=None,
+          strip_components=None,
           clean=False,
           verbose=False,
           saltenv='base'):
@@ -92,6 +93,14 @@ def list_(name,
         It is not necessary to manually specify options for gzip'ed
         archives, as gzip compression is natively supported by tarfile_.
 
+    strip_components
+        This argument specifies a number of top-level directories to strip from
+        the results. This is similar to the paths that would be extracted if
+        ``--strip-components`` (or ``--strip``) were used when extracting tar
+        archives.
+
+        .. versionadded:: 2016.11.2
+
     clean : False
         Set this value to ``True`` to delete the path referred to by ``name``
         once the contents have been listed. This option should be used with
@@ -120,6 +129,7 @@ def list_(name,
     .. code-block:: bash
 
         salt '*' archive.list /path/to/myfile.tar.gz
+        salt '*' archive.list /path/to/myfile.tar.gz strip_components=1
        salt '*' archive.list salt://foo.tar.gz
         salt '*' archive.list https://domain.tld/myfile.zip
         salt '*' archive.list ftp://10.1.2.3/foo.rar
@@ -213,6 +223,17 @@ def list_(name,
         raise CommandExecutionError('Failed to cache {0}'.format(name))
 
     try:
+        if strip_components:
+            try:
+                int(strip_components)
+            except ValueError:
+                strip_components = -1
+
+            if strip_components <= 0:
+                raise CommandExecutionError(
+                    '\'strip_components\' must be a positive integer'
+                )
+
         parsed = _urlparse(name)
         path = parsed.path or parsed.netloc
 
@@ -266,6 +287,24 @@ def list_(name,
                         'Failed to clean cached archive %s: %s',
                         cached, exc.__str__()
                     )
+
+    if strip_components:
+        stripped_ret = []
+        for item in ret:
+            try:
+                # Strip off the specified number of directory boundaries,
+                # and grab what comes after the last stripped path
+                # separator.
+                stripped_item = item.split(
+                    os.sep, strip_components)[strip_components]
+                if stripped_item:
+                    stripped_ret.append(stripped_item)
+            except IndexError:
+                # Path is excluded by strip_components because it is not
+                # deep enough.
+                pass
+        ret = stripped_ret
+
     if verbose:
         verbose_ret = {'dirs': [],
                        'files': [],
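A standalone sketch of the path-stripping logic added above: each listed member is split on os.sep a limited number of times and only the remainder is kept, which both strips the leading directories and drops entries that are not deep enough. The sample member names are made up for illustration; on POSIX os.sep is '/':

    import os

    members = ['top-dir/', 'top-dir/README', 'top-dir/src/main.py']
    strip_components = 1

    stripped = []
    for item in members:
        try:
            # Same idea as the diff: keep what follows the first N path
            # separators; IndexError means the path is too shallow.
            remainder = item.split(os.sep, strip_components)[strip_components]
            if remainder:
                stripped.append(remainder)
        except IndexError:
            pass

    print(stripped)  # ['README', 'src/main.py']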
@@ -134,7 +134,7 @@ def setenforce(mode):
             conf = _cf.read()
         try:
             with salt.utils.fopen(config, 'w') as _cf:
-                conf = re.sub(r"\nSELINUX.*\n", "\nSELINUX=" + modestring + "\n", conf)
+                conf = re.sub(r"\nSELINUX=.*\n", "\nSELINUX=" + modestring + "\n", conf)
                 _cf.write(conf)
         except (IOError, OSError) as exc:
             msg = 'Could not write SELinux config file: {0}'
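The one-character regex change above matters because /etc/selinux/config normally also carries a SELINUXTYPE= line; without anchoring on the '=', that line can be rewritten as well. A standalone illustration (the config text below is a made-up, shortened stand-in for the real file):

    import re

    conf = (
        "# This file controls the state of SELinux on the system.\n"
        "SELINUX=enforcing\n"
        "# SELINUXTYPE= can take one of these values:\n"
        "SELINUXTYPE=targeted\n"
    )
    modestring = "permissive"

    # Old pattern: any line beginning with SELINUX matches, so the
    # SELINUXTYPE= line is clobbered too.
    print(re.sub(r"\nSELINUX.*\n", "\nSELINUX=" + modestring + "\n", conf))

    # Fixed pattern: only the SELINUX= line is rewritten.
    print(re.sub(r"\nSELINUX=.*\n", "\nSELINUX=" + modestring + "\n", conf))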
@@ -463,6 +463,7 @@ import functools
 import logging
 import json
 import os
+import signal
 import tarfile
 import time
 from multiprocessing import Process, Pipe
@@ -2264,6 +2265,12 @@ class WebsocketEndpoint(object):
                 listen=True)
             stream = event.iter_events(full=True, auto_reconnect=True)
             SaltInfo = event_processor.SaltInfo(handler)
+
+            def signal_handler(signal, frame):
+                os._exit(0)
+
+            signal.signal(signal.SIGTERM, signal_handler)
+
             while True:
                 data = next(stream)
                 if data:
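For context on the added handler: signal.signal() registers a callback for SIGTERM, and os._exit() terminates the process immediately, skipping normal interpreter cleanup. The Process/Pipe import above suggests the surrounding event-stream code runs in a spawned child process, where this pattern is a common way to make termination prompt. A minimal standalone sketch (POSIX only, not Salt code):

    import os
    import signal
    import time
    from multiprocessing import Process


    def worker():
        # Exit the child immediately on SIGTERM instead of waiting for
        # the blocking loop below; os._exit skips cleanup handlers.
        def signal_handler(signum, frame):
            os._exit(0)

        signal.signal(signal.SIGTERM, signal_handler)
        while True:
            time.sleep(1)  # stands in for a blocking event stream


    if __name__ == '__main__':
        proc = Process(target=worker)
        proc.start()
        time.sleep(0.5)
        proc.terminate()          # delivers SIGTERM to the child
        proc.join()
        print('worker exit code:', proc.exitcode)   # 0 thanks to the handler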
@@ -332,7 +332,7 @@ class Pillar(object):
         if not opts.get('environment'):
             opts['environment'] = saltenv
         opts['id'] = self.minion_id
-        if 'pillarenv' not in opts:
+        if not opts.get('pillarenv'):
             opts['pillarenv'] = pillarenv
         if opts['state_top'].startswith('salt://'):
             opts['state_top'] = opts['state_top']
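The new check above also covers the case where opts carries 'pillarenv' with a value of None (as it commonly does when the option was never configured), which the old membership test missed. A tiny illustration:

    opts = {'pillarenv': None}   # key present, value unset
    pillarenv = 'dev'

    if 'pillarenv' not in opts:       # old check: never fires here
        opts['pillarenv'] = pillarenv
    print(opts['pillarenv'])          # None

    if not opts.get('pillarenv'):     # new check: treats None/'' as unset
        opts['pillarenv'] = pillarenv
    print(opts['pillarenv'])          # 'dev'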
@@ -2674,7 +2674,7 @@ class BaseHighState(object):
                     tops[saltenv].append({})
                     log.debug('No contents loaded for env: {0}'.format(saltenv))
 
-        if found > 1 and merging_strategy.startswith('merge'):
+        if found > 1 and merging_strategy == 'merge':
             log.warning(
                 'top_file_merging_strategy is set to \'%s\' and '
                 'multiple top files were found. Merging order is not '
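The tightened comparison above keeps the warning from firing for strategies whose names merely begin with 'merge', such as 'merge_all'. The string behaviour itself is easy to see in isolation:

    for strategy in ('merge', 'merge_all', 'same'):
        print(strategy,
              strategy.startswith('merge'),   # True for merge and merge_all
              strategy == 'merge')            # True only for merge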
@@ -527,7 +527,7 @@ def extracted(name,
             - name: /opt/
             - source: https://github.com/downloads/Graylog2/graylog2-server/graylog2-server-0.9.6p1.tar.gz
             - source_hash: md5=499ae16dcae71eeb7c3a30c75ea7a1a6
-            - tar_options: v
+            - options: v
             - user: foo
             - group: foo
 
@@ -684,6 +684,21 @@ def extracted(name,
             ret.setdefault('warnings', []).append(msg)
         options = zip_options
 
+    if options is not None and not isinstance(options, six.string_types):
+        options = str(options)
+
+    strip_components = None
+    if options and archive_format == 'tar':
+        try:
+            strip_components = int(
+                re.search(
+                    r'''--strip(?:-components)?(?:\s+|=)["']?(\d+)["']?''',
+                    options
+                ).group(1)
+            )
+        except (AttributeError, ValueError):
+            pass
+
     if archive_format == 'zip':
         if options:
             if use_cmd_unzip is None:
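A standalone check of the regular expression introduced above, showing the spellings of the tar strip option it recognises (the sample option strings are illustrative):

    import re

    PATTERN = r'''--strip(?:-components)?(?:\s+|=)["']?(\d+)["']?'''

    samples = [
        '--strip=1',
        '--strip-components=2',
        '--strip 3',
        "--strip-components '4'",
        '-xvf',                    # no strip option present
    ]

    for options in samples:
        match = re.search(PATTERN, options)
        # When the option is absent, .group(1) on None raises the
        # AttributeError that the diff's try/except swallows.
        print(options, '->', int(match.group(1)) if match else None)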
@@ -859,6 +874,7 @@ def extracted(name,
             contents = __salt__['archive.list'](cached_source,
                                                 archive_format=archive_format,
                                                 options=list_options,
+                                                strip_components=strip_components,
                                                 clean=False,
                                                 verbose=True)
         except CommandExecutionError as exc:
@@ -1160,10 +1176,7 @@ def extracted(name,
                 )
                 return ret
 
-            try:
-                tar_opts = shlex.split(options)
-            except AttributeError:
-                tar_opts = shlex.split(str(options))
+            tar_opts = shlex.split(options)
 
             tar_cmd = ['tar']
             tar_shortopts = 'x'
@@ -104,12 +104,16 @@ def mode(name):
         ret['comment'] = 'SELinux mode is set to be changed to {0}'.format(
             tmode)
         ret['result'] = None
+        ret['changes'] = {'old': mode,
+                          'new': tmode}
         return ret
 
-    mode = __salt__['selinux.setenforce'](tmode)
+    oldmode, mode = mode, __salt__['selinux.setenforce'](tmode)
     if mode == tmode:
         ret['result'] = True
         ret['comment'] = 'SELinux has been set to {0} mode'.format(tmode)
+        ret['changes'] = {'old': oldmode,
+                          'new': mode}
         return ret
     ret['comment'] = 'Failed to set SELinux to {0} mode'.format(tmode)
     return ret
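The oldmode, mode = mode, ... line works because the right-hand side is evaluated completely before either name is rebound, so the previous mode is still available for the changes dictionary. A minimal illustration with a stand-in for the execution-module call:

    def setenforce(tmode):
        # Stand-in for __salt__['selinux.setenforce']; just echoes the target.
        return tmode

    mode = 'Enforcing'
    tmode = 'Permissive'

    # The tuple on the right is built first, so oldmode captures the value
    # mode held before the call.
    oldmode, mode = mode, setenforce(tmode)
    print({'old': oldmode, 'new': mode})  # {'old': 'Enforcing', 'new': 'Permissive'}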
@@ -2973,7 +2973,9 @@ def to_str(s, encoding=None):
         return s
     if six.PY3:
         if isinstance(s, (bytes, bytearray)):
-            return s.decode(encoding or __salt_system_encoding__)
+            # https://docs.python.org/3/howto/unicode.html#the-unicode-type
+            # replace error with U+FFFD, REPLACEMENT CHARACTER
+            return s.decode(encoding or __salt_system_encoding__, "replace")
         raise TypeError('expected str, bytes, or bytearray')
     else:
         if isinstance(s, bytearray):
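The added "replace" error handler keeps to_str() from raising on bytes that are not valid in the target encoding, substituting U+FFFD instead. The underlying decode behaviour, shown in isolation:

    raw = b'\x9c'                    # not valid UTF-8 on its own

    try:
        raw.decode('utf-8')          # the strict default handler raises
    except UnicodeDecodeError as exc:
        print('strict decode failed:', exc)

    # 'replace' substitutes U+FFFD, the Unicode replacement character,
    # which is what the patched function now returns for such input.
    print(raw.decode('utf-8', 'replace') == u'\ufffd')   # True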
@@ -4,10 +4,11 @@ Tests for the archive state
 '''
 # Import python libs
 from __future__ import absolute_import
+import errno
+import logging
 import os
 import platform
 import socket
-import shutil
 import threading
 import tornado.ioloop
 import tornado.web
@@ -21,15 +22,18 @@ ensure_in_syspath('../../')
 import integration
 import salt.utils
 
+# Setup logging
+log = logging.getLogger(__name__)
+
 STATE_DIR = os.path.join(integration.FILES, 'file', 'base')
 if salt.utils.is_windows():
     ARCHIVE_DIR = os.path.join("c:/", "tmp")
 else:
-    ARCHIVE_DIR = '/tmp/archive/'
+    ARCHIVE_DIR = '/tmp/archive'
 
 PORT = 9999
 ARCHIVE_TAR_SOURCE = 'http://localhost:{0}/custom.tar.gz'.format(PORT)
-UNTAR_FILE = ARCHIVE_DIR + 'custom/README'
+UNTAR_FILE = os.path.join(ARCHIVE_DIR, 'custom/README')
 ARCHIVE_TAR_HASH = 'md5=7643861ac07c30fe7d2310e9f25ca514'
 STATE_DIR = os.path.join(integration.FILES, 'file', 'base')
 if '7' in platform.dist()[1]:
@@ -77,18 +81,26 @@ class ArchiveTest(integration.ModuleCase,
         tornado.ioloop.IOLoop.instance().stop()
         cls.server_thread.join()
 
-    def _check_ext_remove(self, dir, file):
+    def setUp(self):
+        self._clear_archive_dir()
+
+    def tearDown(self):
+        self._clear_archive_dir()
+
+    @staticmethod
+    def _clear_archive_dir():
+        try:
+            salt.utils.rm_rf(ARCHIVE_DIR)
+        except OSError as exc:
+            if exc.errno != errno.ENOENT:
+                raise
+
+    def _check_extracted(self, path):
         '''
         function to check if file was extracted
-        and remove the directory.
         '''
-        # check to see if it extracted
-        check_dir = os.path.isfile(file)
-        self.assertTrue(check_dir)
-
-        # wipe away dir. Can't do this in teardown
-        # because it needs to be wiped before each test
-        shutil.rmtree(dir)
+        log.debug('Checking for extracted file: %s', path)
+        self.assertTrue(os.path.isfile(path))
 
     def test_archive_extracted_skip_verify(self):
         '''
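The _clear_archive_dir() helper added above uses the usual "remove if present" idiom: delete the tree and ignore only a missing-path error. A standalone sketch of the same idiom, using shutil.rmtree in place of salt.utils.rm_rf:

    import errno
    import shutil
    import tempfile


    def clear_dir(path):
        # Remove the tree; a missing path is fine, anything else
        # (permissions, path is a regular file, ...) is re-raised.
        try:
            shutil.rmtree(path)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise


    path = tempfile.mkdtemp()
    clear_dir(path)   # removes the directory
    clear_dir(path)   # second call is a no-op instead of raising ENOENT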
@@ -97,11 +109,12 @@ class ArchiveTest(integration.ModuleCase,
         ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                              source=ARCHIVE_TAR_SOURCE, archive_format='tar',
                              skip_verify=True)
+        log.debug('ret = %s', ret)
         if 'Timeout' in ret:
             self.skipTest('Timeout talking to local tornado server.')
         self.assertSaltTrueReturn(ret)
 
-        self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
+        self._check_extracted(UNTAR_FILE)
 
     def test_archive_extracted_with_source_hash(self):
         '''
@@ -112,30 +125,80 @@ class ArchiveTest(integration.ModuleCase,
         ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                              source=ARCHIVE_TAR_SOURCE, archive_format='tar',
                              source_hash=ARCHIVE_TAR_HASH)
+        log.debug('ret = %s', ret)
         if 'Timeout' in ret:
             self.skipTest('Timeout talking to local tornado server.')
 
         self.assertSaltTrueReturn(ret)
 
-        self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
+        self._check_extracted(UNTAR_FILE)
 
     @skipIf(os.geteuid() != 0, 'you must be root to run this test')
     def test_archive_extracted_with_root_user_and_group(self):
         '''
-        test archive.extracted without skip_verify
-        only external resources work to check to
-        ensure source_hash is verified correctly
+        test archive.extracted with user and group set to "root"
         '''
         ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                              source=ARCHIVE_TAR_SOURCE, archive_format='tar',
                              source_hash=ARCHIVE_TAR_HASH,
                              user='root', group='root')
+        log.debug('ret = %s', ret)
         if 'Timeout' in ret:
             self.skipTest('Timeout talking to local tornado server.')
 
         self.assertSaltTrueReturn(ret)
 
-        self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
+        self._check_extracted(UNTAR_FILE)
+
+    @skipIf(os.geteuid() != 0, 'you must be root to run this test')
+    def test_archive_extracted_with_strip_in_options(self):
+        '''
+        test archive.extracted with --strip in options
+        '''
+        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
+                             source=ARCHIVE_TAR_SOURCE,
+                             source_hash=ARCHIVE_TAR_HASH,
+                             options='--strip=1',
+                             enforce_toplevel=False)
+        log.debug('ret = %s', ret)
+        if 'Timeout' in ret:
+            self.skipTest('Timeout talking to local tornado server.')
+
+        self.assertSaltTrueReturn(ret)
+
+        self._check_extracted(os.path.join(ARCHIVE_DIR, 'README'))
+
+    @skipIf(os.geteuid() != 0, 'you must be root to run this test')
+    def test_archive_extracted_with_strip_components_in_options(self):
+        '''
+        test archive.extracted with --strip-components in options
+        '''
+        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
+                             source=ARCHIVE_TAR_SOURCE,
+                             source_hash=ARCHIVE_TAR_HASH,
+                             options='--strip-components=1',
+                             enforce_toplevel=False)
+        log.debug('ret = %s', ret)
+        if 'Timeout' in ret:
+            self.skipTest('Timeout talking to local tornado server.')
+
+        self.assertSaltTrueReturn(ret)
+
+        self._check_extracted(os.path.join(ARCHIVE_DIR, 'README'))
+
+    def test_archive_extracted_without_archive_format(self):
+        '''
+        test archive.extracted with no archive_format option
+        '''
+        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
+                             source=ARCHIVE_TAR_SOURCE,
+                             source_hash=ARCHIVE_TAR_HASH)
+        log.debug('ret = %s', ret)
+        if 'Timeout' in ret:
+            self.skipTest('Timeout talking to local tornado server.')
+        self.assertSaltTrueReturn(ret)
+
+        self._check_extracted(UNTAR_FILE)
 
 
 if __name__ == '__main__':
@@ -37,13 +37,10 @@ class SelinuxTestCase(TestCase):
         Test to verifies the mode SELinux is running in,
         can be set to enforcing or permissive.
         '''
-        ret = {'name': '',
+        ret = {'name': 'unknown',
                'changes': {},
                'result': False,
-               'comment': ''}
-
-        comt = ('unknown is not an accepted mode')
-        ret.update({'name': 'unknown', 'comment': comt})
+               'comment': 'unknown is not an accepted mode'}
         self.assertDictEqual(selinux.mode('unknown'), ret)
 
         mock_en = MagicMock(return_value='Enforcing')
@@ -52,24 +49,24 @@ class SelinuxTestCase(TestCase):
                         {'selinux.getenforce': mock_en,
                          'selinux.setenforce': mock_pr}):
             comt = ('SELinux is already in Enforcing mode')
-            ret.update({'name': 'Enforcing', 'comment': comt, 'result': True})
+            ret = {'name': 'Enforcing', 'comment': comt, 'result': True, 'changes': {}}
             self.assertDictEqual(selinux.mode('Enforcing'), ret)
 
             with patch.dict(selinux.__opts__, {'test': True}):
                 comt = ('SELinux mode is set to be changed to Permissive')
-                ret.update({'name': 'Permissive', 'comment': comt,
-                            'result': None})
+                ret = {'name': 'Permissive', 'comment': comt,
+                       'result': None, 'changes': {'new': 'Permissive', 'old': 'Enforcing'}}
                 self.assertDictEqual(selinux.mode('Permissive'), ret)
 
             with patch.dict(selinux.__opts__, {'test': False}):
                 comt = ('SELinux has been set to Permissive mode')
-                ret.update({'name': 'Permissive', 'comment': comt,
-                            'result': True})
+                ret = {'name': 'Permissive', 'comment': comt,
+                       'result': True, 'changes': {'new': 'Permissive', 'old': 'Enforcing'}}
                 self.assertDictEqual(selinux.mode('Permissive'), ret)
 
                 comt = ('Failed to set SELinux to Permissive mode')
                 ret.update({'name': 'Permissive', 'comment': comt,
-                            'result': False})
+                            'result': False, 'changes': {}})
                 self.assertDictEqual(selinux.mode('Permissive'), ret)
 
     # 'boolean' function tests: 1
@@ -768,6 +768,10 @@ class UtilsTestCase(TestCase):
             ut = bytes((0xe4, 0xb8, 0xad, 0xe5, 0x9b, 0xbd, 0xe8, 0xaa, 0x9e, 0x20, 0x28, 0xe7, 0xb9, 0x81, 0xe4, 0xbd, 0x93, 0x29))
             self.assertEqual(utils.to_str(ut, 'utf-8'), un)
             self.assertEqual(utils.to_str(bytearray(ut), 'utf-8'), un)
+            # Test situation when a minion returns incorrect utf-8 string because of... million reasons
+            ut2 = b'\x9c'
+            self.assertEqual(utils.to_str(ut2, 'utf-8'), u'\ufffd')
+            self.assertEqual(utils.to_str(bytearray(ut2), 'utf-8'), u'\ufffd')
         else:
             self.assertEqual(utils.to_str('plugh'), 'plugh')
             self.assertEqual(utils.to_str(u'áéíóúý', 'utf-8'), 'áéíóúý')