Merge remote-tracking branch 'upstream/2014.7' into merge-forward-2015.2

Conflicts:
    doc/topics/topology/syndic.rst
    salt/client/__init__.py
    salt/daemons/masterapi.py
    salt/minion.py
    salt/modules/dockerio.py
Colton Myers committed on 2015-03-19 16:03:29 -06:00
commit 2b1f400ab4
10 changed files with 201 additions and 52 deletions

View File

@@ -66,6 +66,7 @@ starting the other Salt daemons.
Topology
========

The ``salt-syndic`` is little more than a command and event forwarder. When a
command is issued from a higher-level master, it will be received by the
configured syndics on lower-level masters, and propagated to their minions,
@@ -83,3 +84,22 @@ Nodes on the lowest points of the hierarchy (minions which do not propagate
data to another level) will only have the ``salt-minion`` daemon running. There
is no need for either ``salt-master`` or ``salt-syndic`` to be running on a
standard minion.
+
+Syndic and the CLI
+==================
+
+In order for the high-level master to return information from minions that are
+below the syndic(s), the CLI requires a short wait time in order to allow the
+syndic(s) to gather responses from their minions. This value is defined by the
+``syndic_wait`` option and has a default of five seconds.
+
+While it is possible to run a syndic without a minion installed on the same machine,
+it is recommended, for a faster CLI response time, to install one. Without a minion
+installed on the syndic, the timeout value of ``syndic_wait`` increases
+significantly - about three-fold. With a minion installed on the syndic, the CLI
+timeout resides at the value defined in ``syndic_wait``.
+
+.. note::
+
+    To reduce the amount of time the CLI waits for minions to respond, install a minion
+    on the syndic or tune the value of the ``syndic_wait`` configuration.
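For context on the setting referenced above: ``syndic_wait`` is set in the master
configuration file of the higher-level master. A minimal sketch, assuming the stock
``/etc/salt/master`` location (the value shown is simply the documented five-second
default, not a recommendation):

    # /etc/salt/master on the higher-level master
    # Five seconds is the default; raise it if syndic returns arrive late
    syndic_wait: 5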

View File

@@ -805,6 +805,14 @@ class LocalClient(object):
        event bus for non-return data, such as minion lists returned from
        syndics.
        '''
+        tag_search = []
+        tag_search.append(jid)
+        if isinstance(additional_tags, str):
+            tag_search.append(additional_tags)
+        elif isinstance(additional_tags, list):
+            for tag in additional_tags:
+                tag_search.append(tag)
        if event is None:
            event = self.event

View File

@@ -2301,6 +2301,7 @@ class MultiSyndic(MinionBase):
        self.syndic_mode = self.opts.get('syndic_mode', 'sync')
        self._has_master = threading.Event()
+        self.jid_forward_cache = set()

        # create all of the syndics you need
        self.master_syndics = {}

@@ -2534,10 +2535,19 @@ class MultiSyndic(MinionBase):
            jdict['__fun__'] = event['data'].get('fun')
            jdict['__jid__'] = event['data']['jid']
            jdict['__load__'] = {}
-            fstr = '{0}.get_jid'.format(self.opts['master_job_cache'])
-            jdict['__load__'].update(
-                self.mminion.returners[fstr](event['data']['jid'])
-                )
+            fstr = '{0}.get_load'.format(self.opts['master_job_cache'])
+            # Only need to forward each load once. Don't hit the disk
+            # for every minion return!
+            if event['data']['jid'] not in self.jid_forward_cache:
+                jdict['__load__'].update(
+                    self.mminion.returners[fstr](event['data']['jid'])
+                    )
+                self.jid_forward_cache.add(event['data']['jid'])
+                if len(self.jid_forward_cache) > self.opts['syndic_jid_forward_cache_hwm']:
+                    # Pop the oldest jid from the cache
+                    tmp = sorted(list(self.jid_forward_cache))
+                    tmp.pop(0)
+                    self.jid_forward_cache = set(tmp)
            if 'master_id' in event['data']:
                # __'s to make sure it doesn't print out on the master cli
                jdict['__master_id__'] = event['data']['master_id']

View File

@@ -392,7 +392,7 @@ def get_containers(all=True,
    status['host'] = {}
    status['host']['interfaces'] = __salt__['network.interfaces']()

-    containers = ret = client.containers(all=all,
+    containers = client.containers(all=all,
                                   trunc=trunc,
                                   since=since,
                                   before=before,

@@ -401,20 +401,13 @@
    # Optionally for each container get more granular information from them
    # by inspecting the container
    if inspect:
-        ret = []
        for container in containers:
            container_id = container.get('Id')
            if container_id:
                inspect = _get_container_infos(container_id)
-                container['detail'] = {}
-                for key, value in inspect.items():
-                    container['detail'][key] = value
-                ret.append(container)
+                container['detail'] = inspect.copy()

-    if ret:
-        _valid(status, comment='All containers in out', out=ret)
-    else:
-        _invalid(status)
+    _valid(status, comment='All containers in out', out=containers)

    return status

View File

@@ -84,7 +84,7 @@ def enable_server(name, backend, socket='/var/run/haproxy.sock'):
    ha_conn = _get_conn(socket)
    ha_cmd = haproxy.cmds.enableServer(server=name, backend=backend)
    ha_conn.sendCmd(ha_cmd)
-    return list_servers(backend)
+    return list_servers(backend, socket=socket)


def disable_server(name, backend, socket='/var/run/haproxy.sock'):

@@ -107,7 +107,7 @@ def disable_server(name, backend, socket='/var/run/haproxy.sock'):
    ha_conn = _get_conn(socket)
    ha_cmd = haproxy.cmds.disableServer(server=name, backend=backend)
    ha_conn.sendCmd(ha_cmd)
-    return list_servers(backend)
+    return list_servers(backend, socket=socket)


def get_weight(name, backend, socket='/var/run/haproxy.sock'):

View File

@@ -189,14 +189,13 @@ def extracted(name,
        tar_cmd = ['tar']
        tar_shortopts = 'x'
        tar_longopts = []
-        tar_afterfile = []

        for position, opt in enumerate(tar_opts):
            if opt.startswith('-'):
                tar_longopts.append(opt)
            else:
                if position > 0:
-                    tar_afterfile.append(opt)
+                    tar_longopts.append(opt)
                else:
                    append_opt = opt
                    append_opt = append_opt.replace('x', '').replace('f', '')

@@ -205,7 +204,6 @@
        tar_cmd.append(tar_shortopts)
        tar_cmd.extend(tar_longopts)
        tar_cmd.extend(['-f', filename])
-        tar_cmd.extend(tar_afterfile)

        results = __salt__['cmd.run_all'](tar_cmd, cwd=name, python_shell=False)

        if results['retcode'] != 0:

View File

@@ -37,6 +37,10 @@ external_auth:
      - '@wheel'
      - '@runner'
      - test.*
+    saltops%:
+      - '@wheel'
+      - '@runner'
+      - 'test.*'
  auto:
    saltdev_auto:
      - '@wheel'

View File

@@ -7,18 +7,37 @@
# Import python libs
import os
import pwd
+import grp
import random

# Import Salt Testing libs
+from salttesting import skipIf
from salttesting.helpers import (
    ensure_in_syspath,
    destructiveTest)

ensure_in_syspath('../../')

# Import salt libs
+from salt.utils.pycrypto import gen_hash
import integration
-from salttesting import skipIf


+def gen_password():
+    '''
+    generate a password and hash it
+    '''
+    alphabet = ('abcdefghijklmnopqrstuvwxyz'
+                '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ')
+    password = ''
+    # generate password
+    for _ in range(20):
+        next_index = random.randrange(len(alphabet))
+        password += alphabet[next_index]
+
+    # hash the password
+    hashed_pwd = gen_hash('salt', password, 'sha512')
+
+    return (password, hashed_pwd)


class AuthTest(integration.ShellCase):
@@ -28,38 +47,38 @@ class AuthTest(integration.ShellCase):
    _call_binary_ = 'salt'

-    is_root = os.geteuid() != 0
+    is_not_root = os.geteuid() != 0
+    userA = 'saltdev'
+    userB = 'saltadm'
+    group = 'saltops'

    @destructiveTest
-    @skipIf(is_root, 'You must be logged in as root to run this test')
+    @skipIf(is_not_root, 'You must be logged in as root to run this test')
    def setUp(self):
        # This is a little wasteful but shouldn't be a problem
-        try:
-            pwd.getpwnam('saltdev')
-        except KeyError:
-            self.run_call('user.add saltdev createhome=False')
+        for user in (self.userA, self.userB):
+            try:
+                pwd.getpwnam(user)
+            except KeyError:
+                self.run_call('user.add {0} createhome=False'.format(user))
+
+        # only put userB into the group for the group auth test
+        try:
+            grp.getgrnam(self.group)
+        except KeyError:
+            self.run_call('group.add {0}'.format(self.group))
+        self.run_call('user.chgroups {0} {1} True'.format(self.userB, self.group))

    def test_pam_auth_valid_user(self):
        '''
        test pam auth mechanism is working with a valid user
        '''
-        alphabet = ('abcdefghijklmnopqrstuvwxyz'
-                    '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ')
-        self.password = ''
-        # generate password
-        for _ in range(20):
-            next_index = random.randrange(len(alphabet))
-            self.password = self.password + alphabet[next_index]
-        # hash the password
-        from salt.utils.pycrypto import gen_hash
-        pwd = gen_hash('salt', self.password, 'sha512')
-        self.run_call("shadow.set_password saltdev '{0}'".format(pwd))
-        cmd = ('-a pam "*"'
-               ' test.ping --username {0}'
-               ' --password {1}'.format('saltdev', self.password))
+        password, hashed_pwd = gen_password()
+        self.run_call("shadow.set_password {0} '{1}'".format(self.userA, hashed_pwd))
+        cmd = ('-a pam "*" test.ping '
+               '--username {0} --password {1}'.format(self.userA, password))
        resp = self.run_salt(cmd)
        self.assertTrue(
            'minion:' in resp
@@ -69,19 +88,35 @@ class AuthTest(integration.ShellCase):
        '''
        test pam auth mechanism errors for an invalid user
        '''
-        cmd = ('-a pam'
-               ' * test.ping --username nouser'
-               ' --password {0}'.format('abcd1234'))
+        cmd = ('-a pam "*" test.ping '
+               '--username nouser --password {0}'.format('abcd1234'))
        resp = self.run_salt(cmd)
        self.assertTrue(
            'Failed to authenticate' in ''.join(resp)
        )

+    def test_pam_auth_valid_group(self):
+        '''
+        test pam auth mechanism success for a valid group
+        '''
+        password, hashed_pwd = gen_password()
+        self.run_call("shadow.set_password {0} '{1}'".format(self.userB, hashed_pwd))
+        cmd = ('-a pam "*" test.ping '
+               '--username {0} --password {1}'.format(self.userB, password))
+        resp = self.run_salt(cmd)
+        self.assertTrue(
+            'minion:' in resp
+        )
+
    @destructiveTest
-    @skipIf(is_root, 'You must be logged in as root to run this test')
+    @skipIf(is_not_root, 'You must be logged in as root to run this test')
    def test_zzzz_tearDown(self):
-        if pwd.getpwnam('saltdev'):
-            self.run_call('user.delete saltdev')
+        for user in (self.userA, self.userB):
+            if pwd.getpwnam(user):
+                self.run_call('user.delete {0}'.format(user))
+        if grp.getgrnam(self.group):
+            self.run_call('group.delete {0}'.format(self.group))


if __name__ == '__main__':

View File

@@ -0,0 +1,81 @@
# -*- coding: utf-8 -*-
'''
unit tests for the archive state
'''

# Import Python Libs
import os
import tempfile

# Import Salt Libs
from salt.states import archive

# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
    NO_MOCK,
    NO_MOCK_REASON,
    MagicMock,
    patch
)

ensure_in_syspath('../../')

archive.__opts__ = {}
archive.__salt__ = {}
archive.__env__ = 'test'


@skipIf(NO_MOCK, NO_MOCK_REASON)
class ArchiveTest(TestCase):
    '''
    Validate the archive state
    '''
    def test_extracted_tar(self):
        '''
        archive.extracted tar options
        '''
        source = 'file.tar.gz'
        tmp_dir = os.path.join(tempfile.gettempdir(), 'test_archive')
        test_tar_opts = [
            '--no-anchored foo',
            'v -p --opt',
            '-v -p',
            '--long-opt -z',
            'z -v -weird-long-opt arg',
        ]
        ret_tar_opts = [
            ['tar', 'x', '--no-anchored', 'foo', '-f'],
            ['tar', 'xv', '-p', '--opt', '-f'],
            ['tar', 'x', '-v', '-p', '-f'],
            ['tar', 'x', '--long-opt', '-z', '-f'],
            ['tar', 'xz', '-v', '-weird-long-opt', 'arg', '-f'],
        ]

        mock_true = MagicMock(return_value=True)
        mock_false = MagicMock(return_value=False)
        ret = {'stdout': ['saltines', 'cheese'], 'stderr': 'biscuits', 'retcode': '31337', 'pid': '1337'}
        mock_run = MagicMock(return_value=ret)

        with patch('os.path.exists', mock_true):
            with patch.dict(archive.__opts__, {'test': False,
                                               'cachedir': tmp_dir}):
                with patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
                                                   'file.file_exists': mock_false,
                                                   'file.makedirs': mock_true,
                                                   'cmd.run_all': mock_run}):
                    for test_opts, ret_opts in zip(test_tar_opts, ret_tar_opts):
                        ret = archive.extracted(tmp_dir,
                                                source,
                                                'tar',
                                                tar_options=test_opts)
                        ret_opts.append(os.path.join(tmp_dir, 'files/test/_tmp_test_archive.tar'))
                        mock_run.assert_called_with(ret_opts, cwd=tmp_dir, python_shell=False)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(ArchiveTest)

View File

@@ -3,7 +3,7 @@
    :codeauthor: :email:`Nicole Thomas (nicole@saltstack.com)`
'''

-# Import Pyhton Libs
+# Import Python Libs
from inspect import ArgSpec

# Import Salt Libs