Merge remote-tracking branch 'upstream/develop' into sam_raet_12

This commit is contained in:
Samuel M Smith 2014-02-26 12:53:58 -07:00
commit 94314ed8ff
31 changed files with 693 additions and 135 deletions

View File

@ -1466,6 +1466,12 @@ source_file = _build/locale/ref/renderers/all/salt.renderers.pydsl.pot
source_lang = en
source_name = ref/renderers/all/salt.renderers.pydsl.rst
[salt.ref--renderers--all--salt_renderers_pyobjects]
file_filter = locale/<lang>/LC_MESSAGES/ref/renderers/all/salt.renderers.pyobjects.po
source_file = _build/locale/ref/renderers/all/salt.renderers.pyobjects.pot
source_lang = en
source_name = ref/renderers/all/salt.renderers.pyobjects.rst
[salt.ref--renderers--all--salt_renderers_stateconf]
file_filter = locale/<lang>/LC_MESSAGES/ref/renderers/all/salt.renderers.stateconf.po
source_file = _build/locale/ref/renderers/all/salt.renderers.stateconf.pot

View File

@ -933,6 +933,60 @@ Defines which branch/tag should be used as the ``base`` environment.
gitfs_base: salt
.. conf_master:: gitfs_env_whitelist
``gitfs_env_whitelist``
***********************
.. versionadded:: Helium
Default: ``[]``
Used to restrict which environments are made available. Can speed up state runs
if your gitfs remotes contain many branches/tags. Full names, globs, and
regular expressions are accepted.
If used, only branches/tags/SHAs which match one of the specified expressions
will be exposed as fileserver environments.
If used in conjunction with :conf_master:`gitfs_env_blacklist`, then the subset
of branches/tags/SHAs which match the whitelist but do *not* match the
blacklist will be exposed as fileserver environments.
.. code-block:: yaml
gitfs_env_whitelist:
- base
- v1.*
- 'mybranch\d+'
.. conf_master:: gitfs_env_blacklist
``gitfs_env_blacklist``
***********************
.. versionadded:: Helium
Default: ``[]``
Used to restrict which environments are made available. Can speed up state runs
if your gitfs remotes contain many branches/tags. Full names, globs, and
regular expressions are accepted.
If used, branches/tags/SHAs which match one of the specified expressions will
*not* be exposed as fileserver environments.
If used in conjunction with :conf_master:`gitfs_env_whitelist`, then the subset
of branches/tags/SHAs which match the whitelist but do *not* match the
blacklist will be exposed as fileserver environments.
.. code-block:: yaml
gitfs_env_blacklist:
- base
- v1.*
- 'mybranch\d+'
hg: Mercurial Remote File Server Backend
----------------------------------------

View File

@ -327,16 +327,19 @@ full programming constructs are available when creating SLS files.
Other renderers available are ``yaml_mako`` and ``yaml_wempy`` which each use
the `Mako`_ or `Wempy`_ templating system respectively rather than the jinja
templating system, and more notably, the pure Python or ``py`` and ``pydsl``
renderers.
templating system, and more notably, the pure Python or ``py``, ``pydsl`` &
``pyobjects`` renderers.
The ``py`` renderer allows for SLS files to be written in pure Python,
allowing for the utmost level of flexibility and power when preparing SLS
data; while the :doc:`pydsl</ref/renderers/all/salt.renderers.pydsl>` renderer
provides a flexible, domain-specific language for authoring SLS data in Python.
provides a flexible, domain-specific language for authoring SLS data in Python;
and the :doc:`pyobjects</ref/renderers/all/salt.renderers.pyobjects>` renderer
gives you a `"Pythonic"`_ interface to building state data.
.. _`Jinja2`: http://jinja.pocoo.org/
.. _`Mako`: http://www.makotemplates.org/
.. _`Wempy`: https://fossil.secution.com/u/gcw/wempy/doc/tip/README.wiki
.. _`"Pythonic"`: http://legacy.python.org/dev/peps/pep-0008/
.. note::
The templating engines described above aren't just available in SLS files.
@ -467,8 +470,8 @@ and set them up to be mounted, and the ``salt`` object is used multiple
times to call shell commands to gather data.
Introducing the Python and the PyDSL Renderers
----------------------------------------------
Introducing the Python, PyDSL and the Pyobjects Renderers
---------------------------------------------------------
Sometimes the chosen default renderer might not have enough logical power to
accomplish the needed task. When this happens, the Python renderer can be
@ -499,8 +502,6 @@ must be a Salt friendly data structure, or better known as a Salt
Alternatively, using the :doc:`pydsl</ref/renderers/all/salt.renderers.pydsl>`
renderer, the above example can be written more succinctly as:
``python/django.sls:``
.. code-block:: python
#!pydsl
@ -508,6 +509,17 @@ renderer, the above example can be written more succinctly as:
include('python', delayed=True)
state('django').pkg.installed()
The :doc:`pyobjects</ref/renderers/all/salt.renderers.pyobjects>` renderer
provides a `"Pythonic"`_ object-based approach to building the state data.
The above example could be written as:
.. code-block:: python
#!pyobjects
include('python')
Pkg.installed("django")
These Python examples would look like this if they were written in YAML:

View File

@ -13,7 +13,7 @@ HERE=$(pwd)
mv /opt/local /opt/local.backup ; hash -r
cd /
curl http://pkgsrc.joyent.com/packages/SmartOS/bootstrap/bootstrap-2013Q3-x86_64.tar.gz | gtar xz
curl http://pkgsrc.joyent.com/packages/SmartOS/bootstrap/bootstrap-2013Q4-x86_64.tar.gz | gtar xz
hash -r
pkgin -y up
@ -23,13 +23,8 @@ pkgin -y rm salt
cd /opt/local/bin
curl -kO 'https://us-east.manta.joyent.com/nahamu/public/smartos/bins/patchelf'
chmod +x patchelf
cat >swig <<"EOF"
#!/bin/bash
exec /opt/local/bin/swig2.0 -I/opt/local/include "$@"
EOF
pip install esky
yes | pip uninstall bbfreeze
pip install esky bbfreeze
cd $HERE
curl -kO 'https://pypi.python.org/packages/source/b/bbfreeze-loader/bbfreeze-loader-1.1.0.zip'
@ -41,16 +36,17 @@ $COMPILE -c bbfreeze-loader-1.1.0/_bbfreeze_loader/getpath.c -o $HERE/getpath.o
gcc $HERE/console.o $HERE/getpath.o /opt/local/lib/python2.7/config/libpython2.7.a -L/opt/local/lib -L/opt/local/lib/python2.7/config -L/opt/local/lib -lsocket -lnsl -ldl -lrt -lm -static-libgcc -o $HERE/console.exe
patchelf --set-rpath '$ORIGIN:$ORIGIN/../lib' $HERE/console.exe
git clone git://github.com/schmir/bbfreeze -b master
( cd $HERE/bbfreeze && easy_install-2.7 . )
find /opt/local -name console.exe -exec mv $HERE/console.exe {} \;
git clone git://github.com/saltstack/salt -b 0.17
( cd $HERE/salt && python2.7 setup.py bdist && python2.7 setup.py bdist_esky )
mv /opt/local /opt/local.build ; hash -r
mv /opt/local.backup /opt/local ; hash -r
git clone git://github.com/saltstack/salt -b 2014.1
cd $HERE/salt
pip install -r requirements.txt
# packages not in main requirements file that are nice to have
pip install -r pkg/smartos/esky/requirements.txt
bash pkg/smartos/esky/build-tarball.sh
# Upload packages into Manta
pkgin -y in sdc-manta
mmkdir -p /$MANTA_USER/public/salt
mput /$MANTA_USER/public/salt -f $(ls salt/dist/*.zip)
for file in dist/salt*; do mput -m /$MANTA_USER/public/salt -f $file; done;
```

View File

@ -0,0 +1,3 @@
GitPython==0.3.2.RC1
halite
cherrypy

View File

@ -71,6 +71,9 @@ class Master(parsers.MasterOptionParser):
os.path.join(self.config['pki_dir'], 'minions'),
os.path.join(self.config['pki_dir'], 'minions_pre'),
os.path.join(self.config['pki_dir'], 'minions_denied'),
os.path.join(self.config['pki_dir'], 'accepted'),
os.path.join(self.config['pki_dir'], 'pending'),
os.path.join(self.config['pki_dir'], 'rejected'),
os.path.join(self.config['pki_dir'],
'minions_rejected'),
self.config['cachedir'],

View File

@ -326,6 +326,7 @@ def create(vm_):
key_filename = config.get_cloud_config_value(
'ssh_key_file', vm_, __opts__, search_global=False, default=None
)
key_filename = os.path.expanduser(key_filename)
if key_filename is not None and not os.path.isfile(key_filename):
raise SaltCloudConfigError(
'The defined ssh_key_file {0!r} does not exist'.format(

View File

@ -137,6 +137,8 @@ VALID_OPTS = {
'gitfs_mountpoint': str,
'gitfs_root': str,
'gitfs_base': str,
'gitfs_env_whitelist': list,
'gitfs_env_blacklist': list,
'hgfs_remotes': list,
'hgfs_mountpoint': str,
'hgfs_root': str,
@ -253,6 +255,7 @@ DEFAULT_MINION_OPTS = {
'providers': {},
'clean_dynamic_modules': True,
'open_mode': False,
'auto_accept': True,
'multiprocessing': True,
'mine_interval': 60,
'ipc_mode': 'ipc',
@ -337,6 +340,8 @@ DEFAULT_MASTER_OPTS = {
'gitfs_mountpoint': '',
'gitfs_root': '',
'gitfs_base': 'master',
'gitfs_env_whitelist': [],
'gitfs_env_blacklist': [],
'hgfs_remotes': [],
'hgfs_mountpoint': '',
'hgfs_root': '',

View File

@ -1,18 +1,16 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
'''
Runs minion floscript
"""
'''
# pylint: skip-file
import os
import salt.daemons.flo
import ioflo.app.run
FLO_DIR_PATH = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
, 'flo')
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'flo'
)
def test():

View File

@ -1,18 +1,16 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
'''
Runs minion floscript
'''
# pylint: skip-file
"""
import os
import salt.daemons.flo
import ioflo.app.run
FLO_DIR_PATH = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
, 'flo')
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'flo'
)
def test():

View File

@ -323,15 +323,20 @@ _dulwich_env_refs = lambda refs: [x for x in refs
def _get_tree_gitpython(repo, short):
'''
Return a git.Tree object if the branch/tag/SHA is found, otherwise False
Return a git.Tree object if the branch/tag/SHA is found, otherwise None
'''
for ref in repo.refs:
if isinstance(ref, (git.RemoteReference, git.TagReference)):
parted = ref.name.partition('/')
refname = parted[2] if parted[2] else parted[0]
if short == refname:
return ref.commit.tree
if short in envs():
for ref in repo.refs:
if isinstance(ref, (git.RemoteReference, git.TagReference)):
parted = ref.name.partition('/')
rspec = parted[2] if parted[2] else parted[0]
rspec = rspec.replace('/', '_')
if rspec == short:
return ref.commit.tree
# Branch or tag not matched, check if 'short' is a commit
if not _env_is_exposed(short):
return None
try:
commit = repo.rev_parse(short)
except gitdb.exc.BadObject:
@ -343,16 +348,21 @@ def _get_tree_gitpython(repo, short):
def _get_tree_pygit2(repo, short):
'''
Return a pygit2.Tree object if the branch/tag/SHA is found, otherwise False
Return a pygit2.Tree object if the branch/tag/SHA is found, otherwise None
'''
for ref in repo.listall_references():
_, rtype, rspec = ref.split('/', 2)
if rtype in ('remotes', 'tags'):
parted = rspec.partition('/')
refname = parted[2] if parted[2] else parted[0]
if short == refname:
return repo.lookup_reference(ref).get_object().tree
if short in envs():
for ref in repo.listall_references():
_, rtype, rspec = ref.split('/', 2)
if rtype in ('remotes', 'tags'):
parted = rspec.partition('/')
rspec = parted[2] if parted[2] else parted[0]
rspec = rspec.replace('/', '_')
if rspec == short and _env_is_exposed(rspec):
return repo.lookup_reference(ref).get_object().tree
# Branch or tag not matched, check if 'short' is a commit
if not _env_is_exposed(short):
return None
try:
commit = repo.revparse_single(short)
except (KeyError, TypeError):
@ -366,35 +376,39 @@ def _get_tree_pygit2(repo, short):
def _get_tree_dulwich(repo, short):
'''
Return a dulwich.objects.Tree object if the branch/tag/SHA is found,
otherwise False
otherwise None
'''
refs = repo.get_refs()
# Sorting ensures we check heads (branches) before tags
for ref in sorted(_dulwich_env_refs(refs)):
# ref will be something like 'refs/heads/master'
rtype, rspec = ref[5:].split('/')
if rspec == short:
if rtype == 'heads':
commit = repo.get_object(refs[ref])
elif rtype == 'tags':
tag = repo.get_object(refs[ref])
if isinstance(tag, dulwich.objects.Tag):
# Tag.get_object() returns a 2-tuple, the 2nd element of
# which is the commit SHA to which the tag refers
commit = repo.get_object(tag.object[1])
elif isinstance(tag, dulwich.objects.Commit):
commit = tag
else:
log.error(
'Unhandled object type {0!r} in _get_tree_dulwich. '
'This is a bug, please report it.'
.format(tag.type_name)
)
return repo.get_object(commit.tree)
if short in envs():
refs = repo.get_refs()
# Sorting ensures we check heads (branches) before tags
for ref in sorted(_dulwich_env_refs(refs)):
# ref will be something like 'refs/heads/master'
rtype, rspec = ref[5:].split('/', 1)
rspec = rspec.replace('/', '_')
if rspec == short and _env_is_exposed(rspec):
if rtype == 'heads':
commit = repo.get_object(refs[ref])
elif rtype == 'tags':
tag = repo.get_object(refs[ref])
if isinstance(tag, dulwich.objects.Tag):
# Tag.get_object() returns a 2-tuple, the 2nd element
# of which is the commit SHA to which the tag refers
commit = repo.get_object(tag.object[1])
elif isinstance(tag, dulwich.objects.Commit):
commit = tag
else:
log.error(
'Unhandled object type {0!r} in '
'_get_tree_dulwich. This is a bug, please report '
'it.'.format(tag.type_name)
)
return repo.get_object(commit.tree)
# Branch or tag not matched, check if 'short' is a commit. This is more
# difficult with Dulwich because of its inability to deal with shortened
# SHA-1 hashes.
if not _env_is_exposed(short):
return None
try:
int(short, 16)
except ValueError:
@ -407,18 +421,18 @@ def _get_tree_dulwich(repo, short):
if isinstance(sha_obj, dulwich.objects.Commit):
sha_commit = sha_obj
else:
matches = [
matches = set([
x for x in (
repo.get_object(x) for x in repo.object_store
if x.startswith(short)
)
if isinstance(x, dulwich.objects.Commit)
]
])
if len(matches) > 1:
log.warning('Ambiguous commit ID {0!r}'.format(short))
return None
try:
sha_commit = matches[0]
sha_commit = matches.pop()
except IndexError:
pass
except TypeError as exc:
@ -802,6 +816,18 @@ def update():
pass
def _env_is_exposed(env):
'''
Check if an environment is exposed by comparing it against a whitelist and
blacklist.
'''
return salt.utils.check_whitelist_blacklist(
env,
whitelist=__opts__['gitfs_env_whitelist'],
blacklist=__opts__['gitfs_env_blacklist']
)
def envs(ignore_cache=False):
'''
Return a list of refs that can be used as environments
@ -841,14 +867,15 @@ def _envs_gitpython(repo, base_branch):
remote = repo.remotes[0]
for ref in repo.refs:
parted = ref.name.partition('/')
short = parted[2] if parted[2] else parted[0]
rspec = parted[2] if parted[2] else parted[0]
rspec = rspec.replace('/', '_')
if isinstance(ref, git.Head):
if short == base_branch:
short = 'base'
if ref not in remote.stale_refs:
ret.add(short)
elif isinstance(ref, git.Tag):
ret.add(short)
if rspec == base_branch:
rspec = 'base'
if ref not in remote.stale_refs and _env_is_exposed(rspec):
ret.add(rspec)
elif isinstance(ref, git.Tag) and _env_is_exposed(rspec):
ret.add(rspec)
return ret
@ -863,15 +890,17 @@ def _envs_pygit2(repo, base_branch):
for ref in repo.listall_references():
ref = re.sub('^refs/', '', ref)
rtype, rspec = ref.split('/', 1)
if rtype == 'tags':
ret.add(rspec)
elif rtype == 'remotes':
if rtype == 'remotes':
if rspec not in stale_refs:
parted = rspec.partition('/')
short = parted[2] if parted[2] else parted[0]
if short == base_branch:
short = 'base'
ret.add(short)
rspec = parted[2] if parted[2] else parted[0]
rspec = rspec.replace('/', '_')
if rspec == base_branch:
rspec = 'base'
if _env_is_exposed(rspec):
ret.add(rspec)
elif rtype == 'tags' and _env_is_exposed(rspec):
ret.add(rspec)
return ret
@ -884,11 +913,13 @@ def _envs_dulwich(repo, base_branch):
for ref in _dulwich_env_refs(repo.get_refs()):
# ref will be something like 'refs/heads/master'
rtype, rspec = ref[5:].split('/', 1)
if rtype == 'tags':
ret.add(rspec)
elif rtype == 'heads':
rspec = rspec.replace('/', '_')
if rtype == 'heads':
if rspec == base_branch:
rspec = 'base'
if _env_is_exposed(rspec):
ret.add(rspec)
elif rtype == 'tags' and _env_is_exposed(rspec):
ret.add(rspec)
return ret

View File

@ -24,7 +24,10 @@ class KeyCLI(object):
'''
def __init__(self, opts):
self.opts = opts
self.key = Key(opts)
if self.opts['transport'] == 'zeromq':
self.key = Key(opts)
else:
self.key = RaetKey(opts)
def list_status(self, status):
'''
@ -988,3 +991,36 @@ class RaetKey(Key):
path = os.path.join(self.opts['pki_dir'], status, key)
ret[status][key] = self._get_key_finger(path)
return ret
def read_remote(self, minion_id):
'''
Read in a remote accepted key
'''
path = os.path.join(self.opts['pki_dir'], 'accepted', minion_id)
if not os.path.isfile(path):
return {}
with salt.utils.fopen(path, 'rb') as fp_:
return self.serial.loads(fp_.read())
def read_local(self):
'''
Read in the local private keys, return an empty dict if the keys do not
exist
'''
path = os.path.join(self.opts['pki_dir'], 'local.key')
if not os.path.isfile(path):
return {}
with salt.utils.fopen(path, 'rb') as fp_:
return self.serial.loads(fp_.read())
def write_local(self, priv, sign):
'''
Write the private key and the signing key to a file on disk
'''
keydata = {'priv': priv,
'sign': sign}
path = os.path.join(self.opts['pki_dir'], 'local.key')
c_umask = os.umask(191)
with salt.utils.fopen(path, 'w+') as fp_:
fp_.write(self.serial.dumps(keydata))
os.umask(c_umask)
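# A hypothetical usage sketch of the RaetKey helpers above, assuming ``opts``
# carries a valid 'pki_dir'; the key material shown is placeholder data:
#
#     keys = RaetKey(opts)
#     keys.write_local(priv_keydata, sign_keydata)
#     keys.read_local()            # -> {'priv': ..., 'sign': ...}, or {} if absent
#     keys.read_remote('minion1')  # -> {} unless <pki_dir>/accepted/minion1 exists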

View File

@ -9,7 +9,6 @@ import os
import re
import time
import errno
import fnmatch
import signal
import shutil
import stat
@ -165,6 +164,25 @@ class Master(SMaster):
br, loc = opts_dict['git'].strip().split()
pillargitfs.append(git_pillar.GitPillar(br, loc, self.opts))
# Clear remote fileserver backend env cache so it gets recreated during
# the first loop_interval
for backend in ('git', 'hg', 'svn'):
if backend in self.opts['fileserver_backend']:
env_cache = os.path.join(
self.opts['cachedir'],
'{0}fs'.format(backend),
'envs.p'
)
if os.path.isfile(env_cache):
log.debug('Clearing {0}fs env cache'.format(backend))
try:
os.remove(env_cache)
except (IOError, OSError) as exc:
log.critical(
'Unable to clear env cache file {0}: {1}'
.format(env_cache, exc)
)
old_present = set()
while True:
now = int(time.time())
@ -1729,24 +1747,11 @@ class ClearFuncs(object):
with salt.utils.fopen(signing_file, 'r') as fp_:
for line in fp_:
line = line.strip()
if line.startswith('#'):
continue
if line == keyid:
return True
if fnmatch.fnmatch(keyid, line):
return True
try:
if re.match(r'\A{0}\Z'.format(line), keyid):
else:
if salt.utils.expr_match(keyid, line):
return True
except re.error:
log.warn(
'{0} is not a valid regular expression, ignoring line '
'in {1}'.format(line, signing_file)
)
continue
return False
def __check_autoreject(self, keyid):

View File

@ -94,7 +94,8 @@ def _render_tab(lst):
if cron['comment'] is not None or cron['identifier'] is not None:
comment = '#'
if cron['comment']:
comment += ' {0}'.format(cron['comment'])
comment += ' {0}'.format(
cron['comment'].rstrip().replace('\n', '\n# '))
if cron['identifier']:
comment += ' {0}:{1}'.format(SALT_CRON_IDENTIFIER,
cron['identifier'])
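# Illustrative sketch (assumed input): a comment of 'line one\nline two' with
# identifier 'job1' would be rendered into the crontab as:
#   # line one
#   # line two SALT_CRON_IDENTIFIER:job1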

View File

@ -2635,7 +2635,8 @@ def manage_file(name,
else:
if not os.path.isdir(os.path.dirname(name)):
if makedirs:
makedirs(name, user=user, group=group, mode=mode)
makedirs(name, user=user, group=group,
mode=dir_mode or mode)
else:
__clean_tmp(sfn)
return _error(ret, 'Parent directory not present')

View File

@ -192,6 +192,10 @@ def build_rule(table=None, chain=None, command=None, position='', full=None, fam
after_jump.append('--to-destination {0} '.format(kwargs['to-destination']))
del kwargs['to-destination']
if 'reject-with' in kwargs:
after_jump.append('--reject-with {0} '.format(kwargs['reject-with']))
del kwargs['reject-with']
for item in kwargs:
if len(item) == 1:
rule += '-{0} {1} '.format(item, kwargs[item])
@ -881,7 +885,10 @@ def _parser():
add_arg('--string', dest='string', action='append')
add_arg('--hex-string', dest='hex-string', action='append')
## tcp
add_arg('--tcp-flags', dest='tcp-flags', action='append')
if sys.version.startswith('2.6'):
add_arg('--tcp-flags', dest='tcp-flags', action='append')
else:
add_arg('--tcp-flags', dest='tcp-flags', action='append', nargs='*')
add_arg('--syn', dest='syn', action='append')
add_arg('--tcp-option', dest='tcp-option', action='append')
## tcpmss

salt/modules/nagios.py Normal file
View File

@ -0,0 +1,255 @@
# -*- coding: utf-8 -*-
'''
Run nagios plugins/checks from salt and get the return as data.
'''
# Import python libs
import os
import stat
# Import salt libs
import logging
log = logging.getLogger(__name__)
PLUGINDIR = '/usr/lib/nagios/plugins/'
def __virtual__():
'''
Only load if nagios-plugins are installed
'''
if os.path.isdir('/usr/lib/nagios/'):
return 'nagios'
return False
def _execute_cmd(plugin, args='', run_type='cmd.retcode', key_name=None):
'''
Execute a nagios plugin, if it is present in the plugin directory, using the salt command specified in run_type
'''
data = {}
all_plugins = list_plugins()
if plugin in all_plugins:
data = __salt__[run_type]('{0}{1} {2}'.format(PLUGINDIR, plugin, args))
return data
def _execute_pillar(pillar_name, run_type):
'''
Run one or more nagios plugins from pillar data and get the result of run_type
The pillar data has to be in this format:
------
webserver:
Ping_google:
- check_icmp:8.8.8.8
- check_icmp:google.com
Load:
- check_load:-w 0.8 -c 1
APT:
- check_apt
-------
'''
groups = __salt__['pillar.get'](pillar_name)
data = {}
for group in groups:
data[group] = {}
commands = groups[group]
for command in commands:
# Check if the command is a dict, to get its arguments;
# if not, set the arguments to an empty string
if isinstance(command, dict):
plugin = command.keys()[0]
args = command[plugin]
else:
plugin = command
args = ''
command_key = _format_dict_key(args, plugin)
data[group][command_key] = run_type(plugin, args, group)
return data
def _format_dict_key(args, plugin):
key_name = plugin
args_key = args.replace(' ', '')
if args != '':
args_key = '_' + args_key
key_name = plugin + args_key
return key_name
def run(plugin, args='', key_name=None):
'''
Run a nagios plugin and return all the execution data, using cmd.run
'''
data = _execute_cmd(plugin, args, 'cmd.run', key_name)
return data
def retcode(plugin, args='', key_name=None):
'''
Run one nagios plugin and return the retcode of the execution
CLI Example:
.. code-block:: bash
salt '*' nagios.retcode check_apt
salt '*' nagios.retcode check_icmp '8.8.8.8'
'''
data = {}
# Remove all spaces; the key must not contain any spaces
if key_name is None:
key_name = _format_dict_key(args, plugin)
data[key_name] = {}
status = _execute_cmd(plugin, args, 'cmd.retcode', key_name)
data[key_name]['status'] = status
return data
def run_all(plugin, args='', key_name=None):
'''
Run a nagios plugin and return all the execution data, using cmd.run_all
'''
data = _execute_cmd(plugin, args, 'cmd.run_all', key_name)
return data
def retcode_pillar(pillar_name):
'''
Run one or more nagios plugins from pillar data and get the result of cmd.retcode
The pillar data has to be in this format:
------
webserver:
Ping_google:
- check_icmp:8.8.8.8
- check_icmp:google.com
Load:
- check_load:-w 0.8 -c 1
APT:
- check_apt
-------
webserver is the role to check; the following keys are the groups, and the items are the checks to run, with their arguments if needed
You must group one or more checks together; each group always returns the highest status value of all of its checks
CLI Example:
.. code-block:: bash
salt '*' nagios.retcode_pillar webserver
'''
groups = __salt__['pillar.get'](pillar_name)
check = {}
data = {}
for group in groups:
commands = groups[group]
for command in commands:
# Check if the command is a dict, to get its arguments;
# if not, set the arguments to an empty string
if isinstance(command, dict):
plugin = command.keys()[0]
args = command[plugin]
else:
plugin = command
args = ''
check.update(retcode(plugin, args, group))
current_value = 0
new_value = int(check[group]['status'])
if group in data:
current_value = int(data[group]['status'])
if (new_value > current_value) or (group not in data):
if group not in data:
data[group] = {}
data[group]['status'] = new_value
return data
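# Illustrative note: with the pillar layout shown above, a group whose checks
# return 0 and 2 is reported with status 2, since only the highest retcode per
# group is kept.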
def run_pillar(pillar_name):
'''
Run one or more nagios plugins from pillar data and get the result of cmd.run
The pillar data has to be in this format:
------
webserver:
Ping_google:
- check_icmp:8.8.8.8
- check_icmp:google.com
Load:
- check_load:-w 0.8 -c 1
APT:
- check_apt
-------
webserver is the role to check; the following keys are the groups, and the items are the checks to run, with their arguments if needed
The different checks have to be grouped together under a group
CLI Example:
.. code-block:: bash
salt '*' nagios.run_pillar webserver
'''
data = _execute_pillar(pillar_name, run)
return data
def run_all_pillar(pillar_name):
'''
Run one or more nagios plugins from pillar data and get the result of cmd.run_all
The pillar data has to be in this format:
------
webserver:
Ping_google:
- check_icmp:8.8.8.8
- check_icmp:google.com
Load:
- check_load:-w 0.8 -c 1
APT:
- check_apt
-------
webserver is the role to check; the following keys are the groups, and the items are the checks to run, with their arguments if needed
The different checks have to be grouped together under a group
CLI Example:
.. code-block:: bash
salt '*' nagios.run_all_pillar webserver
'''
data = _execute_pillar(pillar_name, run_all)
return data
def list_plugins():
'''
List all the nagios plugins
CLI Example:
.. code-block:: bash
salt '*' nagios.list_plugins
'''
plugin_list = os.listdir(PLUGINDIR)
ret = []
for plugin in plugin_list:
# Check if the execute bit is set
stat_f = os.path.join(PLUGINDIR, plugin)
execute_bit = stat.S_IXUSR & os.stat(stat_f)[stat.ST_MODE]
if execute_bit:
ret.append(plugin)
return ret

View File

@ -128,7 +128,8 @@ def set_zone(timezone):
'/etc/sysconfig/clock', '^ZONE=.*', 'ZONE="{0}"'.format(timezone))
elif 'Debian' in __grains__['os_family']:
with salt.utils.fopen('/etc/timezone', 'w') as ofh:
ofh.write(timezone)
ofh.write(timezone.strip())
ofh.write('\n')
elif 'Gentoo' in __grains__['os_family']:
with salt.utils.fopen('/etc/timezone', 'w') as ofh:
ofh.write(timezone)

View File

@ -105,9 +105,14 @@ def update_git_repos():
#else:
#targetname = gitrepo
targetname = gitrepo
rev = None
# If a revision is specified, use it.
if len(gitrepo.strip().split(' ')) > 1:
rev, gitrepo = gitrepo.strip().split(' ')
gittarget = os.path.join(repo, targetname)
#result = mminion.states['git.latest'](gitrepo,
result = __salt__['git.latest'](gitrepo,
rev=rev,
target=gittarget,
force=True)
ret[result['name']] = result['result']

View File

@ -9,6 +9,7 @@ example that ensures the ``/tmp`` directory is in the correct state.
.. code-block:: python
:linenos:
#!pyobjects
File.managed("/tmp", user='root', group='root', mode='1777')
@ -46,6 +47,7 @@ core of what makes pyobjects the best way to write states.
.. code-block:: python
:linenos:
#!pyobjects
with Pkg.installed("nginx"):
@ -66,6 +68,7 @@ The above could have also been written use direct requisite statements as.
.. code-block:: python
:linenos:
#!pyobjects
Pkg.installed("nginx")
@ -80,6 +83,7 @@ generated outside of the current file.
.. code-block:: python
:linenos:
#!pyobjects
# some-other-package is defined in some other state file
@ -91,6 +95,7 @@ watch_in, use & use_in) when using the requisite as a context manager.
.. code-block:: python
:linenos:
#!pyobjects
with Service("my-service", "watch_in"):
@ -111,6 +116,7 @@ a state.
.. code-block:: python
:linenos:
#!pyobjects
include('http', 'ssh')
@ -130,6 +136,7 @@ The following lines are functionally equivalent:
.. code-block:: python
:linenos:
#!pyobjects
ret = salt.cmd.run(bar)
@ -148,6 +155,7 @@ The following pairs of lines are functionally equivalent:
.. code-block:: python
:linenos:
#!pyobjects
value = pillar('foo:bar:baz', 'qux')

View File

@ -50,7 +50,7 @@ def over(saltenv='base', os_fn=None):
return overstate.over_run
def sls(mods, saltenv='base', test=None, exclude=None):
def sls(mods, saltenv='base', test=None, exclude=None, pillar=None):
'''
Execute a state run from the master, used as a powerful orchestration
system.
@ -64,7 +64,12 @@ def sls(mods, saltenv='base', test=None, exclude=None):
'''
__opts__['file_client'] = 'local'
minion = salt.minion.MasterMinion(__opts__)
running = minion.functions['state.sls'](mods, saltenv, test, exclude)
running = minion.functions['state.sls'](
mods,
saltenv,
test,
exclude,
pillar=pillar)
ret = {minion.opts['id']: running}
salt.output.display_output(ret, 'highstate', opts=__opts__)
return ret

View File

@ -91,8 +91,13 @@ def update_git_repos():
targetname = gitrepo.split('/')[-1]
else:
targetname = gitrepo
rev = None
# If a revision is specified, use it.
if len(gitrepo.strip().split(' ')) > 1:
rev, gitrepo = gitrepo.strip().split(' ')
gittarget = os.path.join(repo, targetname)
result = mminion.states['git.latest'](gitrepo,
rev=rev,
target=gittarget,
force=True)
ret[result['name']] = result['result']

View File

@ -1380,11 +1380,9 @@ class State(object):
if len(cdata['args']) > 0:
name = cdata['args'][0]
elif 'name' in cdata['kwargs']:
name = cdata['kwargs'].get(
'name',
low.get('name',
low.get('__id__'))
)
name = cdata['kwargs']['name']
else:
name = low.get('name', low.get('__id__'))
ret = {
'result': False,
'name': name,

View File

@ -213,7 +213,7 @@ def latest(name,
identity=identity)
elif rev:
cmd = 'git rev-parse {0} ^{{commit}}'.format(rev)
cmd = 'git rev-parse {0}'.format(rev)
retcode = __salt__['cmd.retcode'](cmd,
cwd=target,
runas=user)

View File

@ -296,7 +296,7 @@ def insert(name, family='ipv4', **kwargs):
for ignore in _STATE_INTERNAL_KEYWORDS:
if ignore in kwargs:
del kwargs[ignore]
rule = __salt__['iptables.build_rule'](family, **kwargs)
rule = __salt__['iptables.build_rule'](family=family, **kwargs)
command = __salt__['iptables.build_rule'](full=True, family=family, command='I', **kwargs)
if __salt__['iptables.check'](kwargs['table'],
kwargs['chain'],
@ -450,7 +450,7 @@ def set_policy(name, family='ipv4', **kwargs):
family):
ret['changes'] = {'locale': name}
ret['result'] = True
ret['comment'] = 'Set default policy for {0} to {1} family {2]'.format(
ret['comment'] = 'Set default policy for {0} to {1} family {2}'.format(
kwargs['chain'],
kwargs['policy'],
family
@ -487,7 +487,7 @@ def flush(name, family='ipv4', **kwargs):
if not __salt__['iptables.flush'](kwargs['table'], kwargs['chain'], family):
ret['changes'] = {'locale': name}
ret['result'] = True
ret['comment'] = 'Flush iptables rules in {0} table {1} chain {2] family'.format(
ret['comment'] = 'Flush iptables rules in {0} table {1} chain {2} family'.format(
kwargs['table'],
kwargs['chain'],
family

View File

@ -1074,6 +1074,69 @@ def flopen(*args, **kwargs):
fcntl.flock(fp_.fileno(), fcntl.LOCK_UN)
def expr_match(expr, line):
'''
Evaluate a line of text against an expression. First try a full-string
match, next try globbing, and then try to match assuming expr is a regular
expression. Originally designed to match minion IDs for
whitelists/blacklists.
'''
if line == expr:
return True
if fnmatch.fnmatch(line, expr):
return True
try:
if re.match(r'\A{0}\Z'.format(expr), line):
return True
except re.error:
pass
return False
def check_whitelist_blacklist(value, whitelist=None, blacklist=None):
'''
Check a whitelist and/or blacklist to see if the value matches it.
'''
if not any((whitelist, blacklist)):
return True
in_whitelist = False
in_blacklist = False
if whitelist:
try:
for expr in whitelist:
if expr_match(expr, value):
in_whitelist = True
break
except TypeError:
log.error('Non-iterable whitelist {0}'.format(whitelist))
whitelist = None
else:
whitelist = None
if blacklist:
try:
for expr in blacklist:
if expr_match(expr, value):
in_blacklist = True
break
except TypeError:
log.error('Non-iterable blacklist {0}'.format(blacklist))
blacklist = None
else:
blacklist = None
if whitelist and not blacklist:
ret = in_whitelist
elif blacklist and not whitelist:
ret = not in_blacklist
elif whitelist and blacklist:
ret = in_whitelist and not in_blacklist
else:
ret = True
return ret
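# Illustrative sketch of the helpers above: expr_match() accepts an exact
# string, a glob, or a regular expression, so for example:
#   check_whitelist_blacklist('v1.5', whitelist=['base', 'v1.*'])   # True
#   check_whitelist_blacklist('v1.5', whitelist=['base', 'v1.*'],
#                             blacklist=['v1.5'])                    # False
#   check_whitelist_blacklist('anything')                            # True (no lists given)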
def subdict_match(data, expr, delim=':', regex_match=False):
'''
Check for a match in a dictionary using a delimiter character to denote

View File

@ -2176,13 +2176,6 @@ class SaltSSHOptionParser(OptionParser, ConfigDirMixIn, MergeConfigMixIn,
'been changed and the auto refresh timeframe has not been '
'reached.')
)
self.add_option(
'-v', '--verbose',
default=False,
action='store_true',
help=('Turn on command verbosity, display jid')
)
self.add_option(
'--max-procs',
dest='ssh_max_procs',

View File

@ -22,6 +22,12 @@ from distutils.command.clean import clean
from distutils.command.sdist import sdist
# pylint: enable=E0611
try:
import zmq
HAS_ZMQ = True
except ImportError:
HAS_ZMQ = False
# Change to salt source's directory prior to running any command
try:
SETUP_DIRNAME = os.path.dirname(__file__)
@ -517,6 +523,13 @@ FREEZER_INCLUDES = [
'email.mime.*',
]
if HAS_ZMQ and zmq.pyzmq_version_info() >= (0, 14):
# We're freezing, and when freezing ZMQ needs to be installed, so this
# works fine
if 'zmq.core.*' in FREEZER_INCLUDES:
# For PyZMQ >= 0.14, freezing does not need 'zmq.core.*'
FREEZER_INCLUDES.remove('zmq.core.*')
if IS_WINDOWS_PLATFORM:
FREEZER_INCLUDES.extend([
'win32api',

View File

@ -77,3 +77,6 @@ class MacGroupTestCase(TestCase):
self.assertEqual(mac_group._format_info(data), ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(MacGroupTestCase, needs_daemon=False)

View File

@ -5,8 +5,11 @@
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import MagicMock, patch
ensure_in_syspath('../../')
# Import Salt Libs
from salt.modules import mac_user
from salt.exceptions import SaltInvocationError, CommandExecutionError
@ -272,7 +275,7 @@ class MacUserTestCase(TestCase):
@patch('salt.modules.mac_user.info', MagicMock(return_value=mock_info_ret))
@patch('salt.modules.mac_user.list_groups',
MagicMock(return_value={'wheel', 'root'}))
MagicMock(return_value=('wheel', 'root')))
def test_chgroups_same_desired(self):
'''
Tests if the user's list of groups is the same as the arguments
@ -324,3 +327,8 @@ class MacUserTestCase(TestCase):
'''
ret = ['_amavisd', '_appleevents', '_appowner']
self.assertEqual(mac_user.list_users(), ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(MacUserTestCase, needs_daemon=False)

View File

@ -76,6 +76,14 @@ def write_crontab(*args, **kw):
@skipIf(NO_MOCK, NO_MOCK_REASON)
class CronTestCase(TestCase):
def setUp(self):
super(CronTestCase, self).setUp()
set_crontab('')
def tearDown(self):
super(CronTestCase, self).tearDown()
set_crontab('')
@patch('salt.modules.cron.raw_cron',
new=MagicMock(side_effect=get_crontab))
@patch('salt.modules.cron._write_cron_lines',
@ -178,6 +186,42 @@ class CronTestCase(TestCase):
'# Lines below here are managed by Salt, do not edit'
)
@patch('salt.modules.cron.raw_cron',
new=MagicMock(side_effect=get_crontab))
@patch('salt.modules.cron._write_cron_lines',
new=MagicMock(side_effect=write_crontab))
def test_aissue_1072(self):
(
'# Lines below here are managed by Salt, do not edit\n'
'# I have a multi-line comment SALT_CRON_IDENTIFIER:1\n'
'* 1 * * * foo'
)
cron.present(
name='foo',
hour='1',
comment='1I have a multi-line comment\n2about my script here.\n',
identifier='1',
user='root')
cron.present(
name='foo',
hour='1',
comment='3I have a multi-line comment\n3about my script here.\n',
user='root')
cron.present(
name='foo',
hour='1',
comment='I have a multi-line comment\nabout my script here.\n',
identifier='2',
user='root')
self.assertEqual(
get_crontab(),
'# Lines below here are managed by Salt, do not edit\n'
'# 2about my script here. SALT_CRON_IDENTIFIER:1\n'
'* 1 * * * foo\n'
'# I have a multi-line comment\n'
'# about my script here. SALT_CRON_IDENTIFIER:2\n'
'* 1 * * * foo')
if __name__ == '__main__':
from integration import run_tests