Merge remote-tracking branch 'upstream/develop' into develop

This commit is contained in:
Mickey Malone 2013-09-20 19:26:03 -05:00
commit e46fae6a2e
73 changed files with 498 additions and 172 deletions

View File

@ -27,7 +27,7 @@ persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
load-plugins=salttesting.pylintplugins.pep263
[MESSAGES CONTROL]

View File

@ -14,6 +14,8 @@ init-hook="
# Pickle collected data for later comparisons.
persistent=no
load-plugins=salttesting.pylintplugins.pep263
[MESSAGES CONTROL]

View File

@ -1,6 +1,8 @@
Frequently Asked Questions
==========================
.. contents:: FAQ
Is Salt open-core?
------------------
@ -19,8 +21,8 @@ Minions need to be able to connect to the Master on TCP ports 4505 and 4506.
Minions do not need any inbound ports open. More detailed information on
firewall settings can be found :doc:`here </topics/tutorials/firewall>`.
My script runs every time I run a :mod:`state.highstate <salt.modules.state.highstate>`. Why?
---------------------------------------------------------------------------------------------
My script runs every time I run a *state.highstate*. Why?
---------------------------------------------------------
You are probably using :mod:`cmd.run <salt.states.cmd.run>` rather than
:mod:`cmd.wait <salt.states.cmd.wait>`. A :mod:`cmd.wait
@ -34,13 +36,13 @@ arguments).
More details can be found in the documentation for the :mod:`cmd
<salt.states.cmd>` states.
When I run :mod:`test.ping <salt.modules.test.ping>`, why don't the Minions that aren't responding return anything? Returning ``False`` would be helpful.
---------------------------------------------------------------------------------------------------------------------------------------------------------
When I run *test.ping*, why don't the Minions that aren't responding return anything? Returning ``False`` would be helpful.
---------------------------------------------------------------------------------------------------------------------------
The reason for this is because the Master tells Minions to run
commands/functions, and listens for the return data, printing it to the screen
when it is received. If it doesn't receive anything back, it doesn't have
anything to display for that Minion.
When you run *test.ping* the Master tells Minions
to run commands/functions, and listens for the return data, printing it to the
screen when it is received. If it doesn't receive anything back, it doesn't
have anything to display for that Minion.
There are a couple options for getting information on Minions that are not
responding. One is to use the verbose (``-v``) option when you run salt

View File

@ -13,6 +13,7 @@ Full list of builtin pillar modules
cmd_json
cmd_yaml
cobbler
django_orm
git_pillar
hiera
libvirt

View File

@ -0,0 +1,6 @@
======================
salt.pillar.django_orm
======================
.. automodule:: salt.pillar.django_orm
:members:

View File

@ -143,9 +143,11 @@ else:
from urllib2 import HTTPBasicAuthHandler as url_auth_handler
from urllib2 import build_opener as url_build_opener
from urllib2 import install_opener as url_install_opener
def url_unquote_text(v, encoding='utf-8', errors='replace'):
v = url_unquote(v)
return v.decode(encoding, errors)
def url_unquote_native(v, encoding='utf-8', errors='replace'):
return native_(url_unquote_text(v, encoding, errors))

View File

@ -33,6 +33,7 @@ def __virtual__():
else:
return False
def auth(username, password):
'''
Try and authenticate

View File

@ -18,6 +18,7 @@ import salt.syspaths as syspaths
from salt.exceptions import SaltException, EauthAuthenticationError
from salt.utils.event import tagify
def tokenify(cmd, token=None):
'''
If token is not None Then assign token to 'token' key of cmd dict
@ -102,17 +103,17 @@ class APIClient(object):
eauth: the authentication type such as 'pam' or 'ldap'. Required if token is missing
'''
client = 'minion' #default to local minion client
mode = cmd.get('mode', 'async') #default to 'async'
client = 'minion' # default to local minion client
mode = cmd.get('mode', 'async') # default to 'async'
# check for wheel or runner prefix to fun name to use wheel or runner client
funparts = cmd.get('fun', '').split('.')
if len(funparts) > 2 and funparts[0] in ['wheel', 'runner']: #master
if len(funparts) > 2 and funparts[0] in ['wheel', 'runner']: # master
client = funparts[0]
cmd['fun'] = '.'.join(funparts[1:]) #strip prefix
cmd['fun'] = '.'.join(funparts[1:]) # strip prefix
if not ('token' in cmd or
('eauth' in cmd and 'password' in cmd and 'username' in cmd) ):
if not ('token' in cmd or
('eauth' in cmd and 'password' in cmd and 'username' in cmd)):
raise EauthAuthenticationError('No authentication credentials given')
executor = getattr(self, '{0}_{1}'.format(client, mode))
@ -145,7 +146,7 @@ class APIClient(object):
'''
return self.runnerClient.master_call(**kwargs)
runner_sync = runner_async # always runner async, so works in either mode
runner_sync = runner_async # always runner async, so works in either mode
def wheel_sync(self, **kwargs):
'''
@ -155,7 +156,7 @@ class APIClient(object):
'''
return self.wheelClient.master_call(**kwargs)
wheel_async = wheel_sync # always wheel_sync, so it works either mode
wheel_async = wheel_sync # always wheel_sync, so it works either mode
def signature(self, cmd):
'''
@ -188,7 +189,7 @@ class APIClient(object):
eauth: the authentication type such as 'pam' or 'ldap'. Required if token is missing
'''
result = {}
result = {}
client = cmd.get('client', 'minion')
if client == 'minion':
@ -198,12 +199,12 @@ class APIClient(object):
elif client == 'master':
parts = cmd['module'].split('.')
client = parts[0]
module = '.'.join(parts[1:]) #strip prefix
module = '.'.join(parts[1:]) # strip prefix
if client == 'wheel':
functions = self.wheelClient.w_funcs
elif client == 'runner':
elif client == 'runner':
functions = self.runnerClient.functions
result = salt.utils.argspec_report(functions, module)
result = salt.utils.argspec_report(functions, module)
return result
def create_token(self, creds):
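As a hedged sketch of the routing shown in the hunks above (the key names come straight from the diff; the runner function and credentials are placeholders only): a command dict whose 'fun' carries a 'runner.' or 'wheel.' prefix is dispatched to the matching master-side client with the prefix stripped.

# Sketch only: mirrors the funparts check from the diff above.
cmd = {
    'fun': 'runner.jobs.list_jobs',   # placeholder runner function name
    'mode': 'async',                  # the default picked up by cmd.get('mode', 'async')
    'eauth': 'pam',                   # eauth + username + password stand in for a token
    'username': 'example-user',
    'password': 'example-pass',
}
funparts = cmd.get('fun', '').split('.')
if len(funparts) > 2 and funparts[0] in ['wheel', 'runner']:
    client = funparts[0]                  # 'runner'
    cmd['fun'] = '.'.join(funparts[1:])   # 'jobs.list_jobs'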

View File

@ -39,6 +39,7 @@ class FunctionWrapper(object):
'''
if cmd in self.wfuncs:
return self.wfuncs[cmd]
def caller(*args, **kwargs):
'''
The remote execution function

View File

@ -82,4 +82,3 @@ def raw(key=None):
# Allow pillar.data to also be used to return pillar data
items = raw
data = items

View File

@ -84,12 +84,14 @@ class SaltReqTimeoutError(SaltException):
Thrown when a salt master request call fails to return within the timeout
'''
class TimedProcTimeoutError(SaltException):
'''
Thrown when a timed subprocess does not terminate within the timeout,
or if the specified timeout is not an int or a float
'''
class EauthAuthenticationError(SaltException):
'''
Thrown when eauth authentication fails

View File

@ -12,9 +12,9 @@ import logging
# Import salt libs
import salt.loader
log = logging.getLogger(__name__)
def generate_mtime_map(path_map):
'''
Generate a dict of filename -> mtime
@ -28,6 +28,7 @@ def generate_mtime_map(path_map):
file_map[file_path] = os.path.getmtime(file_path)
return file_map
def diff_mtime_map(map1, map2):
'''
Is there a change to the mtime map? return a boolean
@ -46,6 +47,7 @@ def diff_mtime_map(map1, map2):
log.debug('diff_mtime_map: the maps are the same')
return False
def reap_fileserver_cache_dir(cache_base, find_func):
'''
Remove unused cache items assuming the cache directory follows a directory convention:
@ -57,7 +59,7 @@ def reap_fileserver_cache_dir(cache_base, find_func):
for root, dirs, files in os.walk(env_base):
# if we have an empty directory, lets cleanup
# This will only remove the directory on the second time "_reap_cache" is called (which is intentional)
if len(dirs) == 0 and len (files) == 0:
if len(dirs) == 0 and len(files) == 0:
os.rmdir(root)
continue
# if not, lets check the files in the directory
@ -71,6 +73,7 @@ def reap_fileserver_cache_dir(cache_base, find_func):
if ret['path'] == '':
os.unlink(file_path)
def is_file_ignored(opts, fname):
'''
If file_ignore_regex or file_ignore_glob were given in config,
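A hedged, standalone sketch of the mtime-map idea in this file's hunks (Salt's real generate_mtime_map and diff_mtime_map also handle multiple path roots and ignore rules); the core of the technique is just os.walk plus os.path.getmtime:

import os

def build_mtime_map(base_dir):
    # walk one directory tree and record a modification time per file
    fmap = {}
    for root, _dirs, files in os.walk(base_dir):
        for fname in files:
            fpath = os.path.join(root, fname)
            fmap[fpath] = os.path.getmtime(fpath)
    return fmap

def mtime_maps_differ(old_map, new_map):
    # a changed mtime or a changed file count both count as a difference
    if len(old_map) != len(new_map):
        return True
    return any(old_map.get(path) != mtime for path, mtime in new_map.items())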

View File

@ -14,6 +14,7 @@ import salt.fileserver
import salt.utils
from salt.utils.event import tagify
def find_file(path, env='base', **kwargs):
'''
Search the environment for the relative path

View File

@ -4,6 +4,7 @@ Simple grain to merge the opts into the grains directly if the grain_opts
configuration value is set
'''
def opts():
'''
Return the minion configuration settings

View File

@ -15,6 +15,7 @@ import salt.utils
import salt.utils.event
from salt.utils.event import tagify
class KeyCLI(object):
'''
Manage key CLI operations

View File

@ -537,7 +537,7 @@ class ReqServer(object):
else:
log.info('Halite: Not starting. '
'Package available is {0}. '
'Opts for "halite" exists is {1}.'\
'Opts for "halite" exists is {1}.'
.format(HAS_HALITE, 'halite' in self.opts))
def run(self):
@ -1082,8 +1082,8 @@ class AESFuncs(object):
return False
if 'events' in load:
for event in load['events']:
self.event.fire_event(event, event['tag']) # old dup event
if load.get('pretag') != None:
self.event.fire_event(event, event['tag']) # old dup event
if load.get('pretag') is not None:
self.event.fire_event(event, tagify(event['tag'], base=load['pretag']))
else:
tag = load['tag']
@ -1106,7 +1106,7 @@ class AESFuncs(object):
self.opts['hash_type'],
load.get('nocache', False))
log.info('Got return from {id} for job {jid}'.format(**load))
self.event.fire_event(load, load['jid']) # old dup event
self.event.fire_event(load, load['jid']) # old dup event
self.event.fire_event(load, tagify([load['jid'], 'ret', load['id']], 'job'))
self.event.fire_ret_load(load)
if self.opts['master_ext_job_cache']:
@ -1682,7 +1682,7 @@ class ClearFuncs(object):
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return ret
elif os.path.isfile(pubfn):
# The key has been accepted check it
@ -1697,7 +1697,7 @@ class ClearFuncs(object):
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return ret
elif not os.path.isfile(pubfn_pend)\
and not self._check_autosign(load['id']):
@ -1711,7 +1711,7 @@ class ClearFuncs(object):
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return ret
# This is a new key, stick it in pre
log.info(
@ -1725,7 +1725,7 @@ class ClearFuncs(object):
'act': 'pend',
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return ret
elif os.path.isfile(pubfn_pend)\
and not self._check_autosign(load['id']):
@ -1740,7 +1740,7 @@ class ClearFuncs(object):
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
else:
@ -1753,7 +1753,7 @@ class ClearFuncs(object):
'act': 'pend',
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': True}}
elif os.path.isfile(pubfn_pend)\
@ -1768,7 +1768,7 @@ class ClearFuncs(object):
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
else:
@ -1783,7 +1783,7 @@ class ClearFuncs(object):
eload = {'result': False,
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return {'enc': 'clear',
'load': {'ret': False}}
@ -1841,7 +1841,7 @@ class ClearFuncs(object):
'act': 'accept',
'id': load['id'],
'pub': load['pub']}
self.event.fire_event(eload, tagify(prefix = 'auth'))
self.event.fire_event(eload, tagify(prefix='auth'))
return ret
def runner(self, clear_load):
@ -1964,8 +1964,8 @@ class ClearFuncs(object):
log.warning('Authentication failure of type "token" occurred.')
return ''
good = self.ckminions.wheel_check(
self.opts['external_auth'][token['eauth']][token['name']] \
if token['name'] in self.opts['external_auth'][token['eauth']] \
self.opts['external_auth'][token['eauth']][token['name']]
if token['name'] in self.opts['external_auth'][token['eauth']]
else self.opts['external_auth'][token['eauth']]['*'],
clear_load['fun'])
if not good:
@ -2025,8 +2025,8 @@ class ClearFuncs(object):
log.warning(msg)
return ''
good = self.ckminions.wheel_check(
self.opts['external_auth'][clear_load['eauth']][name] \
if name in self.opts['external_auth'][clear_load['eauth']] \
self.opts['external_auth'][clear_load['eauth']][name]
if name in self.opts['external_auth'][clear_load['eauth']]
else self.opts['external_auth'][token['eauth']]['*'],
clear_load['fun'])
if not good:
@ -2120,7 +2120,7 @@ class ClearFuncs(object):
for module_re in self.opts['client_acl_blacklist'].get('modules', []):
# if this is a regular command, its a single function
if type(clear_load['fun']) == str:
funs_to_check = [ clear_load['fun'] ]
funs_to_check = [clear_load['fun']]
# if this a compound function
else:
funs_to_check = clear_load['fun']
@ -2165,8 +2165,8 @@ class ClearFuncs(object):
log.warning('Authentication failure of type "token" occurred.')
return ''
good = self.ckminions.auth_check(
self.opts['external_auth'][token['eauth']][token['name']] \
if token['name'] in self.opts['external_auth'][token['eauth']] \
self.opts['external_auth'][token['eauth']][token['name']]
if token['name'] in self.opts['external_auth'][token['eauth']]
else self.opts['external_auth'][token['eauth']]['*'],
clear_load['fun'],
clear_load['tgt'],
@ -2206,8 +2206,8 @@ class ClearFuncs(object):
)
return ''
good = self.ckminions.auth_check(
self.opts['external_auth'][extra['eauth']][name] \
if name in self.opts['external_auth'][extra['eauth']] \
self.opts['external_auth'][extra['eauth']][name]
if name in self.opts['external_auth'][extra['eauth']]
else self.opts['external_auth'][extra['eauth']]['*'],
clear_load['fun'],
clear_load['tgt'],
@ -2317,9 +2317,9 @@ class ClearFuncs(object):
self.opts['hash_type']
)
new_job_load = {
new_job_load = {
'jid': clear_load['jid'],
'tgt_type':clear_load['tgt_type'],
'tgt_type': clear_load['tgt_type'],
'tgt': clear_load['tgt'],
'user': clear_load['user'],
'fun': clear_load['fun'],
@ -2328,7 +2328,7 @@ class ClearFuncs(object):
}
# Announce the job on the event bus
self.event.fire_event(new_job_load, 'new_job') # old dup event
self.event.fire_event(new_job_load, 'new_job') # old dup event
self.event.fire_event(new_job_load, tagify([clear_load['jid'], 'new'], 'job'))
# Verify the jid dir

View File

@ -168,4 +168,3 @@ def set_(name, path):
if out['retcode'] > 0:
return out['stderr']
return out['stdout']

View File

@ -957,8 +957,8 @@ def replace(path,
# Search the file; track if any changes have been made for the return val
has_changes = False
orig_file = [] # used if show_changes
new_file = [] # used if show_changes
orig_file = [] # used if show_changes
new_file = [] # used if show_changes
for line in fileinput.input(path,
inplace=not dry_run, backup=False if dry_run else backup,
bufsize=bufsize, mode='rb'):
@ -973,7 +973,7 @@ def replace(path,
result = re.sub(cpattern, repl, line, count)
# Identity check each potential change until one change is made
if has_changes == False and not result is line:
if has_changes is False and not result is line:
has_changes = True
if show_changes:
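The identity check above ('not result is line') relies on a CPython behaviour worth making explicit: re.sub returns the original string object when the pattern matches nothing, so object identity distinguishes "no substitution happened" from "a substitution was made". A small illustration, assuming CPython:

import re

line = 'no match in this line\n'
untouched = re.sub('foo', 'bar', line)
print(untouched is line)   # True on CPython: nothing matched, the same object is returned

changed = re.sub('match', 'hit', line)
print(changed is line)     # False: a substitution produced a new string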
@ -2233,6 +2233,7 @@ def makedirs_perms(name,
group,
int('{0}'.format(mode)) if mode else None)
def get_devmm(name):
'''
Get major/minor info from a device
@ -2273,6 +2274,7 @@ def is_chrdev(name):
raise
return stat.S_ISCHR(stat_structure.st_mode)
def mknod_chrdev(name,
major,
minor,
@ -2298,13 +2300,13 @@ def mknod_chrdev(name,
mode))
try:
if __opts__['test']:
ret['changes'] = {'new' : 'Character device {0} created.'.format(name)}
ret['changes'] = {'new': 'Character device {0} created.'.format(name)}
ret['result'] = None
else:
if os.mknod(name,
int(str(mode).lstrip('0'),8)|stat.S_IFCHR,
os.makedev(major,minor)) is None:
ret['changes'] = {'new' : 'Character device {0} created.'.format(name)}
int(str(mode).lstrip('0'), 8) | stat.S_IFCHR,
os.makedev(major, minor)) is None:
ret['changes'] = {'new': 'Character device {0} created.'.format(name)}
ret['result'] = True
except OSError as exc:
# be happy it is already there....however, if you are trying to change the
@ -2321,6 +2323,7 @@ def mknod_chrdev(name,
int('{0}'.format(mode)) if mode else None)
return ret
def is_blkdev(name):
'''
Check if a file exists and is a block device.
@ -2342,6 +2345,7 @@ def is_blkdev(name):
raise
return stat.S_ISBLK(stat_structure.st_mode)
def mknod_blkdev(name,
major,
minor,
@ -2367,13 +2371,13 @@ def mknod_blkdev(name,
mode))
try:
if __opts__['test']:
ret['changes'] = {'new' : 'Block device {0} created.'.format(name)}
ret['changes'] = {'new': 'Block device {0} created.'.format(name)}
ret['result'] = None
else:
if os.mknod(name,
int(str(mode).lstrip('0'),8)|stat.S_IFBLK,
os.makedev(major,minor)) is None:
ret['changes'] = {'new' : 'Block device {0} created.'.format(name)}
int(str(mode).lstrip('0'), 8) | stat.S_IFBLK,
os.makedev(major, minor)) is None:
ret['changes'] = {'new': 'Block device {0} created.'.format(name)}
ret['result'] = True
except OSError as exc:
# be happy it is already there....however, if you are trying to change the
@ -2390,6 +2394,7 @@ def mknod_blkdev(name,
int('{0}'.format(mode)) if mode else None)
return ret
def is_fifo(name):
'''
Check if a file exists and is a FIFO.
@ -2411,6 +2416,7 @@ def is_fifo(name):
raise
return stat.S_ISFIFO(stat_structure.st_mode)
def mknod_fifo(name,
user=None,
group=None,
@ -2431,11 +2437,11 @@ def mknod_fifo(name,
log.debug("Creating FIFO name:{0}".format(name))
try:
if __opts__['test']:
ret['changes'] = {'new' : 'Fifo pipe {0} created.'.format(name)}
ret['changes'] = {'new': 'Fifo pipe {0} created.'.format(name)}
ret['result'] = None
else:
if os.mkfifo(name, int(str(mode).lstrip('0'), 8)) is None:
ret['changes'] = {'new' : 'Fifo pipe {0} created.'.format(name)}
ret['changes'] = {'new': 'Fifo pipe {0} created.'.format(name)}
ret['result'] = True
except OSError as exc:
#be happy it is already there
@ -2451,6 +2457,7 @@ def mknod_fifo(name,
int('{0}'.format(mode)) if mode else None)
return ret
def mknod(name,
ntype,
major=0,
@ -2497,6 +2504,7 @@ def mknod(name,
raise Exception("Node type unavailable: '{0}'. Available node types are character ('c'), block ('b'), and pipe ('p').".format(ntype))
return ret
def list_backups(path, limit=None):
'''
.. note::

View File

@ -24,6 +24,7 @@ def __virtual__():
return False
return 'git'
def _git_ssh_helper(identity):
'''
Returns the path to a helper script which can be used in the GIT_SSH env
@ -110,6 +111,16 @@ def _check_git():
utils.check_or_die('git')
def current_branch(cwd, user=None):
'''
Returns the current branch name, if on a branch.
'''
cmd = 'git branch --list | grep "^*\ " | cut -d " " -f 2 | ' + \
'grep -v "(detached"'
return __salt__['cmd.run_stdout'](cmd, cwd=cwd, runas=user)
def revision(cwd, rev='HEAD', short=False, user=None):
'''
Returns the long hash of a given identifier (hash, branch, tag, HEAD, etc)

View File

@ -156,6 +156,7 @@ def get(tgt, fun, expr_form='glob'):
ret = sreq.send('aes', auth.crypticle.dumps(load))
return auth.crypticle.loads(ret)
def delete(fun):
'''
Remove specific function contents of minion. Returns True on success.
@ -176,6 +177,7 @@ def delete(fun):
ret = sreq.send('aes', auth.crypticle.dumps(load))
return auth.crypticle.loads(ret)
def flush():
'''
Remove all mine contents of minion. Returns True on success.

View File

@ -227,6 +227,7 @@ def workers(profile='default'):
return ret
def recover_all(lbn, profile='default'):
'''
Set the all the workers in lbn to recover and activate them if they are not

View File

@ -6,6 +6,7 @@ Module for gathering and managing information about MooseFS
# Import salt libs
import salt.utils
def __virtual__():
'''
Only load if the mfs commands are installed

View File

@ -10,6 +10,7 @@ __func_alias__ = {
'list_': 'list'
}
def __virtual__():
'''
Only load the module if apache is installed

View File

@ -9,6 +9,7 @@ import re
# Import salt libs
import salt.utils
def __virtual__():
'''
Only works on Windows systems

View File

@ -956,9 +956,9 @@ def set_replication_enabled(status, host=None, core_name=None):
return ret
else:
if status:
return _replication_request(cmd, host=host, core_name=core_name)
return _replication_request(cmd, host=host, core_name=core_name)
else:
return _replication_request(cmd, host=host, core_name=core_name)
return _replication_request(cmd, host=host, core_name=core_name)
def signal(signal=None):

View File

@ -529,9 +529,9 @@ def recv_known_host(hostname, enc=None, port=None, hash_hostname=False):
'''
# The following list of OSes have an old version of openssh-clients
# and thus require the '-t' option for ssh-keyscan
need_dash_t = ['CentOS-5',]
need_dash_t = ['CentOS-5']
chunks = ['ssh-keyscan', ]
chunks = ['ssh-keyscan']
if port:
chunks += ['-p', str(port)]
if enc:

View File

@ -397,6 +397,7 @@ def status(cwd, targets=None, user=None, username=None, password=None, *opts):
opts += tuple(shlex.split(targets))
return _run_svn('status', cwd, user, username, password, opts)
def export(cwd,
remote,
target=None,

View File

@ -17,6 +17,7 @@ try:
except ImportError:
pass
def __virtual__():
'''
Only works on Windows systems

View File

@ -39,7 +39,7 @@ def _normalize_dir(string):
'''
Normalize the directory to make comparison possible
'''
return re.sub(r'\\$', '', string.lower())
return re.sub(r'\\$', '', string.lower())
def rehash():

View File

@ -72,7 +72,7 @@ def get_disabled():
for line in lines:
if 'DEMAND_START' in line:
ret.add(service)
elif 'DISABLED' in line:
elif 'DISABLED' in line:
ret.add(service)
return sorted(ret)

View File

@ -8,6 +8,7 @@ from numbers import Number
# Import salt libs
import salt.utils
class NestDisplay(object):
'''
Manage the nested display contents
@ -71,6 +72,7 @@ class NestDisplay(object):
out = self.display(val, indent + 4, '', out)
return out
def output(ret):
'''
Display ret data

View File

@ -3,9 +3,11 @@
Display no output.
'''
def __virtual__():
return 'quiet'
def output(ret):
'''
Don't display data. Used when you only are interested in the

View File

@ -3,8 +3,10 @@
Display output for minions that did not return
'''
# Import salt libs
import salt.utils
class NestDisplay(object):
'''
Create generator for nested output
@ -37,6 +39,7 @@ class NestDisplay(object):
out = self.display(val, indent + 4, '', out)
return out
def output(ret):
'''
Display ret data

View File

@ -3,6 +3,7 @@
virt.query outputter
'''
def output(data):
'''
Display output for the salt-run virt.query function

View File

@ -385,7 +385,7 @@ class Pillar(object):
Render the external pillar data
'''
if not 'ext_pillar' in self.opts:
return {}
return {}
if not isinstance(self.opts['ext_pillar'], list):
log.critical('The "ext_pillar" option is malformed')
return {}

salt/pillar/django_orm.py (new normal file, 186 lines)
View File

@ -0,0 +1,186 @@
# -*- coding: utf-8 -*-
'''
Generate pillar data from Django models through the Django ORM
:maintainer: Micah Hausler <micah.hausler@gmail.com>
:maturity: new
Configuring the django_orm ext_pillar
======================================
To use this module, your Django project must be on the saltmaster server with
database access. This assumes you are using virtualenv with all the project's
requirements installed.
.. code-block:: yaml
ext_pillar:
- django_orm:
pillar_name: my_application
env: /path/to/virtualenv/
project_path: /path/to/project/
env_file: /path/to/env/file.sh
settings_module: my_application.settings
django_app:
# Required: the app that is included in INSTALLED_APPS
my_application.clients:
# Required: the model name
Client:
# Required: model field to use as a name in the
# rendered pillar, should be unique
name: shortname
# Optional:
# See the Django QuerySet documentation for how to use .filter()
filter: {'kw': 'args'}
# Required: a list of field names
fields:
- field_1
- field_2
This would return pillar data that looks like:
.. code-block:: yaml
my_application:
my_application.clients:
Client:
client_1:
field_1: data_from_field_1
field_2: data_from_field_2
client_2:
field_1: data_from_field_1
field_2: data_from_field_2
Module Documentation
====================
'''
import logging
import os
import sys
HAS_VIRTUALENV = False
try:
import virtualenv
HAS_VIRTUALENV = True
except ImportError:
pass
log = logging.getLogger(__name__)
def __virtual__():
if not HAS_VIRTUALENV:
log.warn('virtualenv not installed, please install first')
return False
return 'django_orm'
def ext_pillar(minion_id,
pillar,
pillar_name,
env,
project_path,
settings_module,
django_app,
env_file=None,
*args,
**kwargs):
'''
Connect to a django database through the ORM and retrieve model fields
Parameters:
* `pillar_name`: The name of the pillar to be returned
* `env`: The full path to the virtualenv for your django project
* `project_path`: The full path to your django project (the directory
manage.py is in.)
* `settings_module`: The settings module for your project. This can be
found in your manage.py file.
* `django_app`: A dictionary containing your apps, models, and fields
* `env_file`: A bash file that sets up your environment. The file is
run in a subprocess and the changed variables are then added.
'''
if not os.path.isdir(project_path):
log.error('Django project dir: \'{}\' not a directory!'.format(project_path))
return {}
for path in virtualenv.path_locations(env):
if not os.path.isdir(path):
log.error('Virtualenv {} not a directory!'.format(path))
return {}
# load the virtualenv
sys.path.append(virtualenv.path_locations(env)[1] + '/site-packages/')
# load the django project
sys.path.append(project_path)
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
if env_file is not None:
import subprocess
base_env = {}
proc = subprocess.Popen(['bash', '-c', 'env'], stdout=subprocess.PIPE)
for line in proc.stdout:
(key, _, value) = line.partition('=')
base_env[key] = value
command = ['bash', '-c', 'source {0} && env'.format(env_file)]
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
for line in proc.stdout:
(key, _, value) = line.partition('=')
# only add a key if it is different or doesn't already exist
if key not in base_env or base_env[key] != value:
os.environ[key] = value.rstrip('\n')
log.debug('Adding {} = {} to django environment'.format(
key,
value.rstrip('\n')))
try:
import importlib
django_pillar = {}
for app, models in django_app.iteritems():
django_pillar[app] = {}
model_file = importlib.import_module(app + '.models')
for model_name, model_meta in models.iteritems():
model_orm = model_file.__dict__[model_name]
django_pillar[app][model_orm.__name__] = {}
fields = model_meta['fields']
if 'filter' in model_meta.keys():
qs = model_orm.objects.filter(**model_meta['filter'])
else:
qs = model_orm.objects.all()
# Loop through records for the queryset
for model in qs:
django_pillar[app][model_orm.__name__][
model.__dict__[
model_meta['name']
]] = {}
for field in fields:
django_pillar[app][model_orm.__name__][
model.__dict__[
model_meta['name']
]][field] = model.__dict__[field]
return {pillar_name: django_pillar}
except ImportError as e:
log.error('Failed to import library: {}'.format(e.message))
return {}

View File

@ -57,6 +57,7 @@ def _get_ref(repo, short):
return ref
return False
def init(branch, repo_location):
'''
Return the git repo object for this session

View File

@ -13,6 +13,7 @@ import yaml
# Set up logging
log = logging.getLogger(__name__)
def ext_pillar(minion_id, pillar, command):
'''
Execute an unmodified puppet_node_classifier and read the output as YAML

View File

@ -48,6 +48,7 @@ setting the configuration option, like in the example above.
# not work. Thanks to the __virtual__ function, however, the plugin still
# responds to the name 'reclass'.
from salt.exceptions import SaltInvocationError
from salt.utils.reclass import (
prepend_reclass_source_path,
filter_out_source_path_option,
@ -76,8 +77,6 @@ def __virtual__(retry=False):
return __virtual__(retry=True)
from salt.exceptions import SaltInvocationError
def ext_pillar(minion_id, pillar, **kwargs):
'''
Obtain the Pillar data from **reclass** for the given ``minion_id``.

View File

@ -316,6 +316,7 @@ from salt.utils import pydsl
__all__ = ['render']
def render(template, env='', sls='', tmplpath=None, rendered_sls=None, **kws):
mod = imp.new_module(sls)
# Note: mod object is transient. It's existence only lasts as long as

View File

@ -9,22 +9,25 @@ Add the following configuration to your minion configuration files::
'''
# Import python libs
import pickle
import socket
import logging
import time
import struct
log = logging.getLogger(__name__)
def __virtual__():
return 'carbon'
def _formatHostname(hostname, separator='_'):
''' carbon uses . as separator, so replace this in the hostname '''
return hostname.replace('.', separator)
def _send_picklemetrics(metrics, carbon_sock):
''' Uses pickle protocol to send data '''
metrics = [(metric_name, (timestamp, value)) for (metric_name, timestamp, value) in metrics]

View File

@ -62,8 +62,6 @@ import sys
import json
import logging
log = logging.getLogger( __name__ )
# Import third party libs
try:
import MySQLdb
@ -71,12 +69,15 @@ try:
except ImportError:
HAS_MYSQL = False
log = logging.getLogger(__name__)
def __virtual__():
if not HAS_MYSQL:
return False
return 'mysql'
def _get_options():
'''
Returns options used for the MySQL connection.
@ -97,6 +98,7 @@ def _get_options():
return _options
@contextmanager
def _get_serv(commit=False):
'''

View File

@ -48,7 +48,6 @@ Use the commands to create the sqlite3 database and tables::
import logging
import json
import datetime
log = logging.getLogger(__name__)
# Better safe than sorry here. Even though sqlite3 is included in python
try:
@ -57,11 +56,15 @@ try:
except ImportError:
HAS_SQLITE3 = False
log = logging.getLogger(__name__)
def __virtual__():
if not HAS_SQLITE3:
return False
return 'sqlite3'
def _get_conn():
'''
Return a sqlite3 database connection
@ -79,9 +82,10 @@ def _get_conn():
__salt__['config.option']('returner.sqlite3.timeout')))
conn = sqlite3.connect(
__salt__['config.option']('returner.sqlite3.database'),
timeout = float(__salt__['config.option']('returner.sqlite3.timeout')))
timeout=float(__salt__['config.option']('returner.sqlite3.timeout')))
return conn
def _close_conn(conn):
'''
Close the sqlite3 database connection
@ -90,6 +94,7 @@ def _close_conn(conn):
conn.commit()
conn.close()
def returner(ret):
'''
Insert minion return data into the sqlite3 database
@ -101,14 +106,15 @@ def returner(ret):
(fun, jid, id, date, full_ret, success)
VALUES (:fun, :jid, :id, :date, :full_ret, :success)'''
cur.execute(sql,
{'fun':ret['fun'],
'jid':ret['jid'],
'id':ret['id'],
'date':str(datetime.datetime.now()),
'full_ret':json.dumps(ret['return']),
'success':ret['success']})
{'fun': ret['fun'],
'jid': ret['jid'],
'id': ret['id'],
'date': str(datetime.datetime.now()),
'full_ret': json.dumps(ret['return']),
'success': ret['success']})
_close_conn(conn)
def save_load(jid, load):
'''
Save the load to the specified jid
@ -119,10 +125,11 @@ def save_load(jid, load):
cur = conn.cursor()
sql = '''INSERT INTO jids (jid, load) VALUES (:jid, :load)'''
cur.execute(sql,
{'jid':jid,
'load':json.dumps(load)})
{'jid': jid,
'load': json.dumps(load)})
_close_conn(conn)
def get_load(jid):
'''
Return the load from a specified jid
@ -132,13 +139,14 @@ def get_load(jid):
cur = conn.cursor()
sql = '''SELECT load FROM jids WHERE jid = :jid'''
cur.execute(sql,
{'jid':jid})
{'jid': jid})
data = cur.fetchone()
if data:
return json.loads(data)
_close_conn(conn)
return {}
def get_jid(jid):
'''
Return the information returned from a specified jid
@ -148,16 +156,17 @@ def get_jid(jid):
cur = conn.cursor()
sql = '''SELECT id, full_ret FROM salt_returns WHERE jid = :jid'''
cur.execute(sql,
{'jid':jid})
{'jid': jid})
data = cur.fetchone()
log.debug('query result: {0}'.format(data))
ret = {}
if data and len(data) > 1:
ret = {str(data[0]):{u'return':json.loads(data[1])}}
ret = {str(data[0]): {u'return': json.loads(data[1])}}
log.debug("ret: {0}".format(ret))
_close_conn(conn)
return ret
def get_fun(fun):
'''
Return a dict of the last function called for all minions
@ -172,7 +181,7 @@ def get_fun(fun):
WHERE s.fun = :fun
'''
cur.execute(sql,
{'fun':fun})
{'fun': fun})
data = cur.fetchall()
ret = {}
if data:
@ -185,6 +194,7 @@ def get_fun(fun):
_close_conn(conn)
return ret
def get_jids():
'''
Return a list of all job ids
@ -201,6 +211,7 @@ def get_jids():
_close_conn(conn)
return ret
def get_minions():
'''
Return a list of minions

View File

@ -12,6 +12,7 @@ import re
import salt.loader
from salt.template import compile_template
def targets(tgt, tgt_type='glob', **kwargs):
'''
Return the targets from the flat yaml file, checks opts for location but

View File

@ -98,6 +98,7 @@ def _clear_cache(tgt=None,
clear_mine=clear_mine,
clear_mine_func=clear_mine_func)
def clear_pillar(tgt, expr_form='glob'):
'''
Clear the cached pillar data of the targeted minions
@ -110,6 +111,7 @@ def clear_pillar(tgt, expr_form='glob'):
'''
return _clear_cache(tgt, expr_form, clear_pillar=True)
def clear_grains(tgt=None, expr_form='glob'):
'''
Clear the cached grains data of the targeted minions
@ -122,6 +124,7 @@ def clear_grains(tgt=None, expr_form='glob'):
'''
return _clear_cache(tgt, expr_form, clear_grains=True)
def clear_mine(tgt=None, expr_form='glob'):
'''
Clear the cached mine data of the targeted minions
@ -134,6 +137,7 @@ def clear_mine(tgt=None, expr_form='glob'):
'''
return _clear_cache(tgt, expr_form, clear_mine=True)
def clear_mine_func(tgt=None, expr_form='glob', clear_mine_func=None):
'''
Clear the cached mine function data of the targeted minions
@ -146,6 +150,7 @@ def clear_mine_func(tgt=None, expr_form='glob', clear_mine_func=None):
'''
return _clear_cache(tgt, expr_form, clear_mine_func=clear_mine_func)
def clear_all(tgt=None, expr_form='glob'):
'''
Clear the cached pillar, grains, and mine data of the targeted minions

View File

@ -45,7 +45,7 @@ def held(name):
ret.update(result=None,
comment='Package {0} is set to be held'.format(name))
else:
ret.update(result= True,
ret.update(result=True,
comment='Package {0} is already held'.format(name))
return ret

View File

@ -581,56 +581,63 @@ def _test_owner(kwargs, user=None):
return user
def _unify_sources_and_hashes(source=None, source_hash=None,
def _unify_sources_and_hashes(source=None, source_hash=None,
sources=None, source_hashes=None):
'''
Silly lil function to give us a standard tuple list for sources and
Silly little function to give us a standard tuple list for sources and
source_hashes
'''
if sources is None:
sources = []
if source_hashes is None:
source_hashes = []
if ( source and sources ):
return (False,
"source and sources are mutally exclusive", [] )
if ( source_hash and source_hashes ):
return (False,
"source_hash and source_hashes are mutally exclusive", [] )
if source and sources:
return (False,
"source and sources are mutually exclusive", [])
if ( source ):
return (True, '', [ (source, source_hash) ] )
if source_hash and source_hashes:
return (False,
"source_hash and source_hashes are mutually exclusive", [])
if source:
return (True, '', [(source, source_hash)])
# Make a nice neat list of tuples exactly len(sources) long..
return (True, '', map(None, sources, source_hashes[:len(sources)]) )
return (True, '', map(None, sources, source_hashes[:len(sources)]))
def _get_template_texts(source_list = None, template='jinja', defaults = None,
context = None, env = 'base', **kwargs):
def _get_template_texts(source_list=None,
template='jinja',
defaults=None,
context=None,
env='base',
**kwargs):
'''
Iterate a list of sources and process them as templates.
Returns a list of 'chunks' containing the rendered templates.
'''
ret = {'name': '_get_template_texts', 'changes': {},
'result': True, 'comment': '', 'data': []}
ret = {'name': '_get_template_texts',
'changes': {},
'result': True,
'comment': '',
'data': []}
if source_list is None:
return _error(ret,
return _error(ret,
'_get_template_texts called with empty source_list')
txtl = []
for (source, source_hash) in source_list:
tmpctx = defaults if defaults else {}
if context:
tmpctx.update(context)
rndrd_templ_fn = __salt__['cp.get_template'](source, '',
template=template, env=env,
context = tmpctx, **kwargs )
rndrd_templ_fn = __salt__['cp.get_template'](source, '',
template=template, env=env,
context=tmpctx, **kwargs)
msg = 'cp.get_template returned {0} (Called with: {1})'
log.debug(msg.format(rndrd_templ_fn, source))
if rndrd_templ_fn:
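The map(None, sources, source_hashes[:len(sources)]) call in _unify_sources_and_hashes above is a Python 2 idiom: with None as the function, map() zips its arguments and pads the shorter one with None, so every source gets a (source, source_hash) tuple (itertools.izip_longest is the spelled-out equivalent). For example, reusing the motd sources from the append() example below and a made-up hash:

sources = ['salt://motd/devops-messages.tmpl', 'salt://motd/hr-messages.tmpl']
source_hashes = ['md5=0123456789abcdef0123456789abcdef']   # hypothetical hash for the first source only
print(map(None, sources, source_hashes[:len(sources)]))
# [('salt://motd/devops-messages.tmpl', 'md5=0123456789abcdef0123456789abcdef'),
#  ('salt://motd/hr-messages.tmpl', None)]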
@ -639,15 +646,15 @@ def _get_template_texts(source_list = None, template='jinja', defaults = None,
tmplines = fp_.readlines()
if not tmplines:
msg = 'Failed to read rendered template file {0} ({1})'
log.debug( msg.format(rndrd_templ_fn, source))
log.debug(msg.format(rndrd_templ_fn, source))
ret['name'] = source
return _error(ret, msg.format( rndrd_templ_fn, source) )
txtl.append( ''.join(tmplines))
return _error(ret, msg.format(rndrd_templ_fn, source))
txtl.append(''.join(tmplines))
else:
msg = 'Failed to load template file {0}'.format(source)
log.debug(msg)
ret['name'] = source
return _error(ret, msg )
return _error(ret, msg)
ret['data'] = txtl
return ret
@ -993,7 +1000,7 @@ def managed(name,
contents_pillar
.. versionadded:: 0.17
Operates like ``contents``, but draws from a value stored in pillar,
using the pillar path syntax used in :mod:`pillar.get
<salt.modules.pillar.get>`. This is useful when the pillar value
@ -1696,6 +1703,7 @@ def replace(name,
ret['result'] = True
return ret
def sed(name,
before,
after,
@ -1996,11 +2004,11 @@ def append(name,
source=None,
source_hash=None,
__env__='base',
template = 'jinja',
template='jinja',
sources=None,
source_hashes=None,
defaults = None,
context = None):
defaults=None,
context=None):
'''
Ensure that some text appears at the end of a file
@ -2025,7 +2033,7 @@ def append(name,
- "Salt is born of the purest of parents: the sun and the sea."
Gather text from multiple template files::
/etc/motd:
file:
- append
@ -2034,28 +2042,27 @@ def append(name,
- salt://motd/devops-messages.tmpl
- salt://motd/hr-messages.tmpl
- salt://motd/general-messages.tmpl
.. versionadded:: 0.9.5
'''
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
if sources is None:
sources = []
if source_hashes is None:
source_hashes = []
# Add sources and source_hashes with template support
# NOTE: FIX 'text' and any 'source' are mutally exclusive as 'text'
# NOTE: FIX 'text' and any 'source' are mutually exclusive as 'text'
# is re-assigned in the original code.
(ok, err, sl) = _unify_sources_and_hashes(source = source,
source_hash = source_hash,
sources = sources,
source_hashes = source_hashes )
(ok, err, sl) = _unify_sources_and_hashes(source=source,
source_hash=source_hash,
sources=sources,
source_hashes=source_hashes)
if not ok:
return _error(ret, err)
if makedirs is True:
dirname = os.path.dirname(name)
if not __salt__['file.directory_exists'](dirname):
@ -2075,11 +2082,11 @@ def append(name,
#Follow the original logic and re-assign 'text' if using source(s)...
if sl:
tmpret = _get_template_texts(source_list = sl,
template = template,
defaults = defaults,
context = context,
env = __env__)
tmpret = _get_template_texts(source_list=sl,
template=template,
defaults=defaults,
context=context,
env=__env__)
if not tmpret['result']:
return tmpret
text = tmpret['data']
@ -2092,7 +2099,6 @@ def append(name,
count = 0
for chunk in text:
if __salt__['file.contains_regex_multiline'](
@ -2619,6 +2625,7 @@ def serialize(name,
show_diff=show_diff,
contents=contents)
def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
'''
Create a special file similar to the 'nix mknod command. The supported device types are
@ -2748,4 +2755,3 @@ def mknod(name, ntype, major=0, minor=0, user=None, group=None, mode='0600'):
ret['comment'] = "Node type unavailable: '{0}. Available node types are character ('c'), block ('b'), and pipe ('p')".format(ntype)
return ret

View File

@ -113,6 +113,10 @@ def latest(name,
try:
current_rev = __salt__['git.revision'](target, user=runas)
branch = __salt__['git.current_branch'](target, user=runas)
if len(branch) > 0:
current_rev = branch
#only do something, if the specified rev differs from the
#current_rev
if rev == current_rev:

View File

@ -18,6 +18,7 @@ Note: This does NOT override any grains set in the minion file.
- value: edam
'''
def present(name, value):
'''
Ensure that a grain is set

View File

@ -11,12 +11,14 @@ A state module to manage Gentoo package overlays via layman
layman.present
'''
def __virtual__():
'''
Only load if the layman module is available in __salt__
'''
return 'layman' if 'layman.add' in __salt__ else False
def present(name):
'''
Verify that the overlay is present
@ -51,6 +53,7 @@ def present(name):
return ret
def absent(name):
'''
Verify that the overlay is absent

View File

@ -12,12 +12,14 @@ A state module to manage Gentoo's make.conf file
- value: '-j3'
'''
def __virtual__():
'''
Only load if the makeconf module is available in __salt__
'''
return 'makeconf' if 'makeconf.get_var' in __salt__ else False
def _make_set(var):
'''
Force var to be a set
@ -31,6 +33,7 @@ def _make_set(var):
var = list(var)
return set(var)
def present(name, value=None, contains=None, excludes=None):
'''
Verify that the variable is in the make.conf and has the provided
@ -154,6 +157,7 @@ def present(name, value=None, contains=None, excludes=None):
# Now finally return
return ret
def absent(name):
'''
Verify that the variable is not in the make.conf.

View File

@ -21,6 +21,7 @@ import salt.utils
# Set up logger
log = logging.getLogger(__name__)
def __virtual__():
'''
mdadm provides raid functions for Linux
@ -31,6 +32,7 @@ def __virtual__():
return False
return 'raid'
def present(name, opts=None):
'''
Verify that the raid is present

View File

@ -5,6 +5,8 @@ Management of Mongodb databases
Only deletion is supported, creation doesn't make sense
and can be done using mongodb_user.present
'''
def absent(name,
user=None,
password=None,

View File

@ -4,6 +4,7 @@ Management of Mongodb users
===========================
'''
def present(name,
passwd,
database="admin",

View File

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
'''
Manage package remote repo using FreeBSD pkgng.
==========================================================================
Manage package remote repo using FreeBSD pkgng
==============================================
Salt can manage the URL pkgng pulls packages from.
ATM the state and module are small so use cases are
@ -15,6 +15,7 @@ typically rather simple:
- name: "http://192.168.0.2"
'''
def update_packaging_site(name):
ret = {
'name': name,

View File

@ -13,12 +13,14 @@ A state module to manage Portage configuration on Gentoo
- openssl
'''
def __virtual__():
'''
Only load if the portage_config module is available in __salt__
'''
return 'portage_config' if 'portage_config.get_missing_flags' in __salt__ else False
def mod_init(low):
'''
Enforce a nice structure on the configuration files.
@ -29,6 +31,7 @@ def mod_init(low):
return False
return True
def _flags_helper(conf, atom, new_flags, test=False):
try:
new_flags = __salt__['portage_config.get_missing_flags'](conf, atom, new_flags)
@ -42,6 +45,7 @@ def _flags_helper(conf, atom, new_flags, test=False):
return {'result': True, 'changes': {'old': old_flags, 'new': new_flags}}
return {'result': None}
def _mask_helper(conf, atom, test=False):
try:
is_present = __salt__['portage_config.is_present'](conf, atom)
@ -54,6 +58,7 @@ def _mask_helper(conf, atom, test=False):
return {'result': True}
return {'result': None}
def flags(name,
use=None,
accept_keywords=None,

View File

@ -140,4 +140,3 @@ def absent(name,
ret['comment'] = 'Policy {0} {1} is not present'.format(vhost, name)
return ret

View File

@ -44,6 +44,7 @@ This is how a state configuration could look like:
# Import python libs
import re
def _check_rbenv(ret, runas=None):
'''
Check to see if rbenv is installed.
@ -53,6 +54,7 @@ def _check_rbenv(ret, runas=None):
ret['comment'] = 'Rbenv is not installed.'
return ret
def _ruby_installed(ret, ruby, runas=None):
'''
Check to see if given ruby is installed.
@ -67,6 +69,7 @@ def _ruby_installed(ret, ruby, runas=None):
return ret
def _check_and_install_ruby(ret, ruby, default=False, runas=None):
'''
Verify that ruby is installed, install if unavailable
@ -88,6 +91,7 @@ def _check_and_install_ruby(ret, ruby, default=False, runas=None):
return ret
def installed(name, default=False, runas=None):
'''
Verify that the specified ruby is installed with rbenv. Rbenv is
@ -121,6 +125,7 @@ def installed(name, default=False, runas=None):
else:
return _check_and_install_ruby(ret, name, default, runas=runas)
def _check_and_uninstall_ruby(ret, ruby, runas=None):
'''
Verify that ruby is uninstalled
@ -145,6 +150,7 @@ def _check_and_uninstall_ruby(ret, ruby, runas=None):
return ret
def absent(name, runas=None):
'''
Verify that the specified ruby is not installed with rbenv. Rbenv

View File

@ -3,6 +3,7 @@
Manage RDP Service on Windows servers
'''
def __virtual__():
'''
Load only if network_win is loaded

View File

@ -21,6 +21,7 @@ booleans can be set.
execution module is available.
'''
def __virtual__():
'''
Only make this state available if the selinux module is available.

View File

@ -40,6 +40,7 @@ Notes:
Apache Tomcat/7.0.37
'''
# Private
def __virtual__():
'''
@ -48,6 +49,7 @@ def __virtual__():
return 'tomcat' if 'tomcat.status' in __salt__ else False
# Functions
def war_deployed(name, war, url='http://localhost:8080/manager',
__env__='base', timeout=180):
@ -150,6 +152,7 @@ def war_deployed(name, war, url='http://localhost:8080/manager',
ret['changes'].pop('deploy')
return ret
def wait(name, url='http://localhost:8080/manager', timeout=180):
'''
Wait for the tomcat manager to load
@ -198,6 +201,7 @@ def wait(name, url='http://localhost:8080/manager', timeout=180):
return ret
def mod_watch(name, url='http://localhost:8080/manager', timeout=180):
'''
The tomcat watcher function.

View File

@ -6,12 +6,14 @@ Manage the Windows System PATH
# Python Libs
import re
def __virtual__():
'''
Load this state if the win_path module exists
'''
return 'win_path' if 'win_path.rehash' in __salt__ else False
def _normalize_dir(string):
'''
Normalize the directory to make comparison possible
@ -49,6 +51,7 @@ def absent(name):
ret['comment'] = 'could not remove {0} from the PATH'.format(name)
return ret
def exists(name, index=0):
'''
Add the directory to the system PATH at index location

View File

@ -93,9 +93,9 @@ TAGS = {
'minion': 'minion', # prefix for all salt/minion events (minion sourced events)
'syndic': 'syndic', # prefix for all salt/syndic events (syndic minion sourced events)
'run': 'run', # prefix for all salt/run events (salt runners)
'wheel': 'wheel', # prefix for all salt/wheel events
'cloud': 'cloud', # prefix for all salt/cloud events
'fileserver': 'fileserver', #prefix for all salt/fileserver events
'wheel': 'wheel', # prefix for all salt/wheel events
'cloud': 'cloud', # prefix for all salt/cloud events
'fileserver': 'fileserver', # prefix for all salt/fileserver events
}

View File

@ -147,7 +147,7 @@ def v6_int_to_packed(address):
Returns:
The binary representation of this address.
"""
return Bytes(struct.pack('!QQ', address >> 64, address & (2**64 - 1)))
return Bytes(struct.pack('!QQ', address >> 64, address & (2 ** 64 - 1)))
def _find_address_range(addresses):
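A worked example of the packing performed by v6_int_to_packed in the hunk above, using only the standard library and the documentation prefix address 2001:db8::1 written as a 128-bit integer:

import binascii
import struct

address = 0x20010db8000000000000000000000001   # 2001:db8::1
packed = struct.pack('!QQ', address >> 64, address & (2 ** 64 - 1))
print(binascii.hexlify(packed))                # 20010db8000000000000000000000001 (16 bytes, network byte order)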
@ -168,6 +168,7 @@ def _find_address_range(addresses):
break
return (first, last)
def _get_prefix_length(number1, number2, bits):
"""Get the number of leading bits that are same for two numbers.
@ -185,6 +186,7 @@ def _get_prefix_length(number1, number2, bits):
return bits - i
return 0
def _count_righthand_zero_bits(number, bits):
"""Count the number of zero bits on the right hand side.
@ -202,6 +204,7 @@ def _count_righthand_zero_bits(number, bits):
if (number >> i) % 2:
return i
def summarize_address_range(first, last):
"""Summarize a network range given the first and last IP addresses.
@ -252,7 +255,7 @@ def summarize_address_range(first, last):
nbits = _count_righthand_zero_bits(first_int, ip_bits)
current = None
while nbits >= 0:
addend = 2**nbits - 1
addend = 2 ** nbits - 1
current = first_int + addend
nbits -= 1
if current <= last_int:
@ -266,6 +269,7 @@ def summarize_address_range(first, last):
first = IPAddress(first_int, version=first._version)
return networks
def _collapse_address_list_recursive(addresses):
"""Loops through the addresses, collapsing concurrent netblocks.
@ -391,6 +395,7 @@ except (NameError, TypeError):
def __repr__(self):
return 'Bytes(%s)' % str.__repr__(self)
def get_mixed_type_key(obj):
"""Return a key suitable for sorting between networks and addresses.
@ -415,6 +420,7 @@ def get_mixed_type_key(obj):
return obj._get_address_key()
return NotImplemented
class _IPAddrBase(object):
"""The mother class."""
@ -511,7 +517,7 @@ class _BaseIP(_IPAddrBase):
return '%s(%r)' % (self.__class__.__name__, str(self))
def __str__(self):
return '%s' % self._string_from_ip_int(self._ip)
return '%s' % self._string_from_ip_int(self._ip)
def __hash__(self):
return hash(hex(long(self._ip)))
@ -627,8 +633,8 @@ class _BaseNet(_IPAddrBase):
return not eq
def __str__(self):
return '%s/%s' % (str(self.ip),
str(self._prefixlen))
return '%s/%s' % (str(self.ip),
str(self._prefixlen))
def __hash__(self):
return hash(int(self.network) ^ int(self.netmask))
@ -764,7 +770,7 @@ class _BaseNet(_IPAddrBase):
s1, s2 = s2.subnet()
else:
# If we got here, there's a bug somewhere.
assert True == False, ('Error performing exclusion: '
assert True is False, ('Error performing exclusion: '
's1: %s s2: %s other: %s' %
(str(s1), str(s2), str(other)))
if s1 == other:
@ -773,7 +779,7 @@ class _BaseNet(_IPAddrBase):
ret_addrs.append(s1)
else:
# If we got here, there's a bug somewhere.
assert True == False, ('Error performing exclusion: '
assert True is False, ('Error performing exclusion: '
's1: %s s2: %s other: %s' %
(str(s1), str(s2), str(other)))
@ -989,7 +995,6 @@ class _BaseNet(_IPAddrBase):
raise ValueError('cannot set prefixlen_diff and new_prefix')
prefixlen_diff = self._prefixlen - new_prefix
if self.prefixlen - prefixlen_diff < 0:
raise ValueError(
'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
@ -1016,7 +1021,7 @@ class _BaseV4(object):
"""
# Equivalent to 255.255.255.255 or 32 bits of 1's.
_ALL_ONES = (2**IPV4LENGTH) - 1
_ALL_ONES = (2 ** IPV4LENGTH) - 1
_DECIMAL_DIGITS = frozenset('0123456789')
def __init__(self, address):
@ -1388,7 +1393,7 @@ class _BaseV6(object):
"""
_ALL_ONES = (2**IPV6LENGTH) - 1
_ALL_ONES = (2 ** IPV6LENGTH) - 1
_HEXTET_COUNT = 8
_HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
@ -1567,7 +1572,7 @@ class _BaseV6(object):
hex_str = '%032x' % ip_int
hextets = []
for x in range(0, 32, 4):
hextets.append('%x' % int(hex_str[x:x+4], 16))
hextets.append('%x' % int(hex_str[x:x + 4], 16))
hextets = self._compress_hextets(hextets)
return ':'.join(hextets)
@ -1807,7 +1812,6 @@ class IPv6Network(_BaseV6, _BaseNet):
"""
def __init__(self, address, strict=False):
"""Instantiate a new IPv6 Network object.

View File

@ -11,6 +11,7 @@ from mako.lookup import TemplateCollection, TemplateLookup
# Import salt libs
import salt.fileclient
class SaltMakoTemplateLookup(TemplateCollection):
"""
Look up Mako template files on Salt master via salt://... URLs.

View File

@ -21,6 +21,7 @@ from salt.exceptions import SaltException
log = logging.getLogger(__name__)
class MasterPillarUtil(object):
'''
Helper utility for easy access to targeted minion grain and
@ -111,7 +112,7 @@ class MasterPillarUtil(object):
def _get_live_minion_pillar(self, minion_id=None, minion_grains=None):
# Returns a dict of pillar data for one minion
if minion_id == None:
if minion_id is None:
return {}
if not minion_grains:
log.warn('Cannot get pillar data for {0}: no grains supplied.'.format(minion_id))
@ -137,14 +138,14 @@ class MasterPillarUtil(object):
lret = {}
if self.use_cached_grains:
cret = dict([(minion_id, mcache) for (minion_id, mcache) in cached_grains.iteritems() if mcache])
missed_minions = [ minion_id for minion_id in minion_ids if minion_id not in cret ]
missed_minions = [minion_id for minion_id in minion_ids if minion_id not in cret]
log.debug('Missed cached minion grains for: {0}'.format(missed_minions))
if self.grains_fallback:
lret = self._get_live_minion_grains(missed_minions)
ret = dict(dict([(minion_id, {}) for minion_id in minion_ids]).items() + lret.items() + cret.items())
else:
lret = self._get_live_minion_grains(minion_ids)
missed_minions = [ minion_id for minion_id in minion_ids if minion_id not in lret ]
missed_minions = [minion_id for minion_id in minion_ids if minion_id not in lret]
log.debug('Missed live minion grains for: {0}'.format(missed_minions))
if self.grains_fallback:
cret = dict([(minion_id, mcache) for (minion_id, mcache) in cached_grains.iteritems() if mcache])
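A quick note on the dict(... .items() + ... .items()) merges used throughout this class: when the concatenated pair lists are rebuilt into a dict, later pairs win, which is how the empty per-minion defaults get overlaid with live and cached results. A minimal Python 2 illustration (items() returns lists there; wrap each in list() on Python 3):

defaults = {'web1': {}, 'web2': {}}
results = {'web1': {'os': 'Debian'}}
print(dict(defaults.items() + results.items()))
# {'web1': {'os': 'Debian'}, 'web2': {}}  -- the later pair for 'web1' wins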
@ -162,14 +163,14 @@ class MasterPillarUtil(object):
lret = {}
if self.use_cached_pillar:
cret = dict([(minion_id, mcache) for (minion_id, mcache) in cached_pillar.iteritems() if mcache])
missed_minions = [ minion_id for minion_id in minion_ids if minion_id not in cret ]
missed_minions = [minion_id for minion_id in minion_ids if minion_id not in cret]
log.debug('Missed cached minion pillars for: {0}'.format(missed_minions))
if self.pillar_fallback:
lret = dict([(minion_id, self._get_live_minion_pillar(minion_id, grains.get(minion_id, {}))) for minion_id in missed_minions])
ret = dict(dict([(minion_id, {}) for minion_id in minion_ids]).items() + lret.items() + cret.items())
else:
lret = dict([(minion_id, self._get_live_minion_pillar(minion_id, grains.get(minion_id, {}))) for minion_id in minion_ids])
missed_minions = [ minion_id for minion_id in minion_ids if minion_id not in lret ]
missed_minions = [minion_id for minion_id in minion_ids if minion_id not in lret]
log.debug('Missed live minion pillars for: {0}'.format(missed_minions))
if self.pillar_fallback:
cret = dict([(minion_id, mcache) for (minion_id, mcache) in cached_pillar.iteritems() if mcache])
@ -218,12 +219,12 @@ class MasterPillarUtil(object):
log.debug('Getting minion grain data for: {0}'.format(minion_ids))
minion_grains = self._get_minion_grains(
*minion_ids,
cached_grains = cached_minion_grains)
cached_grains=cached_minion_grains)
log.debug('Getting minion pillar data for: {0}'.format(minion_ids))
minion_pillars = self._get_minion_pillar(
*minion_ids,
grains = minion_grains,
cached_pillar = cached_minion_pillars)
grains=minion_grains,
cached_pillar=cached_minion_pillars)
return minion_pillars
def get_minion_grains(self):
@ -249,7 +250,7 @@ class MasterPillarUtil(object):
log.debug('Getting minion grain data for: {0}'.format(minion_ids))
minion_grains = self._get_minion_grains(
*minion_ids,
cached_grains = cached_minion_grains)
cached_grains=cached_minion_grains)
return minion_grains
def clear_cached_minion_data(self,

View File

@ -84,7 +84,6 @@ class NonBlockingPopen(subprocess.Popen):
self._stderr_logger_name_.format(pid=self.pid)
)
self._stderr_logger = logging.getLogger(
self._stderr_logger_name_.format(pid=self.pid)
)

View File

@ -3,20 +3,25 @@
Common utility functions for the reclass adapters
http://reclass.pantsfullofunix.net
'''
# Import python libs
import sys
import os
def prepend_reclass_source_path(opts):
source_path = opts.get('reclass_source_path')
if source_path:
source_path = os.path.abspath(os.path.expanduser(source_path))
sys.path.insert(0, source_path)
def filter_out_source_path_option(opts):
if 'reclass_source_path' in opts:
del opts['reclass_source_path']
# no return required, object was passed by reference
def set_inventory_base_uri_default(config, opts):
if 'inventory_base_uri' in opts:
return

View File

@ -245,8 +245,8 @@ def get_template_context(template, line, num_lines=5, marker=None):
if line > num_template_lines:
return template
context_start = max(0, line-num_lines-1) # subtract 1 for 0-based indexing
context_end = min(num_template_lines, line+num_lines)
context_start = max(0, line - num_lines - 1) # subtract 1 for 0-based indexing
context_end = min(num_template_lines, line + num_lines)
error_line_in_context = line - context_start - 1 # subtract 1 for 0-based indexing
buf = []
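A worked pass through the window arithmetic above, assuming a 40-line template and an error on line 10 with the default num_lines=5:

line, num_lines, num_template_lines = 10, 5, 40
context_start = max(0, line - num_lines - 1)                 # 4  (0-based slice start)
context_end = min(num_template_lines, line + num_lines)      # 15 (slice end, exclusive)
error_line_in_context = line - context_start - 1             # 5  (index of line 10 inside the slice)
print((context_start, context_end, error_line_in_context))   # (4, 15, 5)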

View File

@ -88,6 +88,7 @@ def zmq_version():
sys.stderr.write('CRITICAL {0}\n'.format(msg))
return False
def lookup_family(hostname):
'''
Lookup a hostname and determine its address family. The first address returned
@ -106,6 +107,7 @@ def lookup_family(hostname):
except socket.gaierror:
return fallback
def verify_socket(interface, pub_port, ret_port):
'''
Attempt to bind to the sockets to verify that they are available

View File

@ -1,11 +1,13 @@
# -*- coding: utf-8 -*-
# Import python libs
import logging
import pythoncom
import threading
log = logging.getLogger(__name__)
class Com(object):
def __init__(self):
self.need_com_init = not self._is_main_thread()
@ -15,10 +17,10 @@ class Com(object):
def __enter__(self):
if self.need_com_init:
log.debug("Initializing COM library")
log.debug('Initializing COM library')
pythoncom.CoInitialize()
def __exit__(self, exc_type, exc_value, traceback):
if self.need_com_init:
log.debug("Uninitializing COM library")
log.debug('Uninitializing COM library')
pythoncom.CoUninitialize()

View File

@ -3,6 +3,7 @@
Various XML utilities
'''
def to_dict(xmltree):
'''
Convert an XML tree into a dict. The tree that is passed in must be an

View File

@ -10,6 +10,7 @@ __func_alias__ = {
'list_': 'list'
}
def list_(match):
'''
List all the keys under a named status