Merge remote-tracking branch 'upstream/2015.8' into merge-forward-develop

Conflicts:
    salt/modules/git.py
    salt/renderers/gpg.py
    salt/utils/__init__.py

commit d76d62877d
@@ -1,5 +1,5 @@
===========================
Salt 2015.8.1 Release Notes
Salt 2015.8.2 Release Notes
===========================

Extended changelog courtesy of Todd Stansell (https://github.com/tjstansell/salt-changelogs):
@@ -334,8 +334,8 @@ of ``salt-run``:

.. code-block:: bash

salt-call win_repo.update_git_repos
salt-call win_repo.genrepo
salt-call winrepo.update_git_repos
salt-call winrepo.genrepo
salt-call pkg.refresh_db

After executing the previous commands the repository on the standalone system
@@ -591,7 +591,7 @@ class SSH(object):
self.cache_job(jid, host, ret[host], fun)
ret = self.key_deploy(host, ret)

if ret[host].get('stderr', '').startswith('ssh:'):
if isinstance(ret[host], dict) and ret[host].get('stderr', '').startswith('ssh:'):
ret[host] = ret[host]['stderr']

if not isinstance(ret[host], dict):
@@ -1214,12 +1214,15 @@ def os_data():
elif os.path.isfile('/etc/SuSE-release'):
grains['lsb_distrib_id'] = 'SUSE'
with salt.utils.fopen('/etc/SuSE-release') as fhr:
rel = re.sub("[^0-9]", "", fhr.read().split('\n')[1])
with salt.utils.fopen('/etc/SuSE-release') as fhr:
patch = re.sub("[^0-9]", "", fhr.read().split('\n')[2])
release = rel + " SP" + patch
grains['lsb_distrib_release'] = release
grains['lsb_distrib_codename'] = "n.a"
for line in fhr:
if 'enterprise' in line.lower():
grains['lsb_distrib_id'] = 'SLES'
elif 'version' in line.lower():
version = re.sub(r'[^0-9]', '', line)
elif 'patchlevel' in line.lower():
patch = re.sub(r'[^0-9]', '', line)
grains['lsb_distrib_release'] = version + ' SP' + patch
grains['lsb_distrib_codename'] = 'n.a'
elif os.path.isfile('/etc/altlinux-release'):
# ALT Linux
grains['lsb_distrib_id'] = 'altlinux'
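The grains change above stops indexing fixed line numbers of ``/etc/SuSE-release`` and instead inspects each line. As a rough standalone illustration only (the file contents below are hypothetical and not taken from this commit), the new loop behaves like this:

.. code-block:: python

    import re

    # Hypothetical /etc/SuSE-release contents
    sample = (
        'SUSE Linux Enterprise Server 11 (x86_64)\n'
        'VERSION = 11\n'
        'PATCHLEVEL = 3\n'
    )

    version = patch = ''
    distrib_id = 'SUSE'
    for line in sample.splitlines():
        if 'enterprise' in line.lower():
            distrib_id = 'SLES'
        elif 'version' in line.lower():
            version = re.sub(r'[^0-9]', '', line)
        elif 'patchlevel' in line.lower():
            patch = re.sub(r'[^0-9]', '', line)

    print(distrib_id, version + ' SP' + patch)  # SLES 11 SP3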
@@ -17,6 +17,15 @@ import sys
import logging


class LoggingProfileMixIn(object):
'''
Simple mix-in class to add a trace method to python's logging.
'''

def profile(self, msg, *args, **kwargs):
self.log(getattr(logging, 'PROFILE', 15), msg, *args, **kwargs)


class LoggingTraceMixIn(object):
'''
Simple mix-in class to add a trace method to python's logging.
@@ -45,7 +54,7 @@ class LoggingMixInMeta(type):
the bases.
'''
def __new__(mcs, name, bases, attrs):
include_trace = include_garbage = True
include_profile = include_trace = include_garbage = True
bases = list(bases)
if name == 'SaltLoggingClass':
for base in bases:
@@ -53,6 +62,8 @@ class LoggingMixInMeta(type):
include_trace = False
if hasattr(base, 'garbage'):
include_garbage = False
if include_profile:
bases.append(LoggingProfileMixIn)
if include_trace:
bases.append(LoggingTraceMixIn)
if include_garbage:
@@ -32,6 +32,7 @@ from salt.ext.six.moves.urllib.parse import urlparse # pylint: disable=import-e

# Let's define these custom logging levels before importing the salt.log.mixins
# since they will be used there
PROFILE = logging.PROFILE = 15
TRACE = logging.TRACE = 5
GARBAGE = logging.GARBAGE = 1
QUIET = logging.QUIET = 1000
@@ -54,6 +55,7 @@ LOG_LEVELS = {
'critical': logging.CRITICAL,
'garbage': GARBAGE,
'info': logging.INFO,
'profile': PROFILE,
'quiet': QUIET,
'trace': TRACE,
'warning': logging.WARNING,
@@ -67,6 +69,7 @@ LOG_COLORS = {
'ERROR': TextFormat('bold', 'red'),
'WARNING': TextFormat('bold', 'yellow'),
'INFO': TextFormat('bold', 'green'),
'PROFILE': TextFormat('bold', 'cyan'),
'DEBUG': TextFormat('bold', 'cyan'),
'TRACE': TextFormat('bold', 'magenta'),
'GARBAGE': TextFormat('bold', 'blue'),
@@ -80,6 +83,7 @@ LOG_COLORS = {
'ERROR': TextFormat('red'),
'WARNING': TextFormat('yellow'),
'INFO': TextFormat('green'),
'PROFILE': TextFormat('bold', 'cyan'),
'DEBUG': TextFormat('cyan'),
'TRACE': TextFormat('magenta'),
'GARBAGE': TextFormat('blue'),
@@ -366,6 +370,7 @@ if logging.getLoggerClass() is not SaltLoggingClass:

logging.setLoggerClass(SaltLoggingClass)
logging.addLevelName(QUIET, 'QUIET')
logging.addLevelName(PROFILE, 'PROFILE')
logging.addLevelName(TRACE, 'TRACE')
logging.addLevelName(GARBAGE, 'GARBAGE')

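The logging hunks above register a new ``PROFILE`` level (numeric value 15) and a mix-in that exposes it as a ``profile()`` method on Salt's logger class. A minimal standalone sketch of the same pattern using only the standard library (the class and logger names here are illustrative, not part of this commit):

.. code-block:: python

    import logging

    PROFILE = 15
    logging.addLevelName(PROFILE, 'PROFILE')


    class ProfileLogger(logging.getLoggerClass()):
        def profile(self, msg, *args, **kwargs):
            # Delegate to Logger.log() with the custom numeric level,
            # mirroring what LoggingProfileMixIn does above.
            self.log(PROFILE, msg, *args, **kwargs)


    logging.setLoggerClass(ProfileLogger)
    logging.basicConfig(level=PROFILE)
    logging.getLogger('demo').profile('rendered %s in %.3f seconds', 'top.sls', 0.042)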
@@ -1419,6 +1419,7 @@ def run_all(cmd,
ignore_retcode=False,
saltenv='base',
use_vt=False,
redirect_stderr=False,
**kwargs):
'''
Execute the passed command and return a dict of return data
@@ -1513,6 +1514,14 @@ def run_all(cmd,
Note that ``env`` represents the environment variables for the command, and
should be formatted as a dict, or a YAML string which resolves to a dict.

redirect_stderr : False
If set to ``True``, then stderr will be redirected to stdout. This is
helpful for cases where obtaining both the retcode and output is
desired, but it is not desired to have the output separated into both
stdout and stderr.

.. versionadded:: 2015.8.2

CLI Example:

.. code-block:: bash
@@ -1537,10 +1546,12 @@ def run_all(cmd,
'''
python_shell = _python_shell_default(python_shell,
kwargs.get('__pub_jid', ''))
stderr = subprocess.STDOUT if redirect_stderr else subprocess.PIPE
ret = _run(cmd,
runas=runas,
cwd=cwd,
stdin=stdin,
stderr=stderr,
shell=shell,
python_shell=python_shell,
env=env,
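As the hunk shows, the new ``redirect_stderr`` option simply switches the ``stderr`` handle handed to ``subprocess`` from ``PIPE`` to ``STDOUT``, so both streams arrive on stdout. A small standalone sketch of that behaviour, outside of Salt (the command shown is arbitrary):

.. code-block:: python

    import subprocess

    # With stderr=subprocess.STDOUT the error output is merged into stdout,
    # which is what redirect_stderr=True requests in cmd.run_all.
    proc = subprocess.Popen(
        ['ls', '/nonexistent'],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    out, _ = proc.communicate()
    print(proc.returncode, out.decode())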
@@ -9,6 +9,7 @@ import copy
import errno
import logging
import os
import re
from distutils.version import LooseVersion as _LooseVersion

# Import salt libs
@@ -130,7 +131,8 @@ def _format_opts(opts):


def _git_run(command, cwd=None, runas=None, identity=None,
ignore_retcode=False, failhard=True, **kwargs):
ignore_retcode=False, failhard=True, redirect_stderr=False,
**kwargs):
'''
simple, throw an exception with the error message on an error return code.

@@ -204,6 +206,7 @@ def _git_run(command, cwd=None, runas=None, identity=None,
python_shell=False,
log_callback=salt.utils.url.redact_http_basic_auth,
ignore_retcode=ignore_retcode,
redirect_stderr=redirect_stderr,
**kwargs)
finally:
if not salt.utils.is_windows() and 'GIT_SSH' in env:
@@ -231,6 +234,7 @@ def _git_run(command, cwd=None, runas=None, identity=None,
python_shell=False,
log_callback=salt.utils.url.redact_http_basic_auth,
ignore_retcode=ignore_retcode,
redirect_stderr=redirect_stderr,
**kwargs)

if result['retcode'] == 0:
@@ -622,7 +626,8 @@ def checkout(cwd,
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stderr']
ignore_retcode=ignore_retcode,
redirect_stderr=True)['stdout']


def clone(cwd,
@@ -1324,6 +1329,10 @@ def fetch(cwd,
identity=None,
ignore_retcode=False):
'''
.. versionchanged:: 2015.8.2
Return data is now a dictionary containing information on branches and
tags that were added/updated

Interface to `git-fetch(1)`_

cwd
@@ -1398,7 +1407,9 @@ def fetch(cwd,
[x for x in _format_opts(opts) if x not in ('-f', '--force')]
)
if remote:
command.append(str(remote))
if not isinstance(remote, six.string_types):
remote = str(remote)
command.append(remote)
if refspecs is not None:
if isinstance(refspecs, (list, tuple)):
refspec_list = []
@@ -1412,11 +1423,38 @@ def fetch(cwd,
refspecs = str(refspecs)
refspec_list = refspecs.split(',')
command.extend(refspec_list)
return _git_run(command,
cwd=cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode)['stdout']
output = _git_run(command,
cwd=cwd,
runas=user,
identity=identity,
ignore_retcode=ignore_retcode,
redirect_stderr=True)['stdout']

update_re = re.compile(
r'[\s*]*(?:([0-9a-f]+)\.\.([0-9a-f]+)|'
r'\[(?:new (tag|branch)|tag update)\])\s+(.+)->'
)
ret = {}
for line in salt.utils.itertools.split(output, '\n'):
match = update_re.match(line)
if match:
old_sha, new_sha, new_ref_type, ref_name = \
match.groups()
ref_name = ref_name.rstrip()
if new_ref_type is not None:
# ref is a new tag/branch
ref_key = 'new tags' \
if new_ref_type == 'tag' \
else 'new branches'
ret.setdefault(ref_key, []).append(ref_name)
elif old_sha is not None:
# ref is a branch update
ret.setdefault('updated branches', {})[ref_name] = \
{'old': old_sha, 'new': new_sha}
else:
# ref is an updated tag
ret.setdefault('updated tags', []).append(ref_name)
return ret


def init(cwd,
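For reference, the dictionary that ``git.fetch`` now returns follows the keys built up in the parsing loop above. A hypothetical example of the shape of that return data (the SHA1s and ref names are invented for illustration):

.. code-block:: python

    # Hypothetical return value of git.fetch after this change
    fetch_changes = {
        'new branches': ['2015.8'],
        'new tags': ['v2015.8.2'],
        'updated branches': {'develop': {'old': 'd9ab82a', 'new': 'f00dbab'}},
        'updated tags': ['v2015.8.1'],
    }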
@@ -3363,7 +3401,8 @@ def worktree_add(cwd,
return _git_run(command,
cwd=cwd,
runas=user,
ignore_retcode=ignore_retcode)['stderr']
ignore_retcode=ignore_retcode,
redirect_stderr=True)['stdout']


def worktree_prune(cwd,
@@ -44,6 +44,7 @@ def __virtual__():
'''
# Enable on these platforms only.
enable = set((
'XenServer',
'RedHat',
'CentOS',
'ScientificLinux',
@@ -57,6 +58,8 @@ def __virtual__():
'McAfee OS Server'
))
if __grains__['os'] in enable:
if __grains__['os'] == 'XenServer':
return __virtualname__
if __grains__['os'] == 'SUSE':
if str(__grains__['osrelease']).startswith('11'):
return __virtualname__
@@ -153,23 +156,22 @@ def _sysv_is_enabled(name, runlevel=None):

def _chkconfig_is_enabled(name, runlevel=None):
'''
Return True if the service is enabled according to chkconfig; otherwise
return False. If `runlevel` is None, then use the current runlevel.
Return ``True`` if the service is enabled according to chkconfig; otherwise
return ``False``. If ``runlevel`` is ``None``, then use the current
runlevel.
'''
cmdline = '/sbin/chkconfig --list {0}'.format(name)
result = __salt__['cmd.run_all'](cmdline, python_shell=False)

if runlevel is None:
runlevel = _runlevel()
if result['retcode'] == 0:
cols = result['stdout'].splitlines()[0].split()
try:
if cols[0].strip(':') == name:
if runlevel is None:
runlevel = _runlevel()
if len(cols) > 3 and '{0}:on'.format(runlevel) in cols:
for row in result['stdout'].splitlines():
if '{0}:on'.format(runlevel) in row:
if row.split()[0] == name:
return True
elif len(cols) < 3 and cols[1] and cols[1] == 'on':
return True
except IndexError:
pass
elif row.split() == [name + ':', 'on']:
return True
return False

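The rewritten ``_chkconfig_is_enabled`` has to cope with two output shapes from ``chkconfig --list``: the SysV runlevel table and the single-column xinetd listing. A small sketch of the two row formats and the checks applied to them (the rows below are hypothetical examples, matching the ones used in the test further down):

.. code-block:: python

    # Hypothetical chkconfig output rows the new parser handles
    sysv_row = 'atd    0:off  1:off  2:off  3:on  4:on  5:on  6:off'
    xinetd_row = 'rsync:  on'

    # SysV-style service: enabled when '<runlevel>:on' appears in the row
    # whose first column is the service name.
    print('3:on' in sysv_row.split() and sysv_row.split()[0] == 'atd')  # True
    # xinetd-style service: the row collapses to ['<name>:', 'on'].
    print(xinetd_row.split() == ['rsync:', 'on'])  # True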
@@ -697,7 +697,7 @@ class State(object):
Execute the aggregation systems to runtime modify the low chunk
'''
agg_opt = self.functions['config.option']('state_aggregate')
if low.get('aggregate') is True:
if 'aggregate' in low:
agg_opt = low['aggregate']
if agg_opt is True:
agg_opt = [low['state']]
@@ -4,7 +4,7 @@ Manage chassis via Salt Proxies.

.. versionadded:: 2015.8.2

Below is an example state that sets parameters just to show the basics.
Below is an example state that sets basic parameters:

.. code-block:: yaml

@@ -24,15 +24,16 @@ Below is an example state that sets parameters just to show the basics.
- server-3: powercycle

However, it is possible to place the entire set of chassis configuration
data in pillar. Here's an example pillar
structure:
data in pillar. Here's an example pillar structure:

.. code-block:: yaml

proxy:
host: 10.27.20.18
admin_username: root
admin_password: saltstack
admin_password: super-secret
fallback_admin_username: root
fallback_admin_password: old-secret
proxytype: fx2

chassis:
@@ -48,30 +49,30 @@ structure:
- 'server-2': blade2

blades:
blade1:
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.1
subnet: 255.255.0.0
netmask: 172.17.255.255
blade1:
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.1
subnet: 255.255.0.0
netmask: 172.17.255.255
blade2:
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.2
subnet: 255.255.0.0
netmask: 172.17.255.255
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.2
subnet: 255.255.0.0
netmask: 172.17.255.255
blade3:
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.2
subnet: 255.255.0.0
netmask: 172.17.255.255
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.2
subnet: 255.255.0.0
netmask: 172.17.255.255
blade4:
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.2
subnet: 255.255.0.0
netmask: 172.17.255.255
idrac_password: saltstack1
ipmi_over_lan: True
ip: 172.17.17.2
subnet: 255.255.0.0
netmask: 172.17.255.255

switches:
switch-1:
@@ -87,33 +88,8 @@ structure:
snmp: nonpublic
password: saltstack1

And to go with it, here's an example state that pulls the data from pillar.
This example assumes that the pillar data would be structured like

Pillar:

.. code-block:: yaml

proxy:
host: 192.168.1.1
admin_username: root
admin_password: sekrit
fallback_admin_username: root
fallback_admin_password: old_sekrit
proxytype: fx2

chassis:
name: fx2-1
username: root
datacenter: UT1
location: UT1
management_mode: 2
idrac_launch: 0
slot_names:
1: blade1
2: blade2

State:
And to go with it, here's an example state that pulls the data from the
pillar stated above:

.. code-block:: yaml

@@ -124,15 +100,14 @@ State:
- location: {{ details['location'] }}
- mode: {{ details['management_mode'] }}
- idrac_launch: {{ details['idrac_launch'] }}
- slot_names
{% for k, v in details['chassis']['slot_names'].iteritems() %}
- slot_names:
{% for k, v in details['slot_names'].iteritems() %}
- {{ k }}: {{ v }}
{% endfor %}


{% for k, v in details['chassis']['switches'].iteritems() %}
{% for k, v in details['switches'].iteritems() %}
standup-switches-{{ k }}:
dellchassis.dell_switch:
dellchassis.switch:
- name: {{ k }}
- ip: {{ v['ip'] }}
- netmask: {{ v['netmask'] }}
@@ -141,14 +116,8 @@ State:
- snmp: {{ v['snmp'] }}
{% endfor %}

dellchassis
{% for k, v in details['chassis']['slot_names'].iteritems() %}

- {{ k }}: {{ v }}
{% endfor %}

blade_powercycle:
chassis.dell_chassis:
dellchassis.chassis:
- blade_power_states:
- server-1: powercycle
- server-2: powercycle
@@ -171,7 +140,7 @@ def __virtual__():
return 'chassis.cmd' in __salt__


def blade_idrac(idrac_password=None, idrac_ipmi=None,
def blade_idrac(name, idrac_password=None, idrac_ipmi=None,
idrac_ip=None, idrac_netmask=None, idrac_gateway=None,
idrac_dnsname=None,
drac_dhcp=None):
@@ -188,7 +157,8 @@ def blade_idrac(idrac_password=None, idrac_ipmi=None,
:return: A standard Salt changes dictionary
'''

ret = {'result': True,
ret = {'name': name,
'result': True,
'changes': {},
'comment': ''}

@@ -284,7 +254,7 @@ def chassis(name, chassis_name=None, password=None, datacenter=None,
- server-2: off
- server-3: powercycle
'''
ret = {'chassis_name': chassis_name,
ret = {'name': chassis_name,
'result': True,
'changes': {},
'comment': ''}
@@ -479,7 +449,7 @@ def switch(name, ip=None, netmask=None, gateway=None, dhcp=None,
.. code-block:: yaml

my-dell-chassis:
dellchassis.dell_switch:
dellchassis.switch:
- switch: switch-1
- ip: 192.168.1.1
- netmask: 255.255.255.0
@@ -65,37 +65,6 @@ def _format_comments(comments):
return ret


def _parse_fetch(output):
'''
Go through the output from a git fetch and return a dict
'''
update_re = re.compile(
r'.*(?:([0-9a-f]+)\.\.([0-9a-f]+)|'
r'\[(?:new (tag|branch)|tag update)\])\s+(.+)->'
)
ret = {}
for line in output.splitlines():
match = update_re.match(line)
if match:
old_sha, new_sha, new_ref_type, ref_name = \
match.groups()
ref_name = ref_name.rstrip()
if new_ref_type is not None:
# ref is a new tag/branch
ref_key = 'new tags' \
if new_ref_type == 'tag' \
else 'new branches'
ret.setdefault(ref_key, []).append(ref_name)
elif old_sha is not None:
# ref is a branch update
ret.setdefault('updated_branches', {})[ref_name] = \
{'old': old_sha, 'new': new_sha}
else:
# ref is an updated tag
ret.setdefault('updated tags', []).append(ref_name)
return ret


def _get_local_rev_and_branch(target, user):
'''
Return the local revision for before/after comparisons
@@ -135,8 +104,7 @@ def _uptodate(ret, target, comments=None):
# Shouldn't be making any changes if the repo was up to date, but
# report on them so we are alerted to potential problems with our
# logic.
ret['comment'] += '\n\nChanges made: '
ret['comment'] += _format_comments(comments)
ret['comment'] += '\n\nChanges made: ' + comments
return ret


@@ -176,6 +144,7 @@ def latest(name,
target=None,
branch=None,
user=None,
update_head=True,
force_checkout=False,
force_clone=False,
force_fetch=False,
@@ -208,6 +177,10 @@ def latest(name,
If ``rev`` is not specified, it will be assumed to be ``HEAD``, and
Salt will not manage the tracking branch at all.

.. versionchanged:: 2015.8.0
If not specified, ``rev`` now defaults to the remote repository's
HEAD.

target
Name of the target directory where repository is about to be cloned

@@ -228,6 +201,13 @@ def latest(name,

.. versionadded:: 0.17.0

update_head : True
If set to ``False``, then the remote repository will be fetched (if
necessary) to ensure that the commit to which ``rev`` points exists in
the local checkout, but no changes will be made to the local HEAD.

.. versionadded:: 2015.8.3

force : False
.. deprecated:: 2015.8.0
Use ``force_clone`` instead. For earlier Salt versions, ``force``
@@ -527,9 +507,6 @@ def latest(name,
else:
if rev == 'HEAD':
if 'HEAD' in all_remote_refs:
# head_ref will only be defined if rev == 'HEAD', be careful
# how this is used below
head_ref = remote + '/HEAD'
remote_rev = all_remote_refs['HEAD']
# Just go with whatever the upstream currently is
desired_upstream = None
@@ -565,6 +542,20 @@ def latest(name,
else:
remote_rev = None

# For the comment field of the state return dict, the remote location
# (and short-sha1, if rev is not a sha1) is referenced several times,
# determine it once here and reuse the value below.
if remote_rev_type == 'sha1':
if rev == 'HEAD':
remote_loc = 'remote HEAD (' + remote_rev[:7] + ')'
else:
remote_loc = remote_rev[:7]
else:
remote_loc = '{0} ({1})'.format(
desired_upstream if remote_rev_type == 'branch' else rev,
remote_rev[:7]
)

if remote_rev is None and not bare:
if rev != 'HEAD':
# A specific rev is desired, but that rev doesn't exist on the
@@ -635,6 +626,8 @@ def latest(name,
user=user,
redact_auth=False)

revs_match = _revs_equal(local_rev, remote_rev, remote_rev_type)

if remote_rev_type == 'sha1' \
and base_rev is not None \
and base_rev.startswith(remote_rev):
@@ -730,6 +723,19 @@ def latest(name,
# and don't have to worry about fast-forwarding.
fast_forward = None
else:
# Remote rev already present
if (not revs_match and not update_head) \
and (branch is None or branch == local_branch):
ret['comment'] = remote_loc.capitalize() \
if rev == 'HEAD' \
else remote_loc
ret['comment'] += (
' is already present and local HEAD ({0}) does not '
'match, but update_head=False. HEAD has not been '
'updated locally.'.format(local_rev[:7])
)
return ret

if base_rev is None:
# If we're here, the remote_rev exists in the local
# checkout but there is still no HEAD locally. A possible
@@ -794,35 +800,39 @@ def latest(name,

if remote_rev is not None and desired_fetch_url != fetch_url:
if __opts__['test']:
ret['changes']['remotes/{0}'.format(remote)] = {
'old': salt.utils.url.redact_http_basic_auth(fetch_url),
'new': redacted_fetch_url
}
actions = [
'Remote \'{0}\' would be set to {1}'.format(
'Remote \'{0}\' would be changed from {1} to {2}'
.format(
remote,
salt.utils.url.redact_http_basic_auth(fetch_url),
redacted_fetch_url
)
]
if not has_remote_rev:
actions.append('Remote would be fetched')
if not _revs_equal(local_rev,
remote_rev,
remote_rev_type):
ret['changes']['revision'] = {
'old': local_rev, 'new': remote_rev
}
if fast_forward is False:
ret['changes']['forced update'] = True
actions.append(
'Repository would be {0} to {1}'.format(
merge_action,
_short_sha(remote_rev)
if not revs_match:
if update_head:
ret['changes']['revision'] = {
'old': local_rev, 'new': remote_rev
}
if fast_forward is False:
ret['changes']['forced update'] = True
actions.append(
'Repository would be {0} to {1}'.format(
merge_action,
_short_sha(remote_rev)
)
)
)
if ret['changes']:
return _neutral_test(ret, _format_comments(actions))
else:
if not revs_match and not update_head:
# Repo content would not be modified but the remote
# URL would be modified, so we can't just say that
# the repo is up-to-date, we need to inform the
# user of the actions taken.
ret['comment'] = _format_comments(actions)
return ret
return _uptodate(ret,
target,
_format_comments(actions))
@@ -836,114 +846,105 @@ def latest(name,
user=user,
https_user=https_user,
https_pass=https_pass)
ret['changes']['remotes/{0}'.format(remote)] = {
'old': salt.utils.url.redact_http_basic_auth(fetch_url),
'new': redacted_fetch_url
}
comments.append(
'Remote \'{0}\' set to {1}'.format(
'Remote \'{0}\' changed from {1} to {2}'.format(
remote,
salt.utils.url.redact_http_basic_auth(fetch_url),
redacted_fetch_url
)
)

if remote_rev is not None:
if __opts__['test']:
if not _revs_equal(local_rev, remote_rev, remote_rev_type):
ret['changes']['revision'] = {
'old': local_rev, 'new': remote_rev
}
actions = []
if not has_remote_rev:
actions.append(
'Remote \'{0}\' would be fetched'
.format(remote)
'Remote \'{0}\' would be fetched'.format(remote)
)
if branch is not None:
if branch != local_branch:
ret['changes']['local branch'] = {
'old': local_branch, 'new': branch
}
if branch not in all_local_branches:
if (not revs_match) \
and (update_head or (branch is not None
and branch != local_branch)):
ret['changes']['revision'] = {
'old': local_rev, 'new': remote_rev
}
if branch is not None and branch != local_branch:
if branch not in all_local_branches:
actions.append(
'New branch \'{0}\' would be checked '
'out, with {1} as a starting '
'point'.format(branch, remote_loc)
)
if desired_upstream:
actions.append(
'New branch \'{0}\' would be checked '
'out, with {1} ({2}) as a starting '
'point'.format(
branch,
desired_upstream
if desired_upstream
else rev,
_short_sha(remote_rev)
)
'Tracking branch would be set to {0}'
.format(desired_upstream)
)
if desired_upstream:
actions.append(
'Tracking branch would be set to {0}'
.format(desired_upstream)
)
else:
if fast_forward is False:
ret['changes']['hard reset'] = True
actions.append(
'Branch \'{0}\' would be checked out '
'and {1} to {2}'.format(
branch,
merge_action,
_short_sha(remote_rev)
)
else:
actions.append(
'Branch \'{0}\' would be checked out '
'and {1} to {2}'.format(
branch,
merge_action,
_short_sha(remote_rev)
)
)
else:
if not _revs_equal(local_rev,
remote_rev,
remote_rev_type):
if fast_forward is True:
actions.append(
'Repository would be fast-forwarded from '
'{0} to {1}'.format(
_short_sha(local_rev),
_short_sha(remote_rev)
if not revs_match:
if update_head:
if fast_forward is True:
actions.append(
'Repository would be fast-forwarded from '
'{0} to {1}'.format(
_short_sha(local_rev),
_short_sha(remote_rev)
)
)
else:
actions.append(
'Repository would be {0} from {1} to {2}'
.format(
'hard-reset'
if force_reset and has_remote_rev
else 'updated',
_short_sha(local_rev),
_short_sha(remote_rev)
)
)
)
else:
actions.append(
'Repository would be {0} from {1} to {2}'
.format(
'hard-reset'
if force_reset and has_remote_rev
else 'updated',
_short_sha(local_rev),
_short_sha(remote_rev)
'Local HEAD ({0}) does not match {1} but '
'update_head=False, HEAD would not be '
'updated locally'.format(
local_rev[:7],
remote_loc
)
)

# Check if upstream needs changing
upstream_changed = False
if not upstream and desired_upstream:
upstream_changed = True
actions.append(
'Tracking branch would be set to {0}'.format(
desired_upstream
)
)
elif upstream and desired_upstream is False:
upstream_changed = True
actions.append(
'Tracking branch would be unset'
)
elif desired_upstream and upstream != desired_upstream:
upstream_changed = True
actions.append(
'Tracking branch would be '
'updated to {0}'.format(desired_upstream)
)
if upstream_changed:
ret['changes']['upstream'] = {
'old': upstream,
'new': desired_upstream
}
if ret['changes']:
return _neutral_test(ret, _format_comments(actions))
else:
formatted_actions = _format_comments(actions)
if not revs_match \
and not update_head \
and formatted_actions:
ret['comment'] = formatted_actions
return ret
return _uptodate(ret,
target,
_format_comments(actions))
@@ -970,7 +971,7 @@ def latest(name,

if not has_remote_rev:
try:
output = __salt__['git.fetch'](
fetch_changes = __salt__['git.fetch'](
target,
remote=remote,
force=force_fetch,
@@ -991,9 +992,11 @@ def latest(name,
msg += ':\n\n' + str(exc)
return _fail(ret, msg, comments)
else:
fetch_changes = _parse_fetch(output)
if fetch_changes:
ret['changes']['fetch'] = fetch_changes
comments.append(
'{0} was fetched, resulting in updated '
'refs'.format(name)
)

try:
__salt__['git.rev_parse'](
@@ -1003,10 +1006,24 @@ def latest(name,
except CommandExecutionError as exc:
return _fail(
ret,
'Fetch did not successfully retrieve rev '
'{0}: {1}'.format(rev, exc)
'Fetch did not successfully retrieve rev \'{0}\' '
'from {1}: {2}'.format(rev, name, exc)
)

if (not revs_match and not update_head) \
and (branch is None or branch == local_branch):
# Rev now exists locally (was fetched), and since we're
# not updating HEAD we'll just exit here.
ret['comment'] = remote_loc.capitalize() \
if rev == 'HEAD' \
else remote_loc
ret['comment'] += (
' is already present and local HEAD ({0}) does not '
'match, but update_head=False. HEAD has not been '
'updated locally.'.format(local_rev[:7])
)
return ret

# Now that we've fetched, check again whether or not
# the update is a fast-forward.
if base_rev is None:
@@ -1034,8 +1051,8 @@ def latest(name,
return _fail(
ret,
'Local branch \'{0}\' has uncommitted '
'changes. Set \'force_checkout\' to discard '
'them and proceed.'
'changes. Set \'force_checkout\' to True to '
'discard them and proceed.'
)

# TODO: Maybe re-retrieve all_local_branches to handle
@@ -1044,7 +1061,7 @@ def latest(name,
# a long time to complete.
if branch not in all_local_branches:
if rev == 'HEAD':
checkout_rev = head_ref
checkout_rev = remote_rev
else:
checkout_rev = desired_upstream \
if desired_upstream \
@@ -1058,17 +1075,8 @@ def latest(name,
force=force_checkout,
opts=checkout_opts,
user=user)
ret['changes']['local branch'] = {
'old': local_branch, 'new': branch
}

if fast_forward is False:
if rev == 'HEAD':
reset_ref = head_ref
else:
reset_ref = desired_upstream \
if desired_upstream \
else rev
__salt__['git.reset'](
target,
opts=['--hard', remote_rev],
@@ -1076,10 +1084,7 @@ def latest(name,
)
ret['changes']['forced update'] = True
comments.append(
'Repository was hard-reset to {0} ({1})'.format(
reset_ref,
_short_sha(remote_rev)
)
'Repository was hard-reset to {0}'.format(remote_loc)
)

if branch_opts is not None:
@@ -1088,11 +1093,6 @@ def latest(name,
base_branch,
opts=branch_opts,
user=user)
ret['changes']['upstream'] = {
'old': upstream,
'new': desired_upstream if desired_upstream
else None
}
comments.append(upstream_action)

# Fast-forward to the desired revision
@@ -1100,7 +1100,7 @@ def latest(name,
and not _revs_equal(base_rev,
remote_rev,
remote_rev_type):
if desired_upstream:
if desired_upstream or rev == 'HEAD':
# Check first to see if we are on a branch before
# trying to merge changes. (The call to
# git.symbolic_ref will only return output if HEAD
@@ -1109,8 +1109,7 @@ def latest(name,
'HEAD',
opts=['--quiet'],
ignore_retcode=True):
merge_rev = head_ref \
if rev == 'HEAD' \
merge_rev = remote_rev if rev == 'HEAD' \
else desired_upstream
__salt__['git.merge'](
target,
@@ -1119,15 +1118,14 @@ def latest(name,
user=user
)
comments.append(
'Repository was fast-forwarded to {0} ({1})'
.format(merge_rev, _short_sha(remote_rev))
'Repository was fast-forwarded to {0}'
.format(remote_loc)
)
else:
# Shouldn't ever happen but fail with a meaningful
# error message if it does.
msg = (
'Unable to merge {0}, HEAD is detached'
.format(desired_upstream)
return _fail(
ret,
'Unable to fast-forward, HEAD is detached',
comments
)
else:
# Update is a fast forward, but we cannot merge to that
@@ -1161,20 +1159,36 @@ def latest(name,
return _neutral_test(ret, msg)
else:
return _uptodate(ret, target, msg)
output = __salt__['git.fetch'](
target,
remote=remote,
force=force_fetch,
refspecs=refspecs,
user=user,
identity=identity)
fetch_changes = _parse_fetch(output)
if fetch_changes:
ret['changes']['fetch'] = fetch_changes
comments.append(
'Bare repository at {0} was fetched'.format(target)
)

try:
fetch_changes = __salt__['git.fetch'](
target,
remote=remote,
force=force_fetch,
refspecs=refspecs,
user=user,
identity=identity)
except CommandExecutionError as exc:
msg = 'Fetch failed'
if isinstance(exc, CommandExecutionError):
msg += (
'. Set \'force_fetch\' to True to force '
'the fetch if the failure was due to it '
'bein non-fast-forward. Output of the '
'fetch command follows:\n\n'
)
msg += _strip_exc(exc)
else:
msg += ':\n\n' + str(exc)
return _fail(ret, msg, comments)
else:
comments.append(
'Bare repository at {0} was fetched{1}'.format(
target,
', resulting in updated refs'
if fetch_changes
else ''
)
)
try:
new_rev = __salt__['git.revision'](
cwd=target,
@@ -1320,7 +1334,7 @@ def latest(name,
__salt__['git.list_branches'](target,
user=user):
if rev == 'HEAD':
checkout_rev = head_ref
checkout_rev = remote_rev
else:
checkout_rev = desired_upstream \
if desired_upstream \
@@ -1330,13 +1344,10 @@ def latest(name,
opts=['-b', branch],
user=user)
comments.append(
'Branch \'{0}\' checked out, with {1} ({2}) '
'Branch \'{0}\' checked out, with {1} '
'as a starting point'.format(
branch,
desired_upstream
if desired_upstream
else rev,
_short_sha(remote_rev)
remote_loc
)
)

@@ -1344,25 +1355,13 @@ def latest(name,
_get_local_rev_and_branch(target, user)

if not _revs_equal(local_rev, remote_rev, remote_rev_type):
if rev == 'HEAD':
# Shouldn't happen, if we just cloned the repo and
# than the remote HEAD and remote_rev should be the
# same SHA1.
reset_ref = head_ref
else:
reset_ref = desired_upstream \
if desired_upstream \
else rev
__salt__['git.reset'](
target,
opts=['--hard', remote_rev],
user=user
)
comments.append(
'Repository was reset to {0} ({1})'.format(
reset_ref,
_short_sha(remote_rev)
)
'Repository was reset to {0}'.format(remote_loc)
)

try:
@@ -1785,7 +1784,7 @@ def config_unset(name,
**{'global': global_}
)

for key_name, values in six.iteritems(pre):
for key_name in pre:
if key_name not in post:
ret['changes'][key_name] = pre[key_name]
unset = [x for x in pre[key_name] if x not in post[key_name]]
@@ -1817,7 +1816,6 @@ def config_unset(name,


def config_set(name,
cwd=None,
value=None,
multivar=None,
repo=None,
@@ -91,7 +91,15 @@ def compile_template(template,
render_kwargs.update(kwargs)
if argline:
render_kwargs['argline'] = argline
start = time.time()
ret = render(input_data, saltenv, sls, **render_kwargs)
log.profile(
'Time (in seconds) to render \'{0}\' using \'{1}\' renderer: {2}'.format(
template,
render.__module__.split('.')[-1],
time.time() - start
)
)
if ret is None:
# The file is empty or is being written elsewhere
time.sleep(0.01)
@@ -520,11 +520,7 @@ def which(exe=None):
# executable in cwd or fullpath
return exe

# default path based on busybox's default
default_path = '/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin'
search_path = os.environ.get('PATH', default_path)
path_ext = os.environ.get('PATHEXT', '.EXE')
ext_list = path_ext.split(';')
ext_list = os.environ.get('PATHEXT', '.EXE').split(';')

@real_memoize
def _exe_has_ext():
@@ -541,7 +537,13 @@ def which(exe=None):
continue
return False

search_path = search_path.split(os.pathsep)
# Enhance POSIX path for the reliability at some environments, when $PATH is changing
# This also keeps order, where 'first came, first win' for cases to find optional alternatives
search_path = os.environ.get('PATH') and os.environ['PATH'].split(os.pathsep) or list()
for default_path in ['/bin', '/sbin', '/usr/bin', '/usr/sbin', '/usr/local/bin']:
if default_path not in search_path:
search_path.append(default_path)
os.environ['PATH'] = os.pathsep.join(search_path)
for path in search_path:
full_path = os.path.join(path, exe)
if _is_executable_file_or_link(full_path):
@@ -554,16 +556,10 @@ def which(exe=None):
# safely rely on that behavior
if _is_executable_file_or_link(full_path + ext):
return full_path + ext
log.trace(
'\'{0}\' could not be found in the following search '
'path: \'{1}\''.format(
exe, search_path
)
)
log.trace('\'{0}\' could not be found in the following search path: \'{1}\''.format(exe, search_path))
else:
log.error(
'No executable was passed to be searched by salt.utils.which()'
)
log.error('No executable was passed to be searched by salt.utils.which()')

return None


|
@ -1,18 +1,20 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Helpful decorators module writing
|
||||
Helpful decorators for module writing
|
||||
'''
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
import inspect
|
||||
import logging
|
||||
import time
|
||||
from functools import wraps
|
||||
from collections import defaultdict
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
from salt.exceptions import CommandNotFoundError
|
||||
from salt.log import LOG_LEVELS
|
||||
|
||||
# Import 3rd-party libs
|
||||
import salt.ext.six as six
|
||||
@ -140,6 +142,30 @@ class depends(Depends): # pylint: disable=C0103
|
||||
'''
|
||||
|
||||
|
||||
def timing(function):
|
||||
'''
|
||||
Decorator wrapper to log execution time, for profiling purposes
|
||||
'''
|
||||
@wraps(function)
|
||||
def wrapped(*args, **kwargs):
|
||||
start_time = time.time()
|
||||
ret = function(*args, **salt.utils.clean_kwargs(**kwargs))
|
||||
end_time = time.time()
|
||||
if function.__module__.startswith('salt.loaded.int.'):
|
||||
mod_name = function.__module__[16:]
|
||||
else:
|
||||
mod_name = function.__module__
|
||||
log.profile(
|
||||
'Function {0}.{1} took {2:.20f} seconds to execute'.format(
|
||||
mod_name,
|
||||
function.__name__,
|
||||
end_time - start_time
|
||||
)
|
||||
)
|
||||
return ret
|
||||
return wrapped
|
||||
|
||||
|
||||
def which(exe):
|
||||
'''
|
||||
Decorator wrapper for salt.utils.which
|
||||
|
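The new ``timing`` decorator logs how long a wrapped call takes at the PROFILE level. A hypothetical usage sketch (the import path is assumed from the surrounding file, and ``refresh_cache`` is an invented placeholder function, not part of this commit):

.. code-block:: python

    import time

    # Assumed import path for the decorator added above
    from salt.utils.decorators import timing


    @timing
    def refresh_cache():
        # Stand-in for an expensive operation; the decorator logs the
        # elapsed wall-clock time at the PROFILE level when it returns.
        time.sleep(0.5)
        return True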
@@ -145,21 +145,25 @@ class IptablesTestCase(TestCase):

# 'get_saved_rules' function tests: 1

@patch('salt.modules.iptables._parse_conf', MagicMock(return_value=False))
def test_get_saved_rules(self):
'''
Test if it return a data structure of the rules in the conf file
'''
self.assertFalse(iptables.get_saved_rules())
mock = MagicMock(return_value=False)
with patch.object(iptables, '_parse_conf', mock):
self.assertFalse(iptables.get_saved_rules())
mock.assert_called_with(conf_file=None, family='ipv4')

# 'get_rules' function tests: 1

@patch('salt.modules.iptables._parse_conf', MagicMock(return_value=False))
def test_get_rules(self):
'''
Test if it return a data structure of the current, in-memory rules
'''
self.assertFalse(iptables.get_rules())
mock = MagicMock(return_value=False)
with patch.object(iptables, '_parse_conf', mock):
self.assertFalse(iptables.get_rules())
mock.assert_called_with(in_mem=True, family='ipv4')

# 'get_saved_policy' function tests: 1

|
@ -5,6 +5,7 @@
|
||||
|
||||
# Import Python Libs
|
||||
from __future__ import absolute_import
|
||||
import textwrap
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from salttesting import TestCase, skipIf
|
||||
@ -91,6 +92,35 @@ class RhServiceTestCase(TestCase):
|
||||
'''
|
||||
return MagicMock(return_value=bol)
|
||||
|
||||
def test__chkconfig_is_enabled(self):
|
||||
'''
|
||||
test _chkconfig_is_enabled function
|
||||
'''
|
||||
name = 'atd'
|
||||
chkconfig_out = textwrap.dedent('''\
|
||||
|
||||
{0} 0:off 1:off 2:off 3:on 4:on 5:on 6:off
|
||||
'''.format(name))
|
||||
xinetd_out = textwrap.dedent('''\
|
||||
xinetd based services:
|
||||
{0}: on
|
||||
'''.format(name))
|
||||
|
||||
with patch.object(rh_service, '_runlevel', MagicMock(return_value=3)):
|
||||
mock_run = MagicMock(return_value={'retcode': 0,
|
||||
'stdout': chkconfig_out})
|
||||
with patch.dict(rh_service.__salt__, {'cmd.run_all': mock_run}):
|
||||
self.assertTrue(rh_service._chkconfig_is_enabled(name))
|
||||
self.assertFalse(rh_service._chkconfig_is_enabled(name, 2))
|
||||
self.assertTrue(rh_service._chkconfig_is_enabled(name, 3))
|
||||
|
||||
mock_run = MagicMock(return_value={'retcode': 0,
|
||||
'stdout': xinetd_out})
|
||||
with patch.dict(rh_service.__salt__, {'cmd.run_all': mock_run}):
|
||||
self.assertTrue(rh_service._chkconfig_is_enabled(name))
|
||||
self.assertTrue(rh_service._chkconfig_is_enabled(name, 2))
|
||||
self.assertTrue(rh_service._chkconfig_is_enabled(name, 3))
|
||||
|
||||
# 'get_enabled' function tests: 1
|
||||
|
||||
def test_get_enabled(self):
|
||||
|
@ -71,9 +71,9 @@ class TestWhich(integration.TestCase):
|
||||
False,
|
||||
# The second, iterating through $PATH, should also return False,
|
||||
# still checking for Linux
|
||||
False,
|
||||
# Lastly return True, this is the windows check.
|
||||
True
|
||||
# which() will add 4 extra paths to the given one, os.access will
|
||||
# be called 5 times
|
||||
False, False, False, False, False
|
||||
]
|
||||
# Let's patch os.environ to provide a custom PATH variable
|
||||
with patch.dict(os.environ, {'PATH': '/bin'}):
|
||||
|