Mirror of https://github.com/valitydev/salt.git (synced 2024-11-07 08:58:59 +00:00)

Merge pull request #37609 from rallytime/merge-2016.11

[2016.11] Merge forward from carbon to 2016.11

Commit: 8ba9ceda50
@@ -3088,7 +3088,7 @@ master, specify the log_file of the syndic daemon.

syndic_log_file: salt-syndic.log

.. master_conf:: syndic_failover
.. conf_master:: syndic_failover

``syndic_failover``
-------------------
@@ -992,7 +992,7 @@ The port number used for HTTP proxy access.

.. conf_minion:: proxy_username

``proxy_username``
--------------
------------------

Default: ``''``

@@ -1005,7 +1005,7 @@ The username used for HTTP proxy access.

.. conf_minion:: proxy_password

``proxy_password``
--------------
------------------

Default: ``''``
@@ -198,7 +198,7 @@ The name of the service in which to create the VM. If this is not specified,
then a service will be created with the same name as the VM.

virtual_network_name
------------
--------------------
Optional. The name of the virtual network for the VM to join. If this is not
specified, then no virtual network will be joined.
@@ -43,10 +43,10 @@ Key events

.. salt:event:: salt/key

Fired when accepting and rejecting minions keys on the Salt master.
These happen as a result of actions undertaken by the `salt-key` command.

:var id: The minion ID.
:var act: The new status of the minion key: ``accept``, ``pend``,
``reject``, ``delete``.
:var act: The new status of the minion key: ``accept``, ``delete``,

.. warning:: If a master is in :conf_master:`auto_accept mode`, ``salt/key`` events
will not be fired when the keys are accepted. In addition, pre-seeding
@@ -58,6 +58,14 @@ other Windows service.

If the minion won't start, try installing the Microsoft Visual C++ 2008 x64 SP1
redistributable. Allow all Windows updates to run salt-minion smoothly.

Installation Prerequisites
--------------------------

Most Salt functionality should work just fine right out of the box. A few Salt
modules rely on PowerShell. The minimum version of PowerShell required for Salt
is version 3. If you intend to work with DSC then Powershell version 5 is the
minimum.

.. _windows-installer-options:

Silent Installer Options
@@ -4,3 +4,19 @@ Salt 2016.3.5 Release Notes

Version 2016.3.5 is a bugfix release for :doc:`2016.3.0
</topics/releases/2016.3.0>`.

Improved Checksum Handling in :py:func:`file.managed <salt.states.file.managed>`, :py:func:`archive.extracted <salt.states.archive.extracted>` States
-----------------------------------------------------------------------------------------------------------------------------------------------------

When the ``source_hash`` argument for these states refers to a file containing
checksums, Salt now looks for checksums matching the name of the source URI, as
well as the file being managed. Prior releases only looked for checksums
matching the filename being managed. Additionally, a new argument
(``source_hash_name``) has been added, which allows the user to disambiguate
ambiguous matches when more than one matching checksum is found in the
``source_hash`` file.

A more detailed explanation of this functionality can be found in the
:py:func:`file.managed <salt.states.file.managed>` documentation, in the
section for the new ``source_hash_name`` argument.
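The matching behaviour described in the release note above can be pictured with a short, self-contained sketch. This is an illustrative re-implementation, not the code Salt ships: the helper name, the ``sha256sum``-style hash-file layout, and the argument names are assumptions made for the example.

.. code-block:: python

    import os
    import re

    def find_checksum(hash_file_lines, managed_name, source_uri, source_hash_name=None):
        """Return the first checksum whose filename matches, preferring
        source_hash_name, then the managed file's basename, then the
        source URI's basename (the fallback order the note describes)."""
        candidates = []
        if source_hash_name:
            candidates.append(source_hash_name)
        candidates.append(os.path.basename(managed_name))
        candidates.append(os.path.basename(source_uri))

        for wanted in candidates:
            for line in hash_file_lines:
                # assume "<hex digest>  <filename>" entries, one per line
                match = re.match(r'(?i)^([a-f0-9]+)\s+\*?(.+)$', line.strip())
                if match and os.path.basename(match.group(2)) == wanted:
                    return match.group(1)
        return None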
@@ -26,7 +26,7 @@ Builds for a few platforms are available as part of the RC at https://repo.salts

baseurl=https://repo.saltstack.com/salt_rc/yum/redhat/$releasever/$basearch/

.. code-block::
.. code-block:: none

deb http://repo.saltstack.com/salt_rc/apt/ubuntu/14.04/amd64 jessie main
@@ -377,7 +377,7 @@ using the ``ca_bundle`` variable.
)

Updating CA Bundles
+++++++++++++++++++
'''''''''''''''''''
The ``update_ca_bundle()`` function can be used to update the bundle file at a
specified location. If the target location is not specified, then it will
attempt to auto-detect the location of the bundle file. If the URL to download
@@ -125,6 +125,7 @@ case "$1" in
echo -n "Checking for service salt-api "
checkproc $SALTAPI
rc_status -v
RETVAL=$?
elif [ -f $DEBIAN_VERSION ]; then
if [ -f $LOCKFILE ]; then
RETVAL=0

@@ -108,6 +108,7 @@ case "$1" in
echo -n "Checking for service salt-master "
checkproc $SALTMASTER
rc_status -v
RETVAL=$?
elif [ -f $DEBIAN_VERSION ]; then
if [ -f $LOCKFILE ]; then
RETVAL=0

@@ -57,6 +57,7 @@ start() {
if [ -f $SUSE_RELEASE ]; then
startproc -p /var/run/$SERVICE.pid $SALTMINION -d $MINION_ARGS
rc_status -v
RETVAL=$?
elif [ -e $DEBIAN_VERSION ]; then
if [ -f $LOCKFILE ]; then
echo -n "already started, lock file found"
@@ -114,6 +115,7 @@ case "$1" in
echo -n "Checking for service salt-minion "
checkproc $SALTMINION
rc_status -v
RETVAL=$?
elif [ -f $DEBIAN_VERSION ]; then
if [ -f $LOCKFILE ]; then
RETVAL=0

@@ -109,6 +109,7 @@ case "$1" in
echo -n "Checking for service salt-syndic "
checkproc $SALTSYNDIC
rc_status -v
RETVAL=$?
elif [ -f $DEBIAN_VERSION ]; then
if [ -f $LOCKFILE ]; then
RETVAL=0
@@ -1888,6 +1888,7 @@ def minion_config(path,
opts = apply_minion_config(overrides, defaults,
cache_minion_id=cache_minion_id,
minion_id=minion_id)
apply_sdb(opts)
_validate_opts(opts)
return opts

@@ -1963,7 +1964,6 @@ def syndic_config(master_config_path,
return opts


# ----- Salt Cloud Configuration Functions ---------------------------------->
def apply_sdb(opts, sdb_opts=None):
'''
Recurse for sdb:// links for opts
@@ -1988,6 +1988,7 @@ def apply_sdb(opts, sdb_opts=None):
return sdb_opts


# ----- Salt Cloud Configuration Functions ---------------------------------->
def cloud_config(path, env_var='SALT_CLOUD_CONFIG', defaults=None,
master_config_path=None, master_config=None,
providers_config_path=None, providers_config=None,
@@ -3173,6 +3174,7 @@ def master_config(path, env_var='SALT_MASTER_CONFIG', defaults=None, exit_on_con
opts['nodegroups'] = DEFAULT_MASTER_OPTS.get('nodegroups', {})
if opts.get('transport') == 'raet' and 'aes' in opts:
opts.pop('aes')
apply_sdb(opts)
return opts
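For orientation, here is a minimal sketch of what an ``sdb://`` resolution pass like ``apply_sdb()`` conceptually does to the loaded configuration. It is not Salt's implementation (the real logic uses the SDB driver machinery); the lookup table and helper name are invented for the example.

.. code-block:: python

    # Plain dict standing in for Salt's SDB drivers -- illustrative only.
    FAKE_SDB = {'sdb://secrets/db_password': 'hunter2'}

    def apply_sdb_sketch(opts):
        """Recursively replace string values that look like sdb:// URIs."""
        if isinstance(opts, str):
            return FAKE_SDB.get(opts, opts) if opts.startswith('sdb://') else opts
        if isinstance(opts, dict):
            return {key: apply_sdb_sketch(value) for key, value in opts.items()}
        if isinstance(opts, list):
            return [apply_sdb_sketch(item) for item in opts]
        return opts

    print(apply_sdb_sketch({'db': {'password': 'sdb://secrets/db_password'}}))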
@@ -1087,10 +1087,12 @@ _OS_FAMILY_MAP = {
'SLES_SAP': 'Suse',
'Solaris': 'Solaris',
'SmartOS': 'Solaris',
'OmniOS': 'Solaris',
'OpenIndiana Development': 'Solaris',
'OpenIndiana': 'Solaris',
'OpenSolaris Development': 'Solaris',
'OpenSolaris': 'Solaris',
'Oracle Solaris': 'Solaris',
'Arch ARM': 'Arch',
'Manjaro': 'Arch',
'Antergos': 'Arch',
@@ -1455,12 +1457,19 @@ def os_data():
grains.update(_linux_cpudata())
grains.update(_linux_gpu_data())
elif grains['kernel'] == 'SunOS':
grains['os_family'] = 'Solaris'
if salt.utils.is_smartos():
# See https://github.com/joyent/smartos-live/issues/224
uname_v = os.uname()[3]
uname_v = os.uname()[3]  # format: joyent_20161101T004406Z
uname_v = uname_v[uname_v.index('_')+1:]
grains['os'] = grains['osfullname'] = 'SmartOS'
grains['osrelease'] = uname_v[uname_v.index('_')+1:]
# store a parsed version of YYYY.MM.DD as osrelease
grains['osrelease'] = ".".join([
uname_v.split('T')[0][0:4],
uname_v.split('T')[0][4:6],
uname_v.split('T')[0][6:8],
])
# store a untouched copy of the timestamp in osrelease_stamp
grains['osrelease_stamp'] = uname_v
if salt.utils.is_smartos_globalzone():
grains.update(_smartos_computenode_data())
elif os.path.isfile('/etc/release'):
@@ -1468,10 +1477,10 @@ def os_data():
rel_data = fp_.read()
try:
release_re = re.compile(
r'((?:Open)?Solaris|OpenIndiana) (Development)?'
r'\s*(\d+ \d+\/\d+|oi_\S+|snv_\S+)?'
r'((?:Open|Oracle )?Solaris|OpenIndiana|OmniOS) (Development)?'
r'\s*(\d+\.?\d*|v\d+)\s?[A-Z]*\s?(r\d+|\d+\/\d+|oi_\S+|snv_\S+)?'
)
osname, development, osrelease = \
osname, development, osmajorrelease, osminorrelease = \
release_re.search(rel_data).groups()
except AttributeError:
# Set a blank osrelease grain and fallback to 'Solaris'
@@ -1481,8 +1490,26 @@ def os_data():
else:
if development is not None:
osname = ' '.join((osname, development))
uname_v = os.uname()[3]
grains['os'] = grains['osfullname'] = osname
grains['osrelease'] = osrelease
if osname in ['Oracle Solaris'] and uname_v.startswith(osmajorrelease):
# Oracla Solars 11 and up have minor version in uname
grains['osrelease'] = uname_v
elif osname in ['OmniOS']:
# OmniOS
osrelease = []
osrelease.append(osmajorrelease[1:])
osrelease.append(osminorrelease[1:])
grains['osrelease'] = ".".join(osrelease)
grains['osrelease_stamp'] = uname_v
else:
# Sun Solaris 10 and earlier/comparable
osrelease = []
osrelease.append(osmajorrelease)
if osminorrelease:
osrelease.append(osminorrelease)
grains['osrelease'] = ".".join(osrelease)
grains['osrelease_stamp'] = uname_v

grains.update(_sunos_cpudata())
elif grains['kernel'] == 'VMkernel':
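A small standalone sketch of the SmartOS version parsing added in the grains hunk above, for clarity. The function name is ours; the string handling mirrors the grain code.

.. code-block:: python

    def parse_smartos_release(uname_v):
        """Split e.g. 'joyent_20161101T004406Z' into the parsed osrelease and
        the untouched osrelease_stamp, mirroring the grains logic above."""
        stamp = uname_v[uname_v.index('_') + 1:]   # '20161101T004406Z'
        date_part = stamp.split('T')[0]            # '20161101'
        osrelease = '.'.join([date_part[0:4], date_part[4:6], date_part[6:8]])
        return osrelease, stamp

    print(parse_smartos_release('joyent_20161101T004406Z'))
    # ('2016.11.01', '20161101T004406Z')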
@@ -165,7 +165,6 @@ def minion_mods(
context=None,
utils=None,
whitelist=None,
include_errors=False,
initial_load=False,
loaded_base_name=None,
notify=False,
@@ -188,7 +187,6 @@ def minion_mods(
configuration.

:param list whitelist: A list of modules which should be whitelisted.
:param bool include_errors: Deprecated flag! Unused.
:param bool initial_load: Deprecated flag! Unused.
:param str loaded_base_name: A string marker for the loaded base name.
:param bool notify: Flag indicating that an event should be fired upon
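The ``include_errors`` and ``initial_load`` flags are documented above as deprecated and unused. A hedged sketch of the call pattern without them, mirroring the ``SMinion`` change later in this commit (the config path is only an example, not taken from the diff):

.. code-block:: python

    import salt.config
    import salt.loader

    # Build the minion's execution-module map without the deprecated flags.
    opts = salt.config.minion_config('/etc/salt/minion')
    utils = salt.loader.utils(opts)
    functions = salt.loader.minion_mods(opts, utils=utils)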
@@ -2351,7 +2351,7 @@ class ClearFuncs(object):
self.opts['ext_job_cache']
)
)
except AttributeError:
except (AttributeError, KeyError):
save_load_func = False
log.critical(
'The specified returner used for the external job cache '
@@ -682,8 +682,7 @@ class SMinion(MinionBase):
).compile_pillar()

self.utils = salt.loader.utils(self.opts)
self.functions = salt.loader.minion_mods(self.opts, utils=self.utils,
include_errors=True)
self.functions = salt.loader.minion_mods(self.opts, utils=self.utils)
self.serializers = salt.loader.serializers(self.opts)
self.returners = salt.loader.returners(self.opts, self.functions)
self.proxy = salt.loader.proxy(self.opts, self.functions, self.returners, None)
@@ -1509,9 +1508,13 @@ class Minion(MinionBase):
ret['id'] = opts['id']
for returner in set(data['ret'].split(',')):
try:
minion_instance.returners['{0}.returner'.format(
returner
)](ret)
returner_str = '{0}.returner'.format(returner)
if returner_str in minion_instance.returners:
minion_instance.returners[returner_str](ret)
else:
returner_err = minion_instance.returners.missing_fun_string(returner_str)
log.error('Returner {0} could not be loaded: {1}'.format(
returner_str, returner_err))
except Exception as exc:
log.error(
'The return failed for job {0} {1}'.format(
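The guarded returner dispatch added above can be illustrated in isolation. This sketch substitutes a plain dict for the minion's lazy-loaded returners and a simple log message for ``missing_fun_string``; it is not the minion code itself.

.. code-block:: python

    import logging

    log = logging.getLogger(__name__)

    # Plain-dict stand-in for the lazy-loaded returner map.
    returners = {'local.returner': lambda ret: print('stored', ret['jid'])}

    def dispatch_return(ret, requested):
        """Call each requested returner only if it actually loaded."""
        for returner in set(requested.split(',')):
            returner_str = '{0}.returner'.format(returner)
            if returner_str in returners:
                returners[returner_str](ret)
            else:
                log.error('Returner %s could not be loaded', returner_str)

    dispatch_return({'jid': '20161101000000000000'}, 'local,missing')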
@@ -683,7 +683,7 @@ def upgrade(name,
force_x86=False,
package_args=None):
'''
.. version-added:: 2016.3.4
.. versionadded:: 2016.3.4

Instructs Chocolatey to upgrade packages on the system. (update is being
deprecated)
@@ -61,14 +61,15 @@ __func_alias__ = {
'makedirs_': 'makedirs'
}

HASHES = [
['sha512', 128],
['sha384', 96],
['sha256', 64],
['sha224', 56],
['sha1', 40],
['md5', 32],
]
HASHES = {
'sha512': 128,
'sha384': 96,
'sha256': 64,
'sha224': 56,
'sha1': 40,
'md5': 32,
}
HASHES_REVMAP = dict([(y, x) for x, y in six.iteritems(HASHES)])


def __virtual__():
@@ -538,12 +539,22 @@ def get_hash(path, form='sha256', chunk_size=65536):
return salt.utils.get_hash(os.path.expanduser(path), form, chunk_size)


def get_source_sum(source, source_hash, saltenv='base'):
def get_source_sum(file_name='',
source='',
source_hash=None,
source_hash_name=None,
saltenv='base'):
'''
.. versionadded:: 2016.11.0

Obtain a checksum and hash type, given a ``source_hash`` file/expression
and the source file name.
Used by :py:func:`file.get_managed <salt.modules.file.get_managed>` to
obtain the hash and hash type from the parameters specified below.

file_name
Optional file name being managed, for matching with
:py:func:`file.extract_hash <salt.modules.file.extract_hash>`.

.. versionadded:: 2016.11.1

source
Source file, as used in :py:mod:`file <salt.states.file>` and other
@@ -559,27 +570,43 @@ def get_source_sum(source, source_hash, saltenv='base'):
<salt.modules.file.extract_hash>` will be used to obtain a hash from
it.

source_hash_name
Specific file name to look for when ``source_hash`` refers to a remote
file, used to disambiguate ambiguous matches.

.. versionadded:: 2016.11.1

saltenv : base
Salt fileserver environment from which to retrive the source_hash. This
value will only be used when ``source_hash`` refers to a file on the
Salt fileserver (i.e. one beginning with ``salt://``).

CLI Examples:
CLI Example:

.. code-block:: bash

salt '*' file.get_source_sum /etc/foo.conf source_hash=499ae16dcae71eeb7c3a30c75ea7a1a6
salt '*' file.get_source_sum /etc/foo.conf source_hash=md5=499ae16dcae71eeb7c3a30c75ea7a1a6
salt '*' file.get_source_sum /etc/foo.conf source_hash=https://foo.domain.tld/hashfile
salt '*' file.get_source_sum /tmp/foo.tar.gz source=http://mydomain.tld/foo.tar.gz source_hash=499ae16dcae71eeb7c3a30c75ea7a1a6
salt '*' file.get_source_sum /tmp/foo.tar.gz source=http://mydomain.tld/foo.tar.gz source_hash=https://mydomain.tld/hashes.md5
salt '*' file.get_source_sum /tmp/foo.tar.gz source=http://mydomain.tld/foo.tar.gz source_hash=https://mydomain.tld/hashes.md5 source_hash_name=./dir2/foo.tar.gz
'''
def _invalid_source_hash_format():
'''
DRY helper for reporting invalid source_hash input
'''
raise CommandExecutionError(
'Source hash {0} format is invalid. It must be in the format '
'<hash type>=<hash>, or it must be a supported protocol: {1}'
.format(source_hash, ', '.join(salt.utils.files.VALID_PROTOS))
'Source hash {0} format is invalid. The supported formats are: '
'1) a hash, 2) an expression in the format <hash_type>=<hash>, or '
'3) either a path to a local file containing hashes, or a URI of '
'a remote hash file. Supported protocols for remote hash files '
'are: {1}. The hash may also not be of a valid length, the '
'following are supported hash types and lengths: {2}.'.format(
source_hash,
', '.join(salt.utils.files.VALID_PROTOS),
', '.join(
['{0} ({1})'.format(HASHES_REVMAP[x], x)
for x in sorted(HASHES_REVMAP)]
),
)
)

hash_fn = None
@@ -606,7 +633,7 @@ def get_source_sum(source, source_hash, saltenv='base'):
_invalid_source_hash_format()

if hash_fn is not None:
ret = extract_hash(hash_fn, '', source)
ret = extract_hash(hash_fn, '', file_name, source, source_hash_name)
if ret is None:
_invalid_source_hash_format()
return ret
@@ -624,12 +651,34 @@ def get_source_sum(source, source_hash, saltenv='base'):
_invalid_source_hash_format()
ret['hsum'] = source_hash
source_hash_len = len(source_hash)
for hash_type, hash_len in HASHES:
if source_hash_len == hash_len:
ret['hash_type'] = hash_type
break
if source_hash_len in HASHES_REVMAP:
ret['hash_type'] = HASHES_REVMAP[source_hash_len]
else:
_invalid_source_hash_format()

if ret['hash_type'] not in HASHES:
raise CommandExecutionError(
'Invalid hash type \'{0}\'. Supported hash types are: {1}. '
'Either remove the hash type and simply use \'{2}\' as the '
'source_hash, or change the hash type to a supported type.'
.format(ret['hash_type'], ', '.join(HASHES), ret['hsum'])
)
else:
hsum_len = len(ret['hsum'])
if hsum_len not in HASHES_REVMAP:
_invalid_source_hash_format()
elif hsum_len != HASHES[ret['hash_type']]:
raise CommandExecutionError(
'Invalid length ({0}) for hash type \'{1}\'. Either '
'remove the hash type and simply use \'{2}\' as the '
'source_hash, or change the hash type to \'{3}\''.format(
hsum_len,
ret['hash_type'],
ret['hsum'],
HASHES_REVMAP[hsum_len],
)
)

return ret
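Converting ``HASHES`` to a dict and deriving ``HASHES_REVMAP`` is what enables length-based hash-type detection in the hunk above. A minimal sketch (the digest values are taken from the examples earlier in this diff):

.. code-block:: python

    HASHES = {'sha512': 128, 'sha384': 96, 'sha256': 64,
              'sha224': 56, 'sha1': 40, 'md5': 32}
    HASHES_REVMAP = {length: name for name, length in HASHES.items()}

    def guess_hash_type(hsum):
        """Return the hash type implied by the digest length, or None."""
        return HASHES_REVMAP.get(len(hsum))

    print(guess_hash_type('499ae16dcae71eeb7c3a30c75ea7a1a6'))          # md5
    print(guess_hash_type('3360db35e682f1c5f9c58aa307de16d41361618c'))  # sha1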
@@ -1506,45 +1555,66 @@ def line(path, content, match=None, mode=None, location=None,
'''
.. versionadded:: 2015.8.0

Edit a line in the configuration file.
Edit a line in the configuration file. The ``path`` and ``content``
arguments are required, as well as passing in one of the ``mode``
options.

:param path:
path
Filesystem path to the file to be edited.

:param content:
content
Content of the line.

:param match:
match
Match the target line for an action by
a fragment of a string or regular expression.

:param mode:
:Ensure:
If line does not exist, it will be added.
If neither ``before`` nor ``after`` are provided, and ``match``
is also ``None``, match becomes the ``content`` value.

:Replace:
If line already exist, it will be replaced.
mode
Defines how to edit a line. One of the following options is
required:

:Delete:
- ensure
If line does not exist, it will be added. This is based on the
``content`` argument.
- replace
If line already exists, it will be replaced.
- delete
Delete the line, once found.

:Insert:
- insert
Insert a line.

:param location:
:start:
Place the content at the beginning of the file.
.. note::

:end:
If ``mode=insert`` is used, at least one of the following
options must also be defined: ``location``, ``before``, or
``after``. If ``location`` is used, it takes precedence
over the other two options.

location
Defines where to place content in the line. Note this option is only
used when ``mode=insert`` is specified. If a location is passed in, it
takes precedence over both the ``before`` and ``after`` kwargs. Valid
locations are:

- start
Place the content at the beginning of the file.
- end
Place the content at the end of the file.

:param before:
before
Regular expression or an exact case-sensitive fragment of the string.
This option is only used when either the ``ensure`` or ``insert`` mode
is defined.

:param after:
after
Regular expression or an exact case-sensitive fragment of the string.
This option is only used when either the ``ensure`` or ``insert`` mode
is defined.

:param show_changes:
show_changes
Output a unified diff of the old file and the new file.
If ``False`` return a boolean if any changes were made.
Default is ``True``
@@ -1553,31 +1623,34 @@ def line(path, content, match=None, mode=None, location=None,
Using this option will store two copies of the file in-memory
(the original version and the edited version) in order to generate the diff.

:param backup:
backup
Create a backup of the original file with the extension:
"Year-Month-Day-Hour-Minutes-Seconds".

:param quiet:
quiet
Do not raise any exceptions. E.g. ignore the fact that the file that is
tried to be edited does not exist and nothing really happened.

:param indent:
Keep indentation with the previous line.
indent
Keep indentation with the previous line. This option is not considered when
the ``delete`` mode is specified.

If an equal sign (``=``) appears in an argument to a Salt command, it is
interpreted as a keyword argument in the format of ``key=val``. That
processing can be bypassed in order to pass an equal sign through to the
remote shell command by manually specifying the kwarg:

.. code-block:: bash

salt '*' file.line /path/to/file content="CREATEMAIL_SPOOL=no" match="CREATE_MAIL_SPOOL=yes" mode="replace"

CLI Examples:
CLI Example:

.. code-block:: bash

salt '*' file.line /etc/nsswitch.conf "networks:\tfiles dns" after="hosts:.*?" mode='ensure'

.. note::

If an equal sign (``=``) appears in an argument to a Salt command, it is
interpreted as a keyword argument in the format of ``key=val``. That
processing can be bypassed in order to pass an equal sign through to the
remote shell command by manually specifying the kwarg:

.. code-block:: bash

salt '*' file.line /path/to/file content="CREATEMAIL_SPOOL=no" match="CREATE_MAIL_SPOOL=yes" mode="replace"
'''
path = os.path.realpath(os.path.expanduser(path))
if not os.path.isfile(path):
@@ -1607,9 +1680,13 @@ def line(path, content, match=None, mode=None, location=None,
body = os.linesep.join([line for line in body.split(os.linesep) if line.find(match) < 0])

elif mode == 'replace':
body = os.linesep.join([(_get_line_indent(line, content, indent)
if (line.find(match) > -1 and not line == content) else line)
for line in body.split(os.linesep)])
if os.stat(path).st_size == 0:
log.warning('Cannot find text to replace. File \'{0}\' is empty.'.format(path))
body = ''
else:
body = os.linesep.join([(_get_line_indent(file_line, content, indent)
if (file_line.find(match) > -1 and not file_line == content) else file_line)
for file_line in body.split(os.linesep)])
elif mode == 'insert':
if not location and not before and not after:
raise CommandExecutionError('On insert must be defined either "location" or "before/after" conditions.')
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
skip_verify=False,
|
||||
**kwargs):
|
||||
'''
|
||||
Return the managed file data for file.managed
|
||||
@ -3566,20 +3644,26 @@ def get_managed(
|
||||
source_hash
|
||||
hash of the source file
|
||||
|
||||
source_hash_name
|
||||
When ``source_hash`` refers to a remote file, this specifies the
|
||||
filename to look for in that file.
|
||||
|
||||
.. versionadded:: 2016.3.5
|
||||
|
||||
user
|
||||
user owner
|
||||
Owner of file
|
||||
|
||||
group
|
||||
group owner
|
||||
Group owner of file
|
||||
|
||||
mode
|
||||
file mode
|
||||
Permissions of file
|
||||
|
||||
context
|
||||
variables to add to the environment
|
||||
Variables to add to the template context
|
||||
|
||||
defaults
|
||||
default values of for context_dict
|
||||
Default values of for context_dict
|
||||
|
||||
skip_verify
|
||||
If ``True``, hash verification of remote file sources (``http://``,
|
||||
@ -3592,7 +3676,7 @@ def get_managed(
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' file.get_managed /etc/httpd/conf.d/httpd.conf jinja salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' root root '755' base None None
|
||||
salt '*' file.get_managed /etc/httpd/conf.d/httpd.conf jinja salt://http/httpd.conf '{hash_type: 'md5', 'hsum': <md5sum>}' None root root '755' base None None
|
||||
'''
|
||||
# Copy the file to the minion and templatize it
|
||||
sfn = ''
|
||||
@ -3605,7 +3689,6 @@ def get_managed(
|
||||
'''
|
||||
return {'hsum': get_hash(path, form='sha256'), 'hash_type': 'sha256'}
|
||||
|
||||
source_hash_name = kwargs.pop('source_hash_name', None)
|
||||
# If we have a source defined, let's figure out what the hash is
|
||||
if source:
|
||||
urlparsed_source = _urlparse(source)
|
||||
@ -3629,8 +3712,10 @@ def get_managed(
|
||||
if not skip_verify:
|
||||
if source_hash:
|
||||
try:
|
||||
source_sum = get_source_sum(source_hash_name or source,
|
||||
source_sum = get_source_sum(name,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
saltenv)
|
||||
except CommandExecutionError as exc:
|
||||
return '', {}, exc.strerror
|
||||
@ -3705,8 +3790,31 @@ def get_managed(
|
||||
return sfn, source_sum, ''
|
||||
|
||||
|
||||
def extract_hash(hash_fn, hash_type='sha256', file_name=''):
|
||||
def extract_hash(hash_fn,
|
||||
hash_type='sha256',
|
||||
file_name='',
|
||||
source='',
|
||||
source_hash_name=None):
|
||||
'''
|
||||
.. versionchanged:: 2016.3.5
|
||||
Prior to this version, only the ``file_name`` argument was considered
|
||||
for filename matches in the hash file. This would be problematic for
|
||||
cases in which the user was relying on a remote checksum file that they
|
||||
do not control, and they wished to use a different name for that file
|
||||
on the minion from the filename on the remote server (and in the
|
||||
checksum file). For example, managing ``/tmp/myfile.tar.gz`` when the
|
||||
remote file was at ``https://mydomain.tld/different_name.tar.gz``. The
|
||||
:py:func:`file.managed <salt.states.file.managed>` state now also
|
||||
passes this function the source URI as well as the ``source_hash_name``
|
||||
(if specified). In cases where ``source_hash_name`` is specified, it
|
||||
takes precedence over both the ``file_name`` and ``source``. When it is
|
||||
not specified, ``file_name`` takes precedence over ``source``. This
|
||||
allows for better capability for matching hashes.
|
||||
.. versionchanged:: 2016.11.1
|
||||
File name and source URI matches are no longer disregarded when
|
||||
``source_hash_name`` is specified. They will be used as fallback
|
||||
matches if there is no match to the ``source_hash_name`` value.
|
||||
|
||||
This routine is called from the :mod:`file.managed
|
||||
<salt.states.file.managed>` state to pull a hash from a remote file.
|
||||
Regular expressions are used line by line on the ``source_hash`` file, to
|
||||
@ -3732,72 +3840,184 @@ def extract_hash(hash_fn, hash_type='sha256', file_name=''):
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' file.extract_hash /etc/foo sha512 /path/to/hash/file
|
||||
salt '*' file.extract_hash /path/to/hash/file sha512 /etc/foo
|
||||
'''
|
||||
source_sum = None
|
||||
partial_id = False
|
||||
name_sought = os.path.basename(file_name)
|
||||
log.debug(
|
||||
'modules.file.py - extract_hash(): Extracting hash for file named: %s',
|
||||
name_sought
|
||||
)
|
||||
try:
|
||||
with salt.utils.fopen(hash_fn, 'r') as hash_fn_fopen:
|
||||
for hash_variant in HASHES:
|
||||
if hash_type == '' or hash_type == hash_variant[0]:
|
||||
log.debug(
|
||||
'modules.file.py - extract_hash(): Will use regex to '
|
||||
'get a purely hexadecimal number of length (%s), '
|
||||
'presumably hash type : %s',
|
||||
hash_variant[1], hash_variant[0]
|
||||
)
|
||||
hash_fn_fopen.seek(0)
|
||||
for line in hash_fn_fopen.read().splitlines():
|
||||
hash_array = re.findall(
|
||||
r'(?i)(?<![a-z0-9])[a-f0-9]{' + str(hash_variant[1]) + '}(?![a-z0-9])',
|
||||
line)
|
||||
log.debug(
|
||||
'modules.file.py - extract_hash(): From "%s", '
|
||||
'got : %s', line, hash_array
|
||||
)
|
||||
if hash_array:
|
||||
if not partial_id:
|
||||
source_sum = {'hsum': hash_array[0],
|
||||
'hash_type': hash_variant[0]}
|
||||
partial_id = True
|
||||
|
||||
log.debug(
|
||||
'modules.file.py - extract_hash(): Found: %s '
|
||||
'-- %s',
|
||||
source_sum['hash_type'], source_sum['hsum']
|
||||
)
|
||||
|
||||
if name_sought in line:
|
||||
source_sum = {'hsum': hash_array[0],
|
||||
'hash_type': hash_variant[0]}
|
||||
log.debug(
|
||||
'modules.file.py - extract_hash: For %s -- '
|
||||
'returning the %s hash "%s".',
|
||||
name_sought, source_sum['hash_type'],
|
||||
source_sum['hsum']
|
||||
)
|
||||
return source_sum
|
||||
except OSError as exc:
|
||||
raise CommandExecutionError(
|
||||
'Error encountered extracting hash from {0}: {1}'.format(
|
||||
exc.filename, exc.strerror
|
||||
hash_len = HASHES.get(hash_type)
|
||||
if hash_len is None:
|
||||
if hash_type:
|
||||
log.warning(
|
||||
'file.extract_hash: Unsupported hash_type \'%s\', falling '
|
||||
'back to matching any supported hash_type', hash_type
|
||||
)
|
||||
hash_type = ''
|
||||
hash_len_expr = '{0},{1}'.format(min(HASHES_REVMAP), max(HASHES_REVMAP))
|
||||
else:
|
||||
hash_len_expr = str(hash_len)
|
||||
|
||||
filename_separators = string.whitespace + r'\/'
|
||||
|
||||
if source_hash_name:
|
||||
if not isinstance(source_hash_name, six.string_types):
|
||||
source_hash_name = str(source_hash_name)
|
||||
source_hash_name_idx = (len(source_hash_name) + 1) * -1
|
||||
log.debug(
|
||||
'file.extract_hash: Extracting %s hash for file matching '
|
||||
'source_hash_name \'%s\'',
|
||||
'any supported' if not hash_type else hash_type,
|
||||
source_hash_name
|
||||
)
|
||||
if file_name:
|
||||
if not isinstance(file_name, six.string_types):
|
||||
file_name = str(file_name)
|
||||
file_name_basename = os.path.basename(file_name)
|
||||
file_name_idx = (len(file_name_basename) + 1) * -1
|
||||
if source:
|
||||
if not isinstance(source, six.string_types):
|
||||
source = str(source)
|
||||
urlparsed_source = _urlparse(source)
|
||||
source_basename = os.path.basename(
|
||||
urlparsed_source.path or urlparsed_source.netloc
|
||||
)
|
||||
source_idx = (len(source_basename) + 1) * -1
|
||||
|
||||
basename_searches = [x for x in (file_name, source) if x]
|
||||
if basename_searches:
|
||||
log.debug(
|
||||
'file.extract_hash: %s %s hash for file matching%s: %s',
|
||||
'If no source_hash_name match found, will extract'
|
||||
if source_hash_name
|
||||
else 'Extracting',
|
||||
'any supported' if not hash_type else hash_type,
|
||||
'' if len(basename_searches) == 1 else ' either of the following',
|
||||
', '.join(basename_searches)
|
||||
)
|
||||
|
||||
if partial_id:
|
||||
partial = None
|
||||
found = {}
|
||||
|
||||
with salt.utils.fopen(hash_fn, 'r') as fp_:
|
||||
for line in fp_:
|
||||
line = line.strip()
|
||||
hash_re = r'(?i)(?<![a-z0-9])([a-f0-9]{' + hash_len_expr + '})(?![a-z0-9])'
|
||||
hash_match = re.search(hash_re, line)
|
||||
matched = None
|
||||
if hash_match:
|
||||
matched_hsum = hash_match.group(1)
|
||||
if matched_hsum is not None:
|
||||
matched_type = HASHES_REVMAP.get(len(matched_hsum))
|
||||
if matched_type is None:
|
||||
# There was a match, but it's not of the correct length
|
||||
# to match one of the supported hash types.
|
||||
matched = None
|
||||
else:
|
||||
matched = {'hsum': matched_hsum,
|
||||
'hash_type': matched_type}
|
||||
|
||||
if matched is None:
|
||||
log.debug(
|
||||
'file.extract_hash: In line \'%s\', no %shash found',
|
||||
line,
|
||||
'' if not hash_type else hash_type + ' '
|
||||
)
|
||||
continue
|
||||
|
||||
if partial is None:
|
||||
partial = matched
|
||||
|
||||
def _add_to_matches(found, line, match_type, value, matched):
|
||||
log.debug(
|
||||
'file.extract_hash: Line \'%s\' matches %s \'%s\'',
|
||||
line, match_type, value
|
||||
)
|
||||
found.setdefault(match_type, []).append(matched)
|
||||
|
||||
hash_matched = False
|
||||
if source_hash_name:
|
||||
if line.endswith(source_hash_name):
|
||||
# Checking the character before where the basename
|
||||
# should start for either whitespace or a path
|
||||
# separator. We can't just rsplit on spaces/whitespace,
|
||||
# because the filename may contain spaces.
|
||||
try:
|
||||
if line[source_hash_name_idx] in string.whitespace:
|
||||
_add_to_matches(found, line, 'source_hash_name',
|
||||
source_hash_name, matched)
|
||||
hash_matched = True
|
||||
except IndexError:
|
||||
pass
|
||||
elif re.match(source_hash_name.replace('.', r'\.') + r'\s+',
|
||||
line):
|
||||
_add_to_matches(found, line, 'source_hash_name',
|
||||
source_hash_name, matched)
|
||||
hash_matched = True
|
||||
if file_name:
|
||||
if line.endswith(file_name_basename):
|
||||
# Checking the character before where the basename
|
||||
# should start for either whitespace or a path
|
||||
# separator. We can't just rsplit on spaces/whitespace,
|
||||
# because the filename may contain spaces.
|
||||
try:
|
||||
if line[file_name_idx] in filename_separators:
|
||||
_add_to_matches(found, line, 'file_name',
|
||||
file_name, matched)
|
||||
hash_matched = True
|
||||
except IndexError:
|
||||
pass
|
||||
elif re.match(file_name.replace('.', r'\.') + r'\s+', line):
|
||||
_add_to_matches(found, line, 'file_name',
|
||||
file_name, matched)
|
||||
hash_matched = True
|
||||
if source:
|
||||
if line.endswith(source_basename):
|
||||
# Same as above, we can't just do an rsplit here.
|
||||
try:
|
||||
if line[source_idx] in filename_separators:
|
||||
_add_to_matches(found, line, 'source',
|
||||
source, matched)
|
||||
hash_matched = True
|
||||
except IndexError:
|
||||
pass
|
||||
elif re.match(source.replace('.', r'\.') + r'\s+', line):
|
||||
_add_to_matches(found, line, 'source', source, matched)
|
||||
hash_matched = True
|
||||
|
||||
if not hash_matched:
|
||||
log.debug(
|
||||
'file.extract_hash: Line \'%s\' contains %s hash '
|
||||
'\'%s\', but line did not meet the search criteria',
|
||||
line, matched['hash_type'], matched['hsum']
|
||||
)
|
||||
|
||||
for found_type, found_str in (('source_hash_name', source_hash_name),
|
||||
('file_name', file_name),
|
||||
('source', source)):
|
||||
if found_type in found:
|
||||
if len(found[found_type]) > 1:
|
||||
log.debug(
|
||||
'file.extract_hash: Multiple %s matches for %s: %s',
|
||||
found_type,
|
||||
found_str,
|
||||
', '.join(
|
||||
['{0} ({1})'.format(x['hsum'], x['hash_type'])
|
||||
for x in found[found_type]]
|
||||
)
|
||||
)
|
||||
ret = found[found_type][0]
|
||||
log.debug(
|
||||
'file.extract_hash: Returning %s hash \'%s\' as a match of %s',
|
||||
ret['hash_type'], ret['hsum'], found_str
|
||||
)
|
||||
return ret
|
||||
|
||||
if partial:
|
||||
log.debug(
|
||||
'modules.file.py - extract_hash: Returning the partially '
|
||||
'identified %s hash "%s".',
|
||||
source_sum['hash_type'], source_sum['hsum']
|
||||
'file.extract_hash: Returning the partially identified %s hash '
|
||||
'\'%s\'', partial['hash_type'], partial['hsum']
|
||||
)
|
||||
else:
|
||||
log.debug('modules.file.py - extract_hash: Returning None.')
|
||||
return source_sum
|
||||
return partial
|
||||
|
||||
log.debug('file.extract_hash: No matches, returning None')
|
||||
return None
|
||||
|
||||
|
||||
def check_perms(name, ret, user, group, mode, follow_symlinks=False):
|
||||
@ -3947,6 +4167,7 @@ def check_managed(
|
||||
name,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
@ -3981,6 +4202,7 @@ def check_managed(
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
@ -4013,6 +4235,7 @@ def check_managed_changes(
|
||||
name,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
@ -4048,6 +4271,7 @@ def check_managed_changes(
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
@ -4387,7 +4611,10 @@ def manage_file(name,
|
||||
if dl_sum != source_sum['hsum']:
|
||||
ret['comment'] = (
|
||||
'Specified {0} checksum for {1} ({2}) does not match '
|
||||
'actual checksum ({3})'.format(
|
||||
'actual checksum ({3}). If the \'source_hash\' value '
|
||||
'refers to a remote file with multiple possible '
|
||||
'matches, then it may be necessary to set '
|
||||
'\'source_hash_name\'.'.format(
|
||||
source_sum['hash_type'],
|
||||
source,
|
||||
source_sum['hsum'],
|
||||
|
@ -75,6 +75,7 @@ def push_note(device=None, title=None, body=None):
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt "*" pushbullet.push_note device="Chrome" title="Example title" body="Example body."
|
||||
'''
|
||||
spb = _SaltPushbullet(device)
|
||||
|
@ -175,6 +175,7 @@ def interfaces(root):
|
||||
|
||||
Output example:
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"r": [
|
||||
"state",
|
||||
|
@ -3584,6 +3584,7 @@ def add_host_to_dvs(host, username, password, vmknic_name, vmnic_name,
|
||||
Return Example:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
somehost:
|
||||
----------
|
||||
esxi1.corp.com:
|
||||
|
@ -262,6 +262,9 @@ def apply_config(path, source=None, salt_env='base'):
|
||||
salt '*' dsc.run_config C:\\DSC\\WebSiteConfiguration salt://dsc/configs/WebSiteConfiguration
|
||||
|
||||
'''
|
||||
# If you're getting an error along the lines of "The client cannot connect
|
||||
# to the destination specified in the request.", try the following:
|
||||
# Enable-PSRemoting -SkipNetworkProfileCheck
|
||||
config = path
|
||||
if source:
|
||||
# Make sure the folder names match
|
||||
@ -291,16 +294,17 @@ def apply_config(path, source=None, salt_env='base'):
|
||||
|
||||
# Run the DSC Configuration
|
||||
# Putting quotes around the parameter protects against command injection
|
||||
cmd = '$job = Start-DscConfiguration -Path "{0}"; '.format(config)
|
||||
cmd += 'Do{ } While ($job.State -notin \'Completed\', \'Failed\'); ' \
|
||||
'return $job.State'
|
||||
cmd = 'Start-DscConfiguration -Path "{0}" -Wait -Force'.format(config)
|
||||
ret = _pshell(cmd)
|
||||
|
||||
if ret is False:
|
||||
raise CommandExecutionError('Apply Config Failed: {0}'.format(path))
|
||||
|
||||
cmd = '$status = Get-DscConfigurationStatus; $status.Status'
|
||||
ret = _pshell(cmd)
|
||||
log.info('DSC Apply Config: {0}'.format(ret))
|
||||
|
||||
if ret == 'Completed':
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return ret == 'Success'
|
||||
|
||||
|
||||
def get_config():
|
||||
@ -336,10 +340,7 @@ def test_config():
|
||||
'''
|
||||
cmd = 'Test-DscConfiguration *>&1'
|
||||
ret = _pshell(cmd)
|
||||
if ret == 'True':
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return ret == 'True'
|
||||
|
||||
|
||||
def get_config_status():
|
||||
|
@ -62,8 +62,8 @@ from salt.modules.file import (check_hash, # pylint: disable=W0611
|
||||
access, copy, readdir, rmdir, truncate, replace, delete_backup,
|
||||
search, _get_flags, extract_hash, _error, _sed_esc, _psed,
|
||||
RE_FLAG_TABLE, blockreplace, prepend, seek_read, seek_write, rename,
|
||||
lstat, path_exists_glob, write, pardir, join, HASHES, comment,
|
||||
uncomment, _add_flags, comment_line, apply_template_on_contents)
|
||||
lstat, path_exists_glob, write, pardir, join, HASHES, HASHES_REVMAP,
|
||||
comment, uncomment, _add_flags, comment_line, apply_template_on_contents)
|
||||
|
||||
from salt.utils import namespaced_function as _namespaced_function
|
||||
|
||||
|
@ -5,6 +5,7 @@ Manage Windows features via the ServerManager powershell module
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
import ast
|
||||
import json
|
||||
import logging
|
||||
|
||||
@ -26,6 +27,17 @@ def __virtual__():
|
||||
'''
|
||||
Load only on windows with servermanager module
|
||||
'''
|
||||
def _module_present():
|
||||
'''
|
||||
Check for the presence of the ServerManager module.
|
||||
'''
|
||||
cmd = r"[Bool] (Get-Module -ListAvailable | Where-Object { $_.Name -eq 'ServerManager' })"
|
||||
cmd_ret = __salt__['cmd.run_all'](cmd, shell='powershell', python_shell=True)
|
||||
|
||||
if cmd_ret['retcode'] == 0:
|
||||
return ast.literal_eval(cmd_ret['stdout'])
|
||||
return False
|
||||
|
||||
if not salt.utils.is_windows():
|
||||
return False
|
||||
|
||||
|
@ -727,6 +727,8 @@ def hypermedia_handler(*args, **kwargs):
|
||||
except (salt.exceptions.SaltDaemonNotRunning,
|
||||
salt.exceptions.SaltReqTimeoutError) as exc:
|
||||
raise cherrypy.HTTPError(503, exc.strerror)
|
||||
except (cherrypy.TimeoutError, salt.exceptions.SaltClientTimeout):
|
||||
raise cherrypy.HTTPError(504)
|
||||
except cherrypy.CherryPyException:
|
||||
raise
|
||||
except Exception as exc:
|
||||
|
@ -92,7 +92,8 @@ To use the cassandra returner, append '--return cassandra_cql' to the salt comma
|
||||
Note: if your Cassandra instance has not been tuned much you may benefit from
|
||||
altering some timeouts in `cassandra.yaml` like so:
|
||||
|
||||
.. code-block:: bash
|
||||
.. code-block:: yaml
|
||||
|
||||
# How long the coordinator should wait for read operations to complete
|
||||
read_request_timeout_in_ms: 5000
|
||||
# How long the coordinator should wait for seq or index scans to complete
|
||||
@ -175,7 +176,8 @@ __virtualname__ = 'cassandra_cql'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_CASSANDRA_DRIVER:
|
||||
return False
|
||||
return False, 'Could not import cassandra_cql returner; ' \
|
||||
'cassandra-driver is not installed.'
|
||||
|
||||
return True
|
||||
|
||||
@ -304,7 +306,7 @@ def save_load(jid, load, minions=None):
|
||||
raise
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -47,7 +47,7 @@ __virtualname__ = 'cassandra'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_PYCASSA:
|
||||
return False
|
||||
return False, 'Could not import cassandra returner; pycassa is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
|
@ -54,7 +54,7 @@ VERIFIED_VIEWS = False
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_DEPS:
|
||||
return False
|
||||
return False, 'Could not import couchbase returner; couchbase is not installed.'
|
||||
|
||||
# try to load some faster json libraries. In order of fastest to slowest
|
||||
json = salt.utils.import_json()
|
||||
|
@ -367,7 +367,7 @@ def prep_jid(nocache=False, passed_jid=None): # pylint: disable=unused-argument
|
||||
return passed_jid if passed_jid is not None else salt.utils.jid.gen_jid()
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -50,7 +50,7 @@ __virtualname__ = 'django'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_DJANGO:
|
||||
return False
|
||||
return False, 'Could not import django returner; django is not installed.'
|
||||
return True
|
||||
|
||||
|
||||
|
@ -64,7 +64,7 @@ log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def __virtual__():
|
||||
return __virtualname__
|
||||
return 'elasticsearch.alias_exists' in __salt__
|
||||
|
||||
|
||||
def _ensure_index(index):
|
||||
|
@ -90,7 +90,10 @@ def __virtual__():
|
||||
'''
|
||||
Only return if python-etcd is installed
|
||||
'''
|
||||
return __virtualname__ if HAS_LIBS else False
|
||||
if HAS_LIBS:
|
||||
return __virtualname__
|
||||
|
||||
return False, 'Could not import etcd returner; python-etcd is not installed.'
|
||||
|
||||
|
||||
def _get_conn(opts, profile=None):
|
||||
@ -150,7 +153,7 @@ def save_load(jid, load, minions=None):
|
||||
)
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -81,7 +81,8 @@ __virtualname__ = 'influxdb'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_INFLUXDB:
|
||||
return False
|
||||
return False, 'Could not import influxdb returner; ' \
|
||||
'influxdb python client is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
@ -226,7 +227,7 @@ def save_load(jid, load, minions=None):
|
||||
log.critical('Failed to store load with InfluxDB returner: {0}'.format(ex))
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -24,28 +24,26 @@ To use the kafka returner, append '--return kafka' to the Salt command, eg;
|
||||
|
||||
'''
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
import logging
|
||||
|
||||
|
||||
# Import third-party libs
|
||||
try:
|
||||
from kafka import KafkaClient, SimpleProducer
|
||||
HAS_KAFKA = True
|
||||
except ImportError:
|
||||
HAS_KAFKA = False
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
__virtualname__ = 'kafka'
|
||||
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_KAFKA:
|
||||
return False
|
||||
return False, 'Could not import kafka returner; kafka-python is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
|
@ -70,7 +70,8 @@ __virtualname__ = 'memcache'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_MEMCACHE:
|
||||
return False
|
||||
return False, 'Could not import memcache returner; ' \
|
||||
'memcache python client is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
@ -163,7 +164,7 @@ def save_load(jid, load, minions=None):
|
||||
_append_list(serv, 'jids', jid)
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -73,7 +73,6 @@ import salt.utils.jid
|
||||
import salt.returners
|
||||
import salt.ext.six as six
|
||||
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import pymongo
|
||||
@ -91,7 +90,7 @@ __virtualname__ = 'mongo'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_PYMONGO:
|
||||
return False
|
||||
return False, 'Could not import mongo returner; pymongo is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
@ -213,7 +212,7 @@ def save_load(jid, load, minions=None):
|
||||
mdb.jobs.insert(load.copy())
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -89,7 +89,7 @@ __virtualname__ = 'mongo'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_PYMONGO:
|
||||
return False
|
||||
return False, 'Could not import mongo returner; pymongo is not installed.'
|
||||
return 'mongo_return'
|
||||
|
||||
|
||||
@ -234,7 +234,7 @@ def prep_jid(nocache=False, passed_jid=None): # pylint: disable=unused-argument
|
||||
return passed_jid if passed_jid is not None else salt.utils.jid.gen_jid()
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -69,7 +69,7 @@ def save_load(jid, clear_load, minions=None):
|
||||
_mminion().returners['{0}.save_load'.format(returner_)](jid, clear_load)
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -168,7 +168,8 @@ __virtualname__ = 'mysql'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_MYSQL:
|
||||
return False
|
||||
return False, 'Could not import mysql returner; ' \
|
||||
'mysql python client is not installed.'
|
||||
return True
|
||||
|
||||
|
||||
@ -325,7 +326,7 @@ def save_load(jid, load, minions=None):
|
||||
pass
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -177,7 +177,7 @@ def __virtual__():
|
||||
'''
|
||||
Return virtualname
|
||||
'''
|
||||
return __virtualname__
|
||||
return 'nagios.list_plugins' in __salt__
|
||||
|
||||
|
||||
def returner(ret):
|
||||
|
@ -149,7 +149,7 @@ __virtualname__ = 'odbc'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_ODBC:
|
||||
return False
|
||||
return False, 'Could not import odbc returner; pyodbc is not installed.'
|
||||
return True
|
||||
|
||||
|
||||
@ -226,7 +226,7 @@ def save_load(jid, load, minions=None):
|
||||
_close_conn(conn)
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -160,7 +160,7 @@ __virtualname__ = 'pgjsonb'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_PG:
|
||||
return False
|
||||
return False, 'Could not import pgjsonb returner; python-psycopg2 is not installed.'
|
||||
return True
|
||||
|
||||
|
||||
@ -291,7 +291,7 @@ def save_load(jid, load, minions=None):
|
||||
pass
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -151,7 +151,7 @@ log = logging.getLogger(__name__)
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_POSTGRES:
|
||||
return False
|
||||
return False, 'Could not import postgres returner; psycopg2 is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
@ -276,7 +276,7 @@ def save_load(jid, load, minions=None): # pylint: disable=unused-argument
|
||||
pass
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -304,7 +304,7 @@ def save_load(jid, clear_load, minions=None):
|
||||
_close_conn(conn)
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -69,7 +69,8 @@ __virtualname__ = 'redis'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_REDIS:
|
||||
return False
|
||||
return False, 'Could not import redis returner; ' \
|
||||
'redis python client is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
@ -137,7 +138,7 @@ def save_load(jid, load, minions=None):
|
||||
serv.setex('load:{0}'.format(jid), json.dumps(load), _get_ttl())
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -66,7 +66,8 @@ __virtualname__ = 'sentry'
|
||||
|
||||
def __virtual__():
|
||||
if not has_raven:
|
||||
return False
|
||||
return False, 'Could not import sentry returner; ' \
|
||||
'raven python client is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
|
@ -51,8 +51,8 @@ __virtualname__ = 'sms'
|
||||
def __virtual__():
|
||||
if HAS_TWILIO:
|
||||
return __virtualname__
|
||||
else:
|
||||
return False
|
||||
|
||||
return False, 'Could not import sms returner; twilio is not installed.'
|
||||
|
||||
|
||||
def _get_options(ret=None):
|
||||
|
@ -132,7 +132,9 @@ __virtualname__ = 'smtp'
|
||||
|
||||
|
||||
def __virtual__():
|
||||
return __virtualname__
|
||||
if HAS_GNUPG:
|
||||
return __virtualname__
|
||||
return False, 'Could not import smtp returner; gnupg is not installed.'
|
||||
|
||||
|
||||
def _get_options(ret=None):
|
||||
|
@ -106,7 +106,7 @@ __virtualname__ = 'sqlite3'
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_SQLITE3:
|
||||
return False
|
||||
return False, 'Could not import sqlite3 returner; sqlite3 is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
@ -193,7 +193,7 @@ def save_load(jid, load, minions=None):
|
||||
_close_conn(conn)
|
||||
|
||||
|
||||
def save_minions(jid, minions): # pylint: disable=unused-argument
|
||||
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
|
||||
'''
|
||||
Included for API consistency
|
||||
'''
|
||||
|
@ -168,7 +168,7 @@ def _verify_options(options):
|
||||
|
||||
def __virtual__():
|
||||
if not HAS_SYSLOG:
|
||||
return False
|
||||
return False, 'Could not import syslog returner; syslog is not installed.'
|
||||
return __virtualname__
|
||||
|
||||
|
||||
|
@ -2,6 +2,8 @@
|
||||
'''
|
||||
Return salt data via xmpp
|
||||
|
||||
:depends: sleekxmpp >= 1.3.1
|
||||
|
||||
The following fields can be set in the minion conf file::
|
||||
|
||||
xmpp.jid (required)
|
||||
@ -117,14 +119,16 @@ def __virtual__():
|
||||
'''
|
||||
Only load this module if right version of sleekxmpp is installed on this minion.
|
||||
'''
|
||||
min_version = '1.3.1'
|
||||
if HAS_LIBS:
|
||||
import sleekxmpp
|
||||
# Certain XMPP functionaility we're using doesn't work with versions under 1.3.1
|
||||
sleekxmpp_version = distutils.version.LooseVersion(sleekxmpp.__version__)
|
||||
valid_version = distutils.version.LooseVersion('1.3.1')
|
||||
valid_version = distutils.version.LooseVersion(min_version)
|
||||
if sleekxmpp_version >= valid_version:
|
||||
return __virtualname__
|
||||
return False
|
||||
return False, 'Could not import xmpp returner; sleekxmpp python client is not ' \
|
||||
'installed or is older than version \'{0}\'.'.format(min_version)
|
||||
|
||||
|
||||
class SendMsgBot(_ClientXMPP):
|
||||
|
@@ -8,7 +8,7 @@ returned by several different minions.
Aggregated results are sorted by the size of the minion pools which returned
matching results.

Useful for playing the game: " some of these things are not like the others... "
Useful for playing the game: *"some of these things are not like the others..."*
when identifying discrepancies in a large infrastructure managed by salt.
'''

@@ -29,20 +29,20 @@ def hash(*args, **kwargs):
.. versionadded:: 2014.7.0

This command is submitted via a salt runner using the
general form:
general form::

salt-run survey.hash [survey_sort=up/down] <target>
<salt-execution-module> <salt-execution-module parameters>

Optionally accept a "survey_sort=" parameter. Default: "survey_sort=down"
Optionally accept a ``survey_sort=`` parameter. Default: ``survey_sort=down``

CLI Example #1: ( functionally equivalent to "salt-run manage.up" )
CLI Example #1: (functionally equivalent to ``salt-run manage.up``)

.. code-block:: bash

salt-run survey.hash "*" test.ping

CLI Example #2: ( find an "outlier" minion config file )
CLI Example #2: (find an "outlier" minion config file)

.. code-block:: bash

@@ -61,20 +61,23 @@ def diff(*args, **kwargs):

These pools are determined from the aggregated and sorted results of
a salt command.
This command displays the "diffs" as a series of 2-way differences-- namely
the difference between the FIRST displayed minion pool
(according to sort order) and EACH SUBSEQUENT minion pool result set.
Differences are displayed according to the Python "difflib.unified_diff()"
as in the case of the salt execution module "file.get_diff".

This command is submitted via a salt runner using the general form:
This command displays the "diffs" as a series of 2-way differences --
namely the difference between the FIRST displayed minion pool
(according to sort order) and EACH SUBSEQUENT minion pool result set.

Differences are displayed according to the Python ``difflib.unified_diff()``
as in the case of the salt execution module ``file.get_diff``.

This command is submitted via a salt runner using the general form::

salt-run survey.diff [survey_sort=up/down] <target>
<salt-execution-module> <salt-execution-module parameters>

Optionally accept a "survey_sort=" parameter. Default: "survey_sort=down"
Optionally accept a ``survey_sort=`` parameter. Default:
``survey_sort=down``

CLI Example #1: ( Example to display the "differences of files" )
CLI Example #1: (Example to display the "differences of files")

.. code-block:: bash
|
||||
|
||||
|
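The reworked docstrings describe the mechanism: the largest result pool is the baseline and every other pool is compared to it with ``difflib.unified_diff()``. A rough illustration of that comparison with made-up pool data (not the runner's actual code):

.. code-block:: python

    # Baseline-vs-rest comparison in the spirit of survey.diff.
    import difflib

    pools = [
        {'minions': ['web1', 'web2'], 'result': 'PermitRootLogin no\nPort 22\n'},
        {'minions': ['web3'], 'result': 'PermitRootLogin yes\nPort 22\n'},
    ]

    baseline = pools[0]['result'].splitlines(True)
    for other in pools[1:]:
        diff = difflib.unified_diff(
            baseline,
            other['result'].splitlines(True),
            fromfile='largest pool',
            tofile=','.join(other['minions']),
        )
        print(''.join(diff))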
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
'''
salt.serializers.configparser
~~~~~~~~~~~~~~~~~~~~~~~~~~~
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. versionadded:: 2016.3.0
@ -123,6 +123,7 @@ def _cleanup_destdir(name):
def extracted(name,
source,
source_hash=None,
source_hash_name=None,
source_hash_update=False,
skip_verify=False,
password=None,

@ -240,6 +241,71 @@ def extracted(name,
This argument uses the same syntax as its counterpart in the
:py:func:`file.managed <salt.states.file.managed>` state.

.. versionchanged:: 2016.11.0
If this argument specifies the hash itself, instead of a URI to a
file containing hashes, the hash type can now be omitted and Salt
will determine the hash type based on the length of the hash. For
example, both of the below states are now valid, while before only
the second one would be:

.. code-block:: yaml

foo_app:
archive.extracted:
- name: /var/www
- source: https://mydomain.tld/foo.tar.gz
- source_hash: 3360db35e682f1c5f9c58aa307de16d41361618c

bar_app:
archive.extracted:
- name: /var/www
- source: https://mydomain.tld/bar.tar.gz
- source_hash: sha1=5edb7d584b82ddcbf76e311601f5d4442974aaa5

source_hash_name
When ``source_hash`` refers to a hash file, Salt will try to find the
correct hash by matching the filename part of the ``source`` URI. When
managing a file with a ``source`` of ``salt://files/foo.tar.gz``, then
the following line in a hash file would match:

.. code-block:: text

acbd18db4cc2f85cedef654fccc4a4d8 foo.tar.gz

This line would also match:

.. code-block:: text

acbd18db4cc2f85cedef654fccc4a4d8 ./dir1/foo.tar.gz

However, sometimes a hash file will include multiple similar paths:

.. code-block:: text

37b51d194a7513e45b56f6524f2d51f2 ./dir1/foo.txt
acbd18db4cc2f85cedef654fccc4a4d8 ./dir2/foo.txt
73feffa4b7f6bb68e44cf984c85f6e88 ./dir3/foo.txt

In cases like this, Salt may match the incorrect hash. This argument
can be used to tell Salt which filename to match, to ensure that the
correct hash is identified. For example:

.. code-block:: yaml

/var/www:
archive.extracted:
- source: https://mydomain.tld/dir2/foo.tar.gz
- source_hash: https://mydomain.tld/hashes
- source_hash_name: ./dir2/foo.tar.gz

.. note::
This argument must contain the full filename entry from the
checksum file, as this argument is meant to disambiguate matches
for multiple files that have the same basename. So, in the
example above, simply using ``foo.txt`` would not match.

.. versionadded:: 2016.11.1

source_hash_update
Set this to ``True`` if archive should be extracted if source_hash has
changed. This would extract regardless of the ``if_missing`` parameter.
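To make the matching rules above concrete, here is a rough sketch of the lookup order they describe: an explicit ``source_hash_name`` must match the full entry in the checksum file, otherwise the basename of the managed file and then of the ``source`` URI are tried. This is an illustration of the documented behaviour, not Salt's actual ``extract_hash`` code.

.. code-block:: python

    # Illustrative lookup over "checksum  filename" lines, in the priority
    # order documented above.
    import os


    def find_checksum(hash_file_lines, source_hash_name=None,
                      target_path=None, source_uri=None):
        def lookup(match_fn):
            for line in hash_file_lines:
                parts = line.split()
                if len(parts) != 2:
                    continue  # skip hash-only (Maven style) lines
                checksum, fname = parts
                if match_fn(fname):
                    return checksum
            return None

        # An explicit source_hash_name must match the full filename entry.
        if source_hash_name:
            found = lookup(lambda fname: fname == source_hash_name)
            if found:
                return found
        # Otherwise fall back to basename matching on the managed path, then
        # on the source URI (with any query string stripped).
        for path in (target_path, source_uri):
            if not path:
                continue
            wanted = os.path.basename(path.split('?')[0])
            found = lookup(lambda fname, w=wanted: os.path.basename(fname) == w)
            if found:
                return found
        return None


    lines = [
        '37b51d194a7513e45b56f6524f2d51f2 ./dir1/foo.txt',
        'acbd18db4cc2f85cedef654fccc4a4d8 ./dir2/foo.txt',
    ]
    print(find_checksum(lines, source_hash_name='./dir2/foo.txt'))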
@ -569,11 +635,11 @@ def extracted(name,
return ret

urlparsed_source = _urlparse(source_match)
source_hash_name = urlparsed_source.path or urlparsed_source.netloc
source_hash_basename = urlparsed_source.path or urlparsed_source.netloc

valid_archive_formats = ('tar', 'rar', 'zip')
if not archive_format:
archive_format = salt.utils.files.guess_archive_type(source_hash_name)
archive_format = salt.utils.files.guess_archive_type(source_hash_basename)
if archive_format is None:
ret['comment'] = (
'Could not guess archive_format from the value of the '

@ -672,7 +738,7 @@ def extracted(name,
__opts__['cachedir'],
'files',
__env__,
re.sub(r'[:/\\]', '_', source_hash_name),
re.sub(r'[:/\\]', '_', source_hash_basename),
)

if os.path.isdir(cached_source):

@ -681,8 +747,10 @@ def extracted(name,

if source_hash:
try:
source_sum = __salt__['file.get_source_sum'](source_hash_name,
source_sum = __salt__['file.get_source_sum']('',
source,
source_hash,
source_hash_name,
__env__)
except CommandExecutionError as exc:
ret['comment'] = exc.strerror

@ -710,10 +778,10 @@ def extracted(name,
cached_source,
source=source_match,
source_hash=source_hash,
source_hash_name=source_hash_name,
makedirs=True,
skip_verify=skip_verify,
saltenv=__env__,
source_hash_name=source_hash_name)
saltenv=__env__)
log.debug('file.managed: {0}'.format(file_result))

# Get actual state result. The state.single return is a single-element
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
'''
Manage CloudTrail Objects
=================
=========================

.. versionadded:: 2016.3.0

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
'''
Manage IoT Objects
=================
==================

.. versionadded:: 2016.3.0

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
'''
Manage Lambda Functions
=================
=======================

.. versionadded:: 2016.3.0

@ -3,6 +3,7 @@
Configure Chronos jobs via a salt proxy.

.. code-block:: yaml

my_job:
chronos_job.config:
- config:
@ -419,6 +419,7 @@ def absent(name,

def file(name,
source_hash='',
source_hash_name=None,
user='root',
template=None,
context=None,

@ -446,6 +447,45 @@ def file(name,
hash algorithm followed by the hash of the file:
``md5=e138491e9d5b97023cea823fe17bac22``

source_hash_name
When ``source_hash`` refers to a hash file, Salt will try to find the
correct hash by matching the filename/URI associated with that hash. By
default, Salt will look for the filename being managed. When managing a
file at path ``/tmp/foo.txt``, then the following line in a hash file
would match:

.. code-block:: text

acbd18db4cc2f85cedef654fccc4a4d8 foo.txt

However, sometimes a hash file will include multiple similar paths:

.. code-block:: text

37b51d194a7513e45b56f6524f2d51f2 ./dir1/foo.txt
acbd18db4cc2f85cedef654fccc4a4d8 ./dir2/foo.txt
73feffa4b7f6bb68e44cf984c85f6e88 ./dir3/foo.txt

In cases like this, Salt may match the incorrect hash. This argument
can be used to tell Salt which filename to match, to ensure that the
correct hash is identified. For example:

.. code-block:: yaml

foo_crontab:
cron.file:
- name: https://mydomain.tld/dir2/foo.txt
- source_hash: https://mydomain.tld/hashes
- source_hash_name: ./dir2/foo.txt

.. note::
This argument must contain the full filename entry from the
checksum file, as this argument is meant to disambiguate matches
for multiple files that have the same basename. So, in the
example above, simply using ``foo.txt`` would not match.

.. versionadded:: 2016.3.5

user
The user to whom the crontab should be assigned. This defaults to
root.

@ -498,6 +538,7 @@ def file(name,
fcm = __salt__['file.check_managed'](cron_path,
source,
source_hash,
source_hash_name,
owner,
group,
mode,
@ -1105,6 +1105,7 @@ def missing(name):
def managed(name,
source=None,
source_hash='',
source_hash_name=None,
user=None,
group=None,
mode=None,

@ -1210,7 +1211,7 @@ def managed(name,
- source: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz
- source_hash: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.hash

The following is an example of the supported source_hash format:
The following lines are all supported formats:

.. code-block:: text

@ -1220,7 +1221,7 @@ def managed(name,

Debian file type ``*.dsc`` files are also supported.

**Inserting the Source Hash in the sls Data**
**Inserting the Source Hash in the SLS Data**
Examples:

.. code-block:: yaml

@ -1246,6 +1247,44 @@ def managed(name,
- source: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz
- source_hash: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz/+md5

source_hash_name
When ``source_hash`` refers to a hash file, Salt will try to find the
correct hash by matching the filename/URI associated with that hash. By
default, Salt will look for the filename being managed. When managing a
file at path ``/tmp/foo.txt``, then the following line in a hash file
would match:

.. code-block:: text

acbd18db4cc2f85cedef654fccc4a4d8 foo.txt

However, sometimes a hash file will include multiple similar paths:

.. code-block:: text

37b51d194a7513e45b56f6524f2d51f2 ./dir1/foo.txt
acbd18db4cc2f85cedef654fccc4a4d8 ./dir2/foo.txt
73feffa4b7f6bb68e44cf984c85f6e88 ./dir3/foo.txt

In cases like this, Salt may match the incorrect hash. This argument
can be used to tell Salt which filename to match, to ensure that the
correct hash is identified. For example:

.. code-block:: yaml

/tmp/foo.txt:
file.managed:
- source: https://mydomain.tld/dir2/foo.txt
- source_hash: https://mydomain.tld/hashes
- source_hash_name: ./dir2/foo.txt

.. note::
This argument must contain the full filename entry from the
checksum file, as this argument is meant to disambiguate matches
for multiple files that have the same basename. So, in the
example above, simply using ``foo.txt`` would not match.

.. versionadded:: 2016.3.5,2016.11.1

user
The user to own the file, this defaults to the user salt is running as
|
||||
name,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
@ -1774,6 +1814,7 @@ def managed(name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
@ -3000,43 +3041,61 @@ def line(name, content, match=None, mode=None, location=None,
|
||||
|
||||
.. versionadded:: 2015.8.0
|
||||
|
||||
:param name:
|
||||
name
|
||||
Filesystem path to the file to be edited.
|
||||
|
||||
:param content:
|
||||
content
|
||||
Content of the line.
|
||||
|
||||
:param match:
|
||||
match
|
||||
Match the target line for an action by
|
||||
a fragment of a string or regular expression.
|
||||
|
||||
:param mode:
|
||||
:Ensure:
|
||||
If neither ``before`` nor ``after`` are provided, and ``match``
|
||||
is also ``None``, match becomes the ``content`` value.
|
||||
|
||||
mode
|
||||
Defines how to edit a line. One of the following options is
|
||||
required:
|
||||
|
||||
- ensure
|
||||
If line does not exist, it will be added.
|
||||
|
||||
:Replace:
|
||||
If line already exist, it will be replaced.
|
||||
|
||||
:Delete:
|
||||
- replace
|
||||
If line already exists, it will be replaced.
|
||||
- delete
|
||||
Delete the line, once found.
|
||||
|
||||
:Insert:
|
||||
- insert
|
||||
Insert a line.
|
||||
|
||||
:param location:
|
||||
:start:
|
||||
Place the content at the beginning of the file.
|
||||
.. note::
|
||||
|
||||
:end:
|
||||
If ``mode=insert`` is used, at least one of the following
|
||||
options must also be defined: ``location``, ``before``, or
|
||||
``after``. If ``location`` is used, it takes precedence
|
||||
over the other two options.
|
||||
|
||||
location
|
||||
Defines where to place content in the line. Note this option is only
|
||||
used when ``mode=insert`` is specified. If a location is passed in, it
|
||||
takes precedence over both the ``before`` and ``after`` kwargs. Valid
|
||||
locations are:
|
||||
|
||||
- start
|
||||
Place the content at the beginning of the file.
|
||||
- end
|
||||
Place the content at the end of the file.
|
||||
|
||||
:param before:
|
||||
before
|
||||
Regular expression or an exact case-sensitive fragment of the string.
|
||||
This option is only used when either the ``ensure`` or ``insert`` mode
|
||||
is defined.
|
||||
|
||||
:param after:
|
||||
after
|
||||
Regular expression or an exact case-sensitive fragment of the string.
|
||||
This option is only used when either the ``ensure`` or ``insert`` mode
|
||||
is defined.
|
||||
|
||||
:param show_changes:
|
||||
show_changes
|
||||
Output a unified diff of the old file and the new file.
|
||||
If ``False`` return a boolean if any changes were made.
|
||||
Default is ``True``
|
||||
@ -3045,16 +3104,17 @@ def line(name, content, match=None, mode=None, location=None,
|
||||
Using this option will store two copies of the file in-memory
|
||||
(the original version and the edited version) in order to generate the diff.
|
||||
|
||||
:param backup:
|
||||
backup
|
||||
Create a backup of the original file with the extension:
|
||||
"Year-Month-Day-Hour-Minutes-Seconds".
|
||||
|
||||
:param quiet:
|
||||
quiet
|
||||
Do not raise any exceptions. E.g. ignore the fact that the file that is
|
||||
tried to be edited does not exist and nothing really happened.
|
||||
|
||||
:param indent:
|
||||
Keep indentation with the previous line.
|
||||
indent
|
||||
Keep indentation with the previous line. This option is not considered when
|
||||
the ``delete`` mode is specified.
|
||||
|
||||
:param create:
|
||||
Create an empty file if doesn't exists.
|
||||
@ -5059,6 +5119,7 @@ def serialize(name,
|
||||
name=name,
|
||||
source=None,
|
||||
source_hash={},
|
||||
source_hash_name=None,
|
||||
user=user,
|
||||
group=group,
|
||||
mode=mode,
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Management of the GPG keychains
|
||||
==============================
|
||||
===============================
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Allows you to manage assistive access on OS X minions with 10.9+
|
||||
=======================
|
||||
================================================================
|
||||
|
||||
Install, enable and disable assitive access on OS X minions
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Writing/reading defaults from an OS X minion
|
||||
=======================
|
||||
============================================
|
||||
|
||||
'''
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Installing of certificates to the keychain
|
||||
=======================
|
||||
==========================================
|
||||
|
||||
Install certificats to the OS X keychain
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Allows you to manage extended attributes on files or directories
|
||||
=======================
|
||||
================================================================
|
||||
|
||||
Install, enable and disable assitive access on OS X minions
|
||||
|
||||
|
@ -3,6 +3,7 @@
|
||||
Configure Marathon apps via a salt proxy.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
my_app:
|
||||
marathon_app.config:
|
||||
- config:
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Allows you to manage proxy settings on minions
|
||||
=======================
|
||||
==============================================
|
||||
|
||||
Setup proxy settings on minions
|
||||
|
||||
|
@ -5,7 +5,7 @@ Send a message to PushOver
|
||||
|
||||
This state is useful for sending messages to PushOver during state runs.
|
||||
|
||||
.. versionadded:: Lithium
|
||||
.. versionadded:: 2015.5.0
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Installing of certificates to the Windows Certificate Manager
|
||||
=======================
|
||||
=============================================================
|
||||
|
||||
Install certificates to the Windows Certificate Manager
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Installing of Windows features using DISM
|
||||
=======================
|
||||
=========================================
|
||||
|
||||
Install windows features/capabilties with DISM
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Installation and activation of windows licenses
|
||||
=======================
|
||||
===============================================
|
||||
|
||||
Install and activate windows licenses
|
||||
|
||||
|
@ -46,6 +46,7 @@ features/states of updates available for configuring:
|
||||
|
||||
The following example installs all driver updates that don't require a reboot:
|
||||
.. code-block:: yaml
|
||||
|
||||
gryffindor:
|
||||
win_update.installed:
|
||||
- skips:
|
||||
@ -56,6 +57,7 @@ The following example installs all driver updates that don't require a reboot:
|
||||
To just update your windows machine, add this your sls:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
updates:
|
||||
win_update.installed
|
||||
'''
|
||||
|
@ -188,7 +188,7 @@ class BotoVpcTestCaseMixin(object):
|
||||
if not self.conn:
|
||||
self.conn = boto.vpc.connect_to_region(region)
|
||||
|
||||
igw = self.conn.create_internet_gateway(vpc_id)
|
||||
igw = self.conn.create_internet_gateway()
|
||||
_maybe_set_name_tag(name, igw)
|
||||
_maybe_set_tags(tags, igw)
|
||||
return igw
|
||||
|
@ -483,10 +483,14 @@ class FileModuleTestCase(TestCase):
|
||||
'''
|
||||
# With file name
|
||||
with tempfile.NamedTemporaryFile(mode='w+') as tfile:
|
||||
tfile.write('rc.conf ef6e82e4006dee563d98ada2a2a80a27\n')
|
||||
tfile.write(
|
||||
'ead48423703509d37c4a90e6a0d53e143b6fc268 example.tar.gz\n')
|
||||
'rc.conf ef6e82e4006dee563d98ada2a2a80a27\n'
|
||||
'ead48423703509d37c4a90e6a0d53e143b6fc268 example.tar.gz\n'
|
||||
'fe05bcdcdc4928012781a5f1a2a77cbb5398e106 ./subdir/example.tar.gz\n'
|
||||
'ad782ecdac770fc6eb9a62e44f90873fb97fb26b foo.tar.bz2\n'
|
||||
)
|
||||
tfile.flush()
|
||||
|
||||
result = filemod.extract_hash(tfile.name, '', '/rc.conf')
|
||||
self.assertEqual(result, {
|
||||
'hsum': 'ef6e82e4006dee563d98ada2a2a80a27',
|
||||
@ -498,15 +502,83 @@ class FileModuleTestCase(TestCase):
|
||||
'hsum': 'ead48423703509d37c4a90e6a0d53e143b6fc268',
|
||||
'hash_type': 'sha1'
|
||||
})
|
||||
# Solohash - no file name (Maven repo checksum file format)
|
||||
|
||||
# All the checksums in this test file are sha1 sums. We run this
|
||||
# loop three times. The first pass tests auto-detection of hash
|
||||
# type by length of the hash. The second tests matching a specific
|
||||
# type. The third tests a failed attempt to match a specific type,
|
||||
# since sha256 was requested but sha1 is what is in the file.
|
||||
for hash_type in ('', 'sha1', 'sha256'):
|
||||
# Test the source_hash_name argument. Even though there are
|
||||
# matches in the source_hash file for both the file_name and
|
||||
# source params, they should be ignored in favor of the
|
||||
# source_hash_name.
|
||||
file_name = '/example.tar.gz'
|
||||
source = 'https://mydomain.tld/foo.tar.bz2?key1=val1&key2=val2'
|
||||
source_hash_name = './subdir/example.tar.gz'
|
||||
result = filemod.extract_hash(
|
||||
tfile.name,
|
||||
hash_type,
|
||||
file_name,
|
||||
source,
|
||||
source_hash_name)
|
||||
expected = {
|
||||
'hsum': 'fe05bcdcdc4928012781a5f1a2a77cbb5398e106',
|
||||
'hash_type': 'sha1'
|
||||
} if hash_type != 'sha256' else None
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
# Test both a file_name and source but no source_hash_name.
|
||||
# Even though there are matches for both file_name and
|
||||
# source_hash_name, file_name should be preferred.
|
||||
file_name = '/example.tar.gz'
|
||||
source = 'https://mydomain.tld/foo.tar.bz2?key1=val1&key2=val2'
|
||||
source_hash_name = None
|
||||
result = filemod.extract_hash(
|
||||
tfile.name,
|
||||
hash_type,
|
||||
file_name,
|
||||
source,
|
||||
source_hash_name)
|
||||
expected = {
|
||||
'hsum': 'ead48423703509d37c4a90e6a0d53e143b6fc268',
|
||||
'hash_type': 'sha1'
|
||||
} if hash_type != 'sha256' else None
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
# Test both a file_name and source but no source_hash_name.
|
||||
# Since there is no match for the file_name, the source is
|
||||
# matched.
|
||||
file_name = '/somefile.tar.gz'
|
||||
source = 'https://mydomain.tld/foo.tar.bz2?key1=val1&key2=val2'
|
||||
source_hash_name = None
|
||||
result = filemod.extract_hash(
|
||||
tfile.name,
|
||||
hash_type,
|
||||
file_name,
|
||||
source,
|
||||
source_hash_name)
|
||||
expected = {
|
||||
'hsum': 'ad782ecdac770fc6eb9a62e44f90873fb97fb26b',
|
||||
'hash_type': 'sha1'
|
||||
} if hash_type != 'sha256' else None
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
# Hash only, no file name (Maven repo checksum format)
|
||||
# Since there is no name match, the first checksum in the file will
|
||||
# always be returned, never the second.
|
||||
with tempfile.NamedTemporaryFile(mode='w+') as tfile:
|
||||
tfile.write('ead48423703509d37c4a90e6a0d53e143b6fc268\n')
|
||||
tfile.write('ead48423703509d37c4a90e6a0d53e143b6fc268\n'
|
||||
'ad782ecdac770fc6eb9a62e44f90873fb97fb26b\n')
|
||||
tfile.flush()
|
||||
result = filemod.extract_hash(tfile.name, '', '/testfile')
|
||||
self.assertEqual(result, {
|
||||
'hsum': 'ead48423703509d37c4a90e6a0d53e143b6fc268',
|
||||
'hash_type': 'sha1'
|
||||
})
|
||||
|
||||
for hash_type in ('', 'sha1', 'sha256'):
|
||||
result = filemod.extract_hash(tfile.name, hash_type, '/testfile')
|
||||
expected = {
|
||||
'hsum': 'ead48423703509d37c4a90e6a0d53e143b6fc268',
|
||||
'hash_type': 'sha1'
|
||||
} if hash_type != 'sha256' else None
|
||||
self.assertEqual(result, expected)
|
||||
|
||||
def test_user_to_uid_int(self):
|
||||
'''
|
||||
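The first loop in the new test relies on length-based auto-detection: when no hash type is passed, the digest length decides. A small sketch of that idea using the common hex-digest lengths (treat the table as an illustration, not Salt's exact implementation):

.. code-block:: python

    # Map hex digest length to a hash type when no explicit type is given.
    DIGEST_LENGTHS = {32: 'md5', 40: 'sha1', 64: 'sha256'}


    def guess_hash_type(checksum):
        return DIGEST_LENGTHS.get(len(checksum))


    print(guess_hash_type('acbd18db4cc2f85cedef654fccc4a4d8'))          # md5
    print(guess_hash_type('ead48423703509d37c4a90e6a0d53e143b6fc268'))  # sha1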
@ -571,6 +643,32 @@ class FileModuleTestCase(TestCase):
saltenv='base')
self.assertEqual(ret, 'This is a templated file.')

def test_replace_line_in_empty_file(self):
'''
Tests that when calling file.line with ``mode=replace``,
the function doesn't stack trace if the file is empty.
Should return ``False``.

See Issue #31135.
'''
# Create an empty temporary named file
empty_file = tempfile.NamedTemporaryFile(delete=False,
mode='w+')

# Assert that the file was created and is empty
self.assertEqual(os.stat(empty_file.name).st_size, 0)

# Now call the function on the empty file and assert
# the return is False instead of stack-tracing
self.assertFalse(filemod.line(empty_file.name,
content='foo',
match='bar',
mode='replace'))

# Close and remove the file
empty_file.close()
os.remove(empty_file.name)


if __name__ == '__main__':
from integration import run_tests
@ -312,11 +312,12 @@ class BotoVpcRouteTableTestCase(BotoVpcStateTestCaseBase, BotoVpcResourceTestCas
vpc = self._create_vpc(name='test')
igw = self._create_internet_gateway(name='test', vpc_id=vpc.id)

route_table_present_result = salt_states['boto_vpc.route_table_present'](
name='test', vpc_name='test', routes=[{'destination_cidr_block': '0.0.0.0/0',
'gateway_id': igw.id},
{'destination_cidr_block': '10.0.0.0/24',
'gateway_id': 'local'}])
with patch.dict('salt.utils.boto.__salt__', funcs):
route_table_present_result = salt_states['boto_vpc.route_table_present'](
name='test', vpc_name='test', routes=[{'destination_cidr_block': '0.0.0.0/0',
'gateway_id': igw.id},
{'destination_cidr_block': '10.0.0.0/24',
'gateway_id': 'local'}])
routes = [x['gateway_id'] for x in route_table_present_result['changes']['new']['routes']]

self.assertEqual(set(routes), set(['local', igw.id]))
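The new ``with patch.dict(...)`` wrapper temporarily injects the prepared functions into the module-level ``__salt__`` dictionary for the duration of the call. A minimal, self-contained sketch of the same ``patch.dict`` pattern (the dictionary and values are made up for the example):

.. code-block:: python

    # patch.dict adds entries for the duration of the 'with' block only and
    # restores the original dictionary afterwards.
    try:
        from unittest.mock import patch   # Python 3
    except ImportError:
        from mock import patch            # Python 2 'mock' package

    registry = {'existing.func': lambda: 'kept'}

    with patch.dict(registry, {'config.option': lambda name: 'mocked-' + name}):
        assert registry['config.option']('region') == 'mocked-region'

    assert 'config.option' not in registry    # removed again on exit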