Mirror of https://github.com/valitydev/salt.git (synced 2024-11-08 09:23:56 +00:00)

Merge branch 'develop' of https://github.com/saltstack/salt into state_output_diff_cmdflag

Commit 5dec2aadc7
@@ -135,17 +135,12 @@ Options
    form, this is suitable for re-reading the output into
    an executing python script with eval.

.. option:: --text-out
.. option:: --out=OUTPUT, --output=OUTPUT

    Print the output from the salt command in the same form the shell would.

.. option:: --yaml-out

    Print the output from the salt command in yaml.

.. option:: --json-out

    Print the output from the salt command in json.
    Print the output from the salt-cloud command using the specified outputter. The
    builtins are 'raw', 'compact', 'no_return', 'grains', 'overstatestage', 'pprint',
    'json', 'nested', 'yaml', 'highstate', 'quiet', 'key', 'txt', 'newline_values_only',
    'virt_query'.

.. option:: --no-color
@@ -195,7 +195,6 @@ Depends
~~~~~~~

- `Salt Common`
- `sshpass`
- `Python MessagePack` (Messagepack C lib, or msgpack-pure)

Salt Cloud
@@ -220,7 +219,6 @@ Depends
~~~~~~~

- `Salt Common`
- `sshpass`
- `apache libcloud` >= 0.14.0

Salt Doc
@@ -17,3 +17,7 @@ Version 2014.7.1 is a bugfix release for :doc:`2014.7.0
  pillar globbing is still disabled for those modes, for security reasons.
  (:issue:`17194`)
- Fix for ``tty: True`` in salt-ssh (:issue:`16847`)
- Fix for supervisord states when supervisor not installed to system python
  (:issue:`18044`)
- Fix for logging when ``log_level='quiet'`` for :mod:`cmd.run
  <salt.states.cmd.run>` (:issue:`19479`)
doc/topics/releases/2014.7.2.rst (new file, 10 lines)
@@ -0,0 +1,10 @@
===========================
Salt 2014.7.2 Release Notes
===========================

:release: TBA

Version 2014.7.2 is a bugfix release for :doc:`2014.7.0
</topics/releases/2014.7.0>`. The changes include:

- Fix erroneous warnings for systemd service enabled check (:issue:`19606`)
@@ -9,6 +9,7 @@ from __future__ import absolute_import

# Import python libs
import json
import copy

# Import salt libs
import salt.loader
@@ -29,8 +30,10 @@ class FunctionWrapper(object):
                 wfuncs=None,
                 mods=None,
                 fsclient=None,
                 cmd_prefix=None,
                 **kwargs):
        super(FunctionWrapper, self).__init__()
        self.cmd_prefix = cmd_prefix
        self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {}
        self.opts = opts
        self.mods = mods if isinstance(mods, dict) else {}
@@ -43,6 +46,29 @@ class FunctionWrapper(object):
        '''
        Return the function call to simulate the salt local lookup system
        '''
        if '.' not in cmd and not self.cmd_prefix:
            # Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
            # containing only 'cmd' module calls, in that case. Create a new
            # FunctionWrapper which contains the prefix 'cmd' (again, for the
            # salt.cmd.run example)
            kwargs = copy.deepcopy(self.kwargs)
            id_ = kwargs.pop('id_')
            host = kwargs.pop('host')
            return FunctionWrapper(self.opts,
                                   id_,
                                   host,
                                   wfuncs=self.wfuncs,
                                   mods=self.mods,
                                   fsclient=self.fsclient,
                                   cmd_prefix=cmd,
                                   **kwargs)

        if self.cmd_prefix:
            # We're in an inner FunctionWrapper as created by the code block
            # above. Reconstruct the original cmd in the form 'cmd.run' and
            # then evaluate as normal
            cmd = '{0}.{1}'.format(self.cmd_prefix, cmd)

        if cmd in self.wfuncs:
            return self.wfuncs[cmd]
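The comments in this hunk describe how an expression like salt.cmd.run resolves in a Jinja template under salt-ssh: a lookup with no dot returns an inner FunctionWrapper tagged with cmd_prefix, and the next lookup recombines the prefix into the full 'cmd.run' key. A minimal sketch of that two-step resolution, using a hypothetical stand-in class rather than the real Salt loader:

    # Illustrative only -- not the real FunctionWrapper, just the prefix trick.
    class PrefixWrapper(object):
        def __init__(self, funcs, cmd_prefix=None):
            self.funcs = funcs            # maps 'cmd.run' -> callable
            self.cmd_prefix = cmd_prefix  # set on the inner wrapper only

        def __getitem__(self, cmd):
            if '.' not in cmd and not self.cmd_prefix:
                # First hop: salt['cmd'] returns an inner wrapper tagged 'cmd'
                return PrefixWrapper(self.funcs, cmd_prefix=cmd)
            if self.cmd_prefix:
                # Second hop: rebuild the full name, e.g. 'cmd' + '.' + 'run'
                cmd = '{0}.{1}'.format(self.cmd_prefix, cmd)
            return self.funcs[cmd]

    salt_ns = PrefixWrapper({'cmd.run': lambda arg: 'ran {0}'.format(arg)})
    print(salt_ns['cmd']['run']('ls'))  # -> ran ls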
@@ -101,13 +101,6 @@ def create(vm_):
    '''
    Create a single VM from a data dict
    '''
    deploy = config.get_cloud_config_value('deploy', vm_, __opts__)
    if deploy is True and salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'sshpass\' binary is not '
            'present on the system.'
        )

    salt.utils.cloud.fire_event(
        'event',
        'starting create',
@@ -245,7 +238,6 @@ def create(vm_):
        transport=__opts__['transport']
    )

    deployed = False
    if win_installer:
        deployed = salt.utils.cloud.deploy_windows(**deploy_kwargs)
    else:
@@ -175,17 +175,9 @@ def create(vm_):

        salt-cloud -p profile_name vm_name
    '''
    deploy = config.get_cloud_config_value('deploy', vm_, __opts__)
    key_filename = config.get_cloud_config_value(
        'private_key', vm_, __opts__, search_global=False, default=None
    )
    if deploy is True and key_filename is None and \
            salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'private_key\' setting '
            'is not set and \'sshpass\' binary is not present on the '
            'system for the password.'
        )

    salt.utils.cloud.fire_event(
        'event',
@@ -26,19 +26,19 @@ http://www.windowsazure.com/en-us/develop/python/how-to-guides/service-managemen

from __future__ import absolute_import

# Import python libs
from __future__ import absolute_import
import copy
import logging
import pprint
import time

# Import salt cloud libs
import salt.config as config
from salt.exceptions import SaltCloudSystemExit
import salt.utils.cloud
import yaml


# Import python libs
# Import salt cloud libs
# Import azure libs
HAS_LIBS = False
try:
@@ -412,6 +412,37 @@ def show_instance(name, call=None):
    return nodes[name]


def show_service(kwargs=None, conn=None, call=None):
    '''
    Show the details from the provider concerning an instance
    '''
    if call != 'function':
        raise SaltCloudSystemExit(
            'The show_service function must be called with -f or --function.'
        )

    if not conn:
        conn = get_conn()

    services = conn.list_hosted_services()
    for service in services:
        if kwargs['service_name'] != service.service_name:
            continue
        props = service.hosted_service_properties
        ret = {
            'affinity_group': props.affinity_group,
            'date_created': props.date_created,
            'date_last_modified': props.date_last_modified,
            'description': props.description,
            'extended_properties': props.extended_properties,
            'label': props.label,
            'location': props.location,
            'status': props.status,
        }
        return ret
    return None


def create(vm_):
    '''
    Create a single VM from a data dict
@@ -546,14 +546,6 @@ def create(vm_):
        )
    )

    if deploy is True and key_filename is None and \
            salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'ssh_key_file\' setting '
            'is not set and \'sshpass\' binary is not present on the '
            'system for the password.'
        )

    vm_['key_filename'] = key_filename

    salt.utils.cloud.fire_event(
@@ -592,14 +592,6 @@ def create(vm_):
        )
    )

    if deploy is True and key_filename is None and \
            salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'ssh_key_file\' setting '
            'is not set and \'sshpass\' binary is not present on the '
            'system for the password.'
        )

    vm_['key_filename'] = key_filename

    salt.utils.cloud.fire_event(
@@ -270,13 +270,6 @@ def create(vm_):
    '''
    Create a single VM from a data dict
    '''
    deploy = config.get_cloud_config_value('deploy', vm_, __opts__)
    if deploy is True and salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'sshpass\' binary is not '
            'present on the system.'
        )

    salt.utils.cloud.fire_event(
        'event',
        'starting create',
@@ -478,12 +478,6 @@ def create(vm_):
        salt-cloud -p proxmox-ubuntu vmhostname
    '''
    ret = {}
    deploy = config.get_cloud_config_value('deploy', vm_, __opts__)
    if deploy is True and salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'sshpass\' binary is not '
            'present on the system.'
        )

    salt.utils.cloud.fire_event(
        'event',
@@ -648,7 +642,6 @@ def create(vm_):
        transport=__opts__['transport']
    )

    deployed = False
    if win_installer:
        deployed = salt.utils.cloud.deploy_windows(**deploy_kwargs)
    else:
@@ -185,12 +185,6 @@ def create(vm_):
    Create a single VM from a data dict
    '''
    deploy = config.get_cloud_config_value('deploy', vm_, __opts__)
    if deploy is True and salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'sshpass\' binary is not '
            'present on the system.'
        )

    salt.utils.cloud.fire_event(
        'event',
        'starting create',
@@ -428,7 +422,6 @@ def create(vm_):
        transport=__opts__['transport']
    )

    deployed = False
    if win_installer:
        deployed = salt.utils.cloud.deploy_windows(**deploy_kwargs)
    else:
@@ -23,7 +23,7 @@ import salt.utils
# Import salt cloud libs
import salt.utils.cloud
import salt.config as config
from salt.exceptions import SaltCloudConfigError, SaltCloudSystemExit
from salt.exceptions import SaltCloudConfigError

# Get logging started
log = logging.getLogger(__name__)
@@ -70,9 +70,11 @@ def create(vm_):
            'No Deploy': '\'deploy\' is not enabled. Not deploying.'
        }
    }

    key_filename = config.get_cloud_config_value(
        'key_filename', vm_, __opts__, search_global=False, default=None
    )

    if key_filename is not None and not os.path.isfile(key_filename):
        raise SaltCloudConfigError(
            'The defined ssh_keyfile {0!r} does not exist'.format(
@@ -80,13 +82,6 @@ def create(vm_):
            )
        )

    if key_filename is None and salt.utils.which('sshpass') is None:
        raise SaltCloudSystemExit(
            'Cannot deploy salt in a VM if the \'ssh_keyfile\' setting '
            'is not set and \'sshpass\' binary is not present on the '
            'system for the password.'
        )

    ret = {}

    log.info('Provisioning existing machine {0}'.format(vm_['name']))
@@ -188,7 +183,6 @@ def create(vm_):
        transport=__opts__['transport']
    )

    deployed = False
    if win_installer:
        deployed = salt.utils.cloud.deploy_windows(**deploy_kwargs)
    else:
@@ -557,24 +557,16 @@ class MultiMinion(MinionBase):
                # run scheduled jobs if you have them
                loop_interval = self.process_schedule(minion['minion'], loop_interval)

                # if you have an event to handle, do it on a single minion
                # (first one to not throw an exception)
                # If a minion instance receives event, handle the event on all
                # instances
                if package:
                    # If we need to expand this, we may want to consider a specific header
                    # or another approach entirely.
                    if package.startswith('_minion_mine'):
                        for multi_minion in minions:
                            try:
                                minions[master]['minion'].handle_event(package)
                            except Exception:
                                pass
                    else:
                        try:
                            minion['minion'].handle_event(package)
                            package = None
                            self.epub_sock.send(package)
                        except Exception:
                            pass
                    try:
                        for master in masters:
                            minions[master].handle_event(package)
                    except Exception:
                        pass
                    finally:
                        package = None

                # have the Minion class run anything it has to run
                next(minion['generator'])
@ -7,13 +7,9 @@ A module to wrap (non-Windows) archive calls
|
||||
from __future__ import absolute_import
|
||||
import os
|
||||
|
||||
|
||||
# Import salt libs
|
||||
from salt.exceptions import SaltInvocationError, CommandExecutionError
|
||||
from salt.ext.six import string_types
|
||||
from salt.utils import \
|
||||
which as _which, which_bin as _which_bin, is_windows as _is_windows
|
||||
import salt.utils.decorators as decorators
|
||||
from salt.ext.six import string_types, integer_types
|
||||
import salt.utils
|
||||
|
||||
# TODO: Check that the passed arguments are correct
|
||||
@ -33,17 +29,18 @@ except ImportError:
|
||||
|
||||
|
||||
def __virtual__():
|
||||
if _is_windows():
|
||||
if salt.utils.is_windows():
|
||||
return HAS_ZIPFILE
|
||||
commands = ('tar', 'gzip', 'gunzip', 'zip', 'unzip', 'rar', 'unrar')
|
||||
# If none of the above commands are in $PATH this module is a no-go
|
||||
if not any(_which(cmd) for cmd in commands):
|
||||
if not any(salt.utils.which(cmd) for cmd in commands):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@decorators.which('tar')
|
||||
def tar(options, tarfile, sources=None, dest=None, cwd=None, template=None, runas=None):
|
||||
@salt.utils.decorators.which('tar')
|
||||
def tar(options, tarfile, sources=None, dest=None,
|
||||
cwd=None, template=None, runas=None):
|
||||
'''
|
||||
.. note::
|
||||
|
||||
@ -64,7 +61,7 @@ def tar(options, tarfile, sources=None, dest=None, cwd=None, template=None, runa
|
||||
|
||||
sources
|
||||
Comma delimited list of files to **pack** into the tarfile. Can also be
|
||||
passed as a python list.
|
||||
passed as a Python list.
|
||||
|
||||
dest
|
||||
The destination directory into which to **unpack** the tarfile
|
||||
@ -114,7 +111,7 @@ def tar(options, tarfile, sources=None, dest=None, cwd=None, template=None, runa
|
||||
python_shell=False).splitlines()
|
||||
|
||||
|
||||
@decorators.which('gzip')
|
||||
@salt.utils.decorators.which('gzip')
|
||||
def gzip(sourcefile, template=None, runas=None):
|
||||
'''
|
||||
Uses the gzip command to create gzip files
|
||||
@ -141,7 +138,7 @@ def gzip(sourcefile, template=None, runas=None):
|
||||
python_shell=False).splitlines()
|
||||
|
||||
|
||||
@decorators.which('gunzip')
|
||||
@salt.utils.decorators.which('gunzip')
|
||||
def gunzip(gzipfile, template=None, runas=None):
|
||||
'''
|
||||
Uses the gunzip command to unpack gzip files
|
||||
@ -168,18 +165,90 @@ def gunzip(gzipfile, template=None, runas=None):
|
||||
python_shell=False).splitlines()
|
||||
|
||||
|
||||
@decorators.which('zip')
|
||||
def cmd_zip_(zip_file, sources, template=None,
|
||||
cwd=None, recurse=False, runas=None):
|
||||
@salt.utils.decorators.which('zip')
|
||||
def cmd_zip(zip_file, sources, template=None, cwd=None, runas=None):
|
||||
'''
|
||||
Uses the zip command to create zip files
|
||||
.. versionadded:: 2015.2.0
|
||||
In versions 2014.7.x and earlier, this function was known as
|
||||
``archive.zip``.
|
||||
|
||||
Uses the ``zip`` command to create zip files. This command is part of the
|
||||
`Info-ZIP`_ suite of tools, and is typically packaged as simply ``zip``.
|
||||
|
||||
.. _`Info-ZIP`: http://www.info-zip.org/
|
||||
|
||||
zip_file
|
||||
Path of zip file to be created
|
||||
|
||||
sources
|
||||
Comma-separated list of sources to include in the zip file. Sources can
|
||||
also be passed in a python list.
|
||||
also be passed in a Python list.
|
||||
|
||||
template : None
|
||||
Can be set to 'jinja' or another supported template engine to render
|
||||
the command arguments before execution:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.cmd_zip template=jinja /tmp/zipfile.zip /tmp/sourcefile1,/tmp/{{grains.id}}.txt
|
||||
|
||||
cwd : None
|
||||
Use this argument along with relative paths in ``sources`` to create
|
||||
zip files which do not contain the leading directories. If not
|
||||
specified, the zip file will be created as if the cwd was ``/``, and
|
||||
creating a zip file of ``/foo/bar/baz.txt`` will contain the parent
|
||||
directories ``foo`` and ``bar``. To create a zip file containing just
|
||||
``baz.txt``, the following command would be used:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.cmd_zip /tmp/baz.zip baz.txt cwd=/foo/bar
|
||||
|
||||
.. versionadded:: 2014.7.1
|
||||
|
||||
runas : None
|
||||
Create the zip file as the specified user. Defaults to the user under
|
||||
which the minion is running.
|
||||
|
||||
.. versionadded:: 2015.2.0
|
||||
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.cmd_zip /tmp/zipfile.zip /tmp/sourcefile1,/tmp/sourcefile2
|
||||
'''
|
||||
if isinstance(sources, string_types):
|
||||
sources = [s.strip() for s in sources.split(',')]
|
||||
cmd = ['zip', '-r']
|
||||
cmd.append('{0}'.format(zip_file))
|
||||
cmd.extend(sources)
|
||||
return __salt__['cmd.run'](cmd,
|
||||
cwd=cwd,
|
||||
template=template,
|
||||
runas=runas,
|
||||
python_shell=False).splitlines()
|
||||
|
||||
|
||||
@salt.utils.decorators.depends('zipfile', fallback_function=cmd_zip)
|
||||
def zip_(zip_file, sources, template=None, cwd=None, runas=None):
|
||||
'''
|
||||
Uses the ``zipfile`` Python module to create zip files
|
||||
|
||||
.. versionchanged:: 2015.2.0
|
||||
This function was rewritten to use Python's native zip file support.
|
||||
The old functionality has been preserved in the new function
|
||||
:mod:`archive.cmd_zip <salt.modules.archive.cmd_zip>`. For versions
|
||||
2014.7.x and earlier, see the :mod:`archive.cmd_zip
|
||||
<salt.modules.archive.cmd_zip>` documentation.
|
||||
|
||||
zip_file
|
||||
Path of zip file to be created
|
||||
|
||||
sources
|
||||
Comma-separated list of sources to include in the zip file. Sources can
|
||||
also be passed in a Python list.
|
||||
|
||||
template : None
|
||||
Can be set to 'jinja' or another supported template engine to render
|
||||
@ -190,20 +259,21 @@ def cmd_zip_(zip_file, sources, template=None,
|
||||
salt '*' archive.zip template=jinja /tmp/zipfile.zip /tmp/sourcefile1,/tmp/{{grains.id}}.txt
|
||||
|
||||
cwd : None
|
||||
Run the zip command from the specified directory. Use this argument
|
||||
along with relative file paths to create zip files which do not
|
||||
contain the leading directories. If not specified, this will default
|
||||
to the home directory of the user under which the salt minion process
|
||||
is running.
|
||||
Use this argument along with relative paths in ``sources`` to create
|
||||
zip files which do not contain the leading directories. If not
|
||||
specified, the zip file will be created as if the cwd was ``/``, and
|
||||
creating a zip file of ``/foo/bar/baz.txt`` will contain the parent
|
||||
directories ``foo`` and ``bar``. To create a zip file containing just
|
||||
``baz.txt``, the following command would be used:
|
||||
|
||||
.. versionadded:: 2014.7.1
|
||||
.. code-block:: bash
|
||||
|
||||
recurse : False
|
||||
Recursively include contents of sources which are directories. Combine
|
||||
this with the ``cwd`` argument and use relative paths for the sources
|
||||
to create a zip file which does not contain the leading directories.
|
||||
salt '*' archive.zip /tmp/baz.zip baz.txt cwd=/foo/bar
|
||||
|
||||
runas : None
|
||||
Create the zip file as the specified user. Defaults to the user under
|
||||
which the minion is running.
|
||||
|
||||
.. versionadded:: 2014.7.1
|
||||
|
||||
CLI Example:
|
||||
|
||||
@ -211,69 +281,106 @@ def cmd_zip_(zip_file, sources, template=None,
|
||||
|
||||
salt '*' archive.zip /tmp/zipfile.zip /tmp/sourcefile1,/tmp/sourcefile2
|
||||
'''
|
||||
if isinstance(sources, string_types):
|
||||
sources = [s.strip() for s in sources.split(',')]
|
||||
cmd = ['zip']
|
||||
if recurse:
|
||||
cmd.append('-r')
|
||||
cmd.append('{0}'.format(zip_file))
|
||||
cmd.extend(sources)
|
||||
return __salt__['cmd.run'](cmd,
|
||||
cwd=cwd,
|
||||
template=template,
|
||||
runas=runas,
|
||||
python_shell=False).splitlines()
|
||||
if runas:
|
||||
euid = os.geteuid()
|
||||
egid = os.getegid()
|
||||
uinfo = __salt__['user.info'](runas)
|
||||
if not uinfo:
|
||||
raise SaltInvocationError(
|
||||
'User \'{0}\' does not exist'.format(runas)
|
||||
)
|
||||
|
||||
|
||||
@decorators.depends('zipfile', fallback_function=cmd_zip_)
|
||||
def zip_(archive, sources, template=None, runas=None):
|
||||
'''
|
||||
Uses the zipfile module to create zip files
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.zip /tmp/zipfile.zip /tmp/sourcefile1,/tmp/sourcefile2
|
||||
|
||||
The template arg can be set to 'jinja' or another supported template
|
||||
engine to render the command arguments before execution.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.zip template=jinja /tmp/zipfile.zip /tmp/sourcefile1,/tmp/{{grains.id}}.txt
|
||||
|
||||
'''
|
||||
(archive, sources) = _render_filenames(archive, sources, None, template)
|
||||
zip_file, sources = _render_filenames(zip_file, sources, None, template)
|
||||
|
||||
if isinstance(sources, string_types):
|
||||
sources = [s.strip() for s in sources.split(',')]
|
||||
sources = [x.strip() for x in sources.split(',')]
|
||||
elif isinstance(sources, (float, integer_types)):
|
||||
sources = [str(sources)]
|
||||
|
||||
archived_files = []
|
||||
with zipfile.ZipFile(archive, "w", zipfile.ZIP_DEFLATED) as zf:
|
||||
if not cwd:
|
||||
for src in sources:
|
||||
if os.path.exists(src):
|
||||
if os.path.isdir(src):
|
||||
rel_root = os.path.abspath(os.path.join(src, os.pardir))
|
||||
for dir_name, sub_dirs, files in os.walk(src):
|
||||
for filename in files:
|
||||
abs_name = os.path.abspath(os.path.join(dir_name, filename))
|
||||
arc_name = os.path.join(os.path.relpath(dir_name, rel_root), filename)
|
||||
archived_files.append(arc_name)
|
||||
zf.write(abs_name, arc_name)
|
||||
else:
|
||||
archived_files.append(src)
|
||||
zf.write(src)
|
||||
if not os.path.isabs(src):
|
||||
raise SaltInvocationError(
|
||||
'Relative paths require the \'cwd\' parameter'
|
||||
)
|
||||
else:
|
||||
def _bad_cwd():
|
||||
raise SaltInvocationError('cwd must be absolute')
|
||||
try:
|
||||
if not os.path.isabs(cwd):
|
||||
_bad_cwd()
|
||||
except AttributeError:
|
||||
_bad_cwd()
|
||||
|
||||
if runas and (euid != uinfo['uid'] or egid != uinfo['gid']):
|
||||
# Change the egid first, as changing it after the euid will fail
|
||||
# if the runas user is non-privileged.
|
||||
os.setegid(uinfo['gid'])
|
||||
os.seteuid(uinfo['uid'])
|
||||
|
||||
try:
|
||||
exc = None
|
||||
archived_files = []
|
||||
with zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED) as zfile:
|
||||
for src in sources:
|
||||
if cwd:
|
||||
src = os.path.join(cwd, src)
|
||||
if os.path.exists(src):
|
||||
if os.path.isabs(src):
|
||||
rel_root = '/'
|
||||
else:
|
||||
rel_root = cwd if cwd is not None else '/'
|
||||
if os.path.isdir(src):
|
||||
for dir_name, sub_dirs, files in os.walk(src):
|
||||
if cwd and dir_name.startswith(cwd):
|
||||
arc_dir = salt.utils.relpath(dir_name, cwd)
|
||||
else:
|
||||
arc_dir = salt.utils.relpath(dir_name,
|
||||
rel_root)
|
||||
if arc_dir:
|
||||
archived_files.append(arc_dir + '/')
|
||||
zfile.write(dir_name, arc_dir)
|
||||
for filename in files:
|
||||
abs_name = os.path.join(dir_name, filename)
|
||||
arc_name = os.path.join(arc_dir, filename)
|
||||
archived_files.append(arc_name)
|
||||
zfile.write(abs_name, arc_name)
|
||||
else:
|
||||
if cwd and src.startswith(cwd):
|
||||
arc_name = salt.utils.relpath(src, cwd)
|
||||
else:
|
||||
arc_name = salt.utils.relpath(src, rel_root)
|
||||
archived_files.append(arc_name)
|
||||
zfile.write(src, arc_name)
|
||||
except Exception as exc:
|
||||
pass
|
||||
finally:
|
||||
# Restore the euid/egid
|
||||
if runas:
|
||||
os.seteuid(euid)
|
||||
os.setegid(egid)
|
||||
if exc is not None:
|
||||
# Wait to raise the exception until euid/egid are restored to avoid
|
||||
# permission errors in writing to minion log.
|
||||
raise CommandExecutionError(
|
||||
'Exception encountered creating zipfile: {0}'.format(exc)
|
||||
)
|
||||
|
||||
return archived_files
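The cwd handling implemented above decides the member names written into the zip. A quick, hedged illustration of the same relative-path arithmetic using only the standard library (salt.utils.relpath defers to os.path.relpath on Python 2.7+, per the helper added later in this commit; the paths are hypothetical):

    import os

    # With cwd='/foo/bar' and source 'baz.txt', the member is stored as 'baz.txt'.
    print(os.path.relpath('/foo/bar/baz.txt', '/foo/bar'))  # -> baz.txt
    # With an absolute source and no cwd, rel_root falls back to '/', so the
    # leading directories are kept in the member name.
    print(os.path.relpath('/foo/bar/baz.txt', '/'))         # -> foo/bar/baz.txt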
|
||||
|
||||
|
||||
@decorators.which('unzip')
|
||||
def cmd_unzip_(zip_file, dest, excludes=None, template=None, options=None, runas=None):
|
||||
@salt.utils.decorators.which('unzip')
|
||||
def cmd_unzip(zip_file, dest, excludes=None,
|
||||
template=None, options=None, runas=None):
|
||||
'''
|
||||
Uses the unzip command to unpack zip files
|
||||
.. versionadded:: 2015.2.0
|
||||
In versions 2014.7.x and earlier, this function was known as
|
||||
``archive.unzip``.
|
||||
|
||||
Uses the ``unzip`` command to unpack zip files. This command is part of the
|
||||
`Info-ZIP`_ suite of tools, and is typically packaged as simply ``unzip``.
|
||||
|
||||
.. _`Info-ZIP`: http://www.info-zip.org/
|
||||
|
||||
zip_file
|
||||
Path of zip file to be unpacked
|
||||
@ -281,8 +388,9 @@ def cmd_unzip_(zip_file, dest, excludes=None, template=None, options=None, runas
|
||||
dest
|
||||
The destination directory into which the file should be unpacked
|
||||
|
||||
options : None
|
||||
Options to pass to the ``unzip`` binary
|
||||
excludes : None
|
||||
Comma-separated list of files not to unpack. Can also be passed in a
|
||||
Python list.
|
||||
|
||||
template : None
|
||||
Can be set to 'jinja' or another supported template engine to render
|
||||
@ -290,16 +398,31 @@ def cmd_unzip_(zip_file, dest, excludes=None, template=None, options=None, runas
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.unzip template=jinja /tmp/zipfile.zip /tmp/{{grains.id}}/ excludes=file_1,file_2
|
||||
salt '*' archive.cmd_unzip template=jinja /tmp/zipfile.zip /tmp/{{grains.id}}/ excludes=file_1,file_2
|
||||
|
||||
options : None
|
||||
Additional command-line options to pass to the ``unzip`` binary.
|
||||
|
||||
runas : None
|
||||
Unpack the zip file as the specified user. Defaults to the user under
|
||||
which the minion is running.
|
||||
|
||||
.. versionadded:: 2015.2.0
|
||||
|
||||
options : None
|
||||
Additional command-line options to pass to the ``unzip`` binary.
|
||||
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.unzip /tmp/zipfile.zip /home/strongbad/ excludes=file_1,file_2
|
||||
salt '*' archive.cmd_unzip /tmp/zipfile.zip /home/strongbad/ excludes=file_1,file_2
|
||||
'''
|
||||
if isinstance(excludes, string_types):
|
||||
excludes = [entry.strip() for entry in excludes.split(',')]
|
||||
excludes = [x.strip() for x in excludes.split(',')]
|
||||
elif isinstance(excludes, (float, integer_types)):
|
||||
excludes = [str(excludes)]
|
||||
|
||||
cmd = ['unzip']
|
||||
if options:
|
||||
@ -318,50 +441,106 @@ def cmd_unzip_(zip_file, dest, excludes=None, template=None, options=None, runas
|
||||
cmd.extend(excludes)
|
||||
return __salt__['cmd.run'](cmd,
|
||||
template=template,
|
||||
runas=runas,
|
||||
python_shell=False).splitlines()
|
||||
|
||||
|
||||
@decorators.depends('zipfile', fallback_function=cmd_unzip_)
|
||||
def unzip(archive, dest, excludes=None, template=None, options=None, runas=None):
|
||||
@salt.utils.decorators.depends('zipfile', fallback_function=cmd_unzip)
|
||||
def unzip(zip_file, dest, excludes=None, template=None, runas=None):
|
||||
'''
|
||||
Uses the zipfile module to unpack zip files
|
||||
Uses the ``zipfile`` Python module to unpack zip files
|
||||
|
||||
options:
|
||||
Options to pass to the ``unzip`` binary.
|
||||
.. versionchanged:: 2015.2.0
|
||||
This function was rewritten to use Python's native zip file support.
|
||||
The old functionality has been preserved in the new function
|
||||
:mod:`archive.cmd_unzip <salt.modules.archive.cmd_unzip>`. For versions
|
||||
2014.7.x and earlier, see the :mod:`archive.cmd_zip
|
||||
<salt.modules.archive.cmd_zip>` documentation.
|
||||
|
||||
zip_file
|
||||
Path of zip file to be unpacked
|
||||
|
||||
dest
|
||||
The destination directory into which the file should be unpacked
|
||||
|
||||
excludes : None
|
||||
Comma-separated list of files not to unpack. Can also be passed in a
|
||||
Python list.
|
||||
|
||||
template : None
|
||||
Can be set to 'jinja' or another supported template engine to render
|
||||
the command arguments before execution:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.unzip template=jinja /tmp/zipfile.zip /tmp/{{grains.id}}/ excludes=file_1,file_2
|
||||
|
||||
runas : None
|
||||
Unpack the zip file as the specified user. Defaults to the user under
|
||||
which the minion is running.
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.unzip /tmp/zipfile.zip /home/strongbad/ excludes=file_1,file_2
|
||||
|
||||
The template arg can be set to 'jinja' or another supported template
|
||||
engine to render the command arguments before execution.
|
||||
|
||||
For example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' archive.unzip template=jinja /tmp/zipfile.zip /tmp/{{grains.id}}/ excludes=file_1,file_2
|
||||
|
||||
'''
|
||||
(archive, dest) = _render_filenames(archive, dest, None, template)
|
||||
with zipfile.ZipFile(archive) as zf:
|
||||
files = zf.namelist()
|
||||
if excludes is None:
|
||||
zf.extractall(dest)
|
||||
return files
|
||||
if runas:
|
||||
euid = os.geteuid()
|
||||
egid = os.getegid()
|
||||
uinfo = __salt__['user.info'](runas)
|
||||
if not uinfo:
|
||||
raise SaltInvocationError(
|
||||
'User \'{0}\' does not exist'.format(runas)
|
||||
)
|
||||
|
||||
if not isinstance(excludes, list):
|
||||
excludes = excludes.split(",")
|
||||
cleaned_files = [x for x in files if x not in excludes]
|
||||
for f in cleaned_files:
|
||||
if f not in excludes:
|
||||
zf.extract(f, dest)
|
||||
return cleaned_files
|
||||
zip_file, dest = _render_filenames(zip_file, dest, None, template)
|
||||
|
||||
if runas and (euid != uinfo['uid'] or egid != uinfo['gid']):
|
||||
# Change the egid first, as changing it after the euid will fail
|
||||
# if the runas user is non-privileged.
|
||||
os.setegid(uinfo['gid'])
|
||||
os.seteuid(uinfo['uid'])
|
||||
|
||||
try:
|
||||
exc = None
|
||||
# Define cleaned_files here so that an exception will not prevent this
|
||||
# variable from being defined and cause a NameError in the return
|
||||
# statement at the end of the function.
|
||||
cleaned_files = []
|
||||
with zipfile.ZipFile(zip_file) as zfile:
|
||||
files = zfile.namelist()
|
||||
if excludes is None:
|
||||
zfile.extractall(dest)
|
||||
return files
|
||||
|
||||
if isinstance(excludes, string_types):
|
||||
excludes = [x.strip() for x in excludes.split(',')]
|
||||
elif isinstance(excludes, (float, integer_types)):
|
||||
excludes = [str(excludes)]
|
||||
|
||||
cleaned_files.extend([x for x in files if x not in excludes])
|
||||
for target in cleaned_files:
|
||||
if target not in excludes:
|
||||
zfile.extract(target, dest)
|
||||
except Exception as exc:
|
||||
pass
|
||||
finally:
|
||||
# Restore the euid/egid
|
||||
if runas:
|
||||
os.seteuid(euid)
|
||||
os.setegid(egid)
|
||||
if exc is not None:
|
||||
# Wait to raise the exception until euid/egid are restored to avoid
|
||||
# permission errors in writing to minion log.
|
||||
raise CommandExecutionError(
|
||||
'Exception encountered unpacking zipfile: {0}'.format(exc)
|
||||
)
|
||||
|
||||
return cleaned_files
|
||||
|
||||
|
||||
@decorators.which('rar')
|
||||
@salt.utils.decorators.which('rar')
|
||||
def rar(rarfile, sources, template=None, cwd=None, runas=None):
|
||||
'''
|
||||
Uses `rar for Linux`_ to create rar files
|
||||
@ -373,7 +552,7 @@ def rar(rarfile, sources, template=None, cwd=None, runas=None):
|
||||
|
||||
sources
|
||||
Comma-separated list of sources to include in the rar file. Sources can
|
||||
also be passed in a python list.
|
||||
also be passed in a Python list.
|
||||
|
||||
cwd : None
|
||||
Run the rar command from the specified directory. Use this argument
|
||||
@ -409,7 +588,7 @@ def rar(rarfile, sources, template=None, cwd=None, runas=None):
|
||||
python_shell=False).splitlines()
|
||||
|
||||
|
||||
@decorators.which_bin(('unrar', 'rar'))
|
||||
@salt.utils.decorators.which_bin(('unrar', 'rar'))
|
||||
def unrar(rarfile, dest, excludes=None, template=None, runas=None):
|
||||
'''
|
||||
Uses `rar for Linux`_ to unpack rar files
|
||||
@ -440,7 +619,8 @@ def unrar(rarfile, dest, excludes=None, template=None, runas=None):
|
||||
if isinstance(excludes, string_types):
|
||||
excludes = [entry.strip() for entry in excludes.split(',')]
|
||||
|
||||
cmd = [_which_bin(('unrar', 'rar')), 'x', '-idp', '{0}'.format(rarfile)]
|
||||
cmd = [salt.utils.which_bin(('unrar', 'rar')),
|
||||
'x', '-idp', '{0}'.format(rarfile)]
|
||||
if excludes is not None:
|
||||
for exclude in excludes:
|
||||
cmd.extend(['-x', '{0}'.format(exclude)])
|
||||
|
@ -67,14 +67,10 @@ def __virtual__():
|
||||
# which was added in boto 2.8.0
|
||||
# https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12
|
||||
if not HAS_BOTO:
|
||||
log.debug('The boto_vpc module requires boto {0} to be installed.'.format(required_boto_version))
|
||||
return False
|
||||
elif _LooseVersion(boto.__version__) < _LooseVersion(required_boto_version):
|
||||
log.debug('The boto_vpc module requires boto {0} to be installed. Current boto version: {1}'.format(
|
||||
required_boto_version, boto.__version__))
|
||||
return False
|
||||
else:
|
||||
log.debug('Installed boto version: {0}'.format(boto.__version__))
|
||||
return True
|
||||
|
||||
|
||||
|
@ -179,7 +179,7 @@ def column_families(keyspace=None):
|
||||
return ret
|
||||
|
||||
|
||||
def column_family_definition(keyspace=None, column_family=None):
|
||||
def column_family_definition(keyspace, column_family):
|
||||
'''
|
||||
Return a dictionary of column family definitions for the given
|
||||
keyspace/column_family
|
||||
|
@ -215,6 +215,8 @@ def _run(cmd,
|
||||
if not os.path.isfile(shell) or not os.access(shell, os.X_OK):
|
||||
msg = 'The shell {0} is not available'.format(shell)
|
||||
raise CommandExecutionError(msg)
|
||||
if salt.utils.is_windows() and use_vt: # Memozation so not much overhead
|
||||
raise CommandExecutionError('VT not available on windows')
|
||||
|
||||
if shell.lower().strip() == 'powershell':
|
||||
# If we were called by script(), then fakeout the Windows
|
||||
|
@ -6,11 +6,21 @@ from __future__ import absolute_import
|
||||
|
||||
# Import python libs
|
||||
import os
|
||||
import re
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
|
||||
# Define the module's virtual name
|
||||
__virtualname__ = 'kmod'
|
||||
|
||||
|
||||
_LOAD_MODULE = '{0}_load="YES"'
|
||||
_LOADER_CONF = '/boot/loader.conf'
|
||||
_MODULE_RE = '^{0}_load="YES"'
|
||||
_MODULES_RE = r'^(\w+)_load="YES"'
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
Only runs on FreeBSD systems
|
||||
@ -46,37 +56,45 @@ def _rm_mods(pre_mods, post_mods):
|
||||
return pre - post
|
||||
|
||||
|
||||
def _get_module_name(line):
|
||||
match = re.search(_MODULES_RE, line)
|
||||
if match:
|
||||
return match.group(1)
|
||||
return None
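The helper above applies the new _MODULES_RE pattern to pull the module name out of a loader.conf line of the form <name>_load="YES". A quick check of that regex against a sample line (the module name is a made-up example):

    import re

    _MODULES_RE = r'^(\w+)_load="YES"'

    line = 'vmm_load="YES"'
    match = re.search(_MODULES_RE, line)
    print(match.group(1) if match else None)  # -> vmm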
|
||||
|
||||
|
||||
def _get_persistent_modules():
|
||||
'''
|
||||
Returns a list of modules in loader.conf that load on boot.
|
||||
'''
|
||||
mods = set()
|
||||
response = __salt__['cmd.run_all']('sysrc -niq kld_list')
|
||||
if response['retcode'] == 0:
|
||||
for mod in response['stdout'].split():
|
||||
mods.add(mod)
|
||||
with salt.utils.fopen(_LOADER_CONF, 'r') as loader_conf:
|
||||
for line in loader_conf:
|
||||
line = line.strip()
|
||||
mod_name = _get_module_name(line)
|
||||
if mod_name:
|
||||
mods.add(mod_name)
|
||||
return mods
|
||||
|
||||
|
||||
def _set_persistent_module(mod):
|
||||
'''
|
||||
Add a module to sysrc to make it persistent.
|
||||
Add a module to loader.conf to make it persistent.
|
||||
'''
|
||||
if not mod or mod in mod_list(True) or mod not in \
|
||||
available():
|
||||
return set()
|
||||
mods = _get_persistent_modules()
|
||||
mods.add(mod)
|
||||
__salt__['cmd.run_all']("sysrc kld_list='{0}'".format(' '.join(mods)))
|
||||
__salt__['file.append'](_LOADER_CONF, _LOAD_MODULE.format(mod))
|
||||
return set([mod])
|
||||
|
||||
|
||||
def _remove_persistent_module(mod):
|
||||
'''
|
||||
Remove module from sysrc.
|
||||
Remove module from loader.conf.
|
||||
'''
|
||||
if not mod or mod not in mod_list(True):
|
||||
return set()
|
||||
mods = _get_persistent_modules()
|
||||
mods.remove(mod)
|
||||
__salt__['cmd.run_all']("sysrc kld_list='{0}'".format(' '.join(mods)))
|
||||
__salt__['file.sed'](_LOADER_CONF, _MODULE_RE.format(mod), '')
|
||||
return set([mod])
|
||||
|
||||
|
||||
@ -179,7 +197,8 @@ def load(mod, persist=False):
|
||||
salt '*' kmod.load bhyve
|
||||
'''
|
||||
pre_mods = lsmod()
|
||||
response = __salt__['cmd.run_all']('kldload {0}'.format(mod))
|
||||
response = __salt__['cmd.run_all']('kldload {0}'.format(mod),
|
||||
python_shell=False)
|
||||
if response['retcode'] == 0:
|
||||
post_mods = lsmod()
|
||||
mods = _new_mods(pre_mods, post_mods)
|
||||
@ -222,7 +241,8 @@ def remove(mod, persist=False):
|
||||
salt '*' kmod.remove vmm
|
||||
'''
|
||||
pre_mods = lsmod()
|
||||
__salt__['cmd.run_all']('kldunload {0}'.format(mod))
|
||||
__salt__['cmd.run_all']('kldunload {0}'.format(mod),
|
||||
python_shell=False)
|
||||
post_mods = lsmod()
|
||||
mods = _rm_mods(pre_mods, post_mods)
|
||||
persist_mods = set()
|
||||
|
@ -143,7 +143,7 @@ def build_rule(table=None, chain=None, command=None, position='', full=None, fam
|
||||
|
||||
rule = ''
|
||||
proto = False
|
||||
bang_not_pat = re.compile(r'[!|not]\s?')
|
||||
bang_not_pat = re.compile(r'(!|not)\s?')
|
||||
|
||||
if 'if' in kwargs:
|
||||
if kwargs['if'].startswith('!') or kwargs['if'].startswith('not'):
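The regex fix in this hunk matters because '[!|not]' is a character class (any single one of the characters !, |, n, o, t), while '(!|not)' is an alternation matching the literal token '!' or 'not', which is what the rule builder intends. A short demonstration of the corrected pattern:

    import re

    bang_not_pat = re.compile(r'(!|not)\s?')
    # Strips only the negation marker, leaving the interface name intact.
    print(bang_not_pat.sub('', 'not eth0'))  # -> eth0
    print(bang_not_pat.sub('', '!eth0'))     # -> eth0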
|
||||
|
@ -44,7 +44,7 @@ SCHEDULE_CONF = [
|
||||
'days',
|
||||
'enabled',
|
||||
'cron'
|
||||
]
|
||||
]
|
||||
|
||||
|
||||
def list_(show_all=False, return_yaml=True):
|
||||
@ -77,7 +77,6 @@ def list_(show_all=False, return_yaml=True):
|
||||
for item in schedule[job]:
|
||||
if item not in SCHEDULE_CONF:
|
||||
del schedule[job][item]
|
||||
continue
|
||||
if schedule[job][item] == 'true':
|
||||
schedule[job][item] = True
|
||||
if schedule[job][item] == 'false':
|
||||
|
@ -89,8 +89,6 @@ class RemotePillar(object):
|
||||
load['ext'] = self.ext
|
||||
ret_pillar = self.channel.crypted_transfer_decode_dictentry(load,
|
||||
dictkey='pillar',
|
||||
tries=3,
|
||||
timeout=7200,
|
||||
)
|
||||
|
||||
if not isinstance(ret_pillar, dict):
|
||||
|
@ -1251,7 +1251,7 @@ def managed(name,
|
||||
'''
|
||||
name = os.path.expanduser(name)
|
||||
# contents must be a string
|
||||
if contents:
|
||||
if contents is not None:
|
||||
contents = str(contents)
|
||||
|
||||
# Make sure that leading zeros stripped by YAML loader are added back
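The truth-test change above ('if contents:' to 'if contents is not None:') is the difference between skipping and honoring an explicitly empty file body, since an empty string is falsey. A two-line illustration:

    contents = ''
    print(bool(contents))        # False -> the old check skipped the str() coercion
    print(contents is not None)  # True  -> the new check still applies it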
|
||||
|
@ -2300,3 +2300,32 @@ def sdecode(string_):
|
||||
except UnicodeDecodeError:
|
||||
continue
|
||||
return string_
|
||||
|
||||
|
||||
def relpath(path, start='.'):
|
||||
'''
|
||||
Work around Python bug #5117, which is not (and will not be) patched in
|
||||
Python 2.6 (http://bugs.python.org/issue5117)
|
||||
'''
|
||||
if sys.version_info < (2, 7) and 'posix' in sys.builtin_module_names:
|
||||
# The below code block is based on posixpath.relpath from Python 2.7,
|
||||
# which has the fix for this bug.
|
||||
if not path:
|
||||
raise ValueError('no path specified')
|
||||
|
||||
start_list = [
|
||||
x for x in os.path.abspath(start).split(os.path.sep) if x
|
||||
]
|
||||
path_list = [
|
||||
x for x in os.path.abspath(path).split(os.path.sep) if x
|
||||
]
|
||||
|
||||
# work out how much of the filepath is shared by start and path.
|
||||
i = len(os.path.commonprefix([start_list, path_list]))
|
||||
|
||||
rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
|
||||
if not rel_list:
|
||||
return os.path.curdir
|
||||
return os.path.join(*rel_list)
|
||||
|
||||
return os.path.relpath(path, start=start)
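The relpath() helper above backports the CPython 2.7 behavior so results are consistent whether or not the interpreter is affected by bug #5117. The semantics it preserves, shown with hypothetical paths:

    import os

    # On Python >= 2.7 this is exactly os.path.relpath; the backported branch
    # reproduces the same results on 2.6 POSIX builds, including the
    # curdir case handled explicitly in the code above.
    print(os.path.relpath('/srv/salt/files/app.conf', '/srv/salt'))  # -> files/app.conf
    print(os.path.relpath('/srv/salt', '/srv/salt'))                 # -> .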
|
||||
|
@ -303,13 +303,6 @@ def bootstrap(vm_, opts):
|
||||
if stat.S_ISSOCK(os.stat(os.environ['SSH_AUTH_SOCK']).st_mode):
|
||||
has_ssh_agent = True
|
||||
|
||||
if key_filename is None and salt.utils.which('sshpass') is None and has_ssh_agent is False:
|
||||
raise SaltCloudSystemExit(
|
||||
'Cannot deploy salt in a VM if the \'ssh_keyfile\' setting '
|
||||
'is not set and \'sshpass\' binary is not present on the '
|
||||
'system for the password.'
|
||||
)
|
||||
|
||||
if key_filename is None and ('password' not in vm_ or not vm_['password']) and has_ssh_agent is False:
|
||||
raise SaltCloudSystemExit(
|
||||
'Cannot deploy salt in a VM if the \'ssh_keyfile\' setting '
|
||||
|
@ -13,6 +13,21 @@ import logging
|
||||
import salt.ext.six.moves.http_cookiejar # pylint: disable=E0611
|
||||
from salt._compat import ElementTree as ET
|
||||
|
||||
import ssl
|
||||
try:
|
||||
from ssl import CertificateError # pylint: disable=E0611
|
||||
from ssl import match_hostname # pylint: disable=E0611
|
||||
HAS_MATCHHOSTNAME = True
|
||||
except ImportError:
|
||||
try:
|
||||
from backports.ssl_match_hostname import CertificateError
|
||||
from backports.ssl_match_hostname import match_hostname
|
||||
HAS_MATCHHOSTNAME = True
|
||||
except ImportError:
|
||||
HAS_MATCHHOSTNAME = False
|
||||
import socket
|
||||
import urllib2
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils
|
||||
import salt.utils.xmlutil as xml
|
||||
@ -22,7 +37,11 @@ from salt.template import compile_template
|
||||
from salt import syspaths
|
||||
|
||||
# Import 3rd party libs
|
||||
import requests
|
||||
try:
|
||||
import requests
|
||||
HAS_REQUESTS = True
|
||||
except ImportError:
|
||||
HAS_REQUESTS = False
|
||||
import msgpack
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@ -58,15 +77,31 @@ def query(url,
|
||||
test=False,
|
||||
test_url=None,
|
||||
node='minion',
|
||||
port=80,
|
||||
opts=None,
|
||||
requests_lib=None,
|
||||
ca_bundle=None,
|
||||
**kwargs):
|
||||
'''
|
||||
Query a resource, and decode the return data
|
||||
'''
|
||||
ret = {}
|
||||
|
||||
requests_log = logging.getLogger('requests')
|
||||
requests_log.setLevel(logging.WARNING)
|
||||
if requests_lib is None:
|
||||
requests_lib = opts.get('requests_lib', False)
|
||||
|
||||
if requests_lib is True:
|
||||
if HAS_REQUESTS is False:
|
||||
ret['error'] = ('http.query has been set to use requests, but the '
|
||||
'requests library does not seem to be installed')
|
||||
log.error(ret['error'])
|
||||
return ret
|
||||
else:
|
||||
requests_log = logging.getLogger('requests')
|
||||
requests_log.setLevel(logging.WARNING)
|
||||
|
||||
if ca_bundle is None:
|
||||
ca_bundle = get_ca_bundle(opts)
|
||||
|
||||
if opts is None:
|
||||
if node == 'master':
|
||||
@ -122,20 +157,24 @@ def query(url,
|
||||
else:
|
||||
auth = None
|
||||
|
||||
sess = requests.Session()
|
||||
sess.auth = auth
|
||||
sess.headers.update(header_dict)
|
||||
log.trace('Request Headers: {0}'.format(sess.headers))
|
||||
if requests_lib is True:
|
||||
sess = requests.Session()
|
||||
sess.auth = auth
|
||||
sess.headers.update(header_dict)
|
||||
log.trace('Request Headers: {0}'.format(sess.headers))
|
||||
sess_cookies = sess.cookies
|
||||
else:
|
||||
sess_cookies = None
|
||||
|
||||
if cookies is not None:
|
||||
if cookie_format == 'mozilla':
|
||||
sess.cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(cookie_jar)
|
||||
sess_cookies = salt.ext.six.moves.http_cookiejar.MozillaCookieJar(cookie_jar)
|
||||
else:
|
||||
sess.cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(cookie_jar)
|
||||
sess_cookies = salt.ext.six.moves.http_cookiejar.LWPCookieJar(cookie_jar)
|
||||
if not os.path.isfile(cookie_jar):
|
||||
sess.cookies.save()
|
||||
sess_cookies.save()
|
||||
else:
|
||||
sess.cookies.load()
|
||||
sess_cookies.load()
|
||||
|
||||
if test is True:
|
||||
if test_url is None:
|
||||
@ -144,36 +183,85 @@ def query(url,
|
||||
url = test_url
|
||||
ret['test'] = True
|
||||
|
||||
result = sess.request(
|
||||
method, url, params=params, data=data
|
||||
)
|
||||
log.debug('Response Status Code: {0}'.format(result.status_code))
|
||||
log.trace('Response Headers: {0}'.format(result.headers))
|
||||
log.trace('Response Text: {0}'.format(result.text))
|
||||
log.trace('Response Cookies: {0}'.format(result.cookies.get_dict()))
|
||||
if requests_lib is True:
|
||||
result = sess.request(
|
||||
method, url, params=params, data=data
|
||||
)
|
||||
result_status_code = result.status_code
|
||||
result_headers = result.headers
|
||||
result_text = result.text
|
||||
result_cookies = result.cookies
|
||||
else:
|
||||
request = urllib2.Request(url)
|
||||
|
||||
if url.startswith('https') or port == 443:
|
||||
if not HAS_MATCHHOSTNAME:
|
||||
log.warn(('match_hostname() not available, SSL hostname '
|
||||
'checking not available. THIS CONNECTION MAY NOT BE SECURE!'))
|
||||
else:
|
||||
hostname = request.get_host()
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
sock.connect((hostname, 443))
|
||||
sockwrap = ssl.wrap_socket(
|
||||
sock,
|
||||
ca_certs=ca_bundle,
|
||||
cert_reqs=ssl.CERT_REQUIRED
|
||||
)
|
||||
try:
|
||||
match_hostname(sockwrap.getpeercert(), hostname)
|
||||
except CertificateError as exc:
|
||||
ret['error'] = (
|
||||
'The certificate was invalid. '
|
||||
'Error returned was: {0}'.format(
|
||||
pprint.pformat(exc)
|
||||
)
|
||||
)
|
||||
return ret
|
||||
|
||||
opener = urllib2.build_opener(
|
||||
urllib2.HTTPHandler,
|
||||
urllib2.HTTPCookieProcessor(sess_cookies)
|
||||
)
|
||||
for header in header_dict:
|
||||
request.add_header(header, header_dict[header])
|
||||
request.get_method = lambda: method
|
||||
result = opener.open(request)
|
||||
|
||||
result_status_code = result.code
|
||||
result_headers = result.headers.headers
|
||||
result_text = result.read()
|
||||
|
||||
log.debug('Response Status Code: {0}'.format(result_status_code))
|
||||
log.trace('Response Headers: {0}'.format(result_headers))
|
||||
log.trace('Response Cookies: {0}'.format(sess_cookies))
|
||||
try:
|
||||
log.trace('Response Text: {0}'.format(result_text))
|
||||
except UnicodeEncodeError as exc:
|
||||
log.trace(('Cannot Trace Log Response Text: {0}. This may be due to '
|
||||
'incompatibilities between requests and logging.').format(exc))
|
||||
|
||||
if cookies is not None:
|
||||
sess.cookies.save()
|
||||
sess_cookies.save()
|
||||
|
||||
if persist_session is True:
|
||||
# TODO: See persist_session above
|
||||
if 'set-cookie' in result.headers:
|
||||
if 'set-cookie' in result_headers:
|
||||
with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
|
||||
session_cookies = result.headers.get('set-cookie', None)
|
||||
session_cookies = result_headers.get('set-cookie', None)
|
||||
if session_cookies is not None:
|
||||
msgpack.dump({'Cookie': session_cookies}, fh_)
|
||||
else:
|
||||
msgpack.dump('', fh_)
|
||||
|
||||
if status is True:
|
||||
ret['status'] = result.status_code
|
||||
ret['status'] = result_status_code
|
||||
|
||||
if headers is True:
|
||||
ret['headers'] = result.headers
|
||||
ret['headers'] = result_headers
|
||||
|
||||
if decode is True:
|
||||
if decode_type == 'auto':
|
||||
content_type = result.headers.get(
|
||||
content_type = result_headers.get(
|
||||
'content-type', 'application/json'
|
||||
)
|
||||
if 'xml' in content_type:
|
||||
@ -195,21 +283,48 @@ def query(url,
|
||||
return ret
|
||||
|
||||
if decode_type == 'json':
|
||||
ret['dict'] = json.loads(result.text)
|
||||
ret['dict'] = json.loads(result_text)
|
||||
elif decode_type == 'xml':
|
||||
ret['dict'] = []
|
||||
items = ET.fromstring(result.text)
|
||||
items = ET.fromstring(result_text)
|
||||
for item in items:
|
||||
ret['dict'].append(xml.to_dict(item))
|
||||
else:
|
||||
text = True
|
||||
|
||||
if text is True:
|
||||
ret['text'] = result.text
|
||||
ret['text'] = result_text
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def get_ca_bundle(opts):
|
||||
'''
|
||||
Return the location of the ca bundle file. See the following article:
|
||||
|
||||
http://tinyurl.com/k7rx42a
|
||||
'''
|
||||
if hasattr(get_ca_bundle, '__return_value__'):
|
||||
return get_ca_bundle.__return_value__
|
||||
|
||||
opts_bundle = opts.get('ca_bundle', None)
|
||||
if opts_bundle is not None and os.path.exists(opts_bundle):
|
||||
return opts_bundle
|
||||
|
||||
for path in (
|
||||
'/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem',
|
||||
'/etc/pki/tls/certs/ca-bundle.crt',
|
||||
'/etc/pki/tls/certs/ca-bundle.trust.crt',
|
||||
'/etc/ssl/certs/ca-bundle.crt',
|
||||
'/etc/ssl/certs/ca-certificates.crt',
|
||||
'/var/lib/ca-certificates/ca-bundle.pem',
|
||||
):
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _render(template, render, renderer, template_dict, opts):
|
||||
'''
|
||||
Render a template
|
||||
|
@ -19,11 +19,6 @@ import requests
|
||||
import salt.ext.six as six
|
||||
|
||||
# pylint: disable=import-error,no-name-in-module
|
||||
try:
|
||||
import msgpack
|
||||
HAS_MSGPACK = True
|
||||
except ImportError:
|
||||
HAS_MSGPACK = False
|
||||
try:
|
||||
import certifi
|
||||
HAS_CERTIFI = True
|
||||
@ -124,9 +119,6 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
|
||||
os.path.dirname(yaml.__file__),
|
||||
os.path.dirname(requests.__file__)
|
||||
]
|
||||
if HAS_MSGPACK:
|
||||
tops.append(os.path.dirname(msgpack.__file__))
|
||||
|
||||
if HAS_URLLIB3:
|
||||
tops.append(os.path.dirname(urllib3.__file__))
|
||||
|
||||
|
@ -91,8 +91,8 @@ class TestSaltAPIHandler(SaltnadoTestCase):
|
||||
body=json.dumps(low),
|
||||
headers={'Content-Type': self.content_type_map['json'],
|
||||
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
)
|
||||
response_obj = json.loads(response.body)
|
||||
self.assertEqual(response_obj['return'], [{'minion': True, 'sub_minion': True}])
|
||||
@ -110,8 +110,8 @@ class TestSaltAPIHandler(SaltnadoTestCase):
|
||||
body=json.dumps(low),
|
||||
headers={'Content-Type': self.content_type_map['json'],
|
||||
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
)
|
||||
response_obj = json.loads(response.body)
|
||||
self.assertEqual(response_obj['return'], ["No minions matched the target. No command was sent, no jid was assigned."])
|
||||
@ -130,8 +130,8 @@ class TestSaltAPIHandler(SaltnadoTestCase):
|
||||
body=json.dumps(low),
|
||||
headers={'Content-Type': self.content_type_map['json'],
|
||||
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
)
|
||||
response_obj = json.loads(response.body)
|
||||
self.assertEqual(response_obj['return'], [{'minion': True, 'sub_minion': True}])
|
||||
@ -151,8 +151,8 @@ class TestSaltAPIHandler(SaltnadoTestCase):
|
||||
body=json.dumps(low),
|
||||
headers={'Content-Type': self.content_type_map['json'],
|
||||
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
)
|
||||
response_obj = json.loads(response.body)
|
||||
self.assertEqual(response_obj['return'], [{'minion': True, 'sub_minion': True}])
|
||||
@ -170,8 +170,8 @@ class TestSaltAPIHandler(SaltnadoTestCase):
|
||||
body=json.dumps(low),
|
||||
headers={'Content-Type': self.content_type_map['json'],
|
||||
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
)
|
||||
response_obj = json.loads(response.body)
|
||||
self.assertEqual(response_obj['return'], [{}])
|
||||
@ -270,8 +270,8 @@ class TestSaltAPIHandler(SaltnadoTestCase):
|
||||
body=json.dumps(low),
|
||||
headers={'Content-Type': self.content_type_map['json'],
|
||||
saltnado.AUTH_TOKEN_HEADER: self.token['token']},
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
connect_timeout=30,
|
||||
request_timeout=30,
|
||||
)
|
||||
response_obj = json.loads(response.body)
|
||||
self.assertEqual(response_obj['return'], [['minion', 'sub_minion']])
|
||||
|
@ -16,6 +16,7 @@ ensure_in_syspath('../../')
|
||||
# Import salt libs
from salt.modules import archive
from salt.exceptions import CommandNotFoundError
from salt.utils import which_bin


class ZipFileMock(MagicMock):
@ -129,28 +130,28 @@ class ArchiveTestCase(TestCase):
    def test_cmd_zip(self):
        mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            ret = archive.cmd_zip_(
            ret = archive.cmd_zip(
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/tmpePe8yO,/tmp/tmpLeSw1A',
                template='jinja'
            )
            self.assertEqual(['salt'], ret)
            mock.assert_called_once_with(
                ['zip', '/tmp/salt.{{grains.id}}.zip',
                ['zip', '-r', '/tmp/salt.{{grains.id}}.zip',
                 '/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
                runas=None, python_shell=False, template='jinja', cwd=None
            )

        mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            ret = archive.cmd_zip_(
            ret = archive.cmd_zip(
                '/tmp/salt.{{grains.id}}.zip',
                ['/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
                template='jinja'
            )
            self.assertEqual(['salt'], ret)
            mock.assert_called_once_with(
                ['zip', '/tmp/salt.{{grains.id}}.zip',
                ['zip', '-r', '/tmp/salt.{{grains.id}}.zip',
                 '/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
                runas=None, python_shell=False, template='jinja', cwd=None
            )
@ -164,7 +165,7 @@ class ArchiveTestCase(TestCase):
                '/tmp/tmpePe8yO,/tmp/tmpLeSw1A',
                template='jinja'
            )
            self.assertEqual(['/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'], ret)
            self.assertEqual(['tmp/tmpePe8yO', 'tmp/tmpLeSw1A'], ret)

    @patch('salt.utils.which', lambda exe: None)
    def test_zip_raises_exception_if_not_found(self):
@ -172,7 +173,7 @@ class ArchiveTestCase(TestCase):
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            self.assertRaises(
                CommandNotFoundError,
                archive.cmd_zip_,
                archive.cmd_zip,
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/tmpePe8yO,/tmp/tmpLeSw1A',
                template='jinja',
@ -183,22 +184,22 @@ class ArchiveTestCase(TestCase):
    def test_cmd_unzip(self):
        mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            ret = archive.cmd_unzip_(
            ret = archive.cmd_unzip(
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/dest',
                excludes='/tmp/tmpePe8yO,/tmp/tmpLeSw1A',
                runas=None, template='jinja'
                template='jinja'
            )
            self.assertEqual(['salt'], ret)
            mock.assert_called_once_with(
                ['unzip', '/tmp/salt.{{grains.id}}.zip', '-d', '/tmp/dest',
                 '-x', '/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
                python_shell=False, template='jinja'
                runas=None, python_shell=False, template='jinja'
            )

        mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            ret = archive.cmd_unzip_(
            ret = archive.cmd_unzip(
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/dest',
                excludes=['/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
@ -208,12 +209,12 @@ class ArchiveTestCase(TestCase):
            mock.assert_called_once_with(
                ['unzip', '/tmp/salt.{{grains.id}}.zip', '-d', '/tmp/dest',
                 '-x', '/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
                python_shell=False, template='jinja'
                runas=None, python_shell=False, template='jinja'
            )

        mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            ret = archive.cmd_unzip_(
            ret = archive.cmd_unzip(
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/dest',
                excludes='/tmp/tmpePe8yO,/tmp/tmpLeSw1A',
@ -224,12 +225,12 @@ class ArchiveTestCase(TestCase):
            mock.assert_called_once_with(
                ['unzip', '-fo', '/tmp/salt.{{grains.id}}.zip', '-d',
                 '/tmp/dest', '-x', '/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
                python_shell=False, template='jinja'
                runas=None, python_shell=False, template='jinja'
            )

        mock = MagicMock(return_value='salt')
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            ret = archive.cmd_unzip_(
            ret = archive.cmd_unzip(
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/dest',
                excludes=['/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
@ -240,7 +241,7 @@ class ArchiveTestCase(TestCase):
            mock.assert_called_once_with(
                ['unzip', '-fo', '/tmp/salt.{{grains.id}}.zip', '-d',
                 '/tmp/dest', '-x', '/tmp/tmpePe8yO', '/tmp/tmpLeSw1A'],
                python_shell=False, template='jinja'
                runas=None, python_shell=False, template='jinja'
            )

    def test_unzip(self):
@ -260,7 +261,7 @@ class ArchiveTestCase(TestCase):
        with patch.dict(archive.__salt__, {'cmd.run': mock}):
            self.assertRaises(
                CommandNotFoundError,
                archive.cmd_unzip_,
                archive.cmd_unzip,
                '/tmp/salt.{{grains.id}}.zip',
                '/tmp/dest',
                excludes='/tmp/tmpePe8yO,/tmp/tmpLeSw1A',
@ -308,6 +309,7 @@ class ArchiveTestCase(TestCase):
            )
            self.assertFalse(mock.called)

    @skipIf(which_bin(('unrar', 'rar')) is None, 'unrar not installed')
    @patch('salt.utils.which', lambda exe: exe)
    @patch('salt.utils.which_bin', lambda exe: exe)
    def test_unrar(self):
140
tests/unit/modules/cassandra_test.py
Normal file
@ -0,0 +1,140 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Rupesh Tare <rupesht@saltstack.com>`
'''

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
    MagicMock,
    patch,
    NO_MOCK,
    NO_MOCK_REASON
)

# Import Salt Libs
from salt.modules import cassandra


cassandra.__grains__ = {}
cassandra.__salt__ = {}
cassandra.__context__ = {}
cassandra.__opts__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class CassandraTestCase(TestCase):
    '''
    Test cases for salt.modules.cassandra
    '''
    def test_compactionstats(self):
        '''
        Test for Return compactionstats info
        '''
        mock = MagicMock(return_value='A')
        with patch.object(cassandra, '_nodetool', mock):
            self.assertEqual(cassandra.compactionstats(), 'A')

    def test_version(self):
        '''
        Test for Return the cassandra version
        '''
        mock = MagicMock(return_value='A')
        with patch.object(cassandra, '_nodetool', mock):
            self.assertEqual(cassandra.version(), 'A')

    def test_netstats(self):
        '''
        Test for Return netstats info
        '''
        mock = MagicMock(return_value='A')
        with patch.object(cassandra, '_nodetool', mock):
            self.assertEqual(cassandra.netstats(), 'A')

    def test_tpstats(self):
        '''
        Test for Return tpstats info
        '''
        mock = MagicMock(return_value='A')
        with patch.object(cassandra, '_nodetool', mock):
            self.assertEqual(cassandra.tpstats(), 'A')

    def test_info(self):
        '''
        Test for Return cassandra node info
        '''
        mock = MagicMock(return_value='A')
        with patch.object(cassandra, '_nodetool', mock):
            self.assertEqual(cassandra.info(), 'A')

    def test_ring(self):
        '''
        Test for Return ring info
        '''
        mock = MagicMock(return_value='A')
        with patch.object(cassandra, '_nodetool', mock):
            self.assertEqual(cassandra.ring(), 'A')

    def test_keyspaces(self):
        '''
        Test for Return existing keyspaces
        '''
        mock_keyspaces = ['A', 'B', 'C', 'D']

        class MockSystemManager(object):
            def list_keyspaces(self):
                return mock_keyspaces

        mock_sys_mgr = MagicMock(return_value=MockSystemManager())

        with patch.object(cassandra, '_sys_mgr', mock_sys_mgr):
            self.assertEqual(cassandra.keyspaces(), mock_keyspaces)

    def test_column_families(self):
        '''
        Test for Return existing column families for all keyspaces
        '''
        mock_keyspaces = ['A', 'B']

        class MockSystemManager(object):
            def list_keyspaces(self):
                return mock_keyspaces

            def get_keyspace_column_families(self, keyspace):
                if keyspace == 'A':
                    return {'a': 'saltines', 'b': 'biscuits'}
                if keyspace == 'B':
                    return {'c': 'cheese', 'd': 'crackers'}

        mock_sys_mgr = MagicMock(return_value=MockSystemManager())

        with patch.object(cassandra, '_sys_mgr', mock_sys_mgr):
            self.assertEqual(cassandra.column_families('A'),
                             ['a', 'b'])
            self.assertEqual(cassandra.column_families('Z'),
                             None)
            self.assertEqual(cassandra.column_families(),
                             {'A': ['a', 'b'], 'B': ['c', 'd']})

    def test_column_family_definition(self):
        '''
        Test for Return a dictionary of column family definitions for the given
        keyspace/column_family
        '''
        class MockSystemManager(object):
            def get_keyspace_column_families(self, keyspace):
                if keyspace == 'A':
                    return {'a': object, 'b': object}
                if keyspace == 'B':
                    raise Exception

        mock_sys_mgr = MagicMock(return_value=MockSystemManager())

        with patch.object(cassandra, '_sys_mgr', mock_sys_mgr):
            self.assertEqual(cassandra.column_family_definition('A', 'a'), vars(object))
            self.assertEqual(cassandra.column_family_definition('B', 'a'), None)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(CassandraTestCase, needs_daemon=False)
111
tests/unit/modules/ddns_test.py
Normal file
@ -0,0 +1,111 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Rupesh Tare <rupesht@saltstack.com>`
'''

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
    mock_open,
    MagicMock,
    patch,
    NO_MOCK,
    NO_MOCK_REASON
)

# Import Salt Libs
from salt.modules import ddns

try:
    import dns.query
    import dns.tsigkeyring
    dns_support = True
except ImportError as e:
    dns_support = False

import json
# Globals
ddns.__grains__ = {}
ddns.__salt__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class DDNSTestCase(TestCase):
    '''
    TestCase for the salt.modules.ddns module
    '''
    @patch('salt.modules.ddns.update')
    def test_add_host(self, ddns_update):
        '''
        Test cases for Add, replace, or update the A
        and PTR (reverse) records for a host.
        '''
        ddns_update.return_value = False
        self.assertFalse(ddns.add_host(zone='A',
                                       name='B',
                                       ttl=1,
                                       ip='172.27.0.0'))

        ddns_update.return_value = True
        self.assertTrue(ddns.add_host(zone='A',
                                      name='B',
                                      ttl=1,
                                      ip='172.27.0.0'))

    @patch('salt.modules.ddns.delete')
    def test_delete_host(self, ddns_delete):
        '''
        Tests for delete the forward and reverse records for a host.
        '''
        ddns_delete.return_value = False
        with patch.object(dns.query, 'udp') as mock:
            mock.answer = [{'address': 'localhost'}]
            self.assertFalse(ddns.delete_host(zone='A', name='B'))

    def test_update(self):
        '''
        Test to add, replace, or update a DNS record.
        '''
        file_data = json.dumps({'A': 'B'})
        with patch('dns.message.make_query', return_value=True):
            with patch('dns.rdatatype.from_text', return_value=True):
                with patch('dns.rdata.from_text', return_value=True):
                    mock = MagicMock(return_value=True)
                    with patch.dict(ddns.__salt__, {'config.option': mock}):
                        mock = MagicMock(return_value=True)
                        with patch.dict(ddns.__salt__,
                                        {'file.file_exists': mock}):
                            with patch('salt.utils.fopen',
                                       mock_open(read_data=file_data),
                                       create=True):
                                with patch.object(dns.tsigkeyring, 'from_text',
                                                  return_value=True):
                                    with patch.object(dns.query, 'udp') as mock:
                                        mock.answer = [{'address': 'localhost'}]
                                        self.assertFalse(ddns.update(zone='A',
                                                                     name='B',
                                                                     ttl=1,
                                                                     rdtype='C',
                                                                     data='D'))

    def test_delete(self):
        '''
        Test to delete a DNS record.
        '''
        file_data = json.dumps({'A': 'B'})
        with patch.object(dns.query, 'udp') as mock:
            mock.answer = [{'address': 'localhost'}]
            mock = MagicMock(return_value=True)
            with patch.dict(ddns.__salt__, {'config.option': mock}):
                mock = MagicMock(return_value=True)
                with patch.dict(ddns.__salt__, {'file.file_exists': mock}):
                    with patch('salt.utils.fopen',
                               mock_open(read_data=file_data),
                               create=True):
                        with patch.object(dns.tsigkeyring, 'from_text',
                                          return_value=True):
                            self.assertFalse(ddns.delete(zone='A', name='B'))

if __name__ == '__main__':
    from integration import run_tests
    run_tests(DDNSTestCase, needs_daemon=False)
180
tests/unit/modules/debian_service_test.py
Normal file
@ -0,0 +1,180 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Rupesh Tare <rupesht@saltstack.com>`
'''

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
    MagicMock,
    patch,
    NO_MOCK,
    NO_MOCK_REASON
)

# Import Salt Libs
from salt.modules import debian_service


# Globals
debian_service.__grains__ = {}
debian_service.__salt__ = {}
debian_service.__context__ = {}
debian_service.__opts__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class DebianServicesTestCase(TestCase):
    '''
    Test cases for salt.modules.debian_service
    '''
    def test_get_enabled(self):
        '''
        Test for Return a list of service that are enabled on boot
        '''
        mock_runlevel = MagicMock(return_value=1)
        mock_prefix = '/etc/rc1.d/S'
        mock_glob = MagicMock(return_value=[mock_prefix + '01name'])

        with patch.object(debian_service, '_get_runlevel', mock_runlevel):
            with patch.object(debian_service.glob, 'glob', mock_glob):
                self.assertEqual(debian_service.get_enabled()[0], 'name')

    def test_get_disabled(self):
        '''
        Test for Return a set of services that are installed but disabled
        '''
        mock = MagicMock(return_value=['A'])
        with patch.object(debian_service, 'get_all', mock):
            mock = MagicMock(return_value=['B'])
            with patch.object(debian_service, 'get_enabled', mock):
                self.assertEqual(debian_service.get_disabled(), ['A'])

    def test_available(self):
        '''
        Test for Returns ``True`` if the specified service is
        available, otherwise returns
        ``False``.
        '''
        mock = MagicMock(return_value=['A'])
        with patch.object(debian_service, 'get_all', mock):
            self.assertFalse(debian_service.available('name'))

    def test_missing(self):
        '''
        Test for The inverse of service.available.
        '''
        mock = MagicMock(return_value=['A'])
        with patch.object(debian_service, 'get_all', mock):
            self.assertTrue(debian_service.missing('name'))

    def test_getall(self):
        '''
        Test for Return all available boot services
        '''
        mock = MagicMock(return_value=('A'))
        with patch.object(debian_service, 'get_enabled', mock):
            self.assertEqual(debian_service.get_all()[0], 'A')

    def test_start(self):
        '''
        Test for Start the specified service
        '''
        mock = MagicMock(return_value=True)
        with patch.object(debian_service, '_service_cmd', mock):
            with patch.dict(debian_service.__salt__, {'cmd.retcode': mock}):
                self.assertFalse(debian_service.start('name'))

    def test_stop(self):
        '''
        Test for Stop the specified service
        '''
        mock = MagicMock(return_value=True)
        with patch.object(debian_service, '_service_cmd', mock):
            with patch.dict(debian_service.__salt__, {'cmd.retcode': mock}):
                self.assertFalse(debian_service.stop('name'))

    def test_restart(self):
        '''
        Test for Restart the named service
        '''
        mock = MagicMock(return_value=True)
        with patch.object(debian_service, '_service_cmd', mock):
            with patch.dict(debian_service.__salt__, {'cmd.retcode': mock}):
                self.assertFalse(debian_service.restart('name'))

    def test_reload_(self):
        '''
        Test for Reload the named service
        '''
        mock = MagicMock(return_value=True)
        with patch.object(debian_service, '_service_cmd', mock):
            with patch.dict(debian_service.__salt__, {'cmd.retcode': mock}):
                self.assertFalse(debian_service.reload_('name'))

    def test_force_reload(self):
        '''
        Test for Force-reload the named service
        '''
        mock = MagicMock(return_value=True)
        with patch.object(debian_service, '_service_cmd', mock):
            with patch.dict(debian_service.__salt__, {'cmd.retcode': mock}):
                self.assertFalse(debian_service.force_reload('name'))

    def test_status(self):
        '''
        Test for Return the status for a service
        '''
        mock = MagicMock(return_value=True)
        with patch.dict(debian_service.__salt__, {'status.pid': mock}):
            self.assertTrue(debian_service.status('name', 1))

        mock = MagicMock(return_value='A')
        with patch.object(debian_service, '_service_cmd', mock):
            mock = MagicMock(return_value=True)
            with patch.dict(debian_service.__salt__, {'cmd.retcode': mock}):
                self.assertFalse(debian_service.status('name'))

    def test_enable(self):
        '''
        Test for Enable the named service to start at boot
        '''
        mock = MagicMock(return_value='5')
        with patch.object(debian_service, '_osrel', mock):
            mock = MagicMock(return_value='')
            with patch.object(debian_service, '_cmd_quote', mock):
                mock = MagicMock(return_value=True)
                with patch.dict(debian_service.__salt__,
                                {'cmd.retcode': mock}):
                    self.assertFalse(debian_service.enable('name'))

    def test_disable(self):
        '''
        Test for Disable the named service to start at boot
        '''
        mock = MagicMock(return_value='5')
        with patch.object(debian_service, '_osrel', mock):
            mock = MagicMock(return_value=True)
            with patch.dict(debian_service.__salt__, {'cmd.retcode': mock}):
                self.assertFalse(debian_service.disable('name'))

    def test_enabled(self):
        '''
        Test for Return True if the named service is enabled, false otherwise
        '''
        mock = MagicMock(return_value=['A'])
        with patch.object(debian_service, 'get_enabled', mock):
            self.assertFalse(debian_service.enabled('name'))

    def test_disabled(self):
        '''
        Test for Return True if the named service is enabled, false otherwise
        '''
        mock = MagicMock(return_value=['A'])
        with patch.object(debian_service, 'get_enabled', mock):
            self.assertFalse(debian_service.disabled('name'))


if __name__ == '__main__':
    from integration import run_tests
    run_tests(DebianServicesTestCase, needs_daemon=False)
54
tests/unit/modules/defaults_test.py
Normal file
@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
    MagicMock,
    patch,
    NO_MOCK,
    NO_MOCK_REASON
)

import inspect

# Import Salt Libs
from salt.modules import defaults

# Globals
defaults.__grains__ = {}
defaults.__salt__ = {}
defaults.__opts__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class DefaultsTestCase(TestCase):
    '''
    Test cases for salt.modules.defaults
    '''
    # 'get' function tests: 1

    def test_get(self):
        '''
        Test if it execute a defaults client run and return a dict
        '''
        mock = MagicMock(return_value='')
        with patch.dict(defaults.__salt__, {'pillar.get': mock}):
            self.assertEqual(defaults.get('core:users:root'), '')

    @patch('salt.modules.defaults.get',
           MagicMock(return_value={'users': {'root': [0]}}))
    def test_get_mock(self):
        '''
        Test if it execute a defaults client run and return a dict
        '''
        with patch.object(inspect, 'stack', MagicMock(return_value=[])):
            self.assertEqual(defaults.get('core:users:root'),
                             {'users': {'root': [0]}})


if __name__ == '__main__':
    from integration import run_tests
    run_tests(DefaultsTestCase, needs_daemon=False)
86
tests/unit/modules/dnsmasq_test.py
Normal file
@ -0,0 +1,86 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Rupesh Tare <rupesht@saltstack.com>`
'''

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
    mock_open,
    MagicMock,
    patch,
    NO_MOCK,
    NO_MOCK_REASON
)

# Import Salt Libs
from salt.modules import dnsmasq

# Import python libs
import os

# Globals
dnsmasq.__salt__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class DnsmasqTestCase(TestCase):
    '''
    TestCase for the salt.modules.at module
    '''
    def test_version(self):
        '''
        test to show installed version of dnsmasq.
        '''
        mock = MagicMock(return_value='A B C')
        with patch.dict(dnsmasq.__salt__, {'cmd.run': mock}):
            self.assertEqual(dnsmasq.version(), "C")

    def test_fullversion(self):
        '''
        Test to Show installed version of dnsmasq and compile options.
        '''
        mock = MagicMock(return_value='A B C\nD E F G H I')
        with patch.dict(dnsmasq.__salt__, {'cmd.run': mock}):
            self.assertDictEqual(dnsmasq.fullversion(),
                                 {'version': 'C',
                                  'compile options': ['G', 'H', 'I']})

    def test_set_config(self):
        '''
        test to show installed version of dnsmasq.
        '''
        mock = MagicMock(return_value={'conf-dir': 'A'})
        with patch.object(dnsmasq, 'get_config', mock):
            mock = MagicMock(return_value=['.', '~', 'bak', '#'])
            with patch.object(os, 'listdir', mock):
                self.assertDictEqual(dnsmasq.set_config(), {})

    def test_get_config(self):
        '''
        test to dumps all options from the config file.
        '''
        mock = MagicMock(return_value={'conf-dir': 'A'})
        with patch.object(dnsmasq, 'get_config', mock):
            mock = MagicMock(return_value=['.', '~', 'bak', '#'])
            with patch.object(os, 'listdir', mock):
                self.assertDictEqual(dnsmasq.get_config(), {'conf-dir': 'A'})

    def test_parse_dnamasq(self):
        '''
        test for generic function for parsing dnsmasq files including includes.
        '''
        text_file_data = '\n'.join(["line here", "second line", "A=B", "#"])
        with patch('salt.utils.fopen',
                   mock_open(read_data=text_file_data),
                   create=True) as m:
            m.return_value.__iter__.return_value = text_file_data.splitlines()
            self.assertDictEqual(dnsmasq._parse_dnamasq('filename'),
                                 {'A': 'B',
                                  'unparsed': ['line here',
                                               'second line']})


if __name__ == '__main__':
    from integration import run_tests
    run_tests(DnsmasqTestCase, needs_daemon=False)
115
tests/unit/modules/environ_test.py
Normal file
@ -0,0 +1,115 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Rupesh Tare <rupesht@saltstack.com>`
'''

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
    MagicMock,
    patch,
    NO_MOCK,
    NO_MOCK_REASON
)

# Import Salt Libs
from salt.modules import environ
import os


# Globals
environ.__grains__ = {}
environ.__salt__ = {}
environ.__context__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class EnvironTestCase(TestCase):
    '''
    Test cases for salt.modules.environ
    '''
    def test_setval(self):
        '''
        Test for set a single salt process environment variable. Returns True
        on success.
        '''
        mock = MagicMock(return_value=None)
        with patch.object(os.environ, 'pop', mock):
            self.assertEqual(environ.setval('key', False, True), None)

        mock = MagicMock(side_effect=Exception())
        with patch.object(os.environ, 'pop', mock):
            self.assertFalse(environ.setval('key', False, True))

        mock_environ = {}
        with patch.dict(os.environ, mock_environ):
            self.assertEqual(environ.setval('key', False), '')

        with patch.dict(os.environ, mock_environ):
            self.assertFalse(environ.setval('key', True))

    def test_setenv(self):
        '''
        Set multiple salt process environment variables from a dict.
        Returns a dict.
        '''
        self.assertFalse(environ.setenv('environ'))

        self.assertFalse(environ.setenv({'A': True},
                                        False,
                                        True,
                                        False))

        mock_environ = {'key': 'value'}
        with patch.dict(os.environ, mock_environ):
            mock_setval = MagicMock(return_value=None)
            with patch.object(environ, 'setval', mock_setval):
                self.assertEqual(environ.setenv({}, False, True, False)['key'],
                                 None)

    def test_get(self):
        '''
        Get a single salt process environment variable.
        '''
        self.assertFalse(environ.get(True))

        self.assertEqual(environ.get('key'), '')

    def test_has_value(self):
        '''
        Determine whether the key exists in the current salt process
        environment dictionary. Optionally compare the current value
        of the environment against the supplied value string.
        '''
        mock_environ = {}
        with patch.dict(os.environ, mock_environ):
            self.assertFalse(environ.has_value(True))

            os.environ['salty'] = 'yes'
            self.assertTrue(environ.has_value('salty', 'yes'))

            os.environ['too_salty'] = 'no'
            self.assertFalse(environ.has_value('too_salty', 'yes'))

            self.assertFalse(environ.has_value('key', 'value'))

            os.environ['key'] = 'value'
            self.assertTrue(environ.has_value('key'))

    def test_item(self):
        '''
        Get one or more salt process environment variables.
        Returns a dict.
        '''
        self.assertEqual(environ.item(None), {})

    def test_items(self):
        '''
        Return a dict of the entire environment set for the salt process
        '''
        self.assertTrue(environ.items())


if __name__ == '__main__':
    from integration import run_tests
    run_tests(EnvironTestCase, needs_daemon=False)