Merge branch '2016.3' into '2016.11'

Conflicts:
  - doc/topics/cloud/vsphere.rst
  - salt/config/__init__.py
  - salt/daemons/masterapi.py
  - salt/templates/rh_ip/network.jinja
  - salt/utils/parsers.py
  - tests/unit/daemons_test.py
This commit is contained in:
rallytime 2016-12-02 12:18:18 -07:00
commit 8fd53a4808
18 changed files with 1395 additions and 343 deletions

View File

@ -397,6 +397,9 @@
# Pass in an alternative location for the salt-ssh roster file
#roster_file: /etc/salt/roster
# The log file of the salt-ssh command:
#ssh_log_file: /var/log/salt/ssh
# Pass in minion option overrides that will be inserted into the SHIM for
# salt-ssh calls. The local minion config is not used for salt-ssh. Can be
# overridden on a per-minion basis in the roster (`minion_opts`)
@ -832,7 +835,7 @@
# If this master will be running a salt syndic daemon, syndic_master tells
# this master where to receive commands from.
#syndic_master: masterofmaster
#syndic_master: masterofmasters
# This is the 'ret_port' of the MasterOfMaster:
#syndic_master_port: 4506
@ -840,8 +843,8 @@
# PID file of the syndic daemon:
#syndic_pidfile: /var/run/salt-syndic.pid
# LOG file of the syndic daemon:
#syndic_log_file: syndic.log
# The log file of the salt-syndic daemon:
#syndic_log_file: /var/log/salt/syndic
# The behaviour of the multi-syndic when connection to a master of masters failed.
# Can specify ``random`` (default) or ``ordered``. If set to ``random``, masters

View File

@ -41,7 +41,7 @@ The local interface to bind to.
Default: ``False``
Whether the master should listen for IPv6 connections. If this is set to True,
the interface option must be adjusted too (for example: "interface: '::'")
the interface option must be adjusted too (for example: ``interface: '::'``)
.. code-block:: yaml
@ -718,6 +718,21 @@ Pass in an alternative location for the salt-ssh roster file.
roster_file: /root/roster
.. conf_master:: ssh_log_file
``ssh_log_file``
-------------------
.. versionadded:: 2016.3.5
Default: ``/var/log/salt/ssh``
Specify the log file of the ``salt-ssh`` command.
.. code-block:: yaml
ssh_log_file: /var/log/salt/ssh
.. conf_master:: ssh_minion_opts
``ssh_minion_opts``
@ -1663,7 +1678,6 @@ directories above the one specified will be ignored and the relative path will
gitfs_root: somefolder/otherfolder
.. versionchanged:: 2014.7.0
Ability to specify gitfs roots on a per-remote basis was added. See
:ref:`here <gitfs-per-remote-config>` for more info.
@ -2469,8 +2483,19 @@ There are additional details at :ref:`salt-pillars`
Default: ``False``
This option allows for external pillar sources to be evaluated before
:conf_master:`pillar_roots`. This allows for targeting file system pillar from
ext_pillar.
:conf_master:`pillar_roots`. External pillar data is evaluated separately from
:conf_master:`pillar_roots` pillar data, and then both sets of pillar data are
merged into a single pillar dictionary, so the value of this config option will
have an impact on which key "wins" when there is one of the same name in both
the external pillar data and :conf_master:`pillar_roots` pillar data. By
setting this option to ``True``, ext_pillar keys will be overridden by
:conf_master:`pillar_roots`, while leaving it as ``False`` will allow
ext_pillar keys to override those from :conf_master:`pillar_roots`.
.. note::
For a while, this config option did not work as specified above, because of
a bug in Pillar compilation. This bug has been resolved in version 2016.3.4
and later.
.. code-block:: yaml
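A toy Python sketch of the "which key wins" behaviour described above; the two dicts simply stand in for ext_pillar data and :conf_master:`pillar_roots` pillar data, and whichever set is merged last overrides duplicate keys.

.. code-block:: python

    pillar_roots_data = {'env': 'prod', 'owner': 'roots'}
    ext_pillar_data = {'owner': 'ext'}

    # ext_pillar_first: True  -> ext_pillar evaluated first, pillar_roots wins
    merged = {}
    merged.update(ext_pillar_data)
    merged.update(pillar_roots_data)
    print(merged['owner'])   # 'roots'

    # ext_pillar_first: False (default) -> pillar_roots first, ext_pillar wins
    merged = {}
    merged.update(pillar_roots_data)
    merged.update(ext_pillar_data)
    print(merged['owner'])   # 'ext'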
@ -3003,15 +3028,16 @@ can be utilized:
Syndic Server Settings
======================
A Salt syndic is a Salt master used to pass commands from a higher Salt master to
minions below the syndic. Using the syndic is simple. If this is a master that
will have syndic servers(s) below it, set the "order_masters" setting to True.
A Salt syndic is a Salt master used to pass commands from a higher Salt master
to minions below the syndic. Using the syndic is simple. If this is a master
that will have syndic server(s) below it, set the ``order_masters`` setting to
``True``.
If this is a master that will be running a syndic daemon for passthrough the
"syndic_master" setting needs to be set to the location of the master server.
``syndic_master`` setting needs to be set to the location of the master server.
Do not not forget that, in other words, it means that it shares with the local minion
its ID and PKI_DIR.
Do not forget that, in other words, it means that it shares with the local minion
its ID and PKI directory.
.. conf_master:: order_masters
@ -3033,9 +3059,13 @@ value must be set to True
``syndic_master``
-----------------
Default: ``''``
.. versionchanged:: 2016.3.5,2016.11.1
If this master will be running a salt-syndic to connect to a higher level
Set default higher level master address.
Default: ``masterofmasters``
If this master will be running the ``salt-syndic`` to connect to a higher level
master, specify the higher level master with this configuration value.
.. code-block:: yaml
@ -3043,7 +3073,7 @@ master, specify the higher level master with this configuration value.
syndic_master: masterofmasters
You can optionally connect a syndic to multiple higher level masters by
setting the 'syndic_master' value to a list:
setting the ``syndic_master`` value to a list:
.. code-block:: yaml
@ -3051,7 +3081,7 @@ setting the 'syndic_master' value to a list:
- masterofmasters1
- masterofmasters2
Each higher level master must be set up in a multimaster configuration.
Each higher level master must be set up in a multi-master configuration.
.. conf_master:: syndic_master_port
@ -3060,7 +3090,7 @@ Each higher level master must be set up in a multimaster configuration.
Default: ``4506``
If this master will be running a salt-syndic to connect to a higher level
If this master will be running the ``salt-syndic`` to connect to a higher level
master, specify the higher level master port with this configuration value.
.. code-block:: yaml
@ -3072,28 +3102,28 @@ master, specify the higher level master port with this configuration value.
``syndic_pidfile``
------------------
Default: ``salt-syndic.pid``
Default: ``/var/run/salt-syndic.pid``
If this master will be running a salt-syndic to connect to a higher level
If this master will be running the ``salt-syndic`` to connect to a higher level
master, specify the pidfile of the syndic daemon.
.. code-block:: yaml
syndic_pidfile: syndic.pid
syndic_pidfile: /var/run/syndic.pid
.. conf_master:: syndic_log_file
``syndic_log_file``
-------------------
Default: ``syndic.log``
Default: ``/var/log/salt/syndic``
If this master will be running a salt-syndic to connect to a higher level
master, specify the log_file of the syndic daemon.
If this master will be running the ``salt-syndic`` to connect to a higher level
master, specify the log file of the syndic daemon.
.. code-block:: yaml
syndic_log_file: salt-syndic.log
syndic_log_file: /var/log/salt-syndic.log
.. conf_master:: syndic_failover

View File

@ -124,3 +124,7 @@ To match minions using other matchers, use ``expr_form``:
.. code-block:: bash
# salt-call publish.publish 'webserv* and not G@os:Ubuntu' test.ping expr_form='compound'
.. note::
The expr_form argument will be renamed to ``tgt_type`` in the Nitrogen
release of Salt.

View File

@ -179,3 +179,7 @@ to add them to the pool of load balanced servers.
{% endfor %}
<...file contents snipped...>
.. note::
The expr_form argument will be renamed to ``tgt_type`` in the Nitrogen
release of Salt.

View File

@ -376,6 +376,13 @@ Use the ``expr_form`` argument to specify a matcher:
- arg:
- rm -rf /tmp/*
.. note::
An easy mistake to make here is to use ``tgt_type`` instead of
``expr_form``, since the job cache and events all refer to the targeting
method as ``tgt_type``. As of the Nitrogen release of Salt, ``expr_form``
will be deprecated in favor of using ``tgt_type``, to help with this
confusion.
Any other parameters in the :py:meth:`LocalClient().cmd()
<salt.client.LocalClient.cmd>` method can be specified as well.

View File

@ -206,7 +206,7 @@ class CloudClient(object):
the kwargs
'''
# Let's start with the default salt cloud configuration
opts = salt.config.CLOUD_CONFIG_DEFAULTS.copy()
opts = salt.config.DEFAULT_CLOUD_OPTS.copy()
# Update it with the loaded configuration
opts.update(self.opts.copy())
# Reset some of the settings to sane values
@ -223,14 +223,15 @@ class CloudClient(object):
profile = opts.get('profile', None)
# filter other profiles if one is specified
if profile:
for _profile in [a for a in opts.get('profiles', {})]:
tmp_profiles = opts.get('profiles', {}).copy()
for _profile in [a for a in tmp_profiles]:
if not _profile == profile:
opts['profiles'].pop(_profile)
tmp_profiles.pop(_profile)
# if profile is specified and we have enough info about providers
# also filter them to speedup methods like
# __filter_non_working_providers
providers = [a.get('provider', '').split(':')[0]
for a in six.itervalues(opts['profiles'])
for a in six.itervalues(tmp_profiles)
if a.get('provider', '')]
if providers:
_providers = opts.get('providers', {})
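A minimal illustration (with made-up profile data) of why the filtering above now works on a copy: popping directly from ``opts['profiles']`` would mutate the dict shared with the caller and silently drop profiles from the original configuration.

.. code-block:: python

    opts = {'profiles': {'web': {'provider': 'ec2'},
                         'db': {'provider': 'gce'}}}
    selected = 'web'

    tmp_profiles = opts.get('profiles', {}).copy()
    for _profile in [a for a in tmp_profiles]:
        if not _profile == selected:
            tmp_profiles.pop(_profile)

    print(sorted(tmp_profiles))      # ['web']        -- filtered working copy
    print(sorted(opts['profiles']))  # ['db', 'web']  -- caller's dict untouched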

View File

@ -1105,6 +1105,26 @@ def list_nodes_full(call=None, **kwargs):
return ret
def list_nodes_min(call=None, **kwargs):
'''
Return a list of the VMs that are in this location
'''
if call == 'action':
raise SaltCloudSystemExit(
(
'The list_nodes_min function must be called with'
' -f or --function.'
)
)
conn = get_conn()
server_list = conn.server_list_min()
if not server_list:
return {}
return server_list
def list_nodes_select(call=None):
'''
Return a list of the VMs that are on the provider, with select fields

View File

@ -144,19 +144,20 @@ VALID_OPTS = {
# Must also set master_sign_pubkey for this to work
'verify_master_pubkey_sign': bool,
# If verify_master_pubkey_sign is enabled, the signature is only verified, if the public-key of the master changes.
# If the signature should always be verified, this can be set to True.
# If verify_master_pubkey_sign is enabled, the signature is only verified, if the public-key of
# the master changes. If the signature should always be verified, this can be set to True.
'always_verify_signature': bool,
# The name of the file in the masters pki-directory that holds the pre-calculated signature of the masters public-key.
# The name of the file in the masters pki-directory that holds the pre-calculated signature of
# the masters public-key
'master_pubkey_signature': str,
# Instead of computing the signature for each auth-reply, use a pre-calculated signature.
# The master_pubkey_signature must also be set for this.
'master_use_pubkey_signature': bool,
# The key fingerprint of the higher-level master for the syndic to verify it is talking to the intended
# master
# The key fingerprint of the higher-level master for the syndic to verify it is talking to the
# intended master
'syndic_finger': str,
# The caching mechanism to use for the PKI key store. Can substantially decrease master publish
@ -448,8 +449,8 @@ VALID_OPTS = {
# in question have an event_return(event) function!
'event_return': (list, string_types),
# The number of events to queue up in memory before pushing them down the pipe to an event returner
# specified by 'event_return'
# The number of events to queue up in memory before pushing them down the pipe to an event
# returner specified by 'event_return'
'event_return_queue': int,
# Only forward events to an event returner if it matches one of the tags in this list
@ -467,8 +468,8 @@ VALID_OPTS = {
# Used with the SECO range master tops system
'range_server': str,
# The tcp keepalive interval to set on TCP ports. This setting can be used to tune salt connectivity
# issues in messy network environments with misbehaving firewalls
# The tcp keepalive interval to set on TCP ports. This setting can be used to tune Salt
# connectivity issues in messy network environments with misbehaving firewalls
'tcp_keepalive': bool,
# Sets zeromq TCP keepalive idle. May be used to tune issues with minion disconnects
@ -703,7 +704,8 @@ VALID_OPTS = {
# index
'search_index_interval': int,
# A compound target definition. See: http://docs.saltstack.com/en/latest/topics/targeting/nodegroups.html
# A compound target definition.
# See: http://docs.saltstack.com/en/latest/topics/targeting/nodegroups.html
'nodegroups': dict,
# List-only nodegroups for salt-ssh. Each group must be formed as either a
@ -802,6 +804,7 @@ VALID_OPTS = {
# The length that the syndic event queue must hit before events are popped off and forwarded
'syndic_jid_forward_cache_hwm': int,
# Salt SSH configuration
'ssh_passwd': str,
'ssh_port': str,
'ssh_sudo': bool,
@ -811,13 +814,15 @@ VALID_OPTS = {
'ssh_scan_ports': str,
'ssh_scan_timeout': float,
'ssh_identities_only': bool,
'ssh_log_file': str,
# Enable ioflo verbose logging. Warning! Very verbose!
'ioflo_verbose': int,
'ioflo_period': float,
# Set ioflo to realtime. Useful only for testing/debugging to simulate many ioflo periods very quickly.
# Set ioflo to realtime. Useful only for testing/debugging to simulate many ioflo periods very
# quickly
'ioflo_realtime': bool,
# Location for ioflo logs
@ -875,14 +880,14 @@ VALID_OPTS = {
# If set, all minion exec module actions will be rerouted through sudo as this user
'sudo_user': str,
# HTTP request timeout in seconds. Applied for tornado http fetch functions like cp.get_url should be greater than
# overall download time.
# HTTP request timeout in seconds. Applied for tornado http fetch functions like cp.get_url
# should be greater than overall download time
'http_request_timeout': float,
# HTTP request max file content size.
'http_max_body': int,
# Delay in seconds before executing bootstrap (salt cloud)
# Delay in seconds before executing bootstrap (Salt Cloud)
'bootstrap_delay': int,
# If a proxymodule has a function called 'grains', then call it during
@ -890,7 +895,7 @@ VALID_OPTS = {
# dictionary. Otherwise it is assumed that the module calls the grains
# function in a custom way and returns the data elsewhere
#
# Default to False for 2016.3 and 2016.11. Switch to True for Nitrogen
# Default to False for 2016.3 and 2016.11. Switch to True for Nitrogen
'proxy_merge_grains_in_module': bool,
# Command to use to restart salt-minion
@ -1267,7 +1272,7 @@ DEFAULT_MASTER_OPTS = {
'ping_on_rotate': False,
'peer': {},
'preserve_minion_cache': False,
'syndic_master': '',
'syndic_master': 'masterofmasters',
'syndic_failover': 'random',
'syndic_log_file': os.path.join(salt.syspaths.LOGS_DIR, 'syndic'),
'syndic_pidfile': os.path.join(salt.syspaths.PIDFILE_DIR, 'salt-syndic.pid'),
@ -1396,6 +1401,7 @@ DEFAULT_MASTER_OPTS = {
'ssh_scan_ports': '22',
'ssh_scan_timeout': 0.01,
'ssh_identities_only': False,
'ssh_log_file': os.path.join(salt.syspaths.LOGS_DIR, 'ssh'),
'master_floscript': os.path.join(FLO_DIR, 'master.flo'),
'worker_floscript': os.path.join(FLO_DIR, 'worker.flo'),
'maintenance_floscript': os.path.join(FLO_DIR, 'maint.flo'),
@ -1451,7 +1457,7 @@ DEFAULT_PROXY_MINION_OPTS = {
}
# ----- Salt Cloud Configuration Defaults ----------------------------------->
CLOUD_CONFIG_DEFAULTS = {
DEFAULT_CLOUD_OPTS = {
'verify_env': True,
'default_include': 'cloud.conf.d/*.conf',
# Global defaults
@ -1491,7 +1497,7 @@ DEFAULT_SPM_OPTS = {
'formula_path': '/srv/spm/salt',
'pillar_path': '/srv/spm/pillar',
'reactor_path': '/srv/spm/reactor',
'spm_logfile': '/var/log/salt/spm',
'spm_logfile': os.path.join(salt.syspaths.LOGS_DIR, 'spm'),
'spm_default_include': 'spm.d/*.conf',
# spm_repos_config also includes a .d/ directory
'spm_repos_config': '/etc/salt/spm.repos',
@ -1874,7 +1880,7 @@ def minion_config(path,
minion_opts = salt.config.minion_config('/etc/salt/minion')
'''
if defaults is None:
defaults = DEFAULT_MINION_OPTS
defaults = DEFAULT_MINION_OPTS.copy()
if path is not None and path.endswith('proxy'):
defaults.update(DEFAULT_PROXY_MINION_OPTS)
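The ``.copy()`` matters because ``defaults`` is mutated right afterwards (``defaults.update(DEFAULT_PROXY_MINION_OPTS)`` when loading a proxy config); without it that update would rewrite the shared module-level dict for every later caller. A toy sketch of the aliasing, with placeholder option values:

.. code-block:: python

    DEFAULT_MINION_OPTS = {'log_file': '/var/log/salt/minion'}
    DEFAULT_PROXY_MINION_OPTS = {'log_file': '/var/log/salt/proxy'}

    defaults = DEFAULT_MINION_OPTS              # alias, not a copy
    defaults.update(DEFAULT_PROXY_MINION_OPTS)
    print(DEFAULT_MINION_OPTS['log_file'])      # '/var/log/salt/proxy' -- leaked

    DEFAULT_MINION_OPTS = {'log_file': '/var/log/salt/minion'}
    defaults = DEFAULT_MINION_OPTS.copy()       # independent dict
    defaults.update(DEFAULT_PROXY_MINION_OPTS)
    print(DEFAULT_MINION_OPTS['log_file'])      # '/var/log/salt/minion' -- intact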
@ -2009,7 +2015,7 @@ def cloud_config(path, env_var='SALT_CLOUD_CONFIG', defaults=None,
providers_config_path=None, providers_config=None,
profiles_config_path=None, profiles_config=None):
'''
Read in the salt cloud config and return the dict
Read in the Salt Cloud config and return the dict
'''
if path:
config_dir = os.path.dirname(path)
@ -2024,14 +2030,15 @@ def cloud_config(path, env_var='SALT_CLOUD_CONFIG', defaults=None,
)
if defaults is None:
defaults = CLOUD_CONFIG_DEFAULTS
defaults = DEFAULT_CLOUD_OPTS.copy()
# Set defaults early to override Salt Master's default config values later
defaults.update(overrides)
overrides = defaults
# Load cloud configuration from any default or provided includes
default_include = overrides.get(
'default_include', defaults['default_include']
)
overrides.update(
salt.config.include_config(default_include, path, verbose=False)
salt.config.include_config(overrides['default_include'], path, verbose=False)
)
include = overrides.get('include', [])
overrides.update(
@ -2234,7 +2241,7 @@ def apply_cloud_config(overrides, defaults=None):
Return a cloud config
'''
if defaults is None:
defaults = CLOUD_CONFIG_DEFAULTS
defaults = DEFAULT_CLOUD_OPTS
config = defaults.copy()
if overrides:
@ -3419,24 +3426,16 @@ def client_config(path, env_var='SALT_CLIENT_CONFIG', defaults=None):
def api_config(path):
'''
Read in the salt master config file and add additional configs that
Read in the Salt Master config file and add additional configs that
need to be stubbed out for salt-api
'''
# Let's grab a copy of salt's master opts
opts = client_config(path, defaults=DEFAULT_MASTER_OPTS)
# Let's override them with salt-api's required defaults
api_opts = {
'log_file': opts.get(
'api_logfile', os.path.join(
opts['root_dir'], DEFAULT_API_OPTS['api_logfile'].lstrip('/')
)
),
'pidfile': opts.get(
'api_pidfile', os.path.join(
opts['root_dir'], DEFAULT_API_OPTS['api_pidfile'].lstrip('/')
)
),
}
api_opts = DEFAULT_API_OPTS
api_opts.update({
'pidfile': opts.get('api_pidfile', DEFAULT_API_OPTS['api_pidfile']),
})
opts.update(api_opts)
return opts
@ -3449,7 +3448,7 @@ def spm_config(path):
.. versionadded:: 2015.8.0
'''
# Let's grab a copy of salt's master default opts
defaults = DEFAULT_MASTER_OPTS
defaults = DEFAULT_MASTER_OPTS.copy()
# Let's override them with spm's required defaults
defaults.update(DEFAULT_SPM_OPTS)

View File

@ -12,6 +12,7 @@ import os
import re
import time
import stat
import msgpack
# Import salt libs
import salt.crypt
@ -146,7 +147,12 @@ def clean_expired_tokens(opts):
for token in filenames:
token_path = os.path.join(dirpath, token)
with salt.utils.fopen(token_path) as token_file:
token_data = serializer.loads(token_file.read())
try:
token_data = serializer.loads(token_file.read())
except msgpack.UnpackValueError:
# Bad token file or empty. Remove.
os.remove(token_path)
return
if 'expire' not in token_data or token_data.get('expire', 0) < time.time():
try:
os.remove(token_path)
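Note that the ``return`` in the new guard exits ``clean_expired_tokens`` entirely at the first corrupt token file rather than moving on to the next one. A self-contained sketch of the same guard written with ``continue`` instead; the direct ``msgpack`` calls and paths here are illustrative, not Salt's serializer:

.. code-block:: python

    import os
    import time
    import msgpack

    def clean_expired_tokens(token_dir):
        for dirpath, dirnames, filenames in os.walk(token_dir):
            for token in filenames:
                token_path = os.path.join(dirpath, token)
                with open(token_path, 'rb') as token_file:
                    try:
                        token_data = msgpack.loads(token_file.read())
                    except msgpack.UnpackValueError:
                        os.remove(token_path)   # bad or empty token file
                        continue                # keep scanning the rest
                if token_data.get('expire', 0) < time.time():
                    os.remove(token_path)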

View File

@ -1303,7 +1303,9 @@ def os_data():
key
)
grains[lsb_param] = value
except ImportError:
# Catch a NameError to work around possible breakage in lsb_release
# See https://github.com/saltstack/salt/issues/37867
except (ImportError, NameError):
# if the python library isn't available, default to regex
if os.path.isfile('/etc/lsb-release'):
# Matches any possible format:

View File

@ -93,6 +93,7 @@ multiline encrypted secrets from pillar in a state, use the following format to
creating extra whitespace at the beginning of each line in the cert file:
.. code-block:: yaml
secret.txt:
file.managed:
- template: jinja

View File

@ -7,4 +7,6 @@
{%endif%}{% if networkdelay %}NETWORKDELAY={{networkdelay}}
{%endif%}{% if devtimeout %}DEVTIMEOUT={{devtimeout}}
{%endif%}{% if nozeroconf %}NOZEROCONF={{nozeroconf}}
{%endif%}{% if enable_ipv6 %}IPV6INIT="yes"
{%endif%}{% if ipv6gateway %}IPV6_DEFAULTGW="{{ipv6gateway}}"
{%endif%}

View File

@ -87,7 +87,7 @@ def creds(provider):
proxies={'http': ''}, timeout=AWS_METADATA_TIMEOUT,
)
result.raise_for_status()
role = result.text
role = result.text.encode(result.encoding)
except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError):
return provider['id'], provider['key'], ''
@ -460,7 +460,7 @@ def query(params=None, setname=None, requesturl=None, location=None,
)
LOG.trace(
'AWS Response Text: {0}'.format(
result.text
result.text.encode(result.encoding)
)
)
result.raise_for_status()
@ -501,7 +501,7 @@ def query(params=None, setname=None, requesturl=None, location=None,
return {'error': data}, requesturl
return {'error': data}
response = result.text
response = result.text.encode(result.encoding)
root = ET.fromstring(response)
items = root[1]
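The switch from ``result.text`` to ``result.text.encode(result.encoding)`` hands byte strings, rather than unicode, to the XML parsing and trace logging above. A small sketch of that distinction using the same ``requests`` attributes (the URL is only a placeholder):

.. code-block:: python

    import requests

    result = requests.get('https://example.com/describe-instances')
    print(type(result.text))                 # unicode text decoded by requests
    if result.encoding:                      # charset declared by the server
        response = result.text.encode(result.encoding)
        print(type(response))                # byte string in that charset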

View File

@ -776,6 +776,22 @@ class SaltNova(object):
pass
return ret
def server_list_min(self):
'''
List minimal information about servers
'''
nt_ks = self.compute_conn
ret = {}
for item in nt_ks.servers.list(detailed=False):
try:
ret[item.name] = {
'id': item.id,
'status': 'Running'
}
except TypeError:
pass
return ret
def server_list_detailed(self):
'''
Detailed list of servers
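A toy sketch of the dictionary shape ``server_list_min`` builds, using stand-in objects in place of real novaclient ``Server`` instances:

.. code-block:: python

    class FakeServer(object):                 # stand-in for novaclient's Server
        def __init__(self, name, server_id):
            self.name = name
            self.id = server_id

    servers = [FakeServer('web01', 'a1b2'), FakeServer('db01', 'c3d4')]

    ret = {}
    for item in servers:                      # mirrors nt_ks.servers.list(detailed=False)
        ret[item.name] = {'id': item.id, 'status': 'Running'}

    print(ret)
    # {'web01': {'id': 'a1b2', 'status': 'Running'},
    #  'db01': {'id': 'c3d4', 'status': 'Running'}}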

View File

@ -128,7 +128,7 @@ class CustomOption(optparse.Option, object):
class OptionParser(optparse.OptionParser, object):
VERSION = version.__saltstack_version__.formatted_version
usage = '%prog'
usage = '%prog [options]'
epilog = ('You can find additional help about %prog issuing "man %prog" '
'or on http://docs.saltstack.com')
@ -168,8 +168,8 @@ class OptionParser(optparse.OptionParser, object):
new_inargs = sys.stdin.readlines()
new_inargs = [arg.rstrip('\r\n') for arg in new_inargs]
new_options, new_args = optparse.OptionParser.parse_args(
self,
new_inargs)
self,
new_inargs)
options.__dict__.update(new_options.__dict__)
args.extend(new_args)
@ -181,8 +181,9 @@ class OptionParser(optparse.OptionParser, object):
# Let's get some proper sys.stderr logging as soon as possible!!!
# This logging handler will be removed once the proper console or
# logfile logging is setup.
temp_log_level = getattr(self.options, 'log_level', None)
log.setup_temp_logger(
getattr(self.options, 'log_level', 'error')
'error' if temp_log_level is None else temp_log_level
)
# Gather and run the process_<option> functions in the proper order
@ -311,7 +312,7 @@ class MergeConfigMixIn(six.with_metaclass(MixInMeta, object)):
if value is not None:
# There's an actual value, add it to the config
self.config[option.dest] = value
elif value is not None and getattr(option, "explicit", False):
elif value is not None and getattr(option, 'explicit', False):
# Only set the value in the config file IF it was explicitly
# specified by the user, this makes it possible to tweak settings
# on the configuration files bypassing the shell option flags'
@ -336,7 +337,7 @@ class MergeConfigMixIn(six.with_metaclass(MixInMeta, object)):
if value is not None:
# There's an actual value, add it to the config
self.config[option.dest] = value
elif value is not None and getattr(option, "explicit", False):
elif value is not None and getattr(option, 'explicit', False):
# Only set the value in the config file IF it was explicitly
# specified by the user, this makes it possible to tweak
# settings on the configuration files bypassing the shell
@ -552,16 +553,18 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
if not getattr(self, '_skip_console_logging_config_', False):
group.add_option(
'-l', '--log-level',
dest=self._loglevel_config_setting_name_,
choices=list(log.LOG_LEVELS),
help='Console logging log level. One of {0}. '
'Default: \'{1}\'.'.format(
', '.join([repr(l) for l in log.SORTED_LEVEL_NAMES]),
getattr(self, '_default_logging_level_', 'warning')
self._default_logging_level_
)
)
group.add_option(
'--log-file',
dest=self._logfile_config_setting_name_,
default=None,
help='Log file path. Default: \'{0}\'.'.format(
self._default_logging_logfile_
@ -575,23 +578,25 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
help='Logfile logging log level. One of {0}. '
'Default: \'{1}\'.'.format(
', '.join([repr(l) for l in log.SORTED_LEVEL_NAMES]),
getattr(self, '_default_logging_level_', 'warning')
self._default_logging_level_
)
)
def process_log_level(self):
if not self.options.log_level:
cli_log_level = 'cli_{0}_log_level'.format(
self.get_prog_name().replace('-', '_')
)
if self.config.get(cli_log_level, None) is not None:
self.options.log_level = self.config.get(cli_log_level)
elif self.config.get(self._loglevel_config_setting_name_, None):
self.options.log_level = self.config.get(
self._loglevel_config_setting_name_
)
if not getattr(self.options, self._loglevel_config_setting_name_, None):
if self.config.get(self._loglevel_config_setting_name_, None):
# Is the regular log level setting set?
setattr(self.options,
self._loglevel_config_setting_name_,
self.config.get(self._loglevel_config_setting_name_)
)
else:
self.options.log_level = self._default_logging_level_
# Nothing is set on the configuration? Let's use the cli tool
# defined default
setattr(self.options,
self._loglevel_config_setting_name_,
self._default_logging_level_
)
# Setup extended logging right before the last step
self._mixin_after_parsed_funcs.append(self.__setup_extended_logging)
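Distilled into a few lines, the precedence the reworked ``process_log_level`` implements is: an explicit command-line value wins, then the config file, then the parser's default. A self-contained sketch (the function name is made up for illustration):

.. code-block:: python

    def resolve_log_level(cli_value, config, default, name='log_level'):
        if cli_value:                  # set with -l/--log-level on the command line
            return cli_value
        if config.get(name):           # set in the master/minion config file
            return config[name]
        return default                 # the parser's own default

    print(resolve_log_level('debug', {'log_level': 'info'}, 'warning'))  # debug
    print(resolve_log_level(None, {'log_level': 'info'}, 'warning'))     # info
    print(resolve_log_level(None, {}, 'warning'))                        # warning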
@ -605,44 +610,42 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
self._mixin_after_parsed_funcs.append(self.__setup_console_logger)
def process_log_file(self):
if not self.options.log_file:
cli_setting_name = 'cli_{0}_log_file'.format(
self.get_prog_name().replace('-', '_')
)
if self.config.get(cli_setting_name, None) is not None:
# There's a configuration setting defining this log file path,
# i.e., `key_log_file` if the cli tool is `salt-key`
self.options.log_file = self.config.get(cli_setting_name)
elif self.config.get(self._logfile_config_setting_name_, None):
if not getattr(self.options, self._logfile_config_setting_name_, None):
if self.config.get(self._logfile_config_setting_name_, None):
# Is the regular log file setting set?
self.options.log_file = self.config.get(
self._logfile_config_setting_name_
)
setattr(self.options,
self._logfile_config_setting_name_,
self.config.get(self._logfile_config_setting_name_)
)
else:
# Nothing is set on the configuration? Let's use the cli tool
# defined default
self.options.log_file = self._default_logging_logfile_
setattr(self.options,
self._logfile_config_setting_name_,
self._default_logging_logfile_
)
if self._logfile_config_setting_name_ in self.config:
# Remove it from config so it inherits from log_file
self.config.pop(self._logfile_config_setting_name_)
def process_log_file_level(self):
if not self.options.log_file_level:
cli_setting_name = 'cli_{0}_log_file_level'.format(
self.get_prog_name().replace('-', '_')
)
if self.config.get(cli_setting_name, None) is not None:
# There's a configuration setting defining this log file
# logging level, i.e., `key_log_file_level` if the cli tool is
# `salt-key`
self.options.log_file_level = self.config.get(cli_setting_name)
elif self.config.get(
self._logfile_loglevel_config_setting_name_, None):
def process_log_level_logfile(self):
if not getattr(self.options, self._logfile_loglevel_config_setting_name_, None):
if self.config.get(self._logfile_loglevel_config_setting_name_, None):
# Is the regular log file level setting set?
self.options.log_file_level = self.config.get(
self._logfile_loglevel_config_setting_name_
)
setattr(self.options,
self._logfile_loglevel_config_setting_name_,
self.config.get(self._logfile_loglevel_config_setting_name_)
)
else:
# Nothing is set on the configuration? Let's use the cli tool
# defined default
self.options.log_level = self._default_logging_level_
setattr(self.options,
self._logfile_loglevel_config_setting_name_,
self._default_logging_level_
)
if self._logfile_loglevel_config_setting_name_ in self.config:
# Remove it from config so it inherits from log_level
self.config.pop(self._logfile_loglevel_config_setting_name_)
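The point of routing everything through ``self._logfile_config_setting_name_`` (and its log-level counterpart) is that daemons whose option is not literally ``log_file``, such as the syndic's ``syndic_log_file``, salt-key's ``key_logfile`` or salt-ssh's ``ssh_log_file`` later in this commit, reuse the same resolution code. A stripped-down sketch of that indirection:

.. code-block:: python

    class FakeParser(object):
        _logfile_config_setting_name_ = 'log_file'     # overridden per daemon

        def __init__(self, config, default_logfile):
            self.config = config
            self.default_logfile = default_logfile
            self.options = {}

        def process_log_file(self):
            name = self._logfile_config_setting_name_
            self.options[name] = self.config.get(name) or self.default_logfile

    class FakeSyndicParser(FakeParser):
        _logfile_config_setting_name_ = 'syndic_log_file'

    parser = FakeSyndicParser({'syndic_log_file': '/var/log/salt/syndic'},
                              '/var/log/salt/master')
    parser.process_log_file()
    print(parser.options)   # {'syndic_log_file': '/var/log/salt/syndic'}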
def __setup_logfile_logger_config(self, *args): # pylint: disable=unused-argument
if self._logfile_loglevel_config_setting_name_ in self.config and not \
@ -650,15 +653,10 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
# Remove it from config so it inherits from log_level
self.config.pop(self._logfile_loglevel_config_setting_name_)
loglevel = self.config.get(
self._logfile_loglevel_config_setting_name_,
self.config.get(
# From the config setting
self._loglevel_config_setting_name_,
# From the console setting
self.config['log_level']
)
)
loglevel = getattr(self.options,
self._logfile_loglevel_config_setting_name_,
self._default_logging_level_
)
cli_log_path = 'cli_{0}_log_file'.format(
self.get_prog_name().replace('-', '_')
@ -707,32 +705,16 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
self.config.pop('log_fmt_logfile', None)
log_file_fmt = self.config.get(
cli_log_file_fmt,
'log_fmt_logfile',
self.config.get(
'cli_{0}_log_fmt'.format(
self.get_prog_name().replace('-', '_')
),
'log_fmt_console',
self.config.get(
'log_fmt_logfile',
self.config.get(
'log_fmt_console',
self.config.get(
'log_fmt',
config._DFLT_LOG_FMT_CONSOLE
)
)
'log_fmt',
config._DFLT_LOG_FMT_CONSOLE
)
)
)
cli_log_file_datefmt = 'cli_{0}_log_file_datefmt'.format(
self.get_prog_name().replace('-', '_')
)
if cli_log_file_datefmt in self.config and not \
self.config.get(cli_log_file_datefmt):
# Remove it from config so it inherits from log_datefmt_logfile
self.config.pop(cli_log_file_datefmt)
if self.config.get('log_datefmt_logfile', None) is None:
# Remove it from config so it inherits from log_datefmt_console
self.config.pop('log_datefmt_logfile', None)
@ -742,20 +724,12 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
self.config.pop('log_datefmt_console', None)
log_file_datefmt = self.config.get(
cli_log_file_datefmt,
'log_datefmt_logfile',
self.config.get(
'cli_{0}_log_datefmt'.format(
self.get_prog_name().replace('-', '_')
),
'log_datefmt_console',
self.config.get(
'log_datefmt_logfile',
self.config.get(
'log_datefmt_console',
self.config.get(
'log_datefmt',
'%Y-%m-%d %H:%M:%S'
)
)
'log_datefmt',
'%Y-%m-%d %H:%M:%S'
)
)
)
@ -815,7 +789,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
log_format=log_file_fmt,
date_format=log_file_datefmt
)
for name, level in six.iteritems(self.config['log_granular_levels']):
for name, level in six.iteritems(self.config.get('log_granular_levels', {})):
log.set_logger_level(name, level)
def __setup_extended_logging(self, *args): # pylint: disable=unused-argument
@ -833,43 +807,23 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
def __setup_console_logger_config(self, *args): # pylint: disable=unused-argument
# Since we're not going to be a daemon, setup the console logger
cli_log_fmt = 'cli_{0}_log_fmt'.format(
self.get_prog_name().replace('-', '_')
)
if cli_log_fmt in self.config and not self.config.get(cli_log_fmt):
# Remove it from config so it inherits from log_fmt_console
self.config.pop(cli_log_fmt)
logfmt = self.config.get(
cli_log_fmt, self.config.get(
'log_fmt_console',
self.config.get(
'log_fmt',
config._DFLT_LOG_FMT_CONSOLE
)
'log_fmt_console',
self.config.get(
'log_fmt',
config._DFLT_LOG_FMT_CONSOLE
)
)
cli_log_datefmt = 'cli_{0}_log_datefmt'.format(
self.get_prog_name().replace('-', '_')
)
if cli_log_datefmt in self.config and not \
self.config.get(cli_log_datefmt):
# Remove it from config so it inherits from log_datefmt_console
self.config.pop(cli_log_datefmt)
if self.config.get('log_datefmt_console', None) is None:
# Remove it from config so it inherits from log_datefmt
self.config.pop('log_datefmt_console', None)
datefmt = self.config.get(
cli_log_datefmt,
'log_datefmt_console',
self.config.get(
'log_datefmt_console',
self.config.get(
'log_datefmt',
'%Y-%m-%d %H:%M:%S'
)
'log_datefmt',
'%Y-%m-%d %H:%M:%S'
)
)
@ -887,7 +841,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
log_format=self.config['log_fmt_console'],
date_format=self.config['log_datefmt_console']
)
for name, level in six.iteritems(self.config['log_granular_levels']):
for name, level in six.iteritems(self.config.get('log_granular_levels', {})):
log.set_logger_level(name, level)
@ -1363,7 +1317,7 @@ class OutputOptionsMixIn(six.with_metaclass(MixInMeta, object)):
def _mixin_after_parsed(self):
group_options_selected = [
option for option in self.output_options_group.option_list if (
option for option in self.output_options_group.option_list if (
getattr(self.options, option.dest) and
(option.dest.endswith('_out') or option.dest == 'output'))
]
@ -1632,8 +1586,8 @@ class CloudProvidersListsMixIn(six.with_metaclass(MixInMeta, object)):
def _mixin_after_parsed(self):
list_options_selected = [
option for option in self.providers_listings_group.option_list if
getattr(self.options, option.dest) is not None
option for option in self.providers_listings_group.option_list if
getattr(self.options, option.dest) is not None
]
if len(list_options_selected) > 1:
self.error(
@ -1698,8 +1652,8 @@ class CloudCredentialsMixIn(six.with_metaclass(MixInMeta, object)):
def process_set_password(self):
if self.options.set_password:
raise RuntimeError(
'This functionality is not supported; '
'please see the keyring module at http://docs.saltstack.com/en/latest/topics/sdb/'
'This functionality is not supported; '
'please see the keyring module at http://docs.saltstack.com/en/latest/topics/sdb/'
)
@ -1751,28 +1705,29 @@ class MasterOptionParser(six.with_metaclass(OptionParserMeta,
DaemonMixIn,
SaltfileMixIn)):
description = 'The Salt master, used to control the Salt minions.'
description = 'The Salt Master, used to control the Salt Minions'
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
# LogLevelMixIn attributes
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'master')
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS['log_file']
_setup_mp_logging_listener_ = True
def setup_config(self):
return config.master_config(self.get_config_file_path())
class MinionOptionParser(six.with_metaclass(OptionParserMeta, MasterOptionParser)): # pylint: disable=no-init
class MinionOptionParser(six.with_metaclass(OptionParserMeta,
MasterOptionParser)): # pylint: disable=no-init
description = (
'The Salt minion, receives commands from a remote Salt master.'
'The Salt Minion, receives commands from a remote Salt Master'
)
# ConfigDirMixIn config filename attribute
_config_filename_ = 'minion'
# LogLevelMixIn attributes
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'minion')
_default_logging_logfile_ = config.DEFAULT_MINION_OPTS['log_file']
_setup_mp_logging_listener_ = True
def setup_config(self):
@ -1798,14 +1753,14 @@ class ProxyMinionOptionParser(six.with_metaclass(OptionParserMeta,
SaltfileMixIn)): # pylint: disable=no-init
description = (
'The Salt proxy minion, connects to and controls devices not able to run a minion. '
'Receives commands from a remote Salt master.'
'The Salt Proxy Minion, connects to and controls devices not able to run a minion.\n'
'Receives commands from a remote Salt Master.'
)
# ConfigDirMixIn config filename attribute
_config_filename_ = 'proxy'
# LogLevelMixIn attributes
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'proxy')
_default_logging_logfile_ = config.DEFAULT_PROXY_MINION_OPTS['log_file']
def setup_config(self):
try:
@ -1814,7 +1769,8 @@ class ProxyMinionOptionParser(six.with_metaclass(OptionParserMeta,
minion_id = None
return config.minion_config(self.get_config_file_path(),
cache_minion_id=False, minion_id=minion_id)
cache_minion_id=False,
minion_id=minion_id)
class SyndicOptionParser(six.with_metaclass(OptionParserMeta,
@ -1827,14 +1783,16 @@ class SyndicOptionParser(six.with_metaclass(OptionParserMeta,
SaltfileMixIn)):
description = (
'A seamless master of masters. Scale Salt to thousands of hosts or '
'across many different networks.'
'The Salt Syndic daemon, a special Minion that passes through commands from a\n'
'higher Master. Scale Salt to thousands of hosts or across many different networks.'
)
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
# LogLevelMixIn attributes
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'master')
_logfile_config_setting_name_ = 'syndic_log_file'
_default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level']
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS[_logfile_config_setting_name_]
_setup_mp_logging_listener_ = True
def setup_config(self):
@ -1858,15 +1816,20 @@ class SaltCMDOptionParser(six.with_metaclass(OptionParserMeta,
default_timeout = 5
description = (
'Salt allows for commands to be executed across a swath of remote systems in\n'
'parallel, so they can be both controlled and queried with ease.'
)
usage = '%prog [options] \'<target>\' <function> [arguments]'
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
# LogLevelMixIn attributes
_default_logging_level_ = 'warning'
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'master')
_loglevel_config_setting_name_ = 'cli_salt_log_file'
_default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level']
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS['log_file']
try:
os.getcwd()
except OSError:
@ -2100,7 +2063,7 @@ class SaltCMDOptionParser(six.with_metaclass(OptionParserMeta,
# interface
for i in range(len(self.config['arg'])):
self.config['arg'][i] = salt.utils.args.parse_input(
self.config['arg'][i])
self.config['arg'][i])
else:
self.config['fun'] = self.args[1]
self.config['arg'] = self.args[2:]
@ -2126,22 +2089,20 @@ class SaltCPOptionParser(six.with_metaclass(OptionParserMeta,
HardCrashMixin,
SaltfileMixIn)):
description = (
'salt-cp is NOT intended to broadcast large files, it is intended to '
'handle text files.\nsalt-cp can be used to distribute configuration '
'files.'
'salt-cp is NOT intended to broadcast large files, it is intended to handle text\n'
'files. salt-cp can be used to distribute configuration files.'
)
default_timeout = 5
usage = '%prog [options] \'<target>\' SOURCE DEST'
default_timeout = 5
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
# LogLevelMixIn attributes
_default_logging_level_ = 'warning'
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'master')
_loglevel_config_setting_name_ = 'cli_salt_cp_log_file'
_default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level']
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS['log_file']
def _mixin_after_parsed(self):
# salt-cp needs arguments
@ -2174,9 +2135,7 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
SaltfileMixIn,
EAuthMixIn)):
description = 'Salt key is used to manage Salt authentication keys'
usage = '%prog [options]'
description = 'salt-key is used to manage Salt authentication keys'
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
@ -2184,7 +2143,7 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
# LogLevelMixIn attributes
_skip_console_logging_config_ = True
_logfile_config_setting_name_ = 'key_logfile'
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'key')
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS[_logfile_config_setting_name_]
def _mixin_setup(self):
actions_group = optparse.OptionGroup(self, 'Actions')
@ -2418,7 +2377,7 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
if self.options.gen_keys:
# Since we're generating the keys, some defaults can be assumed
# or tweaked
keys_config['key_logfile'] = os.devnull
keys_config[self._logfile_config_setting_name_] = os.devnull
keys_config['pki_dir'] = self.options.gen_keys_dir
return keys_config
@ -2473,8 +2432,9 @@ class SaltCallOptionParser(six.with_metaclass(OptionParserMeta,
ArgsStdinMixIn,
ProfilingPMixIn)):
description = ('Salt call is used to execute module functions locally '
'on a minion')
description = (
'salt-call is used to execute module functions locally on a Salt Minion'
)
usage = '%prog [options] <function> [arguments]'
@ -2482,8 +2442,8 @@ class SaltCallOptionParser(six.with_metaclass(OptionParserMeta,
_config_filename_ = 'minion'
# LogLevelMixIn attributes
_default_logging_level_ = 'info'
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'minion')
_default_logging_level_ = config.DEFAULT_MINION_OPTS['log_level']
_default_logging_logfile_ = config.DEFAULT_MINION_OPTS['log_file']
def _mixin_setup(self):
self.add_option(
@ -2685,15 +2645,19 @@ class SaltRunOptionParser(six.with_metaclass(OptionParserMeta,
default_timeout = 1
usage = '%prog [options]'
description = (
'salt-run is the frontend command for executing Salt Runners.\n'
'Salt Runners are modules used to execute convenience functions on the Salt Master'
)
usage = '%prog [options] <function> [arguments]'
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
# LogLevelMixIn attributes
_default_logging_level_ = 'warning'
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'master')
_loglevel_config_setting_name_ = 'cli_salt_run_log_file'
_default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level']
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS['log_file']
def _mixin_setup(self):
self.add_option(
@ -2750,15 +2714,15 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
SaltfileMixIn,
HardCrashMixin)):
usage = '%prog [options]'
usage = '%prog [options] \'<target>\' <function> [arguments]'
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
# LogLevelMixIn attributes
_default_logging_level_ = 'warning'
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'ssh')
_loglevel_config_setting_name_ = 'cli_salt_run_log_file'
_logfile_config_setting_name_ = 'ssh_log_file'
_default_logging_level_ = config.DEFAULT_MASTER_OPTS['log_level']
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS[_logfile_config_setting_name_]
def _mixin_setup(self):
self.add_option(
@ -3016,14 +2980,19 @@ class SaltCloudParser(six.with_metaclass(OptionParserMeta,
HardCrashMixin,
SaltfileMixIn)):
description = (
'Salt Cloud is the system used to provision virtual machines on various public\n'
'clouds via a cleanly controlled profile and mapping system'
)
usage = '%prog [options] <-m MAP | -p PROFILE> <NAME> [NAME2 ...]'
# ConfigDirMixIn attributes
_config_filename_ = 'cloud'
# LogLevelMixIn attributes
_default_logging_level_ = 'info'
_logfile_config_setting_name_ = 'log_file'
_loglevel_config_setting_name_ = 'log_level_logfile'
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'cloud')
_default_logging_level_ = config.DEFAULT_CLOUD_OPTS['log_level']
_default_logging_logfile_ = config.DEFAULT_CLOUD_OPTS['log_file']
def print_versions_report(self, file=sys.stdout): # pylint: disable=redefined-builtin
print('\n'.join(version.versions_report(include_salt_cloud=True)),
@ -3043,7 +3012,7 @@ class SaltCloudParser(six.with_metaclass(OptionParserMeta,
if 'DUMP_SALT_CLOUD_CONFIG' in os.environ:
import pprint
print('Salt cloud configuration dump(INCLUDES SENSIBLE DATA):')
print('Salt Cloud configuration dump (INCLUDES SENSIBLE DATA):')
pprint.pprint(self.config)
self.exit(salt.defaults.exitcodes.EX_OK)
@ -3064,16 +3033,17 @@ class SPMParser(six.with_metaclass(OptionParserMeta,
MergeConfigMixIn,
SaltfileMixIn)):
'''
The cli parser object used to fire up the salt spm system.
The CLI parser object used to fire up the Salt SPM system.
'''
description = 'SPM is used to manage 3rd party formulas and other Salt components'
usage = '%prog [options] <function> [arguments]'
usage = '%prog [options] <function> <argument>'
# ConfigDirMixIn config filename attribute
_config_filename_ = 'spm'
# LogLevelMixIn attributes
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'spm')
_logfile_config_setting_name_ = 'spm_logfile'
_default_logging_logfile_ = config.DEFAULT_SPM_OPTS[_logfile_config_setting_name_]
def _mixin_setup(self):
self.add_option(
@ -3113,12 +3083,17 @@ class SaltAPIParser(six.with_metaclass(OptionParserMeta,
DaemonMixIn,
MergeConfigMixIn)):
'''
The Salt API cli parser object used to fire up the salt api system.
The CLI parser object used to fire up the Salt API system.
'''
description = (
'The Salt API system manages network API connectors for the Salt Master'
)
# ConfigDirMixIn config filename attribute
_config_filename_ = 'master'
# LogLevelMixIn attributes
_default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'api')
_logfile_config_setting_name_ = 'api_logfile'
_default_logging_logfile_ = config.DEFAULT_API_OPTS[_logfile_config_setting_name_]
def setup_config(self):
return salt.config.api_config(self.get_config_file_path()) # pylint: disable=no-member

View File

@ -735,10 +735,13 @@ class Schedule(object):
'''
Execute this method in a multiprocess or thread
'''
if salt.utils.is_windows():
if salt.utils.is_windows() or self.opts.get('transport') == 'zeromq':
# Since function references can't be pickled and pickling
# is required when spawning new processes on Windows, regenerate
# the functions and returners.
# This is also needed for the ZeroMQ transport to reset all functions'
# context data that could keep parent connections open. ZeroMQ will
# hang when polling parent connections from the child process.
self.functions = salt.loader.minion_mods(self.opts)
self.returners = salt.loader.returners(self.opts, self.functions)
ret = {'id': self.opts.get('id', 'master'),

View File

@ -15,6 +15,7 @@ ensure_in_syspath('../')
# Import Salt libs
import integration
import multiprocessing
from salt.cli import daemons
@ -76,39 +77,51 @@ class DaemonsStarterTestCase(TestCase, integration.SaltClientTestCaseMixIn):
Unit test for the daemons starter classes.
'''
def _multiproc_exec_test(self, exec_test):
m_parent, m_child = multiprocessing.Pipe()
p_ = multiprocessing.Process(target=exec_test, args=(m_child,))
p_.start()
self.assertTrue(m_parent.recv())
p_.join()
def test_master_daemon_hash_type_verified(self):
'''
Verify if Master is verifying hash_type config option.
:return:
'''
def _create_master():
'''
Create master instance
:return:
'''
master = daemons.Master()
master.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['master', 'start_log_info', 'prepare']:
setattr(master, attr, MagicMock())
def exec_test(child_pipe):
def _create_master():
'''
Create master instance
:return:
'''
obj = daemons.Master()
obj.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['start_log_info', 'prepare', 'shutdown', 'master']:
setattr(obj, attr, MagicMock())
return master
return obj
_logger = LoggerMock()
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_master().start()
self.assertTrue(_logger.messages)
self.assertTrue(_logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning'))
_logger = LoggerMock()
ret = True
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_master().start()
ret = ret and _logger.messages \
and _logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning')
_logger.reset()
_logger.reset()
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_master().start()
self.assertTrue(_logger.messages)
self.assertFalse(_logger.has_message('Do not use '))
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_master().start()
ret = ret and _logger.messages \
and not _logger.has_message('Do not use ')
child_pipe.send(ret)
child_pipe.close()
self._multiproc_exec_test(exec_test)
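The rewritten tests all follow the same pattern: run the checks in a child process, push a single boolean back over a ``multiprocessing.Pipe``, and assert on it in the parent. A minimal, self-contained sketch of that pattern outside Salt's test harness:

.. code-block:: python

    import multiprocessing
    import unittest

    def exec_test(child_pipe):
        ret = True
        ret = ret and (1 + 1 == 2)           # stand-in for the real assertions
        child_pipe.send(ret)
        child_pipe.close()

    class PipeTestCase(unittest.TestCase):
        def test_runs_in_child_process(self):
            parent, child = multiprocessing.Pipe()
            proc = multiprocessing.Process(target=exec_test, args=(child,))
            proc.start()
            self.assertTrue(parent.recv())
            proc.join()

    if __name__ == '__main__':
        unittest.main()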
def test_minion_daemon_hash_type_verified(self):
'''
@ -117,34 +130,40 @@ class DaemonsStarterTestCase(TestCase, integration.SaltClientTestCaseMixIn):
:return:
'''
def _create_minion():
'''
Create minion instance
:return:
'''
obj = daemons.Minion()
obj.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['start_log_info', 'prepare', 'shutdown']:
setattr(obj, attr, MagicMock())
setattr(obj, 'minion', MagicMock(restart=False))
def exec_test(child_pipe):
def _create_minion():
'''
Create minion instance
:return:
'''
obj = daemons.Minion()
obj.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['start_log_info', 'prepare', 'shutdown']:
setattr(obj, attr, MagicMock())
setattr(obj, 'minion', MagicMock(restart=False))
return obj
return obj
_logger = LoggerMock()
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_minion().start()
self.assertTrue(_logger.messages)
self.assertTrue(_logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning'))
ret = True
_logger = LoggerMock()
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_minion().start()
ret = ret and _logger.messages \
and _logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning')
_logger.reset()
_logger.reset()
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_minion().start()
ret = ret and _logger.messages \
and not _logger.has_message('Do not use ')
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_minion().start()
self.assertTrue(_logger.messages)
self.assertFalse(_logger.has_message('Do not use '))
child_pipe.send(ret)
child_pipe.close()
self._multiproc_exec_test(exec_test)
def test_proxy_minion_daemon_hash_type_verified(self):
'''
@ -153,34 +172,40 @@ class DaemonsStarterTestCase(TestCase, integration.SaltClientTestCaseMixIn):
:return:
'''
def _create_proxy_minion():
'''
Create proxy minion instance
:return:
'''
obj = daemons.ProxyMinion()
obj.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['minion', 'start_log_info', 'prepare', 'shutdown', 'tune_in']:
setattr(obj, attr, MagicMock())
def exec_test(child_pipe):
def _create_proxy_minion():
'''
Create proxy minion instance
:return:
'''
obj = daemons.ProxyMinion()
obj.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['minion', 'start_log_info', 'prepare', 'shutdown', 'tune_in']:
setattr(obj, attr, MagicMock())
obj.minion.restart = False
return obj
obj.minion.restart = False
return obj
_logger = LoggerMock()
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_proxy_minion().start()
self.assertTrue(_logger.messages)
self.assertTrue(_logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning'))
ret = True
_logger = LoggerMock()
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_proxy_minion().start()
ret = ret and _logger.messages \
and _logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning')
_logger.reset()
_logger.reset()
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_proxy_minion().start()
self.assertTrue(_logger.messages)
self.assertFalse(_logger.has_message('Do not use '))
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_proxy_minion().start()
ret = ret and _logger.messages \
and not _logger.has_message('Do not use ')
child_pipe.send(ret)
child_pipe.close()
self._multiproc_exec_test(exec_test)
def test_syndic_daemon_hash_type_verified(self):
'''
@ -189,34 +214,40 @@ class DaemonsStarterTestCase(TestCase, integration.SaltClientTestCaseMixIn):
:return:
'''
def _create_syndic():
'''
Create syndic instance
:return:
'''
obj = daemons.Syndic()
obj.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['syndic', 'start_log_info', 'prepare', 'shutdown']:
setattr(obj, attr, MagicMock())
def exec_test(child_pipe):
def _create_syndic():
'''
Create syndic instance
:return:
'''
obj = daemons.Syndic()
obj.config = {'user': 'dummy', 'hash_type': alg}
for attr in ['syndic', 'start_log_info', 'prepare', 'shutdown']:
setattr(obj, attr, MagicMock())
return obj
return obj
_logger = LoggerMock()
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_syndic().start()
self.assertTrue(_logger.messages)
self.assertTrue(_logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning'))
ret = True
_logger = LoggerMock()
with patch('salt.cli.daemons.check_user', MagicMock(return_value=True)):
with patch('salt.cli.daemons.log', _logger):
for alg in ['md5', 'sha1']:
_create_syndic().start()
ret = ret and _logger.messages \
and _logger.has_message('Do not use {alg}'.format(alg=alg),
log_type='warning')
_logger.reset()
_logger.reset()
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_syndic().start()
self.assertTrue(_logger.messages)
self.assertFalse(_logger.has_message('Do not use '))
for alg in ['sha224', 'sha256', 'sha384', 'sha512']:
_create_syndic().start()
ret = ret and _logger.messages \
and not _logger.has_message('Do not use ')
child_pipe.send(ret)
child_pipe.close()
self._multiproc_exec_test(exec_test)
if __name__ == '__main__':
from integration import run_tests

View File

@ -0,0 +1,948 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Denys Havrysh <denys.gavrysh@gmail.com>`
'''
# Import python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt Libs
import salt.utils.parsers
import salt.log.setup as log
import salt.config
import salt.syspaths
ensure_in_syspath('../../')
class ErrorMock(object): # pylint: disable=too-few-public-methods
'''
Error handling
'''
def __init__(self):
'''
init
'''
self.msg = None
def error(self, msg):
'''
Capture error message
'''
self.msg = msg
class LogSetupMock(object):
'''
Logger setup
'''
def __init__(self):
'''
init
'''
self.log_level = None
self.log_file = None
self.log_level_logfile = None
self.config = {}
self.temp_log_level = None
def setup_console_logger(self, log_level='error', **kwargs): # pylint: disable=unused-argument
'''
Set console loglevel
'''
self.log_level = log_level
def setup_extended_logging(self, opts):
'''
Set opts
'''
self.config = opts
def setup_logfile_logger(self, logfile, loglevel, **kwargs): # pylint: disable=unused-argument
'''
Set logfile and loglevel
'''
self.log_file = logfile
self.log_level_logfile = loglevel
@staticmethod
def get_multiprocessing_logging_queue(): # pylint: disable=invalid-name
'''
Mock
'''
return None
def setup_multiprocessing_logging_listener(self, opts, *args): # pylint: disable=invalid-name,unused-argument
'''
Set opts
'''
self.config = opts
def setup_temp_logger(self, log_level='error'):
'''
Set temp loglevel
'''
self.temp_log_level = log_level
class ObjectView(object): # pylint: disable=too-few-public-methods
'''
Dict object view
'''
def __init__(self, d):
self.__dict__ = d
class LogSettingsParserTests(TestCase):
'''
Unit Tests for Log Level Mixin with Salt parsers
'''
args = []
skip_console_logging_config = False
log_setup = None
# Set config option names
loglevel_config_setting_name = 'log_level'
logfile_config_setting_name = 'log_file'
logfile_loglevel_config_setting_name = 'log_level_logfile' # pylint: disable=invalid-name
def setup_log(self):
'''
Mock logger functions
'''
self.log_setup = LogSetupMock()
log.setup_console_logger = self.log_setup.setup_console_logger
log.setup_extended_logging = self.log_setup.setup_extended_logging
log.setup_logfile_logger = self.log_setup.setup_logfile_logger
log.get_multiprocessing_logging_queue = \
self.log_setup.get_multiprocessing_logging_queue
log.setup_multiprocessing_logging_listener = \
self.log_setup.setup_multiprocessing_logging_listener
log.setup_temp_logger = self.log_setup.setup_temp_logger
# log level configuration tests
def test_get_log_level_cli(self):
'''
Tests that log level match command-line specified value
'''
# Set defaults
default_log_level = self.default_config[self.loglevel_config_setting_name]
# Set log level in CLI
log_level = 'critical'
args = ['--log-level', log_level] + self.args
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=self.default_config)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
console_log_level = getattr(parser.options, self.loglevel_config_setting_name)
# Check console log level setting
self.assertEqual(console_log_level, log_level)
# Check console logger log level
self.assertEqual(self.log_setup.log_level, log_level)
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.temp_log_level, log_level)
# Check log file logger log level
self.assertEqual(self.log_setup.log_level_logfile, default_log_level)
def test_get_log_level_config(self):
'''
Tests that log level match the configured value
'''
# Set defaults
default_log_level = self.default_config[self.loglevel_config_setting_name]
args = self.args
# Set log level in config
log_level = 'info'
opts = self.default_config.copy()
opts.update({self.loglevel_config_setting_name: log_level})
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=opts)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
console_log_level = getattr(parser.options, self.loglevel_config_setting_name)
# Check console log level setting
self.assertEqual(console_log_level, log_level)
# Check console logger log level
self.assertEqual(self.log_setup.log_level, log_level)
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file logger log level
self.assertEqual(self.log_setup.log_level_logfile, default_log_level)
def test_get_log_level_default(self):
'''
Tests that the log level matches the default value
'''
# Set defaults
log_level = default_log_level = self.default_config[self.loglevel_config_setting_name]
args = self.args
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=self.default_config)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
console_log_level = getattr(parser.options, self.loglevel_config_setting_name)
# Check log level setting
self.assertEqual(console_log_level, log_level)
# Check console logger log level
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file logger
self.assertEqual(self.log_setup.log_level_logfile, default_log_level)
# Check help message
self.assertIn('Default: \'{0}\'.'.format(default_log_level),
parser.get_option('--log-level').help)
# log file configuration tests
def test_get_log_file_cli(self):
'''
Tests that the log file matches the value specified on the command line
'''
# Set defaults
log_level = self.default_config[self.loglevel_config_setting_name]
# Set log file in CLI
log_file = '{0}_cli.log'.format(self.log_file)
args = ['--log-file', log_file] + self.args
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=self.default_config)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
log_file_option = getattr(parser.options, self.logfile_config_setting_name)
if not self.skip_console_logging_config:
# Check console logger
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.config[self.logfile_config_setting_name],
log_file)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file setting
self.assertEqual(log_file_option, log_file)
# Check log file logger
self.assertEqual(self.log_setup.log_file, log_file)
def test_get_log_file_config(self):
'''
Tests that the log file matches the configured value
'''
# Set defaults
log_level = self.default_config[self.loglevel_config_setting_name]
args = self.args
# Set log file in config
log_file = '{0}_config.log'.format(self.log_file)
opts = self.default_config.copy()
opts.update({self.logfile_config_setting_name: log_file})
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=opts)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
log_file_option = getattr(parser.options, self.logfile_config_setting_name)
if not self.skip_console_logging_config:
# Check console logger
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.config[self.logfile_config_setting_name],
log_file)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file setting
self.assertEqual(log_file_option, log_file)
# Check log file logger
self.assertEqual(self.log_setup.log_file, log_file)
def test_get_log_file_default(self):
'''
Tests that the log file matches the default value
'''
# Set defaults
log_level = self.default_config[self.loglevel_config_setting_name]
log_file = default_log_file = self.default_config[self.logfile_config_setting_name]
args = self.args
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=self.default_config)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
log_file_option = getattr(parser.options, self.logfile_config_setting_name)
if not self.skip_console_logging_config:
# Check console logger
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.config[self.logfile_config_setting_name],
log_file)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file setting
self.assertEqual(log_file_option, log_file)
# Check log file logger
self.assertEqual(self.log_setup.log_file, log_file)
# Check help message
self.assertIn('Default: \'{0}\'.'.format(default_log_file),
parser.get_option('--log-file').help)
# log file log level configuration tests
def test_get_log_file_level_cli(self):
'''
Tests that the log file level matches the value specified on the command line
'''
# Set defaults
log_level = self.default_config[self.loglevel_config_setting_name]
# Set log file level in CLI
log_level_logfile = 'error'
args = ['--log-file-level', log_level_logfile] + self.args
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=self.default_config)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
log_level_logfile_option = getattr(parser.options,
self.logfile_loglevel_config_setting_name)
if not self.skip_console_logging_config:
# Check console logger
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.config[self.logfile_loglevel_config_setting_name],
log_level_logfile)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file level setting
self.assertEqual(log_level_logfile_option, log_level_logfile)
# Check log file logger
self.assertEqual(self.log_setup.log_level_logfile, log_level_logfile)
def test_get_log_file_level_config(self):
'''
Tests that the log file level matches the configured value
'''
# Set defaults
log_level = self.default_config[self.loglevel_config_setting_name]
args = self.args
# Set log file level in config
log_level_logfile = 'info'
opts = self.default_config.copy()
opts.update({self.logfile_loglevel_config_setting_name: log_level_logfile})
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=opts)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
log_level_logfile_option = getattr(parser.options,
self.logfile_loglevel_config_setting_name)
if not self.skip_console_logging_config:
# Check console logger
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.config[self.logfile_loglevel_config_setting_name],
log_level_logfile)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file level setting
self.assertEqual(log_level_logfile_option, log_level_logfile)
# Check log file logger
self.assertEqual(self.log_setup.log_level_logfile, log_level_logfile)
def test_get_log_file_level_default(self):
'''
Tests that the log file level matches the default value
'''
# Set defaults
default_log_level = self.default_config[self.loglevel_config_setting_name]
log_level = default_log_level
log_level_logfile = default_log_level
args = self.args
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=self.default_config)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
log_level_logfile_option = getattr(parser.options,
self.logfile_loglevel_config_setting_name)
if not self.skip_console_logging_config:
# Check console logger
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.config[self.logfile_loglevel_config_setting_name],
log_level_logfile)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file level setting
self.assertEqual(log_level_logfile_option, log_level_logfile)
# Check log file logger
self.assertEqual(self.log_setup.log_level_logfile, log_level_logfile)
# Check help message
self.assertIn('Default: \'{0}\'.'.format(default_log_level),
parser.get_option('--log-file-level').help)
def test_get_console_log_level_with_file_log_level(self): # pylint: disable=invalid-name
'''
Tests that the console log level and log file level settings work together
'''
log_level = 'critical'
log_level_logfile = 'debug'
args = ['--log-file-level', log_level_logfile] + self.args
opts = self.default_config.copy()
opts.update({self.loglevel_config_setting_name: log_level})
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=opts)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
log_level_logfile_option = getattr(parser.options,
self.logfile_loglevel_config_setting_name)
if not self.skip_console_logging_config:
# Check console logger
self.assertEqual(self.log_setup.log_level, log_level)
# Check extended logger
self.assertEqual(self.log_setup.config[self.loglevel_config_setting_name],
log_level)
self.assertEqual(self.log_setup.config[self.logfile_loglevel_config_setting_name],
log_level_logfile)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file level setting
self.assertEqual(log_level_logfile_option, log_level_logfile)
# Check log file logger
self.assertEqual(self.log_setup.log_level_logfile, log_level_logfile)
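# The concrete test cases below reuse LogSettingsParserTests and differ only in
# the parser class under test, the default config, the config function that is
# patched, and (where applicable) the log file config option name.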
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MasterOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing Salt Master options
'''
def setUp(self):
'''
Setting up
'''
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS
# Log file
self.log_file = '/tmp/salt_master_parser_test'
# Function to patch
self.config_func = 'salt.config.master_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.MasterOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class MinionOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing Salt Minion options
'''
def setUp(self):
'''
Setting up
'''
# Set defaults
self.default_config = salt.config.DEFAULT_MINION_OPTS
# Log file
self.log_file = '/tmp/salt_minion_parser_test'
# Function to patch
self.config_func = 'salt.config.minion_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.MinionOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class ProxyMinionOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing Salt Proxy Minion options
'''
def setUp(self):
'''
Setting up
'''
# Set defaults
self.default_config = salt.config.DEFAULT_MINION_OPTS.copy()
self.default_config.update(salt.config.DEFAULT_PROXY_MINION_OPTS)
# Log file
self.log_file = '/tmp/salt_proxy_minion_parser_test'
# Function to patch
self.config_func = 'salt.config.minion_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.ProxyMinionOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SyndicOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing Salt Syndic options
'''
def setUp(self):
'''
Setting up
'''
# Set config option names
self.logfile_config_setting_name = 'syndic_log_file'
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS
# Log file
self.log_file = '/tmp/salt_syndic_parser_test'
# Function to patch
self.config_func = 'salt.config.syndic_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SyndicOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCMDOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing Salt CLI options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = ['foo', 'bar.baz']
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS
# Log file
self.log_file = '/tmp/salt_cmd_parser_test'
# Function to patch
self.config_func = 'salt.config.client_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltCMDOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCPOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing salt-cp options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = ['foo', 'bar', 'baz']
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS
# Log file
self.log_file = '/tmp/salt_cp_parser_test'
# Function to patch
self.config_func = 'salt.config.master_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltCPOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltKeyOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing salt-key options
'''
def setUp(self):
'''
Setting up
'''
self.skip_console_logging_config = True
# Set config option names
self.logfile_config_setting_name = 'key_logfile'
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS
# Log file
self.log_file = '/tmp/salt_key_parser_test'
# Function to patch
self.config_func = 'salt.config.master_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltKeyOptionParser
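# salt-key does not expose a console log level CLI option, so the inherited
# log level tests are overridden below to assert that the option and the
# configured value are ignored.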
# log level configuration tests
def test_get_log_level_cli(self):
'''
Tests that the console log level option is not recognized
'''
# No console log level will be actually set
log_level = default_log_level = None
option = '--log-level'
args = self.args + [option, 'error']
parser = self.parser()
mock_err = ErrorMock()
with patch('optparse.OptionParser.error', mock_err.error):
parser.parse_args(args)
# Check error msg
self.assertEqual(mock_err.msg, 'no such option: {0}'.format(option))
# Check console logger has not been set
self.assertEqual(self.log_setup.log_level, log_level)
self.assertNotIn(self.loglevel_config_setting_name, self.log_setup.config)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file logger log level
self.assertEqual(self.log_setup.log_level_logfile, default_log_level)
def test_get_log_level_config(self):
'''
Tests that a log level set in the config is ignored
'''
# Set defaults
default_log_level = self.default_config[self.loglevel_config_setting_name]
log_level = None
args = self.args
# Set log level in config
opts = {self.loglevel_config_setting_name: 'info'}
parser = self.parser()
with patch(self.config_func, MagicMock(return_value=opts)):
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
# Check config name absence in options
self.assertNotIn(self.loglevel_config_setting_name, parser.options.__dict__)
# Check console logger has not been set
self.assertEqual(self.log_setup.log_level, log_level)
self.assertNotIn(self.loglevel_config_setting_name, self.log_setup.config)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file logger log level
self.assertEqual(self.log_setup.log_level_logfile, default_log_level)
def test_get_log_level_default(self):
'''
Tests that the default log level value is ignored
'''
# Set defaults
default_log_level = self.default_config[self.loglevel_config_setting_name]
log_level = None
args = self.args
parser = self.parser()
parser.parse_args(args)
with patch('salt.utils.parsers.is_writeable', MagicMock(return_value=True)):
parser.setup_logfile_logger()
# Check config name absence in options
self.assertNotIn(self.loglevel_config_setting_name, parser.options.__dict__)
# Check console logger has not been set
self.assertEqual(self.log_setup.log_level, log_level)
self.assertNotIn(self.loglevel_config_setting_name, self.log_setup.config)
# Check temp logger
self.assertEqual(self.log_setup.temp_log_level, 'error')
# Check log file logger log level
self.assertEqual(self.log_setup.log_level_logfile, default_log_level)
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCallOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing salt-call options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = ['foo.bar']
# Set defaults
self.default_config = salt.config.DEFAULT_MINION_OPTS
# Log file
self.log_file = '/tmp/salt_call_parser_test'
# Function to patch
self.config_func = 'salt.config.minion_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltCallOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltRunOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing salt-run options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = ['foo.bar']
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS
# Log file
self.log_file = '/tmp/salt_run_parser_test'
# Function to patch
self.config_func = 'salt.config.master_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltRunOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltSSHOptionParserTestCase(LogSettingsParserTests):
'''
Tests parsing salt-ssh options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = ['foo', 'bar.baz']
# Set config option names
self.logfile_config_setting_name = 'ssh_log_file'
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS
# Log file
self.log_file = '/tmp/salt_ssh_parser_test'
# Function to patch
self.config_func = 'salt.config.master_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltSSHOptionParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltCloudParserTestCase(LogSettingsParserTests):
'''
Tests parsing Salt Cloud options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = ['-p', 'foo', 'bar']
# Set defaults
self.default_config = salt.config.DEFAULT_CLOUD_OPTS
# Log file
self.log_file = '/tmp/salt_cloud_parser_test'
# Function to patch
self.config_func = 'salt.config.cloud_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltCloudParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SPMParserTestCase(LogSettingsParserTests):
'''
Tests parsing SPM options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = ['foo', 'bar']
# Set config option names
self.logfile_config_setting_name = 'spm_logfile'
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy()
self.default_config.update(salt.config.DEFAULT_SPM_OPTS)
# Log file
self.log_file = '/tmp/spm_parser_test'
# Function to patch
self.config_func = 'salt.config.spm_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SPMParser
@skipIf(NO_MOCK, NO_MOCK_REASON)
class SaltAPIParserTestCase(LogSettingsParserTests):
'''
Tests parsing salt-api options
'''
def setUp(self):
'''
Setting up
'''
# Set mandatory CLI options
self.args = []
# Set config option names
self.logfile_config_setting_name = 'api_logfile'
# Set defaults
self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy()
self.default_config.update(salt.config.DEFAULT_API_OPTS)
# Log file
self.log_file = '/tmp/salt_api_parser_test'
# Function to patch
self.config_func = 'salt.config.api_config'
# Mock log setup
self.setup_log()
# Assign parser
self.parser = salt.utils.parsers.SaltAPIParser
# Hide the class from unittest framework when it searches for TestCase classes in the module
del LogSettingsParserTests
if __name__ == '__main__':
from integration import run_tests # pylint: disable=import-error,wrong-import-position
run_tests(MasterOptionParserTestCase,
MinionOptionParserTestCase,
ProxyMinionOptionParserTestCase,
SyndicOptionParserTestCase,
SaltCMDOptionParserTestCase,
SaltCPOptionParserTestCase,
SaltKeyOptionParserTestCase,
SaltCallOptionParserTestCase,
SaltRunOptionParserTestCase,
SaltSSHOptionParserTestCase,
SaltCloudParserTestCase,
SPMParserTestCase,
SaltAPIParserTestCase,
needs_daemon=False)