Merge branch '2018.3' into '2019.2'

Conflicts:
  - pkg/windows/build_env_2.ps1
  - pkg/windows/build_env_3.ps1
  - pkg/windows/req.txt
  - pkg/windows/req_win.txt
  - salt/grains/core.py
This commit is contained in:
Ch3LL 2019-02-15 15:09:36 -05:00
commit ffa2f5fa50
No known key found for this signature in database
GPG Key ID: 132B55A7C13EFA73
29 changed files with 768 additions and 82 deletions

View File

@ -847,6 +847,23 @@ A value of 10 minutes is a reasonable default.
grains_refresh_every: 0
.. conf_minion:: metadata_server_grains
``metadata_server_grains``
--------------------------
.. versionadded:: 2017.7.0
Default: ``False``
Set this option to enable gathering of cloud metadata from
``http://169.254.169.254/latest`` for use in grains (see :py:mod:`here
<salt.grains.metadata>` for more information).
.. code-block:: yaml
metadata_server_grains: True
.. conf_minion:: fibre_channel_grains
``fibre_channel_grains``

View File

@ -520,7 +520,7 @@ services. For more information on service certificates, see the following link:
* `Manage Certificates`__
.. __: https://msdn.microsoft.com/en-us/library/azure/gg981929.aspx
.. __: https://docs.microsoft.com/en-us/azure/cloud-services/cloud-services-certs-create
The following functions are available.

View File

@ -237,3 +237,17 @@ Defined in: State
__sdb__
-------
Defined in: SDB
Additional Globals
==================
Defined for: Runners, Execution Modules, Wheels
* ``__jid__``: The job ID
* ``__user__``: The user
* ``__tag__``: The jid tag
* ``__jid_event__``: A :py:class:`salt.utils.event.NamespacedEvent`.
:py:class:`NamespacedEvent <salt.utils.event.NamespacedEvent>` defines a single
method :py:meth:`fire_event <salt.utils.event.NamespacedEvent.fire_event>`, that takes data and tag. The :ref:`Runner docs <runners>` has examples.

View File

@ -163,10 +163,10 @@ A few examples of salt states from the community:
* https://github.com/bclermont/states
* https://github.com/pcrews/salt-data
Follow on ohloh
===============
Follow on Open Hub
==================
https://www.ohloh.net/p/salt
https://www.openhub.net/p/salt
Other community links
=====================
@ -178,6 +178,7 @@ Other community links
- `Facebook <https://www.facebook.com/SaltStack>`_
- `Twitter <https://twitter.com/SaltStackInc>`_
- `Wikipedia page <http://en.wikipedia.org/wiki/Salt_(software)>`_
- `Stack Overflow <https://stackoverflow.com/questions/tagged/salt-stack>`_
Hack the Source
===============

View File

@ -317,7 +317,7 @@ class SyncClientMixin(object):
print_func=print_func
)
# TODO: document these, and test that they exist
# TODO: test that they exist
# TODO: Other things to inject??
func_globals = {'__jid__': jid,
'__user__': data['user'],

View File

@ -22,6 +22,7 @@ import locale
import uuid
from errno import EACCES, EPERM
import datetime
import warnings
# pylint: disable=import-error
try:
@ -41,7 +42,12 @@ _supported_dists += ('arch', 'mageia', 'meego', 'vmware', 'bluewhite64',
# linux_distribution deprecated in py3.7
try:
from platform import linux_distribution
from platform import linux_distribution as _deprecated_linux_distribution
def linux_distribution(**kwargs):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
return _deprecated_linux_distribution(**kwargs)
except ImportError:
from distro import linux_distribution
@ -1453,7 +1459,9 @@ _OS_FAMILY_MAP = {
'KDE neon': 'Debian',
'Void': 'Void',
'IDMS': 'Debian',
'Funtoo': 'Gentoo',
'AIX': 'AIX',
'TurnKey': 'Debian',
}
# Matches any possible format:

View File

@ -283,7 +283,7 @@ def _run(cmd,
'''
if 'pillar' in kwargs and not pillar_override:
pillar_override = kwargs['pillar']
if _is_valid_shell(shell) is False:
if output_loglevel != 'quiet' and _is_valid_shell(shell) is False:
log.warning(
'Attempt to run a shell command with what may be an invalid shell! '
'Check to ensure that the shell <%s> is valid for this user.',

View File

@ -91,8 +91,12 @@ def install(gems, # pylint: disable=C0103
Doesn't play nice with multiple gems at once
:param rdoc: boolean : False
Generate RDoc documentation for the gem(s).
For rubygems > 3 this is interpreted as the --no-document arg and the
ri option will then be ignored
:param ri: boolean : False
Generate RI documentation for the gem(s).
For rubygems > 3 this is interpreted as the --no-document arg and the
rdoc option will then be ignored
:param pre_releases: boolean : False
Include pre-releases in the available versions
:param proxy: string : None
@ -119,12 +123,18 @@ def install(gems, # pylint: disable=C0103
options = []
if version:
options.extend(['--version', version])
if not rdoc:
options.append('--no-rdoc')
if not ri:
options.append('--no-ri')
if pre_releases:
options.append('--pre')
if _has_rubygems_3(ruby=ruby, runas=runas, gem_bin=gem_bin):
if not rdoc or not ri:
options.append('--no-document')
if pre_releases:
options.append('--prerelease')
else:
if not rdoc:
options.append('--no-rdoc')
if not ri:
options.append('--no-ri')
if pre_releases:
options.append('--pre')
if proxy:
options.extend(['-p', proxy])
if source:
@ -224,6 +234,45 @@ def update_system(version='', ruby=None, runas=None, gem_bin=None):
runas=runas)
def version(ruby=None, runas=None, gem_bin=None):
    '''
    Print out the version of gem

    :param gem_bin: string : None
        Full path to ``gem`` binary to use.
    :param ruby: string : None
        If RVM or rbenv are installed, the ruby version and gemset to use.
        Ignored if ``gem_bin`` is specified.
    :param runas: string : None
        The user to run gem as.

    CLI Example:

    .. code-block:: bash

        salt '*' gem.version
    '''
    cmd = ['--version']
    stdout = _gem(cmd,
                  ruby,
                  gem_bin=gem_bin,
                  runas=runas)
    # Default to an empty string (the original used ``{}``) so callers such
    # as _has_rubygems_3 can always treat the result as text; passing a dict
    # to re.match would raise TypeError when no version line is found.
    ret = ''
    for line in salt.utils.itertools.split(stdout, '\n'):
        # The version line is the first line consisting of digits and dots,
        # e.g. '3.0.3'.
        match = re.match(r'[.0-9]+', line)
        if match:
            ret = line
            break
    return ret
def _has_rubygems_3(ruby=None, runas=None, gem_bin=None):
    # True when the installed rubygems reports a 3.x version string.
    current = version(ruby=ruby, runas=runas, gem_bin=gem_bin)
    return re.match(r'^3\..*', current) is not None
def list_(prefix='', ruby=None, runas=None, gem_bin=None):
'''
List locally installed gems.

View File

@ -10,7 +10,6 @@ import collections
# Import third party libs
import copy
import os
import copy
import logging
from salt.ext import six
@ -157,7 +156,7 @@ def get(key,
'skipped.', default, ret, type(ret).__name__
)
elif isinstance(default, list):
ret = salt.utils.data.traverse_dict_and_list(
ret = salt.utils.data.traverse_dict_and_list( # pylint: disable=redefined-variable-type
pillar_dict,
key,
[],
@ -345,7 +344,7 @@ def ls(*args):
salt '*' pillar.ls
'''
return list(items(*args).keys())
return list(items(*args))
def item(*args, **kwargs):
@ -544,7 +543,7 @@ def keys(key, delimiter=DEFAULT_TARGET_DELIM):
if not isinstance(ret, dict):
raise ValueError("Pillar value in key {0} is not a dict".format(key))
return ret.keys()
return list(ret)
def file_exists(path, saltenv=None):

View File

@ -47,6 +47,7 @@ import salt.utils.data
import salt.utils.functools
import salt.utils.path
import salt.utils.pkg
from salt.ext.six import string_types
from salt.exceptions import CommandExecutionError
from salt.ext import six
from salt.ext.six.moves import zip # pylint: disable=redefined-builtin
@ -527,12 +528,16 @@ def install(name=None, refresh=False, pkgs=None, version=None, test=False, **kwa
pkg2inst = ''
if pkgs: # multiple packages specified
pkg2inst = []
for pkg in pkgs:
if list(pkg.items())[0][1]: # version specified
pkg2inst += '{0}@{1} '.format(list(pkg.items())[0][0],
list(pkg.items())[0][1])
if getattr(pkg, 'items', False):
if list(pkg.items())[0][1]: # version specified
pkg2inst.append('{0}@{1}'.format(list(pkg.items())[0][0],
list(pkg.items())[0][1]))
else:
pkg2inst.append(list(pkg.items())[0][0])
else:
pkg2inst += '{0} '.format(list(pkg.items())[0][0])
pkg2inst.append("{0}".format(pkg))
log.debug('Installing these packages instead of %s: %s',
name, pkg2inst)
@ -552,7 +557,10 @@ def install(name=None, refresh=False, pkgs=None, version=None, test=False, **kwa
# Install or upgrade the package
# If package is already installed
cmd.append(pkg2inst)
if isinstance(pkg2inst, string_types):
cmd.append(pkg2inst)
elif isinstance(pkg2inst, list):
cmd = cmd + pkg2inst
out = __salt__['cmd.run_all'](cmd, output_loglevel='trace')

View File

@ -5083,7 +5083,7 @@ def _findOptionValueAdvAudit(option):
field_names = _get_audit_defaults('fieldnames')
# If the file doesn't exist anywhere, create it with default
# fieldnames
__salt__['file.mkdir'](os.path.dirname(f_audit))
__salt__['file.makedirs'](f_audit)
__salt__['file.write'](f_audit, ','.join(field_names))
audit_settings = {}
@ -5187,7 +5187,7 @@ def _set_audit_file_data(option, value):
# Copy the temporary csv file over the existing audit.csv in both
# locations if a value was written
__salt__['file.copy'](f_temp.name, f_audit, remove_existing=True)
__salt__['file.mkdir'](os.path.dirname(f_audit_gpo))
__salt__['file.makedirs'](f_audit_gpo)
__salt__['file.copy'](f_temp.name, f_audit_gpo, remove_existing=True)
finally:
f_temp.close()
@ -5605,7 +5605,7 @@ def _getDataFromRegPolData(search_string, policy_data, return_value_name=False):
)
].split(encoded_semicolon)
if len(pol_entry) >= 2:
valueName = pol_entry[1]
valueName = pol_entry[1].decode('utf-16-le').rstrip(chr(0))
if len(pol_entry) >= 5:
value = pol_entry[4]
if vtype == 'REG_DWORD' or vtype == 'REG_QWORD':
@ -5857,7 +5857,7 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
check_deleted = True
if not check_deleted:
this_vtype = 'REG_DWORD'
this_element_value = chr(1).encode('utf-16-le')
this_element_value = struct.pack('I', 1)
standard_element_expected_string = False
elif etree.QName(element).localname == 'decimal':
# https://msdn.microsoft.com/en-us/library/dn605987(v=vs.85).aspx
@ -5923,18 +5923,18 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
']'.encode('utf-16-le')])
if 'expandable' in element.attrib:
this_vtype = 'REG_EXPAND_SZ'
if 'explicitValue' in element.attrib and element.attrib['explicitValue'].lower() == 'true':
if element.attrib.get('explicitValue', 'false').lower() == 'true':
if this_element_value is not None:
element_valuenames = this_element_value.keys()
element_values = this_element_value.values()
if 'valuePrefix' in element.attrib:
element_valuenames = [str(k) for k in this_element_value.keys()]
element_values = [str(v) for v in this_element_value.values()]
elif 'valuePrefix' in element.attrib:
# if the valuePrefix attribute exists, the valuenames are <prefix><number>
# most prefixes attributes are empty in the admx files, so the valuenames
# end up being just numbers
if element.attrib['valuePrefix'] != '':
if this_element_value is not None:
element_valuenames = ['{0}{1}'.format(element.attrib['valuePrefix'],
k) for k in element_valuenames]
element_valuenames = ['{0}{1}'.format(
element.attrib['valuePrefix'], k) for k in element_valuenames]
else:
# if there is no valuePrefix attribute, the valuename is the value
if element_values is not None:

View File

@ -2,26 +2,36 @@
'''
Retrieve EC2 instance data for minions for ec2_tags and ec2_tags_list
The minion id must be the AWS instance-id or value in 'tag_match_key'.
For example set 'tag_match_key' to 'Name', to have the minion-id matched against the
tag 'Name'. The tag contents must be unique. The value of tag_match_value can
be 'uqdn' or 'asis'. if 'uqdn' strips any domain before comparison.
The minion id must be the AWS instance-id or value in ``tag_match_key``. For
example set ``tag_match_key`` to ``Name`` to have the minion-id matched against
the tag 'Name'. The tag contents must be unique. The value of
``tag_match_value`` can be 'uqdn' or 'asis'. if 'uqdn', then the domain will be
stripped before comparison.
The option use_grain can be set to True. This allows the use of an
instance-id grain instead of the minion-id. Since this is a potential
security risk, the configuration can be further expanded to include
a list of minions that are trusted to only allow the alternate id
of the instances to specific hosts. There is no glob matching at
this time.
Additionally, the ``use_grain`` option can be set to ``True``. This allows the
use of an instance-id grain instead of the minion-id. Since this is a potential
security risk, the configuration can be further expanded to include a list of
minions that are trusted to only allow the alternate id of the instances to
specific hosts. There is no glob matching at this time.
The optional 'tag_list_key' indicates which keys should be added to
'ec2_tags_list' and be split by tag_list_sep (default `;`). If a tag key is
included in 'tag_list_key' it is removed from ec2_tags. If a tag does not
exist it is still included as an empty list.
.. note::
If you are using ``use_grain: True`` in the configuration for this external
pillar module, the minion must have :conf_minion:`metadata_server_grains`
enabled in the minion config file (see also :py:mod:`here
<salt.grains.metadata>`).
It is important to also note that enabling the ``use_grain`` option allows
the minion to manipulate the pillar data returned, as described above.
The optional ``tag_list_key`` indicates which keys should be added to
``ec2_tags_list`` and be split by ``tag_list_sep`` (by default ``;``). If a tag
key is included in ``tag_list_key`` it is removed from ec2_tags. If a tag does
not exist it is still included as an empty list.
Note: restart the salt-master for changes to take effect.
.. note::
As with any master configuration change, restart the salt-master daemon for
changes to take effect.
.. code-block:: yaml
@ -38,11 +48,10 @@ exist it is still included as an empty list.
- trusted-minion-2
- trusted-minion-3
This is a very simple pillar that simply retrieves the instance data
from AWS. Currently the only portion implemented are EC2 tags, which
returns a list of key/value pairs for all of the EC2 tags assigned to
the instance.
This is a very simple pillar configuration that simply retrieves the instance
data from AWS. Currently the only portion implemented are EC2 tags, which
returns a list of key/value pairs for all of the EC2 tags assigned to the
instance.
'''
# Import python libs

View File

@ -77,6 +77,9 @@ def _walk_through(job_dir):
except Exception:
log.exception('Failed to deserialize %s', load_path)
continue
if not job:
log.error('Deserialization of job succeded but there is no data in %s', load_path)
continue
jid = job['jid']
yield jid, job, t_path, final

View File

@ -209,7 +209,7 @@ def orchestrate_high(data, test=None, queue=False, pillar=None, **kwargs):
def orchestrate_show_sls(mods,
saltenv='base',
test=None,
exclude=None,
queue=False,
pillar=None,
pillarenv=None,
pillar_enc=None):
@ -234,12 +234,12 @@ def orchestrate_show_sls(mods,
minion = salt.minion.MasterMinion(__opts__)
running = minion.functions['state.show_sls'](
mods,
saltenv,
test,
exclude,
queue,
pillar=pillar,
pillarenv=pillarenv,
pillar_enc=pillar_enc)
pillar_enc=pillar_enc,
saltenv=saltenv)
ret = {minion.opts['id']: running}
return ret

View File

@ -787,10 +787,12 @@ def _check_directory_win(name,
if not os.path.isdir(name):
changes = {name: {'directory': 'new'}}
else:
# Check owner
# Check owner by SID
if win_owner is not None:
owner = salt.utils.win_dacl.get_owner(name)
if not owner.lower() == win_owner.lower():
current_owner = salt.utils.win_dacl.get_owner(name)
current_owner_sid = salt.utils.win_functions.get_sid_from_name(current_owner)
expected_owner_sid = salt.utils.win_functions.get_sid_from_name(win_owner)
if not current_owner_sid == expected_owner_sid:
changes['owner'] = win_owner
# Check perms

View File

@ -1398,8 +1398,12 @@ def _remotes_on(port, which_end):
Return a set of ip addrs active tcp connections
'''
port = int(port)
ret = set()
ret = _netlink_tool_remote_on(port, which_end)
if ret is not None:
return ret
ret = set()
proc_available = False
for statf in ['/proc/net/tcp', '/proc/net/tcp6']:
if os.path.isfile(statf):
@ -1451,6 +1455,51 @@ def _parse_tcp_line(line):
return ret
def _netlink_tool_remote_on(port, which_end):
    '''
    Return the set of host addresses of remote established TCP connections
    on the given local or remote port, or ``None`` if the ``ss`` tool is
    not available (so the caller can fall back to /proc parsing).

    Parses the output of the shell command ``ss -ant``::

        [root@salt-master ~]# ss -ant
        State      Recv-Q Send-Q  Local Address:Port  Peer Address:Port
        LISTEN     0      511     *:80                *:*
        LISTEN     0      128     *:22                *:*
        ESTAB      0      0       127.0.0.1:56726     127.0.0.1:4505

    :param port: TCP port number to match.
    :param which_end: either ``'local_port'`` or ``'remote_port'``,
        selecting which end of the connection must match ``port``.
    '''
    remotes = set()
    valid = False
    try:
        data = subprocess.check_output(['ss', '-ant'])  # pylint: disable=minimum-python-version
    except subprocess.CalledProcessError:
        log.error('Failed ss')
        raise
    except OSError:     # not command "No such file or directory"
        return None

    lines = salt.utils.stringutils.to_str(data).split('\n')
    for line in lines:
        if 'Address:Port' in line:    # ss tools may not be valid
            valid = True
            continue
        elif 'ESTAB' not in line:
            continue
        chunks = line.split()
        # Split on the *last* colon so IPv6 addresses (which themselves
        # contain colons, e.g. '::1:4505') are separated from the port
        # correctly; a plain split(':', 1) breaks on such lines.
        local_host, local_port = chunks[3].rsplit(':', 1)
        remote_host, remote_port = chunks[4].rsplit(':', 1)

        if which_end == 'remote_port' and int(remote_port) != port:
            continue
        if which_end == 'local_port' and int(local_port) != port:
            continue
        remotes.add(remote_host)

    if valid is False:
        remotes = None
    return remotes
def _sunos_remotes_on(port, which_end):
'''
SunOS specific helper function.

View File

@ -192,9 +192,10 @@ class Schedule(object):
if remove_hidden:
_schedule = copy.deepcopy(schedule)
for job in _schedule:
for item in _schedule[job]:
if item.startswith('_'):
del schedule[job][item]
if isinstance(_schedule[job], dict):
for item in _schedule[job]:
if item.startswith('_'):
del schedule[job][item]
return schedule
def _check_max_running(self, func, data, opts, now):

View File

@ -8,10 +8,16 @@ from __future__ import absolute_import, print_function, unicode_literals
import re
import sys
import platform
import warnings
# linux_distribution deprecated in py3.7
try:
from platform import linux_distribution
from platform import linux_distribution as _deprecated_linux_distribution
def linux_distribution(**kwargs):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
return _deprecated_linux_distribution(**kwargs)
except ImportError:
from distro import linux_distribution

View File

@ -224,6 +224,7 @@ salt/utils/schedule.py:
- integration.scheduler.test_postpone
- integration.scheduler.test_skip
- integration.scheduler.test_maxrunning
- integration.scheduler.test_helpers
salt/utils/vt.py:
- integration.cli.test_custom_module

View File

@ -110,7 +110,7 @@ class GemModuleTest(ModuleCase):
gem.sources_add
gem.sources_remove
'''
source = 'http://gems.github.com'
source = 'http://gemcutter.org/'
self.run_function('gem.sources_add', [source])
sources_list = self.run_function('gem.sources_list')

View File

@ -123,25 +123,25 @@ class WinLgpoTest(ModuleCase):
ret = self.run_function('lgpo.set_computer_policy',
(policy_name, policy_config))
log.debug('lgpo set_computer_policy ret == %s', ret)
cmd = ['lgpo.exe',
'/parse',
'/m',
r'c:\Windows\System32\GroupPolicy\Machine\Registry.pol']
if assert_true:
self.assertTrue(ret)
lgpo_output = self.run_function(
'cmd.run',
(),
cmd='lgpo.exe /parse /m c:\\Windows\\System32\\GroupPolicy\\Machine\\Registry.pol')
lgpo_output = self.run_function('cmd.run', (), cmd=' '.join(cmd))
# validate that the lgpo output doesn't say the format is invalid
self.assertIsNone(
re.search(
r'Invalid file format\.',
lgpo_output,
re.IGNORECASE), 'Failed validating Registry.pol file format')
re.search(r'Invalid file format\.', lgpo_output, re.IGNORECASE),
msg='Failed validating Registry.pol file format')
# validate that the regexes we expect are in the output
for expected_regex in expected_regexes:
match = re.search(
expected_regex,
lgpo_output,
re.IGNORECASE)
self.assertIsNotNone(match, 'Failed validating policy "{0}" configuration, regex "{1}" not found in lgpo output'.format(policy_name, expected_regex))
match = re.search(expected_regex, lgpo_output, re.IGNORECASE)
self.assertIsNotNone(
match,
msg='Failed validating policy "{0}" configuration, regex '
'"{1}" not found in lgpo output:\n{2}'
''.format(policy_name, expected_regex, lgpo_output))
else:
# expecting it to fail
self.assertNotEqual(ret, True)
@ -255,11 +255,97 @@ class WinLgpoTest(ModuleCase):
'Not Configured',
[r'; Source file: c:\\windows\\system32\\grouppolicy\\machine\\registry.pol[\s]*; PARSING COMPLETED.'])
@destructiveTest
def test_set_computer_policy_Pol_HardenedPaths(self):
# Disable Pol_HardenedPaths
log.debug('Attempting to disable Pol_HardenedPaths')
self._testComputerAdmxPolicy(
'Pol_HardenedPaths',
'Disabled',
[r'Computer[\s]*Software\\policies\\Microsoft\\Windows\\NetworkProvider\\HardenedPaths[\s]*\*[\s]*DELETEALLVALUES'])
# Configure Pol_HardenedPaths
log.debug('Attempting to configure Pol_HardenedPaths')
self._testComputerAdmxPolicy(
'Pol_HardenedPaths',
{
'Hardened UNC Paths': {
r'\\*\NETLOGON': 'RequireMutualAuthentication=1, RequireIntegrity=1',
r'\\*\SYSVOL': 'RequireMutualAuthentication=1, RequireIntegrity=1'
}
},
[
r'Computer[\s]*Software\\policies\\Microsoft\\Windows\\NetworkProvider\\HardenedPaths[\s]*\\\\\*\\NETLOGON[\s]*SZ:RequireMutualAuthentication=1, RequireIntegrity=1[\s]*',
r'Computer[\s]*Software\\policies\\Microsoft\\Windows\\NetworkProvider\\HardenedPaths[\s]*\\\\\*\\SYSVOL[\s]*SZ:RequireMutualAuthentication=1, RequireIntegrity=1[\s]*',
])
# Not Configure Pol_HardenedPaths
log.debug('Attempting to set Pol_HardenedPaths to Not Configured')
self._testComputerAdmxPolicy(
'Pol_HardenedPaths',
'Not Configured',
[r'; Source file: c:\\windows\\system32\\grouppolicy\\machine\\registry.pol[\s]*; PARSING COMPLETED.'])
@destructiveTest
def test_set_computer_policy_WindowsUpdate(self):
'''
Test setting/unsetting/changing WindowsUpdate policy
'''
the_policy = {
'Configure automatic updating': '4 - Auto download and schedule the install',
'Install during automatic maintenance': False,
'Scheduled install day': '7 - Every Saturday',
'Scheduled install time': '17:00',
'Install updates for other Microsoft products': True
}
the_policy_check = [
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*NoAutoUpdate[\s]*DWORD:0',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*AUOptions[\s]*DWORD:4',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*AutomaticMaintenanceEnabled[\s]*DELETE',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*ScheduledInstallDay[\s]*DWORD:7',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*ScheduledInstallTime[\s]*DWORD:17',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*AllowMUUpdateService[\s]*DWORD:1\s*'
]
# Configure Automatic Updates has different options in 2016 than in 2012
# and has only one boolean item, so we'll test it "False" in this block
# and then "True" in next block
if self.osrelease in ['2012Server', '2012ServerR2']:
the_policy = {
'Configure automatic updating': '4 - Auto download and schedule the install',
'Install during automatic maintenance': False,
'Schedule install day': '7 - Every Saturday',
'Schedule install time': '17:00',
}
the_policy_check = [
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*NoAutoUpdate[\s]*DWORD:0',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*AUOptions[\s]*DWORD:4',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*AutomaticMaintenanceEnabled[\s]*DELETE',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*ScheduledInstallDay[\s]*DWORD:7',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*ScheduledInstallTime[\s]*DWORD:17',
]
# test as False
self._testComputerAdmxPolicy(r'Windows Components\Windows Update\Configure Automatic Updates',
the_policy,
the_policy_check)
# configure as True for "enable Automatic Updates" test below
the_policy = {
'Configure automatic updating': '4 - Auto download and schedule the install',
'Install during automatic maintenance': True,
'Schedule install day': '7 - Every Saturday',
'Schedule install time': '17:00',
}
the_policy_check = [
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*NoAutoUpdate[\s]*DWORD:0',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*AUOptions[\s]*DWORD:4',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*AutomaticMaintenanceEnabled[\s]*DWORD:1\s*',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*ScheduledInstallDay[\s]*DWORD:7',
r'Computer[\s]*Software\\Policies\\Microsoft\\Windows\\WindowsUpdate\\AU[\s]*ScheduledInstallTime[\s]*DWORD:17',
]
# enable Automatic Updates
self._testComputerAdmxPolicy(r'Windows Components\Windows Update\Configure Automatic Updates',
the_policy,
the_policy_check)
# disable Configure Automatic Updates
self._testComputerAdmxPolicy(r'Windows Components\Windows Update\Configure Automatic Updates',
'Disabled',

View File

@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
import copy
import logging
import os
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.mixins import SaltReturnAssertsMixin
# Import Salt Testing Libs
from tests.support.mock import MagicMock, patch
import tests.integration as integration
# Import Salt libs
import salt.utils.schedule
import salt.utils.platform
from salt.modules.test import ping as ping
log = logging.getLogger(__name__)
ROOT_DIR = os.path.join(integration.TMP, 'schedule-unit-tests')
SOCK_DIR = os.path.join(ROOT_DIR, 'test-socks')
DEFAULT_CONFIG = salt.config.minion_config(None)
DEFAULT_CONFIG['conf_dir'] = ROOT_DIR
DEFAULT_CONFIG['root_dir'] = ROOT_DIR
DEFAULT_CONFIG['sock_dir'] = SOCK_DIR
DEFAULT_CONFIG['pki_dir'] = os.path.join(ROOT_DIR, 'pki')
DEFAULT_CONFIG['cachedir'] = os.path.join(ROOT_DIR, 'cache')
class SchedulerHelpersTest(ModuleCase, SaltReturnAssertsMixin):
    '''
    Test scheduler helper functions
    '''
    def setUp(self):
        # Patch out clean_proc_dir so constructing the Schedule does not
        # touch the proc directory on disk.
        with patch('salt.utils.schedule.clean_proc_dir', MagicMock(return_value=None)):
            functions = {'test.ping': ping}
            self.schedule = salt.utils.schedule.Schedule(
                copy.deepcopy(DEFAULT_CONFIG),
                functions,
                returners={})
            self.schedule.opts['loop_interval'] = 1

    def tearDown(self):
        self.schedule.reset()

    def test_get_schedule(self):
        '''
        verify that the _get_schedule function works
        when remove_hidden is True and schedule data
        contains enabled key
        '''
        job_name = 'test_get_schedule'
        job = {
            'schedule': {
                'enabled': True,
                job_name: {
                    'function': 'test.ping',
                    'seconds': 60,
                },
            },
        }
        # Add the job to the scheduler
        self.schedule.opts.update(job)

        returned = self.schedule._get_schedule(remove_hidden=True)
        self.assertEqual(job['schedule'], returned)

View File

@ -934,6 +934,31 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
self.assertFalse(os.path.exists(straydir))
self.assertTrue(os.path.isdir(name))
def test_directory_is_idempotent(self):
    '''
    Ensure the file.directory state produces no changes when rerun.
    '''
    dir_name = os.path.join(TMP, 'a_dir_twice')

    if IS_WINDOWS:
        user = os.environ.get('USERNAME', 'Administrators')
        domain = os.environ.get('USERDOMAIN', '')
        first_kwargs = {'win_owner': '{0}\\{1}'.format(domain, user)}
        # NOTE(review): the second run passes the bare username (no
        # domain) where the first used DOMAIN\\user — presumably to
        # exercise owner comparison by SID; confirm this is intentional.
        second_kwargs = {'win_owner': user}
    else:
        first_kwargs = {}
        second_kwargs = {}

    ret = self.run_state('file.directory', name=dir_name, **first_kwargs)
    self.assertSaltTrueReturn(ret)

    ret = self.run_state('file.directory', name=dir_name, **second_kwargs)
    self.assertSaltTrueReturn(ret)
    self.assertSaltStateChangesEqual(ret, {})
@with_tempdir()
def test_directory_clean_exclude(self, base_dir):
'''

View File

@ -73,12 +73,29 @@ class TestGemModule(TestCase, LoaderModuleMockMixin):
runas=None
)
def test_install_pre_rubygems_3(self):
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
with patch.dict(gem.__salt__,
{'rvm.is_installed': MagicMock(return_value=False),
'rbenv.is_installed': MagicMock(return_value=False),
'cmd.run_all': mock}),\
patch.object(
gem, '_has_rubygems_3', MagicMock(return_value=True)):
gem.install('rails', pre_releases=True)
mock.assert_called_once_with(
['gem', 'install', 'rails', '--no-document', '--prerelease'],
runas=None,
python_shell=False
)
def test_install_pre(self):
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
with patch.dict(gem.__salt__,
{'rvm.is_installed': MagicMock(return_value=False),
'rbenv.is_installed': MagicMock(return_value=False),
'cmd.run_all': mock}):
'cmd.run_all': mock}),\
patch.object(
gem, '_has_rubygems_3', MagicMock(return_value=False)):
gem.install('rails', pre_releases=True)
mock.assert_called_once_with(
['gem', 'install', 'rails', '--no-rdoc', '--no-ri', '--pre'],

View File

@ -0,0 +1,297 @@
# -*- coding: utf-8 -*-
# Import Python Libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
# Import Salt libs
import salt.modules.solarisips as solarisips
import salt.modules.pkg_resource as pkg_resource
import salt.utils.data
@skipIf(NO_MOCK, NO_MOCK_REASON)
class IpsTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.modules.solarisips
'''
def setup_loader_modules(self):
    # Loader fixture for the tests: supplies grains for pkg_resource and
    # opts/utils for solarisips.
    # NOTE(review): this binds the shared salt.config.DEFAULT_MINION_OPTS
    # dict without copying it, so any mutation by the loader or a test
    # would leak across test cases — consider a deep copy; TODO confirm.
    self.opts = opts = salt.config.DEFAULT_MINION_OPTS
    utils = salt.loader.utils(
        opts,
        whitelist=['pkg', 'path', 'platform'])
    return {
        pkg_resource: {
            '__grains__': {
                'osarch': 'sparcv9',
                'os_family': 'Solaris',
                'osmajorrelease': 11,
                'kernelrelease': 5.11,
            },
        },
        solarisips: {
            '__opts__': opts,
            '__utils__': utils,
        }
    }
def test_install_single_package(self):
'''
Test installing a single package
'''
pkg_list_pre = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
}
pkg_list_post = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
'pkg://solaris/text/less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
}
install_cmd = {
'pid': 1234,
'retcode': 0,
'stderr': '',
'stdout': '',
}
mock_install_cmd = MagicMock(return_value=install_cmd)
list_pkgs_responses = [pkg_list_pre, pkg_list_post]
with patch.object(solarisips, 'is_installed', return_value=False), \
patch.object(solarisips, 'list_pkgs', side_effect=list_pkgs_responses), \
patch.dict(solarisips.__salt__, {'cmd.run_all': mock_install_cmd}):
result = solarisips.install(name='less', refresh=False)
self.assertEqual(result, salt.utils.data.compare_dicts(pkg_list_pre, pkg_list_post))
def test_install_list_pkgs(self):
'''
Test installing a list of packages
'''
pkg_list_pre = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
}
pkg_list_post = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
'pkg://solaris/text/less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
'pkg://solaris/system/library/security/libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z',
}
install_cmd = {
'pid': 1234,
'retcode': 0,
'stderr': '',
'stdout': '',
}
mock_install_cmd = MagicMock(return_value=install_cmd)
list_pkgs_responses = [pkg_list_pre, pkg_list_post]
with patch.object(solarisips, 'is_installed', return_value=False), \
patch.object(solarisips, 'list_pkgs', side_effect=list_pkgs_responses), \
patch.dict(solarisips.__salt__, {'cmd.run_all': mock_install_cmd}):
result = solarisips.install(pkgs=['less', 'libsasl'], refresh=False)
self.assertEqual(result, salt.utils.data.compare_dicts(pkg_list_pre, pkg_list_post))
    def test_install_dict_pkgs_no_version(self):
        '''
        Test installing packages supplied as a list of single-entry dicts
        with empty version strings (pkgs=[{'less': ''}, {'libsasl': ''}]).
        '''
        # Inventory reported by list_pkgs() before the install runs.
        pkg_list_pre = {
            'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
            'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
            'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
        }
        # Same inventory plus the two packages the install should add.
        pkg_list_post = {
            'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
            'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
            'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
            'pkg://solaris/text/less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
            'pkg://solaris/system/library/security/libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z',
        }
        # Successful pkg(1) invocation as returned by cmd.run_all.
        install_cmd = {
            'pid': 1234,
            'retcode': 0,
            'stderr': '',
            'stdout': '',
        }
        mock_install_cmd = MagicMock(return_value=install_cmd)
        # list_pkgs is called twice by install(): once before, once after.
        list_pkgs_responses = [pkg_list_pre, pkg_list_post]
        with patch.object(solarisips, 'is_installed', return_value=False), \
                patch.object(solarisips, 'list_pkgs', side_effect=list_pkgs_responses), \
                patch.dict(solarisips.__salt__, {'cmd.run_all': mock_install_cmd}):
            result = solarisips.install(pkgs=[{'less': ''}, {'libsasl': ''}], refresh=False)
        # install() should report exactly the pre/post inventory diff.
        self.assertEqual(result, salt.utils.data.compare_dicts(pkg_list_pre, pkg_list_post))
def test_install_dict_pkgs_with_version(self):
'''
Test installing a list of packages
'''
pkg_list_pre = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
}
pkg_list_post = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
'pkg://solaris/text/less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
'pkg://solaris/system/library/security/libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z',
}
install_cmd = {
'pid': 1234,
'retcode': 0,
'stderr': '',
'stdout': '',
}
mock_install_cmd = MagicMock(return_value=install_cmd)
list_pkgs_responses = [pkg_list_pre, pkg_list_post]
with patch.object(solarisips, 'is_installed', return_value=False), \
patch.object(solarisips, 'list_pkgs', side_effect=list_pkgs_responses), \
patch.dict(solarisips.__salt__, {'cmd.run_all': mock_install_cmd}):
result = solarisips.install(pkgs=[
{'less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z'},
{'libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z'}], refresh=False)
self.assertEqual(result, salt.utils.data.compare_dicts(pkg_list_pre, pkg_list_post))
def test_install_already_installed_single_pkg(self):
'''
Test installing a package that is already installed
'''
result = None
expected_result = 'Package already installed.'
with patch.object(solarisips, 'is_installed', return_value=True):
result = solarisips.install(name='less')
self.assertEqual(result, expected_result)
def test_install_dict_pkgs_with_version_validate_cmd(self):
'''
Test installing a list of packages
'''
def check_param(arg, **kwargs):
self.assertEqual(arg, [
'pkg',
'install',
'-v',
'--accept',
'less@458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
'libsasl@0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z'
])
return {
'pid': 1234,
'retcode': 0,
'stderr': '',
'stdout': '',
}
pkg_list_pre = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
}
pkg_list_post = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
'pkg://solaris/text/less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
'pkg://solaris/system/library/security/libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z',
}
mock_install_cmd = MagicMock(side_effect=check_param)
list_pkgs_responses = [pkg_list_pre, pkg_list_post]
with patch.object(solarisips, 'is_installed', return_value=False), \
patch.object(solarisips, 'list_pkgs', side_effect=list_pkgs_responses):
with patch.dict(solarisips.__salt__, {'cmd.run_all': mock_install_cmd}):
result = solarisips.install(pkgs=[
{'less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z'},
{'libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z'}], refresh=False)
def test_install_dict_pkgs_no_version_validate_cmd(self):
'''
Test installing a list of packages
'''
def check_param(arg, **kwargs):
self.assertEqual(arg, [
'pkg',
'install',
'-v',
'--accept',
'less',
'libsasl'
])
return {
'pid': 1234,
'retcode': 0,
'stderr': '',
'stdout': '',
}
pkg_list_pre = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
}
pkg_list_post = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
'pkg://solaris/text/less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
'pkg://solaris/system/library/security/libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z',
}
mock_install_cmd = MagicMock(side_effect=check_param)
list_pkgs_responses = [pkg_list_pre, pkg_list_post]
with patch.object(solarisips, 'is_installed', return_value=False), \
patch.object(solarisips, 'list_pkgs', side_effect=list_pkgs_responses):
with patch.dict(solarisips.__salt__, {'cmd.run_all': mock_install_cmd}):
result = solarisips.install(pkgs=[
{'less': ''},
{'libsasl': ''}], refresh=False)
def test_install_list_pkgs_validate_cmd(self):
'''
Test installing a list of packages
'''
def check_param(arg, **kwargs):
self.assertEqual(arg, [
'pkg',
'install',
'-v',
'--accept',
'less',
'libsasl'
])
return {
'pid': 1234,
'retcode': 0,
'stderr': '',
'stdout': '',
}
pkg_list_pre = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
}
pkg_list_post = {
'pkg://solaris/compress/bzip2': '1.0.6,5.11-0.175.3.10.0.4.0:20160630T215500Z',
'pkg://solaris/compress/gzip': '1.5,5.11-0.175.3.0.0.30.0:20150821T161446Z',
'pkg://solaris/compress/p7zip': '16.2.3,5.11-0.175.3.34.0.2.0:20180614T204908Z',
'pkg://solaris/text/less': '458,5.11-0.175.3.0.0.30.0:20150821T172730Z',
'pkg://solaris/system/library/security/libsasl': '0.5.11,5.11-0.175.3.32.0.1.0:20180406T191209Z',
}
mock_install_cmd = MagicMock(side_effect=check_param)
list_pkgs_responses = [pkg_list_pre, pkg_list_post]
with patch.object(solarisips, 'is_installed', return_value=False), \
patch.object(solarisips, 'list_pkgs', side_effect=list_pkgs_responses):
with patch.dict(solarisips.__salt__, {'cmd.run_all': mock_install_cmd}):
result = solarisips.install(pkgs=['less', 'libsasl'], refresh=False)

View File

@@ -149,6 +149,7 @@ class BadTestModuleNamesTestCase(TestCase):
'integration.scheduler.test_postpone',
'integration.scheduler.test_skip',
'integration.scheduler.test_maxrunning',
'integration.scheduler.test_helpers',
'integration.shell.test_spm',
'integration.shell.test_cp',
'integration.shell.test_syndic',

View File

@@ -25,8 +25,10 @@ if sys.version_info >= (3,):
else:
BUILTINS_OPEN = '__builtin__.open'
zyppnotify = imp.load_source('zyppnotify', os.path.sep.join(os.path.dirname(__file__).split(
os.path.sep)[:-2] + ['scripts', 'suse', 'zypper', 'plugins', 'commit', 'zyppnotify']))
ZYPPNOTIFY_FILE = os.path.sep.join(
os.path.dirname(__file__).split(os.path.sep)[:-2] +
['scripts', 'suse', 'zypper', 'plugins', 'commit', 'zyppnotify']
)
@skipIf(NO_MOCK, NO_MOCK_REASON)
@@ -40,6 +42,7 @@ class ZyppPluginsTestCase(TestCase):
Returns:
'''
zyppnotify = imp.load_source('zyppnotify', ZYPPNOTIFY_FILE)
drift = zyppnotify.DriftDetector()
drift._get_mtime = MagicMock(return_value=123)
drift._get_checksum = MagicMock(return_value='deadbeef')

View File

@@ -9,9 +9,11 @@
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
import os
# Import Salt Testing libs
from tests.support.unit import TestCase
from tests.support.paths import BASE_FILES
# Import salt libs
from salt.utils.filebuffer import BufferedReader, InvalidFileMode
@@ -30,3 +32,22 @@ class TestFileBuffer(TestCase):
with self.assertRaises(InvalidFileMode):
BufferedReader('/tmp/foo', mode='wb')
def test_issue_51309(self):
'''
https://github.com/saltstack/salt/issues/51309
'''
file_name = os.path.join(BASE_FILES, 'grail', 'scene33')
def find_value(text):
stripped_text = text.strip()
try:
with BufferedReader(file_name) as breader:
for chunk in breader:
if stripped_text in chunk:
return True
return False
except (IOError, OSError):
return False
self.assertTrue(find_value('We have the Holy Hand Grenade'))

View File

@@ -72,6 +72,7 @@ integration.runners.test_salt
integration.scheduler.test_eval
integration.scheduler.test_postpone
integration.scheduler.test_skip
integration.scheduler.test_helpers
integration.sdb.test_env
integration.shell.test_arguments
integration.shell.test_auth