Merge pull request #47801 from rallytime/merge-develop
[develop] Merge forward from 2018.3 to develop
commit dc7fd3aca0
@@ -546,6 +546,10 @@
# targeted with the normal -N argument to salt-ssh.
#ssh_list_nodegroups: {}

# salt-ssh has the ability to update the flat roster file if a minion is not
# found in the roster. Set this to True to enable it.
#ssh_update_roster: False

#####      Master Module Management     #####
##########################################
# Manage how master side modules are loaded.
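As an illustration of the ``ssh_list_nodegroups`` option shown earlier in this
hunk, a populated entry in the master config could look like the sketch below
(group and minion names are made up; the exact value form, list of minion IDs
or a compound matcher string, follows the normal nodegroup conventions):

.. code-block:: yaml

    ssh_list_nodegroups:
      webservers:
        - web1
        - web2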
@@ -341757,7 +341757,6 @@ netacl_example:
Or:
.INDENT 7.0
.INDENT 3.5
.sp
.nf
.ft C
netacl_example:
@@ -3275,3 +3275,31 @@ URL of the repository:
Replace ``<commit_id>`` with the SHA1 hash of a commit ID. Specifying a commit
ID is useful in that it allows one to revert back to a previous version in the
event that an error is introduced in the latest revision of the repo.

``ssh_merge_pillar``
--------------------

.. versionadded:: 2018.3.2

Default: ``True``

Merges the compiled pillar data with the pillar data already available globally.
This is useful when using ``salt-ssh`` or ``salt-call --local`` and overriding
the pillar data in a state file:

.. code-block:: yaml

    apply_showpillar:
      module.run:
        - name: state.apply
        - mods:
          - showpillar
        - kwargs:
            pillar:
              test: "foo bar"

If set to ``True``, the ``showpillar`` state will have access to the
global pillar data.

If set to ``False``, only the overriding pillar data will be available
to the ``showpillar`` state.
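The ``showpillar`` state file used in the example is not shown in this change;
a minimal sketch of what such a state could contain is given below (the state
ID and message are purely illustrative):

.. code-block:: yaml

    # showpillar.sls - report the pillar value visible to this run
    report_pillar:
      test.show_notification:
        - text: "test pillar value is {{ pillar.get('test', 'not set') }}"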
@@ -40,8 +40,9 @@ Beacons are typically enabled by placing a ``beacons:`` top level block in

    beacons:
      inotify:
        /etc/important_file: {}
        /opt: {}
        - files:
            /etc/important_file: {}
            /opt: {}

The beacon system, like many others in Salt, can also be configured via the
minion pillar, grains, or local config file.
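As a sketch of the pillar-based approach mentioned above, the same list-based
beacon block can be delivered through a minion's pillar (the pillar file
location below is only an example):

.. code-block:: yaml

    # e.g. /srv/pillar/beacons.sls
    beacons:
      inotify:
        - files:
            /etc/important_file: {}
        - disable_during_state_run: True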
@@ -50,6 +51,8 @@ minion pillar, grains, or local config file.
The `inotify` beacon only works on OSes that have `inotify` kernel support.
Currently this excludes FreeBSD, macOS, and Windows.

All beacon configuration is done using list based configuration.

Beacon Monitoring Interval
--------------------------
@@ -61,21 +64,23 @@ and 10-second intervals:

    beacons:
      inotify:
        /etc/important_file: {}
        /opt: {}
        interval: 5
        disable_during_state_run: True
        - files:
            /etc/important_file: {}
            /opt: {}
        - interval: 5
        - disable_during_state_run: True
      load:
        1m:
          - 0.0
          - 2.0
        5m:
          - 0.0
          - 1.5
        15m:
          - 0.1
          - 1.0
        interval: 10
        - averages:
            1m:
              - 0.0
              - 2.0
            5m:
              - 0.0
              - 1.5
            15m:
              - 0.1
              - 1.0
        - interval: 10

.. _avoid-beacon-event-loops:
@@ -96,8 +101,9 @@ which point the normal beacon interval will resume.

    beacons:
      inotify:
        /etc/important_file: {}
        disable_during_state_run: True
        - files:
            /etc/important_file: {}
        - disable_during_state_run: True

.. _beacon-example:
@@ -137,10 +143,11 @@ On the Salt minion, add the following configuration to

    beacons:
      inotify:
        /etc/important_file:
          mask:
            - modify
        disable_during_state_run: True
        - files:
            /etc/important_file:
              mask:
                - modify
        - disable_during_state_run: True

Save the configuration file and restart the minion service. The beacon is now
set up to notify salt upon modifications made to the file.
@@ -6,7 +6,7 @@ Debian GNU/Linux / Raspbian

Debian GNU/Linux distribution and some derivatives such as Raspbian already
have included Salt packages to their repositories. However, current stable
release codenamed "Jessie" contains old outdated Salt release. It is
Debian release contains old outdated Salt releases. It is
recommended to use SaltStack repository for Debian as described
:ref:`below <installation-debian-repo>`.
@@ -33,11 +33,13 @@ Instructions are at https://repo.saltstack.com/#debian.
Installation from the Debian / Raspbian Official Repository
===========================================================

Stretch (Testing) and Sid (Unstable) distributions are already contain mostly
up-to-date Salt packages built by Debian Salt Team. You can install Salt
components directly from Debian.
The Debian distributions contain mostly old Salt packages
built by the Debian Salt Team. You can install Salt
components directly from Debian but it is recommended to
use the instructions above for the packages from the official
Salt repository.

On Jessie (Stable) there is an option to install Salt minion from Stretch with
On Jessie there is an option to install Salt minion from Stretch with
`python-tornado` dependency from `jessie-backports` repositories.

To install fresh release of Salt minion on Jessie:
@@ -79,7 +81,7 @@ To install fresh release of Salt minion on Jessie:
      apt-get update
      apt-get install python-zmq python-tornado/stretch salt-common/stretch

#. Install Salt minion package from Stretch:
#. Install Salt minion package from Latest Debian Release:

   .. code-block:: bash

@@ -13,3 +13,13 @@ used as part of a salt-minion process running on the master. This will allow
the minion to have pillars assigned to it, and will still allow the engine to
create a LocalClient connection to the master ipc sockets to control
environments.

Changes to Automatically Updating the Roster File
-------------------------------------------------

In ``2018.3.0`` salt-ssh was configured to automatically update the flat roster
file if a minion was not found for salt-ssh. This was decided to be
undesirable as a default. The ``--skip-roster`` flag has been removed and
replaced with ``--update-roster``, which will enable salt-ssh to add minions
to the flat roster file. This behavior can also be enabled by setting
``ssh_update_roster: True`` in the master config file.
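For example, opting in through the master configuration file looks like this
(the file path is the conventional location and is shown only for illustration):

.. code-block:: yaml

    # /etc/salt/master
    ssh_update_roster: True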
@@ -1,8 +1,10 @@
Jinja2
msgpack-python>0.3,!=0.5.5
# This should be changed to msgpack-python for Packages
# msgpack-python>0.3,!=0.5.5
msgpack>=0.5,!=0.5.5
PyYAML
MarkupSafe
requests>=1.0.0
tornado>=4.2.1,<5.0
tornado>=4.2.1,<6.0
# Required by Tornado to handle threads stuff.
futures>=2.0
@ -407,7 +407,7 @@ class SSH(object):
|
||||
'host': hostname,
|
||||
'user': user,
|
||||
}
|
||||
if not self.opts.get('ssh_skip_roster'):
|
||||
if self.opts.get('ssh_update_roster'):
|
||||
self._update_roster()
|
||||
|
||||
def get_pubkey(self):
|
||||
|
@ -991,6 +991,7 @@ VALID_OPTS = {
|
||||
'ssh_identities_only': bool,
|
||||
'ssh_log_file': six.string_types,
|
||||
'ssh_config_file': six.string_types,
|
||||
'ssh_merge_pillar': bool,
|
||||
|
||||
# Enable ioflo verbose logging. Warning! Very verbose!
|
||||
'ioflo_verbose': int,
|
||||
@ -1501,6 +1502,7 @@ DEFAULT_MINION_OPTS = {
|
||||
},
|
||||
'discovery': False,
|
||||
'schedule': {},
|
||||
'ssh_merge_pillar': True
|
||||
}
|
||||
|
||||
DEFAULT_MASTER_OPTS = {
|
||||
@ -2108,7 +2110,7 @@ def _validate_ssh_minion_opts(opts):
|
||||
|
||||
for opt_name in list(ssh_minion_opts):
|
||||
if re.match('^[a-z0-9]+fs_', opt_name, flags=re.IGNORECASE) \
|
||||
or 'pillar' in opt_name \
|
||||
or ('pillar' in opt_name and not 'ssh_merge_pillar' == opt_name) \
|
||||
or opt_name in ('fileserver_backend',):
|
||||
log.warning(
|
||||
'\'%s\' is not a valid ssh_minion_opts parameter, ignoring',
|
||||
|
@ -1389,14 +1389,12 @@ class RemoteClient(Client):
|
||||
'''
|
||||
Return the metadata derived from the master_tops system
|
||||
'''
|
||||
salt.utils.versions.warn_until(
|
||||
'Magnesium',
|
||||
'The _ext_nodes master function has '
|
||||
'been renamed to _master_tops. To ensure '
|
||||
'compatibility when using older Salt masters '
|
||||
'we continue to pass the function as _ext_nodes.'
|
||||
log.debug(
|
||||
'The _ext_nodes master function has been renamed to _master_tops. '
|
||||
'To ensure compatibility when using older Salt masters we will '
|
||||
'continue to invoke the function as _ext_nodes until the '
|
||||
'Magnesium release.'
|
||||
)
|
||||
|
||||
# TODO: Change back to _master_tops
|
||||
# for Magnesium release
|
||||
load = {'cmd': '_ext_nodes',
|
||||
|
@ -850,7 +850,8 @@ class FSChan(object):
|
||||
self.opts['__fs_update'] = True
|
||||
else:
|
||||
self.fs.update()
|
||||
self.cmd_stub = {'master_tops': {}}
|
||||
self.cmd_stub = {'master_tops': {},
|
||||
'ext_nodes': {}}
|
||||
|
||||
def send(self, load, tries=None, timeout=None, raw=False): # pylint: disable=unused-argument
|
||||
'''
|
||||
|
@ -253,7 +253,7 @@ def file_hash(load, fnd):
|
||||
except OSError:
|
||||
pass
|
||||
return file_hash(load, fnd)
|
||||
if os.path.getmtime(path) == mtime:
|
||||
if str(os.path.getmtime(path)) == mtime:
|
||||
# check if mtime changed
|
||||
ret['hsum'] = hsum
|
||||
return ret
|
||||
|
@ -1231,6 +1231,7 @@ _OS_NAME_MAP = {
|
||||
'synology': 'Synology',
|
||||
'nilrt': 'NILinuxRT',
|
||||
'nilrt-xfce': 'NILinuxRT-XFCE',
|
||||
'poky': 'Poky',
|
||||
'manjaro': 'Manjaro',
|
||||
'manjarolin': 'Manjaro',
|
||||
'antergos': 'Antergos',
|
||||
@ -1790,7 +1791,7 @@ def os_data():
|
||||
osarch = __salt__['cmd.run']('dpkg --print-architecture').strip()
|
||||
elif grains.get('os_family') == 'RedHat':
|
||||
osarch = __salt__['cmd.run']('rpm --eval %{_host_cpu}').strip()
|
||||
elif grains.get('os_family') == 'NILinuxRT':
|
||||
elif grains.get('os_family') in ('NILinuxRT', 'Poky'):
|
||||
archinfo = {}
|
||||
for line in __salt__['cmd.run']('opkg print-architecture').splitlines():
|
||||
if line.startswith('arch'):
|
||||
|
@ -1629,7 +1629,11 @@ class LazyLoader(salt.utils.lazy.LazyDict):
|
||||
return True
|
||||
# if the modulename isn't in the whitelist, don't bother
|
||||
if self.whitelist and mod_name not in self.whitelist:
|
||||
raise KeyError
|
||||
log.error(
|
||||
'Failed to load function %s because its module (%s) is '
|
||||
'not in the whitelist: %s', key, mod_name, self.whitelist
|
||||
)
|
||||
raise KeyError(key)
|
||||
|
||||
def _inner_load(mod_name):
|
||||
for name in self._iter_files(mod_name):
|
||||
|
@ -103,6 +103,20 @@ class SysLogHandler(ExcInfoOnLogLevelFormatMixIn, logging.handlers.SysLogHandler
|
||||
'''
|
||||
Syslog handler which properly handles exc_info on a per handler basis
|
||||
'''
|
||||
def handleError(self, record):
|
||||
'''
|
||||
Override the default error handling mechanism for py3
|
||||
Deal with syslog os errors when the log file does not exist
|
||||
'''
|
||||
handled = False
|
||||
if sys.stderr and sys.version_info >= (3, 5, 4):
|
||||
t, v, tb = sys.exc_info()
|
||||
if t.__name__ in 'FileNotFoundError':
|
||||
sys.stderr.write('[WARNING ] The log_file does not exist. Logging not setup correctly or syslog service not started.\n')
|
||||
handled = True
|
||||
|
||||
if not handled:
|
||||
super(SysLogHandler, self).handleError(record)
|
||||
|
||||
|
||||
class RotatingFileHandler(ExcInfoOnLogLevelFormatMixIn, logging.handlers.RotatingFileHandler, NewStyleClassMixIn):
|
||||
|
@ -410,9 +410,9 @@ def list_(name,
|
||||
item.sort()
|
||||
|
||||
if verbose:
|
||||
ret = {'dirs': sorted(dirs),
|
||||
'files': sorted(files),
|
||||
'links': sorted(links)}
|
||||
ret = {'dirs': sorted(salt.utils.data.decode_list(dirs)),
|
||||
'files': sorted(salt.utils.data.decode_list(files)),
|
||||
'links': sorted(salt.utils.data.decode_list(links))}
|
||||
ret['top_level_dirs'] = [x for x in ret['dirs']
|
||||
if x.count('/') == 1]
|
||||
ret['top_level_files'] = [x for x in ret['files']
|
||||
|
@ -43,7 +43,7 @@ from salt.ext import six
|
||||
from salt.exceptions import CommandExecutionError, TimedProcTimeoutError, \
|
||||
SaltInvocationError
|
||||
from salt.log import LOG_LEVELS
|
||||
from salt.ext.six.moves import range, zip
|
||||
from salt.ext.six.moves import range, zip, map
|
||||
|
||||
# Only available on POSIX systems, nonfatal on windows
|
||||
try:
|
||||
@ -410,6 +410,19 @@ def _run(cmd,
|
||||
|
||||
return win_runas(cmd, runas, password, cwd)
|
||||
|
||||
if runas and salt.utils.platform.is_darwin():
|
||||
# we need to insert the user simulation into the command itself and not
|
||||
# just run it from the environment on macOS as that
|
||||
# method doesn't work properly when run as root for certain commands.
|
||||
if isinstance(cmd, (list, tuple)):
|
||||
cmd = ' '.join(map(_cmd_quote, cmd))
|
||||
|
||||
cmd = 'su -l {0} -c "{1}"'.format(runas, cmd)
|
||||
# set runas to None, because if you try to run `su -l` as well as
|
||||
# simulate the environment macOS will prompt for the password of the
|
||||
# user and will cause salt to hang.
|
||||
runas = None
|
||||
|
||||
if runas:
|
||||
# Save the original command before munging it
|
||||
try:
|
||||
|
@ -32,7 +32,6 @@ import salt.utils.files
|
||||
import salt.utils.path
|
||||
import salt.utils.platform
|
||||
import salt.utils.stringutils
|
||||
import salt.utils.mac_utils
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.utils.versions import LooseVersion as _LooseVersion
|
||||
|
||||
@ -274,13 +273,11 @@ def list_(name=None, runas=None):
|
||||
return launchctl('list',
|
||||
label,
|
||||
return_stdout=True,
|
||||
output_loglevel='trace',
|
||||
runas=runas)
|
||||
|
||||
# Collect information on all services: will raise an error if it fails
|
||||
return launchctl('list',
|
||||
return_stdout=True,
|
||||
output_loglevel='trace',
|
||||
runas=runas)
|
||||
|
||||
|
||||
|
@ -119,7 +119,10 @@ def __virtual__():
|
||||
os.path.exists(os.path.join(NILRT_MODULE_STATE_PATH, 'modules.dep.md5sum'))):
|
||||
_update_nilrt_module_dep_info()
|
||||
return __virtualname__
|
||||
return (False, "Module opkg only works on nilrt based systems")
|
||||
|
||||
if os.path.isdir(OPKG_CONFDIR):
|
||||
return __virtualname__
|
||||
return False, "Module opkg only works on OpenEmbedded based systems"
|
||||
|
||||
|
||||
def latest_version(*names, **kwargs):
|
||||
|
@ -627,7 +627,7 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
|
||||
'''
|
||||
if 'no_chown' in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
'Flourine',
|
||||
'Fluorine',
|
||||
'The no_chown argument has been deprecated and is no longer used. '
|
||||
'Its functionality was removed in Boron.')
|
||||
kwargs.pop('no_chown')
|
||||
|
@ -65,21 +65,22 @@ def __virtual__():
|
||||
# The module will be exposed as `rpmbuild` on non-RPM based systems
|
||||
return 'rpmbuild'
|
||||
else:
|
||||
return False, 'The rpmbuild module could not be loaded: requires python-gnupg, gpg, rpm, rpmbuild, mock and createrepo utilities to be installed'
|
||||
return False, 'The rpmbuild module could not be loaded: requires python-gnupg, ' \
|
||||
'gpg, rpm, rpmbuild, mock and createrepo utilities to be installed'
|
||||
|
||||
|
||||
def _create_rpmmacros():
|
||||
def _create_rpmmacros(runas='root'):
|
||||
'''
|
||||
Create the .rpmmacros file in user's home directory
|
||||
'''
|
||||
home = os.path.expanduser('~')
|
||||
rpmbuilddir = os.path.join(home, 'rpmbuild')
|
||||
if not os.path.isdir(rpmbuilddir):
|
||||
os.makedirs(rpmbuilddir)
|
||||
__salt__['file.makedirs_perms'](name=rpmbuilddir, user=runas, group='mock')
|
||||
|
||||
mockdir = os.path.join(home, 'mock')
|
||||
if not os.path.isdir(mockdir):
|
||||
os.makedirs(mockdir)
|
||||
__salt__['file.makedirs_perms'](name=mockdir, user=runas, group='mock')
|
||||
|
||||
rpmmacros = os.path.join(home, '.rpmmacros')
|
||||
with salt.utils.files.fopen(rpmmacros, 'w') as afile:
|
||||
@ -92,7 +93,7 @@ def _create_rpmmacros():
|
||||
afile.write('%_gpg_name packaging@saltstack.com\n')
|
||||
|
||||
|
||||
def _mk_tree():
|
||||
def _mk_tree(runas='root'):
|
||||
'''
|
||||
Create the rpm build tree
|
||||
'''
|
||||
@ -100,7 +101,7 @@ def _mk_tree():
|
||||
paths = ['BUILD', 'RPMS', 'SOURCES', 'SPECS', 'SRPMS']
|
||||
for path in paths:
|
||||
full = os.path.join(basedir, path)
|
||||
os.makedirs(full)
|
||||
__salt__['file.makedirs_perms'](name=full, user=runas, group='mock')
|
||||
return basedir
|
||||
|
||||
|
||||
@ -116,7 +117,7 @@ def _get_spec(tree_base, spec, template, saltenv='base'):
|
||||
saltenv=saltenv)
|
||||
|
||||
|
||||
def _get_src(tree_base, source, saltenv='base'):
|
||||
def _get_src(tree_base, source, saltenv='base', runas='root'):
|
||||
'''
|
||||
Get the named sources and place them into the tree_base
|
||||
'''
|
||||
@ -127,6 +128,7 @@ def _get_src(tree_base, source, saltenv='base'):
|
||||
lsrc = __salt__['cp.get_url'](source, dest, saltenv=saltenv)
|
||||
else:
|
||||
shutil.copy(source, dest)
|
||||
__salt__['file.chown'](path=dest, user=runas, group='mock')
|
||||
|
||||
|
||||
def _get_distset(tgt):
|
||||
@ -171,7 +173,7 @@ def _get_deps(deps, tree_base, saltenv='base'):
|
||||
return deps_list
|
||||
|
||||
|
||||
def make_src_pkg(dest_dir, spec, sources, env=None, template=None, saltenv='base'):
|
||||
def make_src_pkg(dest_dir, spec, sources, env=None, template=None, saltenv='base', runas='root'):
|
||||
'''
|
||||
Create a source rpm from the given spec file and sources
|
||||
|
||||
@ -179,33 +181,74 @@ def make_src_pkg(dest_dir, spec, sources, env=None, template=None, saltenv='base
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' pkgbuild.make_src_pkg /var/www/html/ https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/rpm/python-libnacl.spec https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz
|
||||
salt '*' pkgbuild.make_src_pkg /var/www/html/
|
||||
https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/rpm/python-libnacl.spec
|
||||
https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz
|
||||
|
||||
This example command should build the libnacl SOURCE package and place it in
|
||||
/var/www/html/ on the minion
|
||||
|
||||
.. versionchanged:: 2017.7.0
|
||||
|
||||
dest_dir
|
||||
The directory on the minion to place the built package(s)
|
||||
|
||||
spec
|
||||
The location of the spec file (used for rpms)
|
||||
|
||||
sources
|
||||
The list of package sources
|
||||
|
||||
env
|
||||
A dictionary of environment variables to be set prior to execution.
|
||||
|
||||
template
|
||||
Run the spec file through a templating engine
|
||||
Optional argument; if no templating is desired, this can be left unset.
|
||||
|
||||
saltenv
|
||||
The saltenv to use for files downloaded from the salt fileserver
|
||||
|
||||
runas
|
||||
The user to run the build process as
|
||||
|
||||
.. versionadded:: 2018.3.2
|
||||
|
||||
|
||||
.. note::
|
||||
|
||||
using SHA256 as digest and minimum level dist el6
|
||||
|
||||
'''
|
||||
_create_rpmmacros()
|
||||
tree_base = _mk_tree()
|
||||
_create_rpmmacros(runas)
|
||||
tree_base = _mk_tree(runas)
|
||||
spec_path = _get_spec(tree_base, spec, template, saltenv)
|
||||
__salt__['file.chown'](path=spec_path, user=runas, group='mock')
|
||||
__salt__['file.chown'](path=tree_base, user=runas, group='mock')
|
||||
|
||||
if isinstance(sources, six.string_types):
|
||||
sources = sources.split(',')
|
||||
for src in sources:
|
||||
_get_src(tree_base, src, saltenv)
|
||||
_get_src(tree_base, src, saltenv, runas)
|
||||
|
||||
# make source rpms for dist el6 with SHA256, usable with mock on other dists
|
||||
cmd = 'rpmbuild --verbose --define "_topdir {0}" -bs --define "dist .el6" {1}'.format(tree_base, spec_path)
|
||||
__salt__['cmd.run'](cmd)
|
||||
retrc = __salt__['cmd.retcode'](cmd, runas=runas)
|
||||
if retrc != 0:
|
||||
raise SaltInvocationError(
|
||||
'Make source package for destination directory {0}, spec {1}, sources {2}, failed '
|
||||
'with return error {3}, check logs for further details'.format(
|
||||
dest_dir,
|
||||
spec,
|
||||
sources,
|
||||
retrc)
|
||||
)
|
||||
|
||||
srpms = os.path.join(tree_base, 'SRPMS')
|
||||
ret = []
|
||||
if not os.path.isdir(dest_dir):
|
||||
os.makedirs(dest_dir)
|
||||
__salt__['file.makedirs_perms'](name=dest_dir, user=runas, group='mock')
|
||||
for fn_ in os.listdir(srpms):
|
||||
full = os.path.join(srpms, fn_)
|
||||
tgt = os.path.join(dest_dir, fn_)
|
||||
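A hedged example of driving ``pkgbuild.make_src_pkg`` with the new ``runas``
argument from a state file (the state ID, paths, and user below are
illustrative; the CLI form shown in the docstring above works equally well):

.. code-block:: yaml

    build_libnacl_srpm:
      module.run:
        - name: pkgbuild.make_src_pkg
        - dest_dir: /var/www/html
        - spec: salt://rpm/python-libnacl.spec
        - sources: salt://rpm/libnacl-1.3.5.tar.gz
        - runas: mock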
@ -232,14 +275,16 @@ def build(runas,
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' pkgbuild.build mock epel-7-x86_64 /var/www/html https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/rpm/python-libnacl.spec https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz
|
||||
salt '*' pkgbuild.build mock epel-7-x86_64 /var/www/html
|
||||
https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/rpm/python-libnacl.spec
|
||||
https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz
|
||||
|
||||
This example command should build the libnacl package for rhel 7 using user
|
||||
mock and place it in /var/www/html/ on the minion
|
||||
'''
|
||||
ret = {}
|
||||
try:
|
||||
os.makedirs(dest_dir)
|
||||
__salt__['file.chown'](path=dest_dir, user=runas, group='mock')
|
||||
except OSError as exc:
|
||||
if exc.errno != errno.EEXIST:
|
||||
raise
|
||||
@ -247,7 +292,7 @@ def build(runas,
|
||||
srpm_build_dir = tempfile.mkdtemp()
|
||||
try:
|
||||
srpms = make_src_pkg(srpm_build_dir, spec, sources,
|
||||
env, template, saltenv)
|
||||
env, template, saltenv, runas)
|
||||
except Exception as exc:
|
||||
shutil.rmtree(srpm_build_dir)
|
||||
log.error('Failed to make src package')
|
||||
@ -259,17 +304,18 @@ def build(runas,
|
||||
deps_dir = tempfile.mkdtemp()
|
||||
deps_list = _get_deps(deps, deps_dir, saltenv)
|
||||
|
||||
retrc = 0
|
||||
for srpm in srpms:
|
||||
dbase = os.path.dirname(srpm)
|
||||
results_dir = tempfile.mkdtemp()
|
||||
try:
|
||||
__salt__['cmd.run']('chown {0} -R {1}'.format(runas, dbase))
|
||||
__salt__['cmd.run']('chown {0} -R {1}'.format(runas, results_dir))
|
||||
__salt__['file.chown'](path=dbase, user=runas, group='mock')
|
||||
__salt__['file.chown'](path=results_dir, user=runas, group='mock')
|
||||
cmd = 'mock --root={0} --resultdir={1} --init'.format(tgt, results_dir)
|
||||
__salt__['cmd.run'](cmd, runas=runas)
|
||||
retrc |= __salt__['cmd.retcode'](cmd, runas=runas)
|
||||
if deps_list and not deps_list.isspace():
|
||||
cmd = 'mock --root={0} --resultdir={1} --install {2} {3}'.format(tgt, results_dir, deps_list, noclean)
|
||||
__salt__['cmd.run'](cmd, runas=runas)
|
||||
retrc |= __salt__['cmd.retcode'](cmd, runas=runas)
|
||||
noclean += ' --no-clean'
|
||||
|
||||
cmd = 'mock --root={0} --resultdir={1} {2} {3} {4}'.format(
|
||||
@ -278,17 +324,20 @@ def build(runas,
|
||||
distset,
|
||||
noclean,
|
||||
srpm)
|
||||
__salt__['cmd.run'](cmd, runas=runas)
|
||||
cmd = ['rpm', '-qp', '--queryformat',
|
||||
'{0}/%{{name}}/%{{version}}-%{{release}}'.format(log_dir),
|
||||
srpm]
|
||||
log_dest = __salt__['cmd.run_stdout'](cmd, python_shell=False)
|
||||
retrc |= __salt__['cmd.retcode'](cmd, runas=runas)
|
||||
cmdlist = [
|
||||
'rpm',
|
||||
'-qp',
|
||||
'--queryformat',
|
||||
'{0}/%{{name}}/%{{version}}-%{{release}}'.format(log_dir),
|
||||
srpm]
|
||||
log_dest = __salt__['cmd.run_stdout'](cmdlist, python_shell=False)
|
||||
for filename in os.listdir(results_dir):
|
||||
full = os.path.join(results_dir, filename)
|
||||
if filename.endswith('src.rpm'):
|
||||
sdest = os.path.join(srpm_dir, filename)
|
||||
try:
|
||||
os.makedirs(srpm_dir)
|
||||
__salt__['file.makedirs_perms'](name=srpm_dir, user=runas, group='mock')
|
||||
except OSError as exc:
|
||||
if exc.errno != errno.EEXIST:
|
||||
raise
|
||||
@ -301,7 +350,7 @@ def build(runas,
|
||||
else:
|
||||
log_file = os.path.join(log_dest, filename)
|
||||
try:
|
||||
os.makedirs(log_dest)
|
||||
__salt__['file.makedirs_perms'](name=log_dest, user=runas, group='mock')
|
||||
except OSError as exc:
|
||||
if exc.errno != errno.EEXIST:
|
||||
raise
|
||||
@ -311,6 +360,15 @@ def build(runas,
|
||||
log.error('Error building from %s: %s', srpm, exc)
|
||||
finally:
|
||||
shutil.rmtree(results_dir)
|
||||
if retrc != 0:
|
||||
raise SaltInvocationError(
|
||||
'Building packages for destination directory {0}, spec {1}, sources {2}, failed '
|
||||
'with return error {3}, check logs for further details'.format(
|
||||
dest_dir,
|
||||
spec,
|
||||
sources,
|
||||
retrc)
|
||||
)
|
||||
shutil.rmtree(deps_dir)
|
||||
shutil.rmtree(srpm_build_dir)
|
||||
return ret
|
||||
@ -433,7 +491,7 @@ def make_repo(repodir,
|
||||
phrase = ''
|
||||
|
||||
if keyid is not None:
|
||||
## import_keys
|
||||
# import_keys
|
||||
pkg_pub_key_file = '{0}/{1}'.format(gnupghome, __salt__['pillar.get']('gpg_pkg_pub_keyname', None))
|
||||
pkg_priv_key_file = '{0}/{1}'.format(gnupghome, __salt__['pillar.get']('gpg_pkg_priv_keyname', None))
|
||||
|
||||
@ -477,14 +535,21 @@ def make_repo(repodir,
|
||||
|
||||
# need to update rpm with public key
|
||||
cmd = 'rpm --import {0}'.format(pkg_pub_key_file)
|
||||
__salt__['cmd.run'](cmd, runas=runas, use_vt=True)
|
||||
retrc = __salt__['cmd.retcode'](cmd, runas=runas, use_vt=True)
|
||||
if retrc != 0:
|
||||
raise SaltInvocationError(
|
||||
'Failed to import public key from file {0} with return '
|
||||
'error {1}, check logs for further details'.format(
|
||||
pkg_pub_key_file,
|
||||
retrc)
|
||||
)
|
||||
|
||||
## sign_it_here
|
||||
# sign_it_here
|
||||
# interval of 0.125 is really too fast on some systems
|
||||
interval = 0.5
|
||||
for file in os.listdir(repodir):
|
||||
if file.endswith('.rpm'):
|
||||
abs_file = os.path.join(repodir, file)
|
||||
for fileused in os.listdir(repodir):
|
||||
if fileused.endswith('.rpm'):
|
||||
abs_file = os.path.join(repodir, fileused)
|
||||
number_retries = timeout / interval
|
||||
times_looped = 0
|
||||
error_msg = 'Failed to sign file {0}'.format(abs_file)
|
||||
|
@ -60,17 +60,18 @@ def _get_gecos(name):
|
||||
Retrieve GECOS field info and return it in dictionary form
|
||||
'''
|
||||
gecos_field = salt.utils.stringutils.to_unicode(
|
||||
pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 3)
|
||||
pwd.getpwnam(_quote_username(name)).pw_gecos).split(',', 4)
|
||||
if not gecos_field:
|
||||
return {}
|
||||
else:
|
||||
# Assign empty strings for any unspecified trailing GECOS fields
|
||||
while len(gecos_field) < 4:
|
||||
while len(gecos_field) < 5:
|
||||
gecos_field.append('')
|
||||
return {'fullname': salt.utils.data.decode(gecos_field[0]),
|
||||
'roomnumber': salt.utils.data.decode(gecos_field[1]),
|
||||
'workphone': salt.utils.data.decode(gecos_field[2]),
|
||||
'homephone': salt.utils.data.decode(gecos_field[3])}
|
||||
'homephone': salt.utils.data.decode(gecos_field[3]),
|
||||
'other': salt.utils.data.decode(gecos_field[4])}
|
||||
|
||||
|
||||
def _build_gecos(gecos_dict):
|
||||
@ -78,10 +79,11 @@ def _build_gecos(gecos_dict):
|
||||
Accepts a dictionary entry containing GECOS field names and their values,
|
||||
and returns a full GECOS comment string, to be used with usermod.
|
||||
'''
|
||||
return '{0},{1},{2},{3}'.format(gecos_dict.get('fullname', ''),
|
||||
gecos_dict.get('roomnumber', ''),
|
||||
gecos_dict.get('workphone', ''),
|
||||
gecos_dict.get('homephone', ''))
|
||||
return '{0},{1},{2},{3},{4}'.format(gecos_dict.get('fullname', ''),
|
||||
gecos_dict.get('roomnumber', ''),
|
||||
gecos_dict.get('workphone', ''),
|
||||
gecos_dict.get('homephone', ''),
|
||||
gecos_dict.get('other', ''),).rstrip(',')
|
||||
|
||||
|
||||
def _update_gecos(name, key, value, root=None):
|
||||
@ -124,6 +126,7 @@ def add(name,
|
||||
roomnumber='',
|
||||
workphone='',
|
||||
homephone='',
|
||||
other='',
|
||||
createhome=True,
|
||||
loginclass=None,
|
||||
root=None,
|
||||
@ -237,6 +240,8 @@ def add(name,
|
||||
chworkphone(name, workphone)
|
||||
if homephone:
|
||||
chhomephone(name, homephone)
|
||||
if other:
|
||||
chother(name, other)
|
||||
return True
|
||||
|
||||
|
||||
@ -507,6 +512,19 @@ def chhomephone(name, homephone):
|
||||
return _update_gecos(name, 'homephone', homephone)
|
||||
|
||||
|
||||
def chother(name, other):
|
||||
'''
|
||||
Change the user's other GECOS attribute
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' user.chother foobar
|
||||
'''
|
||||
return _update_gecos(name, 'other', other)
|
||||
|
||||
|
||||
def chloginclass(name, loginclass, root=None):
|
||||
'''
|
||||
Change the default login class of the user
|
||||
@ -588,9 +606,9 @@ def _format_info(data):
|
||||
Return user information in a pretty way
|
||||
'''
|
||||
# Put GECOS info into a list
|
||||
gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 3)
|
||||
# Make sure our list has at least four elements
|
||||
while len(gecos_field) < 4:
|
||||
gecos_field = salt.utils.stringutils.to_unicode(data.pw_gecos).split(',', 4)
|
||||
# Make sure our list has at least five elements
|
||||
while len(gecos_field) < 5:
|
||||
gecos_field.append('')
|
||||
|
||||
return {'gid': data.pw_gid,
|
||||
@ -603,7 +621,8 @@ def _format_info(data):
|
||||
'fullname': gecos_field[0],
|
||||
'roomnumber': gecos_field[1],
|
||||
'workphone': gecos_field[2],
|
||||
'homephone': gecos_field[3]}
|
||||
'homephone': gecos_field[3],
|
||||
'other': gecos_field[4]}
|
||||
|
||||
|
||||
@salt.utils.decorators.path.which('id')
|
||||
|
@ -6,8 +6,10 @@ for managing outputters.
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import errno
|
||||
import logging
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
@ -168,7 +170,7 @@ def get_printout(out, opts=None, **kwargs):
|
||||
'''
|
||||
try:
|
||||
fileno = sys.stdout.fileno()
|
||||
except AttributeError:
|
||||
except (AttributeError, io.UnsupportedOperation):
|
||||
fileno = -1 # sys.stdout is StringIO or fake
|
||||
return not os.isatty(fileno)
|
||||
|
||||
|
@ -62,9 +62,9 @@ class TableDisplay(object):
|
||||
'''
|
||||
|
||||
_JUSTIFY_MAP = {
|
||||
'center': str.center,
|
||||
'right': str.rjust,
|
||||
'left': str.ljust
|
||||
'center': six.text_type.center,
|
||||
'right': six.text_type.rjust,
|
||||
'left': six.text_type.ljust
|
||||
}
|
||||
|
||||
def __init__(self,
|
||||
@ -147,7 +147,7 @@ class TableDisplay(object):
|
||||
for item in row
|
||||
]
|
||||
rows = []
|
||||
for item in map(None, *new_rows):
|
||||
for item in map(lambda *args: args, *new_rows):
|
||||
if isinstance(item, (tuple, list)):
|
||||
rows.append([substr or '' for substr in item])
|
||||
else:
|
||||
@ -159,7 +159,7 @@ class TableDisplay(object):
|
||||
for row in rows
|
||||
]
|
||||
|
||||
columns = map(None, *reduce(operator.add, logical_rows))
|
||||
columns = map(lambda *args: args, *reduce(operator.add, logical_rows))
|
||||
|
||||
max_widths = [
|
||||
max([len(six.text_type(item)) for item in column])
|
||||
@ -363,7 +363,7 @@ def output(ret, **kwargs):
|
||||
)
|
||||
)
|
||||
|
||||
return '\n'.join(table.display(ret,
|
||||
return '\n'.join(table.display(salt.utils.data.decode(ret),
|
||||
base_indent,
|
||||
out,
|
||||
rows_key=rows_key,
|
||||
|
@ -1006,6 +1006,13 @@ class Pillar(object):
|
||||
mopts['file_roots'] = self.actual_file_roots
|
||||
mopts['saltversion'] = __version__
|
||||
pillar['master'] = mopts
|
||||
if 'pillar' in self.opts and self.opts.get('ssh_merge_pillar', False):
|
||||
pillar = merge(
|
||||
self.opts['pillar'],
|
||||
pillar,
|
||||
self.merge_strategy,
|
||||
self.opts.get('renderer', 'yaml'),
|
||||
self.opts.get('pillar_merge_lists', False))
|
||||
if errors:
|
||||
for error in errors:
|
||||
log.critical('Pillar render error: %s', error)
|
||||
|
@ -1090,7 +1090,7 @@ def extracted(name,
|
||||
and not stat.S_ISDIR(x)),
|
||||
(contents['links'], stat.S_ISLNK)):
|
||||
for path in path_list:
|
||||
full_path = os.path.join(name, path)
|
||||
full_path = salt.utils.path.join(name, path)
|
||||
try:
|
||||
path_mode = os.lstat(full_path.rstrip(os.sep)).st_mode
|
||||
if not func(path_mode):
|
||||
@ -1259,7 +1259,7 @@ def extracted(name,
|
||||
if options is None:
|
||||
try:
|
||||
with closing(tarfile.open(cached, 'r')) as tar:
|
||||
tar.extractall(name)
|
||||
tar.extractall(salt.utils.stringutils.to_str(name))
|
||||
files = tar.getnames()
|
||||
if trim_output:
|
||||
files = files[:trim_output]
|
||||
|
@@ -57,11 +57,14 @@ def run_file(name,
             grain=None,
             key=None,
             overwrite=True,
             saltenv=None,
             check_db_exists=True,
             **connection_args):
    '''
    Execute an arbitrary query on the specified database

    .. versionadded:: 2017.7.0

    name
        Used only as an ID
@@ -86,17 +89,22 @@
    overwrite:
        The file or grain will be overwritten if it already exists (default)

    saltenv:
        The saltenv to pull the query_file from

    check_db_exists:
        The state run will check that the specified database exists (default=True)
        before running any queries

        .. versionadded:: 2017.7.0
    '''
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': 'Database {0} is already present'.format(database)}

    if any([query_file.startswith(proto) for proto in ['http://', 'https://', 'salt://', 's3://', 'swift://']]):
        query_file = __salt__['cp.cache_file'](query_file, saltenv=saltenv or __env__)

    if not os.path.exists(query_file):
        ret['comment'] = 'File {0} does not exist'.format(query_file)
        ret['result'] = False
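A usage sketch for the new ``check_db_exists`` argument (the state ID, database
name, and file path are hypothetical):

.. code-block:: yaml

    load_schema:
      mysql_query.run_file:
        - database: appdb
        - query_file: salt://mysql/schema.sql
        - check_db_exists: False  # skip the pre-check that the database already exists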
@ -595,7 +595,7 @@ def installed(name,
|
||||
'''
|
||||
if 'no_chown' in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
'Flourine',
|
||||
'Fluorine',
|
||||
'The no_chown argument has been deprecated and is no longer used. '
|
||||
'Its functionality was removed in Boron.')
|
||||
kwargs.pop('no_chown')
|
||||
|
@ -2533,13 +2533,21 @@ def latest(
|
||||
'result': None,
|
||||
'comment': '\n'.join(comments)}
|
||||
|
||||
# Build updated list of pkgs to exclude non-targeted ones
|
||||
targeted_pkgs = list(targets.keys()) if pkgs else None
|
||||
if salt.utils.platform.is_windows():
|
||||
# pkg.install execution module on windows ensures the software
|
||||
# package is installed when no version is specified, it does not
|
||||
# upgrade the software to the latest. This is per the design.
|
||||
# Build updated list of pkgs *with version number*, exclude
|
||||
# non-targeted ones
|
||||
targeted_pkgs = [{x: targets[x]} for x in targets]
|
||||
else:
|
||||
# Build updated list of pkgs to exclude non-targeted ones
|
||||
targeted_pkgs = list(targets)
|
||||
|
||||
# No need to refresh, if a refresh was necessary it would have been
|
||||
# performed above when pkg.latest_version was run.
|
||||
try:
|
||||
# No need to refresh, if a refresh was necessary it would have been
|
||||
# performed above when pkg.latest_version was run.
|
||||
changes = __salt__['pkg.install'](name,
|
||||
changes = __salt__['pkg.install'](name=None,
|
||||
refresh=False,
|
||||
fromrepo=fromrepo,
|
||||
skip_verify=skip_verify,
|
||||
|
@ -359,7 +359,6 @@ def managed(name, ppa=None, **kwargs):
|
||||
enabled = True
|
||||
|
||||
repo = name
|
||||
os_family = __grains__['os_family'].lower()
|
||||
if __grains__['os'] in ('Ubuntu', 'Mint'):
|
||||
if ppa is not None:
|
||||
# overload the name/repo value for PPAs cleanly
|
||||
@ -373,7 +372,7 @@ def managed(name, ppa=None, **kwargs):
|
||||
if enabled is not None \
|
||||
else salt.utils.data.is_true(disabled)
|
||||
|
||||
elif os_family in ('redhat', 'suse'):
|
||||
elif __grains__['os_family'] in ('RedHat', 'Suse'):
|
||||
if 'humanname' in kwargs:
|
||||
kwargs['name'] = kwargs.pop('humanname')
|
||||
if 'name' not in kwargs:
|
||||
@ -384,7 +383,7 @@ def managed(name, ppa=None, **kwargs):
|
||||
if disabled is not None \
|
||||
else salt.utils.data.is_true(enabled)
|
||||
|
||||
elif os_family == 'nilinuxrt':
|
||||
elif __grains__['os_family'] in ('NILinuxRT', 'Poky'):
|
||||
# opkg is the pkg virtual
|
||||
kwargs['enabled'] = not salt.utils.data.is_true(disabled) \
|
||||
if disabled is not None \
|
||||
@ -413,7 +412,7 @@ def managed(name, ppa=None, **kwargs):
|
||||
else:
|
||||
sanitizedkwargs = kwargs
|
||||
|
||||
if os_family == 'debian':
|
||||
if __grains__['os_family'] == 'Debian':
|
||||
repo = salt.utils.pkg.deb.strip_uri(repo)
|
||||
|
||||
if pre:
|
||||
@ -427,7 +426,7 @@ def managed(name, ppa=None, **kwargs):
|
||||
# not explicitly set, so we don't need to update the repo
|
||||
# if it's desired to be enabled and the 'enabled' key is
|
||||
# missing from the repo definition
|
||||
if os_family == 'redhat':
|
||||
if __grains__['os_family'] == 'RedHat':
|
||||
if not salt.utils.data.is_true(sanitizedkwargs[kwarg]):
|
||||
break
|
||||
else:
|
||||
@ -437,7 +436,7 @@ def managed(name, ppa=None, **kwargs):
|
||||
elif kwarg == 'comps':
|
||||
if sorted(sanitizedkwargs[kwarg]) != sorted(pre[kwarg]):
|
||||
break
|
||||
elif kwarg == 'line' and os_family == 'debian':
|
||||
elif kwarg == 'line' and __grains__['os_family'] == 'Debian':
|
||||
# split the line and sort everything after the URL
|
||||
sanitizedsplit = sanitizedkwargs[kwarg].split()
|
||||
sanitizedsplit[3:] = sorted(sanitizedsplit[3:])
|
||||
@ -452,14 +451,14 @@ def managed(name, ppa=None, **kwargs):
|
||||
salt.utils.pkg.deb.combine_comments(kwargs['comments'])
|
||||
if pre_comments != post_comments:
|
||||
break
|
||||
elif kwarg == 'comments' and os_family == 'redhat':
|
||||
elif kwarg == 'comments' and __grains__['os_family'] == 'RedHat':
|
||||
precomments = salt.utils.pkg.rpm.combine_comments(pre[kwarg])
|
||||
kwargcomments = salt.utils.pkg.rpm.combine_comments(
|
||||
sanitizedkwargs[kwarg])
|
||||
if precomments != kwargcomments:
|
||||
break
|
||||
else:
|
||||
if os_family in ('redhat', 'suse') \
|
||||
if __grains__['os_family'] in ('RedHat', 'Suse') \
|
||||
and any(isinstance(x, bool) for x in
|
||||
(sanitizedkwargs[kwarg], pre[kwarg])):
|
||||
# This check disambiguates 1/0 from True/False
|
||||
@ -490,7 +489,7 @@ def managed(name, ppa=None, **kwargs):
|
||||
pass
|
||||
|
||||
try:
|
||||
if os_family == 'debian':
|
||||
if __grains__['os_family'] == 'Debian':
|
||||
__salt__['pkg.mod_repo'](repo, saltenv=__env__, **kwargs)
|
||||
else:
|
||||
__salt__['pkg.mod_repo'](repo, **kwargs)
|
||||
|
@ -68,6 +68,7 @@ def _changes(name,
|
||||
roomnumber='',
|
||||
workphone='',
|
||||
homephone='',
|
||||
other='',
|
||||
loginclass=None,
|
||||
date=None,
|
||||
mindays=0,
|
||||
@ -188,6 +189,11 @@ def _changes(name,
|
||||
lusr['homephone'] = salt.utils.data.decode(lusr['homephone'])
|
||||
if lusr['homephone'] != homephone:
|
||||
change['homephone'] = homephone
|
||||
if 'user.chother' in __salt__ and other is not None:
|
||||
other = salt.utils.data.decode(other)
|
||||
lusr['other'] = salt.utils.data.decode(lusr['other'])
|
||||
if lusr['other'] != other:
|
||||
change['other'] = other
|
||||
# OpenBSD/FreeBSD login class
|
||||
if __grains__['kernel'] in ('OpenBSD', 'FreeBSD'):
|
||||
if loginclass:
|
||||
@ -236,6 +242,7 @@ def present(name,
|
||||
roomnumber=None,
|
||||
workphone=None,
|
||||
homephone=None,
|
||||
other=None,
|
||||
loginclass=None,
|
||||
date=None,
|
||||
mindays=None,
|
||||
@ -377,7 +384,10 @@ def present(name,
|
||||
|
||||
homephone
|
||||
The user's home phone number (not supported in MacOS)
|
||||
If the GECOS field contains more than 3 commas, this field will contain the rest of them
|
||||
|
||||
other
|
||||
The user's other attribute (not supported in MacOS)
|
||||
If the GECOS field contains more than 4 commas, this field will contain the rest of them
|
||||
|
||||
.. versionchanged:: 2014.7.0
|
||||
Shadow attribute support added.
|
||||
@ -448,6 +458,8 @@ def present(name,
|
||||
workphone = salt.utils.data.decode(workphone)
|
||||
if homephone is not None:
|
||||
homephone = salt.utils.data.decode(homephone)
|
||||
if other is not None:
|
||||
other = salt.utils.data.decode(other)
|
||||
|
||||
# createhome not supported on Windows or Mac
|
||||
if __grains__['kernel'] in ('Darwin', 'Windows'):
|
||||
@ -460,7 +472,7 @@ def present(name,
|
||||
|
||||
# the comma is used to separate field in GECOS, thus resulting into
|
||||
# salt adding the end of fullname each time this function is called
|
||||
for gecos_field in ['fullname', 'roomnumber', 'workphone']:
|
||||
for gecos_field in [fullname, roomnumber, workphone]:
|
||||
if isinstance(gecos_field, string_types) and ',' in gecos_field:
|
||||
ret['comment'] = "Unsupported char ',' in {0}".format(gecos_field)
|
||||
ret['result'] = False
|
||||
@ -519,6 +531,7 @@ def present(name,
|
||||
roomnumber,
|
||||
workphone,
|
||||
homephone,
|
||||
other,
|
||||
loginclass,
|
||||
date,
|
||||
mindays,
|
||||
@ -654,6 +667,7 @@ def present(name,
|
||||
roomnumber,
|
||||
workphone,
|
||||
homephone,
|
||||
other,
|
||||
loginclass,
|
||||
date,
|
||||
mindays,
|
||||
@ -705,6 +719,7 @@ def present(name,
|
||||
'roomnumber': roomnumber,
|
||||
'workphone': workphone,
|
||||
'homephone': homephone,
|
||||
'other': other,
|
||||
'createhome': createhome,
|
||||
'nologinit': nologinit,
|
||||
'loginclass': loginclass}
|
||||
|
@ -137,7 +137,7 @@ def managed(name,
|
||||
'''
|
||||
if 'no_chown' in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
'Flourine',
|
||||
'Fluorine',
|
||||
'The no_chown argument has been deprecated and is no longer used. '
|
||||
'Its functionality was removed in Boron.')
|
||||
kwargs.pop('no_chown')
|
||||
|
@ -607,23 +607,22 @@ class TCPReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin, salt.tra
|
||||
self.payload_handler = payload_handler
|
||||
self.io_loop = io_loop
|
||||
self.serial = salt.payload.Serial(self.opts)
|
||||
if USE_LOAD_BALANCER:
|
||||
self.req_server = LoadBalancerWorker(self.socket_queue,
|
||||
self.handle_message,
|
||||
io_loop=self.io_loop,
|
||||
ssl_options=self.opts.get('ssl'))
|
||||
else:
|
||||
if salt.utils.platform.is_windows():
|
||||
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
_set_tcp_keepalive(self._socket, self.opts)
|
||||
self._socket.setblocking(0)
|
||||
self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))
|
||||
self.req_server = SaltMessageServer(self.handle_message,
|
||||
io_loop=self.io_loop,
|
||||
ssl_options=self.opts.get('ssl'))
|
||||
self.req_server.add_socket(self._socket)
|
||||
self._socket.listen(self.backlog)
|
||||
with salt.utils.async.current_ioloop(self.io_loop):
|
||||
if USE_LOAD_BALANCER:
|
||||
self.req_server = LoadBalancerWorker(self.socket_queue,
|
||||
self.handle_message,
|
||||
ssl_options=self.opts.get('ssl'))
|
||||
else:
|
||||
if salt.utils.platform.is_windows():
|
||||
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
_set_tcp_keepalive(self._socket, self.opts)
|
||||
self._socket.setblocking(0)
|
||||
self._socket.bind((self.opts['interface'], int(self.opts['ret_port'])))
|
||||
self.req_server = SaltMessageServer(self.handle_message,
|
||||
ssl_options=self.opts.get('ssl'))
|
||||
self.req_server.add_socket(self._socket)
|
||||
self._socket.listen(self.backlog)
|
||||
salt.transport.mixins.auth.AESReqServerMixin.post_fork(self, payload_handler, io_loop)
|
||||
|
||||
@tornado.gen.coroutine
|
||||
@ -708,6 +707,7 @@ class SaltMessageServer(tornado.tcpserver.TCPServer, object):
|
||||
'''
|
||||
def __init__(self, message_handler, *args, **kwargs):
|
||||
super(SaltMessageServer, self).__init__(*args, **kwargs)
|
||||
self.io_loop = tornado.ioloop.IOLoop.current()
|
||||
|
||||
self.clients = []
|
||||
self.message_handler = message_handler
|
||||
@ -811,7 +811,9 @@ class TCPClientKeepAlive(tornado.tcpclient.TCPClient):
|
||||
stream = tornado.iostream.IOStream(
|
||||
sock,
|
||||
max_buffer_size=max_buffer_size)
|
||||
return stream.connect(addr)
|
||||
if tornado.version_info < (5,):
|
||||
return stream.connect(addr)
|
||||
return stream, stream.connect(addr)
|
||||
|
||||
|
||||
class SaltMessageClientPool(salt.transport.MessageClientPool):
|
||||
@ -895,33 +897,33 @@ class SaltMessageClient(object):
|
||||
return
|
||||
self._closing = True
|
||||
if hasattr(self, '_stream') and not self._stream.closed():
|
||||
self._stream.close()
|
||||
if self._read_until_future is not None:
|
||||
# This will prevent this message from showing up:
|
||||
# '[ERROR ] Future exception was never retrieved:
|
||||
# StreamClosedError'
|
||||
# This happens because the logic is always waiting to read
|
||||
# the next message and the associated read future is marked
|
||||
# 'StreamClosedError' when the stream is closed.
|
||||
self._read_until_future.exception()
|
||||
if (not self._stream_return_future.done() and
|
||||
self.io_loop != tornado.ioloop.IOLoop.current(
|
||||
instance=False)):
|
||||
# If _stream_return() hasn't completed, it means the IO
|
||||
# Loop is stopped (such as when using
|
||||
# 'salt.utils.async.SyncWrapper'). Ensure that
|
||||
# _stream_return() completes by restarting the IO Loop.
|
||||
# This will prevent potential errors on shutdown.
|
||||
orig_loop = tornado.ioloop.IOLoop.current()
|
||||
self.io_loop.make_current()
|
||||
try:
|
||||
# If _stream_return() hasn't completed, it means the IO
|
||||
# Loop is stopped (such as when using
|
||||
# 'salt.utils.async.SyncWrapper'). Ensure that
|
||||
# _stream_return() completes by restarting the IO Loop.
|
||||
# This will prevent potential errors on shutdown.
|
||||
try:
|
||||
orig_loop = tornado.ioloop.IOLoop.current()
|
||||
self.io_loop.make_current()
|
||||
self._stream.close()
|
||||
if self._read_until_future is not None:
|
||||
# This will prevent this message from showing up:
|
||||
# '[ERROR ] Future exception was never retrieved:
|
||||
# StreamClosedError'
|
||||
# This happens because the logic is always waiting to read
|
||||
# the next message and the associated read future is marked
|
||||
# 'StreamClosedError' when the stream is closed.
|
||||
self._read_until_future.exception()
|
||||
if (not self._stream_return_future.done() and
|
||||
self.io_loop != tornado.ioloop.IOLoop.current(
|
||||
instance=False)):
|
||||
self.io_loop.add_future(
|
||||
self._stream_return_future,
|
||||
lambda future: self.io_loop.stop()
|
||||
)
|
||||
self.io_loop.start()
|
||||
finally:
|
||||
orig_loop.make_current()
|
||||
finally:
|
||||
orig_loop.make_current()
|
||||
self._tcp_client.close()
|
||||
# Clear callback references to allow the object that they belong to
|
||||
# to be deleted.
|
||||
@ -974,7 +976,8 @@ class SaltMessageClient(object):
|
||||
with salt.utils.async.current_ioloop(self.io_loop):
|
||||
self._stream = yield self._tcp_client.connect(self.host,
|
||||
self.port,
|
||||
ssl_options=self.opts.get('ssl'))
|
||||
ssl_options=self.opts.get('ssl'),
|
||||
**kwargs)
|
||||
self._connecting_future.set_result(True)
|
||||
break
|
||||
except Exception as e:
|
||||
|
@ -37,6 +37,11 @@ log = logging.getLogger(__name__)
|
||||
|
||||
__virtualname__ = 'mac_utils'
|
||||
|
||||
__salt__ = {
|
||||
'cmd.run_all': salt.modules.cmdmod._run_all_quiet,
|
||||
'cmd.run': salt.modules.cmdmod._run_quiet,
|
||||
}
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
@ -268,7 +273,8 @@ def launchctl(sub_cmd, *args, **kwargs):
|
||||
|
||||
# Run command
|
||||
kwargs['python_shell'] = False
|
||||
ret = salt.modules.cmdmod.run_all(cmd, **kwargs)
|
||||
kwargs = salt.utils.args.clean_kwargs(**kwargs)
|
||||
ret = __salt__['cmd.run_all'](cmd, **kwargs)
|
||||
|
||||
# Raise an error or return successful result
|
||||
if ret['retcode']:
|
||||
@ -331,7 +337,7 @@ def _available_services():
|
||||
# the system provided plutil program to do the conversion
|
||||
cmd = '/usr/bin/plutil -convert xml1 -o - -- "{0}"'.format(
|
||||
true_path)
|
||||
plist_xml = salt.modules.cmdmod.run(cmd, output_loglevel='quiet')
|
||||
plist_xml = __salt__['cmd.run'](cmd)
|
||||
if six.PY2:
|
||||
plist = plistlib.readPlistFromString(plist_xml)
|
||||
else:
|
||||
|
@ -3087,11 +3087,11 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
|
||||
help='Run command via sudo.'
|
||||
)
|
||||
auth_group.add_option(
|
||||
'--skip-roster',
|
||||
dest='ssh_skip_roster',
|
||||
'--update-roster',
|
||||
dest='ssh_update_roster',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help='If hostname is not found in the roster, do not store the information'
|
||||
help='If hostname is not found in the roster, store the information'
|
||||
'into the default roster file (flat).'
|
||||
)
|
||||
self.add_option_group(auth_group)
|
||||
|
@ -783,11 +783,13 @@ class Schedule(object):
|
||||
else:
|
||||
# Send back to master so the job is included in the job list
|
||||
mret = ret.copy()
|
||||
mret['jid'] = 'req'
|
||||
if data.get('return_job') == 'nocache':
|
||||
# overwrite 'req' to signal to master that
|
||||
# this job shouldn't be stored
|
||||
mret['jid'] = 'nocache'
|
||||
# No returners defined, so we're only sending back to the master
|
||||
if not data_returner and not self.schedule_returner:
|
||||
mret['jid'] = 'req'
|
||||
if data.get('return_job') == 'nocache':
|
||||
# overwrite 'req' to signal to master that
|
||||
# this job shouldn't be stored
|
||||
mret['jid'] = 'nocache'
|
||||
load = {'cmd': '_return', 'id': self.opts['id']}
|
||||
for key, value in six.iteritems(mret):
|
||||
load[key] = value
|
||||
|
@ -6,7 +6,7 @@ ec2-test:
|
||||
script_args: '-P -Z'
|
||||
ec2-win2012r2-test:
|
||||
provider: ec2-config
|
||||
size: t2.micro
|
||||
size: m1.small
|
||||
image: ami-eb1ecd96
|
||||
smb_port: 445
|
||||
win_installer: ''
|
||||
@ -20,7 +20,7 @@ ec2-win2012r2-test:
|
||||
deploy: True
|
||||
ec2-win2016-test:
|
||||
provider: ec2-config
|
||||
size: t2.micro
|
||||
size: m1.small
|
||||
image: ami-ed14c790
|
||||
smb_port: 445
|
||||
win_installer: ''
|
||||
|
@ -47,7 +47,7 @@ class ArchiveTest(ModuleCase):
|
||||
self.arch = os.path.join(self.base_path, 'archive.{0}'.format(arch_fmt))
|
||||
self.dst = os.path.join(self.base_path, '{0}_dst_dir'.format(arch_fmt))
|
||||
|
||||
def _set_up(self, arch_fmt):
|
||||
def _set_up(self, arch_fmt, unicode_filename=False):
|
||||
'''
|
||||
Create source file tree and destination directory
|
||||
|
||||
@ -62,7 +62,11 @@ class ArchiveTest(ModuleCase):
|
||||
|
||||
# Create source
|
||||
os.makedirs(self.src)
|
||||
with salt.utils.files.fopen(os.path.join(self.src, 'file'), 'w') as theorem:
|
||||
if unicode_filename:
|
||||
filename = 'file®'
|
||||
else:
|
||||
filename = 'file'
|
||||
with salt.utils.files.fopen(os.path.join(self.src, filename), 'w') as theorem:
|
||||
theorem.write(textwrap.dedent(salt.utils.stringutils.to_str(r'''\
|
||||
Compression theorem of computational complexity theory:
|
||||
|
||||
@ -150,6 +154,35 @@ class ArchiveTest(ModuleCase):
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
|
||||
def test_tar_pack_unicode(self):
|
||||
'''
|
||||
Validate using the tar function to create archives
|
||||
'''
|
||||
self._set_up(arch_fmt='tar', unicode_filename=True)
|
||||
|
||||
# Test create archive
|
||||
ret = self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
|
||||
self.assertTrue(isinstance(ret, list), six.text_type(ret))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.path.which('tar'), 'Cannot find tar executable')
|
||||
def test_tar_unpack_unicode(self):
|
||||
'''
|
||||
Validate using the tar function to extract archives
|
||||
'''
|
||||
self._set_up(arch_fmt='tar', unicode_filename=True)
|
||||
self.run_function('archive.tar', ['-cvf', self.arch], sources=self.src)
|
||||
|
||||
# Test extract archive
|
||||
ret = self.run_function('archive.tar', ['-xvf', self.arch], dest=self.dst)
|
||||
self.assertTrue(isinstance(ret, list), six.text_type(ret))
|
||||
self._assert_artifacts_in_ret(ret)
|
||||
|
||||
self._tear_down()
|
||||
|
||||
@skipIf(not salt.utils.path.which('gzip'), 'Cannot find gzip executable')
|
||||
def test_gzip(self):
|
||||
'''
|
||||
|
@ -11,7 +11,6 @@ from __future__ import absolute_import, print_function, unicode_literals
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
# Import Salt Testing libs
|
||||
@ -75,8 +74,7 @@ class PipModuleTest(ModuleCase):
|
||||
|
||||
# Let's remove the pip binary
|
||||
pip_bin = os.path.join(self.venv_dir, 'bin', 'pip')
|
||||
py_dir = 'python{0}.{1}'.format(*sys.version_info[:2])
|
||||
site_dir = os.path.join(self.venv_dir, 'lib', py_dir, 'site-packages')
|
||||
site_dir = self.run_function('virtualenv.get_distribution_path', [self.venv_dir, 'pip'])
|
||||
if salt.utils.platform.is_windows():
|
||||
pip_bin = os.path.join(self.venv_dir, 'Scripts', 'pip.exe')
|
||||
site_dir = os.path.join(self.venv_dir, 'lib', 'site-packages')
|
||||
|
@ -329,6 +329,48 @@ class CallTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMixin
|
||||
if os.path.isdir(config_dir):
|
||||
shutil.rmtree(config_dir)
|
||||
|
||||
def test_syslog_file_not_found(self):
|
||||
'''
|
||||
test when log_file is set to a syslog file that does not exist
|
||||
'''
|
||||
old_cwd = os.getcwd()
|
||||
config_dir = os.path.join(TMP, 'log_file_incorrect')
|
||||
if not os.path.isdir(config_dir):
|
||||
os.makedirs(config_dir)
|
||||
|
||||
os.chdir(config_dir)
|
||||
|
||||
with salt.utils.files.fopen(self.get_config_file_path('minion'), 'r') as fh_:
|
||||
minion_config = salt.utils.yaml.load(fh_.read())
|
||||
minion_config['log_file'] = 'file:///dev/doesnotexist'
|
||||
with salt.utils.files.fopen(os.path.join(config_dir, 'minion'), 'w') as fh_:
|
||||
fh_.write(
|
||||
salt.utils.yaml.dump(minion_config, default_flow_style=False)
|
||||
)
|
||||
ret = self.run_script(
|
||||
'salt-call',
|
||||
'--config-dir {0} cmd.run "echo foo"'.format(
|
||||
config_dir
|
||||
),
|
||||
timeout=60,
|
||||
catch_stderr=True,
|
||||
with_retcode=True
|
||||
)
|
||||
try:
|
||||
if sys.version_info >= (3, 5, 4):
|
||||
self.assertIn('local:', ret[0])
|
||||
self.assertIn('[WARNING ] The log_file does not exist. Logging not setup correctly or syslog service not started.', ret[1])
|
||||
self.assertEqual(ret[2], 0)
|
||||
else:
|
||||
self.assertIn(
|
||||
'Failed to setup the Syslog logging handler', '\n'.join(ret[1])
|
||||
)
|
||||
self.assertEqual(ret[2], 2)
|
||||
finally:
|
||||
self.chdir(old_cwd)
|
||||
if os.path.isdir(config_dir):
|
||||
shutil.rmtree(config_dir)
|
||||
|
||||
def test_issue_15074_output_file_append(self):
|
||||
output_file_append = os.path.join(TMP, 'issue-15074')
|
||||
try:
|
||||
|
@ -41,6 +41,7 @@ import salt.utils.platform
import salt.utils.versions
import salt.utils.win_dacl
import salt.utils.win_functions
import salt.utils.win_runas
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
from salt.exceptions import CommandExecutionError

@ -48,6 +49,26 @@ from salt.exceptions import CommandExecutionError
from salt.ext import six


def can_runas():
    '''
    Detect if we are running in a limited shell (winrm) and are unable to use
    the runas utility method.
    '''
    if salt.utils.platform.is_windows():
        try:
            salt.utils.win_runas.runas(
                'cmd.exe /c echo 1', 'noexistuser', 'n0existp4ss',
            )
        except WindowsError as exc:  # pylint: disable=undefined-variable
            if exc.winerror == 5:
                # Access Denied
                return False
    return True


CAN_RUNAS = can_runas()


class VirtualEnv(object):
    def __init__(self, test, venv_dir):
        self.venv_dir = venv_dir

@ -219,8 +240,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):

        # Let's remove the pip binary
        pip_bin = os.path.join(venv_dir, 'bin', 'pip')
        py_dir = 'python{0}.{1}'.format(*sys.version_info[:2])
        site_dir = os.path.join(venv_dir, 'lib', py_dir, 'site-packages')
        site_dir = self.run_function('virtualenv.get_distribution_path', [venv_dir, 'pip'])
        if salt.utils.platform.is_windows():
            pip_bin = os.path.join(venv_dir, 'Scripts', 'pip.exe')
            site_dir = os.path.join(venv_dir, 'lib', 'site-packages')

@ -274,6 +294,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):

    @destructiveTest
    @skip_if_not_root
    @skipIf(not CAN_RUNAS, 'Runas support required')
    @with_system_user('issue-6912', on_existing='delete', delete=True,
                      password='PassWord1!')
    @with_tempdir()

@ -317,6 +338,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):

    @destructiveTest
    @skip_if_not_root
    @skipIf(not CAN_RUNAS, 'Runas support required')
    @with_system_user('issue-6912', on_existing='delete', delete=True,
                      password='PassWord1!')
    @with_tempdir()

@ -249,6 +249,7 @@ class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
        self.assertRaises(CommandExecutionError, cmdmod._run, 'foo')

    @skipIf(salt.utils.platform.is_windows(), 'Do not run on Windows')
    @skipIf(True, 'Test breaks unittests runs')
    def test_run(self):
        '''
        Tests end result when a command is not found

@ -46,11 +46,8 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
    def setup_loader_modules(self):
        utils = salt.loader.utils(
            salt.config.DEFAULT_MINION_OPTS,
            whitelist=['state']
            whitelist=['args', 'docker', 'json', 'state', 'thin']
        )
        # Force the LazyDict to populate its references. Otherwise the lookup
        # will fail inside the unit tests.
        list(utils)
        return {docker_mod: {'__context__': {'docker.docker_version': ''},
                             '__utils__': utils}}

@ -46,7 +46,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
                  'fullname': 'root',
                  'roomnumber': '',
                  'workphone': '',
                  'homephone': ''}
                  'homephone': '',
                  'other': ''}

    @classmethod
    def tearDownClass(cls):

@ -96,7 +97,8 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
                'fullname': 'root',
                'roomnumber': '',
                'workphone': '',
                'homephone': ''}]
                'homephone': '',
                'other': ''}]
        with patch('salt.modules.useradd._format_info', MagicMock(return_value=self.mock_pwall)):
            self.assertEqual(useradd.getent(), ret)

@ -330,6 +332,36 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
        with patch.object(useradd, 'info', mock):
            self.assertFalse(useradd.chhomephone('salt', 1))

    # 'chother' function tests: 1

    def test_chother(self):
        '''
        Test if the user's other GECOS attribute is changed
        '''
        mock = MagicMock(return_value=False)
        with patch.object(useradd, '_get_gecos', mock):
            self.assertFalse(useradd.chother('salt', 1))

        mock = MagicMock(return_value={'other': 'foobar'})
        with patch.object(useradd, '_get_gecos', mock):
            self.assertTrue(useradd.chother('salt', 'foobar'))

        mock = MagicMock(return_value={'other': 'foobar2'})
        with patch.object(useradd, '_get_gecos', mock):
            mock = MagicMock(return_value=None)
            with patch.dict(useradd.__salt__, {'cmd.run': mock}):
                mock = MagicMock(return_value={'other': 'foobar3'})
                with patch.object(useradd, 'info', mock):
                    self.assertFalse(useradd.chother('salt', 'foobar'))

        mock = MagicMock(return_value={'other': 'foobar3'})
        with patch.object(useradd, '_get_gecos', mock):
            mock = MagicMock(return_value=None)
            with patch.dict(useradd.__salt__, {'cmd.run': mock}):
                mock = MagicMock(return_value={'other': 'foobar3'})
                with patch.object(useradd, 'info', mock):
                    self.assertFalse(useradd.chother('salt', 'foobar'))

    # 'info' function tests: 1

    @skipIf(HAS_PWD is False, 'The pwd module is not available')

@ -393,3 +425,21 @@ class UserAddTestCase(TestCase, LoaderModuleMockMixin):
        mock = MagicMock(side_effect=[{'name': ''}, False, {'name': ''}])
        with patch.object(useradd, 'info', mock):
            self.assertFalse(useradd.rename('salt', 'salt'))

    def test_build_gecos_field(self):
        '''
        Test if gecos fields are built correctly (removing trailing commas)
        '''
        test_gecos = {'fullname': 'Testing',
                      'roomnumber': 1234,
                      'workphone': 22222,
                      'homephone': 99999}
        expected_gecos_fields = 'Testing,1234,22222,99999'
        self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields)
        test_gecos.pop('roomnumber')
        test_gecos.pop('workphone')
        expected_gecos_fields = 'Testing,,,99999'
        self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields)
        test_gecos.pop('homephone')
        expected_gecos_fields = 'Testing'
        self.assertEqual(useradd._build_gecos(test_gecos), expected_gecos_fields)

48
tests/unit/output/test_table_out.py
Normal file
@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
'''
unittests for table outputter
'''

# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase

# Import Salt Libs
import salt.output.table_out as table_out
import salt.utils.stringutils


class TableTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.output.table_out
    '''
    def setup_loader_modules(self):
        return {table_out: {}}

    # The test data should include unicode chars, and in Python 2 there should
    # be an example both of an encoded str type and an actual unicode type.
    # Since unicode_literals is imported, we will achieve the former using
    # salt.utils.stringutils.to_str and the latter by simply using a string
    # literal.
    data = [
        {'Food': salt.utils.stringutils.to_str('яйца, бекон, колбаса и спам'),
         'Price': 5.99},
        {'Food': 'спам, спам, спам, яйца и спам',
         'Price': 3.99},
    ]

    def test_output(self):
        ret = table_out.output(self.data)
        self.assertEqual(
            ret,
            (' -----------------------------------------\n'
             ' | Food | Price |\n'
             ' -----------------------------------------\n'
             ' | яйца, бекон, колбаса и спам | 5.99 |\n'
             ' -----------------------------------------\n'
             ' | спам, спам, спам, яйца и спам | 3.99 |\n'
             ' -----------------------------------------')
        )

@ -26,12 +26,9 @@ class BotoCloudfrontTestCase(TestCase, LoaderModuleMockMixin):
    def setup_loader_modules(self):
        utils = salt.loader.utils(
            self.opts,
            whitelist=['boto3', 'dictdiffer', 'yamldumper'],
            whitelist=['boto3', 'dictdiffer', 'yaml'],
            context={},
        )
        # Force the LazyDict to populate its references. Otherwise the lookup
        # will fail inside the unit tests.
        list(utils)
        return {
            boto_cloudfront: {
                '__utils__': utils,

@ -25,12 +25,9 @@ class BotoSqsTestCase(TestCase, LoaderModuleMockMixin):
    def setup_loader_modules(self):
        utils = salt.loader.utils(
            self.opts,
            whitelist=['boto3', 'yamldumper'],
            whitelist=['boto3', 'yaml'],
            context={}
        )
        # Force the LazyDict to populate its references. Otherwise the lookup
        # will fail inside the unit tests.
        list(utils)
        return {
            boto_sqs: {
                '__utils__': utils,

@ -266,7 +266,9 @@ class MinionTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
                patch('salt.utils.process.SignalHandlingMultiprocessingProcess.join', MagicMock(return_value=True)):
            mock_opts = self.get_config('minion', from_scratch=True)
            mock_opts['beacons_before_connect'] = True
            minion = salt.minion.Minion(mock_opts, io_loop=tornado.ioloop.IOLoop())
            io_loop = tornado.ioloop.IOLoop()
            io_loop.make_current()
            minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
            try:

                try:

@ -290,7 +292,9 @@ class MinionTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
                patch('salt.utils.process.SignalHandlingMultiprocessingProcess.join', MagicMock(return_value=True)):
            mock_opts = self.get_config('minion', from_scratch=True)
            mock_opts['scheduler_before_connect'] = True
            minion = salt.minion.Minion(mock_opts, io_loop=tornado.ioloop.IOLoop())
            io_loop = tornado.ioloop.IOLoop()
            io_loop.make_current()
            minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
            try:
                try:
                    minion.tune_in(start=True)

@ -175,11 +175,8 @@ class MacUtilsTestCase(TestCase):
        mock_cmd = MagicMock(return_value={'retcode': 0,
                                           'stdout': 'success',
                                           'stderr': 'none'})
        with patch('salt.modules.cmdmod.run_all', mock_cmd) as m_run_all:
        with patch('salt.utils.mac_utils.__salt__', {'cmd.run_all': mock_cmd}):
            ret = mac_utils.launchctl('enable', 'org.salt.minion')
            m_run_all.assert_called_with(
                ['launchctl', 'enable', 'org.salt.minion'],
                python_shell=False)
            self.assertEqual(ret, True)

    def test_launchctl_return_stdout(self):

@ -189,12 +186,10 @@ class MacUtilsTestCase(TestCase):
        mock_cmd = MagicMock(return_value={'retcode': 0,
                                           'stdout': 'success',
                                           'stderr': 'none'})
        with patch('salt.modules.cmdmod.run_all', mock_cmd) as m_run_all:
        with patch('salt.utils.mac_utils.__salt__', {'cmd.run_all': mock_cmd}):
            ret = mac_utils.launchctl('enable',
                                      'org.salt.minion',
                                      return_stdout=True)
            m_run_all.assert_called_with(['launchctl', 'enable', 'org.salt.minion'],
                                         python_shell=False)
            self.assertEqual(ret, 'success')

    def test_launchctl_error(self):

@ -208,13 +203,11 @@ class MacUtilsTestCase(TestCase):
                'stdout: failure\n' \
                'stderr: test failure\n' \
                'retcode: 1'
        with patch('salt.modules.cmdmod.run_all', mock_cmd) as m_run_all:
        with patch('salt.utils.mac_utils.__salt__', {'cmd.run_all': mock_cmd}):
            try:
                mac_utils.launchctl('enable', 'org.salt.minion')
            except CommandExecutionError as exc:
                self.assertEqual(exc.message, error)
                m_run_all.assert_called_with(['launchctl', 'enable', 'org.salt.minion'],
                                             python_shell=False)

    @patch('salt.utils.path.os_walk')
    @patch('os.path.exists')

@ -317,7 +310,7 @@ class MacUtilsTestCase(TestCase):
    @patch('salt.utils.path.os_walk')
    @patch('os.path.exists')
    @patch('plistlib.readPlist')
    @patch('salt.modules.cmdmod.run')
    @patch('salt.utils.mac_utils.__salt__')
    @patch('plistlib.readPlistFromString' if six.PY2 else 'plistlib.loads')
    def test_available_services_non_xml(self,
                                        mock_read_plist_from_string,

@ -334,9 +327,15 @@ class MacUtilsTestCase(TestCase):
            [('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])],
            [('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])],
        ]
        attrs = {'cmd.run': MagicMock(return_value='<some xml>')}

        def getitem(name):
            return attrs[name]

        mock_run.__getitem__.side_effect = getitem
        mock_run.configure_mock(**attrs)
        mock_exists.return_value = True
        mock_read_plist.side_effect = Exception()
        mock_run.return_value = '<some xml>'
        mock_read_plist_from_string.side_effect = [
            MagicMock(Label='com.apple.lla1'),
            MagicMock(Label='com.apple.lla2'),

@ -352,32 +351,24 @@ class MacUtilsTestCase(TestCase):

        cmd = '/usr/bin/plutil -convert xml1 -o - -- "{0}"'
        calls = [
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla2.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld2.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla2.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld2.plist'))),
                output_loglevel='quiet'),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla2.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld2.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla2.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld2.plist'))),),
        ]
        mock_run.assert_has_calls(calls)
        mock_run.assert_has_calls(calls, any_order=True)

        # Make sure it's a dict with 8 items
        self.assertTrue(isinstance(ret, dict))

@ -404,7 +395,7 @@ class MacUtilsTestCase(TestCase):
    @patch('salt.utils.path.os_walk')
    @patch('os.path.exists')
    @patch('plistlib.readPlist')
    @patch('salt.modules.cmdmod.run')
    @patch('salt.utils.mac_utils.__salt__')
    @patch('plistlib.readPlistFromString' if six.PY2 else 'plistlib.loads')
    def test_available_services_non_xml_malformed_plist(self,
                                                        mock_read_plist_from_string,

@ -421,41 +412,39 @@ class MacUtilsTestCase(TestCase):
            [('/System/Library/LaunchAgents', [], ['com.apple.slla1.plist', 'com.apple.slla2.plist'])],
            [('/System/Library/LaunchDaemons', [], ['com.apple.slld1.plist', 'com.apple.slld2.plist'])],
        ]
        attrs = {'cmd.run': MagicMock(return_value='<some xml>')}

        def getitem(name):
            return attrs[name]

        mock_run.__getitem__.side_effect = getitem
        mock_run.configure_mock(**attrs)
        mock_exists.return_value = True
        mock_read_plist.side_effect = Exception()
        mock_run.return_value = '<some xml>'
        mock_read_plist_from_string.return_value = 'malformedness'

        ret = mac_utils._available_services()

        cmd = '/usr/bin/plutil -convert xml1 -o - -- "{0}"'
        calls = [
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla2.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld2.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla2.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld1.plist'))),
                output_loglevel='quiet'),
            call(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld2.plist'))),
                output_loglevel='quiet'),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchAgents', 'com.apple.lla2.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/Library/LaunchDaemons', 'com.apple.lld2.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchAgents', 'com.apple.slla2.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld1.plist'))),),
            call.cmd.run(cmd.format(os.path.realpath(os.path.join(
                '/System/Library/LaunchDaemons', 'com.apple.slld2.plist'))),),
        ]
        mock_run.assert_has_calls(calls)
        mock_run.assert_has_calls(calls, any_order=True)

        # Make sure it's a dict with 8 items
        self.assertTrue(isinstance(ret, dict))

@ -10,15 +10,19 @@

# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import sys
import warnings

# Import Salt Testing libs
import tests.integration as integration
from tests.support.unit import TestCase, skipIf
from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON

# Import Salt libs
import salt.modules.cmdmod
import salt.version
import salt.utils.platform
import salt.utils.versions
from salt.utils.versions import LooseVersion, StrictVersion

@ -95,6 +99,40 @@ class VersionTestCase(TestCase):
                         'cmp(%s, %s) should be %s, got %s' %
                         (v1, v2, wanted, res))

    @skipIf(not salt.utils.platform.is_linux(), 'only need to run on linux')
    def test_spelling_version_name(self):
        '''
        check the spelling of the version name for the release
        names in the salt.utils.versions.warn_until call
        '''
        salt_dir = integration.CODE_DIR
        query = 'salt.utils.versions.warn_until'
        names = salt.version.SaltStackVersion.NAMES

        salt_dir += '/salt/'
        cmd = 'grep -lr {0} -A 1 '.format(query) + salt_dir

        grep_call = salt.modules.cmdmod.run_stdout(cmd=cmd).split(os.linesep)

        for line in grep_call:
            num_cmd = salt.modules.cmdmod.run_stdout('grep -c {0} {1}'.format(query, line))
            ver_cmd = salt.modules.cmdmod.run_stdout('grep {0} {1} -A 1'.format(query, line))
            if 'pyc' in line:
                break

            match = 0
            for key in names:
                if key in ver_cmd:
                    match = match + (ver_cmd.count(key))
            if 'utils/__init__.py' in line:
                # workaround for utils/__init__.py because
                # it includes the warn_until function
                match = match + 1
            self.assertEqual(match, int(num_cmd), msg='The file: {0} has an '
                             'incorrect spelling for the release name in the warn_until '
                             'call: {1}. Expecting one of these release names: '
                             '{2}'.format(line, ver_cmd, names))


class VersionFuncsTestCase(TestCase):
