Merge branch 'develop' into develop

commit 1bbd1565d1
rares-pop authored on 2017-08-16 10:52:48 +03:00, committed by GitHub
1061 changed files with 21979 additions and 14079 deletions

.github/stale.yml

@@ -1,8 +1,8 @@
 # Probot Stale configuration file
 # Number of days of inactivity before an issue becomes stale
-# 1130 is approximately 3 years and 1 month
-daysUntilStale: 1130
+# 1115 is approximately 3 years and 1 month
+daysUntilStale: 1115
 # Number of days of inactivity before a stale issue is closed
 daysUntilClose: 7

@@ -1,4 +1,11 @@
 {
+  "alwaysNotifyForPaths": [
+    {
+      "name": "ryan-lane",
+      "files": ["salt/**/*boto*.py"],
+      "skipTeamPrs": false
+    }
+  ],
   "skipTitle": "Merge forward",
   "userBlacklist": ["cvrebert", "markusgattol", "olliewalsh"]
 }

@@ -245,8 +245,8 @@ on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
 project = 'Salt'
 version = salt.version.__version__
-latest_release = '2017.7.0' # latest release
-previous_release = '2016.11.6' # latest release from previous branch
+latest_release = '2017.7.1' # latest release
+previous_release = '2016.11.7' # latest release from previous branch
 previous_release_dir = '2016.11' # path on web server for previous branch
 next_release = '' # next release
 next_release_dir = '' # path on web server for next release branch
@@ -258,8 +258,8 @@ if on_saltstack:
 copyright = time.strftime("%Y")
 # < --- START do not merge these settings to other branches START ---> #
-build_type = 'latest' # latest, previous, develop, next
-release = latest_release # version, latest_release, previous_release
+build_type = 'develop' # latest, previous, develop, next
+release = version # version, latest_release, previous_release
 # < --- END do not merge these settings to other branches END ---> #
 # Set google custom search engine

@@ -19,5 +19,4 @@ auth modules
 pki
 rest
 sharedsecret
-stormpath
 yubico

@@ -1,6 +0,0 @@
===================
salt.auth.stormpath
===================
.. automodule:: salt.auth.stormpath
:members:

@@ -33,6 +33,10 @@ Output Options
     Write the output to the specified file.
 
+.. option:: --out-file-append, --output-file-append
+
+    Append the output to the specified file.
+
 .. option:: --no-color
 
     Disable all colored output
@@ -46,3 +50,14 @@ Output Options
     ``green`` denotes success, ``red`` denotes failure, ``blue`` denotes
     changes and success and ``yellow`` denotes a expected future change in configuration.
+
+.. option:: --state-output=STATE_OUTPUT, --state_output=STATE_OUTPUT
+
+    Override the configured state_output value for minion
+    output. One of 'full', 'terse', 'mixed', 'changes' or
+    'filter'. Default: 'none'.
+
+.. option:: --state-verbose=STATE_VERBOSE, --state_verbose=STATE_VERBOSE
+
+    Override the configured state_verbose value for minion
+    output. Set to True or False. Default: none.
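As a quick illustration of how these new flags combine with an ordinary state run, an invocation along the following lines could be used (the target and arguments are illustrative, not taken from this change):

.. code-block:: bash

    # Run states in test mode, showing only terse, change-oriented minion output
    salt '*' state.apply test=True --state-output=changes --state-verbose=False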

@@ -81,7 +81,7 @@ Options
     Pass in an external authentication medium to validate against. The
     credentials will be prompted for. The options are `auto`,
-    `keystone`, `ldap`, `pam`, and `stormpath`. Can be used with the -T
+    `keystone`, `ldap`, and `pam`. Can be used with the -T
     option.
 
 .. option:: -T, --make-token
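For context, the ``-a`` option documented in this hunk is normally combined with a function call, and optionally with ``-T`` to cache a token for later commands; a typical (illustrative) invocation might be:

.. code-block:: bash

    # Authenticate through PAM, prompting for credentials, and cache a token
    salt -a pam -T '*' test.ping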

@@ -97,6 +97,7 @@ execution modules
 cytest
 daemontools
 data
+datadog_api
 ddns
 deb_apache
 deb_postgres
@@ -399,7 +400,6 @@ execution modules
 state
 status
 statuspage
-stormpath
 supervisord
 suse_apache
 svn

@@ -0,0 +1,6 @@
========================
salt.modules.datadog_api
========================
.. automodule:: salt.modules.datadog_api
:members:

@@ -1,6 +0,0 @@
======================
salt.modules.stormpath
======================
.. automodule:: salt.modules.stormpath
:members:

@@ -3,4 +3,5 @@ salt.modules.test
 =================
 .. automodule:: salt.modules.test
     :members:
+    :exclude-members: rand_str

@@ -429,7 +429,7 @@ similar to the following:
         Confine this module to Mac OS with Homebrew.
         '''
-        if salt.utils.which('brew') and __grains__['os'] == 'MacOS':
+        if salt.utils.path.which('brew') and __grains__['os'] == 'MacOS':
             return __virtualname__
         return False

@@ -146,8 +146,10 @@ Here is a simple YAML renderer example:
     import yaml
     from salt.utils.yamlloader import SaltYamlSafeLoader
+    from salt.ext import six
 
     def render(yaml_data, saltenv='', sls='', **kws):
-        if not isinstance(yaml_data, basestring):
+        if not isinstance(yaml_data, six.string_types):
             yaml_data = yaml_data.read()
         data = yaml.load(
             yaml_data,

@@ -250,7 +250,6 @@ state modules
 stateconf
 status
 statuspage
-stormpath_account
 supervisord
 svn
 sysctl

@@ -1,6 +1,6 @@
-=======================
-salt.modules.kubernetes
-=======================
+======================
+salt.states.kubernetes
+======================
 
-.. automodule:: salt.modules.kubernetes
+.. automodule:: salt.states.kubernetes
     :members:

@@ -1,6 +0,0 @@
=============================
salt.states.stormpath_account
=============================
.. automodule:: salt.states.stormpath_account
:members:

@@ -135,19 +135,23 @@ A State Module must return a dict containing the following keys/values:
   ``test=True``, and changes would have been made if the state was not run in
   test mode.
 
-  +--------------------+-----------+-----------+
-  |                    | live mode | test mode |
-  +====================+===========+===========+
-  | no changes         | ``True``  | ``True``  |
-  +--------------------+-----------+-----------+
-  | successful changes | ``True``  | ``None``  |
-  +--------------------+-----------+-----------+
-  | failed changes     | ``False`` | ``None``  |
-  +--------------------+-----------+-----------+
+  +--------------------+-----------+------------------------+
+  |                    | live mode | test mode              |
+  +====================+===========+========================+
+  | no changes         | ``True``  | ``True``               |
+  +--------------------+-----------+------------------------+
+  | successful changes | ``True``  | ``None``               |
+  +--------------------+-----------+------------------------+
+  | failed changes     | ``False`` | ``False`` or ``None``  |
+  +--------------------+-----------+------------------------+
 
   .. note::
 
-    Test mode does not predict if the changes will be successful or not.
+    Test mode does not predict if the changes will be successful or not,
+    and hence the result for pending changes is usually ``None``.
+
+    However, if a state is going to fail and this can be determined
+    in test mode without applying the change, ``False`` can be returned.
 
 - **comment:** A string containing a summary of the result.
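To tie the table and the note together, here is a minimal, hypothetical state function showing how ``result``, ``changes``, and ``comment`` are typically filled in, including the test-mode behaviour described above; the ``example.*`` execution-module calls are invented for illustration:

.. code-block:: python

    def configured(name, value):
        '''
        Hypothetical state: ensure a (fictional) setting equals ``value``.
        Demonstrates the return dict and the test-mode convention.
        '''
        ret = {'name': name, 'result': False, 'changes': {}, 'comment': ''}

        current = __salt__['example.get_setting'](name)  # assumed helper module
        if current == value:
            ret['result'] = True
            ret['comment'] = 'Setting {0} is already correct'.format(name)
            return ret

        if __opts__['test']:
            # Changes are pending but not applied: result is None in test mode
            ret['result'] = None
            ret['changes'] = {'old': current, 'new': value}
            ret['comment'] = 'Setting {0} would be changed'.format(name)
            return ret

        __salt__['example.set_setting'](name, value)
        ret['result'] = True
        ret['changes'] = {'old': current, 'new': value}
        ret['comment'] = 'Setting {0} was updated'.format(name)
        return ret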

@@ -777,8 +777,6 @@ Stateconf
 stderr
 stdin
 stdout
-stormpath
-Stormpath
 str
 strftime
 subfolder

@@ -146,24 +146,24 @@ library. The following two lines set up the imports:
 .. code-block:: python
 
     from salt.cloud.libcloudfuncs import *  # pylint: disable=W0614,W0401
-    from salt.utils import namespaced_function
+    import salt.utils
 
 And then a series of declarations will make the necessary functions available
 within the cloud module.
 
 .. code-block:: python
 
-    get_size = namespaced_function(get_size, globals())
-    get_image = namespaced_function(get_image, globals())
-    avail_locations = namespaced_function(avail_locations, globals())
-    avail_images = namespaced_function(avail_images, globals())
-    avail_sizes = namespaced_function(avail_sizes, globals())
-    script = namespaced_function(script, globals())
-    destroy = namespaced_function(destroy, globals())
-    list_nodes = namespaced_function(list_nodes, globals())
-    list_nodes_full = namespaced_function(list_nodes_full, globals())
-    list_nodes_select = namespaced_function(list_nodes_select, globals())
-    show_instance = namespaced_function(show_instance, globals())
+    get_size = salt.utils.namespaced_function(get_size, globals())
+    get_image = salt.utils.namespaced_function(get_image, globals())
+    avail_locations = salt.utils.namespaced_function(avail_locations, globals())
+    avail_images = salt.utils.namespaced_function(avail_images, globals())
+    avail_sizes = salt.utils.namespaced_function(avail_sizes, globals())
+    script = salt.utils.namespaced_function(script, globals())
+    destroy = salt.utils.namespaced_function(destroy, globals())
+    list_nodes = salt.utils.namespaced_function(list_nodes, globals())
+    list_nodes_full = salt.utils.namespaced_function(list_nodes_full, globals())
+    list_nodes_select = salt.utils.namespaced_function(list_nodes_select, globals())
+    show_instance = salt.utils.namespaced_function(show_instance, globals())
 
 If necessary, these functions may be replaced by removing the appropriate
 declaration line, and then adding the function as normal.

@@ -49,7 +49,7 @@ Set up an initial profile at ``/etc/salt/cloud.profiles`` or in the
 .. code-block:: yaml
 
-    joyent_512
+    joyent_512:
      provider: my-joyent-config
      size: g4-highcpu-512M
      image: ubuntu-16.04

@@ -18,10 +18,10 @@ on the significance and complexity of the changes required by the user.
 Salt feature releases are based on the Periodic Table. Any new features going
 into the develop branch will be named after the next element in the Periodic
-Table. For example, Beryllium was the feature release name of the develop branch
-before the 2015.8 branch was tagged. At that point in time, any new features going
-into the develop branch after 2015.8 was branched were part of the Boron feature
-release.
+Table. For example, Beryllium was the feature release name of the develop
+branch before the 2015.8 branch was tagged. At that point in time, any new
+features going into the develop branch after 2015.8 was branched were part of
+the Boron feature release.
 
 A deprecation warning should be in place for at least two major releases before
 the deprecated code and its accompanying deprecation warning are removed. More
@@ -29,14 +29,14 @@ time should be given for more complex changes. For example, if the current
 release under development is ``Sodium``, the deprecated code and associated
 warnings should remain in place and warn for at least ``Aluminum``.
 
-To help in this deprecation task, salt provides :func:`salt.utils.warn_until
-<salt.utils.warn_until>`. The idea behind this helper function is to show the
-deprecation warning to the user until salt reaches the provided version. Once
-that provided version is equaled :func:`salt.utils.warn_until
-<salt.utils.warn_until>` will raise a :py:exc:`RuntimeError` making salt stop
-its execution. This stoppage is unpleasant and will remind the developer that
-the deprecation limit has been reached and that the code can then be safely
-removed.
+To help in this deprecation task, salt provides
+:func:`salt.utils.versions.warn_until <salt.utils.versions.warn_until>`. The
+idea behind this helper function is to show the deprecation warning to the user
+until salt reaches the provided version. Once that provided version is equaled
+:func:`salt.utils.versions.warn_until <salt.utils.versions.warn_until>` will
+raise a :py:exc:`RuntimeError` making salt stop its execution. This stoppage is
+unpleasant and will remind the developer that the deprecation limit has been
+reached and that the code can then be safely removed.
 
 Consider the following example:
@@ -44,7 +44,7 @@ Consider the following example:
 
     def some_function(bar=False, foo=None):
         if foo is not None:
-            salt.utils.warn_until(
+            salt.utils.versions.warn_until(
                 'Aluminum',
                 'The \'foo\' argument has been deprecated and its '
                 'functionality removed, as such, its usage is no longer '

@@ -0,0 +1,154 @@
=========================================
Arista EOS Salt minion installation guide
=========================================
The Salt minion for Arista EOS is distributed as a SWIX extension and can be installed directly on the switch. The EOS network operating system is based on old Fedora distributions and the installation of the ``salt-minion`` requires backports. This SWIX extension contains the necessary backports, together with the Salt basecode.
.. note::
This SWIX extension has been tested on Arista DCS-7280SE-68-R, running EOS 4.17.5M and vEOS 4.18.3F.
Important Notes
===============
This package is in beta; make sure to test it carefully before running it in production.
If confirmed working correctly, please report and add a note on this page with the platform model and EOS version.
If you want to uninstall this package, please refer to the uninstalling_ section.
Installation from the Official SaltStack Repository
===================================================
Download the swix package and save it to flash.
.. code-block:: bash
veos#copy https://salt-eos.netops.life/salt-eos-latest.swix flash:
veos#copy https://salt-eos.netops.life/startup.sh flash:
Install the Extension
=====================
Copy the Salt package to extension
.. code-block:: bash
veos#copy flash:salt-eos-latest.swix extension:
Install the SWIX
.. code-block:: bash
veos#extension salt-eos-latest.swix force
Verify the installation
.. code-block:: bash
veos#show extensions | include salt-eos
salt-eos-2017-07-19.swix 1.0.11/1.fc25 A, F 27
Change the Salt master IP address or FQDN by editing the variable (SALT_MASTER)
.. code-block:: bash
veos#bash vi /mnt/flash/startup.sh
Make sure you enable the eAPI with unix-socket
.. code-block:: bash
veos(config)#management api http-commands
protocol unix-socket
no shutdown
Post-installation tasks
=======================
Generate Keys and host record and start Salt minion
.. code-block:: bash
veos#bash
#sudo /mnt/flash/startup.sh
``salt-minion`` should be running
Copy the installed extensions to boot-extensions
.. code-block:: bash
veos#copy installed-extensions boot-extensions
Apply event-handler to let EOS start salt-minion during boot-up
.. code-block:: bash
veos(config)#event-handler boot-up-script
trigger on-boot
action bash sudo /mnt/flash/startup.sh
For more specific installation details of the ``salt-minion``, please refer to :ref:`Configuring Salt<configuring-salt>`.
.. _uninstalling:
Uninstalling
============
If you decide to uninstall this package, the following steps are recommended for safety:
1. Remove the extension from boot-extensions
.. code-block:: bash
veos#bash rm /mnt/flash/boot-extensions
2. Remove the extension from extensions folder
.. code-block:: bash
veos#bash rm /mnt/flash/.extensions/salt-eos-latest.swix
3. Remove the boot-up script
.. code-block:: bash
veos(config)#no event-handler boot-up-script
Additional Information
======================
This SWIX extension contains the following RPM packages:
.. code-block:: text
libsodium-1.0.11-1.fc25.i686.rpm
libstdc++-6.2.1-2.fc25.i686.rpm
openpgm-5.2.122-6.fc24.i686.rpm
python-Jinja2-2.8-0.i686.rpm
python-PyYAML-3.12-0.i686.rpm
python-babel-0.9.6-5.fc18.noarch.rpm
python-backports-1.0-3.fc18.i686.rpm
python-backports-ssl_match_hostname-3.4.0.2-1.fc18.noarch.rpm
python-backports_abc-0.5-0.i686.rpm
python-certifi-2016.9.26-0.i686.rpm
python-chardet-2.0.1-5.fc18.noarch.rpm
python-crypto-1.4.1-1.noarch.rpm
python-crypto-2.6.1-1.fc18.i686.rpm
python-futures-3.1.1-1.noarch.rpm
python-jtextfsm-0.3.1-0.noarch.rpm
python-kitchen-1.1.1-2.fc18.noarch.rpm
python-markupsafe-0.18-1.fc18.i686.rpm
python-msgpack-python-0.4.8-0.i686.rpm
python-napalm-base-0.24.3-1.noarch.rpm
python-napalm-eos-0.6.0-1.noarch.rpm
python-netaddr-0.7.18-0.noarch.rpm
python-pyeapi-0.7.0-0.noarch.rpm
python-salt-2017.7.0_1414_g2fb986f-1.noarch.rpm
python-singledispatch-3.4.0.3-0.i686.rpm
python-six-1.10.0-0.i686.rpm
python-tornado-4.4.2-0.i686.rpm
python-urllib3-1.5-7.fc18.noarch.rpm
python2-zmq-15.3.0-2.fc25.i686.rpm
zeromq-4.1.4-5.fc25.i686.rpm

@@ -46,6 +46,7 @@ These guides go into detail how to install Salt on a given platform.
 arch
 debian
+eos
 fedora
 freebsd
 gentoo

@@ -828,12 +828,14 @@ Returns:
     08.03.2017 17:00
 
-.. jinja_ref:: str_to_num
+.. jinja_ref:: to_num
 
-``str_to_num``
---------------
+``to_num``
+----------
 
 .. versionadded:: 2017.7.0
+.. versionadded:: Oxygen
+    Renamed from ``str_to_num`` to ``to_num``.
 
 Converts a string to its numerical value.
@@ -841,7 +843,7 @@ Example:
 .. code-block:: jinja
 
-    {{ '5' | str_to_num }}
+    {{ '5' | to_num }}
 
 Returns:
@@ -917,10 +919,10 @@ Returns:
     {'a': 'b'}
 
-.. jinja_ref:: rand_str
+.. jinja_ref:: random_hash
 
-``rand_str``
-------------
+``random_hash``
+---------------
 
 .. versionadded:: 2017.7.0
 .. versionadded:: Oxygen
@@ -937,8 +939,8 @@ Example:
 .. code-block:: jinja
 
     {% set num_range = 99999999 %}
-    {{ num_range | rand_str }}
-    {{ num_range | rand_str('sha512') }}
+    {{ num_range | random_hash }}
+    {{ num_range | random_hash('sha512') }}
 
 Returns:

@@ -89,7 +89,7 @@ they are being loaded for the correct proxytype, example below:
         Only work on proxy
         '''
         try:
-            if salt.utils.is_proxy() and \
+            if salt.utils.platform.is_proxy() and \
                     __opts__['proxy']['proxytype'] == 'ssh_sample':
                 return __virtualname__
         except KeyError:
@@ -156,20 +156,23 @@ will need to be restarted to pick up any changes. A corresponding utility funct
 ``saltutil.sync_proxymodules``, has been added to sync these modules to minions.
 
 In addition, a salt.utils helper function called `is_proxy()` was added to make
-it easier to tell when the running minion is a proxy minion.
+it easier to tell when the running minion is a proxy minion. **NOTE: This
+function was renamed to salt.utils.platform.is_proxy() for the Oxygen release**
 
 New in 2015.8
 -------------
 
-Starting with the 2015.8 release of Salt, proxy processes are no longer forked off from a controlling minion.
-Instead, they have their own script ``salt-proxy`` which takes mostly the same arguments that the
-standard Salt minion does with the addition of ``--proxyid``. This is the id that the salt-proxy will
-use to identify itself to the master. Proxy configurations are still best kept in Pillar and their format
-has not changed.
+Starting with the 2015.8 release of Salt, proxy processes are no longer forked
+off from a controlling minion. Instead, they have their own script
+``salt-proxy`` which takes mostly the same arguments that the standard Salt
+minion does with the addition of ``--proxyid``. This is the id that the
+salt-proxy will use to identify itself to the master. Proxy configurations are
+still best kept in Pillar and their format has not changed.
 
-This change allows for better process control and logging. Proxy processes can now be listed with standard
-process management utilities (``ps`` from the command line). Also, a full Salt minion is no longer
-required (though it is still strongly recommended) on machines hosting proxies.
+This change allows for better process control and logging. Proxy processes can
+now be listed with standard process management utilities (``ps`` from the
+command line). Also, a full Salt minion is no longer required (though it is
+still strongly recommended) on machines hosting proxies.
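As a concrete sketch of the paragraph above, a proxy process for a proxy minion with the id ``p8000`` could be started and then observed with ordinary process tools (the id and the ``-d`` daemonize flag are illustrative):

.. code-block:: bash

    # Start a proxy process for the proxy minion id "p8000"
    salt-proxy --proxyid=p8000 -d

    # Proxy processes now show up with standard process management utilities
    ps aux | grep salt-proxy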
 Getting Started
@@ -619,9 +622,10 @@ in the proxymodule itself. This might be useful if a proxymodule author wants t
 all the code for the proxy interface in the same place instead of splitting it between
 the proxy and grains directories.
 
-This function will only be called automatically if the configuration variable ``proxy_merge_grains_in_module``
-is set to True in the proxy configuration file (default ``/etc/salt/proxy``). This
-variable defaults to ``True`` in the release code-named *2017.7.0*.
+This function will only be called automatically if the configuration variable
+``proxy_merge_grains_in_module`` is set to True in the proxy configuration file
+(default ``/etc/salt/proxy``). This variable defaults to ``True`` in the
+release code-named *2017.7.0*.
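Expressed as configuration, this is a single top-level key in the proxy configuration file; a minimal sketch of ``/etc/salt/proxy`` with the option set explicitly:

.. code-block:: yaml

    # /etc/salt/proxy -- merge grains defined in the proxymodule automatically
    proxy_merge_grains_in_module: True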
 .. code: python::
@@ -640,7 +644,7 @@ variable defaults to ``True`` in the release code-named *2017.7.0*.
     def __virtual__():
         try:
-            if salt.utils.is_proxy() and __opts__['proxy']['proxytype'] == 'rest_sample':
+            if salt.utils.platform.is_proxy() and __opts__['proxy']['proxytype'] == 'rest_sample':
                 return __virtualname__
         except KeyError:
             pass
@@ -708,7 +712,7 @@ Example from ``salt/grains/rest_sample.py``:
     def __virtual__():
         try:
-            if salt.utils.is_proxy() and __opts__['proxy']['proxytype'] == 'rest_sample':
+            if salt.utils.platform.is_proxy() and __opts__['proxy']['proxytype'] == 'rest_sample':
                 return __virtualname__
         except KeyError:
             pass

@@ -175,6 +175,10 @@ they are being loaded for the correct proxytype, example below:
         return False
 
+.. note::
+    ``salt.utils.is_proxy()`` has been renamed to
+    ``salt.utils.platform.is_proxy`` as of the Oxygen release.
+
 The try/except block above exists because grains are processed very early
 in the proxy minion startup process, sometimes earlier than the proxy
 key in the ``__opts__`` dictionary is populated.

@@ -38,6 +38,14 @@ In previous releases the bind credentials would only be used to determine
 the LDAP user's existence and group membership. The user's LDAP credentials
 were used from then on.
 
+Stormpath External Authentication Removed
+-----------------------------------------
+
+Per Stormpath's announcement, their API will be shutting down on 8/17/2017 at
+noon PST so the Stormpath external authentication module has been removed.
+
+https://stormpath.com/oktaplusstormpath
+
 New GitFS Features
 ------------------
@@ -63,10 +71,10 @@ environments (i.e. ``saltenvs``) have been added:
 available as saltenvs.
 
 Salt Cloud Features
-===================
+-------------------
 
 Pre-Flight Commands
--------------------
+===================
 
 Support has been added for specified "preflight commands" to run on a VM before
 the deploy script is run. These must be defined as a list in a cloud configuration
@@ -98,23 +106,514 @@ running on T-Series SPARC hardware. The ``virtual_subtype`` grain is
 populated as a list of domain roles.
Beacon configuration changes
----------------------------------------
In order to remain consistent and to align with other Salt components such as states,
support for configuring beacons using dictionary based configuration has been deprecated
in favor of list based configuration. All beacons have a validation function which will
check the configuration for the correct format and only load if the validation passes.
- ``avahi_announce`` beacon
Old behavior:
```
beacons:
avahi_announce:
run_once: True
servicetype: _demo._tcp
port: 1234
txt:
ProdName: grains.productname
SerialNo: grains.serialnumber
Comments: 'this is a test'
```
New behavior:
```
beacons:
avahi_announce:
- run_once: True
- servicetype: _demo._tcp
- port: 1234
- txt:
ProdName: grains.productname
SerialNo: grains.serialnumber
Comments: 'this is a test'
```
- ``bonjour_announce`` beacon
Old behavior:
```
beacons:
bonjour_announce:
run_once: True
servicetype: _demo._tcp
port: 1234
txt:
ProdName: grains.productname
SerialNo: grains.serialnumber
Comments: 'this is a test'
```
New behavior:
```
beacons:
bonjour_announce:
- run_once: True
- servicetype: _demo._tcp
- port: 1234
- txt:
ProdName: grains.productname
SerialNo: grains.serialnumber
Comments: 'this is a test'
```
- ``btmp`` beacon
Old behavior:
```
beacons:
btmp: {}
```
New behavior:
```
beacons:
btmp: []
```
- ``glxinfo`` beacon
Old behavior:
```
beacons:
glxinfo:
user: frank
screen_event: True
```
New behavior:
```
beacons:
glxinfo:
- user: frank
- screen_event: True
```
- ``haproxy`` beacon
Old behavior:
```
beacons:
haproxy:
- www-backend:
threshold: 45
servers:
- web1
- web2
- interval: 120
```
New behavior:
```
beacons:
haproxy:
- backends:
www-backend:
threshold: 45
servers:
- web1
- web2
- interval: 120
```
- ``inotify`` beacon
Old behavior:
```
beacons:
inotify:
/path/to/file/or/dir:
mask:
- open
- create
- close_write
recurse: True
auto_add: True
exclude:
- /path/to/file/or/dir/exclude1
- /path/to/file/or/dir/exclude2
- /path/to/file/or/dir/regex[a-m]*$:
regex: True
coalesce: True
```
New behavior:
```
beacons:
inotify:
- files:
/path/to/file/or/dir:
mask:
- open
- create
- close_write
recurse: True
auto_add: True
exclude:
- /path/to/file/or/dir/exclude1
- /path/to/file/or/dir/exclude2
- /path/to/file/or/dir/regex[a-m]*$:
regex: True
- coalesce: True
```
- ``journald`` beacon
Old behavior:
```
beacons:
journald:
sshd:
SYSLOG_IDENTIFIER: sshd
PRIORITY: 6
```
New behavior:
```
beacons:
journald:
- services:
sshd:
SYSLOG_IDENTIFIER: sshd
PRIORITY: 6
```
- ``load`` beacon
Old behavior:
```
beacons:
load:
1m:
- 0.0
- 2.0
5m:
- 0.0
- 1.5
15m:
- 0.1
- 1.0
emitatstartup: True
onchangeonly: False
```
New behavior:
```
beacons:
load:
- averages:
1m:
- 0.0
- 2.0
5m:
- 0.0
- 1.5
15m:
- 0.1
- 1.0
- emitatstartup: True
- onchangeonly: False
```
- ``log`` beacon
Old behavior:
```
beacons:
log:
file: <path>
<tag>:
regex: <pattern>
```
New behavior:
```
beacons:
log:
- file: <path>
- tags:
<tag>:
regex: <pattern>
```
- ``network_info`` beacon
Old behavior:
```
beacons:
network_info:
- eth0:
type: equal
bytes_sent: 100000
bytes_recv: 100000
packets_sent: 100000
packets_recv: 100000
errin: 100
errout: 100
dropin: 100
dropout: 100
```
New behavior:
```
beacons:
network_info:
- interfaces:
eth0:
type: equal
bytes_sent: 100000
bytes_recv: 100000
packets_sent: 100000
packets_recv: 100000
errin: 100
errout: 100
dropin: 100
dropout: 100
```
- ``network_settings`` beacon
Old behavior:
```
beacons:
network_settings:
eth0:
ipaddr:
promiscuity:
onvalue: 1
eth1:
linkmode:
```
New behavior:
```
beacons:
network_settings:
- interfaces:
- eth0:
ipaddr:
promiscuity:
onvalue: 1
- eth1:
linkmode:
```
- ``proxy_example`` beacon
Old behavior:
```
beacons:
proxy_example:
endpoint: beacon
```
New behavior:
```
beacons:
proxy_example:
- endpoint: beacon
```
- ``ps`` beacon
Old behavior:
```
beacons:
ps:
- salt-master: running
- mysql: stopped
```
New behavior:
```
beacons:
ps:
- processes:
salt-master: running
mysql: stopped
```
- ``salt_proxy`` beacon
Old behavior:
```
beacons:
salt_proxy:
- p8000: {}
- p8001: {}
```
New behavior:
```
beacons:
salt_proxy:
- proxies:
p8000: {}
p8001: {}
```
- ``sensehat`` beacon
Old behavior:
```
beacons:
sensehat:
humidity: 70%
temperature: [20, 40]
temperature_from_pressure: 40
pressure: 1500
```
New behavior:
```
beacons:
sensehat:
- sensors:
humidity: 70%
temperature: [20, 40]
temperature_from_pressure: 40
pressure: 1500
```
- ``service`` beacon
Old behavior:
```
beacons:
service:
salt-master:
mysql:
```
New behavior:
```
beacons:
service:
- services:
nginx:
onchangeonly: True
delay: 30
uncleanshutdown: /run/nginx.pid
```
- ``sh`` beacon
Old behavior:
```
beacons:
sh: {}
```
New behavior:
```
beacons:
sh: []
```
- ``status`` beacon
Old behavior:
```
beacons:
status: {}
```
New behavior:
```
beacons:
status: []
```
- ``telegram_bot_msg`` beacon
Old behavior:
```
beacons:
telegram_bot_msg:
token: "<bot access token>"
accept_from:
- "<valid username>"
interval: 10
```
New behavior:
```
beacons:
telegram_bot_msg:
- token: "<bot access token>"
- accept_from:
- "<valid username>"
- interval: 10
```
- ``twilio_txt_msg`` beacon
Old behavior:
```
beacons:
twilio_txt_msg:
account_sid: "<account sid>"
auth_token: "<auth token>"
twilio_number: "+15555555555"
interval: 10
```
New behavior:
```
beacons:
twilio_txt_msg:
- account_sid: "<account sid>"
- auth_token: "<auth token>"
- twilio_number: "+15555555555"
- interval: 10
```
- ``wtmp`` beacon
Old behavior:
```
beacons:
wtmp: {}
```
New behavior:
```
beacons:
wtmp: []
```
 Deprecations
-============
+------------
 
 Configuration Option Deprecations
----------------------------------
+=================================
 
 - The ``requests_lib`` configuration option has been removed. Please use
   ``backend`` instead.
 
 Profitbricks Cloud Updated Dependency
--------------------------------------
+=====================================
 
-The minimum version of the `profitbrick` python package for the `profitbricks`
+The minimum version of the ``profitbrick`` python package for the ``profitbricks``
 cloud driver has changed from 3.0.0 to 3.1.0.
 
 Module Deprecations
--------------------
+===================
 
 The ``blockdev`` execution module has been removed. Its functions were merged
 with the ``disk`` module. Please use the ``disk`` execution module instead.
@@ -154,7 +653,7 @@ The ``win_service`` module had the following changes:
   ``service_type`` instead.
 
 Runner Deprecations
--------------------
+===================
 
 The ``manage`` runner had the following changes:
@@ -162,7 +661,7 @@ The ``manage`` runner had the following changes:
   use ``salt-ssh`` roster entries for the host instead.
 
 State Deprecations
-------------------
+==================
 
 The ``archive`` state had the following changes:
@@ -185,7 +684,7 @@ The ``file`` state had the following changes:
 - The ``show_diff`` option was removed. Please use ``show_changes`` instead.
 
 Grain Deprecations
-------------------
+==================
 
 For ``smartos`` some grains have been deprecated. These grains will be removed in Neon.
@@ -205,7 +704,7 @@ as well, by using special grains keys ``minion_blackout`` and
 ``minion_blackout_whitelist``.
 
 Utils Deprecations
-------------------
+==================
 
 The ``salt.utils.cloud.py`` file had the following change:
@@ -213,7 +712,7 @@ The ``salt.utils.cloud.py`` file had the following change:
   optional.
 
 Other Miscellaneous Deprecations
---------------------------------
+================================
 
 The ``version.py`` file had the following changes:

@@ -166,13 +166,15 @@ Ubuntu 14.04 LTS and Debian Wheezy (7.x) also have a compatible version packaged
     # apt-get install python-git
 
-If your master is running an older version (such as Ubuntu 12.04 LTS or Debian
-Squeeze), then you will need to install GitPython using either pip_ or
-easy_install (it is recommended to use pip). Version 0.3.2.RC1 is now marked as
-the stable release in PyPI, so it should be a simple matter of running ``pip
-install GitPython`` (or ``easy_install GitPython``) as root.
+GitPython_ requires the ``git`` CLI utility to work. If installed from a system
+package, then git should already be installed, but if installed via pip_ then
+it may still be necessary to install git separately. For MacOS users,
+GitPython_ comes bundled in with the Salt installer, but git must still be
+installed for it to work properly. Git can be installed in several ways,
+including by installing XCode_.
 
-.. _`pip`: http://www.pip-installer.org/
+.. _pip: http://www.pip-installer.org/
+.. _XCode: https://developer.apple.com/xcode/
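To make that concrete, a typical way to satisfy both requirements is sketched below; the exact commands depend on the platform and are illustrative rather than prescriptive:

.. code-block:: bash

    # Install the GitPython bindings
    pip install GitPython

    # On MacOS, the git CLI itself can be obtained via the XCode command line tools
    xcode-select --install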
 .. warning::

@@ -110,7 +110,7 @@ To pass through a file that contains jinja + yaml templating (the default):
         method='POST',
         data_file='/srv/salt/somefile.jinja',
         data_render=True,
-        template_data={'key1': 'value1', 'key2': 'value2'}
+        template_dict={'key1': 'value1', 'key2': 'value2'}
     )
 
 To pass through a file that contains mako templating:
@@ -123,7 +123,7 @@ To pass through a file that contains mako templating:
         data_file='/srv/salt/somefile.mako',
         data_render=True,
         data_renderer='mako',
-        template_data={'key1': 'value1', 'key2': 'value2'}
+        template_dict={'key1': 'value1', 'key2': 'value2'}
     )
 
 Because this function uses Salt's own rendering system, any Salt renderer can
@@ -140,7 +140,7 @@ However, this can be changed to ``master`` if necessary.
         method='POST',
         data_file='/srv/salt/somefile.jinja',
         data_render=True,
-        template_data={'key1': 'value1', 'key2': 'value2'},
+        template_dict={'key1': 'value1', 'key2': 'value2'},
         opts=__opts__
     )
 
@@ -149,7 +149,7 @@ However, this can be changed to ``master`` if necessary.
         method='POST',
         data_file='/srv/salt/somefile.jinja',
         data_render=True,
-        template_data={'key1': 'value1', 'key2': 'value2'},
+        template_dict={'key1': 'value1', 'key2': 'value2'},
         node='master'
     )
 
@@ -170,11 +170,11 @@ a Python dict.
         header_file='/srv/salt/headers.jinja',
         header_render=True,
         header_renderer='jinja',
-        template_data={'key1': 'value1', 'key2': 'value2'}
+        template_dict={'key1': 'value1', 'key2': 'value2'}
     )
 
 Because much of the data that would be templated between headers and data may be
-the same, the ``template_data`` is the same for both. Correcting possible
+the same, the ``template_dict`` is the same for both. Correcting possible
 variable name collisions is up to the user.
 
 Authentication

@@ -28,9 +28,8 @@ Tutorials Index
 * :ref:`States tutorial, part 3 - Templating, Includes, Extends <tutorial-states-part-3>`
 * :ref:`States tutorial, part 4 <tutorial-states-part-4>`
 * :ref:`How to Convert Jinja Logic to an Execution Module <tutorial-jinja_to_execution-module>`
-* :ref:`Using Salt with Stormpath <tutorial-stormpath>`
 * :ref:`Syslog-ng usage <syslog-ng-sate-usage>`
 * :ref:`The macOS (Maverick) Developer Step By Step Guide To Salt Installation <tutorial-macos-walk-through>`
 * :ref:`SaltStack Walk-through <tutorial-salt-walk-through>`
 * :ref:`Writing Salt Tests <tutorial-salt-testing>`
 * :ref:`Multi-cloud orchestration with Apache Libcloud <tutorial-libcloud>`

@@ -1,198 +0,0 @@
.. _tutorial-stormpath:
=========================
Using Salt with Stormpath
=========================
`Stormpath <https://stormpath.com/>`_ is a user management and authentication
service. This tutorial covers using SaltStack to manage and take advantage of
Stormpath's features.
External Authentication
-----------------------
Stormpath can be used for Salt's external authentication system. In order to do
this, the master should be configured with an ``apiid``, ``apikey``, and the ID
of the ``application`` that is associated with the users to be authenticated:
.. code-block:: yaml
stormpath:
apiid: 367DFSF4FRJ8767FSF4G34FGH
apikey: FEFREF43t3FEFRe/f323fwer4FWF3445gferWRWEer1
application: 786786FREFrefreg435fr1
.. note::
These values can be found in the `Stormpath dashboard
<https://api.stormpath.com/ui2/index.html#/>`_`.
Users that are to be authenticated should be set up under the ``stormpath``
dict under ``external_auth``:
.. code-block:: yaml
external_auth:
stormpath:
larry:
- .*
- '@runner'
- '@wheel'
Keep in mind that while Stormpath defaults the username associated with the
account to the email address, it is better to use a username without an ``@``
sign in it.
Configuring Stormpath Modules
-----------------------------
Stormpath accounts can be managed via either an execution or state module. In
order to use either, a minion must be configured with an API ID and key.
.. code-block:: yaml
stormpath:
apiid: 367DFSF4FRJ8767FSF4G34FGH
apikey: FEFREF43t3FEFRe/f323fwer4FWF3445gferWRWEer1
directory: efreg435fr1786786FREFr
application: 786786FREFrefreg435fr1
Some functions in the ``stormpath`` modules can make use of other options. The
following options are also available.
directory
`````````
The ID of the directory that is to be used with this minion. Many functions
require an ID to be specified to do their work. However, if the ID of a
``directory`` is specified, then Salt can often look up the resource in
question.
application
```````````
The ID of the application that is to be used with this minion. Many functions
require an ID to be specified to do their work. However, if the ID of a
``application`` is specified, then Salt can often look up the resource in
question.
Managing Stormpath Accounts
---------------------------
With the ``stormpath`` configuration in place, Salt can be used to configure
accounts (which may be thought of as users) on the Stormpath service. The
following functions are available.
stormpath.create_account
````````````````````````
Create an account on the Stormpath service. This requires a ``directory_id`` as
the first argument; it will not be retrieved from the minion configuration. An
``email`` address, ``password``, first name (``givenName``) and last name
(``surname``) are also required. For the full list of other parameters that may
be specified, see:
http://docs.stormpath.com/rest/product-guide/#account-resource
When executed with no errors, this function will return the information about
the account, from Stormpath.
.. code-block:: bash
salt myminion stormpath.create_account <directory_id> shemp@example.com letmein Shemp Howard
stormpath.list_accounts
```````````````````````
Show all accounts on the Stormpath service. This will return all accounts,
regardless of directory, application, or group.
.. code-block:: bash
salt myminion stormpath.list_accounts
'''
stormpath.show_account
``````````````````````
Show the details for a specific Stormpath account. An ``account_id`` is normally
required. However, if an ``email`` is provided instead, along with either a
``directory_id``, ``application_id``, or ``group_id``, then Salt will search the
specified resource to try and locate the ``account_id``.
.. code-block:: bash
salt myminion stormpath.show_account <account_id>
salt myminion stormpath.show_account email=<email> directory_id=<directory_id>
stormpath.update_account
````````````````````````
Update one or more items for this account. Specifying an empty value will clear
it for that account. This function may be used in one of two ways. In order to
update only one key/value pair, specify them in order:
.. code-block:: bash
salt myminion stormpath.update_account <account_id> givenName shemp
salt myminion stormpath.update_account <account_id> middleName ''
In order to specify multiple items, they need to be passed in as a dict. From
the command line, it is best to do this as a JSON string:
.. code-block:: bash
salt myminion stormpath.update_account <account_id> items='{"givenName": "Shemp"}
salt myminion stormpath.update_account <account_id> items='{"middlename": ""}
When executed with no errors, this function will return the information about
the account, from Stormpath.
stormpath.delete_account
````````````````````````
Delete an account from Stormpath.
.. code-block:: bash
salt myminion stormpath.delete_account <account_id>
stormpath.list_directories
``````````````````````````
Show all directories associated with this tenant.
.. code-block:: bash
salt myminion stormpath.list_directories
Using Stormpath States
----------------------
Stormpath resources may be managed using the state system. The following states
are available.
stormpath_account.present
`````````````````````````
Ensure that an account exists on the Stormpath service. All options that are
available with the ``stormpath.create_account`` function are available here.
If an account needs to be created, then this function will require the same
fields that ``stormpath.create_account`` requires, including the ``password``.
However, if a password changes for an existing account, it will NOT be updated
by this state.
.. code-block:: yaml
curly@example.com:
stormpath_account.present:
- directory_id: efreg435fr1786786FREFr
- password: badpass
- firstName: Curly
- surname: Howard
- nickname: curly
It is advisable to always set a ``nickname`` that is not also an email address,
so that it can be used by Salt's external authentication module.
stormpath_account.absent
````````````````````````
Ensure that an account does not exist on Stormpath. As with
``stormpath_account.present``, the ``name`` supplied to this state is the
``email`` address associated with this account. Salt will use this, with or
without the ``directory`` ID that is configured for the minion. However, lookups
will be much faster with a directory ID specified.

@@ -7,7 +7,7 @@ CherryPy==11.0.0
 click==6.7
 enum34==1.1.6
 gitdb==0.6.4
-GitPython==2.1.5
+GitPython==2.1.1
 idna==2.5
 ipaddress==1.0.18
 Jinja2==2.9.6

@@ -5,3 +5,4 @@ yappi>=0.8.2
 --allow-unverified python-neutronclient>2.3.6
 python-gnupg
 cherrypy>=3.2.2
+libnacl

@@ -7,6 +7,7 @@ Salt package
 from __future__ import absolute_import
 import warnings
 
+# future lint: disable=non-unicode-string
 # All salt related deprecation warnings should be shown once each!
 warnings.filterwarnings(
     'once',                 # Show once
@@ -14,18 +15,19 @@ warnings.filterwarnings(
     DeprecationWarning,     # This filter is for DeprecationWarnings
     r'^(salt|salt\.(.*))$'  # Match module(s) 'salt' and 'salt.<whatever>'
 )
+# future lint: enable=non-unicode-string
 
 # While we are supporting Python2.6, hide nested with-statements warnings
 warnings.filterwarnings(
-    'ignore',
-    'With-statements now directly support multiple context managers',
+    u'ignore',
+    u'With-statements now directly support multiple context managers',
     DeprecationWarning
 )
 
 # Filter the backports package UserWarning about being re-imported
 warnings.filterwarnings(
-    'ignore',
-    '^Module backports was already imported from (.*), but (.*) is being added to sys.path$',
+    u'ignore',
+    u'^Module backports was already imported from (.*), but (.*) is being added to sys.path$',
     UserWarning
 )
@@ -37,7 +39,7 @@ def __define_global_system_encoding_variable__():
     # and reset to None
     encoding = None
 
-    if not sys.platform.startswith('win') and sys.stdin is not None:
+    if not sys.platform.startswith(u'win') and sys.stdin is not None:
         # On linux we can rely on sys.stdin for the encoding since it
         # most commonly matches the filesystem encoding. This however
         # does not apply to windows
@@ -63,16 +65,16 @@ def __define_global_system_encoding_variable__():
         # the way back to ascii
         encoding = sys.getdefaultencoding()
         if not encoding:
-            if sys.platform.startswith('darwin'):
+            if sys.platform.startswith(u'darwin'):
                 # Mac OS X uses UTF-8
-                encoding = 'utf-8'
-            elif sys.platform.startswith('win'):
+                encoding = u'utf-8'
+            elif sys.platform.startswith(u'win'):
                 # Windows uses a configurable encoding; on Windows, Python uses the name "mbcs"
                 # to refer to whatever the currently configured encoding is.
-                encoding = 'mbcs'
+                encoding = u'mbcs'
             else:
                 # On linux default to ascii as a last resort
-                encoding = 'ascii'
+                encoding = u'ascii'
 
     # We can't use six.moves.builtins because these builtins get deleted sooner
     # than expected. See:
@@ -83,7 +85,7 @@ def __define_global_system_encoding_variable__():
     import builtins  # pylint: disable=import-error
 
     # Define the detected encoding as a built-in variable for ease of use
-    setattr(builtins, '__salt_system_encoding__', encoding)
+    setattr(builtins, u'__salt_system_encoding__', encoding)
 
     # This is now garbage collectable
     del sys

@@ -46,7 +46,7 @@ else:
 
 if HAS_XML:
-    if not hasattr(ElementTree, 'ParseError'):
+    if not hasattr(ElementTree, u'ParseError'):
         class ParseError(Exception):
             '''
             older versions of ElementTree do not have ParseError
@@ -56,7 +56,7 @@ if HAS_XML:
         ElementTree.ParseError = ParseError
 
-def text_(s, encoding='latin-1', errors='strict'):
+def text_(s, encoding=u'latin-1', errors=u'strict'):
     '''
     If ``s`` is an instance of ``binary_type``, return
     ``s.decode(encoding, errors)``, otherwise return ``s``
@@ -66,7 +66,7 @@ def text_(s, encoding='latin-1', errors='strict'):
     return s
 
-def bytes_(s, encoding='latin-1', errors='strict'):
+def bytes_(s, encoding=u'latin-1', errors=u'strict'):
     '''
     If ``s`` is an instance of ``text_type``, return
     ``s.encode(encoding, errors)``, otherwise return ``s``
@@ -79,25 +79,25 @@ def bytes_(s, encoding='latin-1', errors='strict'):
 if PY3:
     def ascii_native_(s):
         if isinstance(s, text_type):
-            s = s.encode('ascii')
-        return str(s, 'ascii', 'strict')
+            s = s.encode(u'ascii')
+        return str(s, u'ascii', u'strict')
 else:
     def ascii_native_(s):
         if isinstance(s, text_type):
-            s = s.encode('ascii')
+            s = s.encode(u'ascii')
         return str(s)
 
 ascii_native_.__doc__ = '''
 Python 3: If ``s`` is an instance of ``text_type``, return
-``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
+``s.encode(u'ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
 
 Python 2: If ``s`` is an instance of ``text_type``, return
 ``s.encode('ascii')``, otherwise return ``str(s)``
-'''
+'''  # future lint: disable=non-unicode-string
 
 if PY3:
-    def native_(s, encoding='latin-1', errors='strict'):
+    def native_(s, encoding=u'latin-1', errors=u'strict'):
         '''
         If ``s`` is an instance of ``text_type``, return
         ``s``, otherwise return ``str(s, encoding, errors)``
@@ -106,7 +106,7 @@ if PY3:
             return s
         return str(s, encoding, errors)
 else:
-    def native_(s, encoding='latin-1', errors='strict'):
+    def native_(s, encoding=u'latin-1', errors=u'strict'):
         '''
         If ``s`` is an instance of ``text_type``, return
         ``s.encode(encoding, errors)``, otherwise return ``str(s)``
@@ -121,7 +121,7 @@ return ``str(s, encoding, errors)``
 Python 2: If ``s`` is an instance of ``text_type``, return
 ``s.encode(encoding, errors)``, otherwise return ``str(s)``
-'''
+'''  # future lint: disable=non-unicode-string
 
 def string_io(data=None):  # cStringIO can't handle unicode

@@ -10,8 +10,13 @@ found by reading the salt documentation:
 # Import python libraries
 from __future__ import absolute_import
 
+# Import salt libs
 import salt.utils
 
+# Import 3rd-party libs
+from salt.ext import six
+
 
 class PublisherACL(object):
     '''
@@ -30,7 +35,7 @@ class PublisherACL(object):
     def cmd_is_blacklisted(self, cmd):
         # If this is a regular command, it is a single function
-        if isinstance(cmd, str):
+        if isinstance(cmd, six.string_types):
             cmd = [cmd]
         for fun in cmd:
             if not salt.utils.check_whitelist_blacklist(fun, blacklist=self.blacklist.get('modules', [])):

@@ -55,7 +55,7 @@ import sys
 
 # Import 3rd-party libs
-import salt.ext.six as six
+from salt.ext import six
 # pylint: disable=import-error
 try:
     import django

@@ -101,8 +101,8 @@ import logging
 import os
 
 # Import salt utils
-import salt.utils
 import salt.utils.files
+import salt.utils.versions
 
 log = logging.getLogger(__name__)
@@ -200,7 +200,7 @@ def _htpasswd(username, password, **kwargs):
     pwfile = HtpasswdFile(kwargs['filename'])
 
     # passlib below version 1.6 uses 'verify' function instead of 'check_password'
-    if salt.utils.version_cmp(kwargs['passlib_version'], '1.6') < 0:
+    if salt.utils.versions.version_cmp(kwargs['passlib_version'], '1.6') < 0:
         return pwfile.verify(username, password)
     else:
         return pwfile.check_password(username, password)
@@ -222,7 +222,7 @@ def _htdigest(username, password, **kwargs):
     pwfile = HtdigestFile(kwargs['filename'])
 
     # passlib below version 1.6 uses 'verify' function instead of 'check_password'
-    if salt.utils.version_cmp(kwargs['passlib_version'], '1.6') < 0:
+    if salt.utils.versions.version_cmp(kwargs['passlib_version'], '1.6') < 0:
         return pwfile.verify(username, realm, password)
     else:
         return pwfile.check_password(username, realm, password)

@@ -8,7 +8,7 @@ Provide authentication using simple LDAP binds
 # Import python libs
 from __future__ import absolute_import
 import logging
-import salt.ext.six as six
+from salt.ext import six
 
 # Import salt libs
 from salt.exceptions import CommandExecutionError, SaltInvocationError

@@ -42,11 +42,11 @@ from ctypes import c_void_p, c_uint, c_char_p, c_char, c_int
 from ctypes.util import find_library
 
 # Import Salt libs
-from salt.utils import get_group_list
+import salt.utils  # Can be removed once get_group_list is moved
 from salt.ext.six.moves import range  # pylint: disable=import-error,redefined-builtin
 
 # Import 3rd-party libs
-import salt.ext.six as six
+from salt.ext import six
 
 LIBPAM = CDLL(find_library('pam'))
 LIBC = CDLL(find_library('c'))
@@ -214,4 +214,4 @@ def groups(username, *args, **kwargs):
     Uses system groups
     '''
-    return get_group_list(username)
+    return salt.utils.get_group_list(username)


@ -1,71 +0,0 @@
# -*- coding: utf-8 -*-
'''
Provide authentication using Stormpath.
This driver requires some extra configuration beyond that which Stormpath
normally requires.
.. code-block:: yaml
stormpath:
apiid: 1234567890
apikey: 1234567890/ABCDEF
# Can use an application ID
application: 6789012345
# Or can use a directory ID
directory: 3456789012
# But not both
.. versionadded:: 2015.8.0
'''
from __future__ import absolute_import
import json
import base64
import urllib
import salt.utils.http
import logging
log = logging.getLogger(__name__)
def auth(username, password):
'''
Authenticate using a Stormpath directory or application
'''
apiid = __opts__.get('stormpath', {}).get('apiid', None)
apikey = __opts__.get('stormpath', {}).get('apikey', None)
application = __opts__.get('stormpath', {}).get('application', None)
path = 'https://api.stormpath.com/v1'
if application is not None:
path = '{0}/applications/{1}/loginAttempts'.format(path, application)
else:
return False
username = urllib.quote(username)
data = {
'type': 'basic',
'value': base64.b64encode('{0}:{1}'.format(username, password))
}
log.debug('{0}:{1}'.format(username, password))
log.debug(path)
log.debug(data)
log.debug(json.dumps(data))
result = salt.utils.http.query(
path,
method='POST',
username=apiid,
password=apikey,
data=json.dumps(data),
header_dict={'Content-type': 'application/json;charset=UTF-8'},
decode=False,
status=True,
opts=__opts__,
)
log.debug(result)
if result.get('status', 403) == 200:
return True
return False


@ -37,8 +37,9 @@ class Beacon(object):
.. code_block:: yaml .. code_block:: yaml
beacons: beacons:
inotify: inotify:
- /etc/fstab: {} - files:
- /var/cache/foo: {} - /etc/fstab: {}
- /var/cache/foo: {}
''' '''
ret = [] ret = []
b_config = copy.deepcopy(config) b_config = copy.deepcopy(config)
@ -69,6 +70,7 @@ class Beacon(object):
log.trace('Beacon processing: {0}'.format(mod)) log.trace('Beacon processing: {0}'.format(mod))
fun_str = '{0}.beacon'.format(mod) fun_str = '{0}.beacon'.format(mod)
validate_str = '{0}.validate'.format(mod)
if fun_str in self.beacons: if fun_str in self.beacons:
runonce = self._determine_beacon_config(current_beacon_config, 'run_once') runonce = self._determine_beacon_config(current_beacon_config, 'run_once')
interval = self._determine_beacon_config(current_beacon_config, 'interval') interval = self._determine_beacon_config(current_beacon_config, 'interval')
@ -95,6 +97,17 @@ class Beacon(object):
continue continue
# Update __grains__ on the beacon # Update __grains__ on the beacon
self.beacons[fun_str].__globals__['__grains__'] = grains self.beacons[fun_str].__globals__['__grains__'] = grains
# Run the validate function if it's available,
# otherwise there is a warning about it being missing
if validate_str in self.beacons:
valid, vcomment = self.beacons[validate_str](b_config[mod])
if not valid:
log.info('Beacon %s configuration invalid, '
'not running.\n%s', mod, vcomment)
continue
# Fire the beacon! # Fire the beacon!
raw = self.beacons[fun_str](b_config[mod]) raw = self.beacons[fun_str](b_config[mod])
for data in raw: for data in raw:
@ -193,6 +206,8 @@ class Beacon(object):
# Fire the complete event back along with the list of beacons # Fire the complete event back along with the list of beacons
evt = salt.utils.event.get_event('minion', opts=self.opts) evt = salt.utils.event.get_event('minion', opts=self.opts)
b_conf = self.functions['config.merge']('beacons') b_conf = self.functions['config.merge']('beacons')
if not isinstance(self.opts['beacons'], dict):
self.opts['beacons'] = {}
self.opts['beacons'].update(b_conf) self.opts['beacons'].update(b_conf)
evt.fire_event({'complete': True, 'beacons': self.opts['beacons']}, evt.fire_event({'complete': True, 'beacons': self.opts['beacons']},
tag='/salt/minion/minion_beacons_list_complete') tag='/salt/minion/minion_beacons_list_complete')
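The added lines make the beacon processor call an optional `<module>.validate` function and skip the beacon when the configuration is rejected. A minimal sketch of that guard outside the loader machinery, where the `beacons` dict of callables is a hypothetical stand-in for `self.beacons`:

    import logging

    log = logging.getLogger(__name__)


    def process_one(beacons, mod, b_config):
        '''Run one beacon module, honouring an optional validate() hook.'''
        fun_str = '{0}.beacon'.format(mod)
        validate_str = '{0}.validate'.format(mod)

        if fun_str not in beacons:
            return []

        # Run the validate function if it's available; skip the beacon
        # entirely when the configuration is rejected.
        if validate_str in beacons:
            valid, vcomment = beacons[validate_str](b_config[mod])
            if not valid:
                log.info('Beacon %s configuration invalid, not running.\n%s',
                         mod, vcomment)
                return []

        return beacons[fun_str](b_config[mod])


    # Hypothetical wiring that mimics the loader dictionary:
    beacons = {
        'demo.validate': lambda cfg: (isinstance(cfg, list), 'must be a list'),
        'demo.beacon': lambda cfg: [{'tag': 'demo', 'config': cfg}],
    }
    print(process_one(beacons, 'demo', {'demo': [{'interval': 10}]}))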


@ -10,7 +10,8 @@ from __future__ import absolute_import
import logging import logging
# Salt libs # Salt libs
import salt.utils import salt.utils.path
from salt.ext.six.moves import map
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -21,32 +22,41 @@ last_state_extra = {'value': False, 'no_devices': False}
def __virtual__(): def __virtual__():
which_result = salt.utils.which('adb') which_result = salt.utils.path.which('adb')
if which_result is None: if which_result is None:
return False return False
else: else:
return __virtualname__ return __virtualname__
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for adb beacon should be a dictionary with states array # Configuration for adb beacon should be a dictionary with states array
if not isinstance(config, dict): if not isinstance(config, list):
log.info('Configuration for adb beacon must be a dict.') log.info('Configuration for adb beacon must be a list.')
return False, ('Configuration for adb beacon must be a dict.') return False, ('Configuration for adb beacon must be a list.')
elif 'states' not in config.keys():
_config = {}
list(map(_config.update, config))
if 'states' not in _config:
log.info('Configuration for adb beacon must include a states array.') log.info('Configuration for adb beacon must include a states array.')
return False, ('Configuration for adb beacon must include a states array.') return False, ('Configuration for adb beacon must include a states array.')
else: else:
states = ['offline', 'bootloader', 'device', 'host', 'recovery', 'no permissions', if not isinstance(_config['states'], list):
'sideload', 'unauthorized', 'unknown', 'missing'] log.info('Configuration for adb beacon must include a states array.')
if any(s not in states for s in config['states']): return False, ('Configuration for adb beacon must include a states array.')
log.info('Need a one of the following adb ' else:
'states: {0}'.format(', '.join(states))) states = ['offline', 'bootloader', 'device', 'host',
return False, ('Need a one of the following adb ' 'recovery', 'no permissions',
'states: {0}'.format(', '.join(states))) 'sideload', 'unauthorized', 'unknown', 'missing']
if any(s not in states for s in _config['states']):
log.info('Need a one of the following adb '
'states: {0}'.format(', '.join(states)))
return False, ('Need a one of the following adb '
'states: {0}'.format(', '.join(states)))
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -74,11 +84,10 @@ def beacon(config):
log.trace('adb beacon starting') log.trace('adb beacon starting')
ret = [] ret = []
_validate = __validate__(config) _config = {}
if not _validate[0]: list(map(_config.update, config))
return ret
out = __salt__['cmd.run']('adb devices', runas=config.get('user', None)) out = __salt__['cmd.run']('adb devices', runas=_config.get('user', None))
lines = out.split('\n')[1:] lines = out.split('\n')[1:]
last_state_devices = list(last_state.keys()) last_state_devices = list(last_state.keys())
@ -90,21 +99,21 @@ def beacon(config):
found_devices.append(device) found_devices.append(device)
if device not in last_state_devices or \ if device not in last_state_devices or \
('state' in last_state[device] and last_state[device]['state'] != state): ('state' in last_state[device] and last_state[device]['state'] != state):
if state in config['states']: if state in _config['states']:
ret.append({'device': device, 'state': state, 'tag': state}) ret.append({'device': device, 'state': state, 'tag': state})
last_state[device] = {'state': state} last_state[device] = {'state': state}
if 'battery_low' in config: if 'battery_low' in _config:
val = last_state.get(device, {}) val = last_state.get(device, {})
cmd = 'adb -s {0} shell cat /sys/class/power_supply/*/capacity'.format(device) cmd = 'adb -s {0} shell cat /sys/class/power_supply/*/capacity'.format(device)
battery_levels = __salt__['cmd.run'](cmd, runas=config.get('user', None)).split('\n') battery_levels = __salt__['cmd.run'](cmd, runas=_config.get('user', None)).split('\n')
for l in battery_levels: for l in battery_levels:
battery_level = int(l) battery_level = int(l)
if 0 < battery_level < 100: if 0 < battery_level < 100:
if 'battery' not in val or battery_level != val['battery']: if 'battery' not in val or battery_level != val['battery']:
if ('battery' not in val or val['battery'] > config['battery_low']) and \ if ('battery' not in val or val['battery'] > _config['battery_low']) and \
battery_level <= config['battery_low']: battery_level <= _config['battery_low']:
ret.append({'device': device, 'battery_level': battery_level, 'tag': 'battery_low'}) ret.append({'device': device, 'battery_level': battery_level, 'tag': 'battery_low'})
if device not in last_state: if device not in last_state:
@ -118,13 +127,13 @@ def beacon(config):
# Find missing devices and remove them / send an event # Find missing devices and remove them / send an event
for device in last_state_devices: for device in last_state_devices:
if device not in found_devices: if device not in found_devices:
if 'missing' in config['states']: if 'missing' in _config['states']:
ret.append({'device': device, 'state': 'missing', 'tag': 'missing'}) ret.append({'device': device, 'state': 'missing', 'tag': 'missing'})
del last_state[device] del last_state[device]
# Maybe send an event if we don't have any devices # Maybe send an event if we don't have any devices
if 'no_devices_event' in config and config['no_devices_event'] is True: if 'no_devices_event' in _config and _config['no_devices_event'] is True:
if len(found_devices) == 0 and not last_state_extra['no_devices']: if len(found_devices) == 0 and not last_state_extra['no_devices']:
ret.append({'tag': 'no_devices'}) ret.append({'tag': 'no_devices'})
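Across these beacon modules the configuration changes from a dict to a list of single-key dicts, which the new code flattens with `list(map(_config.update, config))`. A small standalone example of that merge, with an adb-style configuration as it might arrive from the loaded YAML:

    # Roughly what the new list-of-dicts beacon configuration deserializes
    # to once the YAML is loaded (adb-style example):
    config = [
        {'states': ['device', 'missing']},
        {'battery_low': 25},
    ]

    # The merge used throughout the refactored beacons: fold every
    # single-key dict into one flat dict.
    _config = {}
    list(map(_config.update, config))

    print(_config)
    # {'states': ['device', 'missing'], 'battery_low': 25}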


@ -15,6 +15,7 @@ Dependencies
from __future__ import absolute_import from __future__ import absolute_import
import logging import logging
import time import time
from salt.ext.six.moves import map
# Import 3rd Party libs # Import 3rd Party libs
try: try:
@ -54,17 +55,23 @@ def __virtual__():
'\'python-avahi\' dependency is missing.'.format(__virtualname__) '\'python-avahi\' dependency is missing.'.format(__virtualname__)
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
if not isinstance(config, dict): _config = {}
return False, ('Configuration for avahi_announcement ' list(map(_config.update, config))
'beacon must be a dictionary')
elif not all(x in list(config.keys()) for x in ('servicetype', 'port', 'txt')): if not isinstance(config, list):
return False, ('Configuration for avahi_announce '
'beacon must be a list.')
elif not all(x in _config for x in ('servicetype',
'port',
'txt')):
return False, ('Configuration for avahi_announce beacon ' return False, ('Configuration for avahi_announce beacon '
'must contain servicetype, port and txt items') 'must contain servicetype, port and txt items.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration.'
def _enforce_txt_record_maxlen(key, value): def _enforce_txt_record_maxlen(key, value):
@ -138,13 +145,13 @@ def beacon(config):
beacons: beacons:
avahi_announce: avahi_announce:
run_once: True - run_once: True
servicetype: _demo._tcp - servicetype: _demo._tcp
port: 1234 - port: 1234
txt: - txt:
ProdName: grains.productname ProdName: grains.productname
SerialNo: grains.serialnumber SerialNo: grains.serialnumber
Comments: 'this is a test' Comments: 'this is a test'
''' '''
ret = [] ret = []
changes = {} changes = {}
@ -152,30 +159,27 @@ def beacon(config):
global LAST_GRAINS global LAST_GRAINS
_validate = __validate__(config) _config = {}
if not _validate[0]: list(map(_config.update, config))
log.warning('Beacon {0} configuration invalid, '
'not adding. {1}'.format(__virtualname__, _validate[1]))
return ret
if 'servicename' in config: if 'servicename' in _config:
servicename = config['servicename'] servicename = _config['servicename']
else: else:
servicename = __grains__['host'] servicename = __grains__['host']
# Check for hostname change # Check for hostname change
if LAST_GRAINS and LAST_GRAINS['host'] != servicename: if LAST_GRAINS and LAST_GRAINS['host'] != servicename:
changes['servicename'] = servicename changes['servicename'] = servicename
if LAST_GRAINS and config.get('reset_on_change', False): if LAST_GRAINS and _config.get('reset_on_change', False):
# Check for IP address change in the case when we reset on change # Check for IP address change in the case when we reset on change
if LAST_GRAINS.get('ipv4', []) != __grains__.get('ipv4', []): if LAST_GRAINS.get('ipv4', []) != __grains__.get('ipv4', []):
changes['ipv4'] = __grains__.get('ipv4', []) changes['ipv4'] = __grains__.get('ipv4', [])
if LAST_GRAINS.get('ipv6', []) != __grains__.get('ipv6', []): if LAST_GRAINS.get('ipv6', []) != __grains__.get('ipv6', []):
changes['ipv6'] = __grains__.get('ipv6', []) changes['ipv6'] = __grains__.get('ipv6', [])
for item in config['txt']: for item in _config['txt']:
if config['txt'][item].startswith('grains.'): if _config['txt'][item].startswith('grains.'):
grain = config['txt'][item][7:] grain = _config['txt'][item][7:]
grain_index = None grain_index = None
square_bracket = grain.find('[') square_bracket = grain.find('[')
if square_bracket != -1 and grain[-1] == ']': if square_bracket != -1 and grain[-1] == ']':
@ -192,7 +196,7 @@ def beacon(config):
if LAST_GRAINS and (LAST_GRAINS.get(grain, '') != __grains__.get(grain, '')): if LAST_GRAINS and (LAST_GRAINS.get(grain, '') != __grains__.get(grain, '')):
changes[str('txt.' + item)] = txt[item] changes[str('txt.' + item)] = txt[item]
else: else:
txt[item] = _enforce_txt_record_maxlen(item, config['txt'][item]) txt[item] = _enforce_txt_record_maxlen(item, _config['txt'][item])
if not LAST_GRAINS: if not LAST_GRAINS:
changes[str('txt.' + item)] = txt[item] changes[str('txt.' + item)] = txt[item]
@ -200,33 +204,33 @@ def beacon(config):
if changes: if changes:
if not LAST_GRAINS: if not LAST_GRAINS:
changes['servicename'] = servicename changes['servicename'] = servicename
changes['servicetype'] = config['servicetype'] changes['servicetype'] = _config['servicetype']
changes['port'] = config['port'] changes['port'] = _config['port']
changes['ipv4'] = __grains__.get('ipv4', []) changes['ipv4'] = __grains__.get('ipv4', [])
changes['ipv6'] = __grains__.get('ipv6', []) changes['ipv6'] = __grains__.get('ipv6', [])
GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0), GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),
servicename, config['servicetype'], '', '', servicename, _config['servicetype'], '', '',
dbus.UInt16(config['port']), avahi.dict_to_txt_array(txt)) dbus.UInt16(_config['port']), avahi.dict_to_txt_array(txt))
GROUP.Commit() GROUP.Commit()
elif config.get('reset_on_change', False) or 'servicename' in changes: elif _config.get('reset_on_change', False) or 'servicename' in changes:
# A change in 'servicename' requires a reset because we can only # A change in 'servicename' requires a reset because we can only
# directly update TXT records # directly update TXT records
GROUP.Reset() GROUP.Reset()
reset_wait = config.get('reset_wait', 0) reset_wait = _config.get('reset_wait', 0)
if reset_wait > 0: if reset_wait > 0:
time.sleep(reset_wait) time.sleep(reset_wait)
GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0), GROUP.AddService(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),
servicename, config['servicetype'], '', '', servicename, _config['servicetype'], '', '',
dbus.UInt16(config['port']), avahi.dict_to_txt_array(txt)) dbus.UInt16(_config['port']), avahi.dict_to_txt_array(txt))
GROUP.Commit() GROUP.Commit()
else: else:
GROUP.UpdateServiceTxt(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0), GROUP.UpdateServiceTxt(avahi.IF_UNSPEC, avahi.PROTO_UNSPEC, dbus.UInt32(0),
servicename, config['servicetype'], '', servicename, _config['servicetype'], '',
avahi.dict_to_txt_array(txt)) avahi.dict_to_txt_array(txt))
ret.append({'tag': 'result', 'changes': changes}) ret.append({'tag': 'result', 'changes': changes})
if config.get('copy_grains', False): if _config.get('copy_grains', False):
LAST_GRAINS = __grains__.copy() LAST_GRAINS = __grains__.copy()
else: else:
LAST_GRAINS = __grains__ LAST_GRAINS = __grains__


@ -9,6 +9,7 @@ import atexit
import logging import logging
import select import select
import time import time
from salt.ext.six.moves import map
# Import 3rd Party libs # Import 3rd Party libs
try: try:
@ -47,17 +48,23 @@ def _register_callback(sdRef, flags, errorCode, name, regtype, domain): # pylin
log.error('Bonjour registration failed with error code {0}'.format(errorCode)) log.error('Bonjour registration failed with error code {0}'.format(errorCode))
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
if not isinstance(config, dict): _config = {}
return False, ('Configuration for bonjour_announcement ' list(map(_config.update, config))
'beacon must be a dictionary')
elif not all(x in list(config.keys()) for x in ('servicetype', 'port', 'txt')): if not isinstance(config, list):
return False, ('Configuration for bonjour_announce '
'beacon must be a list.')
elif not all(x in _config for x in ('servicetype',
'port',
'txt')):
return False, ('Configuration for bonjour_announce beacon ' return False, ('Configuration for bonjour_announce beacon '
'must contain servicetype, port and txt items') 'must contain servicetype, port and txt items.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration.'
def _enforce_txt_record_maxlen(key, value): def _enforce_txt_record_maxlen(key, value):
@ -131,13 +138,13 @@ def beacon(config):
beacons: beacons:
bonjour_announce: bonjour_announce:
run_once: True - run_once: True
servicetype: _demo._tcp - servicetype: _demo._tcp
port: 1234 - port: 1234
txt: - txt:
ProdName: grains.productname ProdName: grains.productname
SerialNo: grains.serialnumber SerialNo: grains.serialnumber
Comments: 'this is a test' Comments: 'this is a test'
''' '''
ret = [] ret = []
changes = {} changes = {}
@ -146,30 +153,27 @@ def beacon(config):
global LAST_GRAINS global LAST_GRAINS
global SD_REF global SD_REF
_validate = __validate__(config) _config = {}
if not _validate[0]: list(map(_config.update, config))
log.warning('Beacon {0} configuration invalid, '
'not adding. {1}'.format(__virtualname__, _validate[1]))
return ret
if 'servicename' in config: if 'servicename' in _config:
servicename = config['servicename'] servicename = _config['servicename']
else: else:
servicename = __grains__['host'] servicename = __grains__['host']
# Check for hostname change # Check for hostname change
if LAST_GRAINS and LAST_GRAINS['host'] != servicename: if LAST_GRAINS and LAST_GRAINS['host'] != servicename:
changes['servicename'] = servicename changes['servicename'] = servicename
if LAST_GRAINS and config.get('reset_on_change', False): if LAST_GRAINS and _config.get('reset_on_change', False):
# Check for IP address change in the case when we reset on change # Check for IP address change in the case when we reset on change
if LAST_GRAINS.get('ipv4', []) != __grains__.get('ipv4', []): if LAST_GRAINS.get('ipv4', []) != __grains__.get('ipv4', []):
changes['ipv4'] = __grains__.get('ipv4', []) changes['ipv4'] = __grains__.get('ipv4', [])
if LAST_GRAINS.get('ipv6', []) != __grains__.get('ipv6', []): if LAST_GRAINS.get('ipv6', []) != __grains__.get('ipv6', []):
changes['ipv6'] = __grains__.get('ipv6', []) changes['ipv6'] = __grains__.get('ipv6', [])
for item in config['txt']: for item in _config['txt']:
if config['txt'][item].startswith('grains.'): if _config['txt'][item].startswith('grains.'):
grain = config['txt'][item][7:] grain = _config['txt'][item][7:]
grain_index = None grain_index = None
square_bracket = grain.find('[') square_bracket = grain.find('[')
if square_bracket != -1 and grain[-1] == ']': if square_bracket != -1 and grain[-1] == ']':
@ -186,7 +190,7 @@ def beacon(config):
if LAST_GRAINS and (LAST_GRAINS.get(grain, '') != __grains__.get(grain, '')): if LAST_GRAINS and (LAST_GRAINS.get(grain, '') != __grains__.get(grain, '')):
changes[str('txt.' + item)] = txt[item] changes[str('txt.' + item)] = txt[item]
else: else:
txt[item] = _enforce_txt_record_maxlen(item, config['txt'][item]) txt[item] = _enforce_txt_record_maxlen(item, _config['txt'][item])
if not LAST_GRAINS: if not LAST_GRAINS:
changes[str('txt.' + item)] = txt[item] changes[str('txt.' + item)] = txt[item]
@ -195,32 +199,32 @@ def beacon(config):
txt_record = pybonjour.TXTRecord(items=txt) txt_record = pybonjour.TXTRecord(items=txt)
if not LAST_GRAINS: if not LAST_GRAINS:
changes['servicename'] = servicename changes['servicename'] = servicename
changes['servicetype'] = config['servicetype'] changes['servicetype'] = _config['servicetype']
changes['port'] = config['port'] changes['port'] = _config['port']
changes['ipv4'] = __grains__.get('ipv4', []) changes['ipv4'] = __grains__.get('ipv4', [])
changes['ipv6'] = __grains__.get('ipv6', []) changes['ipv6'] = __grains__.get('ipv6', [])
SD_REF = pybonjour.DNSServiceRegister( SD_REF = pybonjour.DNSServiceRegister(
name=servicename, name=servicename,
regtype=config['servicetype'], regtype=_config['servicetype'],
port=config['port'], port=_config['port'],
txtRecord=txt_record, txtRecord=txt_record,
callBack=_register_callback) callBack=_register_callback)
atexit.register(_close_sd_ref) atexit.register(_close_sd_ref)
ready = select.select([SD_REF], [], []) ready = select.select([SD_REF], [], [])
if SD_REF in ready[0]: if SD_REF in ready[0]:
pybonjour.DNSServiceProcessResult(SD_REF) pybonjour.DNSServiceProcessResult(SD_REF)
elif config.get('reset_on_change', False) or 'servicename' in changes: elif _config.get('reset_on_change', False) or 'servicename' in changes:
# A change in 'servicename' requires a reset because we can only # A change in 'servicename' requires a reset because we can only
# directly update TXT records # directly update TXT records
SD_REF.close() SD_REF.close()
SD_REF = None SD_REF = None
reset_wait = config.get('reset_wait', 0) reset_wait = _config.get('reset_wait', 0)
if reset_wait > 0: if reset_wait > 0:
time.sleep(reset_wait) time.sleep(reset_wait)
SD_REF = pybonjour.DNSServiceRegister( SD_REF = pybonjour.DNSServiceRegister(
name=servicename, name=servicename,
regtype=config['servicetype'], regtype=_config['servicetype'],
port=config['port'], port=_config['port'],
txtRecord=txt_record, txtRecord=txt_record,
callBack=_register_callback) callBack=_register_callback)
ready = select.select([SD_REF], [], []) ready = select.select([SD_REF], [], [])
@ -236,7 +240,7 @@ def beacon(config):
ret.append({'tag': 'result', 'changes': changes}) ret.append({'tag': 'result', 'changes': changes})
if config.get('copy_grains', False): if _config.get('copy_grains', False):
LAST_GRAINS = __grains__.copy() LAST_GRAINS = __grains__.copy()
else: else:
LAST_GRAINS = __grains__ LAST_GRAINS = __grains__


@ -5,7 +5,7 @@ Beacon to fire events at failed login of users
.. code-block:: yaml .. code-block:: yaml
beacons: beacons:
btmp: {} btmp: []
''' '''
# Import python libs # Import python libs
@ -16,6 +16,9 @@ import struct
# Import Salt Libs # Import Salt Libs
import salt.utils.files import salt.utils.files
# Import 3rd-party libs
from salt.ext import six
__virtualname__ = 'btmp' __virtualname__ = 'btmp'
BTMP = '/var/log/btmp' BTMP = '/var/log/btmp'
FMT = 'hi32s4s32s256shhiii4i20x' FMT = 'hi32s4s32s256shhiii4i20x'
@ -49,14 +52,14 @@ def _get_loc():
return __context__[LOC_KEY] return __context__[LOC_KEY]
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for load beacon should be a list of dicts # Configuration for load beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for btmp beacon must ' return False, ('Configuration for btmp beacon must '
'be a list of dictionaries.') 'be a list.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -68,7 +71,7 @@ def beacon(config):
.. code-block:: yaml .. code-block:: yaml
beacons: beacons:
btmp: {} btmp: []
''' '''
ret = [] ret = []
with salt.utils.files.fopen(BTMP, 'rb') as fp_: with salt.utils.files.fopen(BTMP, 'rb') as fp_:
@ -88,7 +91,7 @@ def beacon(config):
event = {} event = {}
for ind, field in enumerate(FIELDS): for ind, field in enumerate(FIELDS):
event[field] = pack[ind] event[field] = pack[ind]
if isinstance(event[field], str): if isinstance(event[field], six.string_types):
event[field] = event[field].strip('\x00') event[field] = event[field].strip('\x00')
ret.append(event) ret.append(event)
return ret return ret
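The btmp beacon walks fixed-size utmp-style records with `struct`; the hunk above only swaps the `str` check for `six.string_types`. A rough standalone sketch of that record loop, reusing the `FMT` string shown (the field list here is abbreviated for illustration, and `/var/log/btmp` is normally only readable by root):

    import os
    import struct

    import six  # vendored in Salt as salt.ext.six

    BTMP = '/var/log/btmp'
    FMT = 'hi32s4s32s256shhiii4i20x'
    SIZE = struct.calcsize(FMT)
    # Abbreviated field names, for illustration only; the real beacon
    # defines the full utmp field list.
    FIELDS = ['type', 'PID', 'line', 'inittab', 'user',
              'hostname', 'exit_status', 'session', 'time', 'addr']


    def read_failed_logins():
        events = []
        if not os.access(BTMP, os.R_OK):
            return events
        with open(BTMP, 'rb') as fp_:
            while True:
                raw = fp_.read(SIZE)
                if len(raw) < SIZE:
                    break
                pack = struct.unpack(FMT, raw)
                event = {}
                for ind, field in enumerate(FIELDS):
                    event[field] = pack[ind]
                    # Mirror the beacon's check: only plain strings are
                    # NUL-stripped (on Python 3 the 's' fields are bytes).
                    if isinstance(event[field], six.string_types):
                        event[field] = event[field].strip('\x00')
                events.append(event)
        return events


    print(read_failed_logins()[:3])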


@ -31,14 +31,14 @@ def __virtual__():
return __virtualname__ return __virtualname__
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for diskusage beacon should be a list of dicts # Configuration for diskusage beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for diskusage beacon ' return False, ('Configuration for diskusage beacon '
'must be a dictionary.') 'must be a list.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -86,25 +86,24 @@ def beacon(config):
     parts = psutil.disk_partitions(all=False)
     ret = []
     for mounts in config:
-        mount = mounts.keys()[0]
-        try:
-            _current_usage = psutil.disk_usage(mount)
-        except OSError:
-            # Ensure a valid mount point
-            log.warning('{0} is not a valid mount point, try regex.'.format(mount))
-            for part in parts:
-                if re.match(mount, part.mountpoint):
-                    row = {}
-                    row[part.mountpoint] = mounts[mount]
-                    config.append(row)
-            continue
-        current_usage = _current_usage.percent
-        monitor_usage = mounts[mount]
-        if '%' in monitor_usage:
-            monitor_usage = re.sub('%', '', monitor_usage)
-        monitor_usage = float(monitor_usage)
-        if current_usage >= monitor_usage:
-            ret.append({'diskusage': current_usage, 'mount': mount})
+        mount = next(iter(mounts))
+
+        for part in parts:
+            if re.match(mount, part.mountpoint):
+                _mount = part.mountpoint
+
+                try:
+                    _current_usage = psutil.disk_usage(mount)
+                except OSError:
+                    log.warning('{0} is not a valid mount point.'.format(mount))
+                    continue
+
+                current_usage = _current_usage.percent
+                monitor_usage = mounts[mount]
+                log.info('current_usage {}'.format(current_usage))
+                if '%' in monitor_usage:
+                    monitor_usage = re.sub('%', '', monitor_usage)
+                monitor_usage = float(monitor_usage)
+                if current_usage >= monitor_usage:
+                    ret.append({'diskusage': current_usage, 'mount': _mount})
     return ret
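The rewritten loop treats each configured mount as a regular expression and only samples partitions that actually match, instead of appending synthesized entries back onto `config`. A standalone sketch of that match-then-threshold flow, assuming `psutil` is installed and using the beacon's `'NN%'`-style threshold strings:

    import re

    import psutil


    def check_disk_usage(config):
        '''config maps a mount-point regex to a usage threshold, e.g. {'^/$': '80%'}.'''
        ret = []
        parts = psutil.disk_partitions(all=False)
        for mount_re, threshold in config.items():
            for part in parts:
                if not re.match(mount_re, part.mountpoint):
                    continue
                current = psutil.disk_usage(part.mountpoint).percent
                monitor = float(str(threshold).replace('%', ''))
                if current >= monitor:
                    ret.append({'diskusage': current, 'mount': part.mountpoint})
        return ret


    print(check_disk_usage({'^/$': '0%'}))  # a 0% threshold always fires for /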


@ -10,7 +10,8 @@ from __future__ import absolute_import
import logging import logging
# Salt libs # Salt libs
import salt.utils import salt.utils.path
from salt.ext.six.moves import map
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -21,21 +22,25 @@ last_state = {}
def __virtual__(): def __virtual__():
which_result = salt.utils.which('glxinfo') which_result = salt.utils.path.which('glxinfo')
if which_result is None: if which_result is None:
return False return False
else: else:
return __virtualname__ return __virtualname__
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for glxinfo beacon should be a dictionary # Configuration for glxinfo beacon should be a dictionary
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for glxinfo beacon must be a dict.') return False, ('Configuration for glxinfo beacon must be a list.')
if 'user' not in config:
_config = {}
list(map(_config.update, config))
if 'user' not in _config:
return False, ('Configuration for glxinfo beacon must ' return False, ('Configuration for glxinfo beacon must '
'include a user as glxinfo is not available to root.') 'include a user as glxinfo is not available to root.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -45,27 +50,28 @@ def beacon(config):
''' '''
Emit the status of a connected display to the minion Emit the status of a connected display to the minion
Mainly this is used to detect when the display fails to connect for whatever reason. Mainly this is used to detect when the display fails to connect
for whatever reason.
.. code-block:: yaml .. code-block:: yaml
beacons: beacons:
glxinfo: glxinfo:
user: frank - user: frank
screen_event: True - screen_event: True
''' '''
log.trace('glxinfo beacon starting') log.trace('glxinfo beacon starting')
ret = [] ret = []
_validate = __validate__(config) _config = {}
if not _validate[0]: list(map(_config.update, config))
return ret
retcode = __salt__['cmd.retcode']('DISPLAY=:0 glxinfo', runas=config['user'], python_shell=True) retcode = __salt__['cmd.retcode']('DISPLAY=:0 glxinfo',
runas=_config['user'], python_shell=True)
if 'screen_event' in config and config['screen_event']: if 'screen_event' in _config and _config['screen_event']:
last_value = last_state.get('screen_available', False) last_value = last_state.get('screen_available', False)
screen_available = retcode == 0 screen_available = retcode == 0
if last_value != screen_available or 'screen_available' not in last_state: if last_value != screen_available or 'screen_available' not in last_state:


@ -9,7 +9,7 @@ Fire an event when over a specified threshold.
# Import Python libs # Import Python libs
from __future__ import absolute_import from __future__ import absolute_import
import logging import logging
from salt.ext.six.moves import map
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -23,17 +23,39 @@ def __virtual__():
if 'haproxy.get_sessions' in __salt__: if 'haproxy.get_sessions' in __salt__:
return __virtualname__ return __virtualname__
else: else:
log.debug('Not loading haproxy beacon')
return False return False
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for haproxy beacon must be a dictionary.') return False, ('Configuration for haproxy beacon must '
if 'haproxy' not in config: 'be a list.')
return False, ('Configuration for haproxy beacon requires a list of backends and servers') else:
_config = {}
list(map(_config.update, config))
if 'backends' not in _config:
return False, ('Configuration for haproxy beacon '
'requires backends.')
else:
if not isinstance(_config['backends'], dict):
return False, ('Backends for haproxy beacon '
'must be a dictionary.')
else:
for backend in _config['backends']:
log.debug('_config {}'.format(_config['backends'][backend]))
if 'servers' not in _config['backends'][backend]:
return False, ('Backends for haproxy beacon '
'require servers.')
else:
_servers = _config['backends'][backend]['servers']
if not isinstance(_servers, list):
return False, ('Servers for haproxy beacon '
'must be a list.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -46,22 +68,23 @@ def beacon(config):
beacons: beacons:
haproxy: haproxy:
- www-backend: - backends:
threshold: 45 www-backend:
servers: threshold: 45
- web1 servers:
- web2 - web1
- web2
- interval: 120 - interval: 120
''' '''
log.debug('haproxy beacon starting')
ret = [] ret = []
_validate = __validate__(config)
if not _validate: _config = {}
log.debug('haproxy beacon unable to validate') list(map(_config.update, config))
return ret
for backend in config: for backend in _config.get('backends', ()):
threshold = config[backend]['threshold'] backend_config = _config['backends'][backend]
for server in config[backend]['servers']: threshold = backend_config['threshold']
for server in backend_config['servers']:
scur = __salt__['haproxy.get_sessions'](server, backend) scur = __salt__['haproxy.get_sessions'](server, backend)
if scur: if scur:
if int(scur) > int(threshold): if int(scur) > int(threshold):
@ -69,6 +92,10 @@ def beacon(config):
'scur': scur, 'scur': scur,
'threshold': threshold, 'threshold': threshold,
} }
log.debug('Emit because {0} > {1} for {2} in {3}'.format(scur, threshold, server, backend)) log.debug('Emit because {0} > {1}'
' for {2} in {3}'.format(scur,
threshold,
server,
backend))
ret.append(_server) ret.append(_server)
return ret return ret
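With this change the haproxy beacon expects a `backends` mapping whose values carry a `threshold` and a `servers` list, rather than backend names at the top level. A standalone sketch of iterating that shape; `get_sessions` below is a hypothetical stand-in for the `haproxy.get_sessions` execution module call:

    def get_sessions(server, backend):
        # Hypothetical stand-in returning current session counts.
        fake = {('web1', 'www-backend'): 12, ('web2', 'www-backend'): 80}
        return fake.get((server, backend), 0)


    # Equivalent of the new YAML layout:
    #   haproxy:
    #     - backends:
    #         www-backend:
    #           threshold: 45
    #           servers: [web1, web2]
    config = [{'backends': {'www-backend': {'threshold': 45,
                                            'servers': ['web1', 'web2']}}}]

    _config = {}
    list(map(_config.update, config))

    ret = []
    for backend, backend_config in _config.get('backends', {}).items():
        threshold = backend_config['threshold']
        for server in backend_config['servers']:
            scur = get_sessions(server, backend)
            if scur and int(scur) > int(threshold):
                ret.append({'server': server, 'scur': scur, 'threshold': threshold})

    print(ret)  # [{'server': 'web2', 'scur': 80, 'threshold': 45}]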


@ -23,6 +23,7 @@ import re
# Import salt libs # Import salt libs
import salt.ext.six import salt.ext.six
from salt.ext.six.moves import map
# Import third party libs # Import third party libs
try: try:
@ -79,7 +80,7 @@ def _get_notifier(config):
return __context__['inotify.notifier'] return __context__['inotify.notifier']
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
@ -105,37 +106,45 @@ def __validate__(config):
] ]
# Configuration for inotify beacon should be a dict of dicts # Configuration for inotify beacon should be a dict of dicts
log.debug('config {0}'.format(config)) if not isinstance(config, list):
if not isinstance(config, dict): return False, 'Configuration for inotify beacon must be a list.'
return False, 'Configuration for inotify beacon must be a dictionary.'
else: else:
for config_item in config: _config = {}
if not isinstance(config[config_item], dict): list(map(_config.update, config))
return False, ('Configuration for inotify beacon must '
'be a dictionary of dictionaries.') if 'files' not in _config:
else: return False, 'Configuration for inotify beacon must include files.'
if not any(j in ['mask', 'recurse', 'auto_add'] for j in config[config_item]): else:
for path in _config.get('files'):
if not isinstance(_config['files'][path], dict):
return False, ('Configuration for inotify beacon must ' return False, ('Configuration for inotify beacon must '
'contain mask, recurse or auto_add items.') 'be a list of dictionaries.')
else:
if not any(j in ['mask',
'recurse',
'auto_add'] for j in _config['files'][path]):
return False, ('Configuration for inotify beacon must '
'contain mask, recurse or auto_add items.')
if 'auto_add' in config[config_item]: if 'auto_add' in _config['files'][path]:
if not isinstance(config[config_item]['auto_add'], bool): if not isinstance(_config['files'][path]['auto_add'], bool):
return False, ('Configuration for inotify beacon ' return False, ('Configuration for inotify beacon '
'auto_add must be boolean.') 'auto_add must be boolean.')
if 'recurse' in config[config_item]: if 'recurse' in _config['files'][path]:
if not isinstance(config[config_item]['recurse'], bool): if not isinstance(_config['files'][path]['recurse'], bool):
return False, ('Configuration for inotify beacon ' return False, ('Configuration for inotify beacon '
'recurse must be boolean.') 'recurse must be boolean.')
if 'mask' in config[config_item]: if 'mask' in _config['files'][path]:
if not isinstance(config[config_item]['mask'], list): if not isinstance(_config['files'][path]['mask'], list):
return False, ('Configuration for inotify beacon ' return False, ('Configuration for inotify beacon '
'mask must be list.') 'mask must be list.')
for mask in config[config_item]['mask']: for mask in _config['files'][path]['mask']:
if mask not in VALID_MASK: if mask not in VALID_MASK:
return False, ('Configuration for inotify beacon ' return False, ('Configuration for inotify beacon '
'invalid mask option {0}.'.format(mask)) 'invalid mask option {0}.'.format(mask))
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -149,19 +158,20 @@ def beacon(config):
beacons: beacons:
inotify: inotify:
/path/to/file/or/dir: - files:
mask: /path/to/file/or/dir:
- open mask:
- create - open
- close_write - create
recurse: True - close_write
auto_add: True recurse: True
exclude: auto_add: True
- /path/to/file/or/dir/exclude1 exclude:
- /path/to/file/or/dir/exclude2 - /path/to/file/or/dir/exclude1
- /path/to/file/or/dir/regex[a-m]*$: - /path/to/file/or/dir/exclude2
regex: True - /path/to/file/or/dir/regex[a-m]*$:
coalesce: True regex: True
- coalesce: True
The mask list can contain the following events (the default mask is create, The mask list can contain the following events (the default mask is create,
delete, and modify): delete, and modify):
@ -203,8 +213,11 @@ def beacon(config):
affects all paths that are being watched. This is due to this option affects all paths that are being watched. This is due to this option
being at the Notifier level in pyinotify. being at the Notifier level in pyinotify.
''' '''
_config = {}
list(map(_config.update, config))
ret = [] ret = []
notifier = _get_notifier(config) notifier = _get_notifier(_config)
wm = notifier._watch_manager wm = notifier._watch_manager
# Read in existing events # Read in existing events
@ -219,11 +232,13 @@ def beacon(config):
# Find the matching path in config # Find the matching path in config
path = event.path path = event.path
while path != '/': while path != '/':
if path in config: if path in _config.get('files', {}):
break break
path = os.path.dirname(path) path = os.path.dirname(path)
excludes = config[path].get('exclude', '') for path in _config.get('files', {}):
excludes = _config['files'][path].get('exclude', '')
if excludes and isinstance(excludes, list): if excludes and isinstance(excludes, list):
for exclude in excludes: for exclude in excludes:
if isinstance(exclude, dict): if isinstance(exclude, dict):
@ -257,9 +272,10 @@ def beacon(config):
# Update existing watches and add new ones # Update existing watches and add new ones
# TODO: make the config handle more options # TODO: make the config handle more options
for path in config: for path in _config.get('files', ()):
if isinstance(config[path], dict):
mask = config[path].get('mask', DEFAULT_MASK) if isinstance(_config['files'][path], dict):
mask = _config['files'][path].get('mask', DEFAULT_MASK)
if isinstance(mask, list): if isinstance(mask, list):
r_mask = 0 r_mask = 0
for sub in mask: for sub in mask:
@ -269,8 +285,8 @@ def beacon(config):
else: else:
r_mask = mask r_mask = mask
mask = r_mask mask = r_mask
rec = config[path].get('recurse', False) rec = _config['files'][path].get('recurse', False)
auto_add = config[path].get('auto_add', False) auto_add = _config['files'][path].get('auto_add', False)
else: else:
mask = DEFAULT_MASK mask = DEFAULT_MASK
rec = False rec = False
@ -287,7 +303,7 @@ def beacon(config):
if update: if update:
wm.update_watch(wd, mask=mask, rec=rec, auto_add=auto_add) wm.update_watch(wd, mask=mask, rec=rec, auto_add=auto_add)
elif os.path.exists(path): elif os.path.exists(path):
excludes = config[path].get('exclude', '') excludes = _config['files'][path].get('exclude', '')
excl = None excl = None
if isinstance(excludes, list): if isinstance(excludes, list):
excl = [] excl = []
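The inotify beacon now nests watched paths under a `files` key, with each path still taking `mask`, `recurse`, `auto_add` and `exclude`. A short sketch of folding the configured mask names into one bitmask under that layout; the numeric mask values below are illustrative placeholders, not pyinotify's real constants:

    # Illustrative bit values only; the real beacon takes these from pyinotify.
    MASKS = {'create': 0x100, 'delete': 0x200, 'modify': 0x002,
             'open': 0x020, 'close_write': 0x008}
    DEFAULT_MASK = MASKS['create'] | MASKS['delete'] | MASKS['modify']

    config = [{'files': {'/etc/important': {'mask': ['open', 'close_write'],
                                            'recurse': True,
                                            'auto_add': True}}},
              {'coalesce': True}]

    _config = {}
    list(map(_config.update, config))

    for path, opts in _config.get('files', {}).items():
        names = opts.get('mask')
        if names:
            r_mask = 0
            for sub in names:
                r_mask |= MASKS[sub]
        else:
            r_mask = DEFAULT_MASK
        print(path, hex(r_mask),
              'recurse:', opts.get('recurse', False),
              'auto_add:', opts.get('auto_add', False))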


@ -10,6 +10,7 @@ from __future__ import absolute_import
import salt.utils import salt.utils
import salt.utils.locales import salt.utils.locales
import salt.ext.six import salt.ext.six
from salt.ext.six.moves import map
# Import third party libs # Import third party libs
try: try:
@ -43,18 +44,21 @@ def _get_journal():
return __context__['systemd.journald'] return __context__['systemd.journald']
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for journald beacon should be a list of dicts # Configuration for journald beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False return (False, 'Configuration for journald beacon must be a list.')
else: else:
for item in config: _config = {}
if not isinstance(config[item], dict): list(map(_config.update, config))
return False, ('Configuration for journald beacon must '
'be a dictionary of dictionaries.') for name in _config.get('services', {}):
if not isinstance(_config['services'][name], dict):
return False, ('Services configuration for journald beacon '
'must be a list of dictionaries.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -69,25 +73,31 @@ def beacon(config):
beacons: beacons:
journald: journald:
sshd: - services:
SYSLOG_IDENTIFIER: sshd sshd:
PRIORITY: 6 SYSLOG_IDENTIFIER: sshd
PRIORITY: 6
''' '''
ret = [] ret = []
journal = _get_journal() journal = _get_journal()
_config = {}
list(map(_config.update, config))
while True: while True:
cur = journal.get_next() cur = journal.get_next()
if not cur: if not cur:
break break
for name in config:
for name in _config.get('services', {}):
n_flag = 0 n_flag = 0
for key in config[name]: for key in _config['services'][name]:
if isinstance(key, salt.ext.six.string_types): if isinstance(key, salt.ext.six.string_types):
key = salt.utils.locales.sdecode(key) key = salt.utils.locales.sdecode(key)
if key in cur: if key in cur:
if config[name][key] == cur[key]: if _config['services'][name][key] == cur[key]:
n_flag += 1 n_flag += 1
if n_flag == len(config[name]): if n_flag == len(_config['services'][name]):
# Match! # Match!
sub = salt.utils.simple_types_filter(cur) sub = salt.utils.simple_types_filter(cur)
sub.update({'tag': name}) sub.update({'tag': name})


@ -9,7 +9,8 @@ import logging
import os import os
# Import Salt libs # Import Salt libs
import salt.utils import salt.utils.platform
from salt.ext.six.moves import map
# Import Py3 compat # Import Py3 compat
from salt.ext.six.moves import zip from salt.ext.six.moves import zip
@ -22,13 +23,13 @@ LAST_STATUS = {}
def __virtual__(): def __virtual__():
if salt.utils.is_windows(): if salt.utils.platform.is_windows():
return False return False
else: else:
return __virtualname__ return __virtualname__
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
@ -37,25 +38,26 @@ def __validate__(config):
if not isinstance(config, list): if not isinstance(config, list):
return False, ('Configuration for load beacon must be a list.') return False, ('Configuration for load beacon must be a list.')
else: else:
for config_item in config: _config = {}
if not isinstance(config_item, dict): list(map(_config.update, config))
return False, ('Configuration for load beacon must '
'be a list of dictionaries.') if 'averages' not in _config:
else: return False, ('Averages configuration is required'
if not all(j in ['1m', '5m', '15m'] for j in config_item.keys()): ' for load beacon.')
return False, ('Configuration for load beacon must ' else:
'contain 1m, 5m or 15m items.')
if not any(j in ['1m', '5m', '15m'] for j
in _config.get('averages', {})):
return False, ('Averages configuration for load beacon '
'must contain 1m, 5m or 15m items.')
for item in ['1m', '5m', '15m']: for item in ['1m', '5m', '15m']:
if item not in config_item: if not isinstance(_config['averages'][item], list):
continue return False, ('Averages configuration for load beacon: '
if not isinstance(config_item[item], list):
return False, ('Configuration for load beacon: '
'1m, 5m and 15m items must be ' '1m, 5m and 15m items must be '
'a list of two items.') 'a list of two items.')
else: else:
if len(config_item[item]) != 2: if len(_config['averages'][item]) != 2:
return False, ('Configuration for load beacon: ' return False, ('Configuration for load beacon: '
'1m, 5m and 15m items must be ' '1m, 5m and 15m items must be '
'a list of two items.') 'a list of two items.')
@ -82,33 +84,37 @@ def beacon(config):
beacons: beacons:
load: load:
1m: - averages:
- 0.0 1m:
- 2.0 - 0.0
5m: - 2.0
- 0.0 5m:
- 1.5 - 0.0
15m: - 1.5
- 0.1 15m:
- 1.0 - 0.1
emitatstartup: True - 1.0
onchangeonly: False - emitatstartup: True
- onchangeonly: False
''' '''
log.trace('load beacon starting') log.trace('load beacon starting')
_config = {}
list(map(_config.update, config))
# Default config if not present # Default config if not present
if 'emitatstartup' not in config: if 'emitatstartup' not in _config:
config['emitatstartup'] = True _config['emitatstartup'] = True
if 'onchangeonly' not in config: if 'onchangeonly' not in _config:
config['onchangeonly'] = False _config['onchangeonly'] = False
ret = [] ret = []
avgs = os.getloadavg() avgs = os.getloadavg()
avg_keys = ['1m', '5m', '15m'] avg_keys = ['1m', '5m', '15m']
avg_dict = dict(zip(avg_keys, avgs)) avg_dict = dict(zip(avg_keys, avgs))
if config['onchangeonly']: if _config['onchangeonly']:
if not LAST_STATUS: if not LAST_STATUS:
for k in ['1m', '5m', '15m']: for k in ['1m', '5m', '15m']:
LAST_STATUS[k] = avg_dict[k] LAST_STATUS[k] = avg_dict[k]
@ -120,27 +126,40 @@ def beacon(config):
     # Check each entry for threshold
     for k in ['1m', '5m', '15m']:
-        if k in config:
-            if config['onchangeonly']:
-                # Emit if current is more that threshold and old value less that threshold
-                if float(avg_dict[k]) > float(config[k][1]) and float(LAST_STATUS[k]) < float(config[k][1]):
-                    log.debug('Emit because {0} > {1} and last was {2}'.format(float(avg_dict[k]), float(config[k][1]), float(LAST_STATUS[k])))
+        if k in _config.get('averages', {}):
+            if _config['onchangeonly']:
+                # Emit if current is more that threshold and old value less
+                # that threshold
+                if float(avg_dict[k]) > float(_config['averages'][k][1]) and \
+                        float(LAST_STATUS[k]) < float(_config['averages'][k][1]):
+                    log.debug('Emit because {0} > {1} and last was '
+                              '{2}'.format(float(avg_dict[k]),
+                                           float(_config['averages'][k][1]),
+                                           float(LAST_STATUS[k])))
                     send_beacon = True
                     break
-                # Emit if current is less that threshold and old value more that threshold
-                if float(avg_dict[k]) < float(config[k][0]) and float(LAST_STATUS[k]) > float(config[k][0]):
-                    log.debug('Emit because {0} < {1} and last was {2}'.format(float(avg_dict[k]), float(config[k][0]), float(LAST_STATUS[k])))
+                # Emit if current is less that threshold and old value more
+                # that threshold
+                if float(avg_dict[k]) < float(_config['averages'][k][0]) and \
+                        float(LAST_STATUS[k]) > float(_config['averages'][k][0]):
+                    log.debug('Emit because {0} < {1} and last was'
+                              '{2}'.format(float(avg_dict[k]),
+                                           float(_config['averages'][k][0]),
+                                           float(LAST_STATUS[k])))
                     send_beacon = True
                     break
             else:
                 # Emit no matter LAST_STATUS
-                if float(avg_dict[k]) < float(config[k][0]) or \
-                        float(avg_dict[k]) > float(config[k][1]):
-                    log.debug('Emit because {0} < {1} or > {2}'.format(float(avg_dict[k]), float(config[k][0]), float(config[k][1])))
+                if float(avg_dict[k]) < float(_config['averages'][k][0]) or \
+                        float(avg_dict[k]) > float(_config['averages'][k][1]):
+                    log.debug('Emit because {0} < {1} or > '
+                              '{2}'.format(float(avg_dict[k]),
+                                           float(_config['averages'][k][0]),
+                                           float(_config['averages'][k][1])))
                     send_beacon = True
                     break

-    if config['onchangeonly']:
+    if _config['onchangeonly']:
         for k in ['1m', '5m', '15m']:
             LAST_STATUS[k] = avg_dict[k]
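Under the new `averages` key each of `1m`, `5m` and `15m` holds a `[low, high]` pair, and the beacon fires when the sampled load leaves that band (or, with `onchangeonly`, only when it crosses a bound). A standalone sketch of the plain band check against `os.getloadavg()`:

    import os

    # New-style configuration, already flattened from the list form.
    averages = {'1m': [0.0, 2.0], '5m': [0.0, 1.5], '15m': [0.1, 1.0]}

    avg_keys = ['1m', '5m', '15m']
    avg_dict = dict(zip(avg_keys, os.getloadavg()))

    send_beacon = False
    for k in avg_keys:
        if k not in averages:
            continue
        low, high = float(averages[k][0]), float(averages[k][1])
        # Without onchangeonly the beacon fires whenever the sample
        # leaves the [low, high] band.
        if avg_dict[k] < low or avg_dict[k] > high:
            send_beacon = True
            break

    print(avg_dict, 'beacon fires:', send_beacon)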


@ -11,8 +11,9 @@ from __future__ import absolute_import
import logging import logging
# Import salt libs # Import salt libs
import salt.utils
import salt.utils.files import salt.utils.files
import salt.utils.platform
from salt.ext.six.moves import map
try: try:
@ -35,7 +36,7 @@ log = logging.getLogger(__name__)
def __virtual__(): def __virtual__():
if not salt.utils.is_windows() and HAS_REGEX: if not salt.utils.platform.is_windows() and HAS_REGEX:
return __virtualname__ return __virtualname__
return False return False
@ -48,13 +49,20 @@ def _get_loc():
return __context__[LOC_KEY] return __context__[LOC_KEY]
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
_config = {}
list(map(_config.update, config))
# Configuration for log beacon should be a list of dicts # Configuration for log beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for log beacon must be a dictionary.') return False, ('Configuration for log beacon must be a list.')
if 'file' not in _config:
return False, ('Configuration for log beacon '
'must contain file option.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -67,20 +75,24 @@ def beacon(config):
beacons: beacons:
log: log:
file: <path> - file: <path>
<tag>: - tags:
regex: <pattern> <tag>:
regex: <pattern>
''' '''
_config = {}
list(map(_config.update, config))
ret = [] ret = []
if 'file' not in config: if 'file' not in _config:
event = SKEL.copy() event = SKEL.copy()
event['tag'] = 'global' event['tag'] = 'global'
event['error'] = 'file not defined in config' event['error'] = 'file not defined in config'
ret.append(event) ret.append(event)
return ret return ret
with salt.utils.files.fopen(config['file'], 'r') as fp_: with salt.utils.files.fopen(_config['file'], 'r') as fp_:
loc = __context__.get(LOC_KEY, 0) loc = __context__.get(LOC_KEY, 0)
if loc == 0: if loc == 0:
fp_.seek(0, 2) fp_.seek(0, 2)
@ -92,16 +104,17 @@ def beacon(config):
fp_.seek(loc) fp_.seek(loc)
txt = fp_.read() txt = fp_.read()
log.info('txt {}'.format(txt))
d = {} d = {}
for tag in config: for tag in _config.get('tags', {}):
if 'regex' not in config[tag]: if 'regex' not in _config['tags'][tag]:
continue continue
if len(config[tag]['regex']) < 1: if len(_config['tags'][tag]['regex']) < 1:
continue continue
try: try:
d[tag] = re.compile(r'{0}'.format(config[tag]['regex'])) d[tag] = re.compile(r'{0}'.format(_config['tags'][tag]['regex']))
except Exception: except Exception as e:
event = SKEL.copy() event = SKEL.copy()
event['tag'] = tag event['tag'] = tag
event['error'] = 'bad regex' event['error'] = 'bad regex'
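The log beacon now takes the watched `file` and a `tags` mapping of tag names to `regex` patterns as separate list items. A standalone sketch of compiling those patterns and tagging matching lines; the file path and sample line are made up for illustration:

    import re

    config = [{'file': '/var/log/messages'},
              {'tags': {'sshd-fail': {'regex': r'Failed password for (\S+)'},
                        'oom': {'regex': r'Out of memory'}}}]

    _config = {}
    list(map(_config.update, config))

    patterns = {}
    for tag, opts in _config.get('tags', {}).items():
        if opts.get('regex'):
            patterns[tag] = re.compile(r'{0}'.format(opts['regex']))

    sample = 'Jan  1 00:00:00 host sshd[123]: Failed password for root from 10.0.0.1'
    events = []
    for tag, rgx in patterns.items():
        match = rgx.search(sample)
        if match:
            events.append({'tag': tag, 'match': match.group(0)})

    print(events)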


@ -11,6 +11,7 @@ Beacon to monitor memory usage.
from __future__ import absolute_import from __future__ import absolute_import
import logging import logging
import re import re
from salt.ext.six.moves import map
# Import Third Party Libs # Import Third Party Libs
try: try:
@ -31,14 +32,22 @@ def __virtual__():
return __virtualname__ return __virtualname__
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for memusage beacon should be a list of dicts # Configuration for memusage beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for memusage ' return False, ('Configuration for memusage '
'beacon must be a dictionary.') 'beacon must be a list.')
else:
_config = {}
list(map(_config.update, config))
if 'percent' not in _config:
return False, ('Configuration for memusage beacon '
'requires percent.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -46,7 +55,8 @@ def beacon(config):
''' '''
Monitor the memory usage of the minion Monitor the memory usage of the minion
Specify thresholds for percent used and only emit a beacon if it is exceeded. Specify thresholds for percent used and only emit a beacon
if it is exceeded.
.. code-block:: yaml .. code-block:: yaml
@ -55,15 +65,17 @@ def beacon(config):
- percent: 63% - percent: 63%
''' '''
ret = [] ret = []
-    for memusage in config:
-        mount = memusage.keys()[0]
-        _current_usage = psutil.virtual_memory()
-        current_usage = _current_usage.percent
-        monitor_usage = memusage[mount]
-        if '%' in monitor_usage:
-            monitor_usage = re.sub('%', '', monitor_usage)
-        monitor_usage = float(monitor_usage)
-        if current_usage >= monitor_usage:
-            ret.append({'memusage': current_usage})
+    _config = {}
+    list(map(_config.update, config))
+
+    _current_usage = psutil.virtual_memory()
+
+    current_usage = _current_usage.percent
+    monitor_usage = _config['percent']
+    if '%' in monitor_usage:
+        monitor_usage = re.sub('%', '', monitor_usage)
+    monitor_usage = float(monitor_usage)
+    if current_usage >= monitor_usage:
+        ret.append({'memusage': current_usage})
     return ret
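Under the new schema memusage takes a single `percent` threshold instead of a list of pseudo-mount entries. A standalone sketch of the percent parsing and comparison, assuming `psutil` is installed:

    import re

    import psutil

    config = [{'percent': '63%'}]

    _config = {}
    list(map(_config.update, config))

    current_usage = psutil.virtual_memory().percent

    monitor_usage = _config['percent']
    if '%' in monitor_usage:
        monitor_usage = re.sub('%', '', monitor_usage)
    monitor_usage = float(monitor_usage)

    ret = []
    if current_usage >= monitor_usage:
        ret.append({'memusage': current_usage})
    print(ret)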

View File

@ -16,6 +16,9 @@ try:
HAS_PSUTIL = True HAS_PSUTIL = True
except ImportError: except ImportError:
HAS_PSUTIL = False HAS_PSUTIL = False
from salt.ext.six.moves import map
# pylint: enable=import-error # pylint: enable=import-error
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -45,7 +48,7 @@ def __virtual__():
return __virtualname__ return __virtualname__
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
@ -57,15 +60,19 @@ def __validate__(config):
] ]
# Configuration for load beacon should be a list of dicts # Configuration for load beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for load beacon must be a dictionary.') return False, ('Configuration for network_info beacon must be a list.')
else: else:
for item in config:
if not isinstance(config[item], dict): _config = {}
return False, ('Configuration for load beacon must ' list(map(_config.update, config))
'be a dictionary of dictionaries.')
for item in _config.get('interfaces', {}):
if not isinstance(_config['interfaces'][item], dict):
return False, ('Configuration for network_info beacon must '
'be a list of dictionaries.')
else: else:
if not any(j in VALID_ITEMS for j in config[item]): if not any(j in VALID_ITEMS for j in _config['interfaces'][item]):
return False, ('Invalid configuration item in ' return False, ('Invalid configuration item in '
'Beacon configuration.') 'Beacon configuration.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -86,16 +93,17 @@ def beacon(config):
beacons: beacons:
network_info: network_info:
- eth0: - interfaces:
type: equal eth0:
bytes_sent: 100000 type: equal
bytes_recv: 100000 bytes_sent: 100000
packets_sent: 100000 bytes_recv: 100000
packets_recv: 100000 packets_sent: 100000
errin: 100 packets_recv: 100000
errout: 100 errin: 100
dropin: 100 errout: 100
dropout: 100 dropin: 100
dropout: 100
Emit beacon when any values are greater Emit beacon when any values are greater
than configured values. than configured values.
@ -104,46 +112,53 @@ def beacon(config):
beacons: beacons:
network_info: network_info:
- eth0: - interfaces:
type: greater eth0:
bytes_sent: 100000 type: greater
bytes_recv: 100000 bytes_sent: 100000
packets_sent: 100000 bytes_recv: 100000
packets_recv: 100000 packets_sent: 100000
errin: 100 packets_recv: 100000
errout: 100 errin: 100
dropin: 100 errout: 100
dropout: 100 dropin: 100
dropout: 100
''' '''
ret = [] ret = []
_config = {}
list(map(_config.update, config))
log.debug('psutil.net_io_counters {}'.format(psutil.net_io_counters))
_stats = psutil.net_io_counters(pernic=True) _stats = psutil.net_io_counters(pernic=True)
for interface_config in config: log.debug('_stats {}'.format(_stats))
interface = interface_config.keys()[0] for interface in _config.get('interfaces', {}):
if interface in _stats: if interface in _stats:
interface_config = _config['interfaces'][interface]
_if_stats = _stats[interface] _if_stats = _stats[interface]
_diff = False _diff = False
for attr in __attrs: for attr in __attrs:
if attr in interface_config[interface]: if attr in interface_config:
if 'type' in interface_config[interface] and \ if 'type' in interface_config and \
interface_config[interface]['type'] == 'equal': interface_config['type'] == 'equal':
if getattr(_if_stats, attr, None) == \ if getattr(_if_stats, attr, None) == \
int(interface_config[interface][attr]): int(interface_config[attr]):
_diff = True _diff = True
elif 'type' in interface_config[interface] and \ elif 'type' in interface_config and \
interface_config[interface]['type'] == 'greater': interface_config['type'] == 'greater':
if getattr(_if_stats, attr, None) > \ if getattr(_if_stats, attr, None) > \
int(interface_config[interface][attr]): int(interface_config[attr]):
_diff = True _diff = True
else: else:
log.debug('attr {}'.format(getattr(_if_stats, log.debug('attr {}'.format(getattr(_if_stats,
attr, None))) attr, None)))
else: else:
if getattr(_if_stats, attr, None) == \ if getattr(_if_stats, attr, None) == \
int(interface_config[interface][attr]): int(interface_config[attr]):
_diff = True _diff = True
if _diff: if _diff:
ret.append({'interface': interface, ret.append({'interface': interface,
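The refactored network_info beacon reads per-interface counters from `psutil.net_io_counters(pernic=True)` and compares each configured attribute in either `equal` or `greater` mode. A standalone sketch of that comparison, assuming `psutil` is available; `lo` is just a sample interface name:

    import psutil

    ATTRS = ['bytes_sent', 'bytes_recv', 'packets_sent', 'packets_recv',
             'errin', 'errout', 'dropin', 'dropout']

    # Equivalent of the new list-based 'interfaces' layout.
    config = [{'interfaces': {'lo': {'type': 'greater', 'bytes_sent': 1}}}]

    _config = {}
    list(map(_config.update, config))

    ret = []
    stats = psutil.net_io_counters(pernic=True)
    for interface, iface_cfg in _config.get('interfaces', {}).items():
        if interface not in stats:
            continue
        if_stats = stats[interface]
        diff = False
        for attr in ATTRS:
            if attr not in iface_cfg:
                continue
            value = getattr(if_stats, attr, None)
            if iface_cfg.get('type') == 'greater':
                diff = diff or value > int(iface_cfg[attr])
            else:
                # 'equal' and the default both compare for equality.
                diff = diff or value == int(iface_cfg[attr])
        if diff:
            ret.append({'interface': interface,
                        'network_info': dict(if_stats._asdict())})

    print(ret)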


@ -18,6 +18,8 @@ import ast
import re import re
import salt.loader import salt.loader
import logging import logging
from salt.ext.six.moves import map
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
__virtual_name__ = 'network_settings' __virtual_name__ = 'network_settings'
@ -45,22 +47,23 @@ def __virtual__():
return False return False
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for network_settings ' return False, ('Configuration for network_settings '
'beacon must be a dictionary.') 'beacon must be a list.')
else: else:
for item in config: _config = {}
if item == 'coalesce': list(map(_config.update, config))
continue
if not isinstance(config[item], dict): for item in _config.get('interfaces', {}):
return False, ('Configuration for network_settings beacon must be a ' if not isinstance(_config['interfaces'][item], dict):
'dictionary of dictionaries.') return False, ('Configuration for network_settings beacon '
' must be a list of dictionaries.')
else: else:
if not all(j in ATTRS for j in config[item]): if not all(j in ATTRS for j in _config['interfaces'][item]):
return False, ('Invalid configuration item in Beacon ' return False, ('Invalid configuration item in Beacon '
'configuration.') 'configuration.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -75,9 +78,10 @@ def _copy_interfaces_info(interfaces):
for interface in interfaces: for interface in interfaces:
_interface_attrs_cpy = set() _interface_attrs_cpy = set()
for attr in ATTRS: for attr in ATTRS:
attr_dict = Hashabledict() if attr in interfaces[interface]:
attr_dict[attr] = repr(interfaces[interface][attr]) attr_dict = Hashabledict()
_interface_attrs_cpy.add(attr_dict) attr_dict[attr] = repr(interfaces[interface][attr])
_interface_attrs_cpy.add(attr_dict)
ret[interface] = _interface_attrs_cpy ret[interface] = _interface_attrs_cpy
return ret return ret
@ -89,8 +93,8 @@ def beacon(config):
By default, the beacon will emit when there is a value change on one of the By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the setting, which instruct the beacon to only emit if the setting changed to
value defined. the value defined.
Example Config Example Config
@ -98,12 +102,13 @@ def beacon(config):
beacons: beacons:
network_settings: network_settings:
eth0: - interfaces:
ipaddr: - eth0:
promiscuity: ipaddr:
onvalue: 1 promiscuity:
eth1: onvalue: 1
linkmode: - eth1:
linkmode:
The config above will check for value changes on eth0 ipaddr and eth1 linkmode. It will also The config above will check for value changes on eth0 ipaddr and eth1 linkmode. It will also
emit if the promiscuity value changes to 1. emit if the promiscuity value changes to 1.
@ -118,12 +123,16 @@ def beacon(config):
beacons: beacons:
network_settings: network_settings:
coalesce: True - coalesce: True
eth0: - interfaces:
ipaddr: - eth0:
promiscuity: ipaddr:
promiscuity:
''' '''
_config = {}
list(map(_config.update, config))
ret = [] ret = []
interfaces = [] interfaces = []
expanded_config = {} expanded_config = {}
@ -137,45 +146,51 @@ def beacon(config):
if not LAST_STATS: if not LAST_STATS:
LAST_STATS = _stats LAST_STATS = _stats
if 'coalesce' in config and config['coalesce']: if 'coalesce' in _config and _config['coalesce']:
coalesce = True coalesce = True
changes = {} changes = {}
log.debug('_stats {}'.format(_stats))
# Get list of interfaces included in config that are registered in the # Get list of interfaces included in config that are registered in the
# system, including interfaces defined by wildcards (eth*, wlan*) # system, including interfaces defined by wildcards (eth*, wlan*)
for item in config: for interface in _config.get('interfaces', {}):
if item == 'coalesce': if interface in _stats:
continue interfaces.append(interface)
if item in _stats:
interfaces.append(item)
else: else:
# No direct match, try with * wildcard regexp # No direct match, try with * wildcard regexp
interface_regexp = item.replace('*', '[0-9]+') interface_regexp = interface.replace('*', '[0-9]+')
for interface in _stats: for interface in _stats:
match = re.search(interface_regexp, interface) match = re.search(interface_regexp, interface)
if match: if match:
interfaces.append(match.group()) interfaces.append(match.group())
expanded_config[match.group()] = config[item] expanded_config[match.group()] = config['interfaces'][interface]
if expanded_config: if expanded_config:
config.update(expanded_config) config.update(expanded_config)
# config updated so update _config
list(map(_config.update, config))
log.debug('interfaces {}'.format(interfaces))
for interface in interfaces: for interface in interfaces:
_send_event = False _send_event = False
_diff_stats = _stats[interface] - LAST_STATS[interface] _diff_stats = _stats[interface] - LAST_STATS[interface]
_ret_diff = {} _ret_diff = {}
interface_config = _config['interfaces'][interface]
log.debug('_diff_stats {}'.format(_diff_stats))
if _diff_stats: if _diff_stats:
_diff_stats_dict = {} _diff_stats_dict = {}
LAST_STATS[interface] = _stats[interface] LAST_STATS[interface] = _stats[interface]
for item in _diff_stats: for item in _diff_stats:
_diff_stats_dict.update(item) _diff_stats_dict.update(item)
for attr in config[interface]: for attr in interface_config:
if attr in _diff_stats_dict: if attr in _diff_stats_dict:
config_value = None config_value = None
if config[interface][attr] and 'onvalue' in config[interface][attr]: if interface_config[attr] and \
config_value = config[interface][attr]['onvalue'] 'onvalue' in interface_config[attr]:
config_value = interface_config[attr]['onvalue']
new_value = ast.literal_eval(_diff_stats_dict[attr]) new_value = ast.literal_eval(_diff_stats_dict[attr])
if not config_value or config_value == new_value: if not config_value or config_value == new_value:
_send_event = True _send_event = True
@ -185,7 +200,9 @@ def beacon(config):
if coalesce: if coalesce:
changes[interface] = _ret_diff changes[interface] = _ret_diff
else: else:
ret.append({'tag': interface, 'interface': interface, 'change': _ret_diff}) ret.append({'tag': interface,
'interface': interface,
'change': _ret_diff})
if coalesce and changes: if coalesce and changes:
grains_info = salt.loader.grains(__opts__, True) grains_info = salt.loader.grains(__opts__, True)
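
The wildcard handling in the network_settings hunks above turns an interface name such as eth* into a regular expression and matches it against the interfaces reported by the system. A rough standalone sketch of that expansion (the interface names here are invented for illustration):

.. code-block:: python

    import re

    configured = ['eth*', 'lo']
    system_interfaces = ['eth0', 'eth1', 'lo', 'wlan0']

    matched = []
    for name in configured:
        if name in system_interfaces:
            matched.append(name)
        else:
            # no direct match, try a '*' wildcard turned into a regexp
            pattern = name.replace('*', '[0-9]+')
            for iface in system_interfaces:
                match = re.search(pattern, iface)
                if match:
                    matched.append(match.group())

    print(matched)  # ['eth0', 'eth1', 'lo']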

View File

@@ -21,15 +21,25 @@ def __virtual__():
     return __virtualname__ if 'pkg.upgrade_available' in __salt__ else False


-def __validate__(config):
+def validate(config):
     '''
     Validate the beacon configuration
     '''
     # Configuration for pkg beacon should be a list
-    if not isinstance(config, dict):
-        return False, ('Configuration for pkg beacon must be a dictionary.')
-    if 'pkgs' not in config:
-        return False, ('Configuration for pkg beacon requires list of pkgs.')
+    if not isinstance(config, list):
+        return False, ('Configuration for pkg beacon must be a list.')
+
+    # Configuration for pkg beacon should contain pkgs
+    pkgs_found = False
+    pkgs_not_list = False
+    for config_item in config:
+        if 'pkgs' in config_item:
+            pkgs_found = True
+            if isinstance(config_item['pkgs'], list):
+                pkgs_not_list = True
+
+    if not pkgs_found or not pkgs_not_list:
+        return False, 'Configuration for pkg beacon requires list of pkgs.'
+
     return True, 'Valid beacon configuration'

@@ -48,14 +58,16 @@ def beacon(config):
           - refresh: True
     '''
     ret = []
-    _validate = __validate__(config)
-    if not _validate[0]:
-        return ret
-
     _refresh = False
-    if 'refresh' in config and config['refresh']:
-        _refresh = True
-    for pkg in config['pkgs']:
+    pkgs = []
+    for config_item in config:
+        if 'pkgs' in config_item:
+            pkgs += config_item['pkgs']
+    if 'refresh' in config and config['refresh']:
+        _refresh = True
+    for pkg in pkgs:
         _installed = __salt__['pkg.version'](pkg)
         _latest = __salt__['pkg.latest_version'](pkg, refresh=_refresh)
         if _installed and _latest:
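
For reference, a hedged sketch of how the rewritten pkg validation behaves on a list-shaped config; it condenses the two flags in the hunk above into a single any() check, and the package names are placeholders:

.. code-block:: python

    def validate(config):
        # mirrors the hunk above: require a list containing a 'pkgs' list
        if not isinstance(config, list):
            return False, 'Configuration for pkg beacon must be a list.'
        has_pkgs = any(isinstance(item.get('pkgs'), list) for item in config)
        if not has_pkgs:
            return False, 'Configuration for pkg beacon requires list of pkgs.'
        return True, 'Valid beacon configuration'

    print(validate([{'pkgs': ['zsh', 'salt-minion']}, {'refresh': True}]))
    # (True, 'Valid beacon configuration')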

View File

@@ -15,6 +15,7 @@ import logging

 # Import salt libs
 import salt.utils.http
+from salt.ext.six.moves import map

 # Important: If used with salt-proxy
 # this is required for the beacon to load!!!
@@ -33,12 +34,12 @@ def __virtual__():
     return True


-def __validate__(config):
+def validate(config):
     '''
     Validate the beacon configuration
     '''
-    if not isinstance(config, dict):
-        return False, ('Configuration for rest_example beacon must be a dictionary.')
+    if not isinstance(config, list):
+        return False, ('Configuration for proxy_example beacon must be a list.')

     return True, 'Valid beacon configuration'

@@ -51,7 +52,7 @@ def beacon(config):
        beacons:
          proxy_example:
-           endpoint: beacon
+           - endpoint: beacon
     '''
     # Important!!!
     # Although this toy example makes an HTTP call
@@ -59,8 +60,11 @@ def beacon(config):
     # please be advised that doing CPU or IO intensive
     # operations in this method will cause the beacon loop
     # to block.
+    _config = {}
+    list(map(_config.update, config))
+
     beacon_url = '{0}{1}'.format(__opts__['proxy']['url'],
-                                 config['endpoint'])
+                                 _config['endpoint'])
     ret = salt.utils.http.query(beacon_url,
                                 decode_type='json',
                                 decode=True)

View File

@@ -14,6 +14,9 @@ try:
     HAS_PSUTIL = True
 except ImportError:
     HAS_PSUTIL = False
+
+from salt.ext.six.moves import map
+
 # pylint: enable=import-error

 log = logging.getLogger(__name__)  # pylint: disable=invalid-name
@@ -23,17 +26,27 @@ __virtualname__ = 'ps'

 def __virtual__():
     if not HAS_PSUTIL:
-        return (False, 'cannot load network_info beacon: psutil not available')
+        return (False, 'cannot load ps beacon: psutil not available')
     return __virtualname__


-def __validate__(config):
+def validate(config):
     '''
     Validate the beacon configuration
     '''
     # Configuration for ps beacon should be a list of dicts
-    if not isinstance(config, dict):
-        return False, ('Configuration for ps beacon must be a dictionary.')
+    if not isinstance(config, list):
+        return False, ('Configuration for ps beacon must be a list.')
+    else:
+        _config = {}
+        list(map(_config.update, config))
+
+        if 'processes' not in _config:
+            return False, ('Configuration for ps beacon requires processes.')
+        else:
+            if not isinstance(_config['processes'], dict):
+                return False, ('Processes for ps beacon must be a dictionary.')
+
     return True, 'Valid beacon configuration'

@@ -47,8 +60,9 @@ def beacon(config):
         beacons:
           ps:
-            - salt-master: running
-            - mysql: stopped
+            - processes:
+                salt-master: running
+                mysql: stopped

     The config above sets up beacons to check that
     processes are running or stopped.
@@ -60,19 +74,21 @@ def beacon(config):
         if _name not in procs:
             procs.append(_name)

-    for entry in config:
-        for process in entry:
-            ret_dict = {}
-            if entry[process] == 'running':
-                if process in procs:
-                    ret_dict[process] = 'Running'
-                    ret.append(ret_dict)
-            elif entry[process] == 'stopped':
-                if process not in procs:
-                    ret_dict[process] = 'Stopped'
-                    ret.append(ret_dict)
-            else:
-                if process not in procs:
-                    ret_dict[process] = False
-                    ret.append(ret_dict)
+    _config = {}
+    list(map(_config.update, config))
+
+    for process in _config.get('processes', {}):
+        ret_dict = {}
+        if _config['processes'][process] == 'running':
+            if process in procs:
+                ret_dict[process] = 'Running'
+                ret.append(ret_dict)
+        elif _config['processes'][process] == 'stopped':
+            if process not in procs:
+                ret_dict[process] = 'Stopped'
+                ret.append(ret_dict)
+        else:
+            if process not in procs:
+                ret_dict[process] = False
+                ret.append(ret_dict)
     return ret
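
A simplified illustration of the process check the rewritten ps beacon performs, runnable outside Salt where psutil is installed; the process names and desired states are examples only:

.. code-block:: python

    import psutil

    _config = {'processes': {'salt-master': 'running', 'mysql': 'stopped'}}

    # collect the names of currently running processes
    procs = [proc.name() for proc in psutil.process_iter()]

    ret = []
    for process, wanted in _config['processes'].items():
        if wanted == 'running' and process in procs:
            ret.append({process: 'Running'})
        elif wanted == 'stopped' and process not in procs:
            ret.append({process: 'Stopped'})

    print(ret)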

View File

@@ -9,6 +9,7 @@
 # Import python libs
 from __future__ import absolute_import
 import logging
+from salt.ext.six.moves import map

 log = logging.getLogger(__name__)

@@ -20,9 +21,7 @@ def _run_proxy_processes(proxies):
     aren't running
     '''
     ret = []
-    for prox_ in proxies:
-        # prox_ is a dict
-        proxy = prox_.keys()[0]
+    for proxy in proxies:
         result = {}
         if not __salt__['salt_proxy.is_running'](proxy)['result']:
             __salt__['salt_proxy.configure_proxy'](proxy, start=True)
@@ -35,7 +34,29 @@ def _run_proxy_processes(proxies):
     return ret


-def beacon(proxies):
+def validate(config):
+    '''
+    Validate the beacon configuration
+    '''
+    # Configuration for adb beacon should be a dictionary with states array
+    if not isinstance(config, list):
+        log.info('Configuration for salt_proxy beacon must be a list.')
+        return False, ('Configuration for salt_proxy beacon must be a list.')
+    else:
+        _config = {}
+        list(map(_config.update, config))
+
+        if 'proxies' not in _config:
+            return False, ('Configuration for salt_proxy'
+                           ' beacon requires proxies.')
+        else:
+            if not isinstance(_config['proxies'], dict):
+                return False, ('Proxies for salt_proxy '
+                               'beacon must be a dictionary.')
+
+
+def beacon(config):
     '''
     Handle configured proxies

@@ -43,9 +64,13 @@ def beacon(proxies):
        beacons:
          salt_proxy:
-           - p8000: {}
-           - p8001: {}
+           - proxies:
+               p8000: {}
+               p8001: {}
     '''
     log.trace('salt proxy beacon called')
-    return _run_proxy_processes(proxies)
+    _config = {}
+    list(map(_config.update, config))
+
+    return _run_proxy_processes(_config['proxies'])
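
A hedged, standalone version of the salt_proxy validation shown above; the trailing success return is an assumption here, since the hunk ends before any is shown, and the proxy IDs are placeholders:

.. code-block:: python

    def validate(config):
        if not isinstance(config, list):
            return False, 'Configuration for salt_proxy beacon must be a list.'
        _config = {}
        list(map(_config.update, config))
        if 'proxies' not in _config:
            return False, 'Configuration for salt_proxy beacon requires proxies.'
        if not isinstance(_config['proxies'], dict):
            return False, 'Proxies for salt_proxy beacon must be a dictionary.'
        return True, 'Valid beacon configuration'  # assumed; not shown in the hunk

    print(validate([{'proxies': {'p8000': {}}}]))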

View File

@@ -11,6 +11,7 @@ of a Raspberry Pi.
 from __future__ import absolute_import
 import logging
 import re
+from salt.ext.six.moves import map

 log = logging.getLogger(__name__)

@@ -19,14 +20,22 @@ def __virtual__():
     return 'sensehat.get_pressure' in __salt__


-def __validate__(config):
+def validate(config):
     '''
     Validate the beacon configuration
     '''
-    # Configuration for sensehat beacon should be a dict
-    if not isinstance(config, dict):
+    # Configuration for sensehat beacon should be a list
+    if not isinstance(config, list):
         return False, ('Configuration for sensehat beacon '
-                       'must be a dictionary.')
+                       'must be a list.')
+    else:
+        _config = {}
+        list(map(_config.update, config))
+
+        if 'sensors' not in _config:
+            return False, ('Configuration for sensehat'
+                           ' beacon requires sensors.')
+
     return True, 'Valid beacon configuration'

@@ -48,10 +57,11 @@ def beacon(config):
        beacons:
          sensehat:
-           humidity: 70%
-           temperature: [20, 40]
-           temperature_from_pressure: 40
-           pressure: 1500
+           - sensors:
+               humidity: 70%
+               temperature: [20, 40]
+               temperature_from_pressure: 40
+               pressure: 1500
     '''
     ret = []
     min_default = {
@@ -60,13 +70,16 @@ def beacon(config):
         'temperature': '-273.15'
     }

-    for sensor in config:
+    _config = {}
+    list(map(_config.update, config))
+
+    for sensor in _config.get('sensors', {}):
         sensor_function = 'sensehat.get_{0}'.format(sensor)
         if sensor_function not in __salt__:
             log.error('No sensor for meassuring {0}. Skipping.'.format(sensor))
             continue

-        sensor_config = config[sensor]
+        sensor_config = _config['sensors'][sensor]
         if isinstance(sensor_config, list):
             sensor_min = str(sensor_config[0])
             sensor_max = str(sensor_config[1])
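
The sensor entries accept either a [min, max] pair or a single maximum. A small sketch of that branch; the non-list fallback to min_default is an assumption based on the surrounding code, since the hunk cuts off before it:

.. code-block:: python

    min_default = {'humidity': '0', 'pressure': '0', 'temperature': '-273.15'}

    def sensor_bounds(sensor, sensor_config):
        if isinstance(sensor_config, list):
            # a two-element list is treated as [min, max]
            return str(sensor_config[0]), str(sensor_config[1])
        # assumed fallback: per-sensor default minimum, configured value as maximum
        return min_default.get(sensor, '0'), str(sensor_config)

    print(sensor_bounds('temperature', [20, 40]))  # ('20', '40')
    print(sensor_bounds('pressure', 1500))         # ('0', '1500')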

View File

@ -9,19 +9,35 @@ from __future__ import absolute_import
import os import os
import logging import logging
import time import time
from salt.ext.six.moves import map
log = logging.getLogger(__name__) # pylint: disable=invalid-name log = logging.getLogger(__name__) # pylint: disable=invalid-name
LAST_STATUS = {} LAST_STATUS = {}
__virtualname__ = 'service'
def __validate__(config):
def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for service beacon should be a list of dicts # Configuration for service beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for service beacon must be a dictionary.') return False, ('Configuration for service beacon must be a list.')
else:
_config = {}
list(map(_config.update, config))
if 'services' not in _config:
return False, ('Configuration for service beacon'
' requires services.')
else:
for config_item in _config['services']:
if not isinstance(_config['services'][config_item], dict):
return False, ('Configuration for service beacon must '
'be a list of dictionaries.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -35,8 +51,9 @@ def beacon(config):
beacons: beacons:
service: service:
salt-master: - services:
mysql: salt-master:
mysql:
The config above sets up beacons to check for The config above sets up beacons to check for
the salt-master and mysql services. the salt-master and mysql services.
@ -79,14 +96,21 @@ def beacon(config):
beacons: beacons:
service: service:
nginx: - services:
onchangeonly: True nginx:
delay: 30 onchangeonly: True
uncleanshutdown: /run/nginx.pid delay: 30
uncleanshutdown: /run/nginx.pid
''' '''
ret = [] ret = []
for service in config: _config = {}
list(map(_config.update, config))
for service in _config.get('services', {}):
ret_dict = {} ret_dict = {}
service_config = _config['services'][service]
ret_dict[service] = {'running': __salt__['service.status'](service)} ret_dict[service] = {'running': __salt__['service.status'](service)}
ret_dict['service_name'] = service ret_dict['service_name'] = service
ret_dict['tag'] = service ret_dict['tag'] = service
@ -95,40 +119,43 @@ def beacon(config):
# If no options is given to the service, we fall back to the defaults # If no options is given to the service, we fall back to the defaults
# assign a False value to oncleanshutdown and onchangeonly. Those # assign a False value to oncleanshutdown and onchangeonly. Those
# key:values are then added to the service dictionary. # key:values are then added to the service dictionary.
if 'oncleanshutdown' not in config[service]: if not service_config:
config[service]['oncleanshutdown'] = False service_config = {}
if 'emitatstartup' not in config[service]: if 'oncleanshutdown' not in service_config:
config[service]['emitatstartup'] = True service_config['oncleanshutdown'] = False
if 'onchangeonly' not in config[service]: if 'emitatstartup' not in service_config:
config[service]['onchangeonly'] = False service_config['emitatstartup'] = True
if 'delay' not in config[service]: if 'onchangeonly' not in service_config:
config[service]['delay'] = 0 service_config['onchangeonly'] = False
if 'delay' not in service_config:
service_config['delay'] = 0
# We only want to report the nature of the shutdown # We only want to report the nature of the shutdown
# if the current running status is False # if the current running status is False
# as well as if the config for the beacon asks for it # as well as if the config for the beacon asks for it
if 'uncleanshutdown' in config[service] and not ret_dict[service]['running']: if 'uncleanshutdown' in service_config and not ret_dict[service]['running']:
filename = config[service]['uncleanshutdown'] filename = service_config['uncleanshutdown']
ret_dict[service]['uncleanshutdown'] = True if os.path.exists(filename) else False ret_dict[service]['uncleanshutdown'] = True if os.path.exists(filename) else False
if 'onchangeonly' in config[service] and config[service]['onchangeonly'] is True: if 'onchangeonly' in service_config and service_config['onchangeonly'] is True:
if service not in LAST_STATUS: if service not in LAST_STATUS:
LAST_STATUS[service] = ret_dict[service] LAST_STATUS[service] = ret_dict[service]
if config[service]['delay'] > 0: if service_config['delay'] > 0:
LAST_STATUS[service]['time'] = currtime LAST_STATUS[service]['time'] = currtime
elif not config[service]['emitatstartup']: elif not service_config['emitatstartup']:
continue continue
else: else:
ret.append(ret_dict) ret.append(ret_dict)
if LAST_STATUS[service]['running'] != ret_dict[service]['running']: if LAST_STATUS[service]['running'] != ret_dict[service]['running']:
LAST_STATUS[service] = ret_dict[service] LAST_STATUS[service] = ret_dict[service]
if config[service]['delay'] > 0: if service_config['delay'] > 0:
LAST_STATUS[service]['time'] = currtime LAST_STATUS[service]['time'] = currtime
else: else:
ret.append(ret_dict) ret.append(ret_dict)
if 'time' in LAST_STATUS[service]: if 'time' in LAST_STATUS[service]:
elapsedtime = int(round(currtime - LAST_STATUS[service]['time'])) elapsedtime = int(round(currtime - LAST_STATUS[service]['time']))
if elapsedtime > config[service]['delay']: if elapsedtime > service_config['delay']:
del LAST_STATUS[service]['time'] del LAST_STATUS[service]['time']
ret.append(ret_dict) ret.append(ret_dict)
else: else:
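
The service hunks above fill in per-service defaults before evaluating state changes. A minimal sketch of that defaulting step; the option names are taken from the hunks, while the helper itself is illustrative:

.. code-block:: python

    def apply_service_defaults(service_config):
        # a missing or empty per-service config falls back to an empty dict
        service_config = dict(service_config or {})
        service_config.setdefault('oncleanshutdown', False)
        service_config.setdefault('emitatstartup', True)
        service_config.setdefault('onchangeonly', False)
        service_config.setdefault('delay', 0)
        return service_config

    print(apply_service_defaults({'onchangeonly': True, 'delay': 30}))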

View File

@@ -9,6 +9,7 @@ import time

 # Import salt libs
 import salt.utils
+import salt.utils.path
 import salt.utils.vt

 __virtualname__ = 'sh'
@@ -21,7 +22,7 @@ def __virtual__():
     '''
     Only load if strace is installed
     '''
-    return __virtualname__ if salt.utils.which('strace') else False
+    return __virtualname__ if salt.utils.path.which('strace') else False


 def _get_shells():
@@ -40,13 +41,13 @@ def _get_shells():
     return __context__['sh.shells']


-def __validate__(config):
+def validate(config):
     '''
     Validate the beacon configuration
     '''
     # Configuration for sh beacon should be a list of dicts
-    if not isinstance(config, dict):
-        return False, ('Configuration for sh beacon must be a dictionary.')
+    if not isinstance(config, list):
+        return False, ('Configuration for sh beacon must be a list.')

     return True, 'Valid beacon configuration'

@@ -57,7 +58,7 @@ def beacon(config):
     .. code-block:: yaml

         beacons:
-          sh: {}
+          sh: []
     '''
     ret = []
     pkey = 'sh.vt'

View File

@@ -15,7 +15,7 @@ the minion config:
 .. code-block:: yaml

     beacons:
-      status: {}
+      status: []

 By default, all of the information from the following execution module
 functions will be returned:
@@ -96,19 +96,19 @@ import datetime
 import salt.exceptions

 # Import salt libs
-import salt.utils
+import salt.utils.platform

 log = logging.getLogger(__name__)

 __virtualname__ = 'status'


-def __validate__(config):
+def validate(config):
     '''
     Validate the the config is a dict
     '''
-    if not isinstance(config, dict):
-        return False, ('Configuration for status beacon must be a dictionary.')
+    if not isinstance(config, list):
+        return False, ('Configuration for status beacon must be a list.')

     return True, 'Valid beacon configuration'

@@ -123,7 +123,7 @@ def beacon(config):
     log.debug(config)
     ctime = datetime.datetime.utcnow().isoformat()
     ret = {}
-    if salt.utils.is_windows():
+    if salt.utils.platform.is_windows():
         return [{
             'tag': ctime,
             'data': ret,

View File

@ -1,11 +1,15 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
''' '''
Beacon to emit Telegram messages Beacon to emit Telegram messages
Requires the python-telegram-bot library
''' '''
# Import Python libs # Import Python libs
from __future__ import absolute_import from __future__ import absolute_import
import logging import logging
from salt.ext.six.moves import map
# Import 3rd Party libs # Import 3rd Party libs
try: try:
@ -28,20 +32,23 @@ def __virtual__():
return False return False
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for telegram_bot_msg ' return False, ('Configuration for telegram_bot_msg '
'beacon must be a dictionary.') 'beacon must be a list.')
if not all(config.get(required_config) _config = {}
list(map(_config.update, config))
if not all(_config.get(required_config)
for required_config in ['token', 'accept_from']): for required_config in ['token', 'accept_from']):
return False, ('Not all required configuration for ' return False, ('Not all required configuration for '
'telegram_bot_msg are set.') 'telegram_bot_msg are set.')
if not isinstance(config.get('accept_from'), list): if not isinstance(_config.get('accept_from'), list):
return False, ('Configuration for telegram_bot_msg, ' return False, ('Configuration for telegram_bot_msg, '
'accept_from must be a list of usernames.') 'accept_from must be a list of usernames.')
@ -57,18 +64,22 @@ def beacon(config):
beacons: beacons:
telegram_bot_msg: telegram_bot_msg:
token: "<bot access token>" - token: "<bot access token>"
accept_from: - accept_from:
- "<valid username>" - "<valid username>"
interval: 10 - interval: 10
''' '''
_config = {}
list(map(_config.update, config))
log.debug('telegram_bot_msg beacon starting') log.debug('telegram_bot_msg beacon starting')
ret = [] ret = []
output = {} output = {}
output['msgs'] = [] output['msgs'] = []
bot = telegram.Bot(config['token']) bot = telegram.Bot(_config['token'])
updates = bot.get_updates(limit=100, timeout=0, network_delay=10) updates = bot.get_updates(limit=100, timeout=0, network_delay=10)
log.debug('Num updates: {0}'.format(len(updates))) log.debug('Num updates: {0}'.format(len(updates)))
@ -83,7 +94,7 @@ def beacon(config):
if update.update_id > latest_update_id: if update.update_id > latest_update_id:
latest_update_id = update.update_id latest_update_id = update.update_id
if message.chat.username in config['accept_from']: if message.chat.username in _config['accept_from']:
output['msgs'].append(message.to_dict()) output['msgs'].append(message.to_dict())
# mark in the server that previous messages are processed # mark in the server that previous messages are processed
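
The telegram_bot_msg validation flattens the list config and then checks the required keys in one pass. A standalone sketch of that check; the token and username values are placeholders:

.. code-block:: python

    config = [
        {'token': '<bot access token>'},
        {'accept_from': ['<valid username>']},
        {'interval': 10},
    ]

    _config = {}
    list(map(_config.update, config))

    required = ['token', 'accept_from']
    ok = all(_config.get(key) for key in required) and \
        isinstance(_config.get('accept_from'), list)
    print(ok)  # True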

View File

@ -6,10 +6,17 @@ Beacon to emit Twilio text messages
# Import Python libs # Import Python libs
from __future__ import absolute_import from __future__ import absolute_import
import logging import logging
from salt.ext.six.moves import map
# Import 3rd Party libs # Import 3rd Party libs
try: try:
from twilio.rest import TwilioRestClient import twilio
# Grab version, ensure elements are ints
twilio_version = tuple([int(x) for x in twilio.__version_info__])
if twilio_version > (5, ):
from twilio.rest import Client as TwilioRestClient
else:
from twilio.rest import TwilioRestClient
HAS_TWILIO = True HAS_TWILIO = True
except ImportError: except ImportError:
HAS_TWILIO = False HAS_TWILIO = False
@ -26,14 +33,24 @@ def __virtual__():
return False return False
def __validate__(config): def validate(config):
''' '''
Validate the beacon configuration Validate the beacon configuration
''' '''
# Configuration for twilio_txt_msg beacon should be a list of dicts # Configuration for twilio_txt_msg beacon should be a list of dicts
if not isinstance(config, dict): if not isinstance(config, list):
return False, ('Configuration for twilio_txt_msg beacon ' return False, ('Configuration for twilio_txt_msg beacon '
'must be a dictionary.') 'must be a list.')
else:
_config = {}
list(map(_config.update, config))
if not all(x in _config for x in ('account_sid',
'auth_token',
'twilio_number')):
return False, ('Configuration for twilio_txt_msg beacon '
'must contain account_sid, auth_token '
'and twilio_number items.')
return True, 'Valid beacon configuration' return True, 'Valid beacon configuration'
@ -46,20 +63,26 @@ def beacon(config):
beacons: beacons:
twilio_txt_msg: twilio_txt_msg:
account_sid: "<account sid>" - account_sid: "<account sid>"
auth_token: "<auth token>" - auth_token: "<auth token>"
twilio_number: "+15555555555" - twilio_number: "+15555555555"
interval: 10 - interval: 10
''' '''
log.trace('twilio_txt_msg beacon starting') log.trace('twilio_txt_msg beacon starting')
_config = {}
list(map(_config.update, config))
ret = [] ret = []
if not all([config['account_sid'], config['auth_token'], config['twilio_number']]): if not all([_config['account_sid'],
_config['auth_token'],
_config['twilio_number']]):
return ret return ret
output = {} output = {}
output['texts'] = [] output['texts'] = []
client = TwilioRestClient(config['account_sid'], config['auth_token']) client = TwilioRestClient(_config['account_sid'], _config['auth_token'])
messages = client.messages.list(to=config['twilio_number']) messages = client.messages.list(to=_config['twilio_number'])
log.trace('Num messages: {0}'.format(len(messages))) log.trace('Num messages: {0}'.format(len(messages)))
if len(messages) < 1: if len(messages) < 1:
log.trace('Twilio beacon has no texts') log.trace('Twilio beacon has no texts')
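
The twilio import above gates on the library version by comparing a tuple of ints rather than a string. A standalone illustration of that comparison; the version string is an example, not taken from the diff:

.. code-block:: python

    version_string = '6.5.0'
    version_info = tuple(int(x) for x in version_string.split('.'))

    # (6, 5, 0) > (5,) is True, so the newer Client import path would be taken;
    # note that (5, 1, 0) > (5,) is also True under tuple comparison.
    use_new_client = version_info > (5, )
    print(use_new_client)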

View File

@@ -5,7 +5,7 @@ Beacon to fire events at login of users as registered in the wtmp file
 .. code-block:: yaml

     beacons:
-      wtmp: {}
+      wtmp: []
 '''

 # Import Python libs
@@ -16,6 +16,9 @@ import struct
 # Import salt libs
 import salt.utils.files

+# Import 3rd-party libs
+from salt.ext import six
+
 __virtualname__ = 'wtmp'
 WTMP = '/var/log/wtmp'
 FMT = 'hi32s4s32s256shhiii4i20x'
@@ -52,13 +55,13 @@ def _get_loc():
         return __context__[LOC_KEY]


-def __validate__(config):
+def validate(config):
     '''
     Validate the beacon configuration
     '''
     # Configuration for wtmp beacon should be a list of dicts
-    if not isinstance(config, dict):
-        return False, ('Configuration for wtmp beacon must be a dictionary.')
+    if not isinstance(config, list):
+        return False, ('Configuration for wtmp beacon must be a list.')

     return True, 'Valid beacon configuration'

@@ -70,7 +73,7 @@ def beacon(config):
     .. code-block:: yaml

         beacons:
-          wtmp: {}
+          wtmp: []
     '''
     ret = []
     with salt.utils.files.fopen(WTMP, 'rb') as fp_:
@@ -90,7 +93,7 @@ def beacon(config):
                 event = {}
                 for ind, field in enumerate(FIELDS):
                     event[field] = pack[ind]
-                    if isinstance(event[field], str):
+                    if isinstance(event[field], six.string_types):
                         event[field] = event[field].strip('\x00')
                 ret.append(event)
             return ret

View File

@ -11,14 +11,14 @@ import copy
from datetime import datetime, timedelta from datetime import datetime, timedelta
# Import salt libs # Import salt libs
import salt.utils # Can be removed once print_cli is moved
import salt.client import salt.client
import salt.output import salt.output
import salt.exceptions import salt.exceptions
from salt.utils import print_cli
# Import 3rd-party libs # Import 3rd-party libs
# pylint: disable=import-error,no-name-in-module,redefined-builtin # pylint: disable=import-error,no-name-in-module,redefined-builtin
import salt.ext.six as six from salt.ext import six
from salt.ext.six.moves import range from salt.ext.six.moves import range
# pylint: enable=import-error,no-name-in-module,redefined-builtin # pylint: enable=import-error,no-name-in-module,redefined-builtin
import logging import logging
@ -73,7 +73,7 @@ class Batch(object):
m = next(six.iterkeys(ret)) m = next(six.iterkeys(ret))
except StopIteration: except StopIteration:
if not self.quiet: if not self.quiet:
print_cli('No minions matched the target.') salt.utils.print_cli('No minions matched the target.')
break break
if m is not None: if m is not None:
fret.add(m) fret.add(m)
@ -95,7 +95,7 @@ class Batch(object):
return int(self.opts['batch']) return int(self.opts['batch'])
except ValueError: except ValueError:
if not self.quiet: if not self.quiet:
print_cli('Invalid batch data sent: {0}\nData must be in the ' salt.utils.print_cli('Invalid batch data sent: {0}\nData must be in the '
'form of %10, 10% or 3'.format(self.opts['batch'])) 'form of %10, 10% or 3'.format(self.opts['batch']))
def __update_wait(self, wait): def __update_wait(self, wait):
@ -146,7 +146,7 @@ class Batch(object):
# We already know some minions didn't respond to the ping, so inform # We already know some minions didn't respond to the ping, so inform
# the user we won't be attempting to run a job on them # the user we won't be attempting to run a job on them
for down_minion in self.down_minions: for down_minion in self.down_minions:
print_cli('Minion {0} did not respond. No job will be sent.'.format(down_minion)) salt.utils.print_cli('Minion {0} did not respond. No job will be sent.'.format(down_minion))
# Iterate while we still have things to execute # Iterate while we still have things to execute
while len(ret) < len(self.minions): while len(ret) < len(self.minions):
@ -171,7 +171,7 @@ class Batch(object):
if next_: if next_:
if not self.quiet: if not self.quiet:
print_cli('\nExecuting run on {0}\n'.format(sorted(next_))) salt.utils.print_cli('\nExecuting run on {0}\n'.format(sorted(next_)))
# create a new iterator for this batch of minions # create a new iterator for this batch of minions
new_iter = self.local.cmd_iter_no_block( new_iter = self.local.cmd_iter_no_block(
*args, *args,
@ -218,14 +218,14 @@ class Batch(object):
if part['data']['id'] in minion_tracker[queue]['minions']: if part['data']['id'] in minion_tracker[queue]['minions']:
minion_tracker[queue]['minions'].remove(part['data']['id']) minion_tracker[queue]['minions'].remove(part['data']['id'])
else: else:
print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(part['id'])) salt.utils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(part['id']))
else: else:
parts.update(part) parts.update(part)
for id in part: for id in part:
if id in minion_tracker[queue]['minions']: if id in minion_tracker[queue]['minions']:
minion_tracker[queue]['minions'].remove(id) minion_tracker[queue]['minions'].remove(id)
else: else:
print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(id)) salt.utils.print_cli('minion {0} was already deleted from tracker, probably a duplicate key'.format(id))
except StopIteration: except StopIteration:
# if a iterator is done: # if a iterator is done:
# - set it to inactive # - set it to inactive

View File

@@ -1,16 +1,15 @@
 # -*- coding: utf-8 -*-

-from __future__ import print_function
-from __future__ import absolute_import
+from __future__ import absolute_import, print_function

 import os

-from salt.utils import parsers
+import salt.utils.parsers
 from salt.utils.verify import verify_log
 from salt.config import _expand_glob_path
 import salt.cli.caller
 import salt.defaults.exitcodes


-class SaltCall(parsers.SaltCallOptionParser):
+class SaltCall(salt.utils.parsers.SaltCallOptionParser):
     '''
     Used to locally execute a salt command
     '''

View File

@ -20,19 +20,17 @@ import salt.minion
import salt.output import salt.output
import salt.payload import salt.payload
import salt.transport import salt.transport
import salt.utils # Can be removed once print_cli, activate_profile, and output_profile are moved
import salt.utils.args import salt.utils.args
import salt.utils.files import salt.utils.files
import salt.utils.jid import salt.utils.jid
import salt.utils.kinds as kinds
import salt.utils.minion import salt.utils.minion
import salt.defaults.exitcodes import salt.defaults.exitcodes
from salt.log import LOG_LEVELS
from salt.utils import is_windows
from salt.utils import print_cli
from salt.utils import kinds
from salt.utils import activate_profile
from salt.utils import output_profile
from salt.utils.process import MultiprocessingProcess
from salt.cli import daemons from salt.cli import daemons
from salt.log import LOG_LEVELS
from salt.utils.platform import is_windows
from salt.utils.process import MultiprocessingProcess
try: try:
from raet import raeting, nacling from raet import raeting, nacling
@ -47,7 +45,7 @@ except ImportError:
pass pass
# Import 3rd-party libs # Import 3rd-party libs
import salt.ext.six as six from salt.ext import six
# Custom exceptions # Custom exceptions
from salt.exceptions import ( from salt.exceptions import (
@ -115,7 +113,7 @@ class BaseCaller(object):
docs[name] = func.__doc__ docs[name] = func.__doc__
for name in sorted(docs): for name in sorted(docs):
if name.startswith(self.opts.get('fun', '')): if name.startswith(self.opts.get('fun', '')):
print_cli('{0}:\n{1}\n'.format(name, docs[name])) salt.utils.print_cli('{0}:\n{1}\n'.format(name, docs[name]))
def print_grains(self): def print_grains(self):
''' '''
@ -130,14 +128,14 @@ class BaseCaller(object):
''' '''
profiling_enabled = self.opts.get('profiling_enabled', False) profiling_enabled = self.opts.get('profiling_enabled', False)
try: try:
pr = activate_profile(profiling_enabled) pr = salt.utils.activate_profile(profiling_enabled)
try: try:
ret = self.call() ret = self.call()
finally: finally:
output_profile(pr, salt.utils.output_profile(
stats_path=self.opts.get('profiling_path', pr,
'/tmp/stats'), stats_path=self.opts.get('profiling_path', '/tmp/stats'),
stop=True) stop=True)
out = ret.get('out', 'nested') out = ret.get('out', 'nested')
if self.opts['print_metadata']: if self.opts['print_metadata']:
print_ret = ret print_ret = ret
@ -211,7 +209,7 @@ class BaseCaller(object):
ret['return'] = func(*args, **kwargs) ret['return'] = func(*args, **kwargs)
except TypeError as exc: except TypeError as exc:
sys.stderr.write('\nPassed invalid arguments: {0}.\n\nUsage:\n'.format(exc)) sys.stderr.write('\nPassed invalid arguments: {0}.\n\nUsage:\n'.format(exc))
print_cli(func.__doc__) salt.utils.print_cli(func.__doc__)
active_level = LOG_LEVELS.get( active_level = LOG_LEVELS.get(
self.opts['log_level'].lower(), logging.ERROR) self.opts['log_level'].lower(), logging.ERROR)
if active_level <= logging.DEBUG: if active_level <= logging.DEBUG:

View File

@ -21,7 +21,9 @@ import salt.client
import salt.utils.gzip_util import salt.utils.gzip_util
import salt.utils.itertools import salt.utils.itertools
import salt.utils.minions import salt.utils.minions
from salt.utils import parsers, to_bytes import salt.utils.parsers
import salt.utils.platform
import salt.utils.stringutils
from salt.utils.verify import verify_log from salt.utils.verify import verify_log
import salt.output import salt.output
@ -31,7 +33,7 @@ from salt.ext import six
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class SaltCPCli(parsers.SaltCPOptionParser): class SaltCPCli(salt.utils.parsers.SaltCPOptionParser):
''' '''
Run the salt-cp command line client Run the salt-cp command line client
''' '''
@ -56,7 +58,7 @@ class SaltCP(object):
''' '''
def __init__(self, opts): def __init__(self, opts):
self.opts = opts self.opts = opts
self.is_windows = salt.utils.is_windows() self.is_windows = salt.utils.platform.is_windows()
def _mode(self, path): def _mode(self, path):
if self.is_windows: if self.is_windows:
@ -152,7 +154,7 @@ class SaltCP(object):
index = 1 index = 1
failed = {} failed = {}
for chunk in reader(fn_, chunk_size=self.opts['salt_cp_chunk_size']): for chunk in reader(fn_, chunk_size=self.opts['salt_cp_chunk_size']):
chunk = base64.b64encode(to_bytes(chunk)) chunk = base64.b64encode(salt.utils.stringutils.to_bytes(chunk))
append = index > 1 append = index > 1
log.debug( log.debug(
'Copying %s to %starget \'%s\' as %s%s', 'Copying %s to %starget \'%s\' as %s%s',

View File

@ -41,10 +41,11 @@ import salt.log.setup
# the try block below bypasses an issue at build time so that modules don't # the try block below bypasses an issue at build time so that modules don't
# cause the build to fail # cause the build to fail
from salt.utils import migrations from salt.utils import migrations
from salt.utils import kinds import salt.utils.kinds as kinds
try: try:
from salt.utils import parsers, ip_bracket from salt.utils import ip_bracket
import salt.utils.parsers
from salt.utils.verify import check_user, verify_env, verify_socket from salt.utils.verify import check_user, verify_env, verify_socket
except ImportError as exc: except ImportError as exc:
if exc.args[0] != 'No module named _msgpack': if exc.args[0] != 'No module named _msgpack':
@ -109,7 +110,7 @@ class DaemonsMixin(object): # pylint: disable=no-init
self.shutdown(error) self.shutdown(error)
class Master(parsers.MasterOptionParser, DaemonsMixin): # pylint: disable=no-init class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin): # pylint: disable=no-init
''' '''
Creates a master server Creates a master server
''' '''
@ -220,7 +221,7 @@ class Master(parsers.MasterOptionParser, DaemonsMixin): # pylint: disable=no-in
super(Master, self).shutdown(exitcode, exitmsg) super(Master, self).shutdown(exitcode, exitmsg)
class Minion(parsers.MinionOptionParser, DaemonsMixin): # pylint: disable=no-init class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin): # pylint: disable=no-init
''' '''
Create a minion server Create a minion server
''' '''
@ -398,7 +399,7 @@ class Minion(parsers.MinionOptionParser, DaemonsMixin): # pylint: disable=no-in
# pylint: enable=no-member # pylint: enable=no-member
class ProxyMinion(parsers.ProxyMinionOptionParser, DaemonsMixin): # pylint: disable=no-init class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin): # pylint: disable=no-init
''' '''
Create a proxy minion server Create a proxy minion server
''' '''
@ -549,7 +550,7 @@ class ProxyMinion(parsers.ProxyMinionOptionParser, DaemonsMixin): # pylint: dis
# pylint: enable=no-member # pylint: enable=no-member
class Syndic(parsers.SyndicOptionParser, DaemonsMixin): # pylint: disable=no-init class Syndic(salt.utils.parsers.SyndicOptionParser, DaemonsMixin): # pylint: disable=no-init
''' '''
Create a syndic server Create a syndic server
''' '''

View File

@@ -3,11 +3,11 @@ from __future__ import print_function
 from __future__ import absolute_import

-from salt.utils import parsers
+import salt.utils.parsers
 from salt.utils.verify import check_user, verify_log


-class SaltKey(parsers.SaltKeyOptionParser):
+class SaltKey(salt.utils.parsers.SaltKeyOptionParser):
     '''
     Initialize the Salt key manager
     '''

View File

@@ -2,15 +2,14 @@
 from __future__ import print_function
 from __future__ import absolute_import

-from salt.utils import parsers
-from salt.utils import activate_profile
-from salt.utils import output_profile
+import salt.utils  # Can be removed once activate_profile and output_profile are moved
+import salt.utils.parsers
 from salt.utils.verify import check_user, verify_log
 from salt.exceptions import SaltClientError
 import salt.defaults.exitcodes  # pylint: disable=W0611


-class SaltRun(parsers.SaltRunOptionParser):
+class SaltRun(salt.utils.parsers.SaltRunOptionParser):
     '''
     Used to execute Salt runners
     '''
@@ -36,7 +35,7 @@ class SaltRun(parsers.SaltRunOptionParser):
         # someone tries to use the runners via the python API
         try:
             if check_user(self.config['user']):
-                pr = activate_profile(profiling_enabled)
+                pr = salt.utils.activate_profile(profiling_enabled)
                 try:
                     ret = runner.run()
                     # In older versions ret['data']['retcode'] was used
@@ -50,7 +49,7 @@ class SaltRun(parsers.SaltRunOptionParser):
                     elif isinstance(ret, dict) and 'retcode' in ret.get('data', {}):
                         self.exit(ret['data']['retcode'])
                 finally:
-                    output_profile(
+                    salt.utils.output_profile(
                         pr,
                         stats_path=self.options.profiling_path,
                         stop=True)

View File

@ -7,8 +7,8 @@ sys.modules['pkg_resources'] = None
import os import os
# Import Salt libs # Import Salt libs
from salt.ext.six import string_types import salt.utils # Can be removed once print_cli is moved
from salt.utils import parsers, print_cli import salt.utils.parsers
from salt.utils.args import yamlify_arg from salt.utils.args import yamlify_arg
from salt.utils.verify import verify_log from salt.utils.verify import verify_log
from salt.exceptions import ( from salt.exceptions import (
@ -18,10 +18,10 @@ from salt.exceptions import (
) )
# Import 3rd-party libs # Import 3rd-party libs
import salt.ext.six as six from salt.ext import six
class SaltCMD(parsers.SaltCMDOptionParser): class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
''' '''
The execution of a salt command happens here The execution of a salt command happens here
''' '''
@ -93,7 +93,7 @@ class SaltCMD(parsers.SaltCMDOptionParser):
# potentially switch to batch execution # potentially switch to batch execution
if self.options.batch_safe_limit > 1: if self.options.batch_safe_limit > 1:
if len(self._preview_target()) >= self.options.batch_safe_limit: if len(self._preview_target()) >= self.options.batch_safe_limit:
print_cli('\nNOTICE: Too many minions targeted, switching to batch execution.') salt.utils.print_cli('\nNOTICE: Too many minions targeted, switching to batch execution.')
self.options.batch = self.options.batch_safe_size self.options.batch = self.options.batch_safe_size
self._run_batch() self._run_batch()
return return
@ -140,7 +140,7 @@ class SaltCMD(parsers.SaltCMDOptionParser):
if self.config['async']: if self.config['async']:
jid = self.local_client.cmd_async(**kwargs) jid = self.local_client.cmd_async(**kwargs)
print_cli('Executed command with job ID: {0}'.format(jid)) salt.utils.print_cli('Executed command with job ID: {0}'.format(jid))
return return
# local will be None when there was an error # local will be None when there was an error
@ -279,12 +279,12 @@ class SaltCMD(parsers.SaltCMDOptionParser):
def _print_errors_summary(self, errors): def _print_errors_summary(self, errors):
if errors: if errors:
print_cli('\n') salt.utils.print_cli('\n')
print_cli('---------------------------') salt.utils.print_cli('---------------------------')
print_cli('Errors') salt.utils.print_cli('Errors')
print_cli('---------------------------') salt.utils.print_cli('---------------------------')
for error in errors: for error in errors:
print_cli(self._format_error(error)) salt.utils.print_cli(self._format_error(error))
def _print_returns_summary(self, ret): def _print_returns_summary(self, ret):
''' '''
@ -301,7 +301,7 @@ class SaltCMD(parsers.SaltCMDOptionParser):
if isinstance(minion_ret, dict) and 'ret' in minion_ret: if isinstance(minion_ret, dict) and 'ret' in minion_ret:
minion_ret = ret[each_minion].get('ret') minion_ret = ret[each_minion].get('ret')
if ( if (
isinstance(minion_ret, string_types) isinstance(minion_ret, six.string_types)
and minion_ret.startswith("Minion did not return") and minion_ret.startswith("Minion did not return")
): ):
if "Not connected" in minion_ret: if "Not connected" in minion_ret:
@ -314,22 +314,22 @@ class SaltCMD(parsers.SaltCMDOptionParser):
return_counter += 1 return_counter += 1
if self._get_retcode(ret[each_minion]): if self._get_retcode(ret[each_minion]):
failed_minions.append(each_minion) failed_minions.append(each_minion)
print_cli('\n') salt.utils.print_cli('\n')
print_cli('-------------------------------------------') salt.utils.print_cli('-------------------------------------------')
print_cli('Summary') salt.utils.print_cli('Summary')
print_cli('-------------------------------------------') salt.utils.print_cli('-------------------------------------------')
print_cli('# of minions targeted: {0}'.format(return_counter + not_return_counter)) salt.utils.print_cli('# of minions targeted: {0}'.format(return_counter + not_return_counter))
print_cli('# of minions returned: {0}'.format(return_counter)) salt.utils.print_cli('# of minions returned: {0}'.format(return_counter))
print_cli('# of minions that did not return: {0}'.format(not_return_counter)) salt.utils.print_cli('# of minions that did not return: {0}'.format(not_return_counter))
print_cli('# of minions with errors: {0}'.format(len(failed_minions))) salt.utils.print_cli('# of minions with errors: {0}'.format(len(failed_minions)))
if self.options.verbose: if self.options.verbose:
if not_connected_minions: if not_connected_minions:
print_cli('Minions not connected: {0}'.format(" ".join(not_connected_minions))) salt.utils.print_cli('Minions not connected: {0}'.format(" ".join(not_connected_minions)))
if not_response_minions: if not_response_minions:
print_cli('Minions not responding: {0}'.format(" ".join(not_response_minions))) salt.utils.print_cli('Minions not responding: {0}'.format(" ".join(not_response_minions)))
if failed_minions: if failed_minions:
print_cli('Minions with failures: {0}'.format(" ".join(failed_minions))) salt.utils.print_cli('Minions with failures: {0}'.format(" ".join(failed_minions)))
print_cli('-------------------------------------------') salt.utils.print_cli('-------------------------------------------')
def _progress_end(self, out): def _progress_end(self, out):
import salt.output import salt.output
@ -406,10 +406,10 @@ class SaltCMD(parsers.SaltCMDOptionParser):
docs = {} docs = {}
if not ret: if not ret:
self.exit(2, 'No minions found to gather docs from\n') self.exit(2, 'No minions found to gather docs from\n')
if isinstance(ret, str): if isinstance(ret, six.string_types):
self.exit(2, '{0}\n'.format(ret)) self.exit(2, '{0}\n'.format(ret))
for host in ret: for host in ret:
if isinstance(ret[host], string_types) \ if isinstance(ret[host], six.string_types) \
and (ret[host].startswith("Minion did not return") and (ret[host].startswith("Minion did not return")
or ret[host] == 'VALUE TRIMMED'): or ret[host] == 'VALUE TRIMMED'):
continue continue
@ -421,6 +421,6 @@ class SaltCMD(parsers.SaltCMDOptionParser):
salt.output.display_output({fun: docs[fun]}, 'nested', self.config) salt.output.display_output({fun: docs[fun]}, 'nested', self.config)
else: else:
for fun in sorted(docs): for fun in sorted(docs):
print_cli('{0}:'.format(fun)) salt.utils.print_cli('{0}:'.format(fun))
print_cli(docs[fun]) salt.utils.print_cli(docs[fun])
print_cli('') salt.utils.print_cli('')

View File

@@ -2,17 +2,21 @@
 from __future__ import print_function
 from __future__ import absolute_import

+import sys
+
 import salt.client.ssh
-from salt.utils import parsers
+import salt.utils.parsers
 from salt.utils.verify import verify_log


-class SaltSSH(parsers.SaltSSHOptionParser):
+class SaltSSH(salt.utils.parsers.SaltSSHOptionParser):
     '''
     Used to Execute the salt ssh routine
     '''

     def run(self):
+        if '-H' in sys.argv or '--hosts' in sys.argv:
+            sys.argv += ['x', 'x']  # Hack: pass a mandatory two options
+                                    # that won't be used anyways with -H or --hosts
         self.parse_args()
         self.setup_logfile_logger()
         verify_log(self.config)

File diff suppressed because it is too large

View File

@ -37,7 +37,7 @@ def tokenify(cmd, token=None):
Otherwise return cmd Otherwise return cmd
''' '''
if token is not None: if token is not None:
cmd['token'] = token cmd[u'token'] = token
return cmd return cmd
@ -50,19 +50,19 @@ class APIClient(object):
if not opts: if not opts:
opts = salt.config.client_config( opts = salt.config.client_config(
os.environ.get( os.environ.get(
'SALT_MASTER_CONFIG', u'SALT_MASTER_CONFIG',
os.path.join(syspaths.CONFIG_DIR, 'master') os.path.join(syspaths.CONFIG_DIR, u'master')
) )
) )
self.opts = opts self.opts = opts
self.localClient = salt.client.get_local_client(self.opts['conf_file']) self.localClient = salt.client.get_local_client(self.opts[u'conf_file'])
self.runnerClient = salt.runner.RunnerClient(self.opts) self.runnerClient = salt.runner.RunnerClient(self.opts)
self.wheelClient = salt.wheel.Wheel(self.opts) self.wheelClient = salt.wheel.Wheel(self.opts)
self.resolver = salt.auth.Resolver(self.opts) self.resolver = salt.auth.Resolver(self.opts)
self.event = salt.utils.event.get_event( self.event = salt.utils.event.get_event(
'master', u'master',
self.opts['sock_dir'], self.opts[u'sock_dir'],
self.opts['transport'], self.opts[u'transport'],
opts=self.opts, opts=self.opts,
listen=listen) listen=listen)
@ -118,20 +118,20 @@ class APIClient(object):
''' '''
cmd = dict(cmd) # make copy cmd = dict(cmd) # make copy
client = 'minion' # default to local minion client client = u'minion' # default to local minion client
mode = cmd.get('mode', 'async') # default to 'async' mode = cmd.get(u'mode', u'async') # default to 'async'
# check for wheel or runner prefix to fun name to use wheel or runner client # check for wheel or runner prefix to fun name to use wheel or runner client
funparts = cmd.get('fun', '').split('.') funparts = cmd.get(u'fun', u'').split(u'.')
if len(funparts) > 2 and funparts[0] in ['wheel', 'runner']: # master if len(funparts) > 2 and funparts[0] in [u'wheel', u'runner']: # master
client = funparts[0] client = funparts[0]
cmd['fun'] = '.'.join(funparts[1:]) # strip prefix cmd[u'fun'] = u'.'.join(funparts[1:]) # strip prefix
if not ('token' in cmd or if not (u'token' in cmd or
('eauth' in cmd and 'password' in cmd and 'username' in cmd)): (u'eauth' in cmd and u'password' in cmd and u'username' in cmd)):
raise EauthAuthenticationError('No authentication credentials given') raise EauthAuthenticationError(u'No authentication credentials given')
executor = getattr(self, '{0}_{1}'.format(client, mode)) executor = getattr(self, u'{0}_{1}'.format(client, mode))
result = executor(**cmd) result = executor(**cmd)
return result return result
@ -204,9 +204,9 @@ class APIClient(object):
Adds client per the command. Adds client per the command.
''' '''
cmd['client'] = 'minion' cmd[u'client'] = u'minion'
if len(cmd['module'].split('.')) > 2 and cmd['module'].split('.')[0] in ['runner', 'wheel']: if len(cmd[u'module'].split(u'.')) > 2 and cmd[u'module'].split(u'.')[0] in [u'runner', u'wheel']:
cmd['client'] = 'master' cmd[u'client'] = u'master'
return self._signature(cmd) return self._signature(cmd)
def _signature(self, cmd): def _signature(self, cmd):
@ -216,20 +216,20 @@ class APIClient(object):
''' '''
result = {} result = {}
client = cmd.get('client', 'minion') client = cmd.get(u'client', u'minion')
if client == 'minion': if client == u'minion':
cmd['fun'] = 'sys.argspec' cmd[u'fun'] = u'sys.argspec'
cmd['kwarg'] = dict(module=cmd['module']) cmd[u'kwarg'] = dict(module=cmd[u'module'])
result = self.run(cmd) result = self.run(cmd)
elif client == 'master': elif client == u'master':
parts = cmd['module'].split('.') parts = cmd[u'module'].split(u'.')
client = parts[0] client = parts[0]
module = '.'.join(parts[1:]) # strip prefix module = u'.'.join(parts[1:]) # strip prefix
if client == 'wheel': if client == u'wheel':
functions = self.wheelClient.functions functions = self.wheelClient.functions
elif client == 'runner': elif client == u'runner':
functions = self.runnerClient.functions functions = self.runnerClient.functions
result = {'master': salt.utils.argspec_report(functions, module)} result = {u'master': salt.utils.argspec_report(functions, module)}
return result return result
def create_token(self, creds): def create_token(self, creds):
@ -274,20 +274,20 @@ class APIClient(object):
tokenage = self.resolver.mk_token(creds) tokenage = self.resolver.mk_token(creds)
except Exception as ex: except Exception as ex:
raise EauthAuthenticationError( raise EauthAuthenticationError(
"Authentication failed with {0}.".format(repr(ex))) u"Authentication failed with {0}.".format(repr(ex)))
if 'token' not in tokenage: if u'token' not in tokenage:
raise EauthAuthenticationError("Authentication failed with provided credentials.") raise EauthAuthenticationError(u"Authentication failed with provided credentials.")
# Grab eauth config for the current backend for the current user # Grab eauth config for the current backend for the current user
tokenage_eauth = self.opts['external_auth'][tokenage['eauth']] tokenage_eauth = self.opts[u'external_auth'][tokenage[u'eauth']]
if tokenage['name'] in tokenage_eauth: if tokenage[u'name'] in tokenage_eauth:
tokenage['perms'] = tokenage_eauth[tokenage['name']] tokenage[u'perms'] = tokenage_eauth[tokenage[u'name']]
else: else:
tokenage['perms'] = tokenage_eauth['*'] tokenage[u'perms'] = tokenage_eauth[u'*']
tokenage['user'] = tokenage['name'] tokenage[u'user'] = tokenage[u'name']
tokenage['username'] = tokenage['name'] tokenage[u'username'] = tokenage[u'name']
return tokenage return tokenage
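
For reference, the token data assembled by create_token() above ends up shaped roughly as follows. This is an illustrative sketch only: every value is a placeholder, and only the keys the method reads or sets are listed (Resolver.mk_token() typically adds more, such as start and expire times).

    # Illustrative only -- placeholder values, keys as read/written above.
    tokenage = {
        u'token': u'<opaque token string>',
        u'eauth': u'pam',
        u'name': u'saltdev',
        u'perms': [u'.*', u'@runner', u'@wheel'],  # from external_auth[eauth][name] or ['*']
        u'user': u'saltdev',        # copied from 'name'
        u'username': u'saltdev',    # copied from 'name'
    }
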
@ -300,11 +300,11 @@ class APIClient(object):
result = self.resolver.get_token(token) result = self.resolver.get_token(token)
except Exception as ex: except Exception as ex:
raise EauthAuthenticationError( raise EauthAuthenticationError(
"Token validation failed with {0}.".format(repr(ex))) u"Token validation failed with {0}.".format(repr(ex)))
return result return result
def get_event(self, wait=0.25, tag='', full=False): def get_event(self, wait=0.25, tag=u'', full=False):
''' '''
Get a single salt event. Get a single salt event.
If no events are available, then block for up to ``wait`` seconds. If no events are available, then block for up to ``wait`` seconds.
@ -322,4 +322,4 @@ class APIClient(object):
Need to convert this to a master call with appropriate authentication Need to convert this to a master call with appropriate authentication
''' '''
return self.event.fire_event(data, tagify(tag, 'wui')) return self.event.fire_event(data, tagify(tag, u'wui'))
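
The hunk above is mostly a unicode-literal sweep, but it also shows how APIClient.run() routes a low-data command to the minion, runner, or wheel client. A minimal, dependency-free sketch of that routing logic follows; the function name is local to the example and ValueError stands in for EauthAuthenticationError.

    def route_low_cmd(cmd):
        '''Return (client, mode, fun) for a low-data command dict.'''
        cmd = dict(cmd)                            # work on a copy, as run() does
        client = u'minion'                         # default to the local minion client
        mode = cmd.get(u'mode', u'async')          # default to asynchronous execution
        funparts = cmd.get(u'fun', u'').split(u'.')
        if len(funparts) > 2 and funparts[0] in (u'wheel', u'runner'):
            client = funparts[0]                   # master-side wheel/runner client
            cmd[u'fun'] = u'.'.join(funparts[1:])  # strip the wheel./runner. prefix
        if not (u'token' in cmd or
                (u'eauth' in cmd and u'password' in cmd and u'username' in cmd)):
            raise ValueError(u'No authentication credentials given')
        return client, mode, cmd[u'fun']

    # prints the tuple ('runner', 'async', 'jobs.list_jobs') on Python 3
    print(route_low_cmd({u'fun': u'runner.jobs.list_jobs', u'token': u'<token>'}))
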


@ -23,10 +23,12 @@ import salt.utils.event
import salt.utils.jid import salt.utils.jid
import salt.utils.job import salt.utils.job
import salt.utils.lazy import salt.utils.lazy
import salt.utils.platform
import salt.utils.process import salt.utils.process
import salt.utils.versions
import salt.transport import salt.transport
import salt.log.setup import salt.log.setup
import salt.ext.six as six from salt.ext import six
# Import 3rd-party libs # Import 3rd-party libs
import tornado.stack_context import tornado.stack_context
@ -34,18 +36,18 @@ import tornado.stack_context
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
CLIENT_INTERNAL_KEYWORDS = frozenset([ CLIENT_INTERNAL_KEYWORDS = frozenset([
'client', u'client',
'cmd', u'cmd',
'eauth', u'eauth',
'fun', u'fun',
'kwarg', u'kwarg',
'match', u'match',
'token', u'token',
'__jid__', u'__jid__',
'__tag__', u'__tag__',
'__user__', u'__user__',
'username', u'username',
'password' u'password'
]) ])
@ -77,9 +79,9 @@ class ClientFuncsDict(collections.MutableMapping):
raise KeyError raise KeyError
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
low = {'fun': key, low = {u'fun': key,
'args': args, u'args': args,
'kwargs': kwargs, u'kwargs': kwargs,
} }
pub_data = {} pub_data = {}
# Copy kwargs keys so we can iterate over and pop the pub data # Copy kwargs keys so we can iterate over and pop the pub data
@ -87,18 +89,18 @@ class ClientFuncsDict(collections.MutableMapping):
# pull out pub_data if you have it # pull out pub_data if you have it
for kwargs_key in kwargs_keys: for kwargs_key in kwargs_keys:
if kwargs_key.startswith('__pub_'): if kwargs_key.startswith(u'__pub_'):
pub_data[kwargs_key] = kwargs.pop(kwargs_key) pub_data[kwargs_key] = kwargs.pop(kwargs_key)
async_pub = self.client._gen_async_pub(pub_data.get('__pub_jid')) async_pub = self.client._gen_async_pub(pub_data.get(u'__pub_jid'))
user = salt.utils.get_specific_user() user = salt.utils.get_specific_user()
return self.client._proc_function( return self.client._proc_function(
key, key,
low, low,
user, user,
async_pub['tag'], # TODO: fix async_pub[u'tag'], # TODO: fix
async_pub['jid'], # TODO: fix async_pub[u'jid'], # TODO: fix
False, # Don't daemonize False, # Don't daemonize
) )
return wrapper return wrapper
@ -129,14 +131,14 @@ class SyncClientMixin(object):
Execute a function through the master network interface. Execute a function through the master network interface.
''' '''
load = kwargs load = kwargs
load['cmd'] = self.client load[u'cmd'] = self.client
channel = salt.transport.Channel.factory(self.opts, channel = salt.transport.Channel.factory(self.opts,
crypt='clear', crypt=u'clear',
usage='master_call') usage=u'master_call')
ret = channel.send(load) ret = channel.send(load)
if isinstance(ret, collections.Mapping): if isinstance(ret, collections.Mapping):
if 'error' in ret: if u'error' in ret:
salt.utils.error.raise_error(**ret['error']) salt.utils.error.raise_error(**ret[u'error'])
return ret return ret
def cmd_sync(self, low, timeout=None, full_return=False): def cmd_sync(self, low, timeout=None, full_return=False):
@ -155,19 +157,19 @@ class SyncClientMixin(object):
'eauth': 'pam', 'eauth': 'pam',
}) })
''' '''
event = salt.utils.event.get_master_event(self.opts, self.opts['sock_dir'], listen=True) event = salt.utils.event.get_master_event(self.opts, self.opts[u'sock_dir'], listen=True)
job = self.master_call(**low) job = self.master_call(**low)
ret_tag = salt.utils.event.tagify('ret', base=job['tag']) ret_tag = salt.utils.event.tagify(u'ret', base=job[u'tag'])
if timeout is None: if timeout is None:
timeout = self.opts.get('rest_timeout', 300) timeout = self.opts.get(u'rest_timeout', 300)
ret = event.get_event(tag=ret_tag, full=True, wait=timeout, auto_reconnect=True) ret = event.get_event(tag=ret_tag, full=True, wait=timeout, auto_reconnect=True)
if ret is None: if ret is None:
raise salt.exceptions.SaltClientTimeout( raise salt.exceptions.SaltClientTimeout(
"RunnerClient job '{0}' timed out".format(job['jid']), u"RunnerClient job '{0}' timed out".format(job[u'jid']),
jid=job['jid']) jid=job[u'jid'])
return ret if full_return else ret['data']['return'] return ret if full_return else ret[u'data'][u'return']
def cmd(self, fun, arg=None, pub_data=None, kwarg=None, print_event=True, full_return=False): def cmd(self, fun, arg=None, pub_data=None, kwarg=None, print_event=True, full_return=False):
''' '''
@ -202,40 +204,40 @@ class SyncClientMixin(object):
arg = tuple() arg = tuple()
if not isinstance(arg, list) and not isinstance(arg, tuple): if not isinstance(arg, list) and not isinstance(arg, tuple):
raise salt.exceptions.SaltInvocationError( raise salt.exceptions.SaltInvocationError(
'arg must be formatted as a list/tuple' u'arg must be formatted as a list/tuple'
) )
if pub_data is None: if pub_data is None:
pub_data = {} pub_data = {}
if not isinstance(pub_data, dict): if not isinstance(pub_data, dict):
raise salt.exceptions.SaltInvocationError( raise salt.exceptions.SaltInvocationError(
'pub_data must be formatted as a dictionary' u'pub_data must be formatted as a dictionary'
) )
if kwarg is None: if kwarg is None:
kwarg = {} kwarg = {}
if not isinstance(kwarg, dict): if not isinstance(kwarg, dict):
raise salt.exceptions.SaltInvocationError( raise salt.exceptions.SaltInvocationError(
'kwarg must be formatted as a dictionary' u'kwarg must be formatted as a dictionary'
) )
arglist = salt.utils.args.parse_input( arglist = salt.utils.args.parse_input(
arg, arg,
no_parse=self.opts.get('no_parse', [])) no_parse=self.opts.get(u'no_parse', []))
# if you were passed kwarg, add it to arglist # if you were passed kwarg, add it to arglist
if kwarg: if kwarg:
kwarg['__kwarg__'] = True kwarg[u'__kwarg__'] = True
arglist.append(kwarg) arglist.append(kwarg)
args, kwargs = salt.minion.load_args_and_kwargs( args, kwargs = salt.minion.load_args_and_kwargs(
self.functions[fun], arglist, pub_data self.functions[fun], arglist, pub_data
) )
low = {'fun': fun, low = {u'fun': fun,
'arg': args, u'arg': args,
'kwarg': kwargs} u'kwarg': kwargs}
return self.low(fun, low, print_event=print_event, full_return=full_return) return self.low(fun, low, print_event=print_event, full_return=full_return)
@property @property
def mminion(self): def mminion(self):
if not hasattr(self, '_mminion'): if not hasattr(self, u'_mminion'):
self._mminion = salt.minion.MasterMinion(self.opts, states=False, rend=False) self._mminion = salt.minion.MasterMinion(self.opts, states=False, rend=False)
return self._mminion return self._mminion
@ -244,15 +246,15 @@ class SyncClientMixin(object):
Check for deprecated usage and allow until Salt Oxygen. Check for deprecated usage and allow until Salt Oxygen.
''' '''
msg = [] msg = []
if 'args' in low: if u'args' in low:
msg.append('call with arg instead') msg.append(u'call with arg instead')
low['arg'] = low.pop('args') low[u'arg'] = low.pop(u'args')
if 'kwargs' in low: if u'kwargs' in low:
msg.append('call with kwarg instead') msg.append(u'call with kwarg instead')
low['kwarg'] = low.pop('kwargs') low[u'kwarg'] = low.pop(u'kwargs')
if msg: if msg:
salt.utils.warn_until('Oxygen', ' '.join(msg)) salt.utils.versions.warn_until(u'Oxygen', u' '.join(msg))
return self._low(fun, low, print_event=print_event, full_return=full_return) return self._low(fun, low, print_event=print_event, full_return=full_return)
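
As the shim above indicates, callers should pass 'arg'/'kwarg' in the low data; 'args'/'kwargs' only survive until Oxygen via warn_until(). A hedged sketch of a low dict as cmd_sync() would accept it, with placeholder credentials and a placeholder jid:

    low = {
        u'fun': u'jobs.lookup_jid',
        u'arg': [u'20170816000000000000'],   # positional args; placeholder jid
        u'kwarg': {},                        # keyword args go here, not under 'kwargs'
        u'username': u'saltdev',             # placeholder eauth credentials
        u'password': u'saltdev',
        u'eauth': u'pam',
    }
    # e.g. salt.runner.RunnerClient(opts).cmd_sync(low)
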
@ -266,13 +268,13 @@ class SyncClientMixin(object):
class_name = self.__class__.__name__.lower() class_name = self.__class__.__name__.lower()
except AttributeError: except AttributeError:
log.warning( log.warning(
'Unable to determine class name', u'Unable to determine class name',
exc_info_on_loglevel=logging.DEBUG exc_info_on_loglevel=logging.DEBUG
) )
return True return True
try: try:
return self.opts['{0}_returns'.format(class_name)] return self.opts[u'{0}_returns'.format(class_name)]
except KeyError: except KeyError:
# No such option, assume this isn't one we care about gating and # No such option, assume this isn't one we care about gating and
# just return True. # just return True.
@ -295,24 +297,24 @@ class SyncClientMixin(object):
# this is not to clutter the output with the module loading # this is not to clutter the output with the module loading
# if we have a high debug level. # if we have a high debug level.
self.mminion # pylint: disable=W0104 self.mminion # pylint: disable=W0104
jid = low.get('__jid__', salt.utils.jid.gen_jid()) jid = low.get(u'__jid__', salt.utils.jid.gen_jid())
tag = low.get('__tag__', salt.utils.event.tagify(jid, prefix=self.tag_prefix)) tag = low.get(u'__tag__', salt.utils.event.tagify(jid, prefix=self.tag_prefix))
data = {'fun': '{0}.{1}'.format(self.client, fun), data = {u'fun': u'{0}.{1}'.format(self.client, fun),
'jid': jid, u'jid': jid,
'user': low.get('__user__', 'UNKNOWN'), u'user': low.get(u'__user__', u'UNKNOWN'),
} }
event = salt.utils.event.get_event( event = salt.utils.event.get_event(
'master', u'master',
self.opts['sock_dir'], self.opts[u'sock_dir'],
self.opts['transport'], self.opts[u'transport'],
opts=self.opts, opts=self.opts,
listen=False) listen=False)
if print_event: if print_event:
print_func = self.print_async_event \ print_func = self.print_async_event \
if hasattr(self, 'print_async_event') \ if hasattr(self, u'print_async_event') \
else None else None
else: else:
# Suppress printing of return event (this keeps us from printing # Suppress printing of return event (this keeps us from printing
@ -327,12 +329,12 @@ class SyncClientMixin(object):
# TODO: document these, and test that they exist # TODO: document these, and test that they exist
# TODO: Other things to inject?? # TODO: Other things to inject??
func_globals = {'__jid__': jid, func_globals = {u'__jid__': jid,
'__user__': data['user'], u'__user__': data[u'user'],
'__tag__': tag, u'__tag__': tag,
# weak ref to avoid the Exception in interpreter # weak ref to avoid the Exception in interpreter
# teardown of event # teardown of event
'__jid_event__': weakref.proxy(namespaced_event), u'__jid_event__': weakref.proxy(namespaced_event),
} }
try: try:
@ -344,9 +346,9 @@ class SyncClientMixin(object):
completed_funcs = [] completed_funcs = []
for mod_name in six.iterkeys(self_functions): for mod_name in six.iterkeys(self_functions):
if '.' not in mod_name: if u'.' not in mod_name:
continue continue
mod, _ = mod_name.split('.', 1) mod, _ = mod_name.split(u'.', 1)
if mod in completed_funcs: if mod in completed_funcs:
continue continue
completed_funcs.append(mod) completed_funcs.append(mod)
@ -362,86 +364,87 @@ class SyncClientMixin(object):
# we make the transition we will load "kwargs" using format_call if # we make the transition we will load "kwargs" using format_call if
# there are no kwargs in the low object passed in # there are no kwargs in the low object passed in
f_call = None f_call = None
if 'arg' not in low: if u'arg' not in low:
f_call = salt.utils.format_call( f_call = salt.utils.format_call(
self.functions[fun], self.functions[fun],
low, low,
expected_extra_kws=CLIENT_INTERNAL_KEYWORDS expected_extra_kws=CLIENT_INTERNAL_KEYWORDS
) )
args = f_call.get('args', ()) args = f_call.get(u'args', ())
else: else:
args = low['arg'] args = low[u'arg']
if 'kwarg' not in low: if u'kwarg' not in low:
log.critical( log.critical(
'kwargs must be passed inside the low data within the ' u'kwargs must be passed inside the low data within the '
'\'kwarg\' key. See usage of ' u'\'kwarg\' key. See usage of '
'salt.utils.args.parse_input() and ' u'salt.utils.args.parse_input() and '
'salt.minion.load_args_and_kwargs() elsewhere in the ' u'salt.minion.load_args_and_kwargs() elsewhere in the '
'codebase.' u'codebase.'
) )
kwargs = {} kwargs = {}
else: else:
kwargs = low['kwarg'] kwargs = low[u'kwarg']
# Update the event data with loaded args and kwargs # Update the event data with loaded args and kwargs
data['fun_args'] = list(args) + ([kwargs] if kwargs else []) data[u'fun_args'] = list(args) + ([kwargs] if kwargs else [])
func_globals['__jid_event__'].fire_event(data, 'new') func_globals[u'__jid_event__'].fire_event(data, u'new')
# Initialize a context for executing the method. # Initialize a context for executing the method.
with tornado.stack_context.StackContext(self.functions.context_dict.clone): with tornado.stack_context.StackContext(self.functions.context_dict.clone):
data['return'] = self.functions[fun](*args, **kwargs) data[u'return'] = self.functions[fun](*args, **kwargs)
data['success'] = True data[u'success'] = True
if isinstance(data['return'], dict) and 'data' in data['return']: if isinstance(data[u'return'], dict) and u'data' in data[u'return']:
# some functions can return boolean values # some functions can return boolean values
data['success'] = salt.utils.check_state_result(data['return']['data']) data[u'success'] = salt.utils.check_state_result(data[u'return'][u'data'])
except (Exception, SystemExit) as ex: except (Exception, SystemExit) as ex:
if isinstance(ex, salt.exceptions.NotImplemented): if isinstance(ex, salt.exceptions.NotImplemented):
data['return'] = str(ex) data[u'return'] = str(ex)
else: else:
data['return'] = 'Exception occurred in {0} {1}: {2}'.format( data[u'return'] = u'Exception occurred in {0} {1}: {2}'.format(
self.client, self.client,
fun, fun,
traceback.format_exc(), traceback.format_exc(),
) )
data['success'] = False data[u'success'] = False
namespaced_event.fire_event(data, 'ret')
if self.store_job: if self.store_job:
try: try:
salt.utils.job.store_job( salt.utils.job.store_job(
self.opts, self.opts,
{ {
'id': self.opts['id'], u'id': self.opts[u'id'],
'tgt': self.opts['id'], u'tgt': self.opts[u'id'],
'jid': data['jid'], u'jid': data[u'jid'],
'return': data, u'return': data,
}, },
event=None, event=None,
mminion=self.mminion, mminion=self.mminion,
) )
except salt.exceptions.SaltCacheError: except salt.exceptions.SaltCacheError:
log.error('Could not store job cache info. ' log.error(u'Could not store job cache info. '
'Job details for this run may be unavailable.') u'Job details for this run may be unavailable.')
# Outputters _can_ mutate data so write to the job cache first!
namespaced_event.fire_event(data, u'ret')
# if we fired an event, make sure to delete the event object. # if we fired an event, make sure to delete the event object.
# This will ensure that we call destroy, which will do the 0MQ linger # This will ensure that we call destroy, which will do the 0MQ linger
log.info('Runner completed: {0}'.format(data['jid'])) log.info(u'Runner completed: %s', data[u'jid'])
del event del event
del namespaced_event del namespaced_event
return data if full_return else data['return'] return data if full_return else data[u'return']
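
The comment added above ('Outputters _can_ mutate data...') is the whole point of moving the fire_event call below store_job. A toy, stand-alone illustration of why that ordering matters; the stubs below only model the ordering and are not Salt APIs.

    data = {u'jid': u'20170816000000000000', u'return': {u'changed': True}}

    def store_job_stub(payload):              # stands in for salt.utils.job.store_job
        return dict(payload)                  # snapshot of what the job cache keeps

    def fire_event_stub(payload):             # stands in for firing the event; assume a
        payload[u'return'] = u'<rendered>'    # downstream outputter mutates the payload

    cached = store_job_stub(data)             # cache first ...
    fire_event_stub(data)                     # ... then fire the event
    assert cached[u'return'] == {u'changed': True}   # cache kept the real return
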
def get_docs(self, arg=None): def get_docs(self, arg=None):
''' '''
Return a dictionary of functions and the inline documentation for each Return a dictionary of functions and the inline documentation for each
''' '''
if arg: if arg:
if '*' in arg: if u'*' in arg:
target_mod = arg target_mod = arg
_use_fnmatch = True _use_fnmatch = True
else: else:
target_mod = arg + '.' if not arg.endswith('.') else arg target_mod = arg + u'.' if not arg.endswith(u'.') else arg
if _use_fnmatch: if _use_fnmatch:
docs = [(fun, self.functions[fun].__doc__) docs = [(fun, self.functions[fun].__doc__)
for fun in fnmatch.filter(self.functions, target_mod)] for fun in fnmatch.filter(self.functions, target_mod)]
@ -468,7 +471,7 @@ class AsyncClientMixin(object):
Run this method in a multiprocess target to execute the function in a Run this method in a multiprocess target to execute the function in a
multiprocess and fire the return data on the event bus multiprocess and fire the return data on the event bus
''' '''
if daemonize and not salt.utils.is_windows(): if daemonize and not salt.utils.platform.is_windows():
# Shutdown the multiprocessing before daemonizing # Shutdown the multiprocessing before daemonizing
salt.log.setup.shutdown_multiprocessing_logging() salt.log.setup.shutdown_multiprocessing_logging()
@ -478,9 +481,9 @@ class AsyncClientMixin(object):
salt.log.setup.setup_multiprocessing_logging() salt.log.setup.setup_multiprocessing_logging()
# pack a few things into low # pack a few things into low
low['__jid__'] = jid low[u'__jid__'] = jid
low['__user__'] = user low[u'__user__'] = user
low['__tag__'] = tag low[u'__tag__'] = tag
return self.low(fun, low, full_return=False) return self.low(fun, low, full_return=False)
@ -508,9 +511,9 @@ class AsyncClientMixin(object):
if jid is None: if jid is None:
jid = salt.utils.jid.gen_jid() jid = salt.utils.jid.gen_jid()
tag = salt.utils.event.tagify(jid, prefix=self.tag_prefix) tag = salt.utils.event.tagify(jid, prefix=self.tag_prefix)
return {'tag': tag, 'jid': jid} return {u'tag': tag, u'jid': jid}
def async(self, fun, low, user='UNKNOWN', pub=None): def async(self, fun, low, user=u'UNKNOWN', pub=None):
''' '''
Execute the function in a multiprocess and return the event tag to use Execute the function in a multiprocess and return the event tag to use
to watch for the return to watch for the return
@ -519,7 +522,7 @@ class AsyncClientMixin(object):
proc = salt.utils.process.SignalHandlingMultiprocessingProcess( proc = salt.utils.process.SignalHandlingMultiprocessingProcess(
target=self._proc_function, target=self._proc_function,
args=(fun, low, user, async_pub['tag'], async_pub['jid'])) args=(fun, low, user, async_pub[u'tag'], async_pub[u'jid']))
with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM):
# Reset current signals before starting the process in # Reset current signals before starting the process in
# order not to inherit the current signal handlers # order not to inherit the current signal handlers
@ -535,29 +538,29 @@ class AsyncClientMixin(object):
return return
# if we are "quiet", don't print # if we are "quiet", don't print
if self.opts.get('quiet', False): if self.opts.get(u'quiet', False):
return return
# some suffixes we don't want to print # some suffixes we don't want to print
if suffix in ('new',): if suffix in (u'new',):
return return
try: try:
outputter = self.opts.get('output', event.get('outputter', None) or event.get('return').get('outputter')) outputter = self.opts.get(u'output', event.get(u'outputter', None) or event.get(u'return').get(u'outputter'))
except AttributeError: except AttributeError:
outputter = None outputter = None
# if this is a ret, we have our own set of rules # if this is a ret, we have our own set of rules
if suffix == 'ret': if suffix == u'ret':
# Check if outputter was passed in the return data. If this is the case, # Check if outputter was passed in the return data. If this is the case,
# then the return data will be a dict with two keys: 'data' and 'outputter' # then the return data will be a dict with two keys: 'data' and 'outputter'
if isinstance(event.get('return'), dict) \ if isinstance(event.get(u'return'), dict) \
and set(event['return']) == set(('data', 'outputter')): and set(event[u'return']) == set((u'data', u'outputter')):
event_data = event['return']['data'] event_data = event[u'return'][u'data']
outputter = event['return']['outputter'] outputter = event[u'return'][u'outputter']
else: else:
event_data = event['return'] event_data = event[u'return']
else: else:
event_data = {'suffix': suffix, 'event': event} event_data = {u'suffix': suffix, u'event': event}
salt.output.display_output(event_data, outputter, self.opts) salt.output.display_output(event_data, outputter, self.opts)


@ -20,7 +20,7 @@ class NetapiClient(object):
''' '''
def __init__(self, opts): def __init__(self, opts):
self.opts = opts self.opts = opts
self.process_manager = salt.utils.process.ProcessManager(name='NetAPIProcessManager') self.process_manager = salt.utils.process.ProcessManager(name=u'NetAPIProcessManager')
self.netapi = salt.loader.netapi(self.opts) self.netapi = salt.loader.netapi(self.opts)
def run(self): def run(self):
@ -28,11 +28,11 @@ class NetapiClient(object):
Load and start all available api modules Load and start all available api modules
''' '''
if not len(self.netapi): if not len(self.netapi):
log.error("Did not find any netapi configurations, nothing to start") log.error(u"Did not find any netapi configurations, nothing to start")
for fun in self.netapi: for fun in self.netapi:
if fun.endswith('.start'): if fun.endswith(u'.start'):
log.info('Starting {0} netapi module'.format(fun)) log.info(u'Starting %s netapi module', fun)
self.process_manager.add_process(self.netapi[fun]) self.process_manager.add_process(self.netapi[fun])
# Install the SIGINT/SIGTERM handlers if not done so far # Install the SIGINT/SIGTERM handlers if not done so far


@ -12,9 +12,9 @@ import logging
# Import Salt libs # Import Salt libs
import salt.config import salt.config
import salt.client import salt.client
import salt.utils import salt.utils.kinds as kinds
import salt.utils.versions
import salt.syspaths as syspaths import salt.syspaths as syspaths
from salt.utils import kinds
try: try:
from raet import raeting, nacling from raet import raeting, nacling
@ -32,7 +32,7 @@ class LocalClient(salt.client.LocalClient):
The RAET LocalClient The RAET LocalClient
''' '''
def __init__(self, def __init__(self,
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'), c_path=os.path.join(syspaths.CONFIG_DIR, u'master'),
mopts=None): mopts=None):
salt.client.LocalClient.__init__(self, c_path, mopts) salt.client.LocalClient.__init__(self, c_path, mopts)
@ -41,22 +41,22 @@ class LocalClient(salt.client.LocalClient):
tgt, tgt,
fun, fun,
arg=(), arg=(),
tgt_type='glob', tgt_type=u'glob',
ret='', ret=u'',
jid='', jid=u'',
timeout=5, timeout=5,
**kwargs): **kwargs):
''' '''
Publish the command! Publish the command!
''' '''
if 'expr_form' in kwargs: if u'expr_form' in kwargs:
salt.utils.warn_until( salt.utils.versions.warn_until(
'Fluorine', u'Fluorine',
'The target type should be passed using the \'tgt_type\' ' u'The target type should be passed using the \'tgt_type\' '
'argument instead of \'expr_form\'. Support for using ' u'argument instead of \'expr_form\'. Support for using '
'\'expr_form\' will be removed in Salt Fluorine.' u'\'expr_form\' will be removed in Salt Fluorine.'
) )
tgt_type = kwargs.pop('expr_form') tgt_type = kwargs.pop(u'expr_form')
payload_kwargs = self._prep_pub( payload_kwargs = self._prep_pub(
tgt, tgt,
@ -68,21 +68,21 @@ class LocalClient(salt.client.LocalClient):
timeout=timeout, timeout=timeout,
**kwargs) **kwargs)
kind = self.opts['__role'] kind = self.opts[u'__role']
if kind not in kinds.APPL_KINDS: if kind not in kinds.APPL_KINDS:
emsg = ("Invalid application kind = '{0}' for Raet LocalClient.".format(kind)) emsg = (u"Invalid application kind = '{0}' for Raet LocalClient.".format(kind))
log.error(emsg + "\n") log.error(emsg + u"\n")
raise ValueError(emsg) raise ValueError(emsg)
if kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.master], if kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.master],
kinds.APPL_KIND_NAMES[kinds.applKinds.syndic]]: kinds.APPL_KIND_NAMES[kinds.applKinds.syndic]]:
lanename = 'master' lanename = u'master'
else: else:
emsg = ("Unsupported application kind '{0}' for Raet LocalClient.".format(kind)) emsg = (u"Unsupported application kind '{0}' for Raet LocalClient.".format(kind))
log.error(emsg + '\n') log.error(emsg + u'\n')
raise ValueError(emsg) raise ValueError(emsg)
sockdirpath = self.opts['sock_dir'] sockdirpath = self.opts[u'sock_dir']
name = 'client' + nacling.uuid(size=18) name = u'client' + nacling.uuid(size=18)
stack = LaneStack( stack = LaneStack(
name=name, name=name,
lanename=lanename, lanename=lanename,
@ -91,12 +91,12 @@ class LocalClient(salt.client.LocalClient):
manor_yard = RemoteYard( manor_yard = RemoteYard(
stack=stack, stack=stack,
lanename=lanename, lanename=lanename,
name='manor', name=u'manor',
dirpath=sockdirpath) dirpath=sockdirpath)
stack.addRemote(manor_yard) stack.addRemote(manor_yard)
route = {'dst': (None, manor_yard.name, 'local_cmd'), route = {u'dst': (None, manor_yard.name, u'local_cmd'),
'src': (None, stack.local.name, None)} u'src': (None, stack.local.name, None)}
msg = {'route': route, 'load': payload_kwargs} msg = {u'route': route, u'load': payload_kwargs}
stack.transmit(msg) stack.transmit(msg)
stack.serviceAll() stack.serviceAll()
while True: while True:
@ -104,9 +104,9 @@ class LocalClient(salt.client.LocalClient):
stack.serviceAll() stack.serviceAll()
while stack.rxMsgs: while stack.rxMsgs:
msg, sender = stack.rxMsgs.popleft() msg, sender = stack.rxMsgs.popleft()
ret = msg.get('return', {}) ret = msg.get(u'return', {})
if 'ret' in ret: if u'ret' in ret:
stack.server.close() stack.server.close()
return ret['ret'] return ret[u'ret']
stack.server.close() stack.server.close()
return ret return ret
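
The expr_form -> tgt_type shim added here mirrors the one used across the client modules. From the caller's side the change looks like the sketch below; it assumes a configured master and LocalClient, and the 'web*' target is a placeholder.

    import salt.client

    client = salt.client.LocalClient()
    client.cmd('web*', 'test.ping', expr_form='glob')   # still accepted, warns until Fluorine
    client.cmd('web*', 'test.ping', tgt_type='glob')    # preferred spelling
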

File diff suppressed because it is too large


@ -9,6 +9,7 @@ import random
# Import Salt libs # Import Salt libs
import salt.config import salt.config
import salt.utils.versions
import salt.syspaths as syspaths import salt.syspaths as syspaths
from salt.exceptions import SaltClientError # Temporary from salt.exceptions import SaltClientError # Temporary
@ -22,7 +23,7 @@ class SSHClient(object):
.. versionadded:: 2015.5.0 .. versionadded:: 2015.5.0
''' '''
def __init__(self, def __init__(self,
c_path=os.path.join(syspaths.CONFIG_DIR, 'master'), c_path=os.path.join(syspaths.CONFIG_DIR, u'master'),
mopts=None, mopts=None,
disable_custom_roster=False): disable_custom_roster=False):
if mopts: if mopts:
@ -30,15 +31,14 @@ class SSHClient(object):
else: else:
if os.path.isdir(c_path): if os.path.isdir(c_path):
log.warning( log.warning(
'{0} expects a file path not a directory path({1}) to ' u'%s expects a file path not a directory path(%s) to '
'it\'s \'c_path\' keyword argument'.format( u'its \'c_path\' keyword argument',
self.__class__.__name__, c_path self.__class__.__name__, c_path
)
) )
self.opts = salt.config.client_config(c_path) self.opts = salt.config.client_config(c_path)
# Salt API should never offer a custom roster! # Salt API should never offer a custom roster!
self.opts['__disable_custom_roster'] = disable_custom_roster self.opts[u'__disable_custom_roster'] = disable_custom_roster
def _prep_ssh( def _prep_ssh(
self, self,
@ -46,30 +46,30 @@ class SSHClient(object):
fun, fun,
arg=(), arg=(),
timeout=None, timeout=None,
tgt_type='glob', tgt_type=u'glob',
kwarg=None, kwarg=None,
**kwargs): **kwargs):
''' '''
Prepare the arguments Prepare the arguments
''' '''
if 'expr_form' in kwargs: if u'expr_form' in kwargs:
salt.utils.warn_until( salt.utils.versions.warn_until(
'Fluorine', u'Fluorine',
'The target type should be passed using the \'tgt_type\' ' u'The target type should be passed using the \'tgt_type\' '
'argument instead of \'expr_form\'. Support for using ' u'argument instead of \'expr_form\'. Support for using '
'\'expr_form\' will be removed in Salt Fluorine.' u'\'expr_form\' will be removed in Salt Fluorine.'
) )
tgt_type = kwargs.pop('expr_form') tgt_type = kwargs.pop(u'expr_form')
opts = copy.deepcopy(self.opts) opts = copy.deepcopy(self.opts)
opts.update(kwargs) opts.update(kwargs)
if timeout: if timeout:
opts['timeout'] = timeout opts[u'timeout'] = timeout
arg = salt.utils.args.condition_input(arg, kwarg) arg = salt.utils.args.condition_input(arg, kwarg)
opts['argv'] = [fun] + arg opts[u'argv'] = [fun] + arg
opts['selected_target_option'] = tgt_type opts[u'selected_target_option'] = tgt_type
opts['tgt'] = tgt opts[u'tgt'] = tgt
opts['arg'] = arg opts[u'arg'] = arg
return salt.client.ssh.SSH(opts) return salt.client.ssh.SSH(opts)
def cmd_iter( def cmd_iter(
@ -78,8 +78,8 @@ class SSHClient(object):
fun, fun,
arg=(), arg=(),
timeout=None, timeout=None,
tgt_type='glob', tgt_type=u'glob',
ret='', ret=u'',
kwarg=None, kwarg=None,
**kwargs): **kwargs):
''' '''
@ -88,14 +88,14 @@ class SSHClient(object):
.. versionadded:: 2015.5.0 .. versionadded:: 2015.5.0
''' '''
if 'expr_form' in kwargs: if u'expr_form' in kwargs:
salt.utils.warn_until( salt.utils.versions.warn_until(
'Fluorine', u'Fluorine',
'The target type should be passed using the \'tgt_type\' ' u'The target type should be passed using the \'tgt_type\' '
'argument instead of \'expr_form\'. Support for using ' u'argument instead of \'expr_form\'. Support for using '
'\'expr_form\' will be removed in Salt Fluorine.' u'\'expr_form\' will be removed in Salt Fluorine.'
) )
tgt_type = kwargs.pop('expr_form') tgt_type = kwargs.pop(u'expr_form')
ssh = self._prep_ssh( ssh = self._prep_ssh(
tgt, tgt,
@ -105,7 +105,7 @@ class SSHClient(object):
tgt_type, tgt_type,
kwarg, kwarg,
**kwargs) **kwargs)
for ret in ssh.run_iter(jid=kwargs.get('jid', None)): for ret in ssh.run_iter(jid=kwargs.get(u'jid', None)):
yield ret yield ret
def cmd(self, def cmd(self,
@ -113,7 +113,7 @@ class SSHClient(object):
fun, fun,
arg=(), arg=(),
timeout=None, timeout=None,
tgt_type='glob', tgt_type=u'glob',
kwarg=None, kwarg=None,
**kwargs): **kwargs):
''' '''
@ -122,14 +122,14 @@ class SSHClient(object):
.. versionadded:: 2015.5.0 .. versionadded:: 2015.5.0
''' '''
if 'expr_form' in kwargs: if u'expr_form' in kwargs:
salt.utils.warn_until( salt.utils.versions.warn_until(
'Fluorine', u'Fluorine',
'The target type should be passed using the \'tgt_type\' ' u'The target type should be passed using the \'tgt_type\' '
'argument instead of \'expr_form\'. Support for using ' u'argument instead of \'expr_form\'. Support for using '
'\'expr_form\' will be removed in Salt Fluorine.' u'\'expr_form\' will be removed in Salt Fluorine.'
) )
tgt_type = kwargs.pop('expr_form') tgt_type = kwargs.pop(u'expr_form')
ssh = self._prep_ssh( ssh = self._prep_ssh(
tgt, tgt,
@ -140,7 +140,7 @@ class SSHClient(object):
kwarg, kwarg,
**kwargs) **kwargs)
final = {} final = {}
for ret in ssh.run_iter(jid=kwargs.get('jid', None)): for ret in ssh.run_iter(jid=kwargs.get(u'jid', None)):
final.update(ret) final.update(ret)
return final return final
@ -166,16 +166,16 @@ class SSHClient(object):
kwargs = copy.deepcopy(low) kwargs = copy.deepcopy(low)
for ignore in ['tgt', 'fun', 'arg', 'timeout', 'tgt_type', 'kwarg']: for ignore in [u'tgt', u'fun', u'arg', u'timeout', u'tgt_type', u'kwarg']:
if ignore in kwargs: if ignore in kwargs:
del kwargs[ignore] del kwargs[ignore]
return self.cmd(low['tgt'], return self.cmd(low[u'tgt'],
low['fun'], low[u'fun'],
low.get('arg', []), low.get(u'arg', []),
low.get('timeout'), low.get(u'timeout'),
low.get('tgt_type'), low.get(u'tgt_type'),
low.get('kwarg'), low.get(u'kwarg'),
**kwargs) **kwargs)
def cmd_async(self, low, timeout=None): def cmd_async(self, low, timeout=None):
@ -204,8 +204,8 @@ class SSHClient(object):
fun, fun,
arg=(), arg=(),
timeout=None, timeout=None,
tgt_type='glob', tgt_type=u'glob',
ret='', ret=u'',
kwarg=None, kwarg=None,
sub=3, sub=3,
**kwargs): **kwargs):
@ -226,24 +226,24 @@ class SSHClient(object):
.. versionadded:: 2017.7.0 .. versionadded:: 2017.7.0
''' '''
if 'expr_form' in kwargs: if u'expr_form' in kwargs:
salt.utils.warn_until( salt.utils.versions.warn_until(
'Fluorine', u'Fluorine',
'The target type should be passed using the \'tgt_type\' ' u'The target type should be passed using the \'tgt_type\' '
'argument instead of \'expr_form\'. Support for using ' u'argument instead of \'expr_form\'. Support for using '
'\'expr_form\' will be removed in Salt Fluorine.' u'\'expr_form\' will be removed in Salt Fluorine.'
) )
tgt_type = kwargs.pop('expr_form') tgt_type = kwargs.pop(u'expr_form')
minion_ret = self.cmd(tgt, minion_ret = self.cmd(tgt,
'sys.list_functions', u'sys.list_functions',
tgt_type=tgt_type, tgt_type=tgt_type,
**kwargs) **kwargs)
minions = list(minion_ret) minions = list(minion_ret)
random.shuffle(minions) random.shuffle(minions)
f_tgt = [] f_tgt = []
for minion in minions: for minion in minions:
if fun in minion_ret[minion]['return']: if fun in minion_ret[minion][u'return']:
f_tgt.append(minion) f_tgt.append(minion)
if len(f_tgt) >= sub: if len(f_tgt) >= sub:
break break
return self.cmd_iter(f_tgt, fun, arg, timeout, tgt_type='list', ret=ret, kwarg=kwarg, **kwargs) return self.cmd_iter(f_tgt, fun, arg, timeout, tgt_type=u'list', ret=ret, kwarg=kwarg, **kwargs)
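
A minimal sketch of exercising SSHClient after these changes, assuming a roster entry named 'web1' and the default master config; the same expr_form/tgt_type deprecation applies to every method shown above.

    from salt.client.ssh.client import SSHClient

    ssh = SSHClient()
    ret = ssh.cmd('web1', 'test.ping', tgt_type='glob')      # blocking call, merged returns
    for partial in ssh.cmd_iter('web1', 'state.apply', tgt_type='glob'):
        print(partial)                                       # per-host returns as they arrive
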


@ -21,12 +21,12 @@ import salt.utils.vt
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
SSH_PASSWORD_PROMPT_RE = re.compile(r'(?:.*)[Pp]assword(?: for .*)?:', re.M) SSH_PASSWORD_PROMPT_RE = re.compile(r'(?:.*)[Pp]assword(?: for .*)?:', re.M) # future lint: disable=non-unicode-string
KEY_VALID_RE = re.compile(r'.*\(yes\/no\).*') KEY_VALID_RE = re.compile(r'.*\(yes\/no\).*') # future lint: disable=non-unicode-string
# Keep these in sync with ./__init__.py # Keep these in sync with ./__init__.py
RSTR = '_edbc7885e4f9aac9b83b35999b68d015148caf467b78fa39c05f669c0ff89878' RSTR = u'_edbc7885e4f9aac9b83b35999b68d015148caf467b78fa39c05f669c0ff89878'
RSTR_RE = re.compile(r'(?:^|\r?\n)' + RSTR + r'(?:\r?\n|$)') RSTR_RE = re.compile(r'(?:^|\r?\n)' + RSTR + r'(?:\r?\n|$)') # future lint: disable=non-unicode-string
class NoPasswdError(Exception): class NoPasswdError(Exception):
@ -41,7 +41,7 @@ def gen_key(path):
''' '''
Generate a key for use with salt-ssh Generate a key for use with salt-ssh
''' '''
cmd = 'ssh-keygen -P "" -f {0} -t rsa -q'.format(path) cmd = u'ssh-keygen -P "" -f {0} -t rsa -q'.format(path)
if not os.path.isdir(os.path.dirname(path)): if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path)) os.makedirs(os.path.dirname(path))
subprocess.call(cmd, shell=True) subprocess.call(cmd, shell=True)
@ -51,12 +51,12 @@ def gen_shell(opts, **kwargs):
''' '''
Return the correct shell interface for the target system Return the correct shell interface for the target system
''' '''
if kwargs['winrm']: if kwargs[u'winrm']:
try: try:
import saltwinshell import saltwinshell
shell = saltwinshell.Shell(opts, **kwargs) shell = saltwinshell.Shell(opts, **kwargs)
except ImportError: except ImportError:
log.error('The saltwinshell library is not available') log.error(u'The saltwinshell library is not available')
sys.exit(salt.defaults.exitcodes.EX_GENERIC) sys.exit(salt.defaults.exitcodes.EX_GENERIC)
else: else:
shell = Shell(opts, **kwargs) shell = Shell(opts, **kwargs)
@ -86,7 +86,7 @@ class Shell(object):
ssh_options=None): ssh_options=None):
self.opts = opts self.opts = opts
# ssh <ipv6>, but scp [<ipv6]:/path # ssh <ipv6>, but scp [<ipv6]:/path
self.host = host.strip('[]') self.host = host.strip(u'[]')
self.user = user self.user = user
self.port = port self.port = port
self.passwd = str(passwd) if passwd else passwd self.passwd = str(passwd) if passwd else passwd
@ -97,18 +97,18 @@ class Shell(object):
self.mods = mods self.mods = mods
self.identities_only = identities_only self.identities_only = identities_only
self.remote_port_forwards = remote_port_forwards self.remote_port_forwards = remote_port_forwards
self.ssh_options = '' if ssh_options is None else ssh_options self.ssh_options = u'' if ssh_options is None else ssh_options
def get_error(self, errstr): def get_error(self, errstr):
''' '''
Parse out an error and return a targeted error string Parse out an error and return a targeted error string
''' '''
for line in errstr.split('\n'): for line in errstr.split(u'\n'):
if line.startswith('ssh:'): if line.startswith(u'ssh:'):
return line return line
if line.startswith('Pseudo-terminal'): if line.startswith(u'Pseudo-terminal'):
continue continue
if 'to the list of known hosts.' in line: if u'to the list of known hosts.' in line:
continue continue
return line return line
return errstr return errstr
@ -118,36 +118,36 @@ class Shell(object):
Return options for the ssh command base for Salt to call Return options for the ssh command base for Salt to call
''' '''
options = [ options = [
'KbdInteractiveAuthentication=no', u'KbdInteractiveAuthentication=no',
] ]
if self.passwd: if self.passwd:
options.append('PasswordAuthentication=yes') options.append(u'PasswordAuthentication=yes')
else: else:
options.append('PasswordAuthentication=no') options.append(u'PasswordAuthentication=no')
if self.opts.get('_ssh_version', (0,)) > (4, 9): if self.opts.get(u'_ssh_version', (0,)) > (4, 9):
options.append('GSSAPIAuthentication=no') options.append(u'GSSAPIAuthentication=no')
options.append('ConnectTimeout={0}'.format(self.timeout)) options.append(u'ConnectTimeout={0}'.format(self.timeout))
if self.opts.get('ignore_host_keys'): if self.opts.get(u'ignore_host_keys'):
options.append('StrictHostKeyChecking=no') options.append(u'StrictHostKeyChecking=no')
if self.opts.get('no_host_keys'): if self.opts.get(u'no_host_keys'):
options.extend(['StrictHostKeyChecking=no', options.extend([u'StrictHostKeyChecking=no',
'UserKnownHostsFile=/dev/null']) u'UserKnownHostsFile=/dev/null'])
known_hosts = self.opts.get('known_hosts_file') known_hosts = self.opts.get(u'known_hosts_file')
if known_hosts and os.path.isfile(known_hosts): if known_hosts and os.path.isfile(known_hosts):
options.append('UserKnownHostsFile={0}'.format(known_hosts)) options.append(u'UserKnownHostsFile={0}'.format(known_hosts))
if self.port: if self.port:
options.append('Port={0}'.format(self.port)) options.append(u'Port={0}'.format(self.port))
if self.priv: if self.priv:
options.append('IdentityFile={0}'.format(self.priv)) options.append(u'IdentityFile={0}'.format(self.priv))
if self.user: if self.user:
options.append('User={0}'.format(self.user)) options.append(u'User={0}'.format(self.user))
if self.identities_only: if self.identities_only:
options.append('IdentitiesOnly=yes') options.append(u'IdentitiesOnly=yes')
ret = [] ret = []
for option in options: for option in options:
ret.append('-o {0} '.format(option)) ret.append(u'-o {0} '.format(option))
return ''.join(ret) return u''.join(ret)
def _passwd_opts(self): def _passwd_opts(self):
''' '''
@ -156,42 +156,42 @@ class Shell(object):
# TODO ControlMaster does not work without ControlPath # TODO ControlMaster does not work without ControlPath
# user could take advantage of it if they set ControlPath in their # user could take advantage of it if they set ControlPath in their
# ssh config. Also, ControlPersist not widely available. # ssh config. Also, ControlPersist not widely available.
options = ['ControlMaster=auto', options = [u'ControlMaster=auto',
'StrictHostKeyChecking=no', u'StrictHostKeyChecking=no',
] ]
if self.opts['_ssh_version'] > (4, 9): if self.opts[u'_ssh_version'] > (4, 9):
options.append('GSSAPIAuthentication=no') options.append(u'GSSAPIAuthentication=no')
options.append('ConnectTimeout={0}'.format(self.timeout)) options.append(u'ConnectTimeout={0}'.format(self.timeout))
if self.opts.get('ignore_host_keys'): if self.opts.get(u'ignore_host_keys'):
options.append('StrictHostKeyChecking=no') options.append(u'StrictHostKeyChecking=no')
if self.opts.get('no_host_keys'): if self.opts.get(u'no_host_keys'):
options.extend(['StrictHostKeyChecking=no', options.extend([u'StrictHostKeyChecking=no',
'UserKnownHostsFile=/dev/null']) u'UserKnownHostsFile=/dev/null'])
if self.passwd: if self.passwd:
options.extend(['PasswordAuthentication=yes', options.extend([u'PasswordAuthentication=yes',
'PubkeyAuthentication=yes']) u'PubkeyAuthentication=yes'])
else: else:
options.extend(['PasswordAuthentication=no', options.extend([u'PasswordAuthentication=no',
'PubkeyAuthentication=yes', u'PubkeyAuthentication=yes',
'KbdInteractiveAuthentication=no', u'KbdInteractiveAuthentication=no',
'ChallengeResponseAuthentication=no', u'ChallengeResponseAuthentication=no',
'BatchMode=yes']) u'BatchMode=yes'])
if self.port: if self.port:
options.append('Port={0}'.format(self.port)) options.append(u'Port={0}'.format(self.port))
if self.user: if self.user:
options.append('User={0}'.format(self.user)) options.append(u'User={0}'.format(self.user))
if self.identities_only: if self.identities_only:
options.append('IdentitiesOnly=yes') options.append(u'IdentitiesOnly=yes')
ret = [] ret = []
for option in options: for option in options:
ret.append('-o {0} '.format(option)) ret.append(u'-o {0} '.format(option))
return ''.join(ret) return u''.join(ret)
def _ssh_opts(self): def _ssh_opts(self):
return ' '.join(['-o {0}'.format(opt) return u' '.join([u'-o {0}'.format(opt)
for opt in self.ssh_options]) for opt in self.ssh_options])
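
Each option collected in _key_opts()/_passwd_opts() above ends up as a '-o Key=Value ' fragment on the ssh command line. A standalone sketch of that join, with example option values:

    options = [
        u'KbdInteractiveAuthentication=no',
        u'PasswordAuthentication=no',
        u'ConnectTimeout=65',
        u'Port=2222',
        u'User=deploy',
        u'IdentitiesOnly=yes',
    ]
    opt_str = u''.join(u'-o {0} '.format(opt) for opt in options)
    # '-o KbdInteractiveAuthentication=no -o PasswordAuthentication=no -o ConnectTimeout=65 ...'
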
def _copy_id_str_old(self): def _copy_id_str_old(self):
''' '''
@ -200,9 +200,9 @@ class Shell(object):
if self.passwd: if self.passwd:
# Using single quotes prevents shell expansion and # Using single quotes prevents shell expansion and
# passwords containing '$' # passwords containing '$'
return "{0} {1} '{2} -p {3} {4} {5}@{6}'".format( return u"{0} {1} '{2} -p {3} {4} {5}@{6}'".format(
'ssh-copy-id', u'ssh-copy-id',
'-i {0}.pub'.format(self.priv), u'-i {0}.pub'.format(self.priv),
self._passwd_opts(), self._passwd_opts(),
self.port, self.port,
self._ssh_opts(), self._ssh_opts(),
@ -218,9 +218,9 @@ class Shell(object):
if self.passwd: if self.passwd:
# Using single quotes prevents shell expansion and # Using single quotes prevents shell expansion and
# passwords containing '$' # passwords containing '$'
return "{0} {1} {2} -p {3} {4} {5}@{6}".format( return u"{0} {1} {2} -p {3} {4} {5}@{6}".format(
'ssh-copy-id', u'ssh-copy-id',
'-i {0}.pub'.format(self.priv), u'-i {0}.pub'.format(self.priv),
self._passwd_opts(), self._passwd_opts(),
self.port, self.port,
self._ssh_opts(), self._ssh_opts(),
@ -233,11 +233,11 @@ class Shell(object):
Execute ssh-copy-id to plant the id file on the target Execute ssh-copy-id to plant the id file on the target
''' '''
stdout, stderr, retcode = self._run_cmd(self._copy_id_str_old()) stdout, stderr, retcode = self._run_cmd(self._copy_id_str_old())
if salt.defaults.exitcodes.EX_OK != retcode and 'Usage' in stderr: if salt.defaults.exitcodes.EX_OK != retcode and u'Usage' in stderr:
stdout, stderr, retcode = self._run_cmd(self._copy_id_str_new()) stdout, stderr, retcode = self._run_cmd(self._copy_id_str_new())
return stdout, stderr, retcode return stdout, stderr, retcode
def _cmd_str(self, cmd, ssh='ssh'): def _cmd_str(self, cmd, ssh=u'ssh'):
''' '''
Return the cmd string to execute Return the cmd string to execute
''' '''
@ -246,21 +246,21 @@ class Shell(object):
# need to deliver the SHIM to the remote host and execute it there # need to deliver the SHIM to the remote host and execute it there
command = [ssh] command = [ssh]
if ssh != 'scp': if ssh != u'scp':
command.append(self.host) command.append(self.host)
if self.tty and ssh == 'ssh': if self.tty and ssh == u'ssh':
command.append('-t -t') command.append(u'-t -t')
if self.passwd or self.priv: if self.passwd or self.priv:
command.append(self.priv and self._key_opts() or self._passwd_opts()) command.append(self.priv and self._key_opts() or self._passwd_opts())
if ssh != 'scp' and self.remote_port_forwards: if ssh != u'scp' and self.remote_port_forwards:
command.append(' '.join(['-R {0}'.format(item) command.append(u' '.join([u'-R {0}'.format(item)
for item in self.remote_port_forwards.split(',')])) for item in self.remote_port_forwards.split(u',')]))
if self.ssh_options: if self.ssh_options:
command.append(self._ssh_opts()) command.append(self._ssh_opts())
command.append(cmd) command.append(cmd)
return ' '.join(command) return u' '.join(command)
def _old_run_cmd(self, cmd): def _old_run_cmd(self, cmd):
''' '''
@ -277,7 +277,7 @@ class Shell(object):
data = proc.communicate() data = proc.communicate()
return data[0], data[1], proc.returncode return data[0], data[1], proc.returncode
except Exception: except Exception:
return ('local', 'Unknown Error', None) return (u'local', u'Unknown Error', None)
def _run_nb_cmd(self, cmd): def _run_nb_cmd(self, cmd):
''' '''
@ -301,7 +301,7 @@ class Shell(object):
err = self.get_error(err) err = self.get_error(err)
yield out, err, rcode yield out, err, rcode
except Exception: except Exception:
yield ('', 'Unknown Error', None) yield (u'', u'Unknown Error', None)
def exec_nb_cmd(self, cmd): def exec_nb_cmd(self, cmd):
''' '''
@ -312,9 +312,9 @@ class Shell(object):
rcode = None rcode = None
cmd = self._cmd_str(cmd) cmd = self._cmd_str(cmd)
logmsg = 'Executing non-blocking command: {0}'.format(cmd) logmsg = u'Executing non-blocking command: {0}'.format(cmd)
if self.passwd: if self.passwd:
logmsg = logmsg.replace(self.passwd, ('*' * 6)) logmsg = logmsg.replace(self.passwd, (u'*' * 6))
log.debug(logmsg) log.debug(logmsg)
for out, err, rcode in self._run_nb_cmd(cmd): for out, err, rcode in self._run_nb_cmd(cmd):
@ -323,7 +323,7 @@ class Shell(object):
if err is not None: if err is not None:
r_err.append(err) r_err.append(err)
yield None, None, None yield None, None, None
yield ''.join(r_out), ''.join(r_err), rcode yield u''.join(r_out), u''.join(r_err), rcode
def exec_cmd(self, cmd): def exec_cmd(self, cmd):
''' '''
@ -331,11 +331,11 @@ class Shell(object):
''' '''
cmd = self._cmd_str(cmd) cmd = self._cmd_str(cmd)
logmsg = 'Executing command: {0}'.format(cmd) logmsg = u'Executing command: {0}'.format(cmd)
if self.passwd: if self.passwd:
logmsg = logmsg.replace(self.passwd, ('*' * 6)) logmsg = logmsg.replace(self.passwd, (u'*' * 6))
if 'decode("base64")' in logmsg or 'base64.b64decode(' in logmsg: if u'decode("base64")' in logmsg or u'base64.b64decode(' in logmsg:
log.debug('Executed SHIM command. Command logged to TRACE') log.debug(u'Executed SHIM command. Command logged to TRACE')
log.trace(logmsg) log.trace(logmsg)
else: else:
log.debug(logmsg) log.debug(logmsg)
@ -348,19 +348,19 @@ class Shell(object):
scp a file or files to a remote system scp a file or files to a remote system
''' '''
if makedirs: if makedirs:
self.exec_cmd('mkdir -p {0}'.format(os.path.dirname(remote))) self.exec_cmd(u'mkdir -p {0}'.format(os.path.dirname(remote)))
# scp needs [<ipv6} # scp needs [<ipv6}
host = self.host host = self.host
if ':' in host: if u':' in host:
host = '[{0}]'.format(host) host = u'[{0}]'.format(host)
cmd = '{0} {1}:{2}'.format(local, host, remote) cmd = u'{0} {1}:{2}'.format(local, host, remote)
cmd = self._cmd_str(cmd, ssh='scp') cmd = self._cmd_str(cmd, ssh=u'scp')
logmsg = 'Executing command: {0}'.format(cmd) logmsg = u'Executing command: {0}'.format(cmd)
if self.passwd: if self.passwd:
logmsg = logmsg.replace(self.passwd, ('*' * 6)) logmsg = logmsg.replace(self.passwd, (u'*' * 6))
log.debug(logmsg) log.debug(logmsg)
return self._run_cmd(cmd) return self._run_cmd(cmd)
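
send() above wraps IPv6 hosts in brackets before handing the source/destination pair to scp. A tiny standalone illustration; the host and both paths are placeholders.

    local, remote = u'/tmp/salt-thin.tgz', u'/var/tmp/salt-thin.tgz'
    host = u'2001:db8::10'
    if u':' in host:
        host = u'[{0}]'.format(host)          # scp needs [addr]:/path for IPv6
    cmd = u'{0} {1}:{2}'.format(local, host, remote)
    # '/tmp/salt-thin.tgz [2001:db8::10]:/var/tmp/salt-thin.tgz'
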
@ -374,16 +374,16 @@ class Shell(object):
cmd, cmd,
shell=True, shell=True,
log_stdout=True, log_stdout=True,
log_stdout_level='trace', log_stdout_level=u'trace',
log_stderr=True, log_stderr=True,
log_stderr_level='trace', log_stderr_level=u'trace',
stream_stdout=False, stream_stdout=False,
stream_stderr=False) stream_stderr=False)
sent_passwd = 0 sent_passwd = 0
send_password = True send_password = True
ret_stdout = '' ret_stdout = u''
ret_stderr = '' ret_stderr = u''
old_stdout = '' old_stdout = u''
try: try:
while term.has_unread_data: while term.has_unread_data:
@ -400,26 +400,26 @@ class Shell(object):
send_password = False send_password = False
if buff and SSH_PASSWORD_PROMPT_RE.search(buff) and send_password: if buff and SSH_PASSWORD_PROMPT_RE.search(buff) and send_password:
if not self.passwd: if not self.passwd:
return '', 'Permission denied, no authentication information', 254 return u'', u'Permission denied, no authentication information', 254
if sent_passwd < passwd_retries: if sent_passwd < passwd_retries:
term.sendline(self.passwd) term.sendline(self.passwd)
sent_passwd += 1 sent_passwd += 1
continue continue
else: else:
# asking for a password, and we can't seem to send it # asking for a password, and we can't seem to send it
return '', 'Password authentication failed', 254 return u'', u'Password authentication failed', 254
elif buff and KEY_VALID_RE.search(buff): elif buff and KEY_VALID_RE.search(buff):
if key_accept: if key_accept:
term.sendline('yes') term.sendline(u'yes')
continue continue
else: else:
term.sendline('no') term.sendline(u'no')
ret_stdout = ('The host key needs to be accepted, to ' ret_stdout = (u'The host key needs to be accepted, to '
'auto accept run salt-ssh with the -i ' u'auto accept run salt-ssh with the -i '
'flag:\n{0}').format(stdout) u'flag:\n{0}').format(stdout)
return ret_stdout, '', 254 return ret_stdout, u'', 254
elif buff and buff.endswith('_||ext_mods||_'): elif buff and buff.endswith(u'_||ext_mods||_'):
mods_raw = json.dumps(self.mods, separators=(',', ':')) + '|_E|0|' mods_raw = json.dumps(self.mods, separators=(u',', u':')) + u'|_E|0|'
term.sendline(mods_raw) term.sendline(mods_raw)
if stdout: if stdout:
old_stdout = stdout old_stdout = stdout


@ -18,8 +18,8 @@ import os
import stat import stat
import subprocess import subprocess
THIN_ARCHIVE = 'salt-thin.tgz' THIN_ARCHIVE = u'salt-thin.tgz'
EXT_ARCHIVE = 'salt-ext_mods.tgz' EXT_ARCHIVE = u'salt-ext_mods.tgz'
# Keep these in sync with salt/defaults/exitcodes.py # Keep these in sync with salt/defaults/exitcodes.py
EX_THIN_DEPLOY = 11 EX_THIN_DEPLOY = 11
@ -30,7 +30,9 @@ EX_CANTCREAT = 73
class OBJ(object): class OBJ(object):
"""An empty class for holding instance attribute values.""" '''
An empty class for holding instance attribute values.
'''
pass pass
@ -52,7 +54,7 @@ def get_system_encoding():
# and reset to None # and reset to None
encoding = None encoding = None
if not sys.platform.startswith('win') and sys.stdin is not None: if not sys.platform.startswith(u'win') and sys.stdin is not None:
# On linux we can rely on sys.stdin for the encoding since it # On linux we can rely on sys.stdin for the encoding since it
# most commonly matches the filesystem encoding. This however # most commonly matches the filesystem encoding. This however
# does not apply to windows # does not apply to windows
@ -78,16 +80,16 @@ def get_system_encoding():
# the way back to ascii # the way back to ascii
encoding = sys.getdefaultencoding() encoding = sys.getdefaultencoding()
if not encoding: if not encoding:
if sys.platform.startswith('darwin'): if sys.platform.startswith(u'darwin'):
# Mac OS X uses UTF-8 # Mac OS X uses UTF-8
encoding = 'utf-8' encoding = u'utf-8'
elif sys.platform.startswith('win'): elif sys.platform.startswith(u'win'):
# Windows uses a configurable encoding; on Windows, Python uses the name "mbcs" # Windows uses a configurable encoding; on Windows, Python uses the name "mbcs"
# to refer to whatever the currently configured encoding is. # to refer to whatever the currently configured encoding is.
encoding = 'mbcs' encoding = u'mbcs'
else: else:
# On linux default to ascii as a last resort # On linux default to ascii as a last resort
encoding = 'ascii' encoding = u'ascii'
return encoding return encoding
@ -95,14 +97,14 @@ def is_windows():
''' '''
Simple function to return if a host is Windows or not Simple function to return if a host is Windows or not
''' '''
return sys.platform.startswith('win') return sys.platform.startswith(u'win')
def need_deployment(): def need_deployment():
""" '''
Salt thin needs to be deployed - prep the target directory and emit the Salt thin needs to be deployed - prep the target directory and emit the
delimiter and exit code that signals a required deployment. delimiter and exit code that signals a required deployment.
""" '''
if os.path.exists(OPTIONS.saltdir): if os.path.exists(OPTIONS.saltdir):
shutil.rmtree(OPTIONS.saltdir) shutil.rmtree(OPTIONS.saltdir)
old_umask = os.umask(0o077) old_umask = os.umask(0o077)
@ -119,39 +121,43 @@ def need_deployment():
# Attack detected # Attack detected
need_deployment() need_deployment()
# If SUDOing then also give the super user group write permissions # If SUDOing then also give the super user group write permissions
sudo_gid = os.environ.get('SUDO_GID') sudo_gid = os.environ.get(u'SUDO_GID')
if sudo_gid: if sudo_gid:
try: try:
os.chown(OPTIONS.saltdir, -1, int(sudo_gid)) os.chown(OPTIONS.saltdir, -1, int(sudo_gid))
stt = os.stat(OPTIONS.saltdir) stt = os.stat(OPTIONS.saltdir)
os.chmod(OPTIONS.saltdir, stt.st_mode | stat.S_IWGRP | stat.S_IRGRP | stat.S_IXGRP) os.chmod(OPTIONS.saltdir, stt.st_mode | stat.S_IWGRP | stat.S_IRGRP | stat.S_IXGRP)
except OSError: except OSError:
sys.stdout.write('\n\nUnable to set permissions on thin directory.\nIf sudo_user is set ' sys.stdout.write(u'\n\nUnable to set permissions on thin directory.\nIf sudo_user is set '
'and is not root, be certain the user is in the same group\nas the login user') u'and is not root, be certain the user is in the same group\nas the login user')
sys.exit(1) sys.exit(1)
# Delimiter emitted on stdout *only* to indicate shim message to master. # Delimiter emitted on stdout *only* to indicate shim message to master.
sys.stdout.write("{0}\ndeploy\n".format(OPTIONS.delimiter)) sys.stdout.write(u"{0}\ndeploy\n".format(OPTIONS.delimiter))
sys.exit(EX_THIN_DEPLOY) sys.exit(EX_THIN_DEPLOY)
# Adapted from salt.utils.get_hash() # Adapted from salt.utils.get_hash()
def get_hash(path, form='sha1', chunk_size=4096): def get_hash(path, form=u'sha1', chunk_size=4096):
"""Generate a hash digest string for a file.""" '''
Generate a hash digest string for a file.
'''
try: try:
hash_type = getattr(hashlib, form) hash_type = getattr(hashlib, form)
except AttributeError: except AttributeError:
raise ValueError('Invalid hash type: {0}'.format(form)) raise ValueError(u'Invalid hash type: {0}'.format(form))
with open(path, 'rb') as ifile: with open(path, u'rb') as ifile:
hash_obj = hash_type() hash_obj = hash_type()
# read the file in in chunks, not the entire file # read the file in in chunks, not the entire file
for chunk in iter(lambda: ifile.read(chunk_size), b''): for chunk in iter(lambda: ifile.read(chunk_size), b''): # future lint: disable=non-unicode-string
hash_obj.update(chunk) hash_obj.update(chunk)
return hash_obj.hexdigest() return hash_obj.hexdigest()
def unpack_thin(thin_path): def unpack_thin(thin_path):
"""Unpack the Salt thin archive.""" '''
Unpack the Salt thin archive.
'''
tfile = tarfile.TarFile.gzopen(thin_path) tfile = tarfile.TarFile.gzopen(thin_path)
old_umask = os.umask(0o077) old_umask = os.umask(0o077)
tfile.extractall(path=OPTIONS.saltdir) tfile.extractall(path=OPTIONS.saltdir)
@ -161,34 +167,40 @@ def unpack_thin(thin_path):
def need_ext(): def need_ext():
"""Signal that external modules need to be deployed.""" '''
sys.stdout.write("{0}\next_mods\n".format(OPTIONS.delimiter)) Signal that external modules need to be deployed.
'''
sys.stdout.write(u"{0}\next_mods\n".format(OPTIONS.delimiter))
sys.exit(EX_MOD_DEPLOY) sys.exit(EX_MOD_DEPLOY)
def unpack_ext(ext_path): def unpack_ext(ext_path):
"""Unpack the external modules.""" '''
Unpack the external modules.
'''
modcache = os.path.join( modcache = os.path.join(
OPTIONS.saltdir, OPTIONS.saltdir,
'running_data', u'running_data',
'var', u'var',
'cache', u'cache',
'salt', u'salt',
'minion', u'minion',
'extmods') u'extmods')
tfile = tarfile.TarFile.gzopen(ext_path) tfile = tarfile.TarFile.gzopen(ext_path)
old_umask = os.umask(0o077) old_umask = os.umask(0o077)
tfile.extractall(path=modcache) tfile.extractall(path=modcache)
tfile.close() tfile.close()
os.umask(old_umask) os.umask(old_umask)
os.unlink(ext_path) os.unlink(ext_path)
ver_path = os.path.join(modcache, 'ext_version') ver_path = os.path.join(modcache, u'ext_version')
ver_dst = os.path.join(OPTIONS.saltdir, 'ext_version') ver_dst = os.path.join(OPTIONS.saltdir, u'ext_version')
shutil.move(ver_path, ver_dst) shutil.move(ver_path, ver_dst)
def main(argv): # pylint: disable=W0613 def main(argv): # pylint: disable=W0613
"""Main program body""" '''
Main program body
'''
thin_path = os.path.join(OPTIONS.saltdir, THIN_ARCHIVE) thin_path = os.path.join(OPTIONS.saltdir, THIN_ARCHIVE)
if os.path.isfile(thin_path): if os.path.isfile(thin_path):
if OPTIONS.checksum != get_hash(thin_path, OPTIONS.hashfunc): if OPTIONS.checksum != get_hash(thin_path, OPTIONS.hashfunc):
@ -196,8 +208,8 @@ def main(argv): # pylint: disable=W0613
unpack_thin(thin_path) unpack_thin(thin_path)
# Salt thin now is available to use # Salt thin now is available to use
else: else:
if not sys.platform.startswith('win'): if not sys.platform.startswith(u'win'):
scpstat = subprocess.Popen(['/bin/sh', '-c', 'command -v scp']).wait() scpstat = subprocess.Popen([u'/bin/sh', u'-c', u'command -v scp']).wait()
if scpstat != 0: if scpstat != 0:
sys.exit(EX_SCP_NOT_FOUND) sys.exit(EX_SCP_NOT_FOUND)
@ -206,46 +218,46 @@ def main(argv): # pylint: disable=W0613
if not os.path.isdir(OPTIONS.saltdir): if not os.path.isdir(OPTIONS.saltdir):
sys.stderr.write( sys.stderr.write(
'ERROR: salt path "{0}" exists but is' u'ERROR: salt path "{0}" exists but is'
' not a directory\n'.format(OPTIONS.saltdir) u' not a directory\n'.format(OPTIONS.saltdir)
) )
sys.exit(EX_CANTCREAT) sys.exit(EX_CANTCREAT)
version_path = os.path.normpath(os.path.join(OPTIONS.saltdir, 'version')) version_path = os.path.normpath(os.path.join(OPTIONS.saltdir, u'version'))
if not os.path.exists(version_path) or not os.path.isfile(version_path): if not os.path.exists(version_path) or not os.path.isfile(version_path):
sys.stderr.write( sys.stderr.write(
'WARNING: Unable to locate current thin ' u'WARNING: Unable to locate current thin '
' version: {0}.\n'.format(version_path) u' version: {0}.\n'.format(version_path)
) )
need_deployment() need_deployment()
with open(version_path, 'r') as vpo: with open(version_path, u'r') as vpo:
cur_version = vpo.readline().strip() cur_version = vpo.readline().strip()
if cur_version != OPTIONS.version: if cur_version != OPTIONS.version:
sys.stderr.write( sys.stderr.write(
'WARNING: current thin version {0}' u'WARNING: current thin version {0}'
' is not up-to-date with {1}.\n'.format( u' is not up-to-date with {1}.\n'.format(
cur_version, OPTIONS.version cur_version, OPTIONS.version
) )
) )
need_deployment() need_deployment()
# Salt thin exists and is up-to-date - fall through and use it # Salt thin exists and is up-to-date - fall through and use it
salt_call_path = os.path.join(OPTIONS.saltdir, 'salt-call') salt_call_path = os.path.join(OPTIONS.saltdir, u'salt-call')
if not os.path.isfile(salt_call_path): if not os.path.isfile(salt_call_path):
sys.stderr.write('ERROR: thin is missing "{0}"\n'.format(salt_call_path)) sys.stderr.write(u'ERROR: thin is missing "{0}"\n'.format(salt_call_path))
need_deployment() need_deployment()
with open(os.path.join(OPTIONS.saltdir, 'minion'), 'w') as config: with open(os.path.join(OPTIONS.saltdir, u'minion'), u'w') as config:
config.write(OPTIONS.config + '\n') config.write(OPTIONS.config + u'\n')
if OPTIONS.ext_mods: if OPTIONS.ext_mods:
ext_path = os.path.join(OPTIONS.saltdir, EXT_ARCHIVE) ext_path = os.path.join(OPTIONS.saltdir, EXT_ARCHIVE)
if os.path.exists(ext_path): if os.path.exists(ext_path):
unpack_ext(ext_path) unpack_ext(ext_path)
else: else:
version_path = os.path.join(OPTIONS.saltdir, 'ext_version') version_path = os.path.join(OPTIONS.saltdir, u'ext_version')
if not os.path.exists(version_path) or not os.path.isfile(version_path): if not os.path.exists(version_path) or not os.path.isfile(version_path):
need_ext() need_ext()
with open(version_path, 'r') as vpo: with open(version_path, u'r') as vpo:
cur_version = vpo.readline().strip() cur_version = vpo.readline().strip()
if cur_version != OPTIONS.ext_mods: if cur_version != OPTIONS.ext_mods:
need_ext() need_ext()
@ -258,38 +270,38 @@ def main(argv): # pylint: disable=W0613
salt_argv = [ salt_argv = [
sys.executable, sys.executable,
salt_call_path, salt_call_path,
'--retcode-passthrough', u'--retcode-passthrough',
'--local', u'--local',
'--metadata', u'--metadata',
'--out', 'json', u'--out', u'json',
'-l', 'quiet', u'-l', u'quiet',
'-c', OPTIONS.saltdir u'-c', OPTIONS.saltdir
] ]
try: try:
if argv_prepared[-1].startswith('--no-parse='): if argv_prepared[-1].startswith(u'--no-parse='):
salt_argv.append(argv_prepared.pop(-1)) salt_argv.append(argv_prepared.pop(-1))
except (IndexError, TypeError): except (IndexError, TypeError):
pass pass
salt_argv.append('--') salt_argv.append(u'--')
salt_argv.extend(argv_prepared) salt_argv.extend(argv_prepared)
sys.stderr.write('SALT_ARGV: {0}\n'.format(salt_argv)) sys.stderr.write(u'SALT_ARGV: {0}\n'.format(salt_argv))
# Only emit the delimiter on *both* stdout and stderr when completely successful. # Only emit the delimiter on *both* stdout and stderr when completely successful.
# Yes, the flush() is necessary. # Yes, the flush() is necessary.
sys.stdout.write(OPTIONS.delimiter + '\n') sys.stdout.write(OPTIONS.delimiter + u'\n')
sys.stdout.flush() sys.stdout.flush()
if not OPTIONS.tty: if not OPTIONS.tty:
sys.stderr.write(OPTIONS.delimiter + '\n') sys.stderr.write(OPTIONS.delimiter + u'\n')
sys.stderr.flush() sys.stderr.flush()
if OPTIONS.cmd_umask is not None: if OPTIONS.cmd_umask is not None:
old_umask = os.umask(OPTIONS.cmd_umask) old_umask = os.umask(OPTIONS.cmd_umask)
if OPTIONS.tty: if OPTIONS.tty:
# Returns bytes instead of string on python 3 # Returns bytes instead of string on python 3
stdout, _ = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate() stdout, _ = subprocess.Popen(salt_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors="replace")) sys.stdout.write(stdout.decode(encoding=get_system_encoding(), errors=u"replace"))
sys.stdout.flush() sys.stdout.flush()
if OPTIONS.wipe: if OPTIONS.wipe:
shutil.rmtree(OPTIONS.saltdir) shutil.rmtree(OPTIONS.saltdir)
@ -301,5 +313,5 @@ def main(argv): # pylint: disable=W0613
if OPTIONS.cmd_umask is not None: if OPTIONS.cmd_umask is not None:
os.umask(old_umask) os.umask(old_umask)
if __name__ == '__main__': if __name__ == u'__main__':
sys.exit(main(sys.argv)) sys.exit(main(sys.argv))


@ -24,6 +24,9 @@ import salt.state
import salt.loader import salt.loader
import salt.minion import salt.minion
# Import 3rd-party libs
from salt.ext import six
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -82,35 +85,33 @@ class SSHHighState(salt.state.BaseHighState):
''' '''
Evaluate master_tops locally Evaluate master_tops locally
''' '''
if 'id' not in self.opts: if u'id' not in self.opts:
log.error('Received call for external nodes without an id') log.error(u'Received call for external nodes without an id')
return {} return {}
if not salt.utils.verify.valid_id(self.opts, self.opts['id']): if not salt.utils.verify.valid_id(self.opts, self.opts[u'id']):
return {} return {}
# Evaluate all configured master_tops interfaces # Evaluate all configured master_tops interfaces
grains = {} grains = {}
ret = {} ret = {}
if 'grains' in self.opts: if u'grains' in self.opts:
grains = self.opts['grains'] grains = self.opts[u'grains']
for fun in self.tops: for fun in self.tops:
if fun not in self.opts.get('master_tops', {}): if fun not in self.opts.get(u'master_tops', {}):
continue continue
try: try:
ret.update(self.tops[fun](opts=self.opts, grains=grains)) ret.update(self.tops[fun](opts=self.opts, grains=grains))
except Exception as exc: except Exception as exc:
# If anything happens in the top generation, log it and move on # If anything happens in the top generation, log it and move on
log.error( log.error(
'Top function {0} failed with error {1} for minion ' u'Top function %s failed with error %s for minion %s',
'{2}'.format( fun, exc, self.opts[u'id']
fun, exc, self.opts['id']
)
) )
return ret return ret
def lowstate_file_refs(chunks, extras=''): def lowstate_file_refs(chunks, extras=u''):
''' '''
Create a list of file ref objects to reconcile Create a list of file ref objects to reconcile
''' '''
@ -118,12 +119,12 @@ def lowstate_file_refs(chunks, extras=''):
for chunk in chunks: for chunk in chunks:
if not isinstance(chunk, dict): if not isinstance(chunk, dict):
continue continue
saltenv = 'base' saltenv = u'base'
crefs = [] crefs = []
for state in chunk: for state in chunk:
if state == '__env__': if state == u'__env__':
saltenv = chunk[state] saltenv = chunk[state]
elif state.startswith('__'): elif state.startswith(u'__'):
continue continue
crefs.extend(salt_refs(chunk[state])) crefs.extend(salt_refs(chunk[state]))
if crefs: if crefs:
@ -131,7 +132,7 @@ def lowstate_file_refs(chunks, extras=''):
refs[saltenv] = [] refs[saltenv] = []
refs[saltenv].append(crefs) refs[saltenv].append(crefs)
if extras: if extras:
extra_refs = extras.split(',') extra_refs = extras.split(u',')
if extra_refs: if extra_refs:
for env in refs: for env in refs:
for x in extra_refs: for x in extra_refs:
@ -143,10 +144,10 @@ def salt_refs(data, ret=None):
''' '''
Pull salt file references out of the states Pull salt file references out of the states
''' '''
proto = 'salt://' proto = u'salt://'
if ret is None: if ret is None:
ret = [] ret = []
if isinstance(data, str): if isinstance(data, six.string_types):
if data.startswith(proto) and data not in ret: if data.startswith(proto) and data not in ret:
ret.append(data) ret.append(data)
if isinstance(data, list): if isinstance(data, list):
@ -165,38 +166,38 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
''' '''
gendir = tempfile.mkdtemp() gendir = tempfile.mkdtemp()
trans_tar = salt.utils.files.mkstemp() trans_tar = salt.utils.files.mkstemp()
lowfn = os.path.join(gendir, 'lowstate.json') lowfn = os.path.join(gendir, u'lowstate.json')
pillarfn = os.path.join(gendir, 'pillar.json') pillarfn = os.path.join(gendir, u'pillar.json')
roster_grainsfn = os.path.join(gendir, 'roster_grains.json') roster_grainsfn = os.path.join(gendir, u'roster_grains.json')
sync_refs = [ sync_refs = [
[salt.utils.url.create('_modules')], [salt.utils.url.create(u'_modules')],
[salt.utils.url.create('_states')], [salt.utils.url.create(u'_states')],
[salt.utils.url.create('_grains')], [salt.utils.url.create(u'_grains')],
[salt.utils.url.create('_renderers')], [salt.utils.url.create(u'_renderers')],
[salt.utils.url.create('_returners')], [salt.utils.url.create(u'_returners')],
[salt.utils.url.create('_output')], [salt.utils.url.create(u'_output')],
[salt.utils.url.create('_utils')], [salt.utils.url.create(u'_utils')],
] ]
with salt.utils.files.fopen(lowfn, 'w+') as fp_: with salt.utils.files.fopen(lowfn, u'w+') as fp_:
fp_.write(json.dumps(chunks)) fp_.write(json.dumps(chunks))
if pillar: if pillar:
with salt.utils.files.fopen(pillarfn, 'w+') as fp_: with salt.utils.files.fopen(pillarfn, u'w+') as fp_:
fp_.write(json.dumps(pillar)) fp_.write(json.dumps(pillar))
if roster_grains: if roster_grains:
with salt.utils.files.fopen(roster_grainsfn, 'w+') as fp_: with salt.utils.files.fopen(roster_grainsfn, u'w+') as fp_:
fp_.write(json.dumps(roster_grains)) fp_.write(json.dumps(roster_grains))
if id_ is None: if id_ is None:
id_ = '' id_ = u''
try: try:
cachedir = os.path.join('salt-ssh', id_).rstrip(os.sep) cachedir = os.path.join(u'salt-ssh', id_).rstrip(os.sep)
except AttributeError: except AttributeError:
# Minion ID should always be a str, but don't let an int break this # Minion ID should always be a str, but don't let an int break this
cachedir = os.path.join('salt-ssh', str(id_)).rstrip(os.sep) cachedir = os.path.join(u'salt-ssh', str(id_)).rstrip(os.sep)
for saltenv in file_refs: for saltenv in file_refs:
# Location where files in this saltenv will be cached # Location where files in this saltenv will be cached
cache_dest_root = os.path.join(cachedir, 'files', saltenv) cache_dest_root = os.path.join(cachedir, u'files', saltenv)
file_refs[saltenv].extend(sync_refs) file_refs[saltenv].extend(sync_refs)
env_root = os.path.join(gendir, saltenv) env_root = os.path.join(gendir, saltenv)
if not os.path.isdir(env_root): if not os.path.isdir(env_root):
@ -208,7 +209,7 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
try: try:
path = file_client.cache_file(name, saltenv, cachedir=cachedir) path = file_client.cache_file(name, saltenv, cachedir=cachedir)
except IOError: except IOError:
path = '' path = u''
if path: if path:
tgt = os.path.join(env_root, short) tgt = os.path.join(env_root, short)
tgt_dir = os.path.dirname(tgt) tgt_dir = os.path.dirname(tgt)
@ -219,10 +220,10 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
try: try:
files = file_client.cache_dir(name, saltenv, cachedir=cachedir) files = file_client.cache_dir(name, saltenv, cachedir=cachedir)
except IOError: except IOError:
files = '' files = u''
if files: if files:
for filename in files: for filename in files:
fn = filename[len(file_client.get_cachedir(cache_dest)):].strip('/') fn = filename[len(file_client.get_cachedir(cache_dest)):].strip(u'/')
tgt = os.path.join( tgt = os.path.join(
env_root, env_root,
short, short,
@ -239,7 +240,7 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
except OSError: except OSError:
cwd = None cwd = None
os.chdir(gendir) os.chdir(gendir)
with closing(tarfile.open(trans_tar, 'w:gz')) as tfp: with closing(tarfile.open(trans_tar, u'w:gz')) as tfp:
for root, dirs, files in os.walk(gendir): for root, dirs, files in os.walk(gendir):
for name in files: for name in files:
full = os.path.join(root, name) full = os.path.join(root, name)


@ -17,7 +17,7 @@ import salt.utils
import salt.client.ssh import salt.client.ssh
# Import 3rd-party libs # Import 3rd-party libs
import salt.ext.six as six from salt.ext import six
class FunctionWrapper(object): class FunctionWrapper(object):
@ -42,8 +42,8 @@ class FunctionWrapper(object):
self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {} self.wfuncs = wfuncs if isinstance(wfuncs, dict) else {}
self.opts = opts self.opts = opts
self.mods = mods if isinstance(mods, dict) else {} self.mods = mods if isinstance(mods, dict) else {}
self.kwargs = {'id_': id_, self.kwargs = {u'id_': id_,
'host': host} u'host': host}
self.fsclient = fsclient self.fsclient = fsclient
self.kwargs.update(kwargs) self.kwargs.update(kwargs)
self.aliases = aliases self.aliases = aliases
@ -67,14 +67,14 @@ class FunctionWrapper(object):
''' '''
Return the function call to simulate the salt local lookup system Return the function call to simulate the salt local lookup system
''' '''
if '.' not in cmd and not self.cmd_prefix: if u'.' not in cmd and not self.cmd_prefix:
# Form of salt.cmd.run in Jinja -- it's expecting a subdictionary # Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
# containing only 'cmd' module calls, in that case. Create a new # containing only 'cmd' module calls, in that case. Create a new
# FunctionWrapper which contains the prefix 'cmd' (again, for the # FunctionWrapper which contains the prefix 'cmd' (again, for the
# salt.cmd.run example) # salt.cmd.run example)
kwargs = copy.deepcopy(self.kwargs) kwargs = copy.deepcopy(self.kwargs)
id_ = kwargs.pop('id_') id_ = kwargs.pop(u'id_')
host = kwargs.pop('host') host = kwargs.pop(u'host')
return FunctionWrapper(self.opts, return FunctionWrapper(self.opts,
id_, id_,
host, host,
@ -90,7 +90,7 @@ class FunctionWrapper(object):
# We're in an inner FunctionWrapper as created by the code block # We're in an inner FunctionWrapper as created by the code block
# above. Reconstruct the original cmd in the form 'cmd.run' and # above. Reconstruct the original cmd in the form 'cmd.run' and
# then evaluate as normal # then evaluate as normal
cmd = '{0}.{1}'.format(self.cmd_prefix, cmd) cmd = u'{0}.{1}'.format(self.cmd_prefix, cmd)
if cmd in self.wfuncs: if cmd in self.wfuncs:
return self.wfuncs[cmd] return self.wfuncs[cmd]
@ -104,7 +104,7 @@ class FunctionWrapper(object):
''' '''
argv = [cmd] argv = [cmd]
argv.extend([json.dumps(arg) for arg in args]) argv.extend([json.dumps(arg) for arg in args])
argv.extend(['{0}={1}'.format(key, json.dumps(val)) for key, val in six.iteritems(kwargs)]) argv.extend([u'{0}={1}'.format(key, json.dumps(val)) for key, val in six.iteritems(kwargs)])
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
self.opts, self.opts,
argv, argv,
@ -115,21 +115,21 @@ class FunctionWrapper(object):
**self.kwargs **self.kwargs
) )
stdout, stderr, retcode = single.cmd_block() stdout, stderr, retcode = single.cmd_block()
if stderr.count('Permission Denied'): if stderr.count(u'Permission Denied'):
return {'_error': 'Permission Denied', return {u'_error': u'Permission Denied',
'stdout': stdout, u'stdout': stdout,
'stderr': stderr, u'stderr': stderr,
'retcode': retcode} u'retcode': retcode}
try: try:
ret = json.loads(stdout, object_hook=salt.utils.decode_dict) ret = json.loads(stdout, object_hook=salt.utils.decode_dict)
if len(ret) < 2 and 'local' in ret: if len(ret) < 2 and u'local' in ret:
ret = ret['local'] ret = ret[u'local']
ret = ret.get('return', {}) ret = ret.get(u'return', {})
except ValueError: except ValueError:
ret = {'_error': 'Failed to return clean data', ret = {u'_error': u'Failed to return clean data',
'stderr': stderr, u'stderr': stderr,
'stdout': stdout, u'stdout': stdout,
'retcode': retcode} u'retcode': retcode}
return ret return ret
return caller return caller
@ -137,18 +137,18 @@ class FunctionWrapper(object):
''' '''
Set aliases for functions Set aliases for functions
''' '''
if '.' not in cmd and not self.cmd_prefix: if u'.' not in cmd and not self.cmd_prefix:
# Form of salt.cmd.run in Jinja -- it's expecting a subdictionary # Form of salt.cmd.run in Jinja -- it's expecting a subdictionary
# containing only 'cmd' module calls, in that case. We don't # containing only 'cmd' module calls, in that case. We don't
# support assigning directly to prefixes in this way # support assigning directly to prefixes in this way
raise KeyError('Cannot assign to module key {0} in the ' raise KeyError(u'Cannot assign to module key {0} in the '
'FunctionWrapper'.format(cmd)) u'FunctionWrapper'.format(cmd))
if self.cmd_prefix: if self.cmd_prefix:
# We're in an inner FunctionWrapper as created by the first code # We're in an inner FunctionWrapper as created by the first code
# block in __getitem__. Reconstruct the original cmd in the form # block in __getitem__. Reconstruct the original cmd in the form
# 'cmd.run' and then evaluate as normal # 'cmd.run' and then evaluate as normal
cmd = '{0}.{1}'.format(self.cmd_prefix, cmd) cmd = u'{0}.{1}'.format(self.cmd_prefix, cmd)
if cmd in self.wfuncs: if cmd in self.wfuncs:
self.wfuncs[cmd] = value self.wfuncs[cmd] = value


@ -13,47 +13,47 @@ import salt.utils
import salt.syspaths as syspaths import salt.syspaths as syspaths
# Import 3rd-party libs # Import 3rd-party libs
import salt.ext.six as six from salt.ext import six
# Set up the default values for all systems # Set up the default values for all systems
DEFAULTS = {'mongo.db': 'salt', DEFAULTS = {u'mongo.db': u'salt',
'mongo.host': 'salt', u'mongo.host': u'salt',
'mongo.password': '', u'mongo.password': u'',
'mongo.port': 27017, u'mongo.port': 27017,
'mongo.user': '', u'mongo.user': u'',
'redis.db': '0', u'redis.db': u'0',
'redis.host': 'salt', u'redis.host': u'salt',
'redis.port': 6379, u'redis.port': 6379,
'test.foo': 'unconfigured', u'test.foo': u'unconfigured',
'ca.cert_base_path': '/etc/pki', u'ca.cert_base_path': u'/etc/pki',
'solr.cores': [], u'solr.cores': [],
'solr.host': 'localhost', u'solr.host': u'localhost',
'solr.port': '8983', u'solr.port': u'8983',
'solr.baseurl': '/solr', u'solr.baseurl': u'/solr',
'solr.type': 'master', u'solr.type': u'master',
'solr.request_timeout': None, u'solr.request_timeout': None,
'solr.init_script': '/etc/rc.d/solr', u'solr.init_script': u'/etc/rc.d/solr',
'solr.dih.import_options': {'clean': False, 'optimize': True, u'solr.dih.import_options': {u'clean': False, u'optimize': True,
'commit': True, 'verbose': False}, u'commit': True, u'verbose': False},
'solr.backup_path': None, u'solr.backup_path': None,
'solr.num_backups': 1, u'solr.num_backups': 1,
'poudriere.config': '/usr/local/etc/poudriere.conf', u'poudriere.config': u'/usr/local/etc/poudriere.conf',
'poudriere.config_dir': '/usr/local/etc/poudriere.d', u'poudriere.config_dir': u'/usr/local/etc/poudriere.d',
'ldap.server': 'localhost', u'ldap.server': u'localhost',
'ldap.port': '389', u'ldap.port': u'389',
'ldap.tls': False, u'ldap.tls': False,
'ldap.scope': 2, u'ldap.scope': 2,
'ldap.attrs': None, u'ldap.attrs': None,
'ldap.binddn': '', u'ldap.binddn': u'',
'ldap.bindpw': '', u'ldap.bindpw': u'',
'hosts.file': '/etc/hosts', u'hosts.file': u'/etc/hosts',
'aliases.file': '/etc/aliases', u'aliases.file': u'/etc/aliases',
'virt.images': os.path.join(syspaths.SRV_ROOT_DIR, 'salt-images'), u'virt.images': os.path.join(syspaths.SRV_ROOT_DIR, u'salt-images'),
'virt.tunnel': False, u'virt.tunnel': False,
} }
def backup_mode(backup=''): def backup_mode(backup=u''):
''' '''
Return the backup mode Return the backup mode
@ -65,7 +65,7 @@ def backup_mode(backup=''):
''' '''
if backup: if backup:
return backup return backup
return option('backup_mode') return option(u'backup_mode')
def manage_mode(mode): def manage_mode(mode):
@ -96,14 +96,14 @@ def valid_fileproto(uri):
salt '*' config.valid_fileproto salt://path/to/file salt '*' config.valid_fileproto salt://path/to/file
''' '''
try: try:
return bool(re.match('^(?:salt|https?|ftp)://', uri)) return bool(re.match(u'^(?:salt|https?|ftp)://', uri))
except Exception: except Exception:
return False return False
def option( def option(
value, value,
default='', default=u'',
omit_opts=False, omit_opts=False,
omit_master=False, omit_master=False,
omit_pillar=False): omit_pillar=False):
@ -120,8 +120,8 @@ def option(
if value in __opts__: if value in __opts__:
return __opts__[value] return __opts__[value]
if not omit_master: if not omit_master:
if value in __pillar__.get('master', {}): if value in __pillar__.get(u'master', {}):
return __pillar__['master'][value] return __pillar__[u'master'][value]
if not omit_pillar: if not omit_pillar:
if value in __pillar__: if value in __pillar__:
return __pillar__[value] return __pillar__[value]
@ -131,7 +131,7 @@ def option(
def merge(value, def merge(value,
default='', default=u'',
omit_opts=False, omit_opts=False,
omit_master=False, omit_master=False,
omit_pillar=False): omit_pillar=False):
@ -151,14 +151,14 @@ def merge(value,
if not omit_opts: if not omit_opts:
if value in __opts__: if value in __opts__:
ret = __opts__[value] ret = __opts__[value]
if isinstance(ret, str): if isinstance(ret, six.string_types):
return ret return ret
if not omit_master: if not omit_master:
if value in __pillar__.get('master', {}): if value in __pillar__.get(u'master', {}):
tmp = __pillar__['master'][value] tmp = __pillar__[u'master'][value]
if ret is None: if ret is None:
ret = tmp ret = tmp
if isinstance(ret, str): if isinstance(ret, six.string_types):
return ret return ret
elif isinstance(ret, dict) and isinstance(tmp, dict): elif isinstance(ret, dict) and isinstance(tmp, dict):
tmp.update(ret) tmp.update(ret)
@ -171,7 +171,7 @@ def merge(value,
tmp = __pillar__[value] tmp = __pillar__[value]
if ret is None: if ret is None:
ret = tmp ret = tmp
if isinstance(ret, str): if isinstance(ret, six.string_types):
return ret return ret
elif isinstance(ret, dict) and isinstance(tmp, dict): elif isinstance(ret, dict) and isinstance(tmp, dict):
tmp.update(ret) tmp.update(ret)
@ -184,7 +184,7 @@ def merge(value,
return ret or default return ret or default
def get(key, default=''): def get(key, default=u''):
''' '''
.. versionadded: 0.14.0 .. versionadded: 0.14.0
@ -215,17 +215,17 @@ def get(key, default=''):
salt '*' config.get pkg:apache salt '*' config.get pkg:apache
''' '''
ret = salt.utils.traverse_dict_and_list(__opts__, key, '_|-') ret = salt.utils.traverse_dict_and_list(__opts__, key, u'_|-')
if ret != '_|-': if ret != u'_|-':
return ret return ret
ret = salt.utils.traverse_dict_and_list(__grains__, key, '_|-') ret = salt.utils.traverse_dict_and_list(__grains__, key, u'_|-')
if ret != '_|-': if ret != u'_|-':
return ret return ret
ret = salt.utils.traverse_dict_and_list(__pillar__, key, '_|-') ret = salt.utils.traverse_dict_and_list(__pillar__, key, u'_|-')
if ret != '_|-': if ret != u'_|-':
return ret return ret
ret = salt.utils.traverse_dict_and_list(__pillar__.get('master', {}), key, '_|-') ret = salt.utils.traverse_dict_and_list(__pillar__.get(u'master', {}), key, u'_|-')
if ret != '_|-': if ret != u'_|-':
return ret return ret
return default return default
@ -242,10 +242,10 @@ def dot_vals(value):
salt '*' config.dot_vals host salt '*' config.dot_vals host
''' '''
ret = {} ret = {}
for key, val in six.iteritems(__pillar__.get('master', {})): for key, val in six.iteritems(__pillar__.get(u'master', {})):
if key.startswith('{0}.'.format(value)): if key.startswith(u'{0}.'.format(value)):
ret[key] = val ret[key] = val
for key, val in six.iteritems(__opts__): for key, val in six.iteritems(__opts__):
if key.startswith('{0}.'.format(value)): if key.startswith(u'{0}.'.format(value)):
ret[key] = val ret[key] = val
return ret return ret


@ -18,7 +18,7 @@ log = logging.getLogger(__name__)
def get_file(path, def get_file(path,
dest, dest,
saltenv='base', saltenv=u'base',
makedirs=False, makedirs=False,
template=None, template=None,
gzip=None): gzip=None):
@ -31,83 +31,83 @@ def get_file(path,
cp.get_file. The argument is only accepted for interface compatibility. cp.get_file. The argument is only accepted for interface compatibility.
''' '''
if gzip is not None: if gzip is not None:
log.warning('The gzip argument to cp.get_file in salt-ssh is ' log.warning(u'The gzip argument to cp.get_file in salt-ssh is '
'unsupported') u'unsupported')
if template is not None: if template is not None:
(path, dest) = _render_filenames(path, dest, saltenv, template) (path, dest) = _render_filenames(path, dest, saltenv, template)
src = __context__['fileclient'].cache_file( src = __context__[u'fileclient'].cache_file(
path, path,
saltenv, saltenv,
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_'])) cachedir=os.path.join(u'salt-ssh', __salt__.kwargs[u'id_']))
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
'', u'',
**__salt__.kwargs) **__salt__.kwargs)
ret = single.shell.send(src, dest, makedirs) ret = single.shell.send(src, dest, makedirs)
return not ret[2] return not ret[2]
def get_dir(path, dest, saltenv='base'): def get_dir(path, dest, saltenv=u'base'):
''' '''
Transfer a directory down Transfer a directory down
''' '''
src = __context__['fileclient'].cache_dir( src = __context__[u'fileclient'].cache_dir(
path, path,
saltenv, saltenv,
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_'])) cachedir=os.path.join(u'salt-ssh', __salt__.kwargs[u'id_']))
src = ' '.join(src) src = u' '.join(src)
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
'', u'',
**__salt__.kwargs) **__salt__.kwargs)
ret = single.shell.send(src, dest) ret = single.shell.send(src, dest)
return not ret[2] return not ret[2]
def get_url(path, dest, saltenv='base'): def get_url(path, dest, saltenv=u'base'):
''' '''
retrieve a URL retrieve a URL
''' '''
src = __context__['fileclient'].cache_file( src = __context__[u'fileclient'].cache_file(
path, path,
saltenv, saltenv,
cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_'])) cachedir=os.path.join(u'salt-ssh', __salt__.kwargs[u'id_']))
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
'', u'',
**__salt__.kwargs) **__salt__.kwargs)
ret = single.shell.send(src, dest) ret = single.shell.send(src, dest)
return not ret[2] return not ret[2]
def list_states(saltenv='base'): def list_states(saltenv=u'base'):
''' '''
List all the available state modules in an environment List all the available state modules in an environment
''' '''
return __context__['fileclient'].list_states(saltenv) return __context__[u'fileclient'].list_states(saltenv)
def list_master(saltenv='base', prefix=''): def list_master(saltenv=u'base', prefix=u''):
''' '''
List all of the files stored on the master List all of the files stored on the master
''' '''
return __context__['fileclient'].file_list(saltenv, prefix) return __context__[u'fileclient'].file_list(saltenv, prefix)
def list_master_dirs(saltenv='base', prefix=''): def list_master_dirs(saltenv=u'base', prefix=u''):
''' '''
List all of the directories stored on the master List all of the directories stored on the master
''' '''
return __context__['fileclient'].dir_list(saltenv, prefix) return __context__[u'fileclient'].dir_list(saltenv, prefix)
def list_master_symlinks(saltenv='base', prefix=''): def list_master_symlinks(saltenv=u'base', prefix=u''):
''' '''
List all of the symlinks stored on the master List all of the symlinks stored on the master
''' '''
return __context__['fileclient'].symlink_list(saltenv, prefix) return __context__[u'fileclient'].symlink_list(saltenv, prefix)
def _render_filenames(path, dest, saltenv, template): def _render_filenames(path, dest, saltenv, template):
@ -122,16 +122,16 @@ def _render_filenames(path, dest, saltenv, template):
# render the path as a template using path_template_engine as the engine # render the path as a template using path_template_engine as the engine
if template not in salt.utils.templates.TEMPLATE_REGISTRY: if template not in salt.utils.templates.TEMPLATE_REGISTRY:
raise CommandExecutionError( raise CommandExecutionError(
'Attempted to render file paths with unavailable engine ' u'Attempted to render file paths with unavailable engine '
'{0}'.format(template) u'{0}'.format(template)
) )
kwargs = {} kwargs = {}
kwargs['salt'] = __salt__ kwargs[u'salt'] = __salt__
kwargs['pillar'] = __pillar__ kwargs[u'pillar'] = __pillar__
kwargs['grains'] = __grains__ kwargs[u'grains'] = __grains__
kwargs['opts'] = __opts__ kwargs[u'opts'] = __opts__
kwargs['saltenv'] = saltenv kwargs[u'saltenv'] = saltenv
def _render(contents): def _render(contents):
''' '''
@ -140,7 +140,7 @@ def _render_filenames(path, dest, saltenv, template):
''' '''
# write out path to temp file # write out path to temp file
tmp_path_fn = salt.utils.files.mkstemp() tmp_path_fn = salt.utils.files.mkstemp()
with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_: with salt.utils.files.fopen(tmp_path_fn, u'w+') as fp_:
fp_.write(contents) fp_.write(contents)
data = salt.utils.templates.TEMPLATE_REGISTRY[template]( data = salt.utils.templates.TEMPLATE_REGISTRY[template](
tmp_path_fn, tmp_path_fn,
@ -148,15 +148,15 @@ def _render_filenames(path, dest, saltenv, template):
**kwargs **kwargs
) )
salt.utils.files.safe_rm(tmp_path_fn) salt.utils.files.safe_rm(tmp_path_fn)
if not data['result']: if not data[u'result']:
# Failed to render the template # Failed to render the template
raise CommandExecutionError( raise CommandExecutionError(
'Failed to render file path with error: {0}'.format( u'Failed to render file path with error: {0}'.format(
data['data'] data[u'data']
) )
) )
else: else:
return data['data'] return data[u'data']
path = _render(path) path = _render(path)
dest = _render(dest) dest = _render(dest)


@ -17,38 +17,40 @@ from salt.defaults import DEFAULT_TARGET_DELIM
from salt.exceptions import SaltException from salt.exceptions import SaltException
# Import 3rd-party libs # Import 3rd-party libs
import salt.ext.six as six from salt.ext import six
# Seed the grains dict so cython will build # Seed the grains dict so cython will build
__grains__ = {} __grains__ = {}
def _serial_sanitizer(instr): def _serial_sanitizer(instr):
'''Replaces the last 1/4 of a string with X's''' '''
Replaces the last 1/4 of a string with X's
'''
length = len(instr) length = len(instr)
index = int(math.floor(length * .75)) index = int(math.floor(length * .75))
return '{0}{1}'.format(instr[:index], 'X' * (length - index)) return u'{0}{1}'.format(instr[:index], u'X' * (length - index))
_FQDN_SANITIZER = lambda x: 'MINION.DOMAINNAME' _FQDN_SANITIZER = lambda x: u'MINION.DOMAINNAME'
_HOSTNAME_SANITIZER = lambda x: 'MINION' _HOSTNAME_SANITIZER = lambda x: u'MINION'
_DOMAINNAME_SANITIZER = lambda x: 'DOMAINNAME' _DOMAINNAME_SANITIZER = lambda x: u'DOMAINNAME'
# A dictionary of grain -> function mappings for sanitizing grain output. This # A dictionary of grain -> function mappings for sanitizing grain output. This
# is used when the 'sanitize' flag is given. # is used when the 'sanitize' flag is given.
_SANITIZERS = { _SANITIZERS = {
'serialnumber': _serial_sanitizer, u'serialnumber': _serial_sanitizer,
'domain': _DOMAINNAME_SANITIZER, u'domain': _DOMAINNAME_SANITIZER,
'fqdn': _FQDN_SANITIZER, u'fqdn': _FQDN_SANITIZER,
'id': _FQDN_SANITIZER, u'id': _FQDN_SANITIZER,
'host': _HOSTNAME_SANITIZER, u'host': _HOSTNAME_SANITIZER,
'localhost': _HOSTNAME_SANITIZER, u'localhost': _HOSTNAME_SANITIZER,
'nodename': _HOSTNAME_SANITIZER, u'nodename': _HOSTNAME_SANITIZER,
} }
def get(key, default='', delimiter=DEFAULT_TARGET_DELIM, ordered=True): def get(key, default=u'', delimiter=DEFAULT_TARGET_DELIM, ordered=True):
''' '''
Attempt to retrieve the named value from grains, if the named value is not Attempt to retrieve the named value from grains, if the named value is not
available return the passed default. The default return is an empty string. available return the passed default. The default return is an empty string.
@ -149,7 +151,7 @@ def item(*args, **kwargs):
ret[arg] = __grains__[arg] ret[arg] = __grains__[arg]
except KeyError: except KeyError:
pass pass
if salt.utils.is_true(kwargs.get('sanitize')): if salt.utils.is_true(kwargs.get(u'sanitize')):
for arg, func in six.iteritems(_SANITIZERS): for arg, func in six.iteritems(_SANITIZERS):
if arg in ret: if arg in ret:
ret[arg] = func(ret[arg]) ret[arg] = func(ret[arg])
@ -170,9 +172,9 @@ def ls(): # pylint: disable=C0103
def filter_by(lookup_dict, def filter_by(lookup_dict,
grain='os_family', grain=u'os_family',
merge=None, merge=None,
default='default', default=u'default',
base=None): base=None):
''' '''
.. versionadded:: 0.17.0 .. versionadded:: 0.17.0
@ -266,12 +268,12 @@ def filter_by(lookup_dict,
elif isinstance(base_values, collections.Mapping): elif isinstance(base_values, collections.Mapping):
if not isinstance(ret, collections.Mapping): if not isinstance(ret, collections.Mapping):
raise SaltException('filter_by default and look-up values must both be dictionaries.') raise SaltException(u'filter_by default and look-up values must both be dictionaries.')
ret = salt.utils.dictupdate.update(copy.deepcopy(base_values), ret) ret = salt.utils.dictupdate.update(copy.deepcopy(base_values), ret)
if merge: if merge:
if not isinstance(merge, collections.Mapping): if not isinstance(merge, collections.Mapping):
raise SaltException('filter_by merge argument must be a dictionary.') raise SaltException(u'filter_by merge argument must be a dictionary.')
else: else:
if ret is None: if ret is None:
ret = merge ret = merge


@ -14,7 +14,7 @@ import copy
import salt.client.ssh import salt.client.ssh
def get(tgt, fun, tgt_type='glob', roster='flat'): def get(tgt, fun, tgt_type=u'glob', roster=u'flat'):
''' '''
Get data from the mine based on the target, function and tgt_type Get data from the mine based on the target, function and tgt_type
@ -36,15 +36,15 @@ def get(tgt, fun, tgt_type='glob', roster='flat'):
salt-ssh '*' mine.get '192.168.5.0' network.ipaddrs roster=scan salt-ssh '*' mine.get '192.168.5.0' network.ipaddrs roster=scan
''' '''
# Set up opts for the SSH object # Set up opts for the SSH object
opts = copy.deepcopy(__context__['master_opts']) opts = copy.deepcopy(__context__[u'master_opts'])
minopts = copy.deepcopy(__opts__) minopts = copy.deepcopy(__opts__)
opts.update(minopts) opts.update(minopts)
if roster: if roster:
opts['roster'] = roster opts[u'roster'] = roster
opts['argv'] = [fun] opts[u'argv'] = [fun]
opts['selected_target_option'] = tgt_type opts[u'selected_target_option'] = tgt_type
opts['tgt'] = tgt opts[u'tgt'] = tgt
opts['arg'] = [] opts[u'arg'] = []
# Create the SSH object to handle the actual call # Create the SSH object to handle the actual call
ssh = salt.client.ssh.SSH(opts) ssh = salt.client.ssh.SSH(opts)
@ -56,8 +56,8 @@ def get(tgt, fun, tgt_type='glob', roster='flat'):
cret = {} cret = {}
for host in rets: for host in rets:
if 'return' in rets[host]: if u'return' in rets[host]:
cret[host] = rets[host]['return'] cret[host] = rets[host][u'return']
else: else:
cret[host] = rets[host] cret[host] = rets[host]
return cret return cret


@ -13,7 +13,7 @@ import salt.utils
from salt.defaults import DEFAULT_TARGET_DELIM from salt.defaults import DEFAULT_TARGET_DELIM
def get(key, default='', merge=False, delimiter=DEFAULT_TARGET_DELIM): def get(key, default=u'', merge=False, delimiter=DEFAULT_TARGET_DELIM):
''' '''
.. versionadded:: 0.14 .. versionadded:: 0.14
@ -130,10 +130,10 @@ def keys(key, delimiter=DEFAULT_TARGET_DELIM):
__pillar__, key, KeyError, delimiter) __pillar__, key, KeyError, delimiter)
if ret is KeyError: if ret is KeyError:
raise KeyError("Pillar key not found: {0}".format(key)) raise KeyError(u"Pillar key not found: {0}".format(key))
if not isinstance(ret, dict): if not isinstance(ret, dict):
raise ValueError("Pillar value in key {0} is not a dict".format(key)) raise ValueError(u"Pillar value in key {0} is not a dict".format(key))
return ret.keys() return ret.keys()


@ -17,6 +17,8 @@ import logging
# Import salt libs # Import salt libs
import salt.client.ssh import salt.client.ssh
import salt.runner import salt.runner
import salt.utils.args
import salt.utils.versions
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -24,10 +26,10 @@ log = logging.getLogger(__name__)
def _publish(tgt, def _publish(tgt,
fun, fun,
arg=None, arg=None,
tgt_type='glob', tgt_type=u'glob',
returner='', returner=u'',
timeout=None, timeout=None,
form='clean', form=u'clean',
roster=None): roster=None):
''' '''
Publish a command "from the minion out to other minions". In reality, the Publish a command "from the minion out to other minions". In reality, the
@ -53,13 +55,13 @@ def _publish(tgt,
salt-ssh system.example.com publish.publish '*' cmd.run 'ls -la /tmp' salt-ssh system.example.com publish.publish '*' cmd.run 'ls -la /tmp'
''' '''
if fun.startswith('publish.'): if fun.startswith(u'publish.'):
log.info('Cannot publish publish calls. Returning {}') log.info(u'Cannot publish publish calls. Returning {}')
return {} return {}
# TODO: implement returners? Do they make sense for salt-ssh calls? # TODO: implement returners? Do they make sense for salt-ssh calls?
if returner: if returner:
log.warning('Returners currently not supported in salt-ssh publish') log.warning(u'Returners currently not supported in salt-ssh publish')
# Make sure args have been processed # Make sure args have been processed
if arg is None: if arg is None:
@ -72,17 +74,17 @@ def _publish(tgt,
arg = [] arg = []
# Set up opts for the SSH object # Set up opts for the SSH object
opts = copy.deepcopy(__context__['master_opts']) opts = copy.deepcopy(__context__[u'master_opts'])
minopts = copy.deepcopy(__opts__) minopts = copy.deepcopy(__opts__)
opts.update(minopts) opts.update(minopts)
if roster: if roster:
opts['roster'] = roster opts[u'roster'] = roster
if timeout: if timeout:
opts['timeout'] = timeout opts[u'timeout'] = timeout
opts['argv'] = [fun] + arg opts[u'argv'] = [fun] + arg
opts['selected_target_option'] = tgt_type opts[u'selected_target_option'] = tgt_type
opts['tgt'] = tgt opts[u'tgt'] = tgt
opts['arg'] = arg opts[u'arg'] = arg
# Create the SSH object to handle the actual call # Create the SSH object to handle the actual call
ssh = salt.client.ssh.SSH(opts) ssh = salt.client.ssh.SSH(opts)
@ -92,11 +94,11 @@ def _publish(tgt,
for ret in ssh.run_iter(): for ret in ssh.run_iter():
rets.update(ret) rets.update(ret)
if form == 'clean': if form == u'clean':
cret = {} cret = {}
for host in rets: for host in rets:
if 'return' in rets[host]: if u'return' in rets[host]:
cret[host] = rets[host]['return'] cret[host] = rets[host][u'return']
else: else:
cret[host] = rets[host] cret[host] = rets[host]
return cret return cret
@ -107,8 +109,8 @@ def _publish(tgt,
def publish(tgt, def publish(tgt,
fun, fun,
arg=None, arg=None,
tgt_type='glob', tgt_type=u'glob',
returner='', returner=u'',
timeout=5, timeout=5,
roster=None, roster=None,
expr_form=None): expr_form=None):
@ -173,11 +175,11 @@ def publish(tgt,
# remember to remove the expr_form argument from this function when # remember to remove the expr_form argument from this function when
# performing the cleanup on this deprecation. # performing the cleanup on this deprecation.
if expr_form is not None: if expr_form is not None:
salt.utils.warn_until( salt.utils.versions.warn_until(
'Fluorine', u'Fluorine',
'the target type should be passed using the \'tgt_type\' ' u'the target type should be passed using the \'tgt_type\' '
'argument instead of \'expr_form\'. Support for using ' u'argument instead of \'expr_form\'. Support for using '
'\'expr_form\' will be removed in Salt Fluorine.' u'\'expr_form\' will be removed in Salt Fluorine.'
) )
tgt_type = expr_form tgt_type = expr_form
@ -187,15 +189,15 @@ def publish(tgt,
tgt_type=tgt_type, tgt_type=tgt_type,
returner=returner, returner=returner,
timeout=timeout, timeout=timeout,
form='clean', form=u'clean',
roster=roster) roster=roster)
def full_data(tgt, def full_data(tgt,
fun, fun,
arg=None, arg=None,
tgt_type='glob', tgt_type=u'glob',
returner='', returner=u'',
timeout=5, timeout=5,
roster=None, roster=None,
expr_form=None): expr_form=None):
@ -223,11 +225,11 @@ def full_data(tgt,
# remember to remove the expr_form argument from this function when # remember to remove the expr_form argument from this function when
# performing the cleanup on this deprecation. # performing the cleanup on this deprecation.
if expr_form is not None: if expr_form is not None:
salt.utils.warn_until( salt.utils.versions.warn_until(
'Fluorine', u'Fluorine',
'the target type should be passed using the \'tgt_type\' ' u'the target type should be passed using the \'tgt_type\' '
'argument instead of \'expr_form\'. Support for using ' u'argument instead of \'expr_form\'. Support for using '
'\'expr_form\' will be removed in Salt Fluorine.' u'\'expr_form\' will be removed in Salt Fluorine.'
) )
tgt_type = expr_form tgt_type = expr_form
@ -237,7 +239,7 @@ def full_data(tgt,
tgt_type=tgt_type, tgt_type=tgt_type,
returner=returner, returner=returner,
timeout=timeout, timeout=timeout,
form='full', form=u'full',
roster=roster) roster=roster)
@ -260,5 +262,5 @@ def runner(fun, arg=None, timeout=5):
arg = [] arg = []
# Create and run the runner # Create and run the runner
runner = salt.runner.RunnerClient(__opts__['__master_opts__']) runner = salt.runner.RunnerClient(__opts__[u'__master_opts__'])
return runner.cmd(fun, arg) return runner.cmd(fun, arg)


@ -20,10 +20,12 @@ import salt.state
import salt.loader import salt.loader
import salt.minion import salt.minion
import salt.log import salt.log
from salt.ext.six import string_types
# Import 3rd-party libs
from salt.ext import six
__func_alias__ = { __func_alias__ = {
'apply_': 'apply' u'apply_': u'apply'
} }
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -34,37 +36,37 @@ def _merge_extra_filerefs(*args):
''' '''
ret = [] ret = []
for arg in args: for arg in args:
if isinstance(arg, string_types): if isinstance(arg, six.string_types):
if arg: if arg:
ret.extend(arg.split(',')) ret.extend(arg.split(u','))
elif isinstance(arg, list): elif isinstance(arg, list):
if arg: if arg:
ret.extend(arg) ret.extend(arg)
return ','.join(ret) return u','.join(ret)
def sls(mods, saltenv='base', test=None, exclude=None, **kwargs): def sls(mods, saltenv=u'base', test=None, exclude=None, **kwargs):
''' '''
Create the seed file for a state.sls run Create the seed file for a state.sls run
''' '''
st_kwargs = __salt__.kwargs st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
__pillar__.update(kwargs.get('pillar', {})) __pillar__.update(kwargs.get(u'pillar', {}))
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
if isinstance(mods, str): if isinstance(mods, six.string_types):
mods = mods.split(',') mods = mods.split(u',')
high_data, errors = st_.render_highstate({saltenv: mods}) high_data, errors = st_.render_highstate({saltenv: mods})
if exclude: if exclude:
if isinstance(exclude, str): if isinstance(exclude, six.string_types):
exclude = exclude.split(',') exclude = exclude.split(u',')
if '__exclude__' in high_data: if u'__exclude__' in high_data:
high_data['__exclude__'].extend(exclude) high_data[u'__exclude__'].extend(exclude)
else: else:
high_data['__exclude__'] = exclude high_data[u'__exclude__'] = exclude
high_data, ext_errors = st_.state.reconcile_extend(high_data) high_data, ext_errors = st_.state.reconcile_extend(high_data)
errors += ext_errors errors += ext_errors
errors += st_.state.verify_high(high_data) errors += st_.state.verify_high(high_data)
@ -81,38 +83,38 @@ def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
file_refs = salt.client.ssh.state.lowstate_file_refs( file_refs = salt.client.ssh.state.lowstate_file_refs(
chunks, chunks,
_merge_extra_filerefs( _merge_extra_filerefs(
kwargs.get('extra_filerefs', ''), kwargs.get(u'extra_filerefs', u''),
__opts__.get('extra_filerefs', '') __opts__.get(u'extra_filerefs', u'')
) )
) )
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat')) roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
roster_grains = roster.opts['grains'] roster_grains = roster.opts[u'grains']
# Create the tar containing the state pkg and relevant files. # Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar( trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__, __opts__,
__context__['fileclient'], __context__[u'fileclient'],
chunks, chunks,
file_refs, file_refs,
__pillar__, __pillar__,
st_kwargs['id_'], st_kwargs[u'id_'],
roster_grains) roster_grains)
trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__['hash_type']) trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__[u'hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format( cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'], __opts__[u'thin_dir'],
test, test,
trans_tar_sum, trans_tar_sum,
__opts__['hash_type']) __opts__[u'hash_type'])
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
cmd, cmd,
fsclient=__context__['fileclient'], fsclient=__context__[u'fileclient'],
minion_opts=__salt__.minion_opts, minion_opts=__salt__.minion_opts,
**st_kwargs) **st_kwargs)
single.shell.send( single.shell.send(
trans_tar, trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir'])) u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
stdout, stderr, _ = single.cmd_block() stdout, stderr, _ = single.cmd_block()
# Clean up our tar # Clean up our tar
@ -125,7 +127,7 @@ def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
try: try:
return json.loads(stdout, object_hook=salt.utils.decode_dict) return json.loads(stdout, object_hook=salt.utils.decode_dict)
except Exception as e: except Exception as e:
log.error("JSON Render failed for: {0}\n{1}".format(stdout, stderr)) log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
log.error(str(e)) log.error(str(e))
# If for some reason the json load fails, return the stdout # If for some reason the json load fails, return the stdout
@ -144,51 +146,51 @@ def low(data, **kwargs):
salt '*' state.low '{"state": "pkg", "fun": "installed", "name": "vi"}' salt '*' state.low '{"state": "pkg", "fun": "installed", "name": "vi"}'
''' '''
st_kwargs = __salt__.kwargs st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
chunks = [data] chunks = [data]
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
for chunk in chunks: for chunk in chunks:
chunk['__id__'] = chunk['name'] if not chunk.get('__id__') else chunk['__id__'] chunk[u'__id__'] = chunk[u'name'] if not chunk.get(u'__id__') else chunk[u'__id__']
err = st_.state.verify_data(data) err = st_.state.verify_data(data)
if err: if err:
return err return err
file_refs = salt.client.ssh.state.lowstate_file_refs( file_refs = salt.client.ssh.state.lowstate_file_refs(
chunks, chunks,
_merge_extra_filerefs( _merge_extra_filerefs(
kwargs.get('extra_filerefs', ''), kwargs.get(u'extra_filerefs', u''),
__opts__.get('extra_filerefs', '') __opts__.get(u'extra_filerefs', u'')
) )
) )
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat')) roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
roster_grains = roster.opts['grains'] roster_grains = roster.opts[u'grains']
# Create the tar containing the state pkg and relevant files. # Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar( trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__, __opts__,
__context__['fileclient'], __context__[u'fileclient'],
chunks, chunks,
file_refs, file_refs,
__pillar__, __pillar__,
st_kwargs['id_'], st_kwargs[u'id_'],
roster_grains) roster_grains)
trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__['hash_type']) trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__[u'hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format( cmd = u'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format(
__opts__['thin_dir'], __opts__[u'thin_dir'],
trans_tar_sum, trans_tar_sum,
__opts__['hash_type']) __opts__[u'hash_type'])
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
cmd, cmd,
fsclient=__context__['fileclient'], fsclient=__context__[u'fileclient'],
minion_opts=__salt__.minion_opts, minion_opts=__salt__.minion_opts,
**st_kwargs) **st_kwargs)
single.shell.send( single.shell.send(
trans_tar, trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir'])) u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
stdout, stderr, _ = single.cmd_block() stdout, stderr, _ = single.cmd_block()
# Clean up our tar # Clean up our tar
@ -201,7 +203,7 @@ def low(data, **kwargs):
try: try:
return json.loads(stdout, object_hook=salt.utils.decode_dict) return json.loads(stdout, object_hook=salt.utils.decode_dict)
except Exception as e: except Exception as e:
log.error("JSON Render failed for: {0}\n{1}".format(stdout, stderr)) log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
log.error(str(e)) log.error(str(e))
# If for some reason the json load fails, return the stdout # If for some reason the json load fails, return the stdout
@ -219,49 +221,49 @@ def high(data, **kwargs):
salt '*' state.high '{"vim": {"pkg": ["installed"]}}' salt '*' state.high '{"vim": {"pkg": ["installed"]}}'
''' '''
__pillar__.update(kwargs.get('pillar', {})) __pillar__.update(kwargs.get(u'pillar', {}))
st_kwargs = __salt__.kwargs st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
chunks = st_.state.compile_high_data(data) chunks = st_.state.compile_high_data(data)
file_refs = salt.client.ssh.state.lowstate_file_refs( file_refs = salt.client.ssh.state.lowstate_file_refs(
chunks, chunks,
_merge_extra_filerefs( _merge_extra_filerefs(
kwargs.get('extra_filerefs', ''), kwargs.get(u'extra_filerefs', u''),
__opts__.get('extra_filerefs', '') __opts__.get(u'extra_filerefs', u'')
) )
) )
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat')) roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
roster_grains = roster.opts['grains'] roster_grains = roster.opts[u'grains']
# Create the tar containing the state pkg and relevant files. # Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar( trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__, __opts__,
__context__['fileclient'], __context__[u'fileclient'],
chunks, chunks,
file_refs, file_refs,
__pillar__, __pillar__,
st_kwargs['id_'], st_kwargs[u'id_'],
roster_grains) roster_grains)
trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__['hash_type']) trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__[u'hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format( cmd = u'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format(
__opts__['thin_dir'], __opts__[u'thin_dir'],
trans_tar_sum, trans_tar_sum,
__opts__['hash_type']) __opts__[u'hash_type'])
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
cmd, cmd,
fsclient=__context__['fileclient'], fsclient=__context__[u'fileclient'],
minion_opts=__salt__.minion_opts, minion_opts=__salt__.minion_opts,
**st_kwargs) **st_kwargs)
single.shell.send( single.shell.send(
trans_tar, trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir'])) u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
stdout, stderr, _ = single.cmd_block() stdout, stderr, _ = single.cmd_block()
# Clean up our tar # Clean up our tar
@ -274,7 +276,7 @@ def high(data, **kwargs):
try: try:
return json.loads(stdout, object_hook=salt.utils.decode_dict) return json.loads(stdout, object_hook=salt.utils.decode_dict)
except Exception as e: except Exception as e:
log.error("JSON Render failed for: {0}\n{1}".format(stdout, stderr)) log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
log.error(str(e)) log.error(str(e))
# If for some reason the json load fails, return the stdout # If for some reason the json load fails, return the stdout
@ -316,56 +318,56 @@ def highstate(test=None, **kwargs):
salt '*' state.highstate exclude=sls_to_exclude salt '*' state.highstate exclude=sls_to_exclude
salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]" salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
''' '''
__pillar__.update(kwargs.get('pillar', {})) __pillar__.update(kwargs.get(u'pillar', {}))
st_kwargs = __salt__.kwargs st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
chunks = st_.compile_low_chunks() chunks = st_.compile_low_chunks()
file_refs = salt.client.ssh.state.lowstate_file_refs( file_refs = salt.client.ssh.state.lowstate_file_refs(
chunks, chunks,
_merge_extra_filerefs( _merge_extra_filerefs(
kwargs.get('extra_filerefs', ''), kwargs.get(u'extra_filerefs', u''),
__opts__.get('extra_filerefs', '') __opts__.get(u'extra_filerefs', u'')
) )
) )
# Check for errors # Check for errors
for chunk in chunks: for chunk in chunks:
if not isinstance(chunk, dict): if not isinstance(chunk, dict):
__context__['retcode'] = 1 __context__[u'retcode'] = 1
return chunks return chunks
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat')) roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
roster_grains = roster.opts['grains'] roster_grains = roster.opts[u'grains']
# Create the tar containing the state pkg and relevant files. # Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar( trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__, __opts__,
__context__['fileclient'], __context__[u'fileclient'],
chunks, chunks,
file_refs, file_refs,
__pillar__, __pillar__,
st_kwargs['id_'], st_kwargs[u'id_'],
roster_grains) roster_grains)
trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__['hash_type']) trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__[u'hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format( cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'], __opts__[u'thin_dir'],
test, test,
trans_tar_sum, trans_tar_sum,
__opts__['hash_type']) __opts__[u'hash_type'])
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
cmd, cmd,
fsclient=__context__['fileclient'], fsclient=__context__[u'fileclient'],
minion_opts=__salt__.minion_opts, minion_opts=__salt__.minion_opts,
**st_kwargs) **st_kwargs)
single.shell.send( single.shell.send(
trans_tar, trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir'])) u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
stdout, stderr, _ = single.cmd_block() stdout, stderr, _ = single.cmd_block()
# Clean up our tar # Clean up our tar
@ -378,7 +380,7 @@ def highstate(test=None, **kwargs):
try: try:
return json.loads(stdout, object_hook=salt.utils.decode_dict) return json.loads(stdout, object_hook=salt.utils.decode_dict)
except Exception as e: except Exception as e:
log.error("JSON Render failed for: {0}\n{1}".format(stdout, stderr)) log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
log.error(str(e)) log.error(str(e))
# If for some reason the json load fails, return the stdout # If for some reason the json load fails, return the stdout
@ -397,55 +399,55 @@ def top(topfn, test=None, **kwargs):
salt '*' state.top reverse_top.sls exclude=sls_to_exclude salt '*' state.top reverse_top.sls exclude=sls_to_exclude
salt '*' state.top reverse_top.sls exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]" salt '*' state.top reverse_top.sls exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
''' '''
__pillar__.update(kwargs.get('pillar', {})) __pillar__.update(kwargs.get(u'pillar', {}))
st_kwargs = __salt__.kwargs st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
if salt.utils.test_mode(test=test, **kwargs): if salt.utils.test_mode(test=test, **kwargs):
__opts__['test'] = True __opts__[u'test'] = True
else: else:
__opts__['test'] = __opts__.get('test', None) __opts__[u'test'] = __opts__.get(u'test', None)
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
st_.opts['state_top'] = os.path.join('salt://', topfn) st_.opts[u'state_top'] = os.path.join(u'salt://', topfn)
chunks = st_.compile_low_chunks() chunks = st_.compile_low_chunks()
file_refs = salt.client.ssh.state.lowstate_file_refs( file_refs = salt.client.ssh.state.lowstate_file_refs(
chunks, chunks,
_merge_extra_filerefs( _merge_extra_filerefs(
kwargs.get('extra_filerefs', ''), kwargs.get(u'extra_filerefs', u''),
__opts__.get('extra_filerefs', '') __opts__.get(u'extra_filerefs', u'')
) )
) )
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat')) roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
roster_grains = roster.opts['grains'] roster_grains = roster.opts[u'grains']
# Create the tar containing the state pkg and relevant files. # Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar( trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__, __opts__,
__context__['fileclient'], __context__[u'fileclient'],
chunks, chunks,
file_refs, file_refs,
__pillar__, __pillar__,
st_kwargs['id_'], st_kwargs[u'id_'],
roster_grains) roster_grains)
trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__['hash_type']) trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__[u'hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format( cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'], __opts__[u'thin_dir'],
test, test,
trans_tar_sum, trans_tar_sum,
__opts__['hash_type']) __opts__[u'hash_type'])
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
cmd, cmd,
fsclient=__context__['fileclient'], fsclient=__context__[u'fileclient'],
minion_opts=__salt__.minion_opts, minion_opts=__salt__.minion_opts,
**st_kwargs) **st_kwargs)
single.shell.send( single.shell.send(
trans_tar, trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir'])) u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
stdout, stderr, _ = single.cmd_block() stdout, stderr, _ = single.cmd_block()
# Clean up our tar # Clean up our tar
@ -458,7 +460,7 @@ def top(topfn, test=None, **kwargs):
try: try:
return json.loads(stdout, object_hook=salt.utils.decode_dict) return json.loads(stdout, object_hook=salt.utils.decode_dict)
except Exception as e: except Exception as e:
log.error("JSON Render failed for: {0}\n{1}".format(stdout, stderr)) log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
log.error(str(e)) log.error(str(e))
# If for some reason the json load fails, return the stdout # If for some reason the json load fails, return the stdout
@ -475,12 +477,12 @@ def show_highstate():
salt '*' state.show_highstate salt '*' state.show_highstate
''' '''
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
return st_.compile_highstate() return st_.compile_highstate()
@ -494,16 +496,16 @@ def show_lowstate():
salt '*' state.show_lowstate salt '*' state.show_lowstate
''' '''
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
return st_.compile_low_chunks() return st_.compile_low_chunks()
def show_sls(mods, saltenv='base', test=None, **kwargs): def show_sls(mods, saltenv=u'base', test=None, **kwargs):
''' '''
Display the state data from a specific sls or list of sls files on the Display the state data from a specific sls or list of sls files on the
master master
@ -514,20 +516,20 @@ def show_sls(mods, saltenv='base', test=None, **kwargs):
salt '*' state.show_sls core,edit.vim dev salt '*' state.show_sls core,edit.vim dev
''' '''
__pillar__.update(kwargs.get('pillar', {})) __pillar__.update(kwargs.get(u'pillar', {}))
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
opts = copy.copy(__opts__) opts = copy.copy(__opts__)
if salt.utils.test_mode(test=test, **kwargs): if salt.utils.test_mode(test=test, **kwargs):
opts['test'] = True opts[u'test'] = True
else: else:
opts['test'] = __opts__.get('test', None) opts[u'test'] = __opts__.get(u'test', None)
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
if isinstance(mods, string_types): if isinstance(mods, six.string_types):
mods = mods.split(',') mods = mods.split(u',')
high_data, errors = st_.render_highstate({saltenv: mods}) high_data, errors = st_.render_highstate({saltenv: mods})
high_data, ext_errors = st_.state.reconcile_extend(high_data) high_data, ext_errors = st_.state.reconcile_extend(high_data)
errors += ext_errors errors += ext_errors
@ -543,7 +545,7 @@ def show_sls(mods, saltenv='base', test=None, **kwargs):
return high_data return high_data
def show_low_sls(mods, saltenv='base', test=None, **kwargs): def show_low_sls(mods, saltenv=u'base', test=None, **kwargs):
''' '''
Display the low state data from a specific sls or list of sls files on the Display the low state data from a specific sls or list of sls files on the
master. master.
@ -556,21 +558,21 @@ def show_low_sls(mods, saltenv='base', test=None, **kwargs):
salt '*' state.show_sls core,edit.vim dev salt '*' state.show_sls core,edit.vim dev
''' '''
__pillar__.update(kwargs.get('pillar', {})) __pillar__.update(kwargs.get(u'pillar', {}))
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
opts = copy.copy(__opts__) opts = copy.copy(__opts__)
if salt.utils.test_mode(test=test, **kwargs): if salt.utils.test_mode(test=test, **kwargs):
opts['test'] = True opts[u'test'] = True
else: else:
opts['test'] = __opts__.get('test', None) opts[u'test'] = __opts__.get(u'test', None)
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
if isinstance(mods, string_types): if isinstance(mods, six.string_types):
mods = mods.split(',') mods = mods.split(u',')
high_data, errors = st_.render_highstate({saltenv: mods}) high_data, errors = st_.render_highstate({saltenv: mods})
high_data, ext_errors = st_.state.reconcile_extend(high_data) high_data, ext_errors = st_.state.reconcile_extend(high_data)
errors += ext_errors errors += ext_errors
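The isinstance checks in show_sls and show_low_sls (and the CloudClient methods later in this diff) now go through six.string_types, which matches str on Python 3 and both str and unicode on Python 2, so a unicode mods or names value is still split on commas. A short sketch of the pattern, shown here with the standalone six package rather than Salt's vendored copy:

    import six

    def normalize_mods(mods):
        # Accept either u'core,edit.vim' or an already-split list
        if isinstance(mods, six.string_types):
            mods = mods.split(u',')
        return mods

    print(normalize_mods(u'core,edit.vim'))  # [u'core', u'edit.vim']
    print(normalize_mods(['core']))          # ['core']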
@ -597,12 +599,12 @@ def show_top():
salt '*' state.show_top salt '*' state.show_top
''' '''
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
st_ = salt.client.ssh.state.SSHHighState( st_ = salt.client.ssh.state.SSHHighState(
__opts__, __opts__,
__pillar__, __pillar__,
__salt__, __salt__,
__context__['fileclient']) __context__[u'fileclient'])
top_data = st_.get_top() top_data = st_.get_top()
errors = [] errors = []
errors += st_.verify_tops(top_data) errors += st_.verify_tops(top_data)
@ -632,30 +634,30 @@ def single(fun, name, test=None, **kwargs):
''' '''
st_kwargs = __salt__.kwargs st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__ __opts__[u'grains'] = __grains__
# state.fun -> [state, fun] # state.fun -> [state, fun]
comps = fun.split('.') comps = fun.split(u'.')
if len(comps) < 2: if len(comps) < 2:
__context__['retcode'] = 1 __context__[u'retcode'] = 1
return 'Invalid function passed' return u'Invalid function passed'
# Create the low chunk, using kwargs as a base # Create the low chunk, using kwargs as a base
kwargs.update({'state': comps[0], kwargs.update({u'state': comps[0],
'fun': comps[1], u'fun': comps[1],
'__id__': name, u'__id__': name,
'name': name}) u'name': name})
opts = copy.deepcopy(__opts__) opts = copy.deepcopy(__opts__)
# Set test mode # Set test mode
if salt.utils.test_mode(test=test, **kwargs): if salt.utils.test_mode(test=test, **kwargs):
opts['test'] = True opts[u'test'] = True
else: else:
opts['test'] = __opts__.get('test', None) opts[u'test'] = __opts__.get(u'test', None)
# Get the override pillar data # Get the override pillar data
__pillar__.update(kwargs.get('pillar', {})) __pillar__.update(kwargs.get(u'pillar', {}))
# Create the State environment # Create the State environment
st_ = salt.client.ssh.state.SSHState(__opts__, __pillar__) st_ = salt.client.ssh.state.SSHState(__opts__, __pillar__)
@ -663,7 +665,7 @@ def single(fun, name, test=None, **kwargs):
# Verify the low chunk # Verify the low chunk
err = st_.verify_data(kwargs) err = st_.verify_data(kwargs)
if err: if err:
__context__['retcode'] = 1 __context__[u'retcode'] = 1
return err return err
# Must be a list of low-chunks # Must be a list of low-chunks
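For orientation, the single() hunks above turn a 'state.fun' string plus keyword arguments into one low chunk before it is compiled and shipped. A rough sketch of the shape that update produces (the version argument is a hypothetical extra kwarg, not taken from the diff):

    fun = u'pkg.installed'
    name = u'vim'
    kwargs = {u'version': u'8.0.1234'}   # hypothetical caller-supplied kwarg
    comps = fun.split(u'.')              # [u'pkg', u'installed']
    kwargs.update({u'state': comps[0],
                   u'fun': comps[1],
                   u'__id__': name,
                   u'name': name})
    # kwargs is now a single low chunk:
    # {u'state': u'pkg', u'fun': u'installed', u'__id__': u'vim',
    #  u'name': u'vim', u'version': u'8.0.1234'}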
@ -674,46 +676,46 @@ def single(fun, name, test=None, **kwargs):
file_refs = salt.client.ssh.state.lowstate_file_refs( file_refs = salt.client.ssh.state.lowstate_file_refs(
chunks, chunks,
_merge_extra_filerefs( _merge_extra_filerefs(
kwargs.get('extra_filerefs', ''), kwargs.get(u'extra_filerefs', u''),
__opts__.get('extra_filerefs', '') __opts__.get(u'extra_filerefs', u'')
) )
) )
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat')) roster = salt.roster.Roster(__opts__, __opts__.get(u'roster', u'flat'))
roster_grains = roster.opts['grains'] roster_grains = roster.opts[u'grains']
# Create the tar containing the state pkg and relevant files. # Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar( trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__, __opts__,
__context__['fileclient'], __context__[u'fileclient'],
chunks, chunks,
file_refs, file_refs,
__pillar__, __pillar__,
st_kwargs['id_'], st_kwargs[u'id_'],
roster_grains) roster_grains)
# Create a hash so we can verify the tar on the target system # Create a hash so we can verify the tar on the target system
trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__['hash_type']) trans_tar_sum = salt.utils.get_hash(trans_tar, __opts__[u'hash_type'])
# We use state.pkg to execute the "state package" # We use state.pkg to execute the "state package"
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format( cmd = u'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'], __opts__[u'thin_dir'],
test, test,
trans_tar_sum, trans_tar_sum,
__opts__['hash_type']) __opts__[u'hash_type'])
# Create a salt-ssh Single object to actually do the ssh work # Create a salt-ssh Single object to actually do the ssh work
single = salt.client.ssh.Single( single = salt.client.ssh.Single(
__opts__, __opts__,
cmd, cmd,
fsclient=__context__['fileclient'], fsclient=__context__[u'fileclient'],
minion_opts=__salt__.minion_opts, minion_opts=__salt__.minion_opts,
**st_kwargs) **st_kwargs)
# Copy the tar down # Copy the tar down
single.shell.send( single.shell.send(
trans_tar, trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir'])) u'{0}/salt_state.tgz'.format(__opts__[u'thin_dir']))
# Run the state.pkg command on the target # Run the state.pkg command on the target
stdout, stderr, _ = single.cmd_block() stdout, stderr, _ = single.cmd_block()
@ -728,7 +730,7 @@ def single(fun, name, test=None, **kwargs):
try: try:
return json.loads(stdout, object_hook=salt.utils.decode_dict) return json.loads(stdout, object_hook=salt.utils.decode_dict)
except Exception as e: except Exception as e:
log.error("JSON Render failed for: {0}\n{1}".format(stdout, stderr)) log.error(u"JSON Render failed for: %s\n%s", stdout, stderr)
log.error(str(e)) log.error(str(e))
# If for some reason the json load fails, return the stdout # If for some reason the json load fails, return the stdout
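All of the wrapper functions in this file follow the same salt-ssh flow: compile the low chunks, pack them and any referenced files into salt_state.tgz, copy that tarball to the target's thin_dir, and run state.pkg there with a pkg_sum checksum so the remote side can verify the transfer. The verification itself boils down to hashing the file with the configured hash_type on both ends; a generic, self-contained sketch of that idea (not Salt's salt.utils.get_hash implementation, and the path is illustrative):

    import hashlib

    def file_hash(path, hash_type=u'sha256'):
        # Stream the file so a large state tarball is never fully in memory
        digest = hashlib.new(hash_type)
        with open(path, 'rb') as fh:
            for block in iter(lambda: fh.read(65536), b''):
                digest.update(block)
        return digest.hexdigest()

    # The master-side sum travels in the command as pkg_sum=...;
    # the target recomputes it on the received tarball and compares.
    # local_sum = file_hash('/tmp/salt_state.tgz')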

View File

@ -30,13 +30,12 @@ import salt.config
import salt.client import salt.client
import salt.loader import salt.loader
import salt.utils import salt.utils
import salt.utils.args
import salt.utils.cloud import salt.utils.cloud
import salt.utils.context
import salt.utils.dictupdate import salt.utils.dictupdate
import salt.utils.files import salt.utils.files
import salt.syspaths import salt.syspaths
from salt.utils import reinit_crypto
from salt.utils import context
from salt.ext.six import string_types
from salt.template import compile_template from salt.template import compile_template
# Import third party libs # Import third party libs
@ -48,7 +47,7 @@ except ImportError:
except ImportError: except ImportError:
pass # pycrypto < 2.1 pass # pycrypto < 2.1
import yaml import yaml
import salt.ext.six as six from salt.ext import six
from salt.ext.six.moves import input # pylint: disable=import-error,redefined-builtin from salt.ext.six.moves import input # pylint: disable=import-error,redefined-builtin
# Get logging started # Get logging started
@ -342,7 +341,7 @@ class CloudClient(object):
vm_overrides = {} vm_overrides = {}
kwargs['profile'] = profile kwargs['profile'] = profile
mapper = salt.cloud.Map(self._opts_defaults(**kwargs)) mapper = salt.cloud.Map(self._opts_defaults(**kwargs))
if isinstance(names, str): if isinstance(names, six.string_types):
names = names.split(',') names = names.split(',')
return salt.utils.simple_types_filter( return salt.utils.simple_types_filter(
mapper.run_profile(profile, names, vm_overrides=vm_overrides) mapper.run_profile(profile, names, vm_overrides=vm_overrides)
@ -367,7 +366,7 @@ class CloudClient(object):
Destroy the named VMs Destroy the named VMs
''' '''
mapper = salt.cloud.Map(self._opts_defaults(destroy=True)) mapper = salt.cloud.Map(self._opts_defaults(destroy=True))
if isinstance(names, str): if isinstance(names, six.string_types):
names = names.split(',') names = names.split(',')
return salt.utils.simple_types_filter( return salt.utils.simple_types_filter(
mapper.destroy(names) mapper.destroy(names)
@ -391,7 +390,7 @@ class CloudClient(object):
provider += ':{0}'.format(next(six.iterkeys(providers[provider]))) provider += ':{0}'.format(next(six.iterkeys(providers[provider])))
else: else:
return False return False
if isinstance(names, str): if isinstance(names, six.string_types):
names = names.split(',') names = names.split(',')
ret = {} ret = {}
for name in names: for name in names:
@ -433,7 +432,7 @@ class CloudClient(object):
provider += ':{0}'.format(next(six.iterkeys(providers[provider]))) provider += ':{0}'.format(next(six.iterkeys(providers[provider])))
else: else:
return False return False
if isinstance(names, str): if isinstance(names, six.string_types):
names = names.split(',') names = names.split(',')
ret = {} ret = {}
@ -625,7 +624,7 @@ class Cloud(object):
pmap[alias] = {} pmap[alias] = {}
try: try:
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
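The many with-blocks in this file keep the same func_globals_inject helper; only the import path changes from the bare context alias to the fully qualified salt.utils.context. Its job is to plant __active_provider_name__ into the driver function's globals for the duration of the call and then restore whatever was there before. A generic sketch of that technique, written from scratch rather than copied from Salt:

    import contextlib

    @contextlib.contextmanager
    def func_globals_inject(func, **overrides):
        missing = object()
        # Remember the values we are about to shadow
        saved = {k: func.__globals__.get(k, missing) for k in overrides}
        func.__globals__.update(overrides)
        try:
            yield
        finally:
            for key, old in saved.items():
                if old is missing:
                    func.__globals__.pop(key, None)
                else:
                    func.__globals__[key] = old

    # usage sketch:
    # with func_globals_inject(driver_fn, __active_provider_name__='my-ec2:ec2'):
    #     driver_fn(call='function')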
@ -708,7 +707,7 @@ class Cloud(object):
def get_running_by_names(self, names, query='list_nodes', cached=False, def get_running_by_names(self, names, query='list_nodes', cached=False,
profile=None): profile=None):
if isinstance(names, string_types): if isinstance(names, six.string_types):
names = [names] names = [names]
matches = {} matches = {}
@ -730,18 +729,9 @@ class Cloud(object):
continue continue
for vm_name, details in six.iteritems(vms): for vm_name, details in six.iteritems(vms):
# If VM was created with use_fqdn with either of the softlayer drivers,
# we need to strip the VM name and only search for the short hostname.
if driver == 'softlayer' or driver == 'softlayer_hw':
ret = []
for name in names:
name = name.split('.')[0]
ret.append(name)
if vm_name not in ret:
continue
# XXX: The logic below can be removed once the aws driver # XXX: The logic below can be removed once the aws driver
# is removed # is removed
elif vm_name not in names: if vm_name not in names:
continue continue
elif driver == 'ec2' and 'aws' in handled_drivers and \ elif driver == 'ec2' and 'aws' in handled_drivers and \
@ -827,7 +817,7 @@ class Cloud(object):
try: try:
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -870,7 +860,7 @@ class Cloud(object):
data[alias] = {} data[alias] = {}
try: try:
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -913,7 +903,7 @@ class Cloud(object):
data[alias] = {} data[alias] = {}
try: try:
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -1035,7 +1025,7 @@ class Cloud(object):
log.info('Destroying in non-parallel mode.') log.info('Destroying in non-parallel mode.')
for alias, driver, name in vms_to_destroy: for alias, driver, name in vms_to_destroy:
fun = '{0}.destroy'.format(driver) fun = '{0}.destroy'.format(driver)
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -1280,7 +1270,7 @@ class Cloud(object):
try: try:
alias, driver = vm_['provider'].split(':') alias, driver = vm_['provider'].split(':')
func = '{0}.create'.format(driver) func = '{0}.create'.format(driver)
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -1366,7 +1356,7 @@ class Cloud(object):
return return
try: try:
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=extra_['provider'] __active_provider_name__=extra_['provider']
): ):
@ -1514,7 +1504,7 @@ class Cloud(object):
invalid_functions[fun].append(vm_name) invalid_functions[fun].append(vm_name)
continue continue
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -1526,7 +1516,7 @@ class Cloud(object):
# Clean kwargs of "__pub_*" data before running the cloud action call. # Clean kwargs of "__pub_*" data before running the cloud action call.
# Prevents calling positional "kwarg" arg before "call" when no kwarg # Prevents calling positional "kwarg" arg before "call" when no kwarg
# argument is present in the cloud driver function's arg spec. # argument is present in the cloud driver function's arg spec.
kwargs = salt.utils.clean_kwargs(**kwargs) kwargs = salt.utils.args.clean_kwargs(**kwargs)
if kwargs: if kwargs:
ret[alias][driver][vm_name] = self.clouds[fun]( ret[alias][driver][vm_name] = self.clouds[fun](
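clean_kwargs only changes module here (salt.utils to salt.utils.args); as the comment explains, it strips the __pub_* bookkeeping keys the publisher injects before the remaining kwargs reach the cloud driver. A rough approximation of that filtering, assuming the behaviour is simply to drop double-underscore keys:

    def clean_kwargs(**kwargs):
        # Discard __pub_* and other double-underscore metadata added by Salt,
        # keeping only the arguments the caller actually passed
        return {key: val for key, val in kwargs.items()
                if not key.startswith('__')}

    print(clean_kwargs(size=u'small', __pub_user=u'root', __pub_jid=u'20170816'))
    # {'size': u'small'}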
@ -1598,7 +1588,7 @@ class Cloud(object):
) )
) )
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -1646,7 +1636,7 @@ class Cloud(object):
self.opts['providers'].pop(alias) self.opts['providers'].pop(alias)
continue continue
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
self.clouds[fun], self.clouds[fun],
__active_provider_name__=':'.join([alias, driver]) __active_provider_name__=':'.join([alias, driver])
): ):
@ -1815,7 +1805,7 @@ class Map(Cloud):
if isinstance(mapped, (list, tuple)): if isinstance(mapped, (list, tuple)):
entries = {} entries = {}
for mapping in mapped: for mapping in mapped:
if isinstance(mapping, string_types): if isinstance(mapping, six.string_types):
# Foo: # Foo:
# - bar1 # - bar1
# - bar2 # - bar2
@ -1855,7 +1845,7 @@ class Map(Cloud):
map_[profile] = entries map_[profile] = entries
continue continue
if isinstance(mapped, string_types): if isinstance(mapped, six.string_types):
# If it's a single string entry, let's make iterable because of # If it's a single string entry, let's make iterable because of
# the next step # the next step
mapped = [mapped] mapped = [mapped]
@ -2310,7 +2300,7 @@ def create_multiprocessing(parallel_data, queue=None):
This function will be called from another process when running a map in This function will be called from another process when running a map in
parallel mode. The result from the create is always a json object. parallel mode. The result from the create is always a json object.
''' '''
reinit_crypto() salt.utils.reinit_crypto()
parallel_data['opts']['output'] = 'json' parallel_data['opts']['output'] = 'json'
cloud = Cloud(parallel_data['opts']) cloud = Cloud(parallel_data['opts'])
@ -2342,14 +2332,14 @@ def destroy_multiprocessing(parallel_data, queue=None):
This function will be called from another process when running a map in This function will be called from another process when running a map in
parallel mode. The result from the destroy is always a json object. parallel mode. The result from the destroy is always a json object.
''' '''
reinit_crypto() salt.utils.reinit_crypto()
parallel_data['opts']['output'] = 'json' parallel_data['opts']['output'] = 'json'
clouds = salt.loader.clouds(parallel_data['opts']) clouds = salt.loader.clouds(parallel_data['opts'])
try: try:
fun = clouds['{0}.destroy'.format(parallel_data['driver'])] fun = clouds['{0}.destroy'.format(parallel_data['driver'])]
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
fun, fun,
__active_provider_name__=':'.join([ __active_provider_name__=':'.join([
parallel_data['alias'], parallel_data['alias'],
@ -2378,11 +2368,11 @@ def run_parallel_map_providers_query(data, queue=None):
This function will be called from another process when building the This function will be called from another process when building the
providers map. providers map.
''' '''
reinit_crypto() salt.utils.reinit_crypto()
cloud = Cloud(data['opts']) cloud = Cloud(data['opts'])
try: try:
with context.func_globals_inject( with salt.utils.context.func_globals_inject(
cloud.clouds[data['fun']], cloud.clouds[data['fun']],
__active_provider_name__=':'.join([ __active_provider_name__=':'.join([
data['alias'], data['alias'],
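The three parallel-map helpers above each start by re-initialising the crypto RNG, now via the fully qualified salt.utils.reinit_crypto(); a forked worker must not reuse its parent's PRNG state. The usual PyCrypto-era guard looks roughly like this (a general sketch, not necessarily Salt's exact implementation):

    try:
        from Crypto import Random
        HAS_CRYPTO = True
    except ImportError:
        HAS_CRYPTO = False

    def reinit_crypto():
        # Re-seed PyCrypto's RNG after fork so child processes do not
        # emit identical random streams
        if HAS_CRYPTO:
            Random.atfork()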

View File

@ -20,23 +20,24 @@ import logging
from salt.ext.six.moves import input from salt.ext.six.moves import input
# Import salt libs # Import salt libs
import salt.cloud
import salt.utils.cloud
import salt.config import salt.config
import salt.defaults.exitcodes import salt.defaults.exitcodes
import salt.output import salt.output
import salt.syspaths as syspaths
import salt.utils import salt.utils
from salt.utils import parsers import salt.utils.parsers
from salt.exceptions import SaltCloudException, SaltCloudSystemExit
from salt.utils.verify import check_user, verify_env, verify_files, verify_log from salt.utils.verify import check_user, verify_env, verify_files, verify_log
# Import salt.cloud libs # Import 3rd-party libs
import salt.cloud from salt.ext import six
import salt.utils.cloud
from salt.exceptions import SaltCloudException, SaltCloudSystemExit
import salt.ext.six as six
import salt.syspaths as syspaths
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
class SaltCloud(parsers.SaltCloudParser): class SaltCloud(salt.utils.parsers.SaltCloudParser):
def run(self): def run(self):
''' '''

View File

@ -50,7 +50,8 @@ from salt.exceptions import (
SaltCloudExecutionTimeout SaltCloudExecutionTimeout
) )
# Import Third Party Libs # Import 3rd-party libs
from salt.ext import six
try: try:
import requests import requests
HAS_REQUESTS = True HAS_REQUESTS = True
@ -745,7 +746,7 @@ def _compute_signature(parameters, access_key_secret):
''' '''
def percent_encode(line): def percent_encode(line):
if not isinstance(line, str): if not isinstance(line, six.string_types):
return line return line
s = line s = line

View File

@ -65,7 +65,7 @@ import salt.config as config
import salt.utils import salt.utils
import salt.utils.cloud import salt.utils.cloud
import salt.utils.files import salt.utils.files
import salt.ext.six as six from salt.ext import six
import salt.version import salt.version
from salt.exceptions import ( from salt.exceptions import (
SaltCloudSystemExit, SaltCloudSystemExit,

View File

@ -30,6 +30,8 @@ import logging
# Import salt cloud libs # Import salt cloud libs
import salt.config as config import salt.config as config
import salt.utils.cloud
import salt.utils.event
from salt.cloud.libcloudfuncs import * # pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import from salt.cloud.libcloudfuncs import * # pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import
from salt.utils import namespaced_function from salt.utils import namespaced_function
from salt.exceptions import SaltCloudSystemExit from salt.exceptions import SaltCloudSystemExit

View File

@ -46,9 +46,8 @@ from salt.exceptions import (
SaltCloudExecutionFailure, SaltCloudExecutionFailure,
SaltCloudExecutionTimeout SaltCloudExecutionTimeout
) )
import salt.ext.six as six from salt.ext import six
from salt.ext.six.moves import zip from salt.ext.six.moves import zip
from salt.ext.six import string_types
# Import Third Party Libs # Import Third Party Libs
try: try:
@ -209,7 +208,7 @@ def get_image(vm_):
vm_image = config.get_cloud_config_value( vm_image = config.get_cloud_config_value(
'image', vm_, __opts__, search_global=False 'image', vm_, __opts__, search_global=False
) )
if not isinstance(vm_image, string_types): if not isinstance(vm_image, six.string_types):
vm_image = str(vm_image) vm_image = str(vm_image)
for image in images: for image in images:

Some files were not shown because too many files have changed in this diff.