Mirror of https://github.com/valitydev/salt.git

Merge branch '2018.3' into 'develop'

Conflicts:
- doc/conf.py
- salt/modules/cmdmod.py
- salt/utils/schedule.py
- tests/integration/scheduler/test_eval.py

Commit 6927642321
@@ -46,6 +46,8 @@ class Mock(object):
             data = self.__mapping.get(name)
         elif name in ('__file__', '__path__'):
             data = '/dev/null'
+        elif name == '__qualname__':
+            raise AttributeError("'Mock' object has no attribute '__qualname__'")
         else:
             data = Mock(mapping=self.__mapping)
         return data
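The two added lines matter because introspection tools such as Sphinx's autodoc look up ``__qualname__`` on Python 3 and expect a string; a mock that fabricates a child ``Mock`` for every attribute would hand back a non-string and break introspection. A hedged, simplified sketch of the failure mode this guards against (not the full ``Mock`` class from the diff):

.. code-block:: python

    class Mock(object):
        '''Stand-in that fabricates an object for any missing attribute.'''
        def __getattr__(self, name):
            if name == '__qualname__':
                # Introspection expects a real string here; returning a
                # Mock would crash it, so refuse the attribute instead.
                raise AttributeError(
                    "'Mock' object has no attribute '__qualname__'")
            return Mock()


    m = Mock()
    print(getattr(m, '__qualname__', 'missing'))  # prints: missing
    print(m.anything_else)                        # prints a Mock instance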
@@ -277,6 +277,7 @@ execution modules
     napalm_users
     napalm_yang_mod
     netaddress
+    netbox
     netbsd_sysctl
     netbsdservice
     netscaler
doc/ref/modules/all/salt.modules.netbox.rst (new file, 7 lines)

@@ -0,0 +1,7 @@
==========================
salt.modules.netbox module
==========================

.. automodule:: salt.modules.netbox
    :members:
@@ -223,7 +223,7 @@ branches, and dot release branches.
 .. note::

     GitHub will open pull requests against Salt's main branch, ``develop``,
-    byndefault. Be sure to check which branch is selected when creating the
+    by default. Be sure to check which branch is selected when creating the
     pull request.

 The Develop Branch
@@ -1258,7 +1258,7 @@ target platform, and any other installation or usage instructions or tips.

 A sample skeleton for the ``README.rst`` file:

-.. code-block:: rest
+.. code-block:: restructuredtext

     ===
     foo
@@ -1269,7 +1269,7 @@ A sample skeleton for the ``README.rst`` file:
 .. note::

     See the full `Salt Formulas installation and usage instructions
-    <http://docs.saltstack.com/en/latest/topics/development/conventions/formulas.html>`_.
+    <https://docs.saltstack.com/en/latest/topics/development/conventions/formulas.html>`_.

 Available states
 ================
@@ -1298,7 +1298,7 @@ A sample skeleton for the `CHANGELOG.rst` file:

 :file:`CHANGELOG.rst`:

-.. code-block:: rest
+.. code-block:: restructuredtext

     foo formula
     ===========
@@ -1384,7 +1384,7 @@ Example:

 .. code-block:: jinja

-    {{ 'www.google.com' | dns_check }}
+    {{ 'www.google.com' | dns_check(port=443) }}

 Returns:
@@ -2,7 +2,7 @@
 Salt 2016.11.9 Release Notes
 ============================

-Version 2016.11.9 is a bugfix release for :ref:`2016.11.0 <release-2016-11-0>`.]
+Version 2016.11.9 is a bugfix release for :ref:`2016.11.0 <release-2016-11-0>`.

 Changes for v2016.11.8..v2016.11.9
 ----------------------------------------------------------------
doc/topics/releases/2017.7.4.rst (new file, 80 lines)

@@ -0,0 +1,80 @@
===========================
Salt 2017.7.4 Release Notes
===========================

Version 2017.7.4 is a bugfix release for :ref:`2017.7.0 <release-2017-7-0>`.

Changes for v2017.7.3..v2017.7.4
---------------------------------------------------------------

Extended changelog courtesy of Todd Stansell (https://github.com/tjstansell/salt-changelogs):

*Generated at: 2018-02-13T16:29:07Z*

Statistics:

- Total Merges: **4**
- Total Issue references: **3**
- Total PR references: **7**

Changes:

- **PR** `#45981`_: (*gtmanfred*) use local config for vault when masterless
  @ *2018-02-13T15:22:01Z*

  - **ISSUE** `#45976`_: (*grobinson-blockchain*) 6a5e0f9 introduces regression that breaks Vault module for salt masterless
    | refs: `#45981`_
  * ca76a0b328 Merge pull request `#45981`_ from gtmanfred/2017.7.3
  * 0d448457dc apparently local is not set by default
  * 2a92f4bc16 use local config for vault when masterless

- **PR** `#45953`_: (*rallytime*) Back-port `#45928`_ to 2017.7.3
  @ *2018-02-09T22:29:10Z*

  - **ISSUE** `#45915`_: (*MatthiasKuehneEllerhold*) 2017.7.3: Salt-SSH & Vault Pillar: Permission denied "minion.pem"
    | refs: `#45928`_
  - **PR** `#45928`_: (*garethgreenaway*) [2017.7] Fixing vault when used with pillar over salt-ssh
    | refs: `#45953`_
  * 6530649dbc Merge pull request `#45953`_ from rallytime/`bp-45928`_-2017.7.3
  * 85363189d1 Fixing vault when used with pillar over salt-ssh

- **PR** `#45934`_: (*rallytime*) Back-port `#45902`_ to 2017.7.3
  @ *2018-02-09T16:31:08Z*

  - **ISSUE** `#45893`_: (*CrackerJackMack*) archive.extracted ValueError "No path specified" in 2017.7.3
    | refs: `#45902`_
  - **PR** `#45902`_: (*terminalmage*) Check the effective saltenv for cached archive
    | refs: `#45934`_
  * fb378cebb0 Merge pull request `#45934`_ from rallytime/`bp-45902`_
  * bb83e8b345 Add regression test for issue 45893
  * cdda66d759 Remove duplicated section in docstring and fix example
  * 4b6351cda6 Check the effective saltenv for cached archive

- **PR** `#45935`_: (*rallytime*) Back-port `#45742`_ to 2017.7.3
  @ *2018-02-09T14:02:26Z*

  - **PR** `#45742`_: (*marccardinal*) list.copy() is not compatible with python 2.7
    | refs: `#45935`_
  * 0d74151c71 Merge pull request `#45935`_ from rallytime/`bp-45742`_
  * 6a0b5f7af3 Removed the chained copy
  * ad1150fad4 list.copy() is not compatible with python 2.7

.. _`#45742`: https://github.com/saltstack/salt/pull/45742
.. _`#45893`: https://github.com/saltstack/salt/issues/45893
.. _`#45902`: https://github.com/saltstack/salt/pull/45902
.. _`#45915`: https://github.com/saltstack/salt/issues/45915
.. _`#45928`: https://github.com/saltstack/salt/pull/45928
.. _`#45934`: https://github.com/saltstack/salt/pull/45934
.. _`#45935`: https://github.com/saltstack/salt/pull/45935
.. _`#45953`: https://github.com/saltstack/salt/pull/45953
.. _`#45976`: https://github.com/saltstack/salt/issues/45976
.. _`#45981`: https://github.com/saltstack/salt/pull/45981
.. _`bp-45742`: https://github.com/saltstack/salt/pull/45742
.. _`bp-45902`: https://github.com/saltstack/salt/pull/45902
.. _`bp-45928`: https://github.com/saltstack/salt/pull/45928
@@ -79,7 +79,7 @@ with release Neon.

 The functions have been moved as follows:

 - ``salt.utils.appendproctitle``: use ``salt.utils.process.appendproctitle``
   instead.
 - ``salt.utils.daemonize``: use ``salt.utils.process.daemonize`` instead.
 - ``salt.utils.daemonize_if``: use ``salt.utils.process.daemonize_if`` instead.

@@ -94,22 +94,22 @@ The functions have been moved as follows:

 - ``salt.utils.is_hex``: use ``salt.utils.stringutils.is_hex`` instead.
 - ``salt.utils.is_bin_str``: use ``salt.utils.stringutils.is_bin_str`` instead.
 - ``salt.utils.rand_string``: use ``salt.utils.stringutils.random`` instead.
 - ``salt.utils.contains_whitespace``: use
   ``salt.utils.stringutils.contains_whitespace`` instead.
 - ``salt.utils.build_whitespace_split_regex``: use
   ``salt.utils.stringutils.build_whitespace_split_regex`` instead.
 - ``salt.utils.expr_match``: use ``salt.utils.stringutils.expr_match`` instead.
 - ``salt.utils.check_whitelist_blacklist``: use
   ``salt.utils.stringutils.check_whitelist_blacklist`` instead.
 - ``salt.utils.check_include_exclude``: use
   ``salt.utils.stringutils.check_include_exclude`` instead.
 - ``salt.utils.print_cli``: use ``salt.utils.stringutils.print_cli`` instead.
 - ``salt.utils.clean_kwargs``: use ``salt.utils.args.clean_kwargs`` instead.
 - ``salt.utils.invalid_kwargs``: use ``salt.utils.args.invalid_kwargs``
   instead.
 - ``salt.utils.shlex_split``: use ``salt.utils.args.shlex_split`` instead.
 - ``salt.utils.arg_lookup``: use ``salt.utils.args.arg_lookup`` instead.
 - ``salt.utils.argspec_report``: use ``salt.utils.args.argspec_report``
   instead.
 - ``salt.utils.split_input``: use ``salt.utils.args.split_input`` instead.
 - ``salt.utils.test_mode``: use ``salt.utils.args.test_mode`` instead.

@@ -118,7 +118,7 @@ The functions have been moved as follows:

 - ``salt.utils.which_bin``: use ``salt.utils.path.which_bin`` instead.
 - ``salt.utils.path_join``: use ``salt.utils.path.join`` instead.
 - ``salt.utils.check_or_die``: use ``salt.utils.path.check_or_die`` instead.
 - ``salt.utils.sanitize_win_path_string``: use
   ``salt.utils.path.sanitize_win_path`` instead.
 - ``salt.utils.rand_str``: use ``salt.utils.hashutils.random_hash`` instead.
 - ``salt.utils.get_hash``: use ``salt.utils.hashutils.get_hash`` instead.

@@ -128,9 +128,9 @@ The functions have been moved as follows:

 - ``salt.utils.is_darwin``: use ``salt.utils.platform.is_darwin`` instead.
 - ``salt.utils.is_sunos``: use ``salt.utils.platform.is_sunos`` instead.
 - ``salt.utils.is_smartos``: use ``salt.utils.platform.is_smartos`` instead.
 - ``salt.utils.is_smartos_globalzone``: use
   ``salt.utils.platform.is_smartos_globalzone`` instead.
 - ``salt.utils.is_smartos_zone``: use ``salt.utils.platform.is_smartos_zone``
   instead.
 - ``salt.utils.is_freebsd``: use ``salt.utils.platform.is_freebsd`` instead.
 - ``salt.utils.is_netbsd``: use ``salt.utils.platform.is_netbsd`` instead.

@@ -147,55 +147,55 @@ The functions have been moved as follows:

 - ``salt.utils.is_bin_file``: use ``salt.utils.files.is_binary`` instead.
 - ``salt.utils.list_files``: use ``salt.utils.files.list_files`` instead.
 - ``salt.utils.safe_walk``: use ``salt.utils.files.safe_walk`` instead.
 - ``salt.utils.st_mode_to_octal``: use ``salt.utils.files.st_mode_to_octal``
   instead.
 - ``salt.utils.normalize_mode``: use ``salt.utils.files.normalize_mode``
   instead.
 - ``salt.utils.human_size_to_bytes``: use
   ``salt.utils.files.human_size_to_bytes`` instead.
 - ``salt.utils.backup_minion``: use ``salt.utils.files.backup_minion`` instead.
 - ``salt.utils.str_version_to_evr``: use ``salt.utils.pkg.rpm.version_to_evr``
   instead.
 - ``salt.utils.parse_docstring``: use ``salt.utils.doc.parse_docstring``
   instead.
 - ``salt.utils.compare_versions``: use ``salt.utils.versions.compare`` instead.
 - ``salt.utils.version_cmp``: use ``salt.utils.versions.version_cmp`` instead.
 - ``salt.utils.warn_until``: use ``salt.utils.versions.warn_until`` instead.
 - ``salt.utils.kwargs_warn_until``: use
   ``salt.utils.versions.kwargs_warn_until`` instead.
 - ``salt.utils.get_color_theme``: use ``salt.utils.color.get_color_theme``
   instead.
 - ``salt.utils.get_colors``: use ``salt.utils.color.get_colors`` instead.
 - ``salt.utils.gen_state_tag``: use ``salt.utils.state.gen_tag`` instead.
 - ``salt.utils.search_onfail_requisites``: use
   ``salt.utils.state.search_onfail_requisites`` instead.
 - ``salt.utils.check_state_result``: use ``salt.utils.state.check_result``
   instead.
 - ``salt.utils.get_user``: use ``salt.utils.user.get_user`` instead.
 - ``salt.utils.get_uid``: use ``salt.utils.user.get_uid`` instead.
 - ``salt.utils.get_specific_user``: use ``salt.utils.user.get_specific_user``
   instead.
 - ``salt.utils.chugid``: use ``salt.utils.user.chugid`` instead.
 - ``salt.utils.chugid_and_umask``: use ``salt.utils.user.chugid_and_umask``
   instead.
 - ``salt.utils.get_default_group``: use ``salt.utils.user.get_default_group``
   instead.
 - ``salt.utils.get_group_list``: use ``salt.utils.user.get_group_list``
   instead.
 - ``salt.utils.get_group_dict``: use ``salt.utils.user.get_group_dict``
   instead.
 - ``salt.utils.get_gid_list``: use ``salt.utils.user.get_gid_list`` instead.
 - ``salt.utils.get_gid``: use ``salt.utils.user.get_gid`` instead.
 - ``salt.utils.enable_ctrl_logoff_handler``: use
   ``salt.utils.win_functions.enable_ctrl_logoff_handler`` instead.
 - ``salt.utils.traverse_dict``: use ``salt.utils.data.traverse_dict`` instead.
 - ``salt.utils.traverse_dict_and_list``: use
   ``salt.utils.data.traverse_dict_and_list`` instead.
 - ``salt.utils.filter_by``: use ``salt.utils.data.filter_by`` instead.
 - ``salt.utils.subdict_match``: use ``salt.utils.data.subdict_match`` instead.
 - ``salt.utils.substr_in_list``: use ``salt.utils.data.substr_in_list`` instead.
 - ``salt.utils.is_dictlist``: use ``salt.utils.data.is_dictlist``.
 - ``salt.utils.repack_dictlist``: use ``salt.utils.data.repack_dictlist``
   instead.
 - ``salt.utils.compare_dicts``: use ``salt.utils.data.compare_dicts`` instead.
 - ``salt.utils.compare_lists``: use ``salt.utils.data.compare_lists`` instead.

@@ -208,33 +208,33 @@ The functions have been moved as follows:

 - ``salt.utils.isorted``: use ``salt.utils.data.sorted_ignorecase`` instead.
 - ``salt.utils.is_true``: use ``salt.utils.data.is_true`` instead.
 - ``salt.utils.mysql_to_dict``: use ``salt.utils.data.mysql_to_dict`` instead.
 - ``salt.utils.simple_types_filter``: use
   ``salt.utils.data.simple_types_filter`` instead.
 - ``salt.utils.ip_bracket``: use ``salt.utils.zeromq.ip_bracket`` instead.
 - ``salt.utils.gen_mac``: use ``salt.utils.network.gen_mac`` instead.
 - ``salt.utils.mac_str_to_bytes``: use ``salt.utils.network.mac_str_to_bytes``
   instead.
 - ``salt.utils.refresh_dns``: use ``salt.utils.network.refresh_dns`` instead.
 - ``salt.utils.dns_check``: use ``salt.utils.network.dns_check`` instead.
-- ``salt.utils.get_context``: use ``salt.utils.templates.get_context`` instead.
+- ``salt.utils.get_context``: use ``salt.utils.stringutils.get_context`` instead.
 - ``salt.utils.get_master_key``: use ``salt.utils.master.get_master_key``
   instead.
 - ``salt.utils.get_values_of_matching_keys``: use
   ``salt.utils.master.get_values_of_matching_keys`` instead.
 - ``salt.utils.date_cast``: use ``salt.utils.dateutils.date_cast`` instead.
 - ``salt.utils.date_format``: use ``salt.utils.dateutils.strftime`` instead.
 - ``salt.utils.total_seconds``: use ``salt.utils.dateutils.total_seconds``
   instead.
 - ``salt.utils.find_json``: use ``salt.utils.json.find_json`` instead.
 - ``salt.utils.import_json``: use ``salt.utils.json.import_json`` instead.
 - ``salt.utils.namespaced_function``: use
   ``salt.utils.functools.namespaced_function`` instead.
 - ``salt.utils.alias_function``: use ``salt.utils.functools.alias_function``
   instead.
 - ``salt.utils.profile_func``: use ``salt.utils.profile.profile_func`` instead.
 - ``salt.utils.activate_profile``: use ``salt.utils.profile.activate_profile``
   instead.
 - ``salt.utils.output_profile``: use ``salt.utils.profile.output_profile``
   instead.

 State and Execution Module Support for ``docker run`` Functionality
@@ -302,25 +302,30 @@ can define multiple versions for the same piece of software. The lines following
(resulting text; the pre-merge wording was a near-verbatim duplicate and is omitted)

the version are indented two more spaces and contain all the information needed
to install that package.

.. warning::
    The package name and the ``full_name`` must be unique to all other packages
    in the software repository.

The version line is the version for the package to be installed. It is used when
you need to install a specific version of a piece of software.

.. warning::
    The version must be enclosed in quotes, otherwise the yaml parser will
    remove trailing zeros.

.. note::
    There are unique situations where previous versions are unavailable. Take
    Google Chrome for example. There is only one url provided for a standalone
    installation of Google Chrome.
    (https://dl.google.com/edgedl/chrome/install/GoogleChromeStandaloneEnterprise.msi)

    When a new version is released, the url just points to the new version. To
    handle situations such as these, set the version to `latest`. Salt will
    install the version of Chrome at the URL and report that version. Here's an
    example:

    .. code-block:: yaml

        chrome:
          latest:

@@ -335,200 +340,237 @@ you need to install a specific version of a piece of software.

Available parameters are as follows:

:param str full_name:
    The Full Name for the software as shown in "Programs and Features" in the
    control panel. You can also get this information by installing the package
    manually and then running ``pkg.list_pkgs``. Here's an example of the output
    from ``pkg.list_pkgs``:

    .. code-block:: bash

        salt 'test-2008' pkg.list_pkgs
        test-2008
            ----------
            7-Zip 9.20 (x64 edition):
                9.20.00.0
            Microsoft .NET Framework 4 Client Profile:
                4.0.30319,4.0.30319
            Microsoft .NET Framework 4 Extended:
                4.0.30319,4.0.30319
            Microsoft Visual C++ 2008 Redistributable - x64 9.0.21022:
                9.0.21022
            Mozilla Firefox 17.0.1 (x86 en-US):
                17.0.1
            Mozilla Maintenance Service:
                17.0.1
            NSClient++ (x64):
                0.3.8.76
            Notepad++:
                6.4.2
            Salt Minion 0.16.0:
                0.16.0

    Notice the Full Name for Firefox: ``Mozilla Firefox 17.0.0 (x86 en-US)``.
    That's exactly what's in the ``full_name`` parameter in the software
    definition file.

    If any of the software installed on the machine matches one of the software
    definition files in the repository, the full_name will be automatically
    renamed to the package name. The example below shows the ``pkg.list_pkgs``
    for a machine that already has Mozilla Firefox 17.0.1 installed.

    .. code-block:: bash

        test-2008:
            ----------
            7zip:
                9.20.00.0
            Microsoft .NET Framework 4 Client Profile:
                4.0.30319,4.0.30319
            Microsoft .NET Framework 4 Extended:
                4.0.30319,4.0.30319
            Microsoft Visual C++ 2008 Redistributable - x64 9.0.21022:
                9.0.21022
            Mozilla Maintenance Service:
                17.0.1
            Notepad++:
                6.4.2
            Salt Minion 0.16.0:
                0.16.0
            firefox:
                17.0.1
            nsclient:
                0.3.9.328

    .. important::
        The version number and ``full_name`` need to match the output from
        ``pkg.list_pkgs`` so that the status can be verified when running a
        highstate.

    .. note::
        It is still possible to successfully install packages using
        ``pkg.install``, even if the ``full_name`` or the version number don't
        match. However, this can make troubleshooting issues difficult, so be
        careful.

    .. tip::
        To force salt to display the full name when there's already an existing
        package definition file on the system, you can pass a bogus ``saltenv``
        parameter to the command like so: ``pkg.list_pkgs saltenv=NotARealEnv``

:param str installer:
    The path to the ``.exe`` or ``.msi`` to use to install the package. This can
    be a path or a URL. If it is a URL or a salt path (``salt://``), the package
    will be cached locally and then executed. If it is a path to a file on disk
    or a file share, it will be executed directly.

    .. note::
        If storing software in the same location as the winrepo it is best
        practice to place each installer in its own directory rather than the
        root of winrepo. Then you can place your package definition file in the
        same directory. It is best practice to name the file ``init.sls``. This
        will be picked up by ``pkg.refresh_db`` and processed properly.

:param str install_flags:
    Any flags that need to be passed to the installer to make it perform a
    silent install. These can often be found by adding ``/?`` or ``/h`` when
    running the installer from the command-line. A great resource for finding
    these silent install flags can be found on the WPKG project's wiki_:

    .. warning::
        Salt will not return if the installer is waiting for user input so it is
        imperative that the software package being installed has the ability to
        install silently.

:param str uninstaller:
    The path to the program used to uninstall this software. This can be the
    path to the same `exe` or `msi` used to install the software. It can also be
    a GUID. You can find this value in the registry under the following keys:

    - Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall
    - Software\\Wow6432None\\Microsoft\\Windows\\CurrentVersion\\Uninstall

:param str uninstall_flags:
    Any flags that need to be passed to the uninstaller to make it perform a
    silent uninstall. These can often be found by adding ``/?`` or ``/h`` when
    running the uninstaller from the command-line. A great resource for finding
    these silent install flags can be found on the WPKG project's wiki_:

    .. warning::
        Salt will not return if the uninstaller is waiting for user input so it
        is imperative that the software package being uninstalled has the
        ability to uninstall silently.

Here are some examples of installer and uninstaller settings:

.. code-block:: yaml

    7zip:
      '9.20.00.0':
        installer: salt://win/repo/7zip/7z920-x64.msi
        full_name: 7-Zip 9.20 (x64 edition)
        reboot: False
        install_flags: '/qn /norestart'
        msiexec: True
        uninstaller: '{23170F69-40C1-2702-0920-000001000000}'
        uninstall_flags: '/qn /norestart'

Alternatively the ``uninstaller`` can also simply repeat the URL of an msi
file:

.. code-block:: yaml

    7zip:
      '9.20.00.0':
        installer: salt://win/repo/7zip/7z920-x64.msi
        full_name: 7-Zip 9.20 (x64 edition)
        reboot: False
        install_flags: '/qn /norestart'
        msiexec: True
        uninstaller: salt://win/repo/7zip/7z920-x64.msi
        uninstall_flags: '/qn /norestart'

:param msiexec:
    This tells salt to use ``msiexec /i`` to install the package and
    ``msiexec /x`` to uninstall. This is for ``.msi`` installations. Possible
    options are: True, False or the path to ``msiexec.exe`` on your system

    .. code-block:: yaml

        7zip:
          '9.20.00.0':
            installer: salt://win/repo/7zip/7z920-x64.msi
            full_name: 7-Zip 9.20 (x64 edition)
            reboot: False
            install_flags: '/qn /norestart'
            msiexec: 'C:\Windows\System32\msiexec.exe'
            uninstaller: salt://win/repo/7zip/7z920-x64.msi
            uninstall_flags: '/qn /norestart'

:param str arch:
    This selects which ``msiexec.exe`` to use. Possible values: ``x86``,
    ``x64``

:param bool allusers:
    This parameter is specific to ``.msi`` installations. It tells ``msiexec``
    to install the software for all users. The default is ``True``.

:param bool cache_dir:
    If ``True`` and the installer URL begins with ``salt://``, the entire
    directory where the installer resides will be recursively cached. This is
    useful for installers that depend on other files in the same directory for
    installation.

    .. warning::
        Be aware that all files and directories in the same location as the
        installer file will be copied down to the minion. If you place your
        installer file in the root of winrepo (``/srv/salt/win/repo-ng``) and
        ``cache_dir: True`` the entire contents of winrepo will be cached to
        the minion. Therefore, it is best practice to place your installer files
        in a subdirectory if they are to be stored in winrepo.

:param str cache_file:
    When the installer URL begins with ``salt://``, this indicates a single file
    to copy down for use with the installer. It is copied to the same location
    as the installer. Use this over ``cache_dir`` if there are many files in the
    directory and you only need a specific file and don't want to cache
    additional files that may reside in the installer directory.

    Here's an example for a software package that has dependent files:

    .. code-block:: yaml

        sqlexpress:
          '12.0.2000.8':
            installer: 'salt://win/repo/sqlexpress/setup.exe'
            full_name: Microsoft SQL Server 2014 Setup (English)
            reboot: False
            install_flags: '/ACTION=install /IACCEPTSQLSERVERLICENSETERMS /Q'
            cache_dir: True

:param bool use_scheduler:
    If ``True``, Windows will use the task scheduler to run the installation.
    This is useful for running the Salt installation itself as the installation
    process kills any currently running instances of Salt.

:param str source_hash:
    This tells Salt to compare a hash sum of the installer to the provided hash
    sum before execution. The value can be formatted as
    ``<hash_algorithm>=<hash_sum>``, or it can be a URI to a file containing the
    hash sum.

    For a list of supported algorithms, see the `hashlib documentation
    <https://docs.python.org/2/library/hashlib.html>`_.

    Here's an example of source_hash usage:

    .. code-block:: yaml

        messageanalyzer:
          '4.0.7551.0':
            full_name: 'Microsoft Message Analyzer'
            installer: 'salt://win/repo/messageanalyzer/MessageAnalyzer64.msi'
            install_flags: '/quiet /norestart'
            uninstaller: '{1CC02C23-8FCD-487E-860C-311EC0A0C933}'
            uninstall_flags: '/quiet /norestart'
            msiexec: True
            source_hash: 'sha1=62875ff451f13b10a8ff988f2943e76a4735d3d4'

:param bool reboot: Not implemented
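To make the ``source_hash`` contract above concrete, here is a hedged, standard-library sketch of checking an ``<hash_algorithm>=<hash_sum>`` string against a downloaded installer. This is an illustration, not Salt's actual implementation; the function name is hypothetical:

.. code-block:: python

    import hashlib


    def verify_source_hash(path, source_hash):
        '''
        Check a file against a ``<hash_algorithm>=<hash_sum>`` string such as
        ``sha1=62875ff451f13b10a8ff988f2943e76a4735d3d4``.
        '''
        algo, _, expected = source_hash.partition('=')
        digest = hashlib.new(algo)  # ValueError for unsupported algorithms
        with open(path, 'rb') as fh:
            # Read in chunks so large installers don't load into memory.
            for chunk in iter(lambda: fh.read(65536), b''):
                digest.update(chunk)
        return digest.hexdigest() == expected.lower()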
@@ -3,6 +3,6 @@ msgpack-python>0.3
 PyYAML
 MarkupSafe
 requests>=1.0.0
-tornado>=4.2.1
+tornado>=4.2.1,<5.0
 # Required by Tornado to handle threads stuff.
 futures>=2.0
salt/cache/localfs.py (vendored, 14 changed lines)

@@ -14,6 +14,7 @@ from __future__ import absolute_import, print_function, unicode_literals
 import logging
 import os
 import os.path
+import errno
 import shutil
 import tempfile

@@ -45,13 +46,14 @@ def store(bank, key, data, cachedir):
     Store information in a file.
     '''
     base = os.path.join(cachedir, os.path.normpath(bank))
-    if not os.path.isdir(base):
-        try:
-            os.makedirs(base)
-        except OSError as exc:
+    try:
+        os.makedirs(base)
+    except OSError as exc:
+        if exc.errno != errno.EEXIST:
             raise SaltCacheError(
-                'The cache directory, {0}, does not exist and could not be '
-                'created: {1}'.format(base, exc)
+                'The cache directory, {0}, could not be created: {1}'.format(
+                    base, exc
+                )
             )

     outfile = os.path.join(base, '{0}.p'.format(key))
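The fix above replaces a check-then-create sequence, which races when two processes hit the cache at once, with an EAFP pattern: on Python 2 ``os.makedirs`` has no ``exist_ok``, so concurrent creation is disambiguated by inspecting ``errno``. A minimal standalone sketch of the same idiom (the function and path are illustrative, not from the Salt codebase):

.. code-block:: python

    import errno
    import os


    def ensure_dir(path):
        '''
        Create ``path`` (and parents) if missing, tolerating concurrent
        creation by another process. Equivalent to
        ``os.makedirs(path, exist_ok=True)`` on Python 3, but also works
        on Python 2.
        '''
        try:
            os.makedirs(path)
        except OSError as exc:
            # EEXIST means another process won the race; anything else
            # (EACCES, ENOSPC, ...) is a real failure and is re-raised.
            if exc.errno != errno.EEXIST:
                raise


    ensure_dir('/tmp/salt-cache-demo/bank')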
@@ -119,11 +119,12 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin):  # pylint: di
     Creates a master server
     '''
     def _handle_signals(self, signum, sigframe):  # pylint: disable=unused-argument
-        # escalate signal to the process manager processes
-        self.master.process_manager.stop_restarting()
-        self.master.process_manager.send_signal_to_processes(signum)
-        # kill any remaining processes
-        self.master.process_manager.kill_children()
+        if hasattr(self.master, 'process_manager'):  # IofloMaster has no process manager
+            # escalate signal to the process manager processes
+            self.master.process_manager.stop_restarting()
+            self.master.process_manager.send_signal_to_processes(signum)
+            # kill any remaining processes
+            self.master.process_manager.kill_children()
         super(Master, self)._handle_signals(signum, sigframe)

     def prepare(self):

@@ -151,7 +152,6 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin):  # pylint: di
             os.path.join(self.config['cachedir'], 'jobs'),
             os.path.join(self.config['cachedir'], 'proc'),
             self.config['sock_dir'],
-            self.config['key_dir'],
             self.config['token_dir'],
             self.config['syndic_dir'],
             self.config['sqlite_queue_dir'],

@@ -166,7 +166,7 @@ class Master(salt.utils.parsers.MasterOptionParser, DaemonsMixin):  # pylint: di
                 self.config['user'],
                 permissive=self.config['permissive_pki_access'],
                 root_dir=self.config['root_dir'],
-                sensitive_dirs=[self.config['pki_dir'], self.config['key_dir']],
+                pki_dir=self.config['pki_dir'],
             )
             # Clear out syndics from cachedir
             for syndic_file in os.listdir(self.config['syndic_dir']):

@@ -234,7 +234,8 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin):  # pylint: di

     def _handle_signals(self, signum, sigframe):  # pylint: disable=unused-argument
         # escalate signal to the process manager processes
-        self.minion.stop(signum)
+        if hasattr(self.minion, 'stop'):
+            self.minion.stop(signum)
         super(Minion, self)._handle_signals(signum, sigframe)

     # pylint: disable=no-member

@@ -287,7 +288,7 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin):  # pylint: di
                 self.config['user'],
                 permissive=self.config['permissive_pki_access'],
                 root_dir=self.config['root_dir'],
-                sensitive_dirs=[self.config['pki_dir']],
+                pki_dir=self.config['pki_dir'],
             )
         except OSError as error:
             self.environment_failure(error)

@@ -392,7 +393,7 @@ class Minion(salt.utils.parsers.MinionOptionParser, DaemonsMixin):  # pylint: di
         :param exitmsg
         '''
         self.action_log_info('Shutting down')
-        if hasattr(self, 'minion'):
+        if hasattr(self, 'minion') and hasattr(self.minion, 'destroy'):
             self.minion.destroy()
         super(Minion, self).shutdown(
             exitcode, ('The Salt {0} is shutdown. {1}'.format(

@@ -469,7 +470,7 @@ class ProxyMinion(salt.utils.parsers.ProxyMinionOptionParser, DaemonsMixin):  #
                 self.config['user'],
                 permissive=self.config['permissive_pki_access'],
                 root_dir=self.config['root_dir'],
-                sensitive_dirs=[self.config['pki_dir']],
+                pki_dir=self.config['pki_dir'],
             )
         except OSError as error:
             self.environment_failure(error)

@@ -578,7 +579,7 @@ class Syndic(salt.utils.parsers.SyndicOptionParser, DaemonsMixin):  # pylint: di
                 self.config['user'],
                 permissive=self.config['permissive_pki_access'],
                 root_dir=self.config['root_dir'],
-                sensitive_dirs=[self.config['pki_dir']],
+                pki_dir=self.config['pki_dir'],
             )
         except OSError as error:
             self.environment_failure(error)
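All the guards added in this file share one idiom: probe with ``hasattr`` before touching a subsystem that some transports (the Ioflo/RAET variants) never create. A hedged, self-contained sketch of the pattern, with hypothetical class names:

.. code-block:: python

    class BaseDaemon(object):
        def _handle_signals(self, signum, sigframe):
            print('base signal cleanup')


    class ProcessManager(object):
        def kill_children(self):
            print('killing child processes')


    class ZeroMQMaster(BaseDaemon):
        def __init__(self):
            self.process_manager = ProcessManager()


    class IofloMaster(BaseDaemon):
        pass  # no process_manager attribute at all


    def handle_signals(master, signum=15, sigframe=None):
        # Only touch the subsystem if this transport actually has one.
        if hasattr(master, 'process_manager'):
            master.process_manager.kill_children()
        BaseDaemon._handle_signals(master, signum, sigframe)


    handle_signals(ZeroMQMaster())   # killing child processes / base signal cleanup
    handle_signals(IofloMaster())    # base signal cleanup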
@@ -10,6 +10,7 @@ import os
 import salt.utils.job
 import salt.utils.parsers
 import salt.utils.stringutils
 import salt.log
 from salt.utils.args import yamlify_arg
 from salt.utils.verify import verify_log
 from salt.exceptions import (

@@ -38,9 +39,10 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
         import salt.client
         self.parse_args()

-        # Setup file logging!
-        self.setup_logfile_logger()
-        verify_log(self.config)
+        if self.config['log_level'] not in ('quiet', ):
+            # Setup file logging!
+            self.setup_logfile_logger()
+            verify_log(self.config)

         try:
             # We don't need to bail on config file permission errors

@@ -82,7 +84,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
         if 'token' in self.config:
             import salt.utils.files
             try:
-                with salt.utils.files.fopen(os.path.join(self.config['key_dir'], '.root_key'), 'r') as fp_:
+                with salt.utils.files.fopen(os.path.join(self.config['cachedir'], '.root_key'), 'r') as fp_:
                     kwargs['key'] = fp_.readline()
             except IOError:
                 kwargs['token'] = self.config['token']
@@ -194,11 +194,11 @@ class LocalClient(object):
             # The username may contain '\' if it is in Windows
             # 'DOMAIN\username' format. Fix this for the keyfile path.
             key_user = key_user.replace('\\', '_')
-            keyfile = os.path.join(self.opts['key_dir'],
+            keyfile = os.path.join(self.opts['cachedir'],
                                    '.{0}_key'.format(key_user))
         try:
             # Make sure all key parent directories are accessible
-            salt.utils.verify.check_path_traversal(self.opts['key_dir'],
+            salt.utils.verify.check_path_traversal(self.opts['cachedir'],
                                                    key_user,
                                                    self.skip_perm_errors)
             with salt.utils.files.fopen(keyfile, 'r') as key:
@@ -161,17 +161,17 @@ do
         py_cmd_path=`"$py_cmd" -c \
                    'from __future__ import print_function;
                    import sys; print(sys.executable);'`
-        cmdpath=$(command -v $py_cmd 2>/dev/null || which $py_cmd 2>/dev/null)
+        cmdpath=`command -v $py_cmd 2>/dev/null || which $py_cmd 2>/dev/null`
         if file $cmdpath | grep "shell script" > /dev/null
         then
             ex_vars="'PATH', 'LD_LIBRARY_PATH', 'MANPATH', \
                    'XDG_DATA_DIRS', 'PKG_CONFIG_PATH'"
-            export $($py_cmd -c \
+            export `$py_cmd -c \
                    "from __future__ import print_function;
                    import sys;
                    import os;
                    map(sys.stdout.write, ['{{{{0}}}}={{{{1}}}} ' \
-                   .format(x, os.environ[x]) for x in [$ex_vars]])")
+                   .format(x, os.environ[x]) for x in [$ex_vars]])"`
             exec $SUDO PATH=$PATH LD_LIBRARY_PATH=$LD_LIBRARY_PATH \
                 MANPATH=$MANPATH XDG_DATA_DIRS=$XDG_DATA_DIRS \
                 PKG_CONFIG_PATH=$PKG_CONFIG_PATH \
@@ -880,6 +880,7 @@ class Single(object):
         # Pre apply changeable defaults
         self.minion_opts = {
             'grains_cache': True,
+            'log_file': 'salt-call.log',
         }
         self.minion_opts.update(opts.get('ssh_minion_opts', {}))
         if minion_opts is not None:

@@ -889,7 +890,6 @@ class Single(object):
             'root_dir': os.path.join(self.thin_dir, 'running_data'),
             'id': self.id,
             'sock_dir': '/',
-            'log_file': 'salt-call.log',
             'fileserver_list_cache_time': 3,
         })
         self.minion_config = salt.serializers.yaml.serialize(self.minion_opts)
@@ -32,7 +32,7 @@ from salt.utils.versions import LooseVersion as _LooseVersion
 # Import libcloud
 try:
     import libcloud
-    from libcloud.compute.base import NodeState
+    from libcloud.compute.base import NodeDriver, NodeState
     from libcloud.compute.base import NodeAuthPassword
     from libcloud.compute.types import Provider
     from libcloud.compute.providers import get_driver

@@ -52,9 +52,6 @@ try:
 except ImportError:
     HAS_LIBCLOUD = False

-# Import generic libcloud functions
-# from salt.cloud.libcloudfuncs import *
-
 # Import salt.cloud libs
 from salt.cloud.libcloudfuncs import *   # pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import
 from salt.utils.functools import namespaced_function

@@ -217,7 +214,6 @@ def create(vm_):

     log.info('Creating Cloud VM %s', vm_['name'])
     conn = get_conn()
-    rootPw = NodeAuthPassword(vm_['auth'])

     location = conn.ex_get_location_by_id(vm_['location'])
     images = conn.list_images(location=location)

@@ -248,15 +244,13 @@ def create(vm_):
     kwargs = {
         'name': vm_['name'],
         'image': image,
-        'auth': rootPw,
         'ex_description': vm_['description'],
         'ex_network_domain': network_domain,
         'ex_vlan': vlan,
         'ex_is_started': vm_['is_started']
     }

-    event_data = kwargs.copy()
-    del event_data['auth']
+    event_data = _to_event_data(kwargs)

     __utils__['cloud.fire_event'](
         'event',

@@ -267,6 +261,10 @@ def create(vm_):
         transport=__opts__['transport']
     )

+    # Initial password (excluded from event payload)
+    initial_password = NodeAuthPassword(vm_['auth'])
+    kwargs['auth'] = initial_password
+
     try:
         data = conn.create_node(**kwargs)
     except Exception as exc:

@@ -280,7 +278,7 @@ def create(vm_):
         return False

     try:
-        data = salt.utils.cloud.wait_for_ip(
+        data = __utils__['cloud.wait_for_ip'](
             _query_node_data,
             update_args=(vm_, data),
             timeout=config.get_cloud_config_value(

@@ -306,7 +304,7 @@ def create(vm_):
     ip_address = preferred_ip(vm_, data.public_ips)
     log.debug('Using IP address %s', ip_address)

-    if salt.utils.cloud.get_salt_interface(vm_, __opts__) == 'private_ips':
+    if __utils__['cloud.get_salt_interface'](vm_, __opts__) == 'private_ips':
         salt_ip_address = preferred_ip(vm_, data.private_ips)
         log.info('Salt interface set to: %s', salt_ip_address)
     else:

@@ -322,7 +320,7 @@ def create(vm_):
     vm_['ssh_host'] = ip_address
     vm_['password'] = vm_['auth']

-    ret = salt.utils.cloud.bootstrap(vm_, __opts__)
+    ret = __utils__['cloud.bootstrap'](vm_, __opts__)

     ret.update(data.__dict__)

@@ -414,11 +412,13 @@ def create_lb(kwargs=None, call=None):
     log.debug('Network Domain: %s', network_domain.id)
     lb_conn.ex_set_current_network_domain(network_domain.id)

+    event_data = _to_event_data(kwargs)
+
     __utils__['cloud.fire_event'](
         'event',
         'create load_balancer',
         'salt/cloud/loadbalancer/creating',
-        args=kwargs,
+        args=event_data,
         sock_dir=__opts__['sock_dir'],
         transport=__opts__['transport']
     )

@@ -427,11 +427,13 @@ def create_lb(kwargs=None, call=None):
         name, port, protocol, algorithm, members
     )

+    event_data = _to_event_data(kwargs)
+
     __utils__['cloud.fire_event'](
         'event',
         'created load_balancer',
         'salt/cloud/loadbalancer/created',
-        args=kwargs,
+        args=event_data,
         sock_dir=__opts__['sock_dir'],
         transport=__opts__['transport']
     )

@@ -573,3 +575,46 @@ def get_lb_conn(dd_driver=None):
         'Missing dimensiondata_driver for get_lb_conn method.'
     )
     return get_driver_lb(Provider_lb.DIMENSIONDATA)(user_id, key, region=region)
+
+
+def _to_event_data(obj):
+    '''
+    Convert the specified object into a form that can be serialised by msgpack as event data.
+
+    :param obj: The object to convert.
+    '''
+
+    if obj is None:
+        return None
+    if isinstance(obj, bool):
+        return obj
+    if isinstance(obj, int):
+        return obj
+    if isinstance(obj, float):
+        return obj
+    if isinstance(obj, str):
+        return obj
+    if isinstance(obj, bytes):
+        return obj
+    if isinstance(obj, dict):
+        return obj
+
+    if isinstance(obj, NodeDriver):  # Special case for NodeDriver (cyclic references)
+        return obj.name
+
+    if isinstance(obj, list):
+        return [_to_event_data(item) for item in obj]
+
+    event_data = {}
+    for attribute_name in dir(obj):
+        if attribute_name.startswith('_'):
+            continue
+
+        attribute_value = getattr(obj, attribute_name)
+
+        if callable(attribute_value):  # Strip out methods
+            continue
+
+        event_data[attribute_name] = _to_event_data(attribute_value)
+
+    return event_data
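For intuition about the new ``_to_event_data`` helper, here is a hedged usage sketch. The node class below is a stand-in, not a real libcloud type; with a genuine ``NodeDriver`` instance the cyclic ``driver`` attribute collapses to its ``name`` string before the recursion can loop:

.. code-block:: python

    class FakeNode(object):
        '''Stand-in for a libcloud node: public data attributes plus methods.'''
        def __init__(self):
            self.id = 'node-1'
            self.state = 'running'
            self.private_ips = ['10.0.0.5']

        def reboot(self):  # callable attributes are stripped by _to_event_data
            pass


    # _to_event_data(FakeNode()) would return:
    #   {'id': 'node-1', 'state': 'running', 'private_ips': ['10.0.0.5']}
    # i.e. plain data that msgpack can serialise for the Salt event bus.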
@@ -200,8 +200,7 @@ def get_dependencies():
     '''
     deps = {
         'requests': HAS_REQUESTS,
-        'm2crypto': HAS_M2,
-        'pycrypto': HAS_PYCRYPTO
+        'pycrypto or m2crypto': HAS_M2 or HAS_PYCRYPTO
     }
     return config.check_driver_dependencies(
         __virtualname__,
@@ -355,7 +355,7 @@ def _get_ips(node, addr_type='public'):
     ret = []
     for _, interface in node.addresses.items():
         for addr in interface:
-            if addr_type in ('floating', 'fixed') and addr_type == addr['OS-EXT-IPS:type']:
+            if addr_type in ('floating', 'fixed') and addr_type == addr.get('OS-EXT-IPS:type'):
                 ret.append(addr['addr'])
             elif addr_type == 'public' and __utils__['cloud.is_public_ip'](addr['addr']):
                 ret.append(addr['addr'])
@@ -565,11 +565,6 @@ def create(vm_):
     record = {}
     ret = {}

-    # Since using "provider: <provider-engine>" is deprecated, alias provider
-    # to use driver: "driver: <provider-engine>"
-    if 'provider' in vm_:
-        vm_['driver'] = vm_.pop('provider')
-
     # fire creating event
     __utils__['cloud.fire_event'](
         'event',
@@ -197,9 +197,6 @@ VALID_OPTS = {
     # The directory used to store public key data
     'pki_dir': six.string_types,

-    # The directory to store authentication keys of a master's local environment.
-    'key_dir': six.string_types,
-
     # A unique identifier for this daemon
     'id': six.string_types,

@@ -284,6 +281,7 @@ VALID_OPTS = {

     # Location of the files a minion should look for. Set to 'local' to never ask the master.
     'file_client': six.string_types,
+    'local': bool,

     # When using a local file_client, this parameter is used to allow the client to connect to
     # a master for remote execution.

@@ -1250,6 +1248,7 @@ DEFAULT_MINION_OPTS = {
         'base': [salt.syspaths.BASE_THORIUM_ROOTS_DIR],
     },
     'file_client': 'remote',
+    'local': False,
     'use_master_when_local': False,
     'file_roots': {
         'base': [salt.syspaths.BASE_FILE_ROOTS_DIR,

@@ -1500,7 +1499,6 @@ DEFAULT_MASTER_OPTS = {
     'archive_jobs': False,
     'root_dir': salt.syspaths.ROOT_DIR,
     'pki_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'pki', 'master'),
-    'key_dir': os.path.join(salt.syspaths.CONFIG_DIR, 'key'),
     'key_cache': '',
     'cachedir': os.path.join(salt.syspaths.CACHE_DIR, 'master'),
     'file_roots': {

@@ -2316,6 +2314,12 @@ def prepend_root_dir(opts, path_options):
             path = tmp_path_root_dir
         else:
             path = tmp_path_def_root_dir
+    elif salt.utils.platform.is_windows() and not os.path.splitdrive(path)[0]:
+        # In windows, os.path.isabs resolves '/' to 'C:\\' or whatever
+        # the root drive is. This elif prevents the next from being
+        # hit, so that the root_dir is prefixed in cases where the
+        # drive is not prefixed on a config option
+        pass
     elif os.path.isabs(path):
         # Absolute path (not default or overriden root_dir)
         # No prepending required
|
||||
opts.update(syndic_opts)
|
||||
# Prepend root_dir to other paths
|
||||
prepend_root_dirs = [
|
||||
'pki_dir', 'key_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
|
||||
'pki_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
|
||||
'autosign_file', 'autoreject_file', 'token_dir', 'autosign_grains_dir'
|
||||
]
|
||||
for config_key in ('log_file', 'key_logfile', 'syndic_log_file'):
|
||||
@ -3651,7 +3655,7 @@ def _adjust_log_file_override(overrides, default_log_file):
|
||||
if overrides.get('log_dir'):
|
||||
# Adjust log_file if a log_dir override is introduced
|
||||
if overrides.get('log_file'):
|
||||
if not os.path.abspath(overrides['log_file']):
|
||||
if not os.path.isabs(overrides['log_file']):
|
||||
# Prepend log_dir if log_file is relative
|
||||
overrides['log_file'] = os.path.join(overrides['log_dir'],
|
||||
overrides['log_file'])
|
||||
@ -3940,7 +3944,7 @@ def apply_master_config(overrides=None, defaults=None):
|
||||
|
||||
# Prepend root_dir to other paths
|
||||
prepend_root_dirs = [
|
||||
'pki_dir', 'key_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
|
||||
'pki_dir', 'cachedir', 'pidfile', 'sock_dir', 'extension_modules',
|
||||
'autosign_file', 'autoreject_file', 'token_dir', 'syndic_dir',
|
||||
'sqlite_queue_dir', 'autosign_grains_dir'
|
||||
]
|
||||
|
@@ -121,6 +121,7 @@ class IofloMinion(object):
         '''
         warn_deprecated()
         self.opts = opts
+        self.restart = False

     def tune_in(self, behaviors=None):
         '''
@@ -186,11 +186,11 @@ def mk_key(opts, user):
         # The username may contain '\' if it is in Windows
         # 'DOMAIN\username' format. Fix this for the keyfile path.
         keyfile = os.path.join(
-            opts['key_dir'], '.{0}_key'.format(user.replace('\\', '_'))
+            opts['cachedir'], '.{0}_key'.format(user.replace('\\', '_'))
         )
     else:
         keyfile = os.path.join(
-            opts['key_dir'], '.{0}_key'.format(user)
+            opts['cachedir'], '.{0}_key'.format(user)
         )

     if os.path.exists(keyfile):
@@ -11,7 +11,12 @@ framer minionudpstack be active first start
         exit
             do salt raet road stack closer per inode ".salt.road.manor."

-framer bootstrap be active first join
+framer bootstrap be active first setup
+    frame setup
+        enter
+            do salt raet road usher minion setup per inode ".salt.road.manor."
+        go join
+
     frame join
         print Joining...
         enter

@@ -44,7 +49,7 @@ framer bootstrap be active first join
     frame message
         print Messaging...
         enter
-            do raet road stack messenger to contents "Minion 1 Hello" code 15 \
+            do raet road stack messenger with contents "Minion 1 Hello" code 15 \
                 per inode ".salt.road.manor."
         go next

@ -22,6 +22,10 @@ def test():
if not os.path.exists(pkiDirpath):
os.makedirs(pkiDirpath)

keyDirpath = os.path.join('/tmp', 'raet', 'testo', 'key')
if not os.path.exists(keyDirpath):
os.makedirs(keyDirpath)

acceptedDirpath = os.path.join(pkiDirpath, 'accepted')
if not os.path.exists(acceptedDirpath):
os.makedirs(acceptedDirpath)

@ -64,10 +68,12 @@ def test():
client_acl=dict(),
publisher_acl=dict(),
pki_dir=pkiDirpath,
key_dir=keyDirpath,
sock_dir=sockDirpath,
cachedir=cacheDirpath,
open_mode=True,
auto_accept=True,
client_acl_verify=True,
)

master = salt.daemons.flo.IofloMaster(opts=opts)

@ -348,8 +348,8 @@ if __name__ == '__main__' and __package__ is None:

#console.reinit(verbosity=console.Wordage.concise)

#runAll() # run all unittests
runAll() # run all unittests

runSome() # only run some
#runSome() # only run some

#runOne('testParseHostname')

@ -5,6 +5,7 @@ Raet Ioflo Behavior Unittests
from __future__ import absolute_import, print_function, unicode_literals
import sys
from salt.ext.six.moves import map
import importlib
# pylint: disable=blacklisted-import
if sys.version_info < (2, 7):
import unittest2 as unittest

@ -40,6 +41,9 @@ class PresenterTestCase(testing.FrameIofloTestCase):
'''
Call super if override so House Framer and Frame are setup correctly
'''
behaviors = ['salt.daemons.flo', 'salt.daemons.test.plan']
for behavior in behaviors:
mod = importlib.import_module(behavior)
super(PresenterTestCase, self).setUp()

def tearDown(self):

@ -7,6 +7,7 @@ from __future__ import absolute_import, print_function, unicode_literals
# pylint: skip-file
# pylint: disable=C0103
import sys
import salt.utils.stringutils
from salt.ext.six.moves import map
if sys.version_info < (2, 7):
import unittest2 as unittest

@ -30,12 +31,15 @@ from raet.road import estating, keeping, stacking

from salt.key import RaetKey


def setUpModule():
console.reinit(verbosity=console.Wordage.concise)


def tearDownModule():
pass


class BasicTestCase(unittest.TestCase):
""""""

@ -47,7 +51,7 @@ class BasicTestCase(unittest.TestCase):

pkiDirpath = os.path.join(self.saltDirpath, 'pki')
if not os.path.exists(pkiDirpath):
os.makedirs(pkiDirpath)
os.makedirs(pkiDirpath)

acceptedDirpath = os.path.join(pkiDirpath, 'accepted')
if not os.path.exists(acceptedDirpath):

@ -81,7 +85,7 @@ class BasicTestCase(unittest.TestCase):
)

self.mainKeeper = RaetKey(opts=self.opts)
self.baseDirpath = tempfile.mkdtemp(prefix="salt", suffix="base", dir='/tmp')
self.baseDirpath = tempfile.mkdtemp(prefix="salt", suffix="base", dir='/tmp')

def tearDown(self):
if os.path.exists(self.saltDirpath):

@ -119,9 +123,9 @@ class BasicTestCase(unittest.TestCase):
self.opts['auto_accept'] = True
self.assertTrue(self.opts['auto_accept'])
self.assertDictEqual(self.mainKeeper.all_keys(), {'accepted': [],
'local': [],
'rejected': [],
'pending': []})
'local': [],
'rejected': [],
'pending': []})

localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {})

@ -129,8 +133,9 @@ class BasicTestCase(unittest.TestCase):
main = self.createRoadData(name='main', base=self.baseDirpath)
self.mainKeeper.write_local(main['prihex'], main['sighex'])
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {'priv': main['prihex'],
'sign': main['sighex']})
self.assertDictEqual(localkeys,
{'priv': salt.utils.stringutils.to_str(main['prihex']),
'sign': salt.utils.stringutils.to_str(main['sighex'])})
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],

@ -147,39 +152,38 @@ class BasicTestCase(unittest.TestCase):

allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other1', 'other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})

remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other1',
'pub': other1['pubhex'],
'verify': other1['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other1',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
'verify': salt.utils.stringutils.to_str(other1['verhex'])})

remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex'])})

listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': ['other1', 'other2'],
'rejected': [],
'pending': []})


allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': other1['pubhex'],},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})

self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other1['pubhex']), },
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']), }
})

def testManualAccept(self):
'''

@ -189,9 +193,9 @@ class BasicTestCase(unittest.TestCase):
self.opts['auto_accept'] = False
self.assertFalse(self.opts['auto_accept'])
self.assertDictEqual(self.mainKeeper.all_keys(), {'accepted': [],
'local': [],
'rejected': [],
'pending': []})
'local': [],
'rejected': [],
'pending': []})

localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {})

@ -199,8 +203,9 @@ class BasicTestCase(unittest.TestCase):
main = self.createRoadData(name='main', base=self.baseDirpath)
self.mainKeeper.write_local(main['prihex'], main['sighex'])
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {'priv': main['prihex'],
'sign': main['sighex']})
self.assertDictEqual(localkeys,
{'priv': salt.utils.stringutils.to_str(main['prihex']),
'sign': salt.utils.stringutils.to_str(main['sighex'])})
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],

@ -217,9 +222,9 @@ class BasicTestCase(unittest.TestCase):

allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],
'pending': ['other1', 'other2'],
'rejected': []} )
'local': [self.localFilepath],
'pending': ['other1', 'other2'],
'rejected': []})

remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, {})

@ -232,56 +237,60 @@ class BasicTestCase(unittest.TestCase):
'rejected': [],
'pending': ['other1', 'other2']})


allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'pending',
'pub': other1['pubhex'],},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'pending',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'pending',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
},
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'pending',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})

self.mainKeeper.accept_all()

allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other1', 'other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})

remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other1',
'pub': other1['pubhex'],
'verify': other1['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other1',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
'verify': salt.utils.stringutils.to_str(other1['verhex'])})

remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex'])})

listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': ['other1', 'other2'],
'rejected': [],
'pending': []})


allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': other1['pubhex'],},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
},
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})

def testDelete(self):
'''

@ -291,9 +300,9 @@ class BasicTestCase(unittest.TestCase):
self.opts['auto_accept'] = True
self.assertTrue(self.opts['auto_accept'])
self.assertDictEqual(self.mainKeeper.all_keys(), {'accepted': [],
'local': [],
'rejected': [],
'pending': []})
'local': [],
'rejected': [],
'pending': []})

localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {})

@ -301,8 +310,9 @@ class BasicTestCase(unittest.TestCase):
main = self.createRoadData(name='main', base=self.baseDirpath)
self.mainKeeper.write_local(main['prihex'], main['sighex'])
localkeys = self.mainKeeper.read_local()
self.assertDictEqual(localkeys, {'priv': main['prihex'],
'sign': main['sighex']})
self.assertDictEqual(localkeys,
{'priv': salt.utils.stringutils.to_str(main['prihex']),
'sign': salt.utils.stringutils.to_str(main['sighex'])})
allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': [],
'local': [self.localFilepath],

@ -319,70 +329,73 @@ class BasicTestCase(unittest.TestCase):

allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other1', 'other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})

remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other1',
'pub': other1['pubhex'],
'verify': other1['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other1',
'pub': salt.utils.stringutils.to_str(other1['pubhex']),
'verify': salt.utils.stringutils.to_str(other1['verhex']),
})

remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex']),
})

listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': ['other1', 'other2'],
'rejected': [],
'pending': []})


allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {'other1':
{'verify': other1['verhex'],
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': other1['pubhex']},
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})
self.assertDictEqual(allremotekeys,
{'other1':
{'verify': salt.utils.stringutils.to_str(other1['verhex']),
'minion_id': 'other1',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other1['pubhex'])
},
'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})

self.mainKeeper.delete_key(match=other1['name'])

allkeys = self.mainKeeper.all_keys()
self.assertDictEqual(allkeys, {'accepted': ['other2'],
'local': [self.localFilepath],
'pending': [],
'rejected': []} )
'local': [self.localFilepath],
'pending': [],
'rejected': []})

remotekeys = self.mainKeeper.read_remote(other1['name'])
self.assertDictEqual(remotekeys, {} )
self.assertDictEqual(remotekeys, {})

remotekeys = self.mainKeeper.read_remote(other2['name'])
self.assertDictEqual(remotekeys, { 'minion_id': 'other2',
'pub': other2['pubhex'],
'verify': other2['verhex']} )
self.assertDictEqual(remotekeys, {'minion_id': 'other2',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
'verify': salt.utils.stringutils.to_str(other2['verhex'])})

listkeys = self.mainKeeper.list_keys()
self.assertDictEqual(listkeys, {'accepted': [ 'other2'],
self.assertDictEqual(listkeys, {'accepted': ['other2'],
'rejected': [],
'pending': []})


allremotekeys = self.mainKeeper.read_all_remote()
self.assertDictEqual(allremotekeys, {
'other2':
{'verify': other2['verhex'],
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': other2['pubhex'],}
})

self.assertDictEqual(allremotekeys,
{'other2':
{'verify': salt.utils.stringutils.to_str(other2['verhex']),
'minion_id': 'other2',
'acceptance': 'accepted',
'pub': salt.utils.stringutils.to_str(other2['pubhex']),
}
})


def runOne(test):

@ -393,11 +406,12 @@ def runOne(test):
suite = unittest.TestSuite([test])
unittest.TextTestRunner(verbosity=2).run(suite)


def runSome():
'''
Unittest runner
'''
tests = []
tests = []
names = ['testAutoAccept',
'testManualAccept',
'testDelete']

@ -407,6 +421,7 @@ def runSome():
suite = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=2).run(suite)


def runAll():
'''
Unittest runner

@ -416,12 +431,12 @@ def runAll():

unittest.TextTestRunner(verbosity=2).run(suite)


if __name__ == '__main__' and __package__ is None:
# console.reinit(verbosity=console.Wordage.concise)

#console.reinit(verbosity=console.Wordage.concise)
runAll() # run all unittests

runAll() #run all unittests
# runSome() #only run some

#runSome()#only run some

#runOne('testDelete')
# runOne('testDelete')

@ -13,10 +13,11 @@ else:
# pylint: enable=blacklisted-import

import os
import stat
import time
import tempfile
import shutil
import socket
import stat
import tempfile
import time

from ioflo.aid.odicting import odict
from ioflo.aid.timing import StoreTimer

@ -29,6 +30,7 @@ from raet.road import estating, stacking

from salt.daemons import salting
import salt.utils.kinds as kinds
import salt.utils.stringutils


def setUpModule():

@ -232,20 +234,21 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
os.path.join('main', 'raet', 'main_master')))
self.assertTrue(main.ha, ("0.0.0.0", raeting.RAET_PORT))
self.assertIs(main.keep.auto, raeting.AutoMode.never.value)
self.assertDictEqual(main.keep.loadLocalData(), {'name': mainData['name'],
'uid': 1,
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
})
self.assertDictEqual(main.keep.loadLocalData(),
{'name': mainData['name'],
'uid': 1,
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

data1 = self.createRoadData(role='remote1',
kind=kinds.APPL_KIND_NAMES[kinds.applKinds.minion],

@ -282,7 +285,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],

@ -290,8 +293,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 0,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'remote2_minion':
{'name': data2['name'],

@ -300,7 +303,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],

@ -308,8 +311,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 0,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})

@ -362,14 +365,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})

data3 = self.createRoadData(role='remote3',

@ -405,7 +408,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7534],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data3['kind'],

@ -413,8 +416,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data3['role'],
'acceptance': 0,
'verhex': data3['verhex'],
'pubhex': data3['pubhex'],
'verhex': salt.utils.stringutils.to_str(data3['verhex']),
'pubhex': salt.utils.stringutils.to_str(data3['pubhex']),
},
'remote4_minion':
{

@ -424,7 +427,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7535],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data4['kind'],

@ -432,8 +435,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data4['role'],
'acceptance': 0,
'verhex': data4['verhex'],
'pubhex': data4['pubhex'],
'verhex': salt.utils.stringutils.to_str(data4['verhex']),
'pubhex': salt.utils.stringutils.to_str(data4['pubhex']),
}
})

@ -477,14 +480,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

data1 = self.createRoadData(role='remote1',

@ -520,7 +523,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],

@ -528,8 +531,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'remote2_minion':
{'name': data2['name'],

@ -538,7 +541,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],

@ -546,8 +549,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})

@ -600,14 +603,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})

data3 = self.createRoadData(role='remote3',

@ -643,7 +646,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7534],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data3['kind'],

@ -651,8 +654,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data3['role'],
'acceptance': 1,
'verhex': data3['verhex'],
'pubhex': data3['pubhex'],
'verhex': salt.utils.stringutils.to_str(data3['verhex']),
'pubhex': salt.utils.stringutils.to_str(data3['pubhex']),
},
'remote4_minion':
{

@ -662,7 +665,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7535],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data4['kind'],

@ -670,8 +673,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data4['role'],
'acceptance': 1,
'verhex': data4['verhex'],
'pubhex': data4['pubhex'],
'verhex': salt.utils.stringutils.to_str(data4['verhex']),
'pubhex': salt.utils.stringutils.to_str(data4['pubhex']),
}
})

@ -715,13 +718,13 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
'role': mainData['role'],
})

@ -759,7 +762,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],

@ -767,8 +770,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'remote2_minion':
{

@ -778,7 +781,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],

@ -786,8 +789,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})

@ -840,14 +843,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})

data3 = self.createRoadData(role='remote3',

@ -883,7 +886,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7534],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data3['kind'],

@ -891,8 +894,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data3['role'],
'acceptance': 1,
'verhex': data3['verhex'],
'pubhex': data3['pubhex'],
'verhex': salt.utils.stringutils.to_str(data3['verhex']),
'pubhex': salt.utils.stringutils.to_str(data3['pubhex']),
},
'remote4_minion':
{

@ -902,7 +905,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7535],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data4['kind'],

@ -910,8 +913,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data4['role'],
'acceptance': 1,
'verhex': data4['verhex'],
'pubhex': data4['pubhex'],
'verhex': salt.utils.stringutils.to_str(data4['verhex']),
'pubhex': salt.utils.stringutils.to_str(data4['pubhex']),
}
})

@ -955,14 +958,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

# add multiple remotes all with same role

@ -1006,7 +1009,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],

@ -1014,8 +1017,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 0,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'primary_caller':
{

@ -1025,7 +1028,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],

@ -1033,8 +1036,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 0,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
}
})

@ -1104,14 +1107,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

# add multiple remotes all with same role

@ -1149,7 +1152,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],

@ -1157,8 +1160,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
},
'primary_syndic':
{

@ -1168,7 +1171,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],

@ -1176,8 +1179,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data2['verhex'],
'pubhex': data2['pubhex'],
'verhex': salt.utils.stringutils.to_str(data2['verhex']),
'pubhex': salt.utils.stringutils.to_str(data2['pubhex']),
}
})

@ -1248,14 +1251,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

# add multiple remotes all with same role but different keys

@ -1300,7 +1303,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data1['kind'],

@ -1308,8 +1311,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data1['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
},
'primary_syndic':
{

@ -1319,7 +1322,7 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7533],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'main': False,
'kind': data2['kind'],

@ -1327,8 +1330,8 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'joined': None,
'role': data2['role'],
'acceptance': 1,
'verhex': data1['verhex'],
'pubhex': data1['pubhex'],
'verhex': salt.utils.stringutils.to_str(data1['verhex']),
'pubhex': salt.utils.stringutils.to_str(data1['pubhex']),
}
})

@ -1399,14 +1402,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

opts = self.createOpts(role='other',

@ -1441,14 +1444,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})

self.join(other, main)

@ -1524,14 +1527,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

opts = self.createOpts(role='other',

@ -1566,14 +1569,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': otherData['role'],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
})

self.join(other, main)

@ -1645,14 +1648,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

opts = self.createOpts(role='other',

@ -1687,13 +1690,13 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'sighex': otherData['sighex'],
'prihex': otherData['prihex'],
'sighex': salt.utils.stringutils.to_str(otherData['sighex']),
'prihex': salt.utils.stringutils.to_str(otherData['prihex']),
'role': otherData['role'],
})

@ -1766,14 +1769,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

opts = self.createOpts(role='primary',

@ -1808,14 +1811,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': other1Data['role'],
'sighex': other1Data['sighex'],
'prihex': other1Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other1Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other1Data['prihex']),
})

self.join(other1, main)

@ -1876,13 +1879,13 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7532],
'sighex': other2Data['sighex'],
'prihex': other2Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other2Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other2Data['prihex']),
'role': other2Data['role'],
})

@ -1936,14 +1939,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7532],
'role': other2Data['role'],
'sighex': other1Data['sighex'],
'prihex': other1Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other1Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other1Data['prihex']),
})

# should join since same role and keys

@ -2021,14 +2024,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7530],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7530],
'role': mainData['role'],
'sighex': mainData['sighex'],
'prihex': mainData['prihex'],
'sighex': salt.utils.stringutils.to_str(mainData['sighex']),
'prihex': salt.utils.stringutils.to_str(mainData['prihex']),
})

opts = self.createOpts(role='primary',

@ -2063,14 +2066,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7531],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7531],
'role': other1Data['role'],
'sighex': other1Data['sighex'],
'prihex': other1Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other1Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other1Data['prihex']),
})

self.join(other1, main)

@ -2130,14 +2133,14 @@ class BasicTestCase(unittest.TestCase): # pylint: disable=moved-test-case-class
'ha': ['127.0.0.1', 7532],
'iha': None,
'natted': None,
'fqdn': '1.0.0.127.in-addr.arpa',
'fqdn': socket.getfqdn('127.0.0.1'),
'dyned': None,
'sid': 0,
'puid': 1,
'aha': ['0.0.0.0', 7532],
'role': other2Data['role'],
'sighex': other2Data['sighex'],
'prihex': other2Data['prihex'],
'sighex': salt.utils.stringutils.to_str(other2Data['sighex']),
'prihex': salt.utils.stringutils.to_str(other2Data['prihex']),
})

# should join since open mode

@ -2225,8 +2228,8 @@ if __name__ == '__main__' and __package__ is None:

#console.reinit(verbosity=console.Wordage.concise)

#runAll() # run all unittests
runAll() # run all unittests

runSome() # only run some
#runSome() # only run some

#runOne('testBootstrapRoleAuto')

@ -5,6 +5,7 @@ Raet Ioflo Behavior Unittests
from __future__ import absolute_import, print_function, unicode_literals
import sys
from salt.ext.six.moves import map
import importlib
# pylint: disable=blacklisted-import
if sys.version_info < (2, 7):
import unittest2 as unittest

@ -43,6 +44,9 @@ class StatsEventerTestCase(testing.FrameIofloTestCase):
'''
Call super if override so House Framer and Frame are setup correctly
'''
behaviors = ['salt.daemons.flo', 'salt.daemons.test.plan']
for behavior in behaviors:
mod = importlib.import_module(behavior)
super(StatsEventerTestCase, self).setUp()

def tearDown(self):

@ -723,8 +727,8 @@ if __name__ == '__main__' and __package__ is None:

# console.reinit(verbosity=console.Wordage.concise)

#runAll() # run all unittests
runAll() # run all unittests

runSome() # only run some
#runSome() # only run some

#runOne('testMasterLaneStats')

@ -285,7 +285,7 @@ class SaltRenderError(SaltException):
if self.line_num and self.buffer:
# Avoid circular import
import salt.utils.templates
self.context = salt.utils.templates.get_context(
self.context = salt.utils.stringutils.get_context(
self.buffer,
self.line_num,
marker=marker

salt/key.py
@ -125,7 +125,7 @@ class KeyCLI(object):
if self.opts['eauth']:
if 'token' in self.opts:
try:
with salt.utils.files.fopen(os.path.join(self.opts['key_dir'], '.root_key'), 'r') as fp_:
with salt.utils.files.fopen(os.path.join(self.opts['cachedir'], '.root_key'), 'r') as fp_:
low['key'] = \
salt.utils.stringutils.to_unicode(fp_.readline())
except IOError:

@ -1082,6 +1082,8 @@ class RaetKey(Key):
pre_path = os.path.join(pre, minion_id)
rej_path = os.path.join(rej, minion_id)
# open mode is turned on, force accept the key
pub = salt.utils.stringutils.to_str(pub)
verify = salt.utils.stringutils.to_str(verify)
keydata = {
'minion_id': minion_id,
'pub': pub,

@ -1148,7 +1150,7 @@ class RaetKey(Key):
verify: <verify>
'''
path = os.path.join(self.opts['pki_dir'], status, minion_id)
with salt.utils.files.fopen(path, 'r') as fp_:
with salt.utils.files.fopen(path, 'rb') as fp_:
keydata = self.serial.loads(fp_.read())
return 'pub: {0}\nverify: {1}'.format(
keydata['pub'],

@ -1158,7 +1160,7 @@ class RaetKey(Key):
'''
Return a sha256 kingerprint for the key
'''
with salt.utils.files.fopen(path, 'r') as fp_:
with salt.utils.files.fopen(path, 'rb') as fp_:
keydata = self.serial.loads(fp_.read())
key = 'pub: {0}\nverify: {1}'.format(
keydata['pub'],

@ -1442,7 +1444,7 @@ class RaetKey(Key):
if os.path.exists(path):
#mode = os.stat(path).st_mode
os.chmod(path, stat.S_IWUSR | stat.S_IRUSR)
with salt.utils.files.fopen(path, 'w+') as fp_:
with salt.utils.files.fopen(path, 'w+b') as fp_:
fp_.write(self.serial.dumps(keydata))
os.chmod(path, stat.S_IRUSR)
os.umask(c_umask)

@ -17,6 +17,7 @@ import logging.handlers

# Import salt libs
from salt.log.mixins import NewStyleClassMixIn, ExcInfoOnLogLevelFormatMixIn
from salt.ext.six.moves import queue

log = logging.getLogger(__name__)

@ -176,9 +177,9 @@ if sys.version_info < (3, 2):
'''
try:
self.queue.put_nowait(record)
except self.queue.Full:
except queue.Full:
sys.stderr.write('[WARNING ] Message queue is full, '
'unable to write "{0}" to log', record
'unable to write "{0}" to log'.format(record)
)

def prepare(self, record):

@ -120,6 +120,7 @@ __MP_LOGGING_QUEUE = None
__MP_LOGGING_QUEUE_PROCESS = None
__MP_LOGGING_QUEUE_HANDLER = None
__MP_IN_MAINPROCESS = multiprocessing.current_process().name == 'MainProcess'
__MP_MAINPROCESS_ID = None


class __NullLoggingHandler(TemporaryLoggingHandler):

@ -822,6 +823,7 @@ def set_multiprocessing_logging_queue(queue):
def setup_multiprocessing_logging_listener(opts, queue=None):
global __MP_LOGGING_QUEUE_PROCESS
global __MP_LOGGING_LISTENER_CONFIGURED
global __MP_MAINPROCESS_ID

if __MP_IN_MAINPROCESS is False:
# We're not in the MainProcess, return! No logging listener setup shall happen

@ -830,6 +832,11 @@ def setup_multiprocessing_logging_listener(opts, queue=None):
if __MP_LOGGING_LISTENER_CONFIGURED is True:
return

if __MP_MAINPROCESS_ID is not None and __MP_MAINPROCESS_ID != os.getpid():
# We're not in the MainProcess, return! No logging listener setup shall happen
return

__MP_MAINPROCESS_ID = os.getpid()
__MP_LOGGING_QUEUE_PROCESS = multiprocessing.Process(
target=__process_multiprocessing_logging_queue,
args=(opts, queue or get_multiprocessing_logging_queue(),)

@ -967,6 +974,11 @@ def shutdown_multiprocessing_logging_listener(daemonizing=False):

if __MP_LOGGING_QUEUE_PROCESS is None:
return

if __MP_MAINPROCESS_ID is not None and __MP_MAINPROCESS_ID != os.getpid():
# We're not in the MainProcess, return! No logging listener setup shall happen
return

if __MP_LOGGING_QUEUE_PROCESS.is_alive():
logging.getLogger(__name__).debug('Stopping the multiprocessing logging queue listener')
try:

@ -32,12 +32,12 @@ Connection module for Amazon Cloud Formation
# keep lint from choking on _get_conn and _cache_id
#pylint: disable=E0602

from __future__ import absolute_import, print_function, unicode_literals

# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging

# Import Salt libs
from salt.ext import six
import salt.utils.versions

log = logging.getLogger(__name__)

@ -72,7 +72,9 @@ def exists(name, region=None, key=None, keyid=None, profile=None):
'''
Check to see if a stack exists.

CLI example::
CLI Example:

.. code-block:: bash

salt myminion boto_cfn.exists mystack region=us-east-1
'''

@ -94,7 +96,9 @@ def describe(name, region=None, key=None, keyid=None, profile=None):

.. versionadded:: 2015.8.0

CLI example::
CLI Example:

.. code-block:: bash

salt myminion boto_cfn.describe mystack region=us-east-1
'''

@ -135,7 +139,9 @@ def create(name, template_body=None, template_url=None, parameters=None, notific
'''
Create a CFN stack.

CLI example to create a stack::
CLI Example:

.. code-block:: bash

salt myminion boto_cfn.create mystack template_url='https://s3.amazonaws.com/bucket/template.cft' \
region=us-east-1

@ -161,7 +167,9 @@ def update_stack(name, template_body=None, template_url=None, parameters=None, n

.. versionadded:: 2015.8.0

CLI example to update a stack::
CLI Example:

.. code-block:: bash

salt myminion boto_cfn.update_stack mystack template_url='https://s3.amazonaws.com/bucket/template.cft' \
region=us-east-1

@ -186,7 +194,9 @@ def delete(name, region=None, key=None, keyid=None, profile=None):
'''
Delete a CFN stack.

CLI example to delete a stack::
CLI Example:

.. code-block:: bash

salt myminion boto_cfn.delete mystack region=us-east-1
'''

@ -205,7 +215,9 @@ def get_template(name, region=None, key=None, keyid=None, profile=None):
'''
Check to see if attributes are set on a CFN stack.

CLI example::
CLI Example:

.. code-block:: bash

salt myminion boto_cfn.get_template mystack
'''

@ -228,7 +240,9 @@ def validate_template(template_body=None, template_url=None, region=None, key=No

.. versionadded:: 2015.8.0

CLI example::
CLI Example:

.. code-block:: bash

salt myminion boto_cfn.validate_template mystack-template
'''

@ -2586,6 +2586,7 @@ def describe_route_tables(route_table_id=None, route_table_name=None,
'instance_id': 'Instance',
'interface_id': 'NetworkInterfaceId',
'nat_gateway_id': 'NatGatewayId',
'vpc_peering_connection_id': 'VpcPeeringConnectionId',
}
assoc_keys = {'id': 'RouteTableAssociationId',
'main': 'Main',
File diff suppressed because it is too large
@@ -630,7 +630,6 @@ def _client_wrapper(attr, *args, **kwargs):
         )
         ret = func(*args, **kwargs)
     except docker.errors.APIError as exc:
-        log.exception('Encountered error running API function %s', attr)
         if catch_api_errors:
             # Generic handling of Docker API errors
             raise CommandExecutionError(
@@ -29,7 +29,7 @@ try:
 except (NameError, KeyError):
     import salt.modules.cmdmod
     __salt__ = {
-        'cmd.run_all': salt.modules.cmdmod._run_all_quiet
+        'cmd.run_all': salt.modules.cmdmod.run_all
     }
@@ -95,8 +95,7 @@ def __execute_cmd(command, host=None,
                                    output_loglevel='quiet')
 
     if cmd['retcode'] != 0:
-        log.warning('racadm return an exit code \'{0}\'.'
-                    .format(cmd['retcode']))
+        log.warning('racadm returned an exit code of %s', cmd['retcode'])
         return False
 
     return True
@@ -129,8 +128,7 @@ def __execute_ret(command, host=None,
                                    output_loglevel='quiet')
 
     if cmd['retcode'] != 0:
-        log.warning('racadm return an exit code \'{0}\'.'
-                    .format(cmd['retcode']))
+        log.warning('racadm returned an exit code of %s', cmd['retcode'])
     else:
         fmtlines = []
         for l in cmd['stdout'].splitlines():
@@ -193,8 +191,7 @@ def system_info(host=None,
                         module=module)
 
     if cmd['retcode'] != 0:
-        log.warning('racadm return an exit code \'{0}\'.'
-                    .format(cmd['retcode']))
+        log.warning('racadm returned an exit code of %s', cmd['retcode'])
         return cmd
 
     return __parse_drac(cmd['stdout'])
@@ -272,8 +269,7 @@ def network_info(host=None,
                         module=module)
 
     if cmd['retcode'] != 0:
-        log.warning('racadm return an exit code \'{0}\'.'
-                    .format(cmd['retcode']))
+        log.warning('racadm returned an exit code of %s', cmd['retcode'])
 
     cmd['stdout'] = 'Network:\n' + 'Device = ' + module + '\n' + \
                     cmd['stdout']
@@ -395,8 +391,7 @@ def list_users(host=None,
                            admin_password=admin_password)
 
     if cmd['retcode'] != 0:
-        log.warning('racadm return an exit code \'{0}\'.'
-                    .format(cmd['retcode']))
+        log.warning('racadm returned an exit code of %s', cmd['retcode'])
 
     for user in cmd['stdout'].splitlines():
         if not user.startswith('cfg'):
@@ -444,7 +439,7 @@ def delete_user(username,
                           admin_password=admin_password)
 
     else:
-        log.warning('\'{0}\' does not exist'.format(username))
+        log.warning('User \'%s\' does not exist', username)
         return False
 
 
@@ -485,7 +480,7 @@ def change_password(username, password, uid=None, host=None,
                             host=host, admin_username=admin_username,
                             admin_password=admin_password, module=module)
     else:
-        log.warning('\'{0}\' does not exist'.format(username))
+        log.warning('racadm: user \'%s\' does not exist', username)
         return False
 
 
@@ -567,7 +562,7 @@ def create_user(username, password, permissions,
     users = list_users()
 
     if username in users:
-        log.warning('\'{0}\' already exists'.format(username))
+        log.warning('racadm: user \'%s\' already exists', username)
         return False
 
     for idx in six.iterkeys(users):
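The recurring change in this module defers string interpolation to the logging framework: with percent-style arguments, the message is only built when the record is actually emitted. A minimal standalone sketch of the difference (the ``Expensive`` class is illustrative, not Salt code):

.. code-block:: python

    import logging

    logging.basicConfig(level=logging.ERROR)
    log = logging.getLogger(__name__)


    class Expensive(object):
        '''Object whose string conversion is costly.'''
        def __str__(self):
            print('__str__ called')
            return 'expensive'


    # Eager: the message is built even though WARNING is below the ERROR threshold.
    log.warning('value is {0}'.format(Expensive()))  # __str__ runs

    # Lazy: logging interpolates only if the record will actually be emitted.
    log.warning('value is %s', Expensive())  # __str__ never runs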
@@ -67,7 +67,7 @@ def _localectl_status():
     Parse localectl status into a dict.
     :return: dict
     '''
-    if salt.utils.which('localectl') is None:
+    if salt.utils.path.which('localectl') is None:
         raise CommandExecutionError('Unable to find "localectl"')
 
     ret = {}
@@ -30,7 +30,7 @@ def __virtual__():
     Only work on Mac OS
     '''
     if salt.utils.platform.is_darwin() \
-            and _LooseVersion(__grains__['osrelease']) >= '10.9':
+            and _LooseVersion(__grains__['osrelease']) >= _LooseVersion('10.9'):
         return True
     return (
         False,
@@ -34,7 +34,6 @@ from salt.ext.six.moves import range  # pylint: disable=W0622,import-error
 
 # Import third party libs
 try:
-    import win32gui
     import win32api
     import win32con
     import pywintypes
@@ -45,6 +44,7 @@ except ImportError:
 # Import Salt libs
 import salt.utils.platform
 import salt.utils.stringutils
+import salt.utils.win_functions
 from salt.exceptions import CommandExecutionError
 
 PY2 = sys.version_info[0] == 2
@@ -65,7 +65,7 @@ def __virtual__():
     if not HAS_WINDOWS_MODULES:
         return (False, 'reg execution module failed to load: '
                        'One of the following libraries did not load: '
-                       + 'win32gui, win32con, win32api')
+                       'win32con, win32api, pywintypes')
 
     return __virtualname__
 
@@ -190,11 +190,7 @@ def broadcast_change():
 
         salt '*' reg.broadcast_change
     '''
-    # https://msdn.microsoft.com/en-us/library/windows/desktop/ms644952(v=vs.85).aspx
-    _, res = win32gui.SendMessageTimeout(
-        win32con.HWND_BROADCAST, win32con.WM_SETTINGCHANGE, 0, 0,
-        win32con.SMTO_ABORTIFHUNG, 5000)
-    return not bool(res)
+    return salt.utils.win_functions.broadcast_setting_change('Environment')
 
 
 def list_keys(hive, key=None, use_32bit_registry=False):
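Both ``reg.broadcast_change`` here and ``win_path.rehash`` later in this diff now delegate to a single shared helper. A sketch of what such a helper can look like, mirroring the ``SendMessageTimeout`` call removed above; the actual body of ``salt.utils.win_functions.broadcast_setting_change`` is not shown in this diff, and passing the setting name as the lParam is an assumption:

.. code-block:: python

    import win32con
    import win32gui


    def broadcast_setting_change(message='Environment'):
        '''
        Broadcast WM_SETTINGCHANGE so new processes pick up setting changes.
        SendMessageTimeout returns a (result, dwResult) pair; the removed
        code treated a zero second element as success.
        '''
        _, res = win32gui.SendMessageTimeout(
            win32con.HWND_BROADCAST, win32con.WM_SETTINGCHANGE, 0, message,
            win32con.SMTO_ABORTIFHUNG, 5000)
        return not bool(res)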
@@ -927,8 +927,8 @@ def highstate(test=None, queue=False, **kwargs):
 
     .. code-block:: bash
 
-        salt '*' state.higstate exclude=bar,baz
-        salt '*' state.higstate exclude=foo*
+        salt '*' state.highstate exclude=bar,baz
+        salt '*' state.highstate exclude=foo*
         salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]"
 
     saltenv
@@ -2442,7 +2442,7 @@ class _policy_info(object):
                 elif ord(val) == 1:
                     return 'Enabled'
                 else:
-                    return 'Invalid Value'
+                    return 'Invalid Value: {0!r}'.format(val)  # pylint: disable=repr-flag-used-in-string
             else:
                 return 'Not Defined'
         except TypeError:
@@ -5066,7 +5066,10 @@ def get(policy_class=None, return_full_policy_names=True,
                     class_vals[policy_name] = __salt__['reg.read_value'](_pol['Registry']['Hive'],
                                                                          _pol['Registry']['Path'],
                                                                          _pol['Registry']['Value'])['vdata']
-                    log.debug('Value %s found for reg policy %s', class_vals[policy_name], policy_name)
+                    log.debug(
+                        'Value %r found for reg policy %s',
+                        class_vals[policy_name], policy_name
+                    )
                 elif 'Secedit' in _pol:
                     # get value from secedit
                     _ret, _val = _findOptionValueInSeceditFile(_pol['Secedit']['Option'])
@@ -18,12 +18,11 @@ import salt.utils.args
 import salt.utils.data
 import salt.utils.platform
 import salt.utils.stringutils
+import salt.utils.win_functions
 
 # Import 3rd-party libs
 from salt.ext.six.moves import map
 try:
-    from win32con import HWND_BROADCAST, WM_SETTINGCHANGE
-    from win32api import SendMessage
     HAS_WIN32 = True
 except ImportError:
     HAS_WIN32 = False
@@ -57,7 +56,14 @@ def _normalize_dir(string_):
 
 def rehash():
     '''
     Send a WM_SETTINGCHANGE Broadcast to Windows to refresh the Environment
-    variables
+    variables for new processes.
+
+    .. note::
+        This will only affect new processes that aren't launched by services. To
+        apply changes to the path to services, the host must be restarted. The
+        ``salt-minion``, if running as a service, will not see changes to the
+        environment until the system is restarted. See
+        `MSDN Documentation <https://support.microsoft.com/en-us/help/821761/changes-that-you-make-to-environment-variables-do-not-affect-services>`_
 
     CLI Example:
 
@@ -65,7 +71,7 @@ def rehash():
 
         salt '*' win_path.rehash
     '''
-    return bool(SendMessage(HWND_BROADCAST, WM_SETTINGCHANGE, 0, 'Environment'))
+    return salt.utils.win_functions.broadcast_setting_change('Environment')
 
 
 def get_path():
@@ -581,28 +581,77 @@ def _refresh_db_conditional(saltenv, **kwargs):
 
 
 def refresh_db(**kwargs):
-    '''
-    Fetches metadata files and calls :py:func:`pkg.genrepo
-    <salt.modules.win_pkg.genrepo>` to compile updated repository metadata.
+    r'''
+    Generates the local software metadata database (`winrepo.p`) on the minion.
+    The database is stored in a serialized format located by default at the
+    following location:
+
+    `C:\salt\var\cache\salt\minion\files\base\win\repo-ng\winrepo.p`
+
+    This module performs the following steps to generate the software metadata
+    database:
+
+    - Fetch the package definition files (.sls) from `winrepo_source_dir`
+      (default `salt://win/repo-ng`) and cache them in
+      `<cachedir>\files\<saltenv>\<winrepo_source_dir>`
+      (default: `C:\salt\var\cache\salt\minion\files\base\win\repo-ng`)
+    - Call :py:func:`pkg.genrepo <salt.modules.win_pkg.genrepo>` to parse the
+      package definition files and generate the repository metadata database
+      file (`winrepo.p`)
+    - Return the report received from
+      :py:func:`pkg.genrepo <salt.modules.win_pkg.genrepo>`
+
+    The default winrepo directory on the master is `/srv/salt/win/repo-ng`. All
+    files that end with `.sls` in this and all subdirectories will be used to
+    generate the repository metadata database (`winrepo.p`).
+
+    .. note::
+        - Hidden directories (directories beginning with '`.`', such as
+          '`.git`') will be ignored.
+
+    .. note::
+        There is no need to call `pkg.refresh_db` every time you work with the
+        pkg module. Automatic refresh will occur based on the following minion
+        configuration settings:
+
+        - `winrepo_cache_expire_min`
+        - `winrepo_cache_expire_max`
+
+        However, if the package definition files have changed, as would be the
+        case if you are developing a new package definition, this function
+        should be called to ensure the minion has the latest information about
+        packages available to it.
+
+    .. warning::
+        Directories and files fetched from <winrepo_source_dir>
+        (`/srv/salt/win/repo-ng`) will be processed in alphabetical order. If
+        two or more software definition files contain the same name, the last
+        one processed replaces all data from the files processed before it.
+
+    For more information see
+    :ref:`Windows Software Repository <windows-package-manager>`
 
     Kwargs:
 
         saltenv (str): Salt environment. Default: ``base``
 
         verbose (bool):
-            Return verbose data structure which includes 'success_list', a list
-            of all sls files and the package names contained within. Default
-            'False'
+            Return a verbose data structure which includes 'success_list', a
+            list of all sls files and the package names contained within.
+            Default is 'False'
 
         failhard (bool):
-            If ``True``, an error will be raised if any repo SLS files failed to
+            If ``True``, an error will be raised if any repo SLS files fails to
             process. If ``False``, no error will be raised, and a dictionary
             containing the full results will be returned.
 
     Returns:
         dict: A dictionary containing the results of the database refresh.
 
-    .. Warning::
+    .. note::
+        A result with a `total: 0` generally means that the files are in the
+        wrong location on the master. Try running the following command on the
+        minion: `salt-call -l debug pkg.refresh saltenv=base`
+
+    .. warning::
         When calling this command from a state using `module.run` be sure to
         pass `failhard: False`. Otherwise the state will report failure if it
         encounters a bad software definition file.
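The automatic-refresh behavior described in the new docstring hinges on comparing the age of the cached ``winrepo.p`` against the two expiry settings. A hypothetical mtime-based sketch of that decision (the real logic lives in ``_refresh_db_conditional``, which is not shown in this diff):

.. code-block:: python

    import os
    import time


    def needs_refresh(winrepo_file, expire_min, expire_max):
        '''Decide whether the cached metadata database should be rebuilt.'''
        try:
            age = time.time() - os.path.getmtime(winrepo_file)
        except OSError:
            return True  # no cached database yet, must refresh
        if age < expire_min:
            return False  # fresh enough, never refresh this often
        return age > expire_max  # stale beyond the maximum, force refresh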
@@ -648,10 +697,12 @@ def refresh_db(**kwargs):
     )
 
     # Cache repo-ng locally
     log.info('Fetching *.sls files from {0}'.format(repo_details.winrepo_source_dir))
     __salt__['cp.cache_dir'](
-        repo_details.winrepo_source_dir,
-        saltenv,
-        include_pat='*.sls'
+        path=repo_details.winrepo_source_dir,
+        saltenv=saltenv,
+        include_pat='*.sls',
+        exclude_pat=r'E@\/\..*?\/'  # Exclude all hidden directories (.git)
     )
 
     return genrepo(saltenv=saltenv, verbose=verbose, failhard=failhard)
@@ -754,6 +805,10 @@ def genrepo(**kwargs):
             to process. If ``False``, no error will be raised, and a dictionary
             containing the full results will be returned.
 
+    .. note::
+        - Hidden directories (directories beginning with '`.`', such as
+          '`.git`') will be ignored.
+
     Returns:
         dict: A dictionary of the results of the command
 
@@ -777,9 +832,16 @@ def genrepo(**kwargs):
     repo_details = _get_repo_details(saltenv)
 
     for root, _, files in salt.utils.path.os_walk(repo_details.local_dest, followlinks=False):
+
+        # Skip hidden directories (.git)
+        if re.search(r'[\\/]\..*', root):
+            log.debug('Skipping files in directory: {0}'.format(root))
+            continue
+
         short_path = os.path.relpath(root, repo_details.local_dest)
         if short_path == '.':
             short_path = ''
+
         for name in files:
             if name.endswith('.sls'):
                 total_files_processed += 1
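The hidden-directory filter added above relies on one regular expression used in two places: as a ``cp.cache_dir`` exclude pattern and in the ``os_walk`` loop. A quick standalone check of the walk-side pattern, with made-up paths:

.. code-block:: python

    import re

    # Matches any path containing a component that starts with a dot, e.g. '.git'.
    HIDDEN = re.compile(r'[\\/]\..*')

    for root in ('win/repo-ng/custom', 'win/repo-ng/.git/objects'):
        print(root, '->', 'skip' if HIDDEN.search(root) else 'process')
    # win/repo-ng/custom -> process
    # win/repo-ng/.git/objects -> skip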
@@ -1209,11 +1271,11 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
             # single files
             if cache_dir and installer.startswith('salt:'):
                 path, _ = os.path.split(installer)
-                __salt__['cp.cache_dir'](path,
-                                         saltenv,
-                                         False,
-                                         None,
-                                         'E@init.sls$')
+                __salt__['cp.cache_dir'](path=path,
+                                         saltenv=saltenv,
+                                         include_empty=False,
+                                         include_pat=None,
+                                         exclude_pat='E@init.sls$')
 
             # Check to see if the cache_file is cached... if passed
             if cache_file and cache_file.startswith('salt:'):
@@ -1820,7 +1882,7 @@ def get_repo_data(saltenv='base'):
     serial = salt.payload.Serial(__opts__)
     with salt.utils.files.fopen(repo_details.winrepo_file, 'rb') as repofile:
         try:
-            repodata = salt.utils.data.decode(serial.loads(repofile.read()) or {})
+            repodata = salt.utils.data.decode(serial.loads(repofile.read(), encoding='utf-8') or {})
             __context__['winrepo.data'] = repodata
             return repodata
         except Exception as exc:
@@ -1843,7 +1905,7 @@ def _get_name_map(saltenv='base'):
         return name_map
 
     for k in name_map:
-        u_name_map[k.decode('utf-8')] = name_map[k]
+        u_name_map[k] = name_map[k]
     return u_name_map
 
@@ -211,25 +211,29 @@ def _check_versionlock():
         )
 
 
-def _get_repo_options(**kwargs):
+def _get_options(**kwargs):
     '''
-    Returns a list of '--enablerepo' and '--disablerepo' options to be used
-    in the yum command, based on the kwargs.
+    Returns a list of options to be used in the yum/dnf command, based on the
+    kwargs passed.
     '''
+    # Get repo options from the kwargs
     fromrepo = kwargs.pop('fromrepo', '')
     repo = kwargs.pop('repo', '')
     disablerepo = kwargs.pop('disablerepo', '')
     enablerepo = kwargs.pop('enablerepo', '')
+    disableexcludes = kwargs.pop('disableexcludes', '')
+    branch = kwargs.pop('branch', '')
+    get_extra_options = kwargs.pop('get_extra_options', False)
 
     # Support old 'repo' argument
     if repo and not fromrepo:
         fromrepo = repo
 
     ret = []
 
     if fromrepo:
         log.info('Restricting to repo \'%s\'', fromrepo)
-        ret.extend(['--disablerepo=*', '--enablerepo=' + fromrepo])
+        ret.extend(['--disablerepo=*', '--enablerepo={0}'.format(fromrepo)])
     else:
         if disablerepo:
             targets = [disablerepo] \
@@ -245,58 +249,30 @@ def _get_options(**kwargs):
                 else enablerepo
             log.info('Enabling repo(s): %s', ', '.join(targets))
             ret.extend(['--enablerepo={0}'.format(x) for x in targets])
-    return ret
 
+    if disableexcludes:
+        log.info('Disabling excludes for \'%s\'', disableexcludes)
+        ret.append('--disableexcludes={0}'.format(disableexcludes))
 
-def _get_excludes_option(**kwargs):
-    '''
-    Returns a list of '--disableexcludes' option to be used in the yum command,
-    based on the kwargs.
-    '''
-    disable_excludes = kwargs.pop('disableexcludes', '')
-    ret = []
-    if disable_excludes:
-        log.info('Disabling excludes for \'%s\'', disable_excludes)
-        ret.append('--disableexcludes={0}'.format(disable_excludes))
-    return ret
-
-
-def _get_branch_option(**kwargs):
-    '''
-    Returns a list of '--branch' option to be used in the yum command,
-    based on the kwargs. This feature requires 'branch' plugin for YUM.
-    '''
-    branch = kwargs.pop('branch', '')
-    ret = []
     if branch:
         log.info('Adding branch \'%s\'', branch)
-        ret.append('--branch=\'{0}\''.format(branch))
-    return ret
+        ret.append('--branch={0}'.format(branch))
 
+    if get_extra_options:
+        # sorting here to make order uniform, makes unit testing more reliable
+        for key in sorted(kwargs):
+            if key.startswith('__'):
+                continue
+            value = kwargs[key]
+            if isinstance(value, six.string_types):
+                log.info('Found extra option --%s=%s', key, value)
+                ret.append('--{0}={1}'.format(key, value))
+            elif value is True:
+                log.info('Found extra option --%s', key)
+                ret.append('--{0}'.format(key))
+        if ret:
+            log.info('Adding extra options: %s', ret)
 
-def _get_extra_options(**kwargs):
-    '''
-    Returns list of extra options for yum
-    '''
-    ret = []
-    kwargs = salt.utils.args.clean_kwargs(**kwargs)
-
-    # Remove already handled options from kwargs
-    fromrepo = kwargs.pop('fromrepo', '')
-    repo = kwargs.pop('repo', '')
-    disablerepo = kwargs.pop('disablerepo', '')
-    enablerepo = kwargs.pop('enablerepo', '')
-    disable_excludes = kwargs.pop('disableexcludes', '')
-    branch = kwargs.pop('branch', '')
-
-    for key, value in six.iteritems(kwargs):
-        if isinstance(value, six.string_types):
-            log.info('Adding extra option --%s=\'%s\'', key, value)
-            ret.append('--{0}=\'{1}\''.format(key, value))
-        elif value is True:
-            log.info('Adding extra option --%s', key)
-            ret.append('--{0}'.format(key))
-    log.info('Adding extra options %s', ret)
     return ret
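The consolidation above folds four helpers into one: repo selection, exclude disabling, branch selection, and pass-through extra options all come from a single kwargs scan. A standalone sketch of the resulting behavior (simplified; the real function also handles ``repo``/``fromrepo`` aliasing and ``disablerepo``/``enablerepo`` lists):

.. code-block:: python

    def build_options(fromrepo='', disableexcludes='', branch='',
                      get_extra_options=False, **kwargs):
        ret = []
        if fromrepo:
            ret.extend(['--disablerepo=*', '--enablerepo={0}'.format(fromrepo)])
        if disableexcludes:
            ret.append('--disableexcludes={0}'.format(disableexcludes))
        if branch:
            ret.append('--branch={0}'.format(branch))
        if get_extra_options:
            for key in sorted(kwargs):  # sorted for deterministic ordering
                if key.startswith('__'):  # skip Salt-internal dunder kwargs
                    continue
                value = kwargs[key]
                if value is True:
                    ret.append('--{0}'.format(key))
                elif isinstance(value, str):
                    ret.append('--{0}={1}'.format(key, value))
        return ret


    print(build_options(fromrepo='epel', security=True, get_extra_options=True))
    # ['--disablerepo=*', '--enablerepo=epel', '--security']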
@@ -460,8 +436,7 @@ def latest_version(*names, **kwargs):
     if len(names) == 0:
         return ''
 
-    repo_arg = _get_repo_options(**kwargs)
-    exclude_arg = _get_excludes_option(**kwargs)
+    options = _get_options(**kwargs)
 
     # Refresh before looking for the latest version available
     if refresh:
@@ -471,8 +446,7 @@ def latest_version(*names, **kwargs):
 
     # Get available versions for specified package(s)
     cmd = [_yum(), '--quiet']
-    cmd.extend(repo_arg)
-    cmd.extend(exclude_arg)
+    cmd.extend(options)
     cmd.extend(['list', 'available'])
     cmd.extend(names)
     out = __salt__['cmd.run_all'](cmd,
@@ -818,7 +792,7 @@ def list_repo_pkgs(*args, **kwargs):
     disablerepo = kwargs.pop('disablerepo', '') or ''
     enablerepo = kwargs.pop('enablerepo', '') or ''
 
-    repo_arg = _get_repo_options(fromrepo=fromrepo, **kwargs)
+    repo_arg = _get_options(fromrepo=fromrepo, **kwargs)
 
     if fromrepo and not isinstance(fromrepo, list):
         try:
|
||||
|
||||
salt '*' pkg.list_upgrades
|
||||
'''
|
||||
repo_arg = _get_repo_options(**kwargs)
|
||||
exclude_arg = _get_excludes_option(**kwargs)
|
||||
options = _get_options(**kwargs)
|
||||
|
||||
if salt.utils.data.is_true(refresh):
|
||||
refresh_db(check_update=False, **kwargs)
|
||||
|
||||
cmd = [_yum(), '--quiet']
|
||||
cmd.extend(repo_arg)
|
||||
cmd.extend(exclude_arg)
|
||||
cmd.extend(options)
|
||||
cmd.extend(['list', 'upgrades' if _yum() == 'dnf' else 'updates'])
|
||||
out = __salt__['cmd.run_all'](cmd,
|
||||
output_loglevel='trace',
|
||||
@@ -1096,21 +1068,19 @@ def refresh_db(**kwargs):
 
     check_update_ = kwargs.pop('check_update', True)
 
-    repo_arg = _get_repo_options(**kwargs)
-    exclude_arg = _get_excludes_option(**kwargs)
-    branch_arg = _get_branch_option(**kwargs)
+    options = _get_options(**kwargs)
 
-    clean_cmd = [_yum(), '--quiet', 'clean', 'expire-cache']
-    update_cmd = [_yum(), '--quiet', 'check-update']
+    clean_cmd = [_yum(), '--quiet', '--assumeyes', 'clean', 'expire-cache']
+    update_cmd = [_yum(), '--quiet', '--assumeyes', 'check-update']
 
-    if __grains__.get('os_family') == 'RedHat' and __grains__.get('osmajorrelease') == '7':
-        # This feature is disable because it is not used by Salt and lasts a lot with using large repo like EPEL
+    if __grains__.get('os_family') == 'RedHat' \
+            and __grains__.get('osmajorrelease') == 7:
+        # This feature is disabled because it is not used by Salt and adds a
+        # lot of extra time to the command with large repos like EPEL
         update_cmd.append('--setopt=autocheck_running_kernel=false')
 
-    for args in (repo_arg, exclude_arg, branch_arg):
-        if args:
-            clean_cmd.extend(args)
-            update_cmd.extend(args)
+    clean_cmd.extend(options)
+    update_cmd.extend(options)
 
     __salt__['cmd.run'](clean_cmd, python_shell=False)
     if check_update_:
@@ -1162,6 +1132,7 @@ def install(name=None,
             reinstall=False,
             normalize=True,
             update_holds=False,
+            saltenv='base',
             ignore_epoch=False,
             **kwargs):
     '''
@@ -1343,9 +1314,7 @@ def install(name=None,
                    'version': '<new-version>',
                    'arch': '<new-arch>'}}}
     '''
-    repo_arg = _get_repo_options(**kwargs)
-    exclude_arg = _get_excludes_option(**kwargs)
-    branch_arg = _get_branch_option(**kwargs)
+    options = _get_options(**kwargs)
 
     if salt.utils.data.is_true(refresh):
         refresh_db(**kwargs)
@@ -1353,7 +1322,7 @@ def install(name=None,
 
     try:
         pkg_params, pkg_type = __salt__['pkg_resource.parse_targets'](
-            name, pkgs, sources, normalize=normalize, **kwargs
+            name, pkgs, sources, saltenv=saltenv, normalize=normalize
         )
     except MinionError as exc:
         raise CommandExecutionError(exc)
@@ -1580,9 +1549,7 @@ def install(name=None,
         '''
         DRY function to add args common to all yum/dnf commands
         '''
-        for arg in (repo_arg, exclude_arg, branch_arg):
-            if arg:
-                cmd.extend(arg)
+        cmd.extend(options)
         if skip_verify:
             cmd.append('--nogpgcheck')
         if downloadonly:
@@ -1847,17 +1814,14 @@ def upgrade(name=None,
     .. note::
 
         To add extra arguments to the ``yum upgrade`` command, pass them as key
-        word arguments. For arguments without assignments, pass ``True``
+        word arguments. For arguments without assignments, pass ``True``
 
     .. code-block:: bash
 
         salt '*' pkg.upgrade security=True exclude='kernel*'
 
     '''
-    repo_arg = _get_repo_options(**kwargs)
-    exclude_arg = _get_excludes_option(**kwargs)
-    branch_arg = _get_branch_option(**kwargs)
-    extra_args = _get_extra_options(**kwargs)
+    options = _get_options(get_extra_options=True, **kwargs)
 
     if salt.utils.data.is_true(refresh):
         refresh_db(**kwargs)
@@ -1886,9 +1850,7 @@ def upgrade(name=None,
             and __salt__['config.get']('systemd.scope', True):
         cmd.extend(['systemd-run', '--scope'])
     cmd.extend([_yum(), '--quiet', '-y'])
-    for args in (repo_arg, exclude_arg, branch_arg, extra_args):
-        if args:
-            cmd.extend(args)
+    cmd.extend(options)
     if skip_verify:
         cmd.append('--nogpgcheck')
     cmd.append('upgrade')
@@ -482,15 +482,6 @@ def info_installed(*names, **kwargs):
         t_nfo = dict()
         # Translate dpkg-specific keys to a common structure
         for key, value in six.iteritems(pkg_nfo):
-            if isinstance(value, six.string_types):
-                # Check, if string is encoded in a proper UTF-8
-                if six.PY3:
-                    value_ = value.encode('UTF-8', 'ignore').decode('UTF-8', 'ignore')
-                else:
-                    value_ = value.decode('UTF-8', 'ignore').encode('UTF-8', 'ignore')
-                if value != value_:
-                    value = kwargs.get('errors', 'ignore') == 'ignore' and value_ or 'N/A (invalid UTF-8)'
-                    log.error('Package %s has bad UTF-8 code in %s: %s', pkg_name, key, value)
             if key == 'source_rpm':
                 t_nfo['source'] = value
             else:
@@ -248,28 +248,6 @@ def _json_dumps(obj, **kwargs):
 # - "wheel" (need async api...)
 
 
-class SaltClientsMixIn(object):
-    '''
-    MixIn class to container all of the salt clients that the API needs
-    '''
-    # TODO: load this proactively, instead of waiting for a request
-    __saltclients = None
-
-    @property
-    def saltclients(self):
-        if SaltClientsMixIn.__saltclients is None:
-            local_client = salt.client.get_local_client(mopts=self.application.opts)
-            # TODO: refreshing clients using cachedict
-            SaltClientsMixIn.__saltclients = {
-                'local': local_client.run_job_async,
-                # not the actual client we'll use.. but its what we'll use to get args
-                'local_async': local_client.run_job_async,
-                'runner': salt.runner.RunnerClient(opts=self.application.opts).cmd_async,
-                'runner_async': None,  # empty, since we use the same client as `runner`
-            }
-        return SaltClientsMixIn.__saltclients
-
-
 AUTH_TOKEN_HEADER = 'X-Auth-Token'
 AUTH_COOKIE_NAME = 'session_id'
@@ -400,7 +378,7 @@ class EventListener(object):
             del self.timeout_map[future]
 
 
-class BaseSaltAPIHandler(tornado.web.RequestHandler, SaltClientsMixIn):  # pylint: disable=W0223
+class BaseSaltAPIHandler(tornado.web.RequestHandler):  # pylint: disable=W0223
     ct_out_map = (
         ('application/json', _json_dumps),
         ('application/x-yaml', salt.utils.yaml.safe_dump),
@@ -428,6 +406,16 @@ class BaseSaltAPIHandler(tornado.web.RequestHandler):  # pylint: disable=W0223
             self.application.opts,
         )
 
+        if not hasattr(self, 'saltclients'):
+            local_client = salt.client.get_local_client(mopts=self.application.opts)
+            self.saltclients = {
+                'local': local_client.run_job_async,
+                # not the actual client we'll use.. but its what we'll use to get args
+                'local_async': local_client.run_job_async,
+                'runner': salt.runner.RunnerClient(opts=self.application.opts).cmd_async,
+                'runner_async': None,  # empty, since we use the same client as `runner`
+            }
+
     @property
     def token(self):
         '''
@@ -759,7 +747,7 @@ class SaltAuthHandler(BaseSaltAPIHandler):  # pylint: disable=W0223
         self.write(self.serialize(ret))
 
 
-class SaltAPIHandler(BaseSaltAPIHandler, SaltClientsMixIn):  # pylint: disable=W0223
+class SaltAPIHandler(BaseSaltAPIHandler):  # pylint: disable=W0223
     '''
     Main API handler for base "/"
     '''
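The net effect of removing ``SaltClientsMixIn`` is to stop caching the client table in a name-mangled class attribute shared process-wide, and instead build it lazily per handler instance. The shape of that pattern, reduced to a runnable essential:

.. code-block:: python

    class BaseHandler(object):
        '''Stand-in for tornado.web.RequestHandler; illustrative only.'''

        def initialize(self):
            # Built once per instance on first use, instead of being shared
            # through a private class attribute as the removed mixin did.
            if not hasattr(self, 'saltclients'):
                self.saltclients = {
                    'local': lambda *args: ('local', args),
                    'runner': lambda *args: ('runner', args),
                }


    h = BaseHandler()
    h.initialize()
    print(h.saltclients['local']('test.ping'))  # ('local', ('test.ping',))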
@@ -226,6 +226,9 @@ def strip_esc_sequence(txt):
     from writing their own terminal manipulation commands
     '''
     if isinstance(txt, six.string_types):
-        return txt.replace('\033', '?')
+        try:
+            return txt.replace('\033', '?')
+        except UnicodeDecodeError:
+            return txt.replace(str('\033'), str('?'))  # future lint: disable=blacklisted-function
     else:
         return txt
@@ -71,20 +71,34 @@ class NestDisplay(object):
                 endc,
                 suffix)
         except UnicodeDecodeError:
-            return fmt.format(
-                indent,
-                color,
-                prefix,
-                salt.utils.stringutils.to_unicode(msg),
-                endc,
-                suffix)
+            try:
+                return fmt.format(
+                    indent,
+                    color,
+                    prefix,
+                    salt.utils.stringutils.to_unicode(msg),
+                    endc,
+                    suffix)
+            except UnicodeDecodeError:
+                # msg contains binary data that can't be decoded
+                return str(fmt).format(  # future lint: disable=blacklisted-function
+                    indent,
+                    color,
+                    prefix,
+                    msg,
+                    endc,
+                    suffix)
 
     def display(self, ret, indent, prefix, out):
         '''
         Recursively iterate down through data structures to determine output
         '''
         if isinstance(ret, bytes):
-            ret = salt.utils.stringutils.to_unicode(ret)
+            try:
+                ret = salt.utils.stringutils.to_unicode(ret)
+            except UnicodeDecodeError:
+                # ret contains binary data that can't be decoded
+                pass
 
         if ret is None or ret is True or ret is False:
             out.append(
@@ -183,4 +197,11 @@ def output(ret, **kwargs):
     base_indent = kwargs.get('nested_indent', 0) \
         or __opts__.get('nested_indent', 0)
     nest = NestDisplay(retcode=retcode)
-    return '\n'.join(nest.display(ret, base_indent, '', []))
+    lines = nest.display(ret, base_indent, '', [])
+    try:
+        return '\n'.join(lines)
+    except UnicodeDecodeError:
+        # output contains binary data that can't be decoded
+        return str('\n').join(  # future lint: disable=blacklisted-function
+            [salt.utils.stringutils.to_str(x) for x in lines]
+        )
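All three changes in this outputter follow the same shape: attempt the unicode path first, and fall back to byte strings only when the data cannot be decoded. A standalone sketch of the join fallback (Python 2 is the interesting case, since mixing ``str`` and ``unicode`` there triggers an implicit-decode failure; the ``TypeError`` branch covers bytes under Python 3):

.. code-block:: python

    def join_lines(lines):
        '''Join display lines, tolerating undecodable binary data.'''
        try:
            return '\n'.join(lines)
        except (UnicodeDecodeError, TypeError):
            # Fall back to joining raw bytes, coercing any text lines.
            return b'\n'.join(
                line.encode('utf-8', 'replace') if isinstance(line, type(u'')) else line
                for line in lines
            )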
@@ -1,19 +1,37 @@
-# -*- coding: utf-8 -*-
+#-*- coding: utf-8 -*-
 '''
-Retrieve EC2 instance data for minions.
+Retrieve EC2 instance data for minions for ec2_tags and ec2_tags_list
 
-The minion id must be the instance-id retrieved from AWS. As an
-option, use_grain can be set to True. This allows the use of an
+The minion id must be the AWS instance-id or value in 'tag_key'.
+For example set 'tag_key' to 'Name', to have the minion-id matched against the
+tag 'Name'. The tag contents must be unique. The value of tag_value can
+be 'uqdn' or 'asis'. if 'uqdn' strips any domain before comparison.
+
+The option use_grain can be set to True. This allows the use of an
 instance-id grain instead of the minion-id. Since this is a potential
 security risk, the configuration can be further expanded to include
 a list of minions that are trusted to only allow the alternate id
 of the instances to specific hosts. There is no glob matching at
 this time.
 
+The optional 'tag_list_key' indicates which keys should be added to
+'ec2_tags_list' and be split by tag_list_sep (default `;`). If a tag key is
+included in 'tag_list_key' it is removed from ec2_tags. If a tag does not
+exist it is still included as an empty list.
+
+
+Note: restart the salt-master for changes to take effect.
+
+
 .. code-block:: yaml
 
     ext_pillar:
       - ec2_pillar:
+          tag_key: 'Name'
+          tag_value: 'asis'
+          tag_list_key:
+            - Role
+          tag_list_sep: ';'
           use_grain: True
           minion_ids:
             - trusted-minion-1
@@ -31,6 +49,8 @@ the instance.
 from __future__ import absolute_import, print_function, unicode_literals
 import re
 import logging
+import salt.ext.six as six
+from salt.ext.six.moves import range
 
 # Import salt libs
 from salt.utils.versions import StrictVersion as _StrictVersion
@@ -47,6 +67,9 @@ except ImportError:
 # Set up logging
 log = logging.getLogger(__name__)
 
+# DEBUG boto is far too verbose
+logging.getLogger('boto').setLevel(logging.WARNING)
+
 
 def __virtual__():
     '''
@@ -59,7 +82,7 @@ def __virtual__():
     required_boto_version = _StrictVersion('2.8.0')
     if boto_version < required_boto_version:
         log.error("%s: installed boto version %s < %s, can't retrieve instance data",
-                 __name__, boto_version, required_boto_version)
+                  __name__, boto_version, required_boto_version)
         return False
     return True
 
@@ -76,64 +99,145 @@ def _get_instance_info():
 def ext_pillar(minion_id,
                pillar,  # pylint: disable=W0613
                use_grain=False,
-               minion_ids=None):
+               minion_ids=None,
+               tag_key=None,
+               tag_value='asis',
+               tag_list_key=None,
+               tag_list_sep=';'):
     '''
     Execute a command and read the output as YAML
     '''
+    valid_tag_value = ['uqdn', 'asis']
 
-    log.debug("Querying EC2 tags for minion id %s", minion_id)
+    # meta-data:instance-id
+    grain_instance_id = __grains__.get('meta-data', {}).get('instance-id', None)
+    if not grain_instance_id:
+        # dynamic:instance-identity:document:instanceId
+        grain_instance_id = \
+            __grains__.get('dynamic', {}).get('instance-identity', {}).get('document', {}).get('instance-id', None)
+    if grain_instance_id and re.search(r'^i-([0-9a-z]{17}|[0-9a-z]{8})$', grain_instance_id) is None:
+        log.error('External pillar %s, instance-id \'%s\' is not valid for '
+                  '\'%s\'', __name__, grain_instance_id, minion_id)
+        grain_instance_id = None  # invalid instance id found, remove it from use.
 
-    # If minion_id is not in the format of an AWS EC2 instance, check to see
-    # if there is a grain named 'instance-id' use that. Because this is a
-    # security risk, the master config must contain a use_grain: True option
-    # for this external pillar, which defaults to no
-    if re.search(r'^i-([0-9a-z]{17}|[0-9a-z]{8})$', minion_id) is None:
-        if 'instance-id' not in __grains__:
-            log.debug("Minion-id is not in AWS instance-id formation, and there "
-                      "is no instance-id grain for minion %s", minion_id)
-            return {}
-        if not use_grain:
-            log.debug("Minion-id is not in AWS instance-id formation, and option "
-                      "not set to use instance-id grain, for minion %s, use_grain "
-                      "is %s", minion_id, use_grain)
-            return {}
-        log.debug("use_grain set to %s", use_grain)
-        if minion_ids is not None and minion_id not in minion_ids:
-            log.debug("Minion-id is not in AWS instance ID format, and minion_ids "
-                      "is set in the ec2_pillar configuration, but minion %s is "
-                      "not in the list of allowed minions %s", minion_id, minion_ids)
-            return {}
-        if re.search(r'^i-([0-9a-z]{17}|[0-9a-z]{8})$', __grains__['instance-id']) is not None:
-            minion_id = __grains__['instance-id']
-            log.debug("Minion-id is not in AWS instance ID format, but a grain"
-                      " is, so using %s as the minion ID", minion_id)
-        else:
-            log.debug("Nether minion id nor a grain named instance-id is in "
-                      "AWS format, can't query EC2 tags for minion %s", minion_id)
-            return {}
+    # Check AWS Tag restrictions .i.e. letters, spaces, and numbers and + - = . _ : / @
+    if tag_key and re.match(r'[\w=.:/@-]+$', tag_key) is None:
+        log.error('External pillar %s, tag_key \'%s\' is not valid ',
+                  __name__, tag_key if isinstance(tag_key, six.text_type) else 'non-string')
+        return {}
+
+    if tag_key and tag_value not in valid_tag_value:
+        log.error('External pillar %s, tag_value \'%s\' is not valid must be one '
+                  'of %s', __name__, tag_value, ' '.join(valid_tag_value))
+        return {}
+
+    if not tag_key:
+        base_msg = ('External pillar %s, querying EC2 tags for minion id \'%s\' '
+                    'against instance-id', __name__, minion_id)
+    else:
+        base_msg = ('External pillar %s, querying EC2 tags for minion id \'%s\' '
+                    'against instance-id or \'%s\' against \'%s\'', __name__, minion_id, tag_key, tag_value)
+
+    log.debug(base_msg)
+    find_filter = None
+    find_id = None
+
+    if re.search(r'^i-([0-9a-z]{17}|[0-9a-z]{8})$', minion_id) is not None:
+        find_filter = None
+        find_id = minion_id
+    elif tag_key:
+        if tag_value == 'uqdn':
+            find_filter = {'tag:{0}'.format(tag_key): minion_id.split('.', 1)[0]}
+        else:
+            find_filter = {'tag:{0}'.format(tag_key): minion_id}
+        if grain_instance_id:
+            # we have an untrusted grain_instance_id, use it to narrow the search
+            # even more. Combination will be unique even if uqdn is set.
+            find_filter.update({'instance-id': grain_instance_id})
+            # Add this if running state is not dependant on EC2Config
+            # find_filter.update('instance-state-name': 'running')
 
-    m = boto.utils.get_instance_metadata(timeout=0.1, num_retries=1)
-    if len(m.keys()) < 1:
-        log.info("%s: not an EC2 instance, skipping", __name__)
-        return None
+    # no minion-id is instance-id and no suitable filter, try use_grain if enabled
+    if not find_filter and not find_id and use_grain:
+        if not grain_instance_id:
+            log.debug('Minion-id is not in AWS instance-id formation, and there '
+                      'is no instance-id grain for minion %s', minion_id)
+            return {}
+        if minion_ids is not None and minion_id not in minion_ids:
+            log.debug('Minion-id is not in AWS instance ID format, and minion_ids '
+                      'is set in the ec2_pillar configuration, but minion %s is '
+                      'not in the list of allowed minions %s', minion_id, minion_ids)
+            return {}
+        find_id = grain_instance_id
+
+    if not (find_filter or find_id):
+        log.debug('External pillar %s, querying EC2 tags for minion id \'%s\' against '
+                  'instance-id or \'%s\' against \'%s\' noughthing to match against',
+                  __name__, minion_id, tag_key, tag_value)
+        return {}
+
+    myself = boto.utils.get_instance_metadata(timeout=0.1, num_retries=1)
+    if len(myself.keys()) < 1:
+        log.info("%s: salt master not an EC2 instance, skipping", __name__)
+        return {}
 
     # Get the Master's instance info, primarily the region
-    (instance_id, region) = _get_instance_info()
+    (_, region) = _get_instance_info()
 
     try:
         conn = boto.ec2.connect_to_region(region)
-    except boto.exception as e:  # pylint: disable=E0712
-        log.error("%s: invalid AWS credentials.", __name__)
-        return None
+    except boto.exception.AWSConnectionError as exc:
+        log.error('%s: invalid AWS credentials, %s', __name__, exc)
+        return {}
+    except:
+        raise
 
-    tags = {}
+    if conn is None:
+        log.error('%s: Could not connect to region %s', __name__, region)
+        return {}
+
     try:
-        _tags = conn.get_all_tags(filters={'resource-type': 'instance',
-                                           'resource-id': minion_id})
-        for tag in _tags:
-            tags[tag.name] = tag.value
-    except IndexError as e:
-        log.error("Couldn't retrieve instance information: %s", e)
-        return None
+        if find_id:
+            instance_data = conn.get_only_instances(instance_ids=[find_id], dry_run=False)
+        else:
+            # filters and max_results can not be used togther.
+            instance_data = conn.get_only_instances(filters=find_filter, dry_run=False)
 
-    return {'ec2_tags': tags}
+    except boto.exception.EC2ResponseError as exc:
+        log.error('%s failed with \'%s\'', base_msg, exc)
+        return {}
+
+    if not instance_data:
+        log.debug('%s no match using \'%s\'', base_msg, find_id if find_id else find_filter)
+        return {}
+
+    # Find a active instance, i.e. ignore terminated and stopped instances
+    active_inst = []
+    for inst in range(0, len(instance_data)):
+        if instance_data[inst].state not in ['terminated', 'stopped']:
+            active_inst.append(inst)
+
+    valid_inst = len(active_inst)
+    if not valid_inst:
+        log.debug('%s match found but not active \'%s\'', base_msg, find_id if find_id else find_filter)
+        return {}
+
+    if valid_inst > 1:
+        log.error('%s multiple matches, ignored, using \'%s\'', base_msg, find_id if find_id else find_filter)
+        return {}
+
+    instance = instance_data[active_inst[0]]
+    if instance.tags:
+        ec2_tags = instance.tags
+        ec2_tags_list = {}
+        log.debug('External pillar %s, for minion id \'%s\', tags: %s', __name__, minion_id, instance.tags)
+        if tag_list_key and isinstance(tag_list_key, list):
+            for item in tag_list_key:
+                if item in ec2_tags:
+                    ec2_tags_list[item] = ec2_tags[item].split(tag_list_sep)
+                    del ec2_tags[item]  # make sure its only in ec2_tags_list
+                else:
+                    ec2_tags_list[item] = []  # always return a result
+
+        return {'ec2_tags': ec2_tags, 'ec2_tags_list': ec2_tags_list}
+    return {}
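The new ``tag_list_key`` handling at the end of ``ext_pillar`` can be isolated into a small pure function. A standalone sketch with example data:

.. code-block:: python

    def split_tag_lists(ec2_tags, tag_list_key, tag_list_sep=';'):
        ec2_tags = dict(ec2_tags)
        ec2_tags_list = {}
        for item in tag_list_key or []:
            if item in ec2_tags:
                # Listed keys move out of ec2_tags, split on the separator.
                ec2_tags_list[item] = ec2_tags.pop(item).split(tag_list_sep)
            else:
                ec2_tags_list[item] = []  # absent keys still yield a result
        return ec2_tags, ec2_tags_list


    print(split_tag_lists({'Name': 'web01', 'Role': 'db;cache'}, ['Role']))
    # ({'Name': 'web01'}, {'Role': ['db', 'cache']})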
@@ -1,182 +1,97 @@
 # -*- coding: utf-8 -*-
 '''
-``File_tree`` is an external pillar that allows
-values from all files in a directory tree to be imported as Pillar data.
+The ``file_tree`` external pillar allows values from all files in a directory
+tree to be imported as Pillar data.
 
-Note this is an external pillar, and is subject to the rules and constraints
-governing external pillars detailed here: :ref:`external-pillars`.
+.. note::
+
+    This is an external pillar and is subject to the :ref:`rules and
+    constraints <external-pillars>` governing external pillars.
 
 .. versionadded:: 2015.5.0
 
-Example Configuration
----------------------
+In this pillar, data is organized by either Minion ID or Nodegroup name. To
+setup pillar data for a specific Minion, place it in
+``<root_dir>/hosts/<minion_id>``. To setup pillar data for an entire
+Nodegroup, place it in ``<root_dir>/nodegroups/<node_group>`` where
+``<node_group>`` is the Nodegroup's name.
+
+Example ``file_tree`` Pillar
+============================
+
+Master Configuration
+--------------------
 
 .. code-block:: yaml
 
     ext_pillar:
       - file_tree:
-          root_dir: /path/to/root/directory
+          root_dir: /srv/ext_pillar
           follow_dir_links: False
           keep_newline: True
 
-The ``root_dir`` parameter is required and points to the directory where files
-for each host are stored. The ``follow_dir_links`` parameter is optional and
-defaults to False. If ``follow_dir_links`` is set to True, this external pillar
-will follow symbolic links to other directories.
-
-.. warning::
-    Be careful when using ``follow_dir_links``, as a recursive symlink chain
-    will result in unexpected results.
-
-.. versionchanged:: Oxygen
-    If ``root_dir`` is a relative path, it will be treated as relative to the
-    :conf_master:`pillar_roots` of the environment specified by
-    :conf_minion:`pillarenv`. If an environment specifies multiple
-    roots, this module will search for files relative to all of them, in order,
-    merging the results.
-
-If ``keep_newline`` is set to ``True``, then the pillar values for files ending
-in newlines will keep that newline. The default behavior is to remove the
-end-of-file newline. ``keep_newline`` should be turned on if the pillar data is
-intended to be used to deploy a file using ``contents_pillar`` with a
-:py:func:`file.managed <salt.states.file.managed>` state.
+    node_groups:
+      internal_servers: 'L@bob,stuart,kevin'
+
+Pillar Configuration
+--------------------
+
+.. code-block:: bash
+
+    (salt-master) # tree /srv/ext_pillar
+    /srv/ext_pillar/
+    |-- hosts
+    |   |-- bob
+    |   |   |-- apache
+    |   |   |   `-- config.d
+    |   |   |       |-- 00_important.conf
+    |   |   |       `-- 20_bob_extra.conf
+    |   |   `-- corporate_app
+    |   |       `-- settings
+    |   |           `-- bob_settings.cfg
+    |   `-- kevin
+    |       |-- apache
+    |       |   `-- config.d
+    |       |       `-- 00_important.conf
+    |       `-- corporate_app
+    |           `-- settings
+    |               `-- kevin_settings.cfg
+    `-- nodegroups
+        `-- internal_servers
+            `-- corporate_app
+                `-- settings
+                    `-- common_settings.cfg
 
-.. versionchanged:: 2015.8.4
-    The ``raw_data`` parameter has been renamed to ``keep_newline``. In earlier
-    releases, ``raw_data`` must be used. Also, this parameter can now be a list
-    of globs, allowing for more granular control over which pillar values keep
-    their end-of-file newline. The globs match paths relative to the
-    directories named for minion IDs and nodegroups underneath the ``root_dir``
-    (see the layout examples in the below sections).
-
-.. code-block:: yaml
-
-    ext_pillar:
-      - file_tree:
-          root_dir: /path/to/root/directory
-          keep_newline:
-            - files/testdir/*
+Verify Pillar Data
+------------------
+
+.. code-block:: bash
+
+    (salt-master) # salt bob pillar.items
+    bob:
+        ----------
+        apache:
+            ----------
+            config.d:
+                ----------
+                00_important.conf:
+                    <important_config important_setting="yes" />
+                20_bob_extra.conf:
+                    <bob_specific_cfg has_freeze_ray="yes" />
+        corporate_app:
+            ----------
+            settings:
+                ----------
+                common_settings:
+                    // This is the main settings file for the corporate
+                    // internal web app
+                    main_setting: probably
+                bob_settings:
+                    role: bob
 
-.. note::
-    In earlier releases, this documentation incorrectly stated that binary
-    files would not affected by the ``keep_newline`` configuration. However,
-    this module does not actually distinguish between binary and text files.
-
-.. versionchanged:: 2017.7.0
-    Templating/rendering has been added. You can now specify a default render
-    pipeline and a black- and whitelist of (dis)allowed renderers.
-
-    ``template`` must be set to ``True`` for templating to happen.
-
-    .. code-block:: yaml
-
-        ext_pillar:
-          - file_tree:
-              root_dir: /path/to/root/directory
-              render_default: jinja|yaml
-              renderer_blacklist:
-                - gpg
-              renderer_whitelist:
-                - jinja
-                - yaml
-              template: True
-
-Assigning Pillar Data to Individual Hosts
------------------------------------------
-
-To configure pillar data for each host, this external pillar will recursively
-iterate over ``root_dir``/hosts/``id`` (where ``id`` is a minion ID), and
-compile pillar data with each subdirectory as a dictionary key and each file
-as a value.
-
-For example, the following ``root_dir`` tree:
-
-.. code-block:: text
-
-    ./hosts/
-    ./hosts/test-host/
-    ./hosts/test-host/files/
-    ./hosts/test-host/files/testdir/
-    ./hosts/test-host/files/testdir/file1.txt
-    ./hosts/test-host/files/testdir/file2.txt
-    ./hosts/test-host/files/another-testdir/
-    ./hosts/test-host/files/another-testdir/symlink-to-file1.txt
-
-will result in the following pillar tree for minion with ID ``test-host``:
-
-.. code-block:: text
-
-    test-host:
-        ----------
-        files:
-            ----------
-            another-testdir:
-                ----------
-                symlink-to-file1.txt:
-                    Contents of file #1.
-
-            testdir:
-                ----------
-                file1.txt:
-                    Contents of file #1.
-
-                file2.txt:
-                    Contents of file #2.
-
-.. note::
-    Subdirectories underneath ``root_dir``/hosts/``id`` become nested
-    dictionaries, as shown above.
-
-
-Assigning Pillar Data to Entire Nodegroups
-------------------------------------------
-
-To assign Pillar data to all minions in a given nodegroup, this external pillar
-recursively iterates over ``root_dir``/nodegroups/``nodegroup`` (where
-``nodegroup`` is the name of a nodegroup), and like for individual hosts,
-compiles pillar data with each subdirectory as a dictionary key and each file
-as a value.
-
-.. important::
-    If the same Pillar key is set for a minion both by nodegroup and by
-    individual host, then the value set for the individual host will take
-    precedence.
-
-For example, the following ``root_dir`` tree:
-
-.. code-block:: text
-
-    ./nodegroups/
-    ./nodegroups/test-group/
-    ./nodegroups/test-group/files/
-    ./nodegroups/test-group/files/testdir/
-    ./nodegroups/test-group/files/testdir/file1.txt
-    ./nodegroups/test-group/files/testdir/file2.txt
-    ./nodegroups/test-group/files/another-testdir/
-    ./nodegroups/test-group/files/another-testdir/symlink-to-file1.txt
-
-will result in the following pillar data for minions in the node group
-``test-group``:
-
-.. code-block:: text
-
-    test-host:
-        ----------
-        files:
-            ----------
-            another-testdir:
-                ----------
-                symlink-to-file1.txt:
-                    Contents of file #1.
-
-            testdir:
-                ----------
-                file1.txt:
-                    Contents of file #1.
-
-                file2.txt:
-                    Contents of file #2.
+The leaf data in the example shown is the contents of the pillar files.
 '''
 from __future__ import absolute_import, print_function, unicode_literals
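At its core the pillar shown above is a recursive directory-to-dictionary mapping. A minimal sketch, assuming plain text files and none of the module's link handling, rendering, or ``keep_newline`` options:

.. code-block:: python

    import os


    def tree_to_pillar(top):
        pillar = {}
        for entry in sorted(os.listdir(top)):
            path = os.path.join(top, entry)
            if os.path.isdir(path):
                pillar[entry] = tree_to_pillar(path)  # subdirectory -> nested dict
            else:
                with open(path) as fh:
                    # Default behavior strips the end-of-file newline.
                    pillar[entry] = fh.read().rstrip('\n')
        return pillar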
@@ -311,7 +226,130 @@ def ext_pillar(minion_id,
                renderer_whitelist=None,
                template=False):
     '''
-    Compile pillar data for the specified minion ID
+    Compile pillar data from the given ``root_dir`` specific to Nodegroup names
+    and Minion IDs.
+
+    If a Minion's ID is not found at ``<root_dir>/host/<minion_id>`` or if it
+    is not included in any Nodegroups named at
+    ``<root_dir>/nodegroups/<node_group>``, no pillar data provided by this
+    pillar module will be available for that Minion.
+
+    .. versionchanged:: 2017.7.0
+        Templating/rendering has been added. You can now specify a default
+        render pipeline and a black- and whitelist of (dis)allowed renderers.
+
+        :param:`template` must be set to ``True`` for templating to happen.
+
+        .. code-block:: yaml
+
+            ext_pillar:
+              - file_tree:
+                  root_dir: /path/to/root/directory
+                  render_default: jinja|yaml
+                  renderer_blacklist:
+                    - gpg
+                  renderer_whitelist:
+                    - jinja
+                    - yaml
+                  template: True
+
+    :param minion_id:
+        The ID of the Minion whose pillar data is to be collected
+
+    :param pillar:
+        Unused by the ``file_tree`` pillar module
+
+    :param root_dir:
+        Filesystem directory used as the root for pillar data (e.g.
+        ``/srv/ext_pillar``)
+
+        .. versionchanged:: Oxygen
+            If ``root_dir`` is a relative path, it will be treated as relative to the
+            :conf_master:`pillar_roots` of the environment specified by
+            :conf_minion:`pillarenv`. If an environment specifies multiple
+            roots, this module will search for files relative to all of them, in order,
+            merging the results.
+
+    :param follow_dir_links:
+        Follow symbolic links to directories while collecting pillar files.
+        Defaults to ``False``.
+
+        .. warning::
+
+            Care should be exercised when enabling this option as it will
+            follow links that point outside of :param:`root_dir`.
+
+        .. warning::
+
+            Symbolic links that lead to infinite recursion are not filtered.
+
+    :param debug:
+        Enable debug information at log level ``debug``. Defaults to
+        ``False``. This option may be useful to help debug errors when setting
+        up the ``file_tree`` pillar module.
+
+    :param keep_newline:
+        Preserve the end-of-file newline in files. Defaults to ``False``.
+        This option may either be a boolean or a list of file globs (as defined
+        by the `Python fnmatch package
+        <https://docs.python.org/library/fnmatch.html>`_) for which end-of-file
+        newlines are to be kept.
+
+        ``keep_newline`` should be turned on if the pillar data is intended to
+        be used to deploy a file using ``contents_pillar`` with a
+        :py:func:`file.managed <salt.states.file.managed>` state.
+
+        .. versionchanged:: 2015.8.4
+            The ``raw_data`` parameter has been renamed to ``keep_newline``. In
+            earlier releases, ``raw_data`` must be used. Also, this parameter
+            can now be a list of globs, allowing for more granular control over
+            which pillar values keep their end-of-file newline. The globs match
+            paths relative to the directories named for Minion IDs and
+            Nodegroup namess underneath the :param:`root_dir`.
+
+            .. code-block:: yaml
+
+                ext_pillar:
+                  - file_tree:
+                      root_dir: /srv/ext_pillar
+                      keep_newline:
+                        - apache/config.d/*
+                        - corporate_app/settings/*
+
+        .. note::
+            In earlier releases, this documentation incorrectly stated that
+            binary files would not affected by the ``keep_newline``. However,
+            this module does not actually distinguish between binary and text
+            files.
+
+
+    :param render_default:
+        Override Salt's :conf_master:`default global renderer <renderer>` for
+        the ``file_tree`` pillar.
+
+        .. code-block:: yaml
+
+            render_default: jinja
+
+    :param renderer_blacklist:
+        Disallow renderers for pillar files.
+
+        .. code-block:: yaml
+
+            renderer_blacklist:
+              - json
+
+    :param renderer_whitelist:
+        Allow renderers for pillar files.
+
+        .. code-block:: yaml
+
+            renderer_whitelist:
+              - yaml
+              - jinja
+
+    :param template:
+        Enable templating of pillar files. Defaults to ``False``.
     '''
     # Not used
     del pillar
@@ -391,7 +429,7 @@ def _ext_pillar(minion_id,
 
     ngroup_pillar = {}
     nodegroups_dir = os.path.join(root_dir, 'nodegroups')
-    if os.path.exists(nodegroups_dir) and len(__opts__['nodegroups']) > 0:
+    if os.path.exists(nodegroups_dir) and len(__opts__.get('nodegroups', ())) > 0:
         master_ngroups = __opts__['nodegroups']
         ext_pillar_dirs = os.listdir(nodegroups_dir)
         if len(ext_pillar_dirs) > 0:
@@ -419,8 +457,8 @@ def _ext_pillar(minion_id,
             else:
                 if debug is True:
                     log.debug(
-                        'file_tree: no nodegroups found in file tree directory '
-                        'ext_pillar_dirs, skipping...'
+                        'file_tree: no nodegroups found in file tree directory %s, skipping...',
+                        ext_pillar_dirs
                     )
     else:
         if debug is True:
@@ -428,7 +466,12 @@ def _ext_pillar(minion_id,
 
     host_dir = os.path.join(root_dir, 'hosts', minion_id)
     if not os.path.exists(host_dir):
-        # No data for host with this ID
+        if debug is True:
+            log.debug(
+                'file_tree: no pillar data for minion %s found in file tree directory %s',
+                minion_id,
+                host_dir
+            )
         return ngroup_pillar
 
     if not os.path.isdir(host_dir):
@@ -205,7 +205,7 @@ class Runner(RunnerClient):
         if self.opts.get('eauth'):
             if 'token' in self.opts:
                 try:
-                    with salt.utils.files.fopen(os.path.join(self.opts['key_dir'], '.root_key'), 'r') as fp_:
+                    with salt.utils.files.fopen(os.path.join(self.opts['cachedir'], '.root_key'), 'r') as fp_:
                         low['key'] = salt.utils.stringutils.to_unicode(fp_.readline())
                 except IOError:
                     low['token'] = self.opts['token']
@ -43,7 +43,12 @@ def _handle_interrupt(exc, original_exc, hardfail=False, trace=''):


def _handle_signals(client, signum, sigframe):
    trace = traceback.format_exc()
    try:
        # This raises AttributeError on Python 3.4 and 3.5 if there is no current exception.
        # Ref: https://bugs.python.org/issue23003
        trace = traceback.format_exc()
    except AttributeError:
        trace = ''
    try:
        hardcrash = client.options.hard_crash
    except (AttributeError, KeyError):
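.. note::
    The guard above exists because ``traceback.format_exc()`` can raise
    ``AttributeError`` on Python 3.4/3.5 when no exception is currently being
    handled (see the referenced CPython issue). A standalone sketch of the
    same pattern, shown only as an illustration:

    .. code-block:: python

        import traceback

        def safe_format_exc():
            try:
                return traceback.format_exc()
            except AttributeError:
                # No current exception (bpo-23003 on Python 3.4/3.5)
                return ''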
@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
'''
The module used to execute states in salt. A state is unlike a module
execution in that instead of just executing a command it ensure that a
certain state is present on the system.
The State Compiler is used to execute states in Salt. A state is unlike
an execution module in that instead of just executing a command, it
ensures that a certain state is present on the system.

The data sent to the state calls is as follows:
{ 'state': '<state module name>',
@ -167,6 +167,23 @@ def _l_tag(name, id_):
    return _gen_tag(low)


def _calculate_fake_duration():
    '''
    Generate a NULL duration for when states do not run
    but we want the results to be consistent.
    '''
    utc_start_time = datetime.datetime.utcnow()
    local_start_time = utc_start_time - \
        (datetime.datetime.utcnow() - datetime.datetime.now())
    utc_finish_time = datetime.datetime.utcnow()
    start_time = local_start_time.time().isoformat()
    delta = (utc_finish_time - utc_start_time)
    # duration in milliseconds.microseconds
    duration = (delta.seconds * 1000000 + delta.microseconds)/1000.0

    return start_time, duration


def get_accumulator_dir(cachedir):
    '''
    Return the directory that accumulator data is stored in, creating it if it
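.. note::
    ``_calculate_fake_duration`` derives a local wall-clock start time from
    the UTC clock and reports the (near-zero) elapsed time in milliseconds,
    so skipped states return the same result shape as executed ones. Rough
    usage, with illustrative output values:

    .. code-block:: python

        start_time, duration = _calculate_fake_duration()
        # start_time -> e.g. '14:23:01.000123'  (local time, ISO format)
        # duration   -> e.g. 0.004              (milliseconds)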
@ -2504,8 +2521,11 @@ class State(object):
                run_dict = self.pre
            else:
                run_dict = running
            start_time, duration = _calculate_fake_duration()
            run_dict[tag] = {'changes': {},
                             'result': False,
                             'duration': duration,
                             'start_time': start_time,
                             'comment': comment,
                             '__run_num__': self.__run_num,
                             '__sls__': low['__sls__']}
@ -2588,9 +2608,12 @@ class State(object):
                _cmt = 'One or more requisite failed: {0}'.format(
                    ', '.join(six.text_type(i) for i in failed_requisites)
                )
                start_time, duration = _calculate_fake_duration()
                running[tag] = {
                    'changes': {},
                    'result': False,
                    'duration': duration,
                    'start_time': start_time,
                    'comment': _cmt,
                    '__run_num__': self.__run_num,
                    '__sls__': low['__sls__']
@ -2606,8 +2629,11 @@ class State(object):
                ret = self.call(low, chunks, running)
            running[tag] = ret
        elif status == 'pre':
            start_time, duration = _calculate_fake_duration()
            pre_ret = {'changes': {},
                       'result': True,
                       'duration': duration,
                       'start_time': start_time,
                       'comment': 'No changes detected',
                       '__run_num__': self.__run_num,
                       '__sls__': low['__sls__']}
@ -2615,15 +2641,21 @@ class State(object):
            self.pre[tag] = pre_ret
            self.__run_num += 1
        elif status == 'onfail':
            start_time, duration = _calculate_fake_duration()
            running[tag] = {'changes': {},
                            'result': True,
                            'duration': duration,
                            'start_time': start_time,
                            'comment': 'State was not run because onfail req did not change',
                            '__run_num__': self.__run_num,
                            '__sls__': low['__sls__']}
            self.__run_num += 1
        elif status == 'onchanges':
            start_time, duration = _calculate_fake_duration()
            running[tag] = {'changes': {},
                            'result': True,
                            'duration': duration,
                            'start_time': start_time,
                            'comment': 'State was not run because none of the onchanges reqs changed',
                            '__run_num__': self.__run_num,
                            '__sls__': low['__sls__']}
@ -966,7 +966,7 @@ def extracted(name,

    if result['result']:
        # Get the path of the file in the minion cache
        cached = __salt__['cp.is_cached'](source_match)
        cached = __salt__['cp.is_cached'](source_match, saltenv=__env__)
    else:
        log.debug(
            'failed to download %s',
@ -6628,7 +6628,7 @@ def cached(name,

    .. code-block:: python

        cached = __salt__['cp.is_cached'](source_match)
        cached = __salt__['cp.is_cached'](source_match, saltenv=__env__)

    This function will return the cached path of the file, or an empty string
    if the file is not present in the minion cache.
@ -192,9 +192,14 @@ def present(name,
                salt.utils.stringutils.to_unicode(name, 'utf-8'))
        return ret

    try:
        vdata_decoded = salt.utils.stringutils.to_unicode(vdata, 'utf-8')
    except UnicodeDecodeError:
        # vdata contains binary data that can't be decoded
        vdata_decoded = vdata
    add_change = {'Key': r'{0}\{1}'.format(hive, key),
                  'Entry': '{0}'.format(salt.utils.stringutils.to_unicode(vname, 'utf-8') if vname else '(Default)'),
                  'Value': salt.utils.stringutils.to_unicode(vdata, 'utf-8')}
                  'Value': vdata_decoded}

    # Check for test option
    if __opts__['test']:
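.. note::
    The change above prefers a decoded representation of the registry value
    but falls back to the raw value when it contains undecodable binary
    data. The same pattern as a generic sketch (``decode_for_display`` is a
    hypothetical name used only for illustration):

    .. code-block:: python

        def decode_for_display(value):
            # Prefer a unicode representation, but tolerate binary blobs
            try:
                return salt.utils.stringutils.to_unicode(value, 'utf-8')
            except UnicodeDecodeError:
                return value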
@ -558,6 +558,11 @@ class IPCMessagePublisher(object):
                io_loop=self.io_loop
            )
            self.streams.add(stream)

            def discard_after_closed():
                self.streams.discard(stream)

            stream.set_close_callback(discard_after_closed)
        except Exception as exc:
            log.error('IPC streaming error: %s', exc)
@ -420,6 +420,10 @@ class AESReqServerMixin(object):
                log.debug('Host key change detected in open mode.')
                with salt.utils.files.fopen(pubfn, 'w+') as fp_:
                    fp_.write(load['pub'])
        elif not load['pub']:
            log.error('Public key is empty: {0}'.format(load['id']))
            return {'enc': 'clear',
                    'load': {'ret': False}}

        pub = None
@ -1740,15 +1740,15 @@ def dns_check(addr, port, safe=False, ipv6=None):
def get_context(template, line, num_lines=5, marker=None):
    # Late import to avoid circular import.
    import salt.utils.versions
    import salt.utils.templates
    import salt.utils.stringutils
    salt.utils.versions.warn_until(
        'Neon',
        'Use of \'salt.utils.get_context\' detected. This function '
        'has been moved to \'salt.utils.templates.get_context\' as of '
        'has been moved to \'salt.utils.stringutils.get_context\' as of '
        'Salt Oxygen. This warning will be removed in Salt Neon.',
        stacklevel=3
    )
    return salt.utils.templates.get_context(template, line, num_lines, marker)
    return salt.utils.stringutils.get_context(template, line, num_lines, marker)


def get_master_key(key_user, opts, skip_perm_errors=False):
@ -269,7 +269,13 @@ def shlex_split(s, **kwargs):
    Only split if variable is a string
    '''
    if isinstance(s, six.string_types):
        return shlex.split(s, **kwargs)
        # On PY2, shlex.split will fail with unicode types if there are
        # non-ascii characters in the string. So, we need to make sure we
        # invoke it with a str type, and then decode the resulting string back
        # to unicode to return it.
        return salt.utils.data.decode(
            shlex.split(salt.utils.stringutils.to_str(s), **kwargs)
        )
    else:
        return s
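.. note::
    On Python 2, ``shlex.split`` chokes on unicode input containing
    non-ASCII characters, hence the round-trip through ``str`` and back. A
    condensed sketch of the same idea, assuming the ``six``, ``shlex``, and
    Salt helper imports already present in this module:

    .. code-block:: python

        def py2_safe_shlex_split(s, **kwargs):
            if not isinstance(s, six.string_types):
                return s
            # Encode to str for shlex, then decode the pieces back to unicode
            return salt.utils.data.decode(
                shlex.split(salt.utils.stringutils.to_str(s), **kwargs)
            )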
@ -114,28 +114,27 @@ def _post_processing(kwargs, skip_translate, invalid):
        actual_volumes.sort()

    if kwargs.get('port_bindings') is not None \
            and (skip_translate is True or
                 all(x not in skip_translate
                     for x in ('port_bindings', 'expose', 'ports'))):
            and all(x not in skip_translate
                    for x in ('port_bindings', 'expose', 'ports')):
        # Make sure that all ports defined in "port_bindings" are included in
        # the "ports" param.
        auto_ports = list(kwargs['port_bindings'])
        if auto_ports:
            actual_ports = []
            # Sort list to make unit tests more reliable
            for port in auto_ports:
                if port in actual_ports:
                    continue
                if isinstance(port, six.integer_types):
                    actual_ports.append((port, 'tcp'))
                else:
                    port, proto = port.split('/')
                    actual_ports.append((int(port), proto))
            actual_ports.sort()
            actual_ports = [
                port if proto == 'tcp' else '{}/{}'.format(port, proto) for (port, proto) in actual_ports
            ]
            kwargs.setdefault('ports', actual_ports)
        ports_to_bind = list(kwargs['port_bindings'])
        if ports_to_bind:
            ports_to_open = set(kwargs.get('ports', []))
            ports_to_open.update([helpers.get_port_def(x) for x in ports_to_bind])
            kwargs['ports'] = list(ports_to_open)

    if 'ports' in kwargs \
            and all(x not in skip_translate for x in ('expose', 'ports')):
        # TCP ports should only be passed as the port number. Normalize the
        # input so a port definition of 80/tcp becomes just 80 instead of
        # (80, 'tcp').
        for index, _ in enumerate(kwargs['ports']):
            try:
                if kwargs['ports'][index][1] == 'tcp':
                    kwargs['ports'][index] = kwargs['ports'][index][0]
            except TypeError:
                continue


# Functions below must match names of docker-py arguments
@ -552,13 +551,7 @@ def ports(val, **kwargs):  # pylint: disable=unused-argument
            raise SaltInvocationError(exc.__str__())
        new_ports.update([helpers.get_port_def(x, proto)
                          for x in range(range_start, range_end + 1)])
    ordered_new_ports = [
        port if proto == 'tcp' else (port, proto) for (port, proto) in sorted(
            [(new_port, 'tcp') if isinstance(new_port, six.integer_types) else new_port
             for new_port in new_ports]
        )
    ]
    return ordered_new_ports
    return list(new_ports)


def privileged(val, **kwargs):  # pylint: disable=unused-argument
@ -7,7 +7,6 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo
from __future__ import absolute_import, print_function, unicode_literals
import copy
import contextlib
import distutils
import errno
import fnmatch
import glob
@ -90,9 +89,9 @@ log = logging.getLogger(__name__)
try:
    import git
    import gitdb
    HAS_GITPYTHON = True
    GITPYTHON_VERSION = _LooseVersion(git.__version__)
except ImportError:
    HAS_GITPYTHON = False
    GITPYTHON_VERSION = None

try:
    # Squelch warning on cent7 due to them upgrading cffi
@ -100,7 +99,31 @@ try:
    with warnings.catch_warnings():
        warnings.simplefilter('ignore')
        import pygit2
    HAS_PYGIT2 = True
    PYGIT2_VERSION = _LooseVersion(pygit2.__version__)
    LIBGIT2_VERSION = _LooseVersion(pygit2.LIBGIT2_VERSION)

    # Work around upstream bug where bytestrings were being decoded using the
    # default encoding (which is usually ascii on Python 2). This was fixed
    # on 2 Feb 2018, so releases prior to 0.26.2 will need a workaround.
    if PYGIT2_VERSION <= _LooseVersion('0.26.2'):
        try:
            import pygit2.ffi
            import pygit2.remote
        except ImportError:
            # If we couldn't import these, then we're using an old enough
            # version where ffi isn't in use and this workaround would be
            # useless.
            pass
        else:
            def __maybe_string(ptr):
                if not ptr:
                    return None
                return pygit2.ffi.string(ptr).decode('utf-8')

            pygit2.remote.maybe_string = __maybe_string

    # Older pygit2 releases did not raise a specific exception class, this
    # try/except makes Salt's exception catching work on any supported release.
    try:
        GitError = pygit2.errors.GitError
    except AttributeError:
@ -111,16 +134,17 @@ except Exception as exc:
    # to rebuild itself against the newer cffi). Therefore, we simply will
    # catch a generic exception, and log the exception if it is anything other
    # than an ImportError.
    HAS_PYGIT2 = False
    PYGIT2_VERSION = None
    LIBGIT2_VERSION = None
    if not isinstance(exc, ImportError):
        log.exception('Failed to import pygit2')

# pylint: enable=import-error

# Minimum versions for backend providers
GITPYTHON_MINVER = '0.3'
PYGIT2_MINVER = '0.20.3'
LIBGIT2_MINVER = '0.20.0'
GITPYTHON_MINVER = _LooseVersion('0.3')
PYGIT2_MINVER = _LooseVersion('0.20.3')
LIBGIT2_MINVER = _LooseVersion('0.20.0')


def enforce_types(key, val):
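.. note::
    Storing the minimum versions as ``_LooseVersion`` objects means the
    provider checks below can compare versions directly instead of
    re-wrapping strings at every call site. Roughly:

    .. code-block:: python

        from distutils.version import LooseVersion as _LooseVersion

        PYGIT2_MINVER = _LooseVersion('0.20.3')
        # Numeric comparison, not lexicographic string comparison:
        assert _LooseVersion('0.26.0') >= PYGIT2_MINVER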
@ -1841,10 +1865,7 @@ class Pygit2(GitProvider):
        '''
        Assign attributes for pygit2 callbacks
        '''
        # pygit2 radically changed fetching in 0.23.2
        pygit2_version = pygit2.__version__
        if distutils.version.LooseVersion(pygit2_version) >= \
                distutils.version.LooseVersion('0.23.2'):
        if PYGIT2_VERSION >= _LooseVersion('0.23.2'):
            self.remotecallbacks = pygit2.RemoteCallbacks(
                credentials=self.credentials)
            if not self.ssl_verify:
@ -1859,7 +1880,7 @@ class Pygit2(GitProvider):
                'pygit2 does not support disabling the SSL certificate '
                'check in versions prior to 0.23.2 (installed: {0}). '
                'Fetches for self-signed certificates will fail.'.format(
                    pygit2_version
                    PYGIT2_VERSION
                )
            )

@ -2435,10 +2456,10 @@ class GitBase(object):
        Check if GitPython is available and at a compatible version (>= 0.3.0)
        '''
        def _recommend():
            if HAS_PYGIT2 and 'pygit2' in self.git_providers:
            if PYGIT2_VERSION and 'pygit2' in self.git_providers:
                log.error(_RECOMMEND_PYGIT2, self.role, self.role)

        if not HAS_GITPYTHON:
        if not GITPYTHON_VERSION:
            if not quiet:
                log.error(
                    '%s is configured but could not be loaded, is GitPython '
@ -2449,18 +2470,14 @@ class GitBase(object):
        elif 'gitpython' not in self.git_providers:
            return False

        # pylint: disable=no-member
        gitver = _LooseVersion(git.__version__)
        minver = _LooseVersion(GITPYTHON_MINVER)
        # pylint: enable=no-member
        errors = []
        if gitver < minver:
        if GITPYTHON_VERSION < GITPYTHON_MINVER:
            errors.append(
                '{0} is configured, but the GitPython version is earlier than '
                '{1}. Version {2} detected.'.format(
                    self.role,
                    GITPYTHON_MINVER,
                    git.__version__
                    GITPYTHON_VERSION
                )
            )
        if not salt.utils.path.which('git'):
@ -2486,10 +2503,10 @@ class GitBase(object):
        Pygit2 must be at least 0.20.3 and libgit2 must be at least 0.20.0.
        '''
        def _recommend():
            if HAS_GITPYTHON and 'gitpython' in self.git_providers:
            if GITPYTHON_VERSION and 'gitpython' in self.git_providers:
                log.error(_RECOMMEND_GITPYTHON, self.role, self.role)

        if not HAS_PYGIT2:
        if not PYGIT2_VERSION:
            if not quiet:
                log.error(
                    '%s is configured but could not be loaded, are pygit2 '
@ -2500,31 +2517,23 @@ class GitBase(object):
        elif 'pygit2' not in self.git_providers:
            return False

        # pylint: disable=no-member
        pygit2ver = _LooseVersion(pygit2.__version__)
        pygit2_minver = _LooseVersion(PYGIT2_MINVER)

        libgit2ver = _LooseVersion(pygit2.LIBGIT2_VERSION)
        libgit2_minver = _LooseVersion(LIBGIT2_MINVER)
        # pylint: enable=no-member

        errors = []
        if pygit2ver < pygit2_minver:
        if PYGIT2_VERSION < PYGIT2_MINVER:
            errors.append(
                '{0} is configured, but the pygit2 version is earlier than '
                '{1}. Version {2} detected.'.format(
                    self.role,
                    PYGIT2_MINVER,
                    pygit2.__version__
                    PYGIT2_VERSION
                )
            )
        if libgit2ver < libgit2_minver:
        if LIBGIT2_VERSION < LIBGIT2_MINVER:
            errors.append(
                '{0} is configured, but the libgit2 version is earlier than '
                '{1}. Version {2} detected.'.format(
                    self.role,
                    LIBGIT2_MINVER,
                    pygit2.LIBGIT2_VERSION
                    LIBGIT2_VERSION
                )
            )
        if not salt.utils.path.which('git'):
@ -2749,15 +2758,20 @@ class GitFS(GitBase):
            return fnd

        salt.fileserver.wait_lock(lk_fn, dest)
        if os.path.isfile(blobshadest) and os.path.isfile(dest):
        try:
            with salt.utils.files.fopen(blobshadest, 'r') as fp_:
                sha = salt.utils.stringutils.to_unicode(fp_.read())
                if sha == blob_hexsha:
                    fnd['rel'] = path
                    fnd['path'] = dest
                    return _add_file_stat(fnd, blob_mode)
        except IOError as exc:
            if exc.errno != errno.ENOENT:
                raise exc

        with salt.utils.files.fopen(lk_fn, 'w'):
            pass

        for filename in glob.glob(hashes_glob):
            try:
                os.remove(filename)
@ -2829,17 +2843,24 @@ class GitFS(GitBase):
                                load['saltenv'],
                                '{0}.hash.{1}'.format(relpath,
                                                      self.opts['hash_type']))
        if not os.path.isfile(hashdest):
            if not os.path.exists(os.path.dirname(hashdest)):
                os.makedirs(os.path.dirname(hashdest))
            ret['hsum'] = salt.utils.hashutils.get_hash(path, self.opts['hash_type'])
            with salt.utils.files.fopen(hashdest, 'w+') as fp_:
                fp_.write(ret['hsum'])
            return ret
        else:
        try:
            with salt.utils.files.fopen(hashdest, 'rb') as fp_:
                ret['hsum'] = fp_.read()
            return ret
        except IOError as exc:
            if exc.errno != errno.ENOENT:
                raise exc

        try:
            os.makedirs(os.path.dirname(hashdest))
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise exc

        ret['hsum'] = salt.utils.hashutils.get_hash(path, self.opts['hash_type'])
        with salt.utils.files.fopen(hashdest, 'w+') as fp_:
            fp_.write(ret['hsum'])
        return ret

    def _file_lists(self, load, form):
        '''
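.. note::
    The rewritten hash-caching path replaces the exists-then-create sequence
    with a try/except, avoiding the race where another process creates the
    directory between the check and the ``os.makedirs`` call. The core
    pattern, as a standalone sketch:

    .. code-block:: python

        import errno
        import os

        def ensure_parent_dir(path):
            try:
                os.makedirs(os.path.dirname(path))
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise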
@ -569,7 +569,7 @@ def query(url,
            not isinstance(result_text, six.text_type):
        result_text = result_text.decode(res_params['charset'])
    ret['body'] = result_text
    if 'Set-Cookie' in result_headers.keys() and cookies is not None:
    if 'Set-Cookie' in result_headers and cookies is not None:
        result_cookies = parse_cookie_header(result_headers['Set-Cookie'])
        for item in result_cookies:
            sess_cookies.set_cookie(item)
@ -899,12 +899,10 @@ def parse_cookie_header(header):
    for cookie in cookies:
        name = None
        value = None
        for item in cookie:
        for item in list(cookie):
            if item in attribs:
                continue
            name = item
            value = cookie[item]
        del cookie[name]
            value = cookie.pop(item)

        # cookielib.Cookie() requires an epoch
        if 'expires' in cookie:
@ -912,7 +910,7 @@ def parse_cookie_header(header):

        # Fill in missing required fields
        for req in reqd:
            if req not in cookie.keys():
            if req not in cookie:
                cookie[req] = ''
        if cookie['version'] == '':
            cookie['version'] = 0
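.. note::
    The cookie-parsing fix iterates over ``list(cookie)`` so the dictionary
    can be mutated safely inside the loop; on Python 3, deleting keys while
    iterating a dict raises ``RuntimeError``. In miniature:

    .. code-block:: python

        cookie = {'name': 'value', 'path': '/', 'version': '1'}
        attribs = ('path', 'version')
        for item in list(cookie):   # snapshot the keys before mutating
            if item in attribs:
                continue
            name = item
            value = cookie.pop(item)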
@ -850,6 +850,24 @@ class SerializerExtension(Extension, object):
            value = six.text_type(value)
        try:
            return salt.utils.data.decode(salt.utils.yaml.safe_load(value))
        except salt.utils.yaml.YAMLError as exc:
            msg = 'Encountered error loading yaml: '
            try:
                # Reported line is off by one, add 1 to correct it
                line = exc.problem_mark.line + 1
                buf = exc.problem_mark.buffer
                problem = exc.problem
            except AttributeError:
                # No context information available in the exception, fall back
                # to the stringified version of the exception.
                msg += six.text_type(exc)
            else:
                msg += '{0}\n'.format(problem)
                msg += salt.utils.stringutils.get_context(
                    buf,
                    line,
                    marker='    <======================')
            raise TemplateRuntimeError(msg)
        except AttributeError:
            raise TemplateRuntimeError(
                'Unable to load yaml from {0}'.format(value))
@ -47,6 +47,8 @@ import salt.exceptions
from salt.ext import six
from salt.ext.six.moves import range  # pylint: disable=import-error,redefined-builtin

logger = logging.getLogger(__name__)


def _sorted(mixins_or_funcs):
    return sorted(
@ -64,8 +66,8 @@ class MixInMeta(type):
        instance = super(MixInMeta, mcs).__new__(mcs, name, bases, attrs)
        if not hasattr(instance, '_mixin_setup'):
            raise RuntimeError(
                'Don\'t subclass {0} in {1} if you\'re not going to use it '
                'as a salt parser mix-in.'.format(mcs.__name__, name)
                "Don't subclass {0} in {1} if you're not going "
                "to use it as a salt parser mix-in.".format(mcs.__name__, name)
            )
        return instance

@ -206,7 +208,7 @@ class OptionParser(optparse.OptionParser, object):
            try:
                process_option_func()
            except Exception as err:  # pylint: disable=broad-except
                logging.getLogger(__name__).exception(err)
                logger.exception(err)
                self.error(
                    'Error while processing {0}: {1}'.format(
                        process_option_func, traceback.format_exc(err)
@ -218,7 +220,7 @@ class OptionParser(optparse.OptionParser, object):
            try:
                mixin_after_parsed_func(self)
            except Exception as err:  # pylint: disable=broad-except
                logging.getLogger(__name__).exception(err)
                logger.exception(err)
                self.error(
                    'Error while processing {0}: {1}'.format(
                        mixin_after_parsed_func, traceback.format_exc(err)
@ -226,7 +228,7 @@ class OptionParser(optparse.OptionParser, object):
                )

        if self.config.get('conf_file', None) is not None:  # pylint: disable=no-member
            logging.getLogger(__name__).debug(
            logger.debug(
                'Configuration file path: %s',
                self.config['conf_file']  # pylint: disable=no-member
            )
@ -259,12 +261,10 @@ class OptionParser(optparse.OptionParser, object):
            try:
                mixin_before_exit_func(self)
            except Exception as err:  # pylint: disable=broad-except
                logger = logging.getLogger(__name__)
                logger.exception(err)
                logger.error(
                    'Error while processing %s: %s',
                    mixin_before_exit_func, traceback.format_exc(err)
                )
                logger.error('Error while processing %s: %s',
                             six.text_type(mixin_before_exit_func),
                             traceback.format_exc(err))
        if self._setup_mp_logging_listener_ is True:
            # Stop logging through the queue
            log.shutdown_multiprocessing_logging()
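.. note::
    These parser changes replace repeated ``logging.getLogger(__name__)``
    calls with a single module-level ``logger``, which is both cheaper and
    the conventional idiom:

    .. code-block:: python

        import logging

        logger = logging.getLogger(__name__)  # created once at import time

        def process():
            logger.debug('Configuration file path: %s', '/etc/salt/master')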
@ -399,17 +399,13 @@ class SaltfileMixIn(six.with_metaclass(MixInMeta, object)):
            return

        if not os.path.isfile(self.options.saltfile):
            self.error(
                '\'{0}\' file does not exist.\n'.format(self.options.saltfile)
            )
            self.error("'{0}' file does not exist.\n".format(self.options.saltfile))

        # Make sure we have an absolute path
        self.options.saltfile = os.path.abspath(self.options.saltfile)

        # Make sure we let the user know that we will be loading a Saltfile
        logging.getLogger(__name__).info(
            'Loading Saltfile from \'%s\'', self.options.saltfile
        )
        logger.info("Loading Saltfile from '%s'", six.text_type(self.options.saltfile))

        try:
            saltfile_config = config._read_conf_file(saltfile)
@ -488,8 +484,7 @@ class HardCrashMixin(six.with_metaclass(MixInMeta, object)):
        hard_crash = os.environ.get('SALT_HARD_CRASH', False)
        self.add_option(
            '--hard-crash', action='store_true', default=hard_crash,
            help=('Raise any original exception rather than exiting gracefully. '
                  'Default: %default.')
            help='Raise any original exception rather than exiting gracefully. Default: %default.'
        )


@ -500,9 +495,8 @@ class NoParseMixin(six.with_metaclass(MixInMeta, object)):
        no_parse = os.environ.get('SALT_NO_PARSE', '')
        self.add_option(
            '--no-parse', default=no_parse,
            help=('Comma-separated list of named CLI arguments (i.e. '
                  'argname=value) which should not be parsed as Python '
                  'data types'),
            help='Comma-separated list of named CLI arguments (i.e. argname=value) '
                 'which should not be parsed as Python data types',
            metavar='argname1,argname2,...',
        )

@ -527,11 +521,10 @@ class ConfigDirMixIn(six.with_metaclass(MixInMeta, object)):
        config_dir = os.environ.get(self._default_config_dir_env_var_, None)
        if not config_dir:
            config_dir = self._default_config_dir_
            logging.getLogger(__name__).debug('SYSPATHS setup as: %s', syspaths.CONFIG_DIR)
            logger.debug('SYSPATHS setup as: %s', six.text_type(syspaths.CONFIG_DIR))
        self.add_option(
            '-c', '--config-dir', default=config_dir,
            help=('Pass in an alternative configuration directory. Default: '
                  '\'%default\'.')
            help="Pass in an alternative configuration directory. Default: '%default'."
        )

    def process_config_dir(self):
@ -539,7 +532,7 @@ class ConfigDirMixIn(six.with_metaclass(MixInMeta, object)):
        if not os.path.isdir(self.options.config_dir):
            # No logging is configured yet
            sys.stderr.write(
                'WARNING: CONFIG \'{0}\' directory does not exist.\n'.format(
                "WARNING: CONFIG '{0}' directory does not exist.\n".format(
                    self.options.config_dir
                )
            )
@ -717,7 +710,7 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
            # Remove it from config so it inherits from log_file
            self.config.pop(self._logfile_config_setting_name_)

        if self.config['verify_env']:
        if self.config['verify_env'] and self.config['log_level'] not in ('quiet', ):
            # Verify the logfile if it was explicitly set but do not try to
            # verify the default
            if logfile is not None and not logfile.startswith(('tcp://', 'udp://', 'file://')):
@ -794,12 +787,11 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
                logfile_basename = os.path.basename(
                    self._default_logging_logfile_
                )
                logging.getLogger(__name__).debug(
                    'The user \'%s\' is not allowed to write to \'%s\'. '
                    'The log file will be stored in '
                    '\'~/.salt/\'%s\'.log\'',
                    current_user, logfile, logfile_basename
                )
                logger.debug("The user '%s' is not allowed to write to '%s'. "
                             "The log file will be stored in '~/.salt/'%s'.log'",
                             six.text_type(current_user),
                             six.text_type(logfile),
                             six.text_type(logfile_basename))
                logfile = os.path.join(
                    user_salt_dir, '{0}.log'.format(logfile_basename)
                )
@ -814,10 +806,10 @@ class LogLevelMixIn(six.with_metaclass(MixInMeta, object)):
        # Not supported on platforms other than Windows.
        # Other platforms may use an external tool such as 'logrotate'
        if log_rotate_max_bytes != 0:
            logging.getLogger(__name__).warning('\'log_rotate_max_bytes\' is only supported on Windows')
            logger.warning("'log_rotate_max_bytes' is only supported on Windows")
            log_rotate_max_bytes = 0
        if log_rotate_backup_count != 0:
            logging.getLogger(__name__).warning('\'log_rotate_backup_count\' is only supported on Windows')
            logger.warning("'log_rotate_backup_count' is only supported on Windows")
            log_rotate_backup_count = 0

        # Save the settings back to the configuration
@ -962,22 +954,23 @@ class DaemonMixIn(six.with_metaclass(MixInMeta, object)):
            default=os.path.join(
                syspaths.PIDFILE_DIR, '{0}.pid'.format(self.get_prog_name())
            ),
            help=('Specify the location of the pidfile. Default: \'%default\'.')
            help="Specify the location of the pidfile. Default: '%default'."
        )

    def _mixin_before_exit(self):
        if hasattr(self, 'config') and self.config.get('pidfile', ''):
        if hasattr(self, 'config') and self.config.get('pidfile'):
            # We've loaded and merged options into the configuration, it's safe
            # to query about the pidfile
            if self.check_pidfile():
                try:
                    os.unlink(self.config['pidfile'])
                except OSError as err:
                    logging.getLogger(__name__).info(
                        'PIDfile could not be deleted: {0}'.format(
                            self.config['pidfile']
                        )
                    )
                    # Log error only when running salt-master as a root user.
                    # Otherwise this can be ignored, since salt-master is able to
                    # overwrite the PIDfile on the next start.
                    if not os.getuid():
                        logger.info('PIDfile could not be deleted: %s', six.text_type(self.config['pidfile']))
                        logger.debug(six.text_type(err))

    def set_pidfile(self):
        from salt.utils.process import set_pidfile
@ -2739,31 +2732,31 @@ class SaltCallOptionParser(six.with_metaclass(OptionParserMeta,

        role = opts.get('id')
        if not role:
            emsg = ("Missing role required to setup RAET SaltCaller.")
            logging.getLogger(__name__).error(emsg + "\n")
            emsg = "Missing role required to setup RAET SaltCaller."
            logger.error(emsg)
            raise ValueError(emsg)

        kind = opts.get('__role')  # application kind 'master', 'minion', etc
        if kind not in kinds.APPL_KINDS:
            emsg = ("Invalid application kind = '{0}' for RAET SaltCaller.".format(kind))
            logging.getLogger(__name__).error(emsg + "\n")
            emsg = "Invalid application kind = '{0}' for RAET SaltCaller.".format(six.text_type(kind))
            logger.error(emsg)
            raise ValueError(emsg)

        if kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.minion],
                    kinds.APPL_KIND_NAMES[kinds.applKinds.caller], ]:
        if kind in [kinds.APPL_KIND_NAMES[kinds.applKinds.minion], kinds.APPL_KIND_NAMES[kinds.applKinds.caller]]:
            lanename = "{0}_{1}".format(role, kind)
        else:
            emsg = ("Unsupported application kind '{0}' for RAET SaltCaller.".format(kind))
            logging.getLogger(__name__).error(emsg + '\n')
            emsg = "Unsupported application kind '{0}' for RAET SaltCaller.".format(six.text_type(kind))
            logger.error(emsg)
            raise ValueError(emsg)

        if kind == kinds.APPL_KIND_NAMES[kinds.applKinds.minion]:  # minion check
            from raet.lane.yarding import Yard  # pylint: disable=3rd-party-module-not-gated
            ha, dirpath = Yard.computeHa(dirpath, lanename, yardname)  # pylint: disable=invalid-name
            if (os.path.exists(ha) and
                    not os.path.isfile(ha) and
                    not os.path.isdir(ha)):  # minion manor yard
                return True
            try:
                from raet.lane.yarding import Yard  # pylint: disable=3rd-party-module-not-gated
                ha, dirpath = Yard.computeHa(dirpath, lanename, yardname)  # pylint: disable=invalid-name
                if os.path.exists(ha) and not os.path.isfile(ha) and not os.path.isdir(ha):  # minion manor yard
                    return True
            except ImportError as ex:
                logger.error("Error while importing Yard: %s", ex)
        return False

    def process_module_dirs(self):
@ -2822,13 +2815,13 @@ class SaltRunOptionParser(six.with_metaclass(OptionParserMeta,
            '--async',
            default=False,
            action='store_true',
            help=('Start the runner operation and immediately return control.')
            help='Start the runner operation and immediately return control.'
        )
        self.add_option(
            '--skip-grains',
            default=False,
            action='store_true',
            help=('Do not load grains.')
            help='Do not load grains.'
        )
        group = self.output_options_group = optparse.OptionGroup(
            self, 'Output Options', 'Configure your preferred output format.'
@ -2946,14 +2939,13 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
            '-v', '--verbose',
            default=False,
            action='store_true',
            help=('Turn on command verbosity, display jid.')
            help='Turn on command verbosity, display jid.'
        )
        self.add_option(
            '-s', '--static',
            default=False,
            action='store_true',
            help=('Return the data from minions as a group after they '
                  'all return.')
            help='Return the data from minions as a group after they all return.'
        )
        self.add_option(
            '-w', '--wipe',
@ -28,7 +28,8 @@ def _load_libcrypto():
    Load OpenSSL libcrypto
    '''
    if sys.platform.startswith('win'):
        return cdll.LoadLibrary('libeay32')
        # cdll.LoadLibrary on windows requires an 'str' argument
        return cdll.LoadLibrary(str('libeay32'))  # future lint: disable=blacklisted-function
    elif getattr(sys, 'frozen', False) and salt.utils.platform.is_smartos():
        return cdll.LoadLibrary(glob.glob(os.path.join(
            os.path.dirname(sys.executable),
@ -5,6 +5,7 @@ Functions for manipulating or otherwise processing strings

# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import base64
import errno
import fnmatch
import logging
@ -203,7 +204,7 @@ def is_binary(data):
@jinja_filter('random_str')
def random(size=32):
    key = os.urandom(size)
    return key.encode('base64').replace('\n', '')[:size]
    return to_unicode(base64.b64encode(key).replace(b'\n', b'')[:size])


@jinja_filter('contains_whitespace')
@ -429,3 +430,38 @@ def print_cli(msg, retries=10, step=0.01):
            else:
                raise
        break


def get_context(template, line, num_lines=5, marker=None):
    '''
    Returns debugging context around a line in a given string

    Returns:: string
    '''
    template_lines = template.splitlines()
    num_template_lines = len(template_lines)

    # In test mode, a single line template would return a crazy line number like,
    # 357. Do this sanity check and if the given line is obviously wrong, just
    # return the entire template
    if line > num_template_lines:
        return template

    context_start = max(0, line - num_lines - 1)  # subt 1 for 0-based indexing
    context_end = min(num_template_lines, line + num_lines)
    error_line_in_context = line - context_start - 1  # subtr 1 for 0-based idx

    buf = []
    if context_start > 0:
        buf.append('[...]')
        error_line_in_context += 1

    buf.extend(template_lines[context_start:context_end])

    if context_end < num_template_lines:
        buf.append('[...]')

    if marker:
        buf[error_line_in_context] += marker

    return '---\n{0}\n---'.format('\n'.join(buf))
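.. note::
    ``get_context`` brackets the failing line with up to ``num_lines`` of
    surrounding template text and ``[...]`` ellipses, appending ``marker`` to
    the offending line. For instance (illustrative output):

    .. code-block:: python

        template = '\n'.join('line {0}'.format(i) for i in range(1, 20))
        print(get_context(template, 10, num_lines=2, marker='  <=='))
        # ---
        # [...]
        # line 8
        # line 9
        # line 10  <==
        # line 11
        # line 12
        # [...]
        # ---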
@ -95,41 +95,6 @@ class AliasedModule(object):
        return getattr(self.wrapped, name)


def get_context(template, line, num_lines=5, marker=None):
    '''
    Returns debugging context around a line in a given string

    Returns:: string
    '''
    template_lines = template.splitlines()
    num_template_lines = len(template_lines)

    # in test, a single line template would return a crazy line number like,
    # 357. do this sanity check and if the given line is obviously wrong, just
    # return the entire template
    if line > num_template_lines:
        return template

    context_start = max(0, line - num_lines - 1)  # subt 1 for 0-based indexing
    context_end = min(num_template_lines, line + num_lines)
    error_line_in_context = line - context_start - 1  # subtr 1 for 0-based idx

    buf = []
    if context_start > 0:
        buf.append('[...]')
        error_line_in_context += 1

    buf.extend(template_lines[context_start:context_end])

    if context_end < num_template_lines:
        buf.append('[...]')

    if marker:
        buf[error_line_in_context] += marker

    return '---\n{0}\n---'.format('\n'.join(buf))


def wrap_tmpl_func(render_str):

    def render_tmpl(tmplsrc,
@ -202,11 +167,9 @@ def wrap_tmpl_func(render_str):
                tmplsrc.close()
            try:
                output = render_str(tmplstr, context, tmplpath)
                if six.PY2:
                    output = output.encode(SLS_ENCODING)
                if salt.utils.platform.is_windows():
                    newline = False
                    if salt.utils.stringutils.to_unicode(output).endswith(('\n', os.linesep)):
                    if salt.utils.stringutils.to_unicode(output, encoding=SLS_ENCODING).endswith(('\n', os.linesep)):
                        newline = True
                    # Write out with Windows newlines
                    output = os.linesep.join(output.splitlines())
@ -223,9 +186,7 @@ def wrap_tmpl_func(render_str):
            if to_str:  # then render as string
                return dict(result=True, data=output)
            with tempfile.NamedTemporaryFile('wb', delete=False, prefix=salt.utils.files.TEMPFILE_PREFIX) as outf:
                if six.PY3:
                    output = output.encode(SLS_ENCODING)
                outf.write(output)
                outf.write(salt.utils.stringutils.to_bytes(output, encoding=SLS_ENCODING))
                # Note: If nothing is replaced or added by the rendering
                # function, then the contents of the output file will
                # be exactly the same as the input.
@ -315,7 +276,7 @@ def _get_jinja_error(trace, context=None):
        out = '\n{0}\n'.format(msg.splitlines()[0])
        with salt.utils.files.fopen(template_path) as fp_:
            template_contents = salt.utils.stringutils.to_unicode(fp_.read())
        out += get_context(
        out += salt.utils.stringutils.get_context(
            template_contents,
            line,
            marker='    <======================')
@ -417,15 +378,6 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
        template = jinja_env.from_string(tmplstr)
        template.globals.update(decoded_context)
        output = template.render(**decoded_context)
    except jinja2.exceptions.TemplateSyntaxError as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ''
        raise SaltRenderError(
            'Jinja syntax error: {0}{1}'.format(exc, out),
            line,
            tmplstr)
    except jinja2.exceptions.UndefinedError as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        out = _get_jinja_error(trace, context=decoded_context)[1]
@ -436,6 +388,16 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None):
            'Jinja variable {0}{1}'.format(
                exc, out),
            buf=tmplstr)
    except (jinja2.exceptions.TemplateRuntimeError,
            jinja2.exceptions.TemplateSyntaxError) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ''
        raise SaltRenderError(
            'Jinja syntax error: {0}{1}'.format(exc, out),
            line,
            tmplstr)
    except (SaltInvocationError, CommandExecutionError) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
@ -98,7 +98,7 @@ def _get_vault_connection():
    Get the connection details for calling Vault, from local configuration if
    it exists, or from the master otherwise
    '''
    if 'vault' in __opts__ and __opts__.get('__role', 'minion') == 'master':
    def _use_local_config():
        log.debug('Using Vault connection details from local config')
        try:
            if __opts__['vault']['auth']['method'] == 'approle':
@ -123,6 +123,11 @@ def _get_vault_connection():
        except KeyError as err:
            errmsg = 'Minion has "vault" config section, but could not find key "{0}" within'.format(err.message)
            raise salt.exceptions.CommandExecutionError(errmsg)

    if 'vault' in __opts__ and __opts__.get('__role', 'minion') == 'master':
        return _use_local_config()
    elif any((__opts__['local'], __opts__['file_client'] == 'local', __opts__['master_type'] == 'disable')):
        return _use_local_config()
    else:
        log.debug('Contacting master for Vault connection details')
        return _get_token_and_url_from_master()
@ -31,7 +31,6 @@ import salt.utils.files
import salt.utils.path
import salt.utils.platform
import salt.utils.user
import salt.utils.versions

log = logging.getLogger(__name__)

@ -204,28 +203,16 @@ def verify_env(
        permissive=False,
        pki_dir='',
        skip_extra=False,
        root_dir=ROOT_DIR,
        sensitive_dirs=None):
        root_dir=ROOT_DIR):
    '''
    Verify that the named directories are in place and that the environment
    can shake the salt
    '''
    if pki_dir:
        salt.utils.versions.warn_until(
            'Neon',
            'Use of \'pki_dir\' was detected: \'pki_dir\' has been deprecated '
            'in favor of \'sensitive_dirs\'. Support for \'pki_dir\' will be '
            'removed in Salt Neon.'
        )
        sensitive_dirs = sensitive_dirs or []
        sensitive_dirs.append(list(pki_dir))

    if salt.utils.platform.is_windows():
        return win_verify_env(root_dir,
                              dirs,
                              permissive=permissive,
                              skip_extra=skip_extra,
                              sensitive_dirs=sensitive_dirs)
                              skip_extra=skip_extra)
    import pwd  # after confirming not running Windows
    try:
        pwnam = pwd.getpwnam(user)
@ -300,11 +287,10 @@ def verify_env(
            # to read in what it needs to integrate.
            #
            # If the permissions aren't correct, default to the more secure 700.
            # If acls are enabled, the sensitive_dirs (i.e. pki_dir, key_dir) needs to
            # remain readable, this is still secure because the private keys are still
            # only readable by the user running the master
            sensitive_dirs = sensitive_dirs or []
            if dir_ in sensitive_dirs:
            # If acls are enabled, the pki_dir needs to remain readable, this
            # is still secure because the private keys are still only readable
            # by the user running the master
            if dir_ == pki_dir:
                smode = stat.S_IMODE(mode.st_mode)
                if smode != 448 and smode != 488:
                    if os.access(dir_, os.W_OK):
@ -555,22 +541,11 @@ def win_verify_env(
        dirs,
        permissive=False,
        pki_dir='',
        skip_extra=False,
        sensitive_dirs=None):
        skip_extra=False):
    '''
    Verify that the named directories are in place and that the environment
    can shake the salt
    '''
    if pki_dir:
        salt.utils.versions.warn_until(
            'Neon',
            'Use of \'pki_dir\' was detected: \'pki_dir\' has been deprecated '
            'in favor of \'sensitive_dirs\'. Support for \'pki_dir\' will be '
            'removed in Salt Neon.'
        )
        sensitive_dirs = sensitive_dirs or []
        sensitive_dirs.append(list(pki_dir))

    import salt.utils.win_functions
    import salt.utils.win_dacl
    import salt.utils.path
@ -647,9 +622,8 @@ def win_verify_env(
            sys.stderr.write(msg.format(dir_, err))
            sys.exit(err.errno)

        # The senitive_dirs (i.e. pki_dir, key_dir) gets its own permissions
        sensitive_dirs = sensitive_dirs or []
        if dir_ in sensitive_dirs:
        # The PKI dir gets its own permissions
        if dir_ == pki_dir:
            try:
                # Make Administrators group the owner
                salt.utils.win_dacl.set_owner(path, 'S-1-5-32-544')
@ -5,6 +5,8 @@ missing functions in other modules
'''
from __future__ import absolute_import, print_function, unicode_literals
import platform
import re
import ctypes

# Import Salt Libs
from salt.exceptions import CommandExecutionError
@ -16,6 +18,7 @@ try:
    import win32api
    import win32net
    import win32security
    from win32con import HWND_BROADCAST, WM_SETTINGCHANGE, SMTO_ABORTIFHUNG
    HAS_WIN32 = True
except ImportError:
    HAS_WIN32 = False
@ -168,3 +171,122 @@ def enable_ctrl_logoff_handler():
            lambda event: True if event == ctrl_logoff_event else False,
            1
        )


def escape_argument(arg):
    '''
    Escape the argument for the cmd.exe shell.
    See http://blogs.msdn.com/b/twistylittlepassagesallalike/archive/2011/04/23/everyone-quotes-arguments-the-wrong-way.aspx

    First we escape the quote chars to produce an argument suitable for
    CommandLineToArgvW. We don't need to do this for simple arguments.

    Args:
        arg (str): a single command line argument to escape for the cmd.exe shell

    Returns:
        str: an escaped string suitable to be passed as a program argument to the cmd.exe shell
    '''
    if not arg or re.search(r'(["\s])', arg):
        arg = '"' + arg.replace('"', r'\"') + '"'

    return escape_for_cmd_exe(arg)


def escape_for_cmd_exe(arg):
    '''
    Escape an argument string to be suitable to be passed to
    cmd.exe on Windows

    This method takes an argument that is expected to already be properly
    escaped for the receiving program to be properly parsed. This argument
    will be further escaped to pass the interpolation performed by cmd.exe
    unchanged.

    Any meta-characters will be escaped, removing the ability to e.g. use
    redirects or variables.

    Args:
        arg (str): a single command line argument to escape for cmd.exe

    Returns:
        str: an escaped string suitable to be passed as a program argument to cmd.exe
    '''
    meta_chars = '()%!^"<>&|'
    meta_re = re.compile('(' + '|'.join(re.escape(char) for char in list(meta_chars)) + ')')
    meta_map = {char: "^{0}".format(char) for char in meta_chars}

    def escape_meta_chars(m):
        char = m.group(1)
        return meta_map[char]

    return meta_re.sub(escape_meta_chars, arg)


def broadcast_setting_change(message='Environment'):
    '''
    Send a WM_SETTINGCHANGE Broadcast to all Windows

    Args:

        message (str):
            A string value representing the portion of the system that has been
            updated and needs to be refreshed. Default is ``Environment``. These
            are some common values:

            - "Environment" : to effect a change in the environment variables
            - "intl" : to effect a change in locale settings
            - "Policy" : to effect a change in Group Policy Settings
            - a leaf node in the registry
            - the name of a section in the ``Win.ini`` file

            See lParam within msdn docs for
            `WM_SETTINGCHANGE <https://msdn.microsoft.com/en-us/library/ms725497%28VS.85%29.aspx>`_
            for more information on Broadcasting Messages.

            See GWL_WNDPROC within msdn docs for
            `SetWindowLong <https://msdn.microsoft.com/en-us/library/windows/desktop/ms633591(v=vs.85).aspx>`_
            for information on how to retrieve those messages.

    .. note::
        This will only affect new processes that aren't launched by services. To
        apply changes to the path or registry to services, the host must be
        restarted. The ``salt-minion``, if running as a service, will not see
        changes to the environment until the system is restarted. Services
        inherit their environment from ``services.exe`` which does not respond
        to messaging events. See
        `MSDN Documentation <https://support.microsoft.com/en-us/help/821761/changes-that-you-make-to-environment-variables-do-not-affect-services>`_
        for more information.

    CLI Example:

    .. code-block:: python

        import salt.utils.win_functions
        salt.utils.win_functions.broadcast_setting_change('Environment')
    '''
    # Listening for messages sent by this function would involve working with
    # the SetWindowLong function. This can be accessed via win32gui or through
    # ctypes. You can find examples on how to do this by searching for
    # `Accessing WGL_WNDPROC` on the internet. Here are some examples of how
    # this might work:
    #
    # # using win32gui
    # import win32con
    # import win32gui
    # old_function = win32gui.SetWindowLong(window_handle, win32con.GWL_WNDPROC, new_function)
    #
    # # using ctypes
    # import ctypes
    # import win32con
    # from ctypes import c_long, c_int
    # user32 = ctypes.WinDLL('user32', use_last_error=True)
    # WndProcType = ctypes.WINFUNCTYPE(c_int, c_long, c_int, c_int)
    # new_function = WndProcType
    # old_function = user32.SetWindowLongW(window_handle, win32con.GWL_WNDPROC, new_function)
    broadcast_message = ctypes.create_unicode_buffer(message)
    user32 = ctypes.WinDLL('user32', use_last_error=True)
    result = user32.SendMessageTimeoutW(HWND_BROADCAST, WM_SETTINGCHANGE, 0,
                                        broadcast_message, SMTO_ABORTIFHUNG,
                                        5000, 0)
    return result == 1
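.. note::
    A quick illustration of the two escaping helpers above; the outputs
    follow directly from the documented rules (quote arguments containing
    spaces or quotes, then caret-escape cmd.exe meta-characters):

    .. code-block:: python

        escape_argument('C:\\Program Files\\salt')
        # -> '^"C:\\Program Files\\salt^"'  (quoted for the space, then ^-escaped)

        escape_for_cmd_exe('100%&done')
        # -> '100^%^&done'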
@ -14,7 +14,8 @@ except (ImportError, ValueError):
    HAS_WIN32 = False

if HAS_WIN32:
    kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
    kernel32 = ctypes.WinDLL(str('kernel32'),  # future lint: disable=blacklisted-function
                             use_last_error=True)


# Although utils are often directly imported, it is also possible to use the
@ -46,8 +46,8 @@ def __virtual__():

if HAS_WIN32:
    # ctypes definitions
    kernel32 = ctypes.WinDLL('kernel32')
    advapi32 = ctypes.WinDLL('advapi32')
    kernel32 = ctypes.WinDLL(str('kernel32'))  # future lint: disable=blacklisted-function
    advapi32 = ctypes.WinDLL(str('advapi32'))  # future lint: disable=blacklisted-function

    INVALID_HANDLE_VALUE = wintypes.HANDLE(-1).value
    INVALID_DWORD_VALUE = wintypes.DWORD(-1).value  # ~WinAPI
@ -5,10 +5,9 @@ Custom YAML loading in Salt

# Import python libs
from __future__ import absolute_import, print_function, unicode_literals
import re
import warnings

# Import third party libs
import re
import yaml  # pylint: disable=blacklisted-import
from yaml.nodes import MappingNode, SequenceNode
from yaml.constructor import ConstructorError
@ -18,6 +17,8 @@ try:
except Exception:
    pass

import salt.utils.stringutils

__all__ = ['SaltYamlSafeLoader', 'load', 'safe_load']


@ -30,7 +31,7 @@ warnings.simplefilter('always', category=DuplicateKeyWarning)


# with code integrated from https://gist.github.com/844388
class SaltYamlSafeLoader(yaml.SafeLoader, object):
class SaltYamlSafeLoader(yaml.SafeLoader):
    '''
    Create a custom YAML loader that uses the custom constructor. This allows
    for the YAML loading defaults to be manipulated based on needs within salt
@ -46,6 +47,9 @@ class SaltYamlSafeLoader(yaml.SafeLoader, object):
        self.add_constructor(
            'tag:yaml.org,2002:omap',
            type(self).construct_yaml_map)
        self.add_constructor(
            'tag:yaml.org,2002:str',
            type(self).construct_yaml_str)
        self.add_constructor(
            'tag:yaml.org,2002:python/unicode',
            type(self).construct_unicode)
@ -76,18 +80,25 @@ class SaltYamlSafeLoader(yaml.SafeLoader, object):

        self.flatten_mapping(node)

        context = 'while constructing a mapping'
        mapping = self.dictclass()
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            try:
                hash(key)
            except TypeError:
                err = ('While constructing a mapping {0} found unacceptable '
                       'key {1}').format(node.start_mark, key_node.start_mark)
                raise ConstructorError(err)
                raise ConstructorError(
                    context,
                    node.start_mark,
                    "found unacceptable key {0}".format(key_node.value),
                    key_node.start_mark)
            value = self.construct_object(value_node, deep=deep)
            if key in mapping:
                raise ConstructorError('Conflicting ID \'{0}\''.format(key))
                raise ConstructorError(
                    context,
                    node.start_mark,
                    "found conflicting ID '{0}'".format(key),
                    key_node.start_mark)
            mapping[key] = value
        return mapping

@ -112,6 +123,10 @@ class SaltYamlSafeLoader(yaml.SafeLoader, object):
            node.value = eval(node.value, {}, {})  # pylint: disable=W0123
        return super(SaltYamlSafeLoader, self).construct_scalar(node)

    def construct_yaml_str(self, node):
        value = self.construct_scalar(node)
        return salt.utils.stringutils.to_unicode(value)

    def flatten_mapping(self, node):
        merge = []
        index = 0
137
tests/integration/cloud/providers/test_dimensiondata.py
Normal file
@ -0,0 +1,137 @@
# -*- coding: utf-8 -*-
'''
Integration tests for the Dimension Data cloud provider
'''

# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
import os
import random
import string

# Import Salt Testing Libs
from tests.support.case import ShellCase
from tests.support.paths import FILES
from tests.support.helpers import expensiveTest

# Import Salt Libs
from salt.config import cloud_providers_config
from salt.ext.six.moves import range  # pylint: disable=import-error,redefined-builtin


def _random_name(size=6):
    '''
    Generates a random cloud instance name
    '''
    return 'cloud-test-' + ''.join(
        random.choice(string.ascii_lowercase + string.digits)
        for x in range(size)
    )


# Create the cloud instance name to be used throughout the tests
INSTANCE_NAME = _random_name()
PROVIDER_NAME = 'dimensiondata'


class DimensionDataTest(ShellCase):
    '''
    Integration tests for the Dimension Data cloud provider in Salt-Cloud
    '''

    @expensiveTest
    def setUp(self):
        '''
        Sets up the test requirements
        '''
        super(DimensionDataTest, self).setUp()

        # check if appropriate cloud provider and profile files are present
        profile_str = 'dimensiondata-config'
        providers = self.run_cloud('--list-providers')
        if profile_str + ':' not in providers:
            self.skipTest(
                'Configuration file for {0} was not found. Check {0}.conf files '
                'in tests/integration/files/conf/cloud.*.d/ to run these tests.'
                .format(PROVIDER_NAME)
            )

        # check if user_id, key, and region are present
        config = cloud_providers_config(
            os.path.join(
                FILES,
                'conf',
                'cloud.providers.d',
                PROVIDER_NAME + '.conf'
            )
        )

        user_id = config[profile_str][PROVIDER_NAME]['user_id']
        key = config[profile_str][PROVIDER_NAME]['key']
        region = config[profile_str][PROVIDER_NAME]['region']

        if user_id == '' or key == '' or region == '':
            self.skipTest(
                'A user Id, password, and a region '
                'must be provided to run these tests. Check '
                'tests/integration/files/conf/cloud.providers.d/{0}.conf'
                .format(PROVIDER_NAME)
            )

    def test_list_images(self):
        '''
        Tests the return of running the --list-images command for the dimensiondata cloud provider
        '''
        image_list = self.run_cloud('--list-images {0}'.format(PROVIDER_NAME))
        self.assertIn(
            'Ubuntu 14.04 2 CPU',
            [i.strip() for i in image_list]
        )

    def test_list_locations(self):
        '''
        Tests the return of running the --list-locations command for the dimensiondata cloud provider
        '''
        _list_locations = self.run_cloud('--list-locations {0}'.format(PROVIDER_NAME))
        self.assertIn(
            'Australia - Melbourne MCP2',
            [i.strip() for i in _list_locations]
        )

    def test_list_sizes(self):
        '''
        Tests the return of running the --list-sizes command for the dimensiondata cloud provider
        '''
        _list_sizes = self.run_cloud('--list-sizes {0}'.format(PROVIDER_NAME))
        self.assertIn(
            'default',
            [i.strip() for i in _list_sizes]
        )

    def test_instance(self):
        '''
        Test creating an instance on Dimension Data's cloud
        '''
        # check if instance with salt installed returned
        try:
            self.assertIn(
                INSTANCE_NAME,
                [i.strip() for i in self.run_cloud('-p dimensiondata-test {0}'.format(INSTANCE_NAME), timeout=500)]
            )
        except AssertionError:
            self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), timeout=500)
            raise

        # delete the instance
        try:
            self.assertIn(
                'True',
                [i.strip() for i in self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), timeout=500)]
            )
        except AssertionError:
            raise

        # Final clean-up of created instance, in case something went wrong.
        # This was originally in a tearDown function, but that didn't make sense
        # to run for each test when not all tests create instances.
        if INSTANCE_NAME in [i.strip() for i in self.run_cloud('--query')]:
            self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), timeout=500)
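A minimal sketch (not part of the commit) of what _random_name() above produces; the generated suffix is illustrative:

import random
import string

# Fixed 'cloud-test-' prefix plus six random lowercase/digit characters.
name = 'cloud-test-' + ''.join(
    random.choice(string.ascii_lowercase + string.digits) for _ in range(6))
assert name.startswith('cloud-test-') and len(name) == len('cloud-test-') + 6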
@@ -36,7 +36,7 @@ class GCETest(ShellCase):
        provider = 'gce'
        providers = self.run_cloud('--list-providers')
        # Create the cloud instance name to be used throughout the tests
-        self.INSTANCE_NAME = generate_random_name('cloud-test-')
+        self.INSTANCE_NAME = generate_random_name('cloud-test-').lower()

        if profile_str not in providers:
            self.skipTest(
@@ -45,7 +45,7 @@ def __has_required_azure():
    else:
        version = LooseVersion(azure.common.__version__)

-        if REQUIRED_AZURE <= version:
+        if LooseVersion(REQUIRED_AZURE) <= version:
            return True
    return False
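A minimal sketch (not part of the commit) of why the wrapped comparison matters; the version strings are hypothetical:

from distutils.version import LooseVersion

REQUIRED_AZURE = '0.11.1'          # hypothetical minimum
version = LooseVersion('0.30.0')   # hypothetical installed version

# Both operands are LooseVersion, so this is a numeric version comparison;
# a plain string compare would rank '0.11.1' <= '0.9.0' as True.
assert LooseVersion(REQUIRED_AZURE) <= version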
@@ -0,0 +1,11 @@
dimensiondata-test:
  provider: dimensiondata-config
  image: 42816eb2-9846-4483-95c3-7d7fbddebf2c
  size: default
  location: AU10
  is_started: yes
  description: 'Salt Ubuntu test'
  network_domain: ''
  vlan: ''
  ssh_interface: private_ips
  auth: ''
@@ -0,0 +1,5 @@
dimensiondata-config:
  driver: dimensiondata
  user_id: ''
  key: ''
  region: 'dd-au'
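A minimal sketch (not part of the commit) of how the setUp above consumes these two files: the profile's provider: names the provider alias, and the provider's driver: keys the nested dict. The path literal is illustrative:

from salt.config import cloud_providers_config

config = cloud_providers_config(
    'tests/integration/files/conf/cloud.providers.d/dimensiondata.conf')
user_id = config['dimensiondata-config']['dimensiondata']['user_id']
region = config['dimensiondata-config']['dimensiondata']['region']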
1 tests/integration/files/file/base/random_bytes Normal file
@@ -0,0 +1 @@
ゥ維
BIN tests/integration/files/file/prod/issue45893/custom.tar.gz Normal file
Binary file not shown.
5 tests/integration/files/file/prod/issue45893/init.sls Normal file
@@ -0,0 +1,5 @@
test_non_base_env:
  archive.extracted:
    - name: {{ pillar['issue45893.name'] }}
    - source: salt://issue45893/custom.tar.gz
    - keep: False
@@ -144,7 +144,7 @@ class CMDModuleTest(ModuleCase):
        '''
        self.assertEqual(self.run_function('cmd.run',
                                           ['bad_command --foo']).rstrip(),
-                        'ERROR: This shell command is not permitted: "bad_command --foo"')
+                        'ERROR: The shell command "bad_command --foo" is not permitted')

    def test_script(self):
        '''
@@ -1379,6 +1379,20 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
        expected_result = 'Command "echo "Success!"" run'
        self.assertIn(expected_result, test_data)

    def test_onchanges_requisite_with_duration(self):
        '''
        Tests a simple state using the onchanges requisite
        the state will not run but results will include duration
        '''

        # Only run the state once and keep the return data
        state_run = self.run_function('state.sls', mods='requisites.onchanges_simple')

        # Then, test the result of the state run when changes are not expected to happen
        # and ensure duration is included in the results
        test_data = state_run['cmd_|-test_non_changing_state_|-echo "Should not run"_|-run']
        self.assertIn('duration', test_data)

    # onfail tests

    def test_onfail_requisite(self):
@@ -1450,6 +1464,18 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
        expected_result = 'State was not run because onfail req did not change'
        self.assertIn(expected_result, test_data)

    def test_onfail_requisite_with_duration(self):
        '''
        Tests a simple state using the onfail requisite
        '''

        # Only run the state once and keep the return data
        state_run = self.run_function('state.sls', mods='requisites.onfail_simple')

        # Then, test the result of the state run when a failure is not expected to happen
        test_data = state_run['cmd_|-test_non_failing_state_|-echo "Should not run"_|-run']
        self.assertIn('duration', test_data)

    # listen tests

    def test_listen_requisite(self):
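A minimal sketch (not part of the commit) of the return-key format the two duration lookups index into; the components come straight from the tests above:

# Highstate returns are keyed '<state>_|-<id>_|-<name>_|-<function>'.
key = '_|-'.join(
    ['cmd', 'test_non_changing_state', 'echo "Should not run"', 'run'])
assert key == 'cmd_|-test_non_changing_state_|-echo "Should not run"_|-run'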
@@ -87,24 +87,27 @@ from tests.support.unit import skipIf
# Import Salt libs
import salt.utils.path
import salt.utils.platform
-from salt.utils.gitfs import GITPYTHON_MINVER, PYGIT2_MINVER
from salt.utils.versions import LooseVersion
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES as VIRTUALENV_NAMES
from salt.ext.six.moves import range  # pylint: disable=redefined-builtin
+from salt.utils.gitfs import (
+    GITPYTHON_VERSION,
+    GITPYTHON_MINVER,
+    PYGIT2_VERSION,
+    PYGIT2_MINVER,
+    LIBGIT2_VERSION,
+    LIBGIT2_MINVER
+)

# Check for requisite components
try:
    import git
-    HAS_GITPYTHON = \
-        LooseVersion(git.__version__) >= LooseVersion(GITPYTHON_MINVER)
+    HAS_GITPYTHON = GITPYTHON_VERSION >= GITPYTHON_MINVER
except ImportError:
    HAS_GITPYTHON = False

try:
-    import pygit2
-    HAS_PYGIT2 = \
-        LooseVersion(pygit2.__version__) >= LooseVersion(PYGIT2_MINVER)
-except ImportError:
+    HAS_PYGIT2 = PYGIT2_VERSION >= PYGIT2_MINVER \
+        and LIBGIT2_VERSION >= LIBGIT2_MINVER
+except AttributeError:
    HAS_PYGIT2 = False

HAS_SSHD = bool(salt.utils.path.which('sshd'))
@@ -419,7 +422,7 @@ class TestGitPythonAuthenticatedHTTP(TestGitPythonHTTP, GitPythonMixin):
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@skip_if_not_root
-@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} required'.format(PYGIT2_MINVER))
+@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(not HAS_SSHD, 'sshd not present')
class TestPygit2SSH(GitPillarSSHTestBase):
    '''
@@ -433,12 +436,6 @@ class TestPygit2SSH(GitPillarSSHTestBase):
    username = USERNAME
    passphrase = PASSWORD

-    def setUp(self):
-        super(TestPygit2SSH, self).setUp()
-        if self.is_el7():  # pylint: disable=E1120
-            self.skipTest(
-                'skipped until EPEL7 fixes pygit2/libgit2 version mismatch')
-
    @requires_system_grains
    def test_single_source(self, grains):
        '''
@@ -1199,19 +1196,13 @@ class TestPygit2SSH(GitPillarSSHTestBase):
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@skip_if_not_root
-@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} required'.format(PYGIT2_MINVER))
+@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(not HAS_NGINX, 'nginx not present')
@skipIf(not HAS_VIRTUALENV, 'virtualenv not present')
class TestPygit2HTTP(GitPillarHTTPTestBase):
    '''
    Test git_pillar with pygit2 using SSH authentication
    '''
-    def setUp(self):
-        super(TestPygit2HTTP, self).setUp()
-        if self.is_el7():  # pylint: disable=E1120
-            self.skipTest(
-                'skipped until EPEL7 fixes pygit2/libgit2 version mismatch')
-
    def test_single_source(self):
        '''
        Test using a single ext_pillar repo
@@ -1452,7 +1443,7 @@ class TestPygit2HTTP(GitPillarHTTPTestBase):
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(_windows_or_mac(), 'minion is windows or mac')
@skip_if_not_root
-@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} required'.format(PYGIT2_MINVER))
+@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(not HAS_NGINX, 'nginx not present')
@skipIf(not HAS_VIRTUALENV, 'virtualenv not present')
class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase):
@@ -1465,12 +1456,6 @@ class TestPygit2AuthenticatedHTTP(GitPillarHTTPTestBase):
    user = USERNAME
    password = PASSWORD

-    def setUp(self):
-        super(TestPygit2AuthenticatedHTTP, self).setUp()
-        if self.is_el7():  # pylint: disable=E1120
-            self.skipTest(
-                'skipped until EPEL7 fixes pygit2/libgit2 version mismatch')
-
    def test_single_source(self):
        '''
        Test using a single ext_pillar repo
@@ -11,7 +11,7 @@ import tempfile

# Import Salt Testing libs
from tests.support.case import ModuleCase
-from tests.support.helpers import destructiveTest
+from tests.support.helpers import destructiveTest, flaky
from tests.support.paths import CODE_DIR


@@ -33,6 +33,7 @@ class SPMManTest(ModuleCase):
    def tearDown(self):
        shutil.rmtree(self.tmpdir)

+    @flaky
    def test_man_spm(self):
        '''
        test man spm
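A rough, hypothetical sketch of the retry idea behind the @flaky decorator added above; this is not Salt's actual implementation in tests.support.helpers:

import functools

def flaky(test_func, attempts=4):
    # Re-run a failing test a few times; only the final failure propagates.
    @functools.wraps(test_func)
    def wrapper(self, *args, **kwargs):
        for attempt in range(attempts):
            try:
                return test_func(self, *args, **kwargs)
            except AssertionError:
                if attempt == attempts - 1:
                    raise
    return wrapper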
@@ -9,6 +9,7 @@ import time

# Import Salt Testing Libs
from tests.support.case import SSHCase
+from tests.support.helpers import flaky
from tests.support.paths import TMP

# Import Salt Libs
@@ -162,6 +163,7 @@ class SSHStateTest(SSHCase):
        check_file = self.run_function('file.file_exists', [SSH_SLS_FILE], wipe=False)
        self.assertTrue(check_file)

+    @flaky
    def test_state_running(self):
        '''
        test state.running with salt-ssh
@@ -68,6 +68,16 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        log.debug('Checking for extracted file: %s', path)
        self.assertTrue(os.path.isfile(path))

    def run_function(self, *args, **kwargs):
        ret = super(ArchiveTest, self).run_function(*args, **kwargs)
        log.debug('ret = %s', ret)
        return ret

    def run_state(self, *args, **kwargs):
        ret = super(ArchiveTest, self).run_state(*args, **kwargs)
        log.debug('ret = %s', ret)
        return ret

    def test_archive_extracted_skip_verify(self):
        '''
        test archive.extracted with skip_verify
@@ -75,7 +85,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source, archive_format='tar',
                             skip_verify=True)
-        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
@@ -91,7 +100,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source, archive_format='tar',
                             source_hash=ARCHIVE_TAR_HASH)
-        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
@@ -111,7 +119,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
                             source=self.archive_tar_source, archive_format='tar',
                             source_hash=ARCHIVE_TAR_HASH,
                             user='root', group=r_group)
-        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
@@ -128,7 +135,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
                             source_hash=ARCHIVE_TAR_HASH,
                             options='--strip=1',
                             enforce_toplevel=False)
-        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
@@ -145,7 +151,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
                             source_hash=ARCHIVE_TAR_HASH,
                             options='--strip-components=1',
                             enforce_toplevel=False)
-        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
@@ -160,7 +165,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source,
                             source_hash=ARCHIVE_TAR_HASH)
-        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
@@ -177,7 +181,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
                             source_hash=ARCHIVE_TAR_HASH,
                             use_cmd_unzip=False,
                             archive_format='tar')
-        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
@@ -190,7 +193,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar')
-        log.debug('ret = %s', ret)

        self.assertSaltTrueReturn(ret)

@@ -203,7 +205,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar',
                             source_hash=ARCHIVE_TAR_BAD_HASH, skip_verify=True)
-        log.debug('ret = %s', ret)

        self.assertSaltTrueReturn(ret)

@@ -216,7 +217,6 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar',
                             source_hash=ARCHIVE_TAR_HASH)
-        log.debug('ret = %s', ret)

        self.assertSaltTrueReturn(ret)

@@ -229,6 +229,17 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar',
                             source_hash=ARCHIVE_TAR_BAD_HASH)
-        log.debug('ret = %s', ret)

        self.assertSaltFalseReturn(ret)

    def test_archive_extracted_with_non_base_saltenv(self):
        '''
        test archive.extracted with a saltenv other than `base`
        '''
        ret = self.run_function(
            'state.sls',
            ['issue45893'],
            pillar={'issue45893.name': ARCHIVE_DIR},
            saltenv='prod')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(os.path.join(ARCHIVE_DIR, UNTAR_FILE))
@@ -444,6 +444,44 @@ class DockerContainerTestCase(ModuleCase, SaltReturnAssertsMixin):
            image_info['Config']['Cmd']
        )

    @container_name
    def test_running_with_port_bindings(self, name):
        '''
        This tests that the ports which are being bound are also exposed, even
        when not explicitly configured. This test will create a container with
        only some of the ports exposed, including some which aren't even bound.
        The resulting container's exposed ports should contain all of the ports
        defined in the "ports" argument, as well as each of the ports which are
        being bound.
        '''
        # Create the container
        ret = self.run_state(
            'docker_container.running',
            name=name,
            image=self.image,
            command='sleep 600',
            shutdown_timeout=1,
            port_bindings=[1234, '1235-1236', '2234/udp', '2235-2236/udp'],
            ports=[1235, '2235/udp', 9999],
        )
        self.assertSaltTrueReturn(ret)

        # Check the created container's port bindings and exposed ports. The
        # port bindings should only contain the ports defined in the
        # port_bindings argument, while the exposed ports should also contain
        # the extra port (9999/tcp) which was included in the ports argument.
        cinfo = self.run_function('docker.inspect_container', [name])
        ports = ['1234/tcp', '1235/tcp', '1236/tcp',
                 '2234/udp', '2235/udp', '2236/udp']
        self.assertEqual(
            sorted(cinfo['HostConfig']['PortBindings']),
            ports
        )
        self.assertEqual(
            sorted(cinfo['Config']['ExposedPorts']),
            ports + ['9999/tcp']
        )

    @container_name
    def test_absent_with_stopped_container(self, name):
        '''
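A minimal sketch (not part of the commit) of the port-range expansion the assertions above expect; this toy expander is not Salt's parser:

def expand(binding):
    spec, _, proto = str(binding).partition('/')
    proto = proto or 'tcp'
    start, _, end = spec.partition('-')
    return ['{0}/{1}'.format(port, proto)
            for port in range(int(start), int(end or start) + 1)]

ports = []
for binding in [1234, '1235-1236', '2234/udp', '2235-2236/udp']:
    ports.extend(expand(binding))
assert sorted(ports) == ['1234/tcp', '1235/tcp', '1236/tcp',
                         '2234/udp', '2235/udp', '2236/udp']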
@@ -5,9 +5,12 @@
    tests.support.mock
    ~~~~~~~~~~~~~~~~~~

-    Helper module that wraps :mod:`mock <python3:unittest.mock>` and provides
-    some fake objects in order to properly set the function/class decorators
-    and yet skip the test cases execution.
+    Helper module that wraps `mock` and provides some fake objects in order to
+    properly set the function/class decorators and yet skip the test case's
+    execution.
+
+    Note: mock >= 2.0.0 required since unittest.mock does not have
+    MagicMock.assert_called in Python < 3.6.
'''
# pylint: disable=unused-import,function-redefined,blacklisted-module,blacklisted-external-module

@@ -18,37 +21,20 @@ import sys
from salt.ext import six

try:
-    if sys.version_info >= (3,):
-        # Python 3
-        from unittest.mock import (
-            Mock,
-            MagicMock,
-            patch,
-            sentinel,
-            DEFAULT,
-            # ANY and call will be imported further down
-            create_autospec,
-            FILTER_DIR,
-            NonCallableMock,
-            NonCallableMagicMock,
-            PropertyMock,
-            __version__
-        )
-    else:
-        from mock import (
-            Mock,
-            MagicMock,
-            patch,
-            sentinel,
-            DEFAULT,
-            # ANY and call will be imported further down
-            create_autospec,
-            FILTER_DIR,
-            NonCallableMock,
-            NonCallableMagicMock,
-            PropertyMock,
-            __version__
-        )
+    from mock import (
+        Mock,
+        MagicMock,
+        patch,
+        sentinel,
+        DEFAULT,
+        # ANY and call will be imported further down
+        create_autospec,
+        FILTER_DIR,
+        NonCallableMock,
+        NonCallableMagicMock,
+        PropertyMock,
+        __version__
+    )
    NO_MOCK = False
    NO_MOCK_REASON = ''
    mock_version = []
@@ -99,11 +85,7 @@ except ImportError as exc:

if NO_MOCK is False:
    try:
-        if sys.version_info >= (3,):
-            # Python 3
-            from unittest.mock import call, ANY
-        else:
-            from mock import call, ANY
+        from mock import call, ANY
    except ImportError:
        NO_MOCK = True
        NO_MOCK_REASON = 'you need to upgrade your mock version to >= 0.8.0'
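An illustration, drawn from usage elsewhere in this diff, of how the NO_MOCK pair built above is consumed; the test class itself is hypothetical:

from tests.support.mock import NO_MOCK, NO_MOCK_REASON
from tests.support.unit import TestCase, skipIf

@skipIf(NO_MOCK, NO_MOCK_REASON)
class ExampleTestCase(TestCase):
    def test_placeholder(self):
        pass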
23 tests/unit/cache/test_localfs.py vendored
@@ -5,6 +5,7 @@ unit tests for the localfs cache

# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
+import os
import shutil
import tempfile

@@ -45,16 +46,24 @@ class LocalFSTest(TestCase, LoaderModuleMockMixin):
        with patch.dict(localfs.__context__, {'serial': serializer}):
            localfs.store(bank='bank', key='key', data='payload data', cachedir=tmp_dir)

-    # 'store' function tests: 4
+    # 'store' function tests: 5

-    def test_store_no_base_cache_dir(self):
+    def test_handled_exception_cache_dir(self):
        '''
        Tests that a SaltCacheError is raised when the base directory doesn't exist and
        cannot be created.
        '''
-        with patch('os.path.isdir', MagicMock(return_value=None)):
-            with patch('os.makedirs', MagicMock(side_effect=OSError)):
-                self.assertRaises(SaltCacheError, localfs.store, bank='', key='', data='', cachedir='')
+        with patch('os.makedirs', MagicMock(side_effect=OSError(os.errno.EEXIST, ''))):
+            with patch('tempfile.mkstemp', MagicMock(side_effect=Exception)):
+                self.assertRaises(Exception, localfs.store, bank='', key='', data='', cachedir='')
+
+    def test_unhandled_exception_cache_dir(self):
+        '''
+        Tests that a SaltCacheError is raised when the base directory doesn't exist and
+        cannot be created.
+        '''
+        with patch('os.makedirs', MagicMock(side_effect=OSError(1, ''))):
+            self.assertRaises(SaltCacheError, localfs.store, bank='', key='', data='', cachedir='')

    def test_store_close_mkstemp_file_handle(self):
        '''
@@ -64,7 +73,7 @@ class LocalFSTest(TestCase, LoaderModuleMockMixin):
        This test mocks the call to mkstemp, but forces an OSError to be raised when the
        close() function is called on a file descriptor that doesn't exist.
        '''
-        with patch('os.path.isdir', MagicMock(return_value=True)):
+        with patch('os.makedirs', MagicMock(side_effect=OSError(os.errno.EEXIST, ''))):
            with patch('tempfile.mkstemp', MagicMock(return_value=(12345, 'foo'))):
                self.assertRaises(OSError, localfs.store, bank='', key='', data='', cachedir='')

@@ -73,7 +82,7 @@ class LocalFSTest(TestCase, LoaderModuleMockMixin):
        Tests that a SaltCacheError is raised when there is a problem writing to the
        cache file.
        '''
-        with patch('os.path.isdir', MagicMock(return_value=True)):
+        with patch('os.makedirs', MagicMock(side_effect=OSError(os.errno.EEXIST, ''))):
            with patch('tempfile.mkstemp', MagicMock(return_value=('one', 'two'))):
                with patch('os.close', MagicMock(return_value=None)):
                    with patch('salt.utils.files.fopen', MagicMock(side_effect=IOError)):
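A minimal sketch, inferred from the two new tests above rather than from the localfs source, of the errno handling they exercise: EEXIST from os.makedirs means the cache directory already exists and is ignored, while any other OSError surfaces as a SaltCacheError:

import errno
import os

def _ensure_cache_dir(path):
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise  # localfs.store reports this as a SaltCacheError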
@ -1239,6 +1239,38 @@ class VMwareTestCase(ExtendedTestCase):
|
||||
kwargs={'name': 'cCD2GgJGPG1DUnPeFBoPeqtdmUxIWxDoVFbA14vIG0BPoUECkgbRMnnY6gaUPBvIDCcsZ5HU48ubgQu5c'},
|
||||
call='function')
|
||||
|
||||
def test__add_new_hard_disk_helper(self):
|
||||
with patch('salt.cloud.clouds.vmware._get_si', MagicMock(return_value=None)):
|
||||
with patch('salt.utils.vmware.get_mor_using_container_view', side_effect=[None, None]):
|
||||
self.assertRaises(
|
||||
SaltCloudSystemExit,
|
||||
vmware._add_new_hard_disk_helper,
|
||||
disk_label='test',
|
||||
size_gb=100,
|
||||
unit_number=0,
|
||||
datastore='whatever'
|
||||
)
|
||||
with patch('salt.utils.vmware.get_mor_using_container_view', side_effect=['Datastore', None]):
|
||||
self.assertRaises(
|
||||
AttributeError,
|
||||
vmware._add_new_hard_disk_helper,
|
||||
disk_label='test',
|
||||
size_gb=100,
|
||||
unit_number=0,
|
||||
datastore='whatever'
|
||||
)
|
||||
vmware.salt.utils.vmware.get_mor_using_container_view.assert_called_with(None, vim.Datastore, 'whatever')
|
||||
with patch('salt.utils.vmware.get_mor_using_container_view', side_effect=[None, 'Cluster']):
|
||||
self.assertRaises(
|
||||
AttributeError,
|
||||
vmware._add_new_hard_disk_helper,
|
||||
disk_label='test',
|
||||
size_gb=100,
|
||||
unit_number=0,
|
||||
datastore='whatever'
|
||||
)
|
||||
vmware.salt.utils.vmware.get_mor_using_container_view.assert_called_with(None, vim.StoragePod, 'whatever')
|
||||
|
||||
|
||||
class CloneFromSnapshotTest(TestCase):
|
||||
'''
|
||||
|
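An illustration of the assertRaises form used above (standard unittest, nothing VMware-specific): the exception class comes first, then the callable, then keyword arguments forwarded to it:

import unittest

class AssertRaisesDemo(unittest.TestCase):
    def test_forwards_kwargs(self):
        def boom(disk_label=None):
            raise ValueError(disk_label)
        self.assertRaises(ValueError, boom, disk_label='test')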
@@ -1,10 +1,11 @@
# -*- coding: utf-8 -*-
'''
-:codeauthor: :email:`Erik Johnson <erik@saltstack.com>`
+    :codeauthor: :email:`Erik Johnson <erik@saltstack.com>`
'''

# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
+import copy
import errno
import os
import shutil
@@ -18,15 +19,6 @@ try:
except ImportError:
    pass

-# Import 3rd-party libs
-try:
-    import git  # pylint: disable=unused-import
-    HAS_GITPYTHON = True
-    GITFS_AVAILABLE = True
-except ImportError:
-    HAS_GITPYTHON = False
-    GITFS_AVAILABLE = False
-
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
@@ -36,11 +28,36 @@ from tests.support.paths import TMP, FILES
# Import salt libs
import salt.fileserver.gitfs as gitfs
import salt.utils.files
-import salt.utils.gitfs
import salt.utils.platform
import salt.utils.win_functions
import salt.utils.yaml

+import salt.utils.gitfs
+from salt.utils.gitfs import (
+    GITPYTHON_VERSION,
+    GITPYTHON_MINVER,
+    PYGIT2_VERSION,
+    PYGIT2_MINVER,
+    LIBGIT2_VERSION,
+    LIBGIT2_MINVER
+)
+
+try:
+    import git
+    # We still need to use GitPython here for temp repo setup, so we do need to
+    # actually import it. But we don't need to import pygit2 in this module; we
+    # can just use the LooseVersion instances imported along with
+    # salt.utils.gitfs to check if we have a compatible version.
+    HAS_GITPYTHON = GITPYTHON_VERSION >= GITPYTHON_MINVER
+except (ImportError, AttributeError):
+    HAS_GITPYTHON = False
+
+try:
+    HAS_PYGIT2 = PYGIT2_VERSION >= PYGIT2_MINVER \
+        and LIBGIT2_VERSION >= LIBGIT2_MINVER
+except AttributeError:
+    HAS_PYGIT2 = False
+
log = logging.getLogger(__name__)

TMP_SOCK_DIR = tempfile.mkdtemp(dir=TMP)
@@ -48,6 +65,38 @@ TMP_REPO_DIR = os.path.join(TMP, 'gitfs_root')
INTEGRATION_BASE_FILES = os.path.join(FILES, 'file', 'base')
UNICODE_FILENAME = 'питон.txt'
UNICODE_DIRNAME = UNICODE_ENVNAME = 'соль'
+TAG_NAME = 'mytag'
+
+OPTS = {
+    'sock_dir': TMP_SOCK_DIR,
+    'gitfs_remotes': ['file://' + TMP_REPO_DIR],
+    'gitfs_root': '',
+    'fileserver_backend': ['gitfs'],
+    'gitfs_base': 'master',
+    'fileserver_events': True,
+    'transport': 'zeromq',
+    'gitfs_mountpoint': '',
+    'gitfs_saltenv': [],
+    'gitfs_env_whitelist': [],
+    'gitfs_env_blacklist': [],
+    'gitfs_saltenv_whitelist': [],
+    'gitfs_saltenv_blacklist': [],
+    'gitfs_user': '',
+    'gitfs_password': '',
+    'gitfs_insecure_auth': False,
+    'gitfs_privkey': '',
+    'gitfs_pubkey': '',
+    'gitfs_passphrase': '',
+    'gitfs_refspecs': [
+        '+refs/heads/*:refs/remotes/origin/*',
+        '+refs/tags/*:refs/tags/*'
+    ],
+    'gitfs_ssl_verify': True,
+    'gitfs_disable_saltenv_mapping': False,
+    'gitfs_ref_types': ['branch', 'tag', 'sha'],
+    'gitfs_update_interval': 60,
+    '__role': 'master',
+}


def _rmtree_error(func, path, excinfo):
@@ -55,43 +104,23 @@ def _rmtree_error(func, path, excinfo):
    func(path)


-@skipIf(not HAS_GITPYTHON, 'GitPython is not installed')
+def _clear_instance_map():
+    try:
+        del salt.utils.gitfs.GitFS.instance_map[tornado.ioloop.IOLoop.current()]
+    except KeyError:
+        pass
+
+
+@skipIf(not HAS_GITPYTHON, 'GitPython >= {0} required'.format(GITPYTHON_MINVER))
class GitfsConfigTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
-        self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
+        opts = copy.deepcopy(OPTS)
+        opts['cachedir'] = self.tmp_cachedir
+        opts['sock_dir'] = self.tmp_sock_dir
        return {
            gitfs: {
-                '__opts__': {
-                    'cachedir': self.tmp_cachedir,
-                    'sock_dir': TMP_SOCK_DIR,
-                    'gitfs_root': 'salt',
-                    'fileserver_backend': ['gitfs'],
-                    'gitfs_base': 'master',
-                    'fileserver_events': True,
-                    'transport': 'zeromq',
-                    'gitfs_mountpoint': '',
-                    'gitfs_saltenv': [],
-                    'gitfs_env_whitelist': [],
-                    'gitfs_env_blacklist': [],
-                    'gitfs_saltenv_whitelist': [],
-                    'gitfs_saltenv_blacklist': [],
-                    'gitfs_user': '',
-                    'gitfs_password': '',
-                    'gitfs_insecure_auth': False,
-                    'gitfs_privkey': '',
-                    'gitfs_pubkey': '',
-                    'gitfs_passphrase': '',
-                    'gitfs_refspecs': [
-                        '+refs/heads/*:refs/remotes/origin/*',
-                        '+refs/tags/*:refs/tags/*'
-                    ],
-                    'gitfs_ssl_verify': True,
-                    'gitfs_disable_saltenv_mapping': False,
-                    'gitfs_ref_types': ['branch', 'tag', 'sha'],
-                    'gitfs_update_interval': 60,
-                    '__role': 'master',
-                }
+                '__opts__': opts,
            }
        }
@@ -99,16 +128,27 @@ class GitfsConfigTestCase(TestCase, LoaderModuleMockMixin):
    def setUpClass(cls):
        # Clear the instance map so that we make sure to create a new instance
        # for this test class.
-        try:
-            del salt.utils.gitfs.GitFS.instance_map[tornado.ioloop.IOLoop.current()]
-        except KeyError:
-            pass
+        _clear_instance_map()
+        cls.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
+        cls.tmp_sock_dir = tempfile.mkdtemp(dir=TMP)

-    def tearDown(self):
-        shutil.rmtree(self.tmp_cachedir)
+    @classmethod
+    def tearDownClass(cls):
+        '''
+        Remove the temporary git repository and gitfs cache directory to ensure
+        a clean environment for the other test class(es).
+        '''
+        for path in (cls.tmp_cachedir, cls.tmp_sock_dir):
+            try:
+                shutil.rmtree(path, onerror=_rmtree_error)
+            except OSError as exc:
+                if exc.errno != errno.EEXIST:
+                    raise

    def test_per_saltenv_config(self):
        opts_override = textwrap.dedent('''
            gitfs_root: salt

            gitfs_saltenv:
              - baz:
                # when loaded, the "salt://" prefix will be removed
@@ -186,110 +226,27 @@ class GitfsConfigTestCase(TestCase, LoaderModuleMockMixin):
LOAD = {'saltenv': 'base'}


-@skipIf(not GITFS_AVAILABLE, "GitFS could not be loaded. Skipping GitFS tests!")
-@skipIf(NO_MOCK, NO_MOCK_REASON)
-class GitFSTest(TestCase, LoaderModuleMockMixin):
-
-    def setup_loader_modules(self):
-        self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
-        return {
-            gitfs: {
-                '__opts__': {
-                    'cachedir': self.tmp_cachedir,
-                    'sock_dir': TMP_SOCK_DIR,
-                    'gitfs_remotes': ['file://' + TMP_REPO_DIR],
-                    'gitfs_root': '',
-                    'fileserver_backend': ['gitfs'],
-                    'gitfs_base': 'master',
-                    'fileserver_events': True,
-                    'transport': 'zeromq',
-                    'gitfs_mountpoint': '',
-                    'gitfs_saltenv': [],
-                    'gitfs_env_whitelist': [],
-                    'gitfs_env_blacklist': [],
-                    'gitfs_saltenv_whitelist': [],
-                    'gitfs_saltenv_blacklist': [],
-                    'gitfs_user': '',
-                    'gitfs_password': '',
-                    'gitfs_insecure_auth': False,
-                    'gitfs_privkey': '',
-                    'gitfs_pubkey': '',
-                    'gitfs_passphrase': '',
-                    'gitfs_refspecs': [
-                        '+refs/heads/*:refs/remotes/origin/*',
-                        '+refs/tags/*:refs/tags/*'
-                    ],
-                    'gitfs_ssl_verify': True,
-                    'gitfs_disable_saltenv_mapping': False,
-                    'gitfs_ref_types': ['branch', 'tag', 'sha'],
-                    'gitfs_update_interval': 60,
-                    '__role': 'master',
-                }
-            }
-        }
-
-    @classmethod
-    def setUpClass(cls):
-        # Clear the instance map so that we make sure to create a new instance
-        # for this test class.
-        try:
-            del salt.utils.gitfs.GitFS.instance_map[tornado.ioloop.IOLoop.current()]
-        except KeyError:
-            pass
-
-        # Create the dir if it doesn't already exist
-        try:
-            shutil.copytree(INTEGRATION_BASE_FILES, TMP_REPO_DIR + '/')
-        except OSError:
-            # We probably caught an error because files already exist. Ignore
-            pass
-
-        try:
-            repo = git.Repo(TMP_REPO_DIR)
-        except git.exc.InvalidGitRepositoryError:
-            repo = git.Repo.init(TMP_REPO_DIR)
-
-        if 'USERNAME' not in os.environ:
-            try:
-                if salt.utils.platform.is_windows():
-                    os.environ['USERNAME'] = salt.utils.win_functions.get_current_user()
-                else:
-                    os.environ['USERNAME'] = pwd.getpwuid(os.geteuid()).pw_name
-            except AttributeError:
-                log.error('Unable to get effective username, falling back to '
-                          '\'root\'.')
-                os.environ['USERNAME'] = 'root'
-
-        repo.index.add([x for x in os.listdir(TMP_REPO_DIR)
-                        if x != '.git'])
-        repo.index.commit('Test')
-
-        # Add another branch with unicode characters in the name
-        repo.create_head(UNICODE_ENVNAME, 'HEAD')
-
-    def setUp(self):
-        '''
-        We don't want to check in another .git dir into GH because that just
-        gets messy. Instead, we'll create a temporary repo on the fly for the
-        tests to examine.
-        '''
-        if not gitfs.__virtual__():
-            self.skipTest("GitFS could not be loaded. Skipping GitFS tests!")
-        self.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
-        gitfs.update()
-
-    def tearDown(self):
-        '''
-        Remove the temporary git repository and gitfs cache directory to ensure
-        a clean environment for each test.
-        '''
-        try:
-            shutil.rmtree(self.tmp_cachedir, onerror=_rmtree_error)
-        except OSError as exc:
-            if exc.errno != errno.EEXIST:
-                raise
+class GitFSTestFuncs(object):
+    '''
+    These are where the tests go, so that they can be run using both GitPython
+    and pygit2.
+
+    NOTE: The gitfs.update() has to happen AFTER the setUp is called. This is
+    because running it inside the setUp will spawn a new singleton, which means
+    that tests which need to mock the __opts__ will be too late; the setUp will
+    have created a new singleton that will bypass our mocking. To ensure that
+    our tests are reliable and correct, we want to make sure that each test
+    uses a new gitfs object, allowing different manipulations of the opts to be
+    tested.
+
+    Therefore, keep the following in mind:
+
+    1. Each test needs to call gitfs.update() *after* any patching, and
+       *before* calling the function being tested.
+    2. Do *NOT* move the gitfs.update() into the setUp.
+    '''
    def test_file_list(self):
+        gitfs.update()
        ret = gitfs.file_list(LOAD)
        self.assertIn('testfile', ret)
        self.assertIn(UNICODE_FILENAME, ret)
@@ -298,11 +255,242 @@ class GitFSTest(TestCase, LoaderModuleMockMixin):
        self.assertIn('/'.join((UNICODE_DIRNAME, 'foo.txt')), ret)

    def test_dir_list(self):
+        gitfs.update()
        ret = gitfs.dir_list(LOAD)
        self.assertIn('grail', ret)
        self.assertIn(UNICODE_DIRNAME, ret)

    def test_envs(self):
+        gitfs.update()
        ret = gitfs.envs(ignore_cache=True)
        self.assertIn('base', ret)
        self.assertIn(UNICODE_ENVNAME, ret)
+        self.assertIn(TAG_NAME, ret)

    def test_ref_types_global(self):
        '''
        Test the global gitfs_ref_types config option
        '''
        with patch.dict(gitfs.__opts__, {'gitfs_ref_types': ['branch']}):
            gitfs.update()
            ret = gitfs.envs(ignore_cache=True)
            # Since we are restricting to branches only, the tag should not
            # appear in the envs list.
            self.assertIn('base', ret)
            self.assertIn(UNICODE_ENVNAME, ret)
            self.assertNotIn(TAG_NAME, ret)

    def test_ref_types_per_remote(self):
        '''
        Test the per_remote ref_types config option, using a different
        ref_types setting than the global test.
        '''
        remotes = [{'file://' + TMP_REPO_DIR: [{'ref_types': ['tag']}]}]
        with patch.dict(gitfs.__opts__, {'gitfs_remotes': remotes}):
            gitfs.update()
            ret = gitfs.envs(ignore_cache=True)
            # Since we are restricting to tags only, the tag should appear in
            # the envs list, but the branches should not.
            self.assertNotIn('base', ret)
            self.assertNotIn(UNICODE_ENVNAME, ret)
            self.assertIn(TAG_NAME, ret)

    def test_disable_saltenv_mapping_global_with_mapping_defined_globally(self):
        '''
        Test the global gitfs_disable_saltenv_mapping config option, combined
        with the per-saltenv mapping being defined in the global gitfs_saltenv
        option.
        '''
        opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
            gitfs_disable_saltenv_mapping: True
            gitfs_saltenv:
              - foo:
                - ref: base
            '''))
        with patch.dict(gitfs.__opts__, opts):
            gitfs.update()
            ret = gitfs.envs(ignore_cache=True)
            # Since we are restricting to tags only, the tag should appear in
            # the envs list, but the branches should not.
            self.assertEqual(ret, ['foo'])

    def test_disable_saltenv_mapping_global_with_mapping_defined_per_remote(self):
        '''
        Test the global gitfs_disable_saltenv_mapping config option, combined
        with the per-saltenv mapping being defined in the remote itself via the
        "saltenv" per-remote option.
        '''
        opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
            gitfs_disable_saltenv_mapping: True
            gitfs_remotes:
              - file://{0}:
                - saltenv:
                  - bar:
                    - ref: base
            '''.format(TMP_REPO_DIR)))
        with patch.dict(gitfs.__opts__, opts):
            gitfs.update()
            ret = gitfs.envs(ignore_cache=True)
            # Since we are restricting to tags only, the tag should appear in
            # the envs list, but the branches should not.
            self.assertEqual(ret, ['bar'])

    def test_disable_saltenv_mapping_per_remote_with_mapping_defined_globally(self):
        '''
        Test the per-remote disable_saltenv_mapping config option, combined
        with the per-saltenv mapping being defined in the global gitfs_saltenv
        option.
        '''
        opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
            gitfs_remotes:
              - file://{0}:
                - disable_saltenv_mapping: True

            gitfs_saltenv:
              - hello:
                - ref: base
            '''.format(TMP_REPO_DIR)))
        with patch.dict(gitfs.__opts__, opts):
            gitfs.update()
            ret = gitfs.envs(ignore_cache=True)
            # Since we are restricting to tags only, the tag should appear in
            # the envs list, but the branches should not.
            self.assertEqual(ret, ['hello'])

    def test_disable_saltenv_mapping_per_remote_with_mapping_defined_per_remote(self):
        '''
        Test the per-remote disable_saltenv_mapping config option, combined
        with the per-saltenv mapping being defined in the remote itself via the
        "saltenv" per-remote option.
        '''
        opts = salt.utils.yaml.safe_load(textwrap.dedent('''\
            gitfs_remotes:
              - file://{0}:
                - disable_saltenv_mapping: True
                - saltenv:
                  - world:
                    - ref: base
            '''.format(TMP_REPO_DIR)))
        with patch.dict(gitfs.__opts__, opts):
            gitfs.update()
            ret = gitfs.envs(ignore_cache=True)
            # Since we are restricting to tags only, the tag should appear in
            # the envs list, but the branches should not.
            self.assertEqual(ret, ['world'])


class GitFSTestBase(object):

    @classmethod
    def setUpClass(cls):
        cls.tmp_cachedir = tempfile.mkdtemp(dir=TMP)
        cls.tmp_sock_dir = tempfile.mkdtemp(dir=TMP)

        try:
            shutil.rmtree(TMP_REPO_DIR)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise
        shutil.copytree(INTEGRATION_BASE_FILES, TMP_REPO_DIR + '/')

        repo = git.Repo.init(TMP_REPO_DIR)

        username_key = str('USERNAME')
        orig_username = os.environ.get(username_key)
        try:
            if username_key not in os.environ:
                try:
                    if salt.utils.platform.is_windows():
                        os.environ[username_key] = \
                            salt.utils.win_functions.get_current_user()
                    else:
                        os.environ[username_key] = \
                            pwd.getpwuid(os.geteuid()).pw_name
                except AttributeError:
                    log.error(
                        'Unable to get effective username, falling back to '
                        '\'root\'.'
                    )
                    os.environ[username_key] = str('root')

            repo.index.add([x for x in os.listdir(TMP_REPO_DIR)
                            if x != '.git'])
            repo.index.commit('Test')

            # Add another branch with unicode characters in the name
            repo.create_head(UNICODE_ENVNAME, 'HEAD')

            # Add a tag
            repo.create_tag(TAG_NAME, 'HEAD')
        finally:
            if orig_username is not None:
                os.environ[username_key] = orig_username
            else:
                os.environ.pop(username_key, None)

    @classmethod
    def tearDownClass(cls):
        '''
        Remove the temporary git repository and gitfs cache directory to ensure
        a clean environment for the other test class(es).
        '''
        for path in (cls.tmp_cachedir, cls.tmp_sock_dir, TMP_REPO_DIR):
            try:
                shutil.rmtree(path, onerror=_rmtree_error)
            except OSError as exc:
                if exc.errno != errno.EEXIST:
                    raise

    def setUp(self):
        '''
        We don't want to check in another .git dir into GH because that just
        gets messy. Instead, we'll create a temporary repo on the fly for the
        tests to examine.

        Also ensure we A) don't re-use the singleton, and B) that the cachedirs
        are cleared. This keeps these performance enhancements from affecting
        the results of subsequent tests.
        '''
        if not gitfs.__virtual__():
            self.skipTest("GitFS could not be loaded. Skipping GitFS tests!")

        _clear_instance_map()
        for subdir in ('gitfs', 'file_lists'):
            try:
                shutil.rmtree(os.path.join(self.tmp_cachedir, subdir))
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise


@skipIf(not HAS_GITPYTHON, 'GitPython >= {0} required'.format(GITPYTHON_MINVER))
@skipIf(NO_MOCK, NO_MOCK_REASON)
class GitPythonTest(GitFSTestBase, GitFSTestFuncs, TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        opts = copy.deepcopy(OPTS)
        opts['cachedir'] = self.tmp_cachedir
        opts['sock_dir'] = self.tmp_sock_dir
        opts['gitfs_provider'] = 'gitpython'
        return {
            gitfs: {
                '__opts__': opts,
            }
        }


@skipIf(not HAS_GITPYTHON, 'GitPython >= {0} required for temp repo setup'.format(GITPYTHON_MINVER))
@skipIf(not HAS_PYGIT2, 'pygit2 >= {0} and libgit2 >= {1} required'.format(PYGIT2_MINVER, LIBGIT2_MINVER))
@skipIf(NO_MOCK, NO_MOCK_REASON)
class Pygit2Test(GitFSTestBase, GitFSTestFuncs, TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        opts = copy.deepcopy(OPTS)
        opts['cachedir'] = self.tmp_cachedir
        opts['sock_dir'] = self.tmp_sock_dir
        opts['gitfs_provider'] = 'pygit2'
        return {
            gitfs: {
                '__opts__': opts,
            }
        }
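A minimal sketch (not part of the commit) distilling the ordering rule from the GitFSTestFuncs NOTE above; it mirrors test_ref_types_global:

def test_some_option(self):
    with patch.dict(gitfs.__opts__, {'gitfs_ref_types': ['branch']}):
        gitfs.update()                        # update AFTER patching ...
        ret = gitfs.envs(ignore_cache=True)   # ... and BEFORE the call under test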
@@ -10,14 +10,17 @@ import sys
import tempfile

# Import Salt Libs
+import salt.utils.files
import salt.utils.platform
import salt.modules.cmdmod as cmdmod
from salt.exceptions import CommandExecutionError
from salt.log import LOG_LEVELS
+from salt.ext.six.moves import builtins  # pylint: disable=import-error

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
+from tests.support.paths import FILES
from tests.support.mock import (
    mock_open,
    Mock,
@@ -33,6 +36,39 @@ MOCK_SHELL_FILE = '# List of acceptable shells\n' \
                  '/bin/bash\n'


class MockTimedProc(object):
    '''
    Class used as a stand-in for salt.utils.timed_subprocess.TimedProc
    '''
    class _Process(object):
        '''
        Used to provide a dummy "process" attribute
        '''
        def __init__(self, returncode=0, pid=12345):
            self.returncode = returncode
            self.pid = pid

    def __init__(self, stdout=None, stderr=None, returncode=0, pid=12345):
        if stdout is not None and not isinstance(stdout, bytes):
            raise TypeError('Must pass stdout to MockTimedProc as bytes')
        if stderr is not None and not isinstance(stderr, bytes):
            raise TypeError('Must pass stderr to MockTimedProc as bytes')
        self._stdout = stdout
        self._stderr = stderr
        self.process = self._Process(returncode=returncode, pid=pid)

    def run(self):
        pass

    @property
    def stdout(self):
        return self._stdout

    @property
    def stderr(self):
        return self._stderr


@skipIf(NO_MOCK, NO_MOCK_REASON)
class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
    '''
@@ -303,3 +339,85 @@ class CMDMODTestCase(TestCase, LoaderModuleMockMixin):
            pass
        else:
            raise RuntimeError

    def test_run_all_binary_replace(self):
        '''
        Test for failed decoding of binary data, for instance when doing
        something silly like using dd to read from /dev/urandom and write to
        /dev/stdout.
        '''
        # Since we're using unicode_literals, read the random bytes from a file
        rand_bytes_file = os.path.join(FILES, 'file', 'base', 'random_bytes')
        with salt.utils.files.fopen(rand_bytes_file, 'rb') as fp_:
            stdout_bytes = fp_.read()

        # stdout with the non-decodable bits replaced with the unicode
        # replacement character U+FFFD.
        stdout_unicode = '\ufffd\x1b\ufffd\ufffd\n'
        stderr_bytes = b'1+0 records in\n1+0 records out\n' \
                       b'4 bytes copied, 9.1522e-05 s, 43.7 kB/s\n'
        stderr_unicode = stderr_bytes.decode()

        proc = MagicMock(
            return_value=MockTimedProc(
                stdout=stdout_bytes,
                stderr=stderr_bytes
            )
        )
        with patch('salt.utils.timed_subprocess.TimedProc', proc):
            ret = cmdmod.run_all(
                'dd if=/dev/urandom of=/dev/stdout bs=4 count=1',
                rstrip=False)

        self.assertEqual(ret['stdout'], stdout_unicode)
        self.assertEqual(ret['stderr'], stderr_unicode)

    def test_run_all_none(self):
        '''
        Tests cases when proc.stdout or proc.stderr are None. These should be
        caught and replaced with empty strings.
        '''
        proc = MagicMock(return_value=MockTimedProc(stdout=None, stderr=None))
        with patch('salt.utils.timed_subprocess.TimedProc', proc):
            ret = cmdmod.run_all('some command', rstrip=False)

        self.assertEqual(ret['stdout'], '')
        self.assertEqual(ret['stderr'], '')

    def test_run_all_unicode(self):
        '''
        Ensure that unicode stdout and stderr are decoded properly
        '''
        stdout_unicode = 'Here is some unicode: спам'
        stderr_unicode = 'Here is some unicode: яйца'
        stdout_bytes = stdout_unicode.encode('utf-8')
        stderr_bytes = stderr_unicode.encode('utf-8')

        proc = MagicMock(
            return_value=MockTimedProc(
                stdout=stdout_bytes,
                stderr=stderr_bytes
            )
        )

        with patch('salt.utils.timed_subprocess.TimedProc', proc), \
                patch.object(builtins, '__salt_system_encoding__', 'utf-8'):
            ret = cmdmod.run_all('some command', rstrip=False)

        self.assertEqual(ret['stdout'], stdout_unicode)
        self.assertEqual(ret['stderr'], stderr_unicode)

    def test_run_all_output_encoding(self):
        '''
        Test that specifying the output encoding works as expected
        '''
        stdout = 'Æ'
        stdout_latin1_enc = stdout.encode('latin1')

        proc = MagicMock(return_value=MockTimedProc(stdout=stdout_latin1_enc))

        with patch('salt.utils.timed_subprocess.TimedProc', proc), \
                patch.object(builtins, '__salt_system_encoding__', 'utf-8'):
            ret = cmdmod.run_all('some command', output_encoding='latin1')

        self.assertEqual(ret['stdout'], stdout)
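A one-line check (not part of the commit) of the decode behavior test_run_all_binary_replace relies on: bytes that are invalid UTF-8 become U+FFFD under errors='replace', while valid bytes such as 0x1b (ESC) survive:

assert b'\xff\x1b'.decode('utf-8', 'replace') == '\ufffd\x1b'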
@@ -18,12 +18,7 @@ from tests.support.mock import (
    NO_MOCK_REASON
)

-try:
-    from salt.modules import kubernetes
-except ImportError:
-    kubernetes = False
-    if not kubernetes.HAS_LIBS:
-        kubernetes = False
+from salt.modules import kubernetes


@contextmanager
@@ -41,8 +36,8 @@ def mock_kubernetes_library():


@skipIf(NO_MOCK, NO_MOCK_REASON)
-@skipIf(kubernetes is False, "Probably Kubernetes client lib is not installed. \
-Skipping test_kubernetes.py")
+@skipIf(not kubernetes.HAS_LIBS, "Kubernetes client lib is not installed. "
+        "Skipping test_kubernetes.py")
class KubernetesTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test cases for salt.modules.kubernetes
Some files were not shown because too many files have changed in this diff.