Merge pull request #41466 from rallytime/merge-develop

[develop] Merge forward from nitrogen to develop
Commit b58b36864c by Nicole Thomas, 2017-05-26 12:45:00 -06:00, committed by GitHub
99 changed files with 1195 additions and 479 deletions

View File

@ -74,7 +74,7 @@ confidence=
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes

View File

@ -71,7 +71,7 @@ confidence=
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes

View File

@ -9,11 +9,6 @@ BUILDDIR = _build
SPHINXLANG =
XELATEX = xelatex
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# ----- Translations Support ------------------------------------------------>
# If language is set, also set translation options
ifeq ($(shell [ "x$(SPHINXLANG)" != "x" ] && echo 0 || echo 1), 0)
@ -36,7 +31,7 @@ ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(TRANSLATIONOPTS
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations
.PHONY: help clean check_sphinx-build html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext translations download-translations
help:
@echo "Please use \`make <target>' where <target> is one of"
@ -69,38 +64,42 @@ clean:
rm -rf $(BUILDDIR)/*
test -d 'locale' && find locale/ -name *.mo -exec rm {} \; || true
html: translations
# User-friendly check for sphinx-build
check_sphinx-build:
@which $(SPHINXBUILD) >/dev/null 2>&1 || (echo "The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)" >&2; false)
html: check_sphinx-build translations
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml: translations
dirhtml: check_sphinx-build translations
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml: translations
singlehtml: check_sphinx-build translations
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle: translations
pickle: check_sphinx-build translations
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json: translations
json: check_sphinx-build translations
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp: translations
htmlhelp: check_sphinx-build translations
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp: translations
qthelp: check_sphinx-build translations
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
@ -109,7 +108,7 @@ qthelp: translations
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Salt.qhc"
devhelp: translations
devhelp: check_sphinx-build translations
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@ -118,31 +117,31 @@ devhelp: translations
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Salt"
@echo "# devhelp"
epub: translations
epub: check_sphinx-build translations
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex: translations
latex: check_sphinx-build translations
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf: translations
latexpdf: check_sphinx-build translations
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja: translations
latexpdfja: check_sphinx-build translations
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
pdf: translations
pdf: check_sphinx-build translations
@if [ "$(XELATEX)" = "xelatex" ] || [ "x$(XELATEX)" = "x" ]; then \
echo "The '$(XELATEX)' command was not found."; \
fi
@ -157,62 +156,62 @@ cheatsheet: translations
cd cheatsheet && xelatex salt.tex && cp salt.pdf ../salt-cheatsheet.pdf
@echo "./salt-cheatsheet.pdf created."
text: translations
text: check_sphinx-build translations
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man: translations
man: check_sphinx-build translations
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo: translations
texinfo: check_sphinx-build translations
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info: translations
info: check_sphinx-build translations
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
gettext: check_sphinx-build
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale"
changes: translations
changes: check_sphinx-build translations
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
spelling:
spelling: check_sphinx-build
$(SPHINXBUILD) -b spelling $(ALLSPHINXOPTS) $(BUILDDIR)/spelling
@echo
@echo "Spell check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/spelling/output.txt."
linkcheck:
linkcheck: check_sphinx-build
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
doctest: check_sphinx-build
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
xml: translations
xml: check_sphinx-build translations
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml: translations
pseudoxml: check_sphinx-build translations
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

View File

@ -26,7 +26,7 @@ class Mock(object):
'''
def __init__(self, mapping=None, *args, **kwargs):
"""
Mapping allows to bypass the Mock object, but actually assign
Mapping allows autodoc to bypass the Mock object, but actually assign
a specific value, expected by a specific attribute returned.
"""
self.__mapping = mapping or {}
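Not part of the diff: a minimal sketch of how a mapping-aware Mock like the one above could behave, so autodoc receives a concrete value for selected attributes instead of yet another Mock. The ``__getattr__`` body and the ``sys.modules`` usage below are illustrative assumptions, not copied from Salt's ``doc/conf.py``.

.. code-block:: python

import sys


class Mock(object):
    '''
    Stand-in used while building docs: unknown attributes return further
    Mock objects, but anything listed in ``mapping`` returns the real
    value so autodoc can render it.
    '''
    def __init__(self, mapping=None, *args, **kwargs):
        self.__mapping = mapping or {}

    def __getattr__(self, name):
        # Return the mapped value if one was provided, otherwise keep mocking
        if name in self.__mapping:
            return self.__mapping[name]
        return Mock()


# Hypothetical usage: pretend a heavy dependency is importable, while
# exposing a real version string for it.
sys.modules['some_heavy_dependency'] = Mock(mapping={'__version__': '1.0.0'})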

View File

@ -28,3 +28,4 @@ Salt Table of Contents
topics/windows/index
topics/development/index
topics/releases/index
topics/venafi/index

View File

@ -260,9 +260,9 @@ service. But restarting the service while in the middle of a state run
interrupts the process of the Minion running states and sending results back to
the Master. A common way to workaround that is to schedule restarting of the
Minion service using :ref:`masterless mode <masterless-quickstart>` after all
other states have been applied. This allows to keep Minion to Master connection
alive for the Minion to report the final results to the Master, while the
service is restarting in the background.
other states have been applied. This allows the minion to keep Minion to Master
connection alive for the Minion to report the final results to the Master, while
the service is restarting in the background.
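Not part of the diff, and only a rough sketch of the idea described above: launch the minion restart out-of-band (here via a plain subprocess, assuming ``salt-call`` is on PATH and the service is named ``salt-minion``) so the in-flight state run keeps its connection to the master long enough to report results. In practice this is scheduled from a state; the helper name below is hypothetical.

.. code-block:: python

import subprocess


def restart_minion_out_of_band():
    '''
    Kick off a masterless (``--local``) restart of the salt-minion service
    in the background, so the current state run can still return its
    results to the master before the service bounces.
    '''
    # Assumes salt-call is installed and on PATH; 'salt-minion' matches
    # most Linux packagings of the service.
    subprocess.Popen(
        ['salt-call', '--local', 'service.restart', 'salt-minion'],
        close_fds=True,
    )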
Upgrade without automatic restart
*********************************

View File

@ -760,7 +760,7 @@ Extended Changelog Courtesy of Todd Stansell (https://github.com/tjstansell/salt
- **PR** `#22925`_: (*rallytime*) Backport `#22895`_ to 2014.7
| refs: `#23113`_
- **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a succesfull login
- **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a successful login
| refs: `#22925`_
* dfe2066 Merge pull request `#23113`_ from saltstack/revert-22925-`bp-22895`_
* b957ea8 Revert "Backport `#22895`_ to 2014.7"
@ -921,7 +921,7 @@ Extended Changelog Courtesy of Todd Stansell (https://github.com/tjstansell/salt
| refs: `#23113`_
@ *2015-04-22T02:30:26Z*
- **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a succesfull login
- **PR** `#22895`_: (*aletourneau*) pam_tally counter was not reset to 0 after a successful login
| refs: `#22925`_
* 6890752 Merge pull request `#22925`_ from rallytime/`bp-22895`_
* 3852d96 Pylint fix
@ -930,7 +930,7 @@ Extended Changelog Courtesy of Todd Stansell (https://github.com/tjstansell/salt
* 5ebf159 Cleaned up pull request
* a08ac47 pam_tally counter was not reset to 0 after a succesfull login
* a08ac47 pam_tally counter was not reset to 0 after a successful login
- **PR** `#22914`_: (*cachedout*) Call proper returner function in jobs.list_jobs
@ *2015-04-22T00:49:01Z*

View File

@ -403,7 +403,7 @@ Changes:
- **PR** `#29708`_: (*lagesag*) Fix test=True for file.directory with recurse ignore_files/ignore_dirs.
- **PR** `#29642`_: (*cachedout*) Correctly restart deamonized minions on failure
- **PR** `#29642`_: (*cachedout*) Correctly restart daemonized minions on failure
- **PR** `#29599`_: (*cachedout*) Clean up minion shutdown

View File

@ -264,7 +264,7 @@ Runner Changes
``salt://_utils/``) are now able to be synced to the master, making it easier
to use them in custom runners. A :py:mod:`saltutil.sync_utils
<salt.runners.saltutil.sync_utils>` function has been added to the
:py:mod:`saltutil runner <salt.runners.saltutil>` to faciliate the syncing of
:py:mod:`saltutil runner <salt.runners.saltutil>` to facilitate the syncing of
utility modules to the master.
Pillar Changes
@ -291,7 +291,7 @@ Junos Module Changes
Network Automation: NAPALM
==========================
Beginning with 2016.11.0, network automation is inclued by default in the core
Beginning with 2016.11.0, network automation is included by default in the core
of Salt. It is based on a the `NAPALM <https://github.com/napalm-automation/napalm>`_
library and provides facilities to manage the configuration and retrieve data
from network devices running widely used operating systems such: JunOS, IOS-XR,
@ -720,7 +720,7 @@ Runner Module Deprecations
- The ``fileserver`` runner no longer accepts the ``outputter`` keyword argument. Users will
need to specify an outputter using the ``--out`` option.
- The ``jobs`` runner no longer accepts the ``ouputter`` keyword argument. Users will need to
- The ``jobs`` runner no longer accepts the ``outputter`` keyword argument. Users will need to
specify an outputter using the ``--out`` option.
- ``virt`` runner module:

View File

@ -873,7 +873,7 @@ Changes:
- **PR** `#37827`_: (*silenius*) add missing chloginclass
- **PR** `#37826`_: (*rallytime*) Update branch refs to more relevant branch
- **PR** `#37823`_: (*rallytime*) Add "names" option to file state docs: point users to highstate doc examples
- **PR** `#37822`_: (*laleocen*) add documenation for multiline encryption using nacl
- **PR** `#37822`_: (*laleocen*) add documentation for multiline encryption using nacl
| refs: `#37826`_
- **PR** `#37821`_: (*rallytime*) Clarify keystone.user_present password state docs with default behavior
- **PR** `#37820`_: (*rallytime*) Add some dependency documentation to libvirt docs

View File

@ -189,9 +189,9 @@ Changes:
* fd2ee7d Add some simple unit tests for salt.config.api_config function
* 3d2fefc Make sure the pidfile and log_file values are overriden by api opts
* 3d2fefc Make sure the pidfile and log_file values are overridden by api opts
* 1f6b540 Make sure the pidfile and log_file values are overriden by api opts
* 1f6b540 Make sure the pidfile and log_file values are overridden by api opts
* 04d307f salt-api no longer forces the default timeout
@ -844,7 +844,7 @@ Changes:
* 68d5475 Fixing Snapper unit tests for SUBVOLUME support
* e9919a9 Removing posible double '/' from the file paths
* e9919a9 Removing possible double '/' from the file paths
* 8b4f87f Updating and fixing the documentation

View File

@ -367,7 +367,7 @@ Changes:
* 5244041 Merge pull request `#39221`_ from lvg01/fix-bug-39220
* e8a41d6 Removes to early content stripping (stripping is allready done when needed with ident:true), fixes `#39220`_
* e8a41d6 Removes to early content stripping (stripping is already done when needed with ident:true), fixes `#39220`_
* a4b169e Fixed wrong logic, fixes `#39220`_
@ -482,7 +482,7 @@ Changes:
- **PR** `#39276`_: (*gtmanfred*) _device_mismatch_ignored will never be True
@ *2017-02-09T17:05:28Z*
- **ISSUE** `#39269`_: (*alexharrington*) Remount forced with lizardfs fuse filesystem due to device missmatch
- **ISSUE** `#39269`_: (*alexharrington*) Remount forced with lizardfs fuse filesystem due to device mismatch
| refs: `#39276`_
- **ISSUE** `#39106`_: (*carsten-AEI*) CVMFS fuse mount gets remounted every time
| refs: `#39276`_
@ -688,7 +688,7 @@ Changes:
- **ISSUE** `#1`_: (*thatch45*) Enable regex on the salt cli
- **PR** `#39146`_: (*gtmanfred*) update vmware getting started doc
- **PR** `#39145`_: (*garethgreenaway*) [2016.3] Fix when targeting via pillar with Salt syndic
- **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and inteface for master, closes `#39118`_
- **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and interface for master, closes `#39118`_
- **PR** `#39116`_: (*terminalmage*) Don't abort pillar.get with merge=True if default is None
- **PR** `#39077`_: (*terminalmage*) Apply fix from `#38705`_ to 2016.3 branch
- **PR** `#38804`_: (*alexbleotu*) Second attempt to fix prepending of root_dir to paths
@ -717,7 +717,7 @@ Changes:
* 97521b3 Second attempt to fix prepending of root_dir to paths
* 6ffeda3 Clarify ipv6 option for minion and inteface for master, closes `#39118`_ (`#39131`_)
* 6ffeda3 Clarify ipv6 option for minion and interface for master, closes `#39118`_ (`#39131`_)
* 646b9ea Don't abort pillar.get with merge=True if default is None (`#39116`_)
@ -978,7 +978,7 @@ Changes:
- **PR** `#39039`_: (*rallytime*) Update 2016.11.2 release notes
* a7fc02e Ungate the status.py module and raise unsupported errors in functions not executeable on Windows. (`#39005`_)
* a7fc02e Ungate the status.py module and raise unsupported errors in functions not executable on Windows. (`#39005`_)
- **PR** `#39005`_: (*cro*) Ungate the status.py module and raise unsupported errors in functions not executable on Windows.
| refs: `#39536`_
@ -1214,7 +1214,7 @@ Changes:
| refs: `#38875`_
- **PR** `#38890`_: (*cro*) Backport `#38887`_ to 2016.3: Enable resetting a VM via salt-cloud & VMware driver
- **PR** `#38883`_: (*techhat*) Don't require text_out path to exist
- **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistant
- **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistent
- **PR** `#38867`_: (*mchugh19*) Touch deploy.sh before use
| refs: `#38883`_
- **PR** `#38851`_: (*terminalmage*) Support docker-py 2.0 in dockerng
@ -1237,7 +1237,7 @@ Changes:
* fbc4d2a reactor: ensure glob_ref is a string
* 2e443d7 cp.cache_file: add note re: return for nonexistant salt:// path
* 2e443d7 cp.cache_file: add note re: return for nonexistent salt:// path
* e9ebec4 Merge pull request `#38890`_ from cro/vmware_reset_vm_20163

View File

@ -832,7 +832,7 @@ Changes:
* 2febd05 Merge pull request `#40372`_ from zer0def/pip-cache-fixes
* d68067f Merge remote-tracking branch 'main/2016.11' into pip-cache-fixes
* 4f23a23 Fixed the `test_install_download_cache_argument_in_resulting_command` to accomodate introduced cache directory argument fixes and renamed it to `test_install_download_cache_dir_arguments_in_resulting_command`.
* 4f23a23 Fixed the `test_install_download_cache_argument_in_resulting_command` to accommodate introduced cache directory argument fixes and renamed it to `test_install_download_cache_dir_arguments_in_resulting_command`.
* 9d0f94e Fixed unnecessary API changes introduced with suggested changes.
@ -1345,7 +1345,7 @@ Changes:
@ *2017-03-17T15:17:08Z*
- **PR** `#40090`_: (*rallytime*) Back-port `#40056`_ to 2016.3
- **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistant user
- **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistent user
- **PR** `#40057`_: (*cachedout*) More mentionbot blacklists
- **PR** `#40056`_: (*thatch45*) update mention bot blacklist
| refs: `#40090`_
@ -1354,7 +1354,7 @@ Changes:
* 116201f Merge pull request `#40059`_ from terminalmage/fix-virtualenv-traceback
* e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistant user
* e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistent user
* a01b52b Merge pull request `#40090`_ from rallytime/`bp-40056`_
@ -1386,7 +1386,7 @@ Changes:
- **PR** `#40053`_: (*gtmanfred*) Update rh_ip.py
- **PR** `#40041`_: (*terminalmage*) Fix transposed lines in salt.utils.process
- **PR** `#40038`_: (*velom*) correctly parse "pkg_name===version" from pip freeze
- **PR** `#40018`_: (*meaksh*) Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
- **PR** `#40018`_: (*meaksh*) Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
| refs: `#40072`_
* b12720a Merge pull request `#40088`_ from rallytime/merge-2016.11
* 626bd03 Merge branch '2016.3' into '2016.11'
@ -1397,9 +1397,9 @@ Changes:
* 8dcffc7 Merge pull request `#40018`_ from meaksh/2016.3-handling-timeouts-for-manage.up-runner
* 9f5c3b7 Allows to set custom timeouts for 'manage.up' and 'manage.status'
* 9f5c3b7 Allow setting custom timeouts for 'manage.up' and 'manage.status'
* 2102d9c Allows to set 'timeout' and 'gather_job_timeout' via kwargs
* 2102d9c Allow setting 'timeout' and 'gather_job_timeout' via kwargs
* 22fc529 Merge pull request `#40038`_ from velom/fix-pip-freeze-parsing
@ -1419,15 +1419,15 @@ Changes:
* 703ab23 Merge pull request `#40055`_ from rallytime/doc-build-warnings
* 72d16c9 Update "yaml" code-block references with "jinja" where needed
- **PR** `#40072`_: (*meaksh*) [2016.11] Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
- **PR** `#40072`_: (*meaksh*) [2016.11] Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
@ *2017-03-16T15:31:46Z*
- **PR** `#40018`_: (*meaksh*) Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
- **PR** `#40018`_: (*meaksh*) Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
| refs: `#40072`_
* e73a1d0 Merge pull request `#40072`_ from meaksh/2016.11-handling-timeouts-for-manage.up-runner
* 40246d3 Allows to set custom timeouts for 'manage.up' and 'manage.status'
* 40246d3 Allow setting custom timeouts for 'manage.up' and 'manage.status'
* ad232fd Allows to set 'timeout' and 'gather_job_timeout' via kwargs
* ad232fd Allow setting 'timeout' and 'gather_job_timeout' via kwargs
- **PR** `#40045`_: (*terminalmage*) Fix error when chhome is invoked by user.present state in Windows
@ *2017-03-15T19:00:41Z*
@ -1458,7 +1458,7 @@ Changes:
- **PR** `#40016`_: (*terminalmage*) Attempt to fix failing grains tests in 2016.3
- **PR** `#39994`_: (*rallytime*) Add a versionadded tag for dockerng ulimits addition
- **PR** `#39988`_: (*terminalmage*) Add comment explaining change from `#39973`_
- **PR** `#39980`_: (*vutny*) [2016.3] Allow to use `bg` kwarg for `cmd.run` state function
- **PR** `#39980`_: (*vutny*) [2016.3] Allow using `bg` kwarg for `cmd.run` state function
- **PR** `#39973`_: (*terminalmage*) Don't use docker.Client instance from context if missing attributes
* 277bd17 Merge pull request `#40025`_ from rallytime/merge-2016.11
* 029f28b Merge branch '2016.3' into '2016.11'
@ -1475,7 +1475,7 @@ Changes:
* 0c61d06 Merge pull request `#39980`_ from vutny/cmd-run-state-bg
* a81dc9d [2016.3] Allow to use `bg` kwarg for `cmd.run` state function
* a81dc9d [2016.3] Allow using `bg` kwarg for `cmd.run` state function
* b042484 Merge pull request `#39994`_ from rallytime/ulimits-dockerng-version
@ -1834,7 +1834,7 @@ Changes:
* 9f70ad7 Merge pull request `#39472`_ from whiteinge/_reformat_low-update
* d11f538 Add RunnerClient test for old/new-style arg/kwarg parsing
* ec377ab Reenable skipped RunnerClient tests
* ec377ab Re-enable skipped RunnerClient tests
* 27f7fd9 Update _reformat_low to run arg through parse_input
@ -2022,13 +2022,13 @@ Changes:
* e63cbba Merge pull request `#39653`_ from cachedout/26_odict
* 91eb721 Use salt's ordereddict for comparison
- **PR** `#39609`_: (*gtmanfred*) intialize the Client stuff in FSClient
- **PR** `#39609`_: (*gtmanfred*) initialize the Client stuff in FSClient
@ *2017-02-24T18:50:55Z*
- **ISSUE** `#38836`_: (*toanctruong*) file.managed with S3 Source errors out with obscure message
| refs: `#39589`_ `#39609`_
* 0bc6027 Merge pull request `#39609`_ from gtmanfred/2016.11
* 0820620 intialize the Client stuff in FSClient
* 0820620 initialize the Client stuff in FSClient
- **PR** `#39615`_: (*skizunov*) Bonjour/Avahi beacons: Make sure TXT record length is valid
@ *2017-02-24T18:47:05Z*

View File

@ -30,7 +30,7 @@ Backwards-incompatible Changes
It has been moved one directory down, into the master cachedir. On most
platforms, this is ``/var/cache/salt/master/extmods``. Most users won't have
to worry about this, but those who have been manually placing custom runners
into ``/var/cache/salt/extmods/runners``, or ouputters into
into ``/var/cache/salt/extmods/runners``, or outputters into
``/var/cache/salt/extmods/output``, etc. will be affected by this. To
transition, it is recommended not to simply move the extmods directory into
``/var/cache/salt/master``, but to copy the custom modules into the salt

View File

@ -149,9 +149,9 @@ Changes:
* fd2ee7d Add some simple unit tests for salt.config.api_config function
* 3d2fefc Make sure the pidfile and log_file values are overriden by api opts
* 3d2fefc Make sure the pidfile and log_file values are overridden by api opts
* 1f6b540 Make sure the pidfile and log_file values are overriden by api opts
* 1f6b540 Make sure the pidfile and log_file values are overridden by api opts
* 04d307f salt-api no longer forces the default timeout
@ -1046,7 +1046,7 @@ Changes:
* 0e74bad Update branch refs to more relevant branch (`#37826`_)
- **PR** `#37826`_: (*rallytime*) Update branch refs to more relevant branch
- **PR** `#37822`_: (*laleocen*) add documenation for multiline encryption using nacl
- **PR** `#37822`_: (*laleocen*) add documentation for multiline encryption using nacl
| refs: `#37826`_
* 6a9b49c Add "names" option to file state docs: point users to highstate doc examples (`#37823`_)

View File

@ -127,11 +127,11 @@ Changes:
* 35ddb79 Merge pull request `#40141`_ from bobrik/fallback-resolve
* af1545d Use the first address if cannot connect to any
- **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistant user
- **PR** `#40059`_: (*terminalmage*) Fix traceback when virtualenv.managed is invoked with nonexistent user
@ *2017-03-16T20:46:43Z*
* 116201f Merge pull request `#40059`_ from terminalmage/fix-virtualenv-traceback
* e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistant user
* e3cfd29 Fix traceback when virtualenv.managed is invoked with nonexistent user
- **PR** `#40090`_: (*rallytime*) Back-port `#40056`_ to 2016.3
@ *2017-03-16T19:42:58Z*
@ -153,13 +153,13 @@ Changes:
* d36bdb1 Merge pull request `#40070`_ from Ch3LL/2016.3.6_release
* a1f8b49 update 2016.3.6 release notes with additional PR's
- **PR** `#40018`_: (*meaksh*) Allows overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
- **PR** `#40018`_: (*meaksh*) Allow overriding 'timeout' and 'gather_job_timeout' to 'manage.up' runner call
@ *2017-03-15T19:43:01Z*
* 8dcffc7 Merge pull request `#40018`_ from meaksh/2016.3-handling-timeouts-for-manage.up-runner
* 9f5c3b7 Allows to set custom timeouts for 'manage.up' and 'manage.status'
* 9f5c3b7 Allow setting custom timeouts for 'manage.up' and 'manage.status'
* 2102d9c Allows to set 'timeout' and 'gather_job_timeout' via kwargs
* 2102d9c Allow setting 'timeout' and 'gather_job_timeout' via kwargs
- **PR** `#40038`_: (*velom*) correctly parse "pkg_name===version" from pip freeze
@ *2017-03-15T19:30:03Z*
@ -197,11 +197,11 @@ Changes:
* 5d84b40 Attempt to fix failing grains tests in 2016.3
- **PR** `#39980`_: (*vutny*) [2016.3] Allow to use `bg` kwarg for `cmd.run` state function
- **PR** `#39980`_: (*vutny*) [2016.3] Allow using `bg` kwarg for `cmd.run` state function
@ *2017-03-14T17:16:14Z*
* 0c61d06 Merge pull request `#39980`_ from vutny/cmd-run-state-bg
* a81dc9d [2016.3] Allow to use `bg` kwarg for `cmd.run` state function
* a81dc9d [2016.3] Allow using `bg` kwarg for `cmd.run` state function
- **PR** `#39994`_: (*rallytime*) Add a versionadded tag for dockerng ulimits addition
@ *2017-03-13T20:58:02Z*
@ -658,7 +658,7 @@ Changes:
- **ISSUE** `#39220`_: (*lvg01*) state file.line skips leading spaces in content with mode:ensure and indent:False
| refs: `#39221`_ `#39221`_ `#39221`_ `#39221`_
* 5244041 Merge pull request `#39221`_ from lvg01/fix-bug-39220
* e8a41d6 Removes to early content stripping (stripping is allready done when needed with ident:true), fixes `#39220`_
* e8a41d6 Removes to early content stripping (stripping is already done when needed with ident:true), fixes `#39220`_
* a4b169e Fixed wrong logic, fixes `#39220`_
@ -807,11 +807,11 @@ Changes:
* 97521b3 Second attempt to fix prepending of root_dir to paths
* 6ffeda3 Clarify ipv6 option for minion and inteface for master, closes `#39118`_ (`#39131`_)
* 6ffeda3 Clarify ipv6 option for minion and interface for master, closes `#39118`_ (`#39131`_)
- **ISSUE** `#39118`_: (*bobrik*) Minion ipv6 option is not documented
| refs: `#39289`_
- **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and inteface for master, closes `#39118`_
- **PR** `#39131`_: (*bobrik*) Clarify ipv6 option for minion and interface for master, closes `#39118`_
* 646b9ea Don't abort pillar.get with merge=True if default is None (`#39116`_)
@ -1013,7 +1013,7 @@ Changes:
* e40fac5 Catch MinionError in file.source_list
- **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistant
- **PR** `#38875`_: (*terminalmage*) Reactor: fix traceback when salt:// path is nonexistent
@ *2017-01-24T15:23:39Z*
- **ISSUE** `#36121`_: (*Ashald*) TemplateNotFound/Unable to cache file
@ -1021,7 +1021,7 @@ Changes:
* b5df104 Merge pull request `#38875`_ from terminalmage/issue36121
* fbc4d2a reactor: ensure glob_ref is a string
* 2e443d7 cp.cache_file: add note re: return for nonexistant salt:// path
* 2e443d7 cp.cache_file: add note re: return for nonexistent salt:// path
- **PR** `#38890`_: (*cro*) Backport `#38887`_ to 2016.3: Enable resetting a VM via salt-cloud & VMware driver
@ *2017-01-24T15:15:35Z*

View File

@ -98,7 +98,7 @@ a new value using a command like:
Deleting values (if supported by the driver) is done pretty much the same way as
getting them. Provided that you have a profile called ``mykvstore`` that uses
a driver allowing to delete values you would delete a value as shown bellow:
a driver allowing to delete values you would delete a value as shown below:
.. code-block:: bash

View File

@ -66,7 +66,7 @@ Specific options can be sent to the minion also, as defined in the Python
.. note::
While setting the ssl_version is not required, we recomend it. Some older
While setting the ssl_version is not required, we recommend it. Some older
versions of python do not support the latest TLS protocol and if this is
the case for your version of python we strongly recommend upgrading your
version of Python.

View File

@ -129,7 +129,7 @@ Request a new certificate. Analogous to:
.. code-block:: bash
salt-run venafi.gen_csr minion.example.com minion.example.com country=US \
salt-run venafi.request minion.example.com minion.example.com country=US \
state=California loc=Sacramento org=CompanyName org_unit=DevOps \
zone=Internet password=SecretSauce

View File

@ -1,5 +1,6 @@
[Unit]
Description=The Salt API
Documentation=man:salt-api(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html
After=network.target
[Service]

View File

@ -1,5 +1,6 @@
[Unit]
Description=The Salt Master Server
Documentation=man:salt-master(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html
After=network.target
[Service]

View File

@ -1,5 +1,6 @@
[Unit]
Description=The Salt Minion
Documentation=man:salt-minion(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html
After=network.target salt-master.service
[Service]

View File

@ -1,5 +1,6 @@
[Unit]
Description=salt-proxy service for %I
Documentation=man:salt-proxy(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html
After=network.target
[Service]

View File

@ -1,5 +1,6 @@
[Unit]
Description=The Salt Master Server
Documentation=man:salt-syndic(1) file:///usr/share/doc/salt/html/contents.html https://docs.saltstack.com/en/latest/contents.html
After=network.target
[Service]

View File

@ -130,7 +130,7 @@ Wed Oct 7 19:15:54 UTC 2015 - mrueckert@suse.de
2. the only part of the package which would really benefit from
it would be the doc package. but given we only install the
files via %doc, we can not use it for that either.
- reenable completions on distros newer than sle11
- re-enable completions on distros newer than sle11
- do not use _datarootdir, use _datadir instead.
-------------------------------------------------------------------

View File

@ -1,11 +0,0 @@
@ echo off
:: Script for invoking Salt Main
:: Accepts all parameters that Salt Main accepts
:: Define Variables
Set SaltDir=%~dp0
Set SaltDir=%SaltDir:~0,-1%
Set Python=%SaltDir%\bin\python.exe
Set Script=%SaltDir%\bin\Scripts\salt
"%Python%" "%Script%" %*

View File

@ -50,8 +50,17 @@ goto eof
:RemovePython3
echo %0 :: Uninstalling Python 3 ...
echo ---------------------------------------------------------------------
echo %0 :: - 3.5.3
"%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}\python-3.5.3-amd64.exe" /uninstall
:: 64 bit
if exist "%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}" (
echo %0 :: - 3.5.3 64bit
"%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}\python-3.5.3-amd64.exe" /uninstall
)
:: 32 bit
if exist "%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}" (
echo %0 :: - 3.5.3 32bit
"%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}\python-3.5.3" /uninstall
)
rem wipe the Python directory
echo %0 :: Removing the C:\Program Files\Python35 Directory ...

View File

@ -82,7 +82,6 @@ Function Get-Settings {
"Libeay" = "libeay32.dll"
"SSLeay" = "ssleay32.dll"
"OpenSSLLic" = "OpenSSL_License.txt"
"libsodium" = "libsodium.dll"
"msvcr" = "msvcr120.dll"
}
$ini.Add("64bitDLLs", $64bitDLLs)
@ -92,7 +91,6 @@ Function Get-Settings {
"Libeay" = "libeay32.dll"
"SSLeay" = "ssleay32.dll"
"OpenSSLLic" = "OpenSSL_License.txt"
"libsodium" = "libsodium.dll"
"msvcr" = "msvcr120.dll"
}
$ini.Add("32bitDLLs", $32bitDLLs)

View File

@ -9,11 +9,9 @@ futures==3.1.1
gitdb==0.6.4
GitPython==2.1.3
idna==2.5
ioflo==1.6.7
ioloop==0.1a0
ipaddress==1.0.18
Jinja2==2.9.6
libnacl==1.5.0
Mako==1.0.6
MarkupSafe==1.0
msgpack-python==0.4.8

View File

@ -91,7 +91,7 @@ def beacon(config):
'''
Broadcast values via zeroconf
If the announced values are static, it is adviced to set run_once: True
If the announced values are static, it is advised to set run_once: True
(do not poll) on the beacon configuration.
The following are required configuration settings:

View File

@ -84,7 +84,7 @@ def beacon(config):
'''
Broadcast values via zeroconf
If the announced values are static, it is adviced to set run_once: True
If the announced values are static, it is advised to set run_once: True
(do not poll) on the beacon configuration.
The following are required configuration settings:

View File

@ -538,7 +538,7 @@ class AsyncClientMixin(object):
# if this is a ret, we have our own set of rules
if suffix == 'ret':
# Check if ouputter was passed in the return data. If this is the case,
# Check if outputter was passed in the return data. If this is the case,
# then the return data will be a dict two keys: 'data' and 'outputter'
if isinstance(event.get('return'), dict) \
and set(event['return']) == set(('data', 'outputter')):
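For illustration only (the values are made up): the shape of a job return event that the check above matches, where the publisher bundled an explicit outputter alongside the data.

.. code-block:: python

# Hypothetical event payload; only the two-key 'return' dict mirrors the
# structure the isinstance/set check above is looking for.
event = {
    'return': {
        'data': {'minion1': True},
        'outputter': 'nested',
    },
}

if isinstance(event.get('return'), dict) \
        and set(event['return']) == set(('data', 'outputter')):
    print(event['return']['outputter'])   # -> nested
    print(event['return']['data'])        # -> {'minion1': True}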

View File

@ -4397,7 +4397,7 @@ def delete_keypair(kwargs=None, call=None):
return False
params = {'Action': 'DeleteKeyPair',
'KeyName.1': kwargs['keyname']}
'KeyName': kwargs['keyname']}
data = aws.query(params,
return_url=True,

View File

@ -2657,7 +2657,7 @@ def create(vm_):
log.info("Creating {0} from {1}({2})".format(vm_['name'], clone_type, vm_['clonefrom']))
if datastore and not datastore_ref and datastore_cluster_ref:
# datastore cluster has been specified so apply Storage DRS recomendations
# datastore cluster has been specified so apply Storage DRS recommendations
pod_spec = vim.storageDrs.PodSelectionSpec(storagePod=datastore_cluster_ref)
storage_spec = vim.storageDrs.StoragePlacementSpec(

File diff suppressed because it is too large.

View File

@ -2452,7 +2452,7 @@ def cloud_config(path, env_var='SALT_CLOUD_CONFIG', defaults=None,
elif master_config_path is not None and master_config is None:
master_config = salt.config.master_config(master_config_path)
# cloud config has a seperate cachedir
# cloud config has a separate cachedir
del master_config['cachedir']
# 2nd - salt-cloud configuration which was loaded before so we could

View File

@ -65,7 +65,7 @@ def worker_fork(self):
class Worker(multiprocessing.Process):
'''
Create an ioflo worker in a seperate process
Create an ioflo worker in a separate process
'''
def __init__(self, opts, windex, worker_verify, access_keys, mkey, aes):
super(Worker, self).__init__()

View File

@ -14,25 +14,25 @@ keys make the engine interactive.
.. code-block:: yaml
engines:
  hipchat:
    api_url: http://api.hipchat.myteam.com
    token: 'XXXXXX'
    room: 'salt'
    control: True
    valid_users:
      - SomeUser
    valid_commands:
      - test.ping
      - cmd.run
      - list_jobs
      - list_commands
    aliases:
      list_jobs:
        cmd: jobs.list_jobs
      list_commands:
        cmd: pillar.get salt:engines:hipchat:valid_commands target=saltmaster tgt_type=list
    max_rooms: 0
    wait_time: 1
  - hipchat:
      api_url: http://api.hipchat.myteam.com
      token: 'XXXXXX'
      room: 'salt'
      control: True
      valid_users:
        - SomeUser
      valid_commands:
        - test.ping
        - cmd.run
        - list_jobs
        - list_commands
      aliases:
        list_jobs:
          cmd: jobs.list_jobs
        list_commands:
          cmd: pillar.get salt:engines:hipchat:valid_commands target=saltmaster tgt_type=list
      max_rooms: 0
      wait_time: 1
'''
from __future__ import absolute_import
@ -53,6 +53,8 @@ import salt.utils.files
import salt.runner
import salt.client
import salt.loader
import salt.output
import salt.ext.six as six
def __virtual__():
@ -65,7 +67,7 @@ _DEFAULT_SLEEP = 5
_DEFAULT_MAX_ROOMS = 1000
def _publish_file(token, room, filepath, message='', api_url=None):
def _publish_file(token, room, filepath, message='', outputter=None, api_url=None):
""" Send file to a HipChat room via API version 2
Parameters
----------
@ -110,6 +112,68 @@ Content-Disposition: attachment; name="file"; filename="{1}"
salt.utils.http.query(url, method='POST', header_dict=headers, data=payload)
def _publish_html_message(token, room, data, message='', outputter='nested', api_url=None):
'''
Publishes the HTML-formatted message.
'''
url = "{0}/v2/room/{1}/notification".format(api_url, room)
headers = {
'Content-type': 'text/plain'
}
headers['Authorization'] = 'Bearer ' + token
salt.utils.http.query(
url,
'POST',
data=message,
decode=True,
status=True,
header_dict=headers,
opts=__opts__,
)
headers['Content-type'] = 'text/html'
message = salt.output.html_format(data, outputter, opts=__opts__)
salt.utils.http.query(
url,
'POST',
data=message,
decode=True,
status=True,
header_dict=headers,
opts=__opts__,
)
def _publish_code_message(token, room, data, message='', outputter='nested', api_url=None):
'''
Publishes the output format as code.
'''
url = "{0}/v2/room/{1}/notification".format(api_url, room)
headers = {
'Content-type': 'text/plain'
}
headers['Authorization'] = 'Bearer ' + token
salt.utils.http.query(
url,
'POST',
data=message,
decode=True,
status=True,
header_dict=headers,
opts=__opts__,
)
message = '/code '
message += salt.output.string_format(data, outputter, opts=__opts__)
salt.utils.http.query(
url,
'POST',
data=message,
decode=True,
status=True,
header_dict=headers,
opts=__opts__,
)
def start(token,
room='salt',
aliases=None,
@ -121,9 +185,74 @@ def start(token,
api_key=None,
api_url=None,
max_rooms=None,
wait_time=None):
wait_time=None,
output_type='file',
outputter='nested'):
'''
Listen to Hipchat messages and forward them to Salt
Listen to Hipchat messages and forward them to Salt.
token
The HipChat API key. It requires a key for global usgae,
assigned per user, rather than room.
room
The HipChat room name.
aliases
Define custom aliases.
valid_users
Restrict access only to certain users.
valid_commands
Restrict the execution to a limited set of commands.
control
Send commands to the master.
trigger: ``!``
Special character that triggers the execution of salt commands.
tag: ``salt/engines/hipchat/incoming``
The event tag on the Salt bus.
api_url: ``https://api.hipchat.com``
The URL to the HipChat API.
.. versionadded:: Nitrogen
max_rooms: ``1000``
Maximum number of rooms allowed to fetch. If set to 0,
it is able to retrieve the entire list of rooms.
wait_time: ``5``
Maximum wait time, in seconds.
output_type: ``file``
The type of the output. Choose bewteen:
- ``file``: save the output into a temporary file and upload
- ``html``: send the output as HTML
- ``code``: send the output as code
This can be overriden when executing a command, using the ``--out-type`` argument.
.. versionadded:: Nitrogen
outputter: ``nested``
The format to display the data, using the outputters available on the CLI.
This argument can also be overriden when executing a command, using the ``--out`` option.
.. versionadded:: Nitrogen
HipChat Example:
.. code-block:: text
! test.ping
! test.ping target=minion1
! test.ping --out=nested
! test.ping --out-type=code --out=table
'''
target_room = None
@ -242,6 +371,16 @@ def start(token,
tgt_type = kwargs['tgt_type']
del kwargs['tgt_type']
# Check for outputter. Otherwise assume nested
if '--out' in kwargs:
outputter = kwargs['--out']
del kwargs['--out']
# Check for outputter. Otherwise assume nested
if '--out-type' in kwargs:
output_type = kwargs['--out-type']
del kwargs['--out-type']
# Ensure the command is allowed
if valid_commands:
if cmd not in valid_commands:
@ -258,10 +397,22 @@ def start(token,
local = salt.client.LocalClient()
ret = local.cmd('{0}'.format(target), cmd, args, kwargs, tgt_type='{0}'.format(tgt_type))
tmp_path_fn = salt.utils.files.mkstemp()
with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
fp_.write(json.dumps(ret, sort_keys=True, indent=4))
message_string = '@{0} Results for: {1} {2} {3} on {4}'.format(partner, cmd, args, kwargs, target)
_publish_file(token, room, tmp_path_fn, message=message_string, api_url=api_url)
salt.utils.safe_rm(tmp_path_fn)
nice_args = (' ' + ' '.join(args)) if args else ''
nice_kwargs = (' ' + ' '.join('{0}={1}'.format(key, value) for (key, value) in six.iteritems(kwargs))) \
if kwargs else ''
message_string = '@{0} Results for: {1}{2}{3} on {4}'.format(partner,
cmd,
nice_args,
nice_kwargs,
target)
if output_type == 'html':
_publish_html_message(token, room, ret, message=message_string, outputter=outputter, api_url=api_url)
elif output_type == 'code':
_publish_code_message(token, room, ret, message=message_string, outputter=outputter, api_url=api_url)
else:
tmp_path_fn = salt.utils.files.mkstemp()
with salt.utils.fopen(tmp_path_fn, 'w+') as fp_:
fp_.write(json.dumps(ret, sort_keys=True, indent=4))
_publish_file(token, room, tmp_path_fn, message=message_string, api_url=api_url)
salt.utils.safe_rm(tmp_path_fn)
time.sleep(wait_time or _DEFAULT_SLEEP)

View File

@ -281,7 +281,7 @@ def renew_by(name, window=None):
def needs_renewal(name, window=None):
'''
Check if a certicate needs renewal
Check if a certificate needs renewal
:param name: CommonName of cert
:param window: Window in days to renew earlier or True/force to just return True

View File

@ -994,7 +994,7 @@ def unzip(zip_file,
extract_perms : True
The Python zipfile_ module does not extract file/directory attributes
by default. When this argument is set to ``True``, Salt will attempt to
apply the file permision attributes to the extracted files/folders.
apply the file permission attributes to the extracted files/folders.
On Windows, only the read-only flag will be extracted as set within the
zip file, other attributes (i.e. user/group permissions) are ignored.

View File

@ -597,7 +597,7 @@ def set_tags(tags,
a dict of key:value pair of tags to set on the security group
name
the name of the security gruop
the name of the security group
group_id
the group id of the security group (in lie of a name/vpc combo)

View File

@ -143,7 +143,7 @@ def osd_prepare(**kwargs):
Notes:
cluster_uuid
Set the deivce to store the osd data on.
Set the device to store the osd data on.
journal_dev
Set the journal device. defaults to osd_dev.
@ -194,7 +194,7 @@ def keyring_create(**kwargs):
Notes:
keyring_type
Required paramter
Required parameter
Can be set to:
admin, mon, osd, rgw, mds
@ -223,7 +223,7 @@ def keyring_save(**kwargs):
Notes:
keyring_type
Required paramter
Required parameter
Can be set to:
admin, mon, osd, rgw, mds
@ -251,7 +251,7 @@ def keyring_purge(**kwargs):
Notes:
keyring_type
Required paramter
Required parameter
Can be set to:
admin, mon, osd, rgw, mds
@ -281,7 +281,7 @@ def keyring_present(**kwargs):
Notes:
keyring_type
Required paramter
Required parameter
Can be set to:
admin, mon, osd, rgw, mds
@ -309,7 +309,7 @@ def keyring_auth_add(**kwargs):
Notes:
keyring_type
Required paramter
Required parameter
Can be set to:
admin, mon, osd, rgw, mds
@ -337,7 +337,7 @@ def keyring_auth_del(**kwargs):
Notes:
keyring_type
Required paramter
Required parameter
Can be set to:
admin, mon, osd, rgw, mds
@ -374,7 +374,7 @@ def mon_is(**kwargs):
def mon_status(**kwargs):
'''
Get status from mon deamon
Get status from mon daemon
CLI Example:
@ -396,7 +396,7 @@ def mon_status(**kwargs):
def mon_quorum(**kwargs):
'''
Is mon deamon in quorum
Is mon daemon in quorum
CLI Example:
@ -418,7 +418,7 @@ def mon_quorum(**kwargs):
def mon_active(**kwargs):
'''
Is mon deamon running
Is mon daemon running
CLI Example:
@ -518,7 +518,7 @@ def rgw_create(**kwargs):
Notes:
name:
Required paramter
Required parameter
Set the rgw client name. Must start with 'rgw.'
cluster_uuid
@ -546,7 +546,7 @@ def rgw_destroy(**kwargs):
Notes:
name:
Required paramter
Required parameter
Set the rgw client name. Must start with 'rgw.'
cluster_uuid
@ -576,15 +576,15 @@ def mds_create(**kwargs):
Notes:
name:
Required paramter
Required parameter
Set the rgw client name. Must start with 'mds.'
port:
Required paramter
Required parameter
Port for the mds to listen to.
addr:
Required paramter
Required parameter
Address or IP address for the mds to listen to.
cluster_uuid
@ -612,7 +612,7 @@ def mds_destroy(**kwargs):
Notes:
name:
Required paramter
Required parameter
Set the rgw client name. Must start with 'mds.'
cluster_uuid

View File

@ -43,7 +43,7 @@ def get_data(datastore, path):
:type datastore: :class:`DatastoreType` (``str`` enum).
:param path: The device path to set the value at,
a list of element names in order, / seperated
a list of element names in order, / separated
:type path: ``list``, ``str`` OR ``tuple``
:return: The network configuration at that tree
@ -67,7 +67,7 @@ def set_data_value(datastore, path, data):
:type datastore: :class:`DatastoreType` (``str`` enum).
:param path: The device path to set the value at,
a list of element names in order, / seperated
a list of element names in order, / separated
:type path: ``list``, ``str`` OR ``tuple``
:param data: The new value at the given path

View File

@ -1581,13 +1581,13 @@ def _write_file_ifaces(iface, data, **settings):
if adapter == iface:
saved_ifcfg = tmp
_SEPERATE_FILE = False
_SEPARATE_FILE = False
if 'filename' in settings:
if not settings['filename'].startswith('/'):
filename = '{0}/{1}'.format(_DEB_NETWORK_DIR, settings['filename'])
else:
filename = settings['filename']
_SEPERATE_FILE = True
_SEPARATE_FILE = True
else:
if 'filename' in adapters[adapter]['data']:
filename = adapters[adapter]['data']
@ -1600,7 +1600,7 @@ def _write_file_ifaces(iface, data, **settings):
log.error(msg)
raise AttributeError(msg)
with salt.utils.flopen(filename, 'w') as fout:
if _SEPERATE_FILE:
if _SEPARATE_FILE:
fout.write(saved_ifcfg)
else:
fout.write(ifcfg)

View File

@ -594,7 +594,7 @@ def get_source_sum(file_name='',
file, used to disambiguate ambiguous matches.
saltenv : base
Salt fileserver environment from which to retrive the source_hash. This
Salt fileserver environment from which to retrieve the source_hash. This
value will only be used when ``source_hash`` refers to a file on the
Salt fileserver (i.e. one beginning with ``salt://``).
@ -4664,7 +4664,7 @@ def manage_file(name,
.. note:: keep_mode does not work with salt-ssh.
As a consequence of how the files are transfered to the minion, and
As a consequence of how the files are transferred to the minion, and
the inability to connect back to the master with salt-ssh, salt is
unable to stat the file as it exists on the fileserver and thus
cannot mirror the mode on the salt-ssh minion

View File

@ -64,7 +64,6 @@ def _process_return_data(retData):
else:
msg = 'Unsuccessful error code {0} returned'.format(retData.status_code)
raise CommandExecutionError(msg)
return None
def delete_record(name,
@ -393,7 +392,6 @@ def get_network(network_name,
return records
else:
return False
return False
def get_record(record_name,
@ -473,7 +471,6 @@ def get_record(record_name,
return records
else:
return False
return False
def _parse_record_data(entry_data):

View File

@ -127,13 +127,14 @@ def get_locale():
salt '*' locale.get_locale
'''
cmd = ''
if salt.utils.systemd.booted(__context__):
if 'Suse' in __grains__['os_family']:
# this block applies to all SUSE systems - also with systemd
cmd = 'grep "^RC_LANG" /etc/sysconfig/language'
elif salt.utils.systemd.booted(__context__):
params = _parse_dbus_locale() if HAS_DBUS else _parse_localectl()
return params.get('LANG', '')
elif 'RedHat' in __grains__['os_family']:
cmd = 'grep "^LANG=" /etc/sysconfig/i18n'
elif 'Suse' in __grains__['os_family']:
cmd = 'grep "^RC_LANG" /etc/sysconfig/language'
elif 'Debian' in __grains__['os_family']:
# this block only applies to Debian without systemd
cmd = 'grep "^LANG=" /etc/default/locale'
@ -161,7 +162,17 @@ def set_locale(locale):
salt '*' locale.set_locale 'en_US.UTF-8'
'''
if salt.utils.systemd.booted(__context__):
if 'Suse' in __grains__['os_family']:
# this block applies to all SUSE systems - also with systemd
if not __salt__['file.file_exists']('/etc/sysconfig/language'):
__salt__['file.touch']('/etc/sysconfig/language')
__salt__['file.replace'](
'/etc/sysconfig/language',
'^RC_LANG=.*',
'RC_LANG="{0}"'.format(locale),
append_if_not_found=True
)
elif salt.utils.systemd.booted(__context__):
return _localectl_set(locale)
elif 'RedHat' in __grains__['os_family']:
if not __salt__['file.file_exists']('/etc/sysconfig/i18n'):
@ -172,15 +183,6 @@ def set_locale(locale):
'LANG="{0}"'.format(locale),
append_if_not_found=True
)
elif 'Suse' in __grains__['os_family']:
if not __salt__['file.file_exists']('/etc/sysconfig/language'):
__salt__['file.touch']('/etc/sysconfig/language')
__salt__['file.replace'](
'/etc/sysconfig/language',
'^RC_LANG=.*',
'RC_LANG="{0}"'.format(locale),
append_if_not_found=True
)
elif 'Debian' in __grains__['os_family']:
# this block only applies to Debian without systemd
update_locale = salt.utils.which('update-locale')

View File

@ -503,7 +503,7 @@ def cloud_init_interface(name, vm_=None, **kwargs):
# via the legacy salt cloud configuration style.
# On other cases, we should rely on settings provided by the new
# salt lxc network profile style configuration which can
# be also be overriden or a per interface basis via the nic_opts dict.
# be also be overridden or a per interface basis via the nic_opts dict.
if bridge:
eth0['link'] = bridge
if gateway:

View File

@ -577,7 +577,7 @@ def ipaddrs(**kwargs): # pylint: disable=unused-argument
Returns all configured IP addresses on all interfaces as a dictionary of dictionaries.\
Keys of the main dictionary represent the name of the interface.\
Values of the main dictionary represent are dictionaries that may consist of two keys\
'ipv4' and 'ipv6' (one, both or none) which are themselvs dictionaries witht the IP addresses as keys.\
'ipv4' and 'ipv6' (one, both or none) which are themselvs dictionaries with the IP addresses as keys.\
CLI Example:
@ -929,7 +929,7 @@ def load_config(filename=None,
To avoid committing the configuration, set the argument ``test`` to ``True`` and will discard (dry run).
To keep the chnages but not commit, set ``commit`` to ``False``.
To keep the changes but not commit, set ``commit`` to ``False``.
To replace the config, set ``replace`` to ``True``.
@ -947,7 +947,7 @@ def load_config(filename=None,
Commit? Default: ``True``.
debug: False
Debug mode. Will insert a new key under the output dictionary, as ``loaded_config`` contaning the raw
Debug mode. Will insert a new key under the output dictionary, as ``loaded_config`` containing the raw
configuration loaded on the device.
.. versionadded:: 2016.11.2
@ -1050,7 +1050,7 @@ def load_template(template_name,
To avoid committing the configuration, set the argument ``test`` to ``True``
and will discard (dry run).
To preserve the chnages, set ``commit`` to ``False``.
To preserve the changes, set ``commit`` to ``False``.
However, this is recommended to be used only in exceptional cases
when there are applied few consecutive states
and/or configuration changes.
@ -1074,7 +1074,7 @@ def load_template(template_name,
Placing the template under ``/etc/salt/states/templates/example.jinja``,
it can be used as ``salt://templates/example.jinja``.
Alternatively, for local files, the user can specify the abolute path.
Alternatively, for local files, the user can specify the absolute path.
If remotely, the source can be retrieved via ``http``, ``https`` or ``ftp``.
Examples:
@ -1156,7 +1156,7 @@ def load_template(template_name,
debug: False
Debug mode. Will insert a new key under the output dictionary,
as ``loaded_config`` contaning the raw result after the template was rendered.
as ``loaded_config`` containing the raw result after the template was rendered.
.. versionadded:: 2016.11.2
@ -1175,7 +1175,7 @@ def load_template(template_name,
.. note::
Do not explicitely specify this argument.
Do not explicitly specify this argument.
This represents any other variable that will be sent
to the template rendering system.
Please see the examples below!
@ -1314,7 +1314,7 @@ def load_template(template_name,
if template_path and not file_exists:
template_name = __salt__['file.join'](template_path, template_name)
if not saltenv:
# no saltenv overriden
# no saltenv overridden
# use the custom template path
saltenv = template_path if not salt_render else 'base'
elif salt_render and not saltenv:
@ -1539,8 +1539,8 @@ def config_control(inherit_napalm_device=None, **kwargs): # pylint: disable=unu
If differences found, will try to commit.
In case commit unsuccessful, will try to rollback.
:return: A tuple with a boolean that specifies if the config was changed/commited/rollbacked on the device.\
And a string that provides more details of the reason why the configuration was not commited properly.
:return: A tuple with a boolean that specifies if the config was changed/committed/rollbacked on the device.\
And a string that provides more details of the reason why the configuration was not committed properly.
CLI Example:

View File

@ -213,7 +213,7 @@ def set_peers(*peers, **options):
:commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when
the user does not want to commit after each change, but after a couple.
By default this function will commit the config changes (if any). To load without commiting, use the `commit`
By default this function will commit the config changes (if any). To load without committing, use the `commit`
option. For dry run use the `test` argument.
CLI Example:
@ -246,7 +246,7 @@ def set_servers(*servers, **options):
:commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when
the user does not want to commit after each change, but after a couple.
By default this function will commit the config changes (if any). To load without commiting, use the `commit`
By default this function will commit the config changes (if any). To load without committing, use the `commit`
option. For dry run use the `test` argument.
CLI Example:
@ -279,7 +279,7 @@ def delete_peers(*peers, **options):
:commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when
the user does not want to commit after each change, but after a couple.
By default this function will commit the config changes (if any). To load without commiting, use the `commit`
By default this function will commit the config changes (if any). To load without committing, use the `commit`
option. For dry run use the `test` argument.
CLI Example:
@ -312,7 +312,7 @@ def delete_servers(*servers, **options):
:commit commit (bool): commit loaded config. By default `commit` is True (will commit the changes). Useful when
the user does not want to commit after each change, but after a couple.
By default this function will commit the config changes (if any). To load without commiting, use the `commit`
By default this function will commit the config changes (if any). To load without committing, use the `commit`
option. For dry run use the `test` argument.
CLI Example:

@ -7,7 +7,7 @@ see the `Parallels Desktop Reference Guide
<http://download.parallels.com/desktop/v9/ga/docs/en_US/Parallels%20Command%20Line%20Reference%20Guide.pdf>`_.
What has not been implemented yet can be accessed through ``parallels.prlctl``
and ``parallels.prlsrvctl`` (note the preceeding double dash ``--`` as
and ``parallels.prlsrvctl`` (note the preceding double dash ``--`` as
necessary):
.. code-block::

@ -640,7 +640,7 @@ def get_users():
def lsof(name):
'''
Retrieve the lsof informations of the given process name.
Retrieve the lsof information of the given process name.
CLI Example:
@ -657,7 +657,7 @@ def lsof(name):
def netstat(name):
'''
Retrieve the netstat informations of the given process name.
Retrieve the netstat information of the given process name.
CLI Example:

@ -476,9 +476,9 @@ def set_value(hive,
under the key. If not passed, the key (Default) value will be set.
:param object vdata: The value data to be set.
What the type of this paramater
What the type of this parameter
should be is determined by the value of the vtype
paramater. The correspondence
parameter. The correspondence
is as follows:
.. glossary::
@ -495,15 +495,15 @@ def set_value(hive,
str
:param str vtype: The value type.
The possible values of the vtype paramater are indicated
above in the description of the vdata paramater.
The possible values of the vtype parameter are indicated
above in the description of the vdata parameter.
:param bool use_32bit_registry: Sets the 32bit portion of the registry on
64bit installations. On 32bit machines this is ignored.
:param bool volatile: When this paramater has a value of True, the registry key will be
:param bool volatile: When this parameter has a value of True, the registry key will be
made volatile (i.e. it will not persist beyond a system reset or shutdown).
This paramater only has an effect when a key is being created and at no
This parameter only has an effect when a key is being created and at no
other time.
:return: Returns True if successful, False if not
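(A hedged sketch of the ``vtype``/``vdata`` correspondence described above; the mapping is illustrative and the module's glossary remains authoritative.)

.. code-block:: python

    # Illustrative mapping of registry value types to the Python types
    # the docstring expects for vdata.
    VTYPE_TO_PYTHON = {
        'REG_SZ': str,          # plain string
        'REG_EXPAND_SZ': str,   # string containing environment variables
        'REG_MULTI_SZ': list,   # list of strings
        'REG_DWORD': int,       # 32-bit integer
        'REG_BINARY': bytes,    # raw binary data
    }

    def check_vdata(vtype, vdata):
        """Return True if vdata matches the type expected for vtype."""
        expected = VTYPE_TO_PYTHON.get(vtype, str)
        return isinstance(vdata, expected)

    print(check_vdata('REG_DWORD', 42))         # True
    print(check_vdata('REG_MULTI_SZ', 'oops'))  # False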

@ -79,6 +79,8 @@ def _create_rpmmacros():
with salt.utils.fopen(rpmmacros, 'w') as afile:
afile.write('%_topdir {0}\n'.format(rpmbuilddir))
afile.write('%signature gpg\n')
afile.write('%_source_filedigest_algorithm 8\n')
afile.write('%_binary_filedigest_algorithm 8\n')
afile.write('%_gpg_name packaging@saltstack.com\n')
@ -128,7 +130,7 @@ def _get_distset(tgt):
tgtattrs = tgt.split('-')
if tgtattrs[0] == 'amzn':
distset = '--define "dist .{0}1"'.format(tgtattrs[0])
elif tgtattrs[1] in ['5', '6', '7']:
elif tgtattrs[1] in ['6', '7']:
distset = '--define "dist .el{0}"'.format(tgtattrs[1])
else:
distset = ''
@ -173,6 +175,13 @@ def make_src_pkg(dest_dir, spec, sources, env=None, template=None, saltenv='base
This example command should build the libnacl SOURCE package and place it in
/var/www/html/ on the minion
.. versionchanged:: Nitrogen
.. note::
using SHA256 as digest and minimum level dist el6
'''
_create_rpmmacros()
tree_base = _mk_tree()
@ -182,8 +191,8 @@ def make_src_pkg(dest_dir, spec, sources, env=None, template=None, saltenv='base
for src in sources:
_get_src(tree_base, src, saltenv)
# make source rpms for dist el5, usable with mock on other dists
cmd = 'rpmbuild --define "_topdir {0}" -bs --define "_source_filedigest_algorithm md5" --define "_binary_filedigest_algorithm md5" --define "dist .el5" {1}'.format(tree_base, spec_path)
# make source rpms for dist el6 with SHA256, usable with mock on other dists
cmd = 'rpmbuild --verbose --define "_topdir {0}" -bs --define "dist .el6" {1}'.format(tree_base, spec_path)
__salt__['cmd.run'](cmd)
srpms = os.path.join(tree_base, 'SRPMS')
ret = []
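(The new command relies on ``~/.rpmmacros`` for digest selection — ``%_source_filedigest_algorithm 8``, where 8 corresponds to SHA-256 in RPM's hash-algorithm registry — instead of passing MD5 defines on the command line. A small illustrative reconstruction of the command string, with made-up paths:)

.. code-block:: python

    # Illustrative reconstruction of the rpmbuild -bs invocation above;
    # the digest algorithm itself comes from ~/.rpmmacros, not the CLI.
    def build_srpm_command(tree_base, spec_path, dist='.el6'):
        return (
            'rpmbuild --verbose --define "_topdir {0}" -bs '
            '--define "dist {1}" {2}'.format(tree_base, dist, spec_path)
        )

    print(build_srpm_command('/tmp/rpmbuild', '/tmp/rpmbuild/SPECS/libnacl.spec'))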

@ -647,7 +647,7 @@ def enable(name, start=False, **kwargs):
def disable(name, stop=False, **kwargs):
'''
Don't start service ``name`` at boot
Returns ``True`` if operation is successfull
Returns ``True`` if operation is successful
name
the service's name
@ -686,7 +686,7 @@ def disable(name, stop=False, **kwargs):
def remove(name):
'''
Remove the service <name> from system.
Returns ``True`` if operation is successfull.
Returns ``True`` if operation is successful.
The service will be also stopped.
name

@ -247,7 +247,7 @@ def call_hook(message,
username=None,
icon_emoji=None):
'''
Send message to Slack incomming webhook.
Send message to Slack incoming webhook.
:param message: The topic of message.
:param attachment: The message to send to the Slack WebHook.
@ -258,7 +258,7 @@ def call_hook(message,
:param channel: The channel to use instead of the WebHook default.
:param username: Username to use instead of WebHook default.
:param icon_emoji: Icon to use instead of WebHook default.
:return: Boolean if message was sent successfuly.
:return: Boolean if message was sent successfully.
CLI Example:
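(The module's CLI example is elided by the diff context. As a rough sketch of the JSON payload an incoming webhook receives, built from the parameters documented above; the helper below is hypothetical:)

.. code-block:: python

    import json

    # Build the incoming-webhook payload from the documented parameters.
    def build_hook_payload(message, channel=None, username=None, icon_emoji=None):
        payload = {'text': message}
        if channel:
            payload['channel'] = channel
        if username:
            payload['username'] = username
        if icon_emoji:
            payload['icon_emoji'] = icon_emoji
        return json.dumps(payload)

    print(build_hook_payload('Deploy finished', channel='#ops', username='salt-bot'))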

@ -491,7 +491,7 @@ def modify_snapshot(snapshot_id=None,
snapshot = get_snapshot(config=config, number=snapshot_id)
try:
# Updating only the explicitely provided attributes by the user
# Updating only the explicitly provided attributes by the user
updated_opts = {
'description': description if description is not None else snapshot['description'],
'cleanup': cleanup if cleanup is not None else snapshot['cleanup'],
@ -669,7 +669,7 @@ def undo(config='root', files=None, num_pre=None, num_post=None):
the files into the state of num_pre.
.. warning::
If one of the files has changes after num_post, they will be overwriten
If one of the files has changes after num_post, they will be overwritten
The snapshots are used to determine the file list, but the current
version of the files will be overwritten by the versions in num_pre.
@ -790,7 +790,7 @@ def diff(config='root', filename=None, num_pre=None, num_post=None):
if filepath.startswith(SUBVOLUME):
_filepath = filepath[len(SUBVOLUME):]
# Just in case, removing posible double '/' from the final file paths
# Just in case, removing possible double '/' from the final file paths
pre_file = os.path.normpath(pre_mount + "/" + _filepath).replace("//", "/")
post_file = os.path.normpath(post_mount + "/" + _filepath).replace("//", "/")
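(A tiny worked example of the normalization step above, with a made-up mount point and path:)

.. code-block:: python

    import os.path

    # normpath collapses the duplicated slashes introduced by the join.
    pre_mount = '/tmp/snapper-pre/'
    _filepath = '/etc/hosts'
    pre_file = os.path.normpath(pre_mount + "/" + _filepath).replace("//", "/")
    print(pre_file)  # /tmp/snapper-pre/etc/hosts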

@ -56,7 +56,7 @@ def create(name, **params):
raise CommandExecutionError(
'request to uptime failed : {0}'.format(req.reason)
)
log.debug('[uptime] PUT request successfull')
log.debug('[uptime] PUT request successful')
return req.json()['_id']
@ -83,7 +83,7 @@ def delete(name):
raise CommandExecutionError(
'request to uptime failed : {0}'.format(req.reason)
)
log.debug('[uptime] DELETE request successfull')
log.debug('[uptime] DELETE request successful')
return True

@ -272,9 +272,9 @@ def import_cert(name,
return False
if password:
cert_props = get_cert_file(name=cached_source_path, password=password)
cert_props = get_cert_file(name=cached_source_path, cert_format=cert_format, password=password)
else:
cert_props = get_cert_file(name=cached_source_path)
cert_props = get_cert_file(name=cached_source_path, cert_format=cert_format)
current_certs = get_certs(context=context, store=store)

@ -881,7 +881,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
represents one certificate. A dict must contain either the key
``serial_number`` with the value of the serial number to revoke, or
``certificate`` with either the PEM encoded text of the certificate,
or a path ot the certificate to revoke.
or a path to the certificate to revoke.
The dict can optionally contain the ``revocation_date`` key. If this
key is omitted the revocation date will be set to now. It should be a

@ -431,7 +431,7 @@ def remove(name=None, pkgs=None, recursive=True, **kwargs):
The name of the package to be deleted.
recursive
Also remove dependant packages (not required elsewhere).
Also remove dependent packages (not required elsewhere).
Default mode: enabled.
Multiple Package Options:

@ -441,7 +441,7 @@ def user_getmedia(userids=None, **connection_args):
:param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)
:param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)
:return: List of retreived media, False on failure.
:return: List of retrieved media, False on failure.
CLI Example:
.. code-block:: bash

@ -7,6 +7,7 @@ for managing outputters.
# Import python libs
from __future__ import print_function
from __future__ import absolute_import
import re
import os
import sys
import errno
@ -185,6 +186,23 @@ def out_format(data, out, opts=None, **kwargs):
return try_printout(data, out, opts, **kwargs)
def string_format(data, out, opts=None, **kwargs):
'''
Return the formatted outputter string, removing the ANSI escape sequences.
'''
raw_output = try_printout(data, out, opts, **kwargs)
ansi_escape = re.compile(r'\x1b[^m]*m')
return ansi_escape.sub('', raw_output)
def html_format(data, out, opts=None, **kwargs):
'''
Return the formatted string as HTML.
'''
ansi_escaped_string = string_format(data, out, opts, **kwargs)
return ansi_escaped_string.replace(' ', '&nbsp;').replace('\n', '<br />')
def strip_esc_sequence(txt):
'''
Replace ESC (ASCII 27/Oct 33) to prevent unsafe strings
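(The new ``string_format``/``html_format`` helpers strip ANSI colour codes before optionally converting to HTML. A self-contained sketch of the same idea, independent of the Salt loader:)

.. code-block:: python

    import re

    # Same pattern as string_format above: drop ANSI escape sequences,
    # then swap spaces/newlines for their HTML equivalents.
    ANSI_ESCAPE = re.compile(r'\x1b[^m]*m')

    def strip_ansi(raw_output):
        return ANSI_ESCAPE.sub('', raw_output)

    def to_html(raw_output):
        return strip_ansi(raw_output).replace(' ', '&nbsp;').replace('\n', '<br />')

    colored = '\x1b[0;32mlocal:\x1b[0;0m\n    \x1b[0;36mTrue\x1b[0;0m'
    print(strip_ansi(colored))  # prints "local:" then an indented "True"
    print(to_html(colored))     # local:<br />&nbsp;&nbsp;&nbsp;&nbsp;True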

@ -58,7 +58,7 @@ You can also provide a list of config files:
Select config files through grains|pillar|opts matching
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You can also opt for a much more flexible configuration: MakoStack allows to
You can also opt for a much more flexible configuration: MakoStack allows one to
select the config files for the current minion based on matching values from
either grains, or pillar, or opts objects.

@ -62,7 +62,7 @@ def ping():
decode=True,
)
log.debug(
'chronos.info returned succesfully: %s',
'chronos.info returned successfully: %s',
response,
)
if 'dict' in response:

@ -270,7 +270,7 @@ def get_data(datastore, path):
:type datastore: :class:`DatastoreType` (``str`` enum).
:param path: The device path, a list of element names in order,
comma seperated
comma separated
:type path: ``list`` of ``str`` OR ``tuple``
:return: The network configuration at that tree
@ -293,7 +293,7 @@ def set_data_value(datastore, path, data):
:type datastore: :class:`DatastoreType` (``str`` enum).
:param path: The device path to set the value at,
a list of element names in order, comma seperated
a list of element names in order, comma separated
:type path: ``list`` of ``str`` OR ``tuple``
:param data: The new value at the given path

@ -62,7 +62,7 @@ def ping():
decode=True,
)
log.debug(
'marathon.info returned succesfully: %s',
'marathon.info returned successfully: %s',
response,
)
if 'text' in response and response['text'].strip() == 'pong':

@ -35,6 +35,12 @@ please refer to the `NAPALM Read the Docs page`_.
.. _`NAPALM Read the Docs page`: https://napalm.readthedocs.io/en/latest/#supported-network-operating-systems
.. _`optional arguments`: http://napalm.readthedocs.io/en/latest/support/index.html#list-of-supported-optional-arguments
.. versionadded:: Nitrogen
* always_alive: in certain less dynamic environments, maintaining the remote connection permanently
open with the network device is not always beneficial. In that case, the user can
select to initialize the connection only when needed, by specifying this field to ``false``.
Default: ``true`` (maintains the connection with the remote network device).
Example:
@ -48,12 +54,15 @@ Example:
passwd: my_password
optional_args:
port: 12201
config_format: set
.. seealso::
- :mod:`NAPALM grains: select network devices based on their characteristics <salt.grains.napalm>`
- :mod:`NET module: network basic features <salt.modules.napalm_network>`
- :mod:`Network config state: Manage the configuration using arbitrary templates <salt.states.netconfig>`
- :mod:`NAPALM YANG state: Manage the configuration according to the YANG models (OpenConfig/IETF) <salt.states.netyang>`
- :mod:`Network ACL module: Generate and load ACL (firewall) configuration <salt.modules.napalm_acl>`
- :mod:`Network ACL state: Manage the firewall configuration <salt.states.netacl>`
- :mod:`NTP operational and configuration management module <salt.modules.napalm_ntp>`
- :mod:`BGP operational and configuration management module <salt.modules.napalm_bgp>`
- :mod:`Routes details <salt.modules.napalm_route>`
@ -245,11 +254,11 @@ def call(method, *args, **kwargs):
* result (True/False): if the operation succeeded
* out (object): returns the object as-is from the call
* comment (string): provides more details in case the call failed
* traceback (string): complete traceback in case of exeception. Please submit an issue including this traceback
* traceback (string): complete traceback in case of exception. Please submit an issue including this traceback
on the `correct driver repo`_ and make sure to read the FAQ_
.. _`correct driver repo`: https://github.com/napalm-automation/napalm/issues/new
.. FAQ_: https://github.com/napalm-automation/napalm#faq
.. _FAQ: https://github.com/napalm-automation/napalm#faq
Example:
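(A hedged sketch of the return shape described above; the values are invented.)

.. code-block:: python

    # Illustrative return dictionaries matching the documented keys.
    example_success = {
        'result': True,
        'out': {'uptime': 535191.0, 'vendor': 'Arista'},
        'comment': '',
    }
    example_failure = {
        'result': False,
        'out': None,
        'comment': 'Cannot execute "get_facts": connection closed',
        'traceback': 'Traceback (most recent call last): ...',
    }

    for ret in (example_success, example_failure):
        if not ret['result']:
            print('call failed: {0}'.format(ret['comment']))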

@ -14,7 +14,7 @@ Open up ``/etc/salt/master`` and add:
venafi:
api_key: None
Then register your email address with Venagi using the following command:
Then register your email address with Venafi using the following command:
.. code-block:: bash
@ -38,7 +38,6 @@ import json
import salt.syspaths as syspaths
import salt.cache
import salt.utils
import salt.utils.http
import salt.ext.six as six
from salt.exceptions import CommandExecutionError
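(The hunks that follow swap direct ``salt.utils.http.query(...)`` calls for the loader-injected ``__utils__['http.query'](...)`` dunder. A minimal sketch of the calling pattern, with a stub standing in for the real utility:)

.. code-block:: python

    # __utils__ behaves like a dict of loader-provided callables; the stub
    # below is illustrative only (in Salt the loader injects the real one).
    def _stub_http_query(url, method='GET', decode=True, **kwargs):
        return {'status': 200, 'dict': {'url': url, 'method': method}}

    __utils__ = {'http.query': _stub_http_query}

    qdata = __utils__['http.query'](
        'https://api.example.com/v1/zones/tag/default',
        method='GET',
        decode=True,
    )
    print(qdata['dict']['url'])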
@ -90,7 +89,7 @@ def gen_key(minion_id, dns_name=None, zone='default', password=None):
# The /v1/zones/tag/{name} API call is a shortcut to get the zoneID
# directly from the name
qdata = salt.utils.http.query(
qdata = __utils__['http.query'](
'{0}/zones/tag/{1}'.format(_base_url(), zone),
method='GET',
decode=True,
@ -106,7 +105,7 @@ def gen_key(minion_id, dns_name=None, zone='default', password=None):
# the /v1/certificatepolicies?zoneId API call returns the default
# certificate use and certificate identity policies
qdata = salt.utils.http.query(
qdata = __utils__['http.query'](
'{0}/certificatepolicies?zoneId={1}'.format(_base_url(), zone_id),
method='GET',
decode=True,
@ -221,6 +220,8 @@ def gen_csr(
tmpcsr,
subject
)
if password is not None:
cmd += ' -passin pass:{0}'.format(password)
output = __salt__['salt.cmd']('cmd.run', cmd)
if 'problems making Certificate Request' in output:
@ -296,6 +297,7 @@ def request(
loc=loc,
org=org,
org_unit=org_unit,
password=password,
)
pdata = json.dumps({
@ -303,7 +305,7 @@ def request(
'certificateSigningRequest': csr,
})
qdata = salt.utils.http.query(
qdata = __utils__['http.query'](
'{0}/certificaterequests'.format(_base_url()),
method='POST',
data=pdata,
@ -369,7 +371,7 @@ def register(email):
salt-run venafi.register email@example.com
'''
data = salt.utils.http.query(
data = __utils__['http.query'](
'{0}/useraccounts'.format(_base_url()),
method='POST',
data=json.dumps({
@ -401,7 +403,7 @@ def show_company(domain):
salt-run venafi.show_company example.com
'''
data = salt.utils.http.query(
data = __utils__['http.query'](
'{0}/companies/domain/{1}'.format(_base_url(), domain),
status=True,
decode=True,
@ -428,7 +430,7 @@ def show_csrs():
salt-run venafi.show_csrs
'''
data = salt.utils.http.query(
data = __utils__['http.query'](
'{0}/certificaterequests'.format(_base_url()),
status=True,
decode=True,
@ -455,7 +457,7 @@ def get_zone_id(zone_name):
salt-run venafi.get_zone_id default
'''
data = salt.utils.http.query(
data = __utils__['http.query'](
'{0}/zones/tag/{1}'.format(_base_url(), zone_name),
status=True,
decode=True,
@ -483,7 +485,7 @@ def show_policies():
salt-run venafi.show_zones
'''
data = salt.utils.http.query(
data = __utils__['http.query'](
'{0}/certificatepolicies'.format(_base_url()),
status=True,
decode=True,
@ -510,7 +512,7 @@ def show_zones():
salt-run venafi.show_zones
'''
data = salt.utils.http.query(
data = __utils__['http.query'](
'{0}/zones'.format(_base_url()),
status=True,
decode=True,
@ -537,7 +539,7 @@ def show_cert(id_):
salt-run venafi.show_cert 01234567-89ab-cdef-0123-456789abcdef
'''
data = salt.utils.http.query(
data = __utils__['http.query'](
'{0}/certificaterequests/{1}/certificate'.format(_base_url(), id_),
params={
'format': 'PEM',
@ -553,7 +555,7 @@ def show_cert(id_):
'There was an API error: {0}'.format(data['error'])
)
data = data.get('body', '')
csr_data = salt.utils.http.query(
csr_data = __utils__['http.query'](
'{0}/certificaterequests/{1}'.format(_base_url(), id_),
status=True,
decode=True,

@ -77,7 +77,7 @@ def get(key, profile=None):
returns a dict of joined credential_pairs, credentials_metadata returns a
dict of metadata relevant to the credentials mapped to the confidant
service, and result returns a bool that can be used to determine if the sdb
call succeded or failed to fetch credentials from confidant (or from local
call succeeded or failed to fetch credentials from confidant (or from local
cache). If result is false, the data in credentials or credentials_metadata
can't be trusted.
'''

@ -116,7 +116,7 @@ def present(name, api_name, swagger_file, stage_name, api_key_required,
The canonicalization of these input parameters is done in the following order:
1) lambda_funcname_format is formatted with the input parameters as passed,
2) resulting string is stripped for leading/trailing spaces,
3) path paramter's curly braces are removed from the resource path,
3) path parameter's curly braces are removed from the resource path,
4) consecutive spaces and forward slashes in the paths are replaced with '_'
5) consecutive '_' are replaced with '_'
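(A hedged sketch of the five canonicalization steps listed above; the regular expressions and the sample format string are my reading of the list, not code from the commit.)

.. code-block:: python

    import re

    def canonicalize_funcname(lambda_funcname_format, **params):
        # 1) format the configured pattern with the inputs as passed
        name = lambda_funcname_format.format(**params)
        # 2) strip leading/trailing whitespace
        name = name.strip()
        # 3) drop the curly braces around path parameters
        name = name.replace('{', '').replace('}', '')
        # 4) collapse spaces and forward slashes into underscores
        name = re.sub(r'[ /]+', '_', name)
        # 5) collapse consecutive underscores into one
        name = re.sub(r'_+', '_', name)
        return name

    print(canonicalize_funcname(
        '{stage}_{api}_{resource}_{method}',
        stage='dev', api='pet shop', resource='/pets/{petId}', method='GET',
    ))  # dev_pet_shop_pets_petId_GET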

@ -322,11 +322,11 @@ def _pipeline_present_with_definition(name, expected_pipeline_objects,
pipeline_objects = pipeline_definition.get('pipelineObjects')
parameter_objects = pipeline_definition.get('parameterObjects')
paramater_values = pipeline_definition.get('parameterValues')
parameter_values = pipeline_definition.get('parameterValues')
present = (_recursive_compare(_cleaned(pipeline_objects), _cleaned(expected_pipeline_objects)) and
_recursive_compare(parameter_objects, expected_parameter_objects) and
_recursive_compare(paramater_values, expected_parameter_values))
_recursive_compare(parameter_values, expected_parameter_values))
return present, pipeline_definition

@ -1737,10 +1737,10 @@ def delete_vpc_peering_connection(name, conn_id=None, conn_name=None,
Name of the state
conn_id
ID of the peering connection to delete. Exlusive with conn_name.
ID of the peering connection to delete. Exclusive with conn_name.
conn_name
The name of the peering connection to delete. Exlusive with conn_id.
The name of the peering connection to delete. Exclusive with conn_id.
region
Region to connect to.

@ -68,14 +68,14 @@ def quorum(name, **kwargs):
- require:
- sesceph: mon_running
'''
paramters = _ordereddict2dict(kwargs)
if paramters is None:
return _error(name, "Invalid paramters:%s")
parameters = _ordereddict2dict(kwargs)
if parameters is None:
return _error(name, "Invalid parameters:%s")
if __opts__['test']:
return _test(name, "cluster quorum")
try:
cluster_quorum = __salt__['ceph.cluster_quorum'](**paramters)
cluster_quorum = __salt__['ceph.cluster_quorum'](**parameters)
except (CommandExecutionError, CommandNotFoundError) as err:
return _error(name, err.strerror)
if cluster_quorum:

@ -25,7 +25,7 @@ def value_present(name, datastore, path, config):
:type datastore: :class:`DatastoreType` (``str`` enum).
:param path: The device path to set the value at,
a list of element names in order, / seperated
a list of element names in order, / separated
:type path: ``list``, ``str`` OR ``tuple``
:param config: The new value at the given path

@ -1737,7 +1737,7 @@ def managed(name,
.. note:: keep does not work with salt-ssh.
As a consequence of how the files are transfered to the minion, and
As a consequence of how the files are transferred to the minion, and
the inability to connect back to the master with salt-ssh, salt is
unable to stat the file as it exists on the fileserver and thus
cannot mirror the mode on the salt-ssh minion
@ -1968,7 +1968,7 @@ def managed(name,
tmp_ext
Suffix for temp file created by ``check_cmd``. Useful for checkers
dependant on config file extension (e.g. the init-checkconf upstart
dependent on config file extension (e.g. the init-checkconf upstart
config checker).
.. code-block:: yaml

@ -279,7 +279,7 @@ def config_absent(name):
.. note::
For certain cases extra lines could be removed based on dependencies.
In this example, included after the example for config_present, the
ACLs would be removed because they depend on the existance of the
ACLs would be removed because they depend on the existence of the
group.
'''

@ -1398,7 +1398,7 @@ def installed(
``3010`` is the only recognized exit code,
but this is subject to future refinement.
The value of this param
defaults to ``True``. This paramater has no effect
defaults to ``True``. This parameter has no effect
on non-Windows systems.
.. versionadded:: 2016.11.0
@ -2249,7 +2249,7 @@ def latest(
for the remainder of the current boot session. For the time being,
``3010`` is the only recognized exit code, but this
is subject to future refinement. The value of this param
defaults to ``True``. This paramater has no effect on
defaults to ``True``. This parameter has no effect on
non-Windows systems.
.. versionadded:: 2016.11.0

@ -75,7 +75,7 @@ def _default_ret(name):
def _retrieve_rpm_probes():
'''
Will retrive the probes from the network device using salt module "probes" throught NAPALM proxy.
Will retrieve the probes from the network device using salt module "probes" through the NAPALM proxy.
'''
return __salt__['probes.config']()

@ -43,7 +43,7 @@ def __virtual__():
def _get_summary(rsync_out):
'''
Get summary from the rsync successfull output.
Get summary from the rsync successful output.
'''
return "- " + "\n- ".join([elm for elm in rsync_out.split("\n\n")[-1].replace(" ", "\n").split("\n") if elm])
@ -51,7 +51,7 @@ def _get_summary(rsync_out):
def _get_changes(rsync_out):
'''
Get changes from the rsync successfull output.
Get changes from the rsync successful output.
'''
copied = list()
deleted = list()

@ -44,7 +44,7 @@ Multiple policy configuration
Minimum password age: 1
Minimum password length: 14
Password must meet complexity requirements: Enabled
Store passwords using reversible encrytion: Disabled
Store passwords using reversible encryption: Disabled
Configure Automatic Updates:
Configure automatic updating: 4 - Auto download and schedule the install
Scheduled install day: 7 - Every Saturday

@ -2298,7 +2298,7 @@ def namespaced_function(function, global_dict, defaults=None, preserve_context=F
Redefine (clone) a function under a different globals() namespace scope
preserve_context:
Allow to keep the context taken from orignal namespace,
Allow keeping the context taken from original namespace,
and extend it with globals() taken from
new targeted namespace.
'''

@ -227,7 +227,7 @@ def run(extension=None, name=None, description=None, salt_dir=None, merge=False,
:param salt_dir: The targeted Salt source directory
:type salt_dir: ``str``
:param merge: Merge with salt directory, `False` to keep seperate, `True` to merge trees.
:param merge: Merge with salt directory, `False` to keep separate, `True` to merge trees.
:type merge: ``bool``
:param temp_dir: The directory for generated code, if omitted, system temp will be used

@ -95,7 +95,7 @@ def _generate_minion_id():
class DistinctList(list):
'''
List, which allows to append only distinct objects.
List, which allows one to append only distinct objects.
Needs to work on Python 2.6, because of collections.OrderedDict only since 2.7 version.
Override 'filter()' for custom filtering.
'''

@ -278,7 +278,7 @@ class ProcessManager(object):
kwargs = {}
if salt.utils.is_windows():
# Need to ensure that 'log_queue' is correctly transfered to
# Need to ensure that 'log_queue' is correctly transferred to
# processes that inherit from 'MultiprocessingProcess'.
if type(MultiprocessingProcess) is type(tgt) and (
issubclass(tgt, MultiprocessingProcess)):

@ -256,7 +256,7 @@ def _get_jinja_error(trace, context=None):
):
add_log = True
template_path = error[0]
# if we add a log, format explicitly the exeception here
# if we add a log, format explicitly the exception here
# by telling to output the macro context after the macro
# error log place at the beginning
if add_log:

@ -800,8 +800,8 @@ class InstallLib(install_lib):
chmod = []
for idx, inputfile in enumerate(inp):
for executeable in executables:
if inputfile.endswith(executeable):
for executable in executables:
if inputfile.endswith(executable):
chmod.append(idx)
for idx in chmod:
filename = out[idx]

@ -468,19 +468,19 @@ class XpcomConversionTests(TestCase):
for key in expected_extras:
self.assertIn(key, ret_keys)
def test_extra_nonexistant_attributes(self):
def test_extra_nonexistent_attributes(self):
expected_extra_dict = {
"nonexistant": ""
"nonexistent": ""
}
xpcom = XpcomConversionTests._mock_xpcom_object()
ret = vb_xpcom_to_attribute_dict(xpcom, extra_attributes=expected_extra_dict.keys())
self.assertDictEqual(ret, expected_extra_dict)
def test_extra_nonexistant_attribute_with_default(self):
expected_extras = [("nonexistant", list)]
def test_extra_nonexistent_attribute_with_default(self):
expected_extras = [("nonexistent", list)]
expected_extra_dict = {
"nonexistant": []
"nonexistent": []
}
xpcom = XpcomConversionTests._mock_xpcom_object()

@ -0,0 +1 @@
# -*- coding: utf-8 -*-

@ -0,0 +1,94 @@
# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Python libs
from __future__ import absolute_import
import functools
import random
import string
# Import Salt Testing libs
from tests.support.case import ShellCase
from tests.support.helpers import destructiveTest, expensiveTest
from salt.ext.six.moves import range
def _random_name(prefix=''):
ret = prefix
for _ in range(8):
ret += random.choice(string.ascii_lowercase)
return ret
def with_random_name(func):
'''
generate a randomized name for a container
'''
@functools.wraps(func)
def wrapper(self, *args, **kwargs):
name = _random_name(prefix='salt_')
return func(self, _random_name(prefix='salt_test_'), *args, **kwargs)
return wrapper
@destructiveTest
@expensiveTest
class VenafiTest(ShellCase):
'''
Test the venafi runner
'''
@with_random_name
def test_gen_key_password(self, name):
'''
venafi.gen_key
'''
ret = self.run_run_plus(fun='venafi.gen_key',
minion_id='{0}.test.saltstack.com'.format(name),
dns_name='{0}.test.saltstack.com'.format(name),
zone='Internet',
password='SecretSauce')
self.assertEqual(ret['out'][0], '-----BEGIN RSA PRIVATE KEY-----')
self.assertEqual(ret['out'][1], 'Proc-Type: 4,ENCRYPTED')
self.assertEqual(ret['out'][-1], '-----END RSA PRIVATE KEY-----')
@with_random_name
def test_gen_key_without_password(self, name):
'''
venafi.gen_key
'''
ret = self.run_run_plus(fun='venafi.gen_key',
minion_id='{0}.test.saltstack.com'.format(name),
dns_name='{0}.test.saltstack.com'.format(name),
zone='Internet')
self.assertEqual(ret['out'][0], '-----BEGIN RSA PRIVATE KEY-----')
self.assertNotEqual(ret['out'][1], 'Proc-Type: 4,ENCRYPTED')
self.assertEqual(ret['out'][-1], '-----END RSA PRIVATE KEY-----')
@with_random_name
def test_gen_csr(self, name):
'''
venafi.gen_csr
'''
ret = self.run_run_plus(fun='venafi.gen_csr',
minion_id='{0}.test.saltstack.com'.format(name),
dns_name='{0}.test.saltstack.com'.format(name),
country='US', state='Utah', loc='Salt Lake City',
org='Salt Stack Inc.', org_unit='Testing',
zone='Internet', password='SecretSauce')
self.assertEqual(ret['out'][0], '-----BEGIN CERTIFICATE REQUEST-----')
self.assertEqual(ret['out'][-1], '-----END CERTIFICATE REQUEST-----')
@with_random_name
def test_request(self, name):
'''
venafi.request
'''
ret = self.run_run_plus(fun='venafi.request',
minion_id='{0}.example.com'.format(name),
dns_name='{0}.example.com'.format(name),
country='US', state='Utah', loc='Salt Lake City',
org='Salt Stack Inc.', org_unit='Testing',
zone='Internet', password='SecretSauce')
self.assertTrue('request_id' in ret['return'])

@ -90,7 +90,7 @@ class TestSaltAPIHandler(_SaltnadoIntegrationTestCase):
self.assertEqual(response.headers['Location'], '/login')
# Local client tests
@skipIf(True, 'to be reenabled when #23623 is merged')
@skipIf(True, 'to be re-enabled when #23623 is merged')
def test_simple_local_post(self):
'''
Test a basic API of /
@ -326,7 +326,7 @@ class TestMinionSaltAPIHandler(_SaltnadoIntegrationTestCase):
for minion_id, grains in six.iteritems(response_obj['return'][0]):
self.assertEqual(minion_id, grains['id'])
@skipIf(True, 'to be reenabled when #23623 is merged')
@skipIf(True, 'to be re-enabled when #23623 is merged')
def test_get(self):
response = self.fetch('/minions/minion',
method='GET',
@ -410,7 +410,7 @@ class TestJobsSaltAPIHandler(_SaltnadoIntegrationTestCase):
application.event_listener = saltnado.EventListener({}, self.opts)
return application
@skipIf(True, 'to be reenabled when #23623 is merged')
@skipIf(True, 'to be re-enabled when #23623 is merged')
def test_get(self):
# test with no JID
self.http_client.fetch(self.get_url('/jobs'),
@ -463,7 +463,7 @@ class TestRunSaltAPIHandler(_SaltnadoIntegrationTestCase):
application.event_listener = saltnado.EventListener({}, self.opts)
return application
@skipIf(True, 'to be reenabled when #23623 is merged')
@skipIf(True, 'to be re-enabled when #23623 is merged')
def test_get(self):
low = [{'client': 'local',
'tgt': '*',

@ -30,7 +30,7 @@ class ManageTest(ShellCase):
self.assertEqual(ret['return'], {})
self.assertEqual(ret['out'], [])
@skipIf(True, 'to be reenabled when #23623 is merged')
@skipIf(True, 'to be re-enabled when #23623 is merged')
def test_list_jobs(self):
'''
jobs.list_jobs

@ -140,7 +140,7 @@ class CallTest(ShellCase, testprogram.TestProgramCase, ShellCaseCommonTestsMixin
self.assertNotEqual(0, retcode)
@skipIf(sys.platform.startswith('win'), 'This test does not apply on Win')
@skipIf(True, 'to be reenabled when #23623 is merged')
@skipIf(True, 'to be re-enabled when #23623 is merged')
def test_return(self):
self.run_call('cmd.run "echo returnTOmaster"')
jobs = [a for a in self.run_run('jobs.list_jobs')]

@ -114,6 +114,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
if os.path.isdir(venv_dir):
shutil.rmtree(venv_dir)
@skipIf(six.PY3, 'Issue is specific to carbon module, which is PY2-only')
@requires_system_grains
def test_pip_installed_weird_install(self, grains=None):
# First, check to see if this is running on CentOS 5 or MacOS.

@ -494,7 +494,7 @@ class PkgTest(ModuleCase, SaltReturnAssertsMixin):
Need to check to ensure the package has been
installed after the pkg_latest_epoch sls
file has been run. This needs to be broken up into
a seperate method so I can add the requires_salt_modules
a separate method so I can add the requires_salt_modules
decorator to only the pkg.info_installed command.
'''
# Skip test if package manager not available

@ -183,7 +183,7 @@ class TestProgram(six.with_metaclass(TestProgramMeta, object)):
@property
def start_pid(self):
'''PID of the called script prior to deamonizing.'''
'''PID of the called script prior to daemonizing.'''
return self.process.pid if self.process else None
@property

@ -157,6 +157,9 @@ TEST_SUITES = {
'proxy':
{'display_name': 'Proxy',
'path': 'integration/proxy'},
'external_api':
{'display_name': 'ExternalAPIs',
'path': 'integration/externalapi'},
}
@ -408,7 +411,6 @@ class SaltTestsuiteParser(SaltCoverageTestingParser):
default=False,
help='Run salt-api tests'
)
self.test_selection_group.add_option(
'-P',
'--proxy',
@ -418,9 +420,18 @@ class SaltTestsuiteParser(SaltCoverageTestingParser):
default=False,
help='Run salt-proxy tests'
)
self.test_selection_group.add_option(
'--external',
'--external-api',
'--external-api-tests',
dest='external_api',
action='store_true',
default=False,
help='Run venafi runner tests'
)
def validate_options(self):
if self.options.cloud_provider:
if self.options.cloud_provider or self.options.external_api:
# Turn on expensive tests execution
os.environ['EXPENSIVE_TESTS'] = 'True'

@ -43,17 +43,18 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
Test for Get the current system locale
'''
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': True}):
with patch.multiple(localemod,
_parse_dbus_locale=MagicMock(return_value={'LANG': 'A'}),
HAS_DBUS=True):
self.assertEqual('A', localemod.get_locale())
localemod._parse_dbus_locale.assert_called_once_with()
with patch.dict(localemod.__grains__, {'os_family': ['Unknown']}):
with patch.multiple(localemod,
_parse_dbus_locale=MagicMock(return_value={'LANG': 'A'}),
HAS_DBUS=True):
self.assertEqual('A', localemod.get_locale())
localemod._parse_dbus_locale.assert_called_once_with()
with patch.multiple(localemod,
_parse_localectl=MagicMock(return_value={'LANG': 'A'}),
HAS_DBUS=False):
self.assertEqual('A', localemod.get_locale())
localemod._parse_localectl.assert_called_once_with()
with patch.multiple(localemod,
_parse_localectl=MagicMock(return_value={'LANG': 'A'}),
HAS_DBUS=False):
self.assertEqual('A', localemod.get_locale())
localemod._parse_localectl.assert_called_once_with()
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': False}):
with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}):
@ -79,8 +80,9 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin):
Test for Sets the current system locale
'''
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': True}):
with patch.object(localemod, '_localectl_set', return_value=True):
self.assertTrue(localemod.set_locale('l'))
with patch.dict(localemod.__grains__, {'os_family': ['Unknown']}):
with patch.object(localemod, '_localectl_set', return_value=True):
self.assertTrue(localemod.set_locale('l'))
with patch.dict(localemod.__context__, {'salt.utils.systemd.booted': False}):
with patch.dict(localemod.__grains__, {'os_family': ['Gentoo']}):

@ -27,7 +27,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {powercfg: {'__grains__': {'osrelease': 8}}}
query_ouput = '''Subgroup GUID: 238c9fa8-0aad-41ed-83f4-97be242c8f20 (Hibernate)
query_output = '''Subgroup GUID: 238c9fa8-0aad-41ed-83f4-97be242c8f20 (Hibernate)
GUID Alias: SUB_SLEEP
Power Setting GUID: 29f6c1db-86da-48c5-9fdb-f2b67b1f44da (Hibernate after)
GUID Alias: HIBERNATEIDLE
@ -43,7 +43,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can set the monitor timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
powercfg.set_monitor_timeout(0, "dc")
@ -58,7 +58,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can set the disk timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
powercfg.set_disk_timeout(0, "dc")
@ -73,7 +73,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can set the standby timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
powercfg.set_standby_timeout(0, "dc")
@ -88,7 +88,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can set the hibernate timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
powercfg.set_hibernate_timeout(0, "dc")
@ -103,7 +103,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can get the monitor timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
ret = powercfg.get_monitor_timeout()
@ -120,7 +120,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can get the disk timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
ret = powercfg.get_disk_timeout()
@ -137,7 +137,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can get the standby timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
ret = powercfg.get_standby_timeout()
@ -154,7 +154,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can get the hibernate timeout value
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
ret = powercfg.get_hibernate_timeout()
@ -171,7 +171,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can get the hibernate timeout value on windows 7
'''
mock = MagicMock()
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_ouput]
mock.side_effect = ["Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
with patch.dict(powercfg.__grains__, {'osrelease': '7'}):
@ -189,7 +189,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can set the hibernate timeout value
'''
mock = MagicMock()
mock.side_effect = [self.query_ouput]
mock.side_effect = [self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
powercfg.set_hibernate_timeout(0, "dc", scheme="SCHEME_MIN")
@ -203,7 +203,7 @@ class PowerCfgTestCase(TestCase, LoaderModuleMockMixin):
Test to make sure we can get the hibernate timeout value with a specified scheme
'''
mock = MagicMock()
mock.side_effect = [self.query_ouput]
mock.side_effect = [self.query_output]
with patch.dict(powercfg.__salt__, {'cmd.run': mock}):
ret = powercfg.get_hibernate_timeout(scheme="SCHEME_MIN")