Merge branch 'develop' into 43647_fix_InitiatorName_in_comments

This commit is contained in:
Mike Place 2017-11-13 12:20:39 -07:00 committed by GitHub
commit 79b7eb1c33
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
82 changed files with 2598 additions and 666 deletions

4
.github/stale.yml vendored
View File

@ -1,8 +1,8 @@
# Probot Stale configuration file # Probot Stale configuration file
# Number of days of inactivity before an issue becomes stale # Number of days of inactivity before an issue becomes stale
# 900 is approximately 2 years and 5 months # 890 is approximately 2 years and 5 months
daysUntilStale: 900 daysUntilStale: 890
# Number of days of inactivity before a stale issue is closed # Number of days of inactivity before a stale issue is closed
daysUntilClose: 7 daysUntilClose: 7

187
.kitchen.yml Normal file
View File

@ -0,0 +1,187 @@
---
<% vagrant = system('which vagrant 2>/dev/null >/dev/null') %>
<% version = '2017.7.2' %>
<% platformsfile = ENV['SALT_KITCHEN_PLATFORMS'] || '.kitchen/platforms.yml' %>
<% driverfile = ENV['SALT_KITCHEN_DRIVER'] || '.kitchen/driver.yml' %>
<% if File.exists?(driverfile) %>
<%= ERB.new(File.read(driverfile)).result %>
<% else %>
driver:
name: docker
use_sudo: false
privileged: true
username: root
volume:
- /var/run/docker.sock:/docker.sock
cap_add:
- sys_admin
disable_upstart: false
provision_command:
- echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
<% end %>
sudo: false
provisioner:
name: salt_solo
salt_install: bootstrap
salt_version: latest
salt_bootstrap_url: https://bootstrap.saltstack.com
salt_bootstrap_options: -X stable <%= version %>
log_level: info
require_chef: false
remote_states:
name: git://github.com/gtmanfred/salt-jenkins.git
branch: master
repo: git
testingdir: /testing
salt_copy_filter:
- .bundle
- .git
- .gitignore
- .kitchen
- .kitchen.yml
- Gemfile
- Gemfile.lock
- README.rst
- .travis.yml
state_top:
base:
"*":
- git.salt
- kitchen
<% if File.exists?(platformsfile) %>
<%= ERB.new(File.read(platformsfile)).result %>
<% else %>
platforms:
- name: fedora
driver_config:
image: fedora:latest
run_command: /usr/lib/systemd/systemd
provisioner:
salt_bootstrap_options: -X git v<%= version %> >/dev/null
- name: centos-7
driver_config:
run_command: /usr/lib/systemd/systemd
- name: centos-6
driver_config:
run_command: /sbin/init
provision_command:
- yum install -y upstart
provisioner:
salt_bootstrap_options: -P -y -x python2.7 -X git v<%= version %> >/dev/null
- name: ubuntu-rolling
driver_config:
image: ubuntu:rolling
run_command: /lib/systemd/systemd
provisioner:
salt_bootstrap_url: https://raw.githubusercontent.com/saltstack/salt-bootstrap/develop/bootstrap-salt.sh
- name: ubuntu-16.04
driver_config:
run_command: /lib/systemd/systemd
- name: ubuntu-14.04
driver_config:
run_command: /sbin/init
provision_command:
- rm -f /sbin/initctl
- dpkg-divert --local --rename --remove /sbin/initctl
- name: debian-8
driver_config:
run_command: /lib/systemd/systemd
provision_command:
- apt-get install -y dbus
- echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
- name: debian-9
driver_config:
run_command: /lib/systemd/systemd
- name: arch
driver_config:
image: base/archlinux
run_command: /usr/lib/systemd/systemd
provision_command:
- pacman -Syu --noconfirm systemd
- systemctl enable sshd
- echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
provisioner:
salt_bootstrap_options: -X git v<%= version %> >/dev/null
- name: opensuse
driver_config:
run_command: /usr/lib/systemd/systemd
provision_command:
- systemctl enable sshd.service
- echo 'L /run/docker.sock - - - - /docker.sock' > /etc/tmpfiles.d/docker.conf
provisioner:
salt_bootstrap_options: -X git v<%= version %> >/dev/null
<% if vagrant != false %>
- name: windows-2012r2
driver:
box: mwrock/Windows2012R2
communicator: winrm
name: vagrant
gui: true
username: administrator
password: Pass@word1
provisioner:
init_environment: |
Clear-Host
$AddedLocation ="c:\salt"
$Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"
$OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path
$NewPath = $OldPath + ";" + $AddedLocation
Set-ItemProperty -Path "$Reg" -Name PATH -Value $NewPath
salt_bootstrap_url: https://raw.githubusercontent.com/saltstack/salt-bootstrap/develop/bootstrap-salt.ps1
salt_bootstrap_options: ''
- name: windows-2016
driver:
box: mwrock/Windows2016
communicator: winrm
name: vagrant
username: Vagrant
password: vagrant
gui: true
provisioner:
init_environment: |
Clear-Host
$AddedLocation ="c:\salt;c:\salt\bin\Scripts"
$Reg = "Registry::HKLM\System\CurrentControlSet\Control\Session Manager\Environment"
$OldPath = (Get-ItemProperty -Path "$Reg" -Name PATH).Path
$NewPath = $OldPath + ";" + $AddedLocation
Set-ItemProperty -Path "$Reg" -Name PATH -Value $NewPath
salt_bootstrap_url: https://raw.githubusercontent.com/saltstack/salt-bootstrap/develop/bootstrap-salt.ps1
salt_bootstrap_options: ''
<% end %>
<% end %>
suites:
- name: py2
provisioner:
pillars:
top.sls:
base:
"*":
- jenkins
jenkins.sls:
testing_dir: /tmp/kitchen/testing
clone_repo: false
salttesting_namespec: salttesting==2017.6.1
- name: py3
provisioner:
pillars:
top.sls:
base:
"*":
- jenkins
jenkins.sls:
testing_dir: /tmp/kitchen/testing
clone_repo: false
py3: true
salttesting_namespec: salttesting==2017.6.1
verifier:
name: shell
remote_exec: true
sudo: false
live_stream: {}
<% if ENV['TESTOPTS'].nil? %>
command: '$(kitchen) /tmp/kitchen/testing/tests/runtests.py --run-destructive --sysinfo --transport=zeromq --output-columns=80 --ssh --coverage-xml=/tmp/coverage.xml --xml=/tmp/xml-unittests-output'
<% else %>
command: '$(kitchen) /tmp/kitchen/testing/tests/runtests.py --run-destructive --output-columns 80 <%= ENV["TESTOPTS"] %>'
<% end %>

23
Gemfile Normal file
View File

@ -0,0 +1,23 @@
# This file is only used for running the test suite with kitchen-salt.
source "https://rubygems.org"
gem "test-kitchen"
gem "kitchen-salt", :git => 'https://github.com/saltstack/kitchen-salt.git'
gem 'git'
group :docker do
gem 'kitchen-docker', :git => 'https://github.com/test-kitchen/kitchen-docker.git'
end
group :opennebula do
gem 'kitchen-opennebula', :git => 'https://github.com/gtmanfred/kitchen-opennebula.git'
gem 'xmlrpc'
end
group :windows do
gem 'vagrant-wrapper'
gem 'kitchen-vagrant'
gem 'winrm', '~>2.0'
gem 'winrm-fs', '~>1.0'
end

View File

@ -337,7 +337,8 @@
# If the autosign_file is specified, incoming keys specified in the # If the autosign_file is specified, incoming keys specified in the
# autosign_file will be automatically accepted. This is insecure. Regular # autosign_file will be automatically accepted. This is insecure. Regular
# expressions as well as globing lines are supported. # expressions as well as globing lines are supported. The file must be readonly
# except for the owner. Use permissive_pki_access to allow the group write access.
#autosign_file: /etc/salt/autosign.conf #autosign_file: /etc/salt/autosign.conf
# Works like autosign_file, but instead allows you to specify minion IDs for # Works like autosign_file, but instead allows you to specify minion IDs for

View File

@ -311,7 +311,8 @@ syndic_user: salt
# If the autosign_file is specified, incoming keys specified in the # If the autosign_file is specified, incoming keys specified in the
# autosign_file will be automatically accepted. This is insecure. Regular # autosign_file will be automatically accepted. This is insecure. Regular
# expressions as well as globing lines are supported. # expressions as well as globing lines are supported. The file must be readonly
# except for the owner. Use permissive_pki_access to allow the group write access.
#autosign_file: /etc/salt/autosign.conf #autosign_file: /etc/salt/autosign.conf
# Works like autosign_file, but instead allows you to specify minion IDs for # Works like autosign_file, but instead allows you to specify minion IDs for

View File

@ -1278,6 +1278,12 @@ comparison, then by globbing, then by full-string regex matching.
This should still be considered a less than secure option, due to the fact This should still be considered a less than secure option, due to the fact
that trust is based on just the requesting minion id. that trust is based on just the requesting minion id.
.. versionchanged:: Oxygen
For security reasons the file must be readonly except for its owner.
If :conf_master:`permissive_pki_access` is ``True`` the owning group can also
have write access, but if Salt is running as ``root`` it must be a member of that group.
A less strict requirement also existed in previous versions.
.. conf_master:: autoreject_file .. conf_master:: autoreject_file
``autoreject_file`` ``autoreject_file``

View File

@ -46,5 +46,6 @@ returner modules
splunk splunk
sqlite3_return sqlite3_return
syslog_return syslog_return
telegram_return
xmpp_return xmpp_return
zabbix_return zabbix_return

View File

@ -209,6 +209,13 @@ Each direct requisite also has a corresponding requisite_in:
* ``onchanges_in`` * ``onchanges_in``
* ``onfail_in`` * ``onfail_in``
There are several corresponding requisite_any statements:
* ``require_any``
* ``watch_any``
* ``onchanges_any``
* ``onfail_any``
All of the requisites define specific relationships and always work with the All of the requisites define specific relationships and always work with the
dependency logic defined above. dependency logic defined above.
@ -245,6 +252,44 @@ This will add all of the state declarations found in the given sls file. This me
that every state in sls `foo` will be required. This makes it very easy to batch that every state in sls `foo` will be required. This makes it very easy to batch
large groups of states easily in any requisite statement. large groups of states easily in any requisite statement.
.. _requisites-require_any:
require_any
~~~~~~~~~~~
.. versionadded:: Oxygen
The use of ``require_any`` demands that one of the required states executes before the
dependent state. The state containing the ``require_any`` requisite is defined as the
dependent state. The states specified in the ``require_any`` statement are defined as the
required states. If at least one of the required state's execution succeeds, the dependent state
will then execute. If all of the required states' executions fail, the dependent state
will not execute.
.. code-block:: yaml
A:
cmd.run:
- name: echo A
- require_any:
- cmd: B
- cmd: C
- cmd: D
B:
cmd.run:
- name: echo B
C:
cmd.run:
- name: /bin/false
D:
cmd.run:
- name: echo D
In this example `A` will run because at least one of the requirements specified,
`B`, `C`, or `D` will succeed.
.. _requisites-watch: .. _requisites-watch:
watch watch
@ -332,6 +377,50 @@ to Salt ensuring that the service is running.
- name: /etc/ntp.conf - name: /etc/ntp.conf
- source: salt://ntp/files/ntp.conf - source: salt://ntp/files/ntp.conf
watch_any
~~~~~~~~~
.. versionadded:: Oxygen
The state containing the ``watch_any`` requisite is defined as the watching
state. The states specified in the ``watch_any`` statement are defined as the watched
states. When the watched states execute, they will return a dictionary containing
a key named "changes".
If the "result" of any of the watched states is ``True``, the watching state *will
execute normally*, and if all of them are ``False``, the watching state will never run.
This part of ``watch`` mirrors the functionality of the ``require`` requisite.
If the "result" of any of the watched states is ``True`` *and* the "changes"
key contains a populated dictionary (changes occurred in the watched state),
then the ``watch`` requisite can add additional behavior. This additional
behavior is defined by the ``mod_watch`` function within the watching state
module. If the ``mod_watch`` function exists in the watching state module, it
will be called *in addition to* the normal watching state. The return data
from the ``mod_watch`` function is what will be returned to the master in this
case; the return data from the main watching function is discarded.
If the "changes" key contains an empty dictionary, the ``watch`` requisite acts
exactly like the ``require`` requisite (the watching state will execute if
"result" is ``True``, and fail if "result" is ``False`` in the watched state).
.. code-block:: yaml
apache2:
service.running:
- watch_any:
- file: /etc/apache2/sites-available/site1.conf
- file: /etc/apache2/sites-available/site2.conf
file.managed:
- name: /etc/apache2/sites-available/site1.conf
- source: salt://apache2/files/site1.conf
file.managed:
- name: /etc/apache2/sites-available/site2.conf
- source: salt://apache2/files/site2.conf
In this example, the service will be reloaded/restarted if either of the
file.managed states has a result of True and has changes.
.. _requisites-prereq: .. _requisites-prereq:
prereq prereq
@ -423,6 +512,46 @@ The ``onfail`` requisite is applied in the same way as ``require`` as ``watch``:
.. _Issue #22370: https://github.com/saltstack/salt/issues/22370 .. _Issue #22370: https://github.com/saltstack/salt/issues/22370
.. _requisites-onfail_any:
onfail_any
~~~~~~~~~~
.. versionadded:: Oxygen
The ``onfail_any`` requisite allows for reactions to happen strictly as a response
to the failure of at least one other state. This can be used in a number of ways, such as
executing a second attempt to set up a service or begin to execute a separate
thread of states because of a failure.
The ``onfail_any`` requisite is applied in the same way as ``require_any`` and ``watch_any``:
.. code-block:: yaml
primary_mount:
mount.mounted:
- name: /mnt/share
- device: 10.0.0.45:/share
- fstype: nfs
secondary_mount:
mount.mounted:
- name: /mnt/code
- device: 10.0.0.45:/code
- fstype: nfs
backup_mount:
mount.mounted:
- name: /mnt/share
- device: 192.168.40.34:/share
- fstype: nfs
- onfail_any:
- mount: primary_mount
- mount: secondary_mount
In this example, the `backup_mount` will be mounted if either of the
`primary_mount` or `secondary_mount` states results in a failure.
.. _requisites-onchanges: .. _requisites-onchanges:
onchanges onchanges
@ -482,6 +611,41 @@ if any of the watched states changes.
- onchanges: - onchanges:
- file: /etc/myservice/myservice.conf - file: /etc/myservice/myservice.conf
.. _requisites-onchanges_any:
onchanges_any
~~~~~~~~~~~~~
.. versionadded:: Oxygen
The ``onchanges_any`` requisite makes a state only apply if one of the required states
generates changes, and if one of the watched state's "result" is ``True``. This can be
a useful way to execute a post hook after changing aspects of a system.
.. code-block:: yaml
myservice:
pkg.installed:
- name: myservice
- name: yourservice
file.managed:
- name: /etc/myservice/myservice.conf
- source: salt://myservice/files/myservice.conf
- mode: 600
file.managed:
- name: /etc/yourservice/yourservice.conf
- source: salt://yourservice/files/yourservice.conf
- mode: 600
cmd.run:
- name: /usr/libexec/myservice/post-changes-hook.sh
- onchanges_any:
- file: /etc/myservice/myservice.conf
- file: /etc/your_service/yourservice.conf
In this example, the `cmd.run` would be run only if either of the
`file.managed` states generated changes and at least one of the
watched state's "result" is ``True``.
use use
~~~ ~~~

View File

@ -99,7 +99,8 @@ Profile configuration example:
# vagrant_up_timeout: 300 # (seconds) timeout for cmd.run of the "vagrant up" command # vagrant_up_timeout: 300 # (seconds) timeout for cmd.run of the "vagrant up" command
# vagrant_provider: '' # option for "vagrant up" like: "--provider vmware_fusion" # vagrant_provider: '' # option for "vagrant up" like: "--provider vmware_fusion"
# ssh_host: None # "None" means try to find the routable IP address from "ifconfig" # ssh_host: None # "None" means try to find the routable IP address from "ifconfig"
# target_network: None # Expected CIDR address of your bridged network # ssh_username: '' # also required when ssh_host is used.
# target_network: None # Expected CIDR address range of your bridged network
# force_minion_config: false # Set "true" to re-purpose an existing VM # force_minion_config: false # Set "true" to re-purpose an existing VM
The machine can now be created and configured with the following command: The machine can now be created and configured with the following command:

View File

@ -98,16 +98,16 @@ Mocking Loader Modules
Salt loader modules use a series of globally available dunder variables, Salt loader modules use a series of globally available dunder variables,
``__salt__``, ``__opts__``, ``__pillar__``, etc. To facilitate testing these ``__salt__``, ``__opts__``, ``__pillar__``, etc. To facilitate testing these
modules a mixin class was created, ``LoaderModuleMockMixin`` which can be found modules a mixin class was created, ``LoaderModuleMockMixin`` which can be found
in ``tests/support/mixins.py``. The reason for the exitance of this class is in ``tests/support/mixins.py``. The reason for the existence of this class is
because, historycally, and because it was easier, one would add these dunder because historically, and because it was easier, one would add these dunder
variables directly on the imported module. This however, introduces unexpected variables directly on the imported module. This however, introduces unexpected
behavior when running the full test suite since those attributes would not be behavior when running the full test suite since those attributes would not be
removed once we were done testing the module and would therefor leak to other removed once we were done testing the module and would therefore leak to other
modules being tested with unpredictable results. This is the kind of work that modules being tested with unpredictable results. This is the kind of work that
should be defered to mock, and that's exactly what this mixin class does. should be deferred to mock, and that's exactly what this mixin class does.
As an example, if one needs to specify some options which should be available As an example, if one needs to specify some options which should be available
to the module being tests one should do: to the module being tested one should do:
.. code-block:: python .. code-block:: python
@ -173,10 +173,10 @@ Consider this more extensive example from
return {libcloud_dns: module_globals} return {libcloud_dns: module_globals}
What happens on the above example is that, we mock a call to What happens in the above example is we mock a call to
`__salt__['config.option']` to return the configuration needed for the `__salt__['config.option']` to return the configuration needed for the
execution of the tests. Additionally, if the ``libcloud`` library is not execution of the tests. Additionally, if the ``libcloud`` library is not
available, since that's not actually part of whats being tested, we mocked that available, since that's not actually part of what's being tested, we mocked that
import by patching ``sys.modules`` when tests are running. import by patching ``sys.modules`` when tests are running.
@ -245,7 +245,7 @@ To understand how one might integrate Mock into writing a unit test for Salt,
let's imagine a scenario in which we're testing an execution module that's let's imagine a scenario in which we're testing an execution module that's
designed to operate on a database. Furthermore, let's imagine two separate designed to operate on a database. Furthermore, let's imagine two separate
methods, here presented in pseduo-code in an imaginary execution module called methods, here presented in pseudo-code in an imaginary execution module called
'db.py. 'db.py'.
.. code-block:: python .. code-block:: python

View File

@ -413,7 +413,7 @@ signed certificates. :ref:`Here<new-pywinrm>` for more information.
DigitalOcean DigitalOcean
------------ ------------
The DigitalOcean driver has been renamed to conform to the companies name. The The DigitalOcean driver has been renamed to conform to the company name. The
new driver name is ``digitalocean``. The old name ``digital_ocean`` and a new driver name is ``digitalocean``. The old name ``digital_ocean`` and a
short one ``do`` will still be supported through virtual aliases, this is mostly short one ``do`` will still be supported through virtual aliases, this is mostly
cosmetic. cosmetic.
@ -962,6 +962,54 @@ check the configuration for the correct format and only load if the validation p
beacons: beacons:
wtmp: [] wtmp: []
New requisites available in state compiler
------------------------------------------
- ``require_any``
The use of ``require_any`` demands that one of the required states executes before the
dependent state. The state containing the ``require_any`` requisite is defined as the
dependent state. The states specified in the ``require_any`` statement are defined as the
required states. If at least one of the required state's execution succeeds, the dependent state
will then execute. If all of the required states' executions fail, the dependent state
will not execute.
- ``watch_any``
The state containing the ``watch_any`` requisite is defined as the watching
state. The states specified in the ``watch_any`` statement are defined as the watched
states. When the watched states execute, they will return a dictionary containing
a key named "changes".
If the "result" of any of the watched states is ``True``, the watching state *will
execute normally*, and if all of them are ``False``, the watching state will never run.
This part of ``watch`` mirrors the functionality of the ``require`` requisite.
If the "result" of any of the watched states is ``True`` *and* the "changes"
key contains a populated dictionary (changes occurred in the watched state),
then the ``watch`` requisite can add additional behavior. This additional
behavior is defined by the ``mod_watch`` function within the watching state
module. If the ``mod_watch`` function exists in the watching state module, it
will be called *in addition to* the normal watching state. The return data
from the ``mod_watch`` function is what will be returned to the master in this
case; the return data from the main watching function is discarded.
If the "changes" key contains an empty dictionary, the ``watch`` requisite acts
exactly like the ``require`` requisite (the watching state will execute if
"result" is ``True``, and fail if "result" is ``False`` in the watched state).
- ``onchanges_any``
The ``onchanges_any`` requisite makes a state only apply if one of the required states
generates changes, and if one of the watched state's "result" is ``True``. This can be
a useful way to execute a post hook after changing aspects of a system.
- ``onfail_any``
The ``onfail_any`` requisite allows for reactions to happen strictly as a response
to the failure of at least one other state. This can be used in a number of ways, such as
executing a second attempt to set up a service or begin to execute a separate
thread of states because of a failure.
The ``onfail_any`` requisite is applied in the same way as ``require_any`` and ``watch_any``:
Deprecations Deprecations
------------ ------------

View File

@ -29,7 +29,6 @@ import salt.transport.client
import salt.utils.args import salt.utils.args
import salt.utils.dictupdate import salt.utils.dictupdate
import salt.utils.files import salt.utils.files
import salt.utils.master
import salt.utils.minions import salt.utils.minions
import salt.utils.user import salt.utils.user
import salt.utils.versions import salt.utils.versions
@ -444,6 +443,8 @@ class LoadAuth(object):
auth_ret = True auth_ret = True
if auth_ret is not True: if auth_ret is not True:
# Avoid a circular import
import salt.utils.master
auth_list = salt.utils.master.get_values_of_matching_keys( auth_list = salt.utils.master.get_values_of_matching_keys(
self.opts['publisher_acl'], auth_ret) self.opts['publisher_acl'], auth_ret)
if not auth_list: if not auth_list:

View File

@ -13,10 +13,12 @@ import salt.utils.stringutils
from salt.utils.args import yamlify_arg from salt.utils.args import yamlify_arg
from salt.utils.verify import verify_log from salt.utils.verify import verify_log
from salt.exceptions import ( from salt.exceptions import (
SaltClientError, EauthAuthenticationError,
SaltInvocationError, LoaderError,
EauthAuthenticationError SaltClientError,
) SaltInvocationError,
SaltSystemExit
)
# Import 3rd-party libs # Import 3rd-party libs
from salt.ext import six from salt.ext import six
@ -167,8 +169,8 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
out = 'progress' out = 'progress'
try: try:
self._progress_ret(progress, out) self._progress_ret(progress, out)
except salt.exceptions.LoaderError as exc: except LoaderError as exc:
raise salt.exceptions.SaltSystemExit(exc) raise SaltSystemExit(exc)
if 'return_count' not in progress: if 'return_count' not in progress:
ret.update(progress) ret.update(progress)
self._progress_end(out) self._progress_end(out)
@ -251,7 +253,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
try: try:
batch = salt.cli.batch.Batch(self.config, eauth=eauth, quiet=True) batch = salt.cli.batch.Batch(self.config, eauth=eauth, quiet=True)
except salt.exceptions.SaltClientError as exc: except SaltClientError:
sys.exit(2) sys.exit(2)
ret = {} ret = {}
@ -265,7 +267,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
try: try:
self.config['batch'] = self.options.batch self.config['batch'] = self.options.batch
batch = salt.cli.batch.Batch(self.config, eauth=eauth, parser=self.options) batch = salt.cli.batch.Batch(self.config, eauth=eauth, parser=self.options)
except salt.exceptions.SaltClientError as exc: except SaltClientError:
# We will print errors to the console further down the stack # We will print errors to the console further down the stack
sys.exit(1) sys.exit(1)
# Printing the output is already taken care of in run() itself # Printing the output is already taken care of in run() itself
@ -345,9 +347,9 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
if not hasattr(self, 'progress_bar'): if not hasattr(self, 'progress_bar'):
try: try:
self.progress_bar = salt.output.get_progress(self.config, out, progress) self.progress_bar = salt.output.get_progress(self.config, out, progress)
except Exception as exc: except Exception:
raise salt.exceptions.LoaderError('\nWARNING: Install the `progressbar` python package. ' raise LoaderError('\nWARNING: Install the `progressbar` python package. '
'Requested job was still run but output cannot be displayed.\n') 'Requested job was still run but output cannot be displayed.\n')
salt.output.update_progress(self.config, progress, self.progress_bar, out) salt.output.update_progress(self.config, progress, self.progress_bar, out)
def _output_ret(self, ret, out): def _output_ret(self, ret, out):

View File

@ -258,19 +258,30 @@ def create(vm_):
wol_host = config.get_cloud_config_value( wol_host = config.get_cloud_config_value(
'wol_sender_node', vm_, __opts__, default='') 'wol_sender_node', vm_, __opts__, default='')
if wol_mac and wol_host: if wol_mac and wol_host:
log.info('sending wake-on-lan to %s using node %s', good_ping = False
wol_mac, wol_host) ssh_host = config.get_cloud_config_value(
local = salt.client.LocalClient() 'ssh_host', vm_, __opts__, default='')
if isinstance(wol_mac, six.string_types): if ssh_host:
wol_mac = [wol_mac] # a smart user may have passed more params log.info('trying to ping %s', ssh_host)
ret = local.cmd(wol_host, 'network.wol', wol_mac) count = 'n' if salt.utils.platform.is_windows() else 'c'
log.info('network.wol returned value %s', ret) cmd = 'ping -{} 1 {}'.format(count, ssh_host)
if ret and ret[wol_host]: good_ping = __salt__['cmd.retcode'](cmd) == 0
sleep_time = config.get_cloud_config_value( if good_ping:
'wol_boot_wait', vm_, __opts__, default=30) log.info('successful ping.')
if sleep_time > 0.0: else:
log.info('delaying %d seconds for boot', sleep_time) log.info('sending wake-on-lan to %s using node %s',
time.sleep(sleep_time) wol_mac, wol_host)
local = salt.client.LocalClient()
if isinstance(wol_mac, six.string_types):
wol_mac = [wol_mac] # a smart user may have passed more params
ret = local.cmd(wol_host, 'network.wol', wol_mac)
log.info('network.wol returned value %s', ret)
if ret and ret[wol_host]:
sleep_time = config.get_cloud_config_value(
'wol_boot_wait', vm_, __opts__, default=30)
if sleep_time > 0.0:
log.info('delaying %d seconds for boot', sleep_time)
time.sleep(sleep_time)
log.info('Provisioning existing machine %s', vm_['name']) log.info('Provisioning existing machine %s', vm_['name'])
ret = __utils__['cloud.bootstrap'](vm_, __opts__) ret = __utils__['cloud.bootstrap'](vm_, __opts__)
else: else:

View File

@ -30,7 +30,8 @@ if six.PY3:
import ipaddress import ipaddress
else: else:
import salt.ext.ipaddress as ipaddress import salt.ext.ipaddress as ipaddress
from salt.exceptions import SaltCloudException, SaltCloudSystemExit from salt.exceptions import SaltCloudException, SaltCloudSystemExit, \
SaltInvocationError
# Get logging started # Get logging started
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -229,18 +230,22 @@ def create(vm_):
kwarg={'network_mask': network_mask, kwarg={'network_mask': network_mask,
'get_private_key': True})[host] 'get_private_key': True})[host]
with tempfile.NamedTemporaryFile() as pks: with tempfile.NamedTemporaryFile() as pks:
if 'private_key' not in vm_ and ret.get('private_key', False): if 'private_key' not in vm_ and ret and ret.get('private_key', False):
pks.write(ret['private_key']) pks.write(ret['private_key'])
pks.flush() pks.flush()
log.debug('wrote private key to %s', pks.name) log.debug('wrote private key to %s', pks.name)
vm_['key_filename'] = pks.name vm_['key_filename'] = pks.name
if 'ssh_host' not in vm_: if 'ssh_host' not in vm_:
vm_.setdefault('ssh_username', ret['ssh_username']) try:
if ret.get('ip_address'): vm_.setdefault('ssh_username', ret['ssh_username'])
vm_['ssh_host'] = ret['ip_address'] if ret.get('ip_address'):
else: # if probe failed or not used, use Vagrant's reported ssh info vm_['ssh_host'] = ret['ip_address']
vm_['ssh_host'] = ret['ssh_host'] else: # if probe failed or not used, use Vagrant's reported ssh info
vm_.setdefault('ssh_port', ret['ssh_port']) vm_['ssh_host'] = ret['ssh_host']
vm_.setdefault('ssh_port', ret['ssh_port'])
except (KeyError, TypeError):
raise SaltInvocationError(
'Insufficient SSH addressing information for {}'.format(name))
log.info('Provisioning machine %s as node %s using ssh %s', log.info('Provisioning machine %s as node %s using ssh %s',
machine, name, vm_['ssh_host']) machine, name, vm_['ssh_host'])
@ -288,29 +293,32 @@ def destroy(name, call=None):
transport=opts['transport'] transport=opts['transport']
) )
my_info = _get_my_info(name) my_info = _get_my_info(name)
profile_name = my_info[name]['profile'] if my_info:
profile = opts['profiles'][profile_name] profile_name = my_info[name]['profile']
host = profile['host'] profile = opts['profiles'][profile_name]
local = salt.client.LocalClient() host = profile['host']
ret = local.cmd(host, 'vagrant.destroy', [name]) local = salt.client.LocalClient()
ret = local.cmd(host, 'vagrant.destroy', [name])
if ret[host]: if ret[host]:
__utils__['cloud.fire_event']( __utils__['cloud.fire_event'](
'event', 'event',
'destroyed instance', 'destroyed instance',
'salt/cloud/{0}/destroyed'.format(name), 'salt/cloud/{0}/destroyed'.format(name),
args={'name': name}, args={'name': name},
sock_dir=opts['sock_dir'], sock_dir=opts['sock_dir'],
transport=opts['transport'] transport=opts['transport']
) )
if opts.get('update_cachedir', False) is True: if opts.get('update_cachedir', False) is True:
__utils__['cloud.delete_minion_cachedir']( __utils__['cloud.delete_minion_cachedir'](
name, __active_provider_name__.split(':')[0], opts) name, __active_provider_name__.split(':')[0], opts)
return {'Destroyed': '{0} was destroyed.'.format(name)} return {'Destroyed': '{0} was destroyed.'.format(name)}
else:
return {'Error': 'Error destroying {}'.format(name)}
else: else:
return {'Error': 'Error destroying {}'.format(name)} return {'Error': 'No response from {}. Cannot destroy.'.format(name)}
# noinspection PyTypeChecker # noinspection PyTypeChecker

View File

@ -283,46 +283,23 @@ class AutoKey(object):
return True return True
# After we've ascertained we're not on windows # After we've ascertained we're not on windows
try: groups = salt.utils.user.get_gid_list(self.opts['user'], include_default=False)
user = self.opts['user']
pwnam = pwd.getpwnam(user)
uid = pwnam[2]
gid = pwnam[3]
groups = salt.utils.user.get_gid_list(user, include_default=False)
except KeyError:
log.error(
'Failed to determine groups for user {0}. The user is not '
'available.\n'.format(
user
)
)
return False
fmode = os.stat(filename) fmode = os.stat(filename)
if os.getuid() == 0: if stat.S_IWOTH & fmode.st_mode:
if fmode.st_uid == uid or fmode.st_gid != gid: # don't allow others to write to the file
return True return False
elif self.opts.get('permissive_pki_access', False) \
and fmode.st_gid in groups: if stat.S_IWGRP & fmode.st_mode:
return True # if the group has write access only allow with permissive_pki_access
else: if not self.opts.get('permissive_pki_access', False):
if stat.S_IWOTH & fmode.st_mode: return False
# don't allow others to write to the file elif os.getuid() == 0 and fmode.st_gid not in groups:
# if salt is root it has to be in the group that has write access
# this gives the group 'permission' to have write access
return False return False
# check group flags return True
if self.opts.get('permissive_pki_access', False) and stat.S_IWGRP & fmode.st_mode:
return True
elif stat.S_IWGRP & fmode.st_mode:
return False
# check if writable by group or other
if not (stat.S_IWGRP & fmode.st_mode or
stat.S_IWOTH & fmode.st_mode):
return True
return False
def check_signing_file(self, keyid, signing_file): def check_signing_file(self, keyid, signing_file):
''' '''

View File

@ -6,6 +6,14 @@ HTTP Logstash engine
An engine that reads messages from the salt event bus and pushes An engine that reads messages from the salt event bus and pushes
them onto a logstash endpoint via HTTP requests. them onto a logstash endpoint via HTTP requests.
.. versionchanged:: Oxygen
.. note::
By default, this engine take everything from the Salt bus and exports into
Logstash.
For a better selection of the events that you want to publish, you can use
the ``tags`` and ``funs`` options.
:configuration: Example configuration :configuration: Example configuration
.. code-block:: yaml .. code-block:: yaml
@ -47,11 +55,9 @@ _HEADERS = {'Content-Type': 'application/json'}
def _logstash(url, data): def _logstash(url, data):
''' '''
Issues HTTP queries to the logstash server. Issues HTTP queries to the logstash server.
''' '''
result = salt.utils.http.query( result = salt.utils.http.query(
url, url,
'POST', 'POST',
@ -69,11 +75,25 @@ def _logstash(url, data):
def start(url, funs=None, tags=None): def start(url, funs=None, tags=None):
'''
Listen to salt events and forward them to logstash via HTTP.
''' '''
Listen to salt events and forward them to logstash.
url
The Logstash endpoint.
funs: ``None``
A list of functions to be compared against, looking into the ``fun``
field from the event data. This option helps to select the events
generated by one or more functions.
If an event does not have the ``fun`` field in the data section, it
will be published. For a better selection, consider using the ``tags``
option.
By default, this option accepts any event to be submitted to Logstash.
tags: ``None``
A list of pattern to compare the event tag against.
By default, this option accepts any event to be submitted to Logstash.
'''
if __opts__.get('id').endswith('_master'): if __opts__.get('id').endswith('_master'):
instance = 'master' instance = 'master'
else: else:
@ -82,9 +102,8 @@ def start(url, funs=None, tags=None):
sock_dir=__opts__['sock_dir'], sock_dir=__opts__['sock_dir'],
transport=__opts__['transport'], transport=__opts__['transport'],
opts=__opts__) opts=__opts__)
while True: while True:
event = event_bus.get_event(tag='salt/job', full=True) event = event_bus.get_event(full=True)
if event: if event:
publish = True publish = True
if isinstance(tags, list) and len(tags) > 0: if isinstance(tags, list) and len(tags) > 0:
@ -93,7 +112,7 @@ def start(url, funs=None, tags=None):
if fnmatch.fnmatch(event['tag'], tag): if fnmatch.fnmatch(event['tag'], tag):
found_match = True found_match = True
publish = found_match publish = found_match
if funs: if funs and 'fun' in event['data']:
if not event['data']['fun'] in funs: if not event['data']['fun'] in funs:
publish = False publish = False
if publish: if publish:

View File

@ -448,10 +448,15 @@ def _bsd_memdata(osdata):
sysctl = salt.utils.path.which('sysctl') sysctl = salt.utils.path.which('sysctl')
if sysctl: if sysctl:
mem = __salt__['cmd.run']('{0} -n hw.physmem'.format(sysctl)) mem = __salt__['cmd.run']('{0} -n hw.physmem'.format(sysctl))
swap_total = __salt__['cmd.run']('{0} -n vm.swap_total'.format(sysctl))
if osdata['kernel'] == 'NetBSD' and mem.startswith('-'): if osdata['kernel'] == 'NetBSD' and mem.startswith('-'):
mem = __salt__['cmd.run']('{0} -n hw.physmem64'.format(sysctl)) mem = __salt__['cmd.run']('{0} -n hw.physmem64'.format(sysctl))
grains['mem_total'] = int(mem) // 1024 // 1024 grains['mem_total'] = int(mem) // 1024 // 1024
if osdata['kernel'] == 'OpenBSD':
swapctl = salt.utils.path.which('swapctl')
swap_total = __salt__['cmd.run']('{0} -sk'.format(swapctl)).split(' ')[1]
else:
swap_total = __salt__['cmd.run']('{0} -n vm.swap_total'.format(sysctl))
grains['swap_total'] = int(swap_total) // 1024 // 1024 grains['swap_total'] = int(swap_total) // 1024 // 1024
return grains return grains

View File

@ -70,10 +70,8 @@ def _zfs_support():
on_supported_platform = _check_retcode('ls /sys/module/zfs') on_supported_platform = _check_retcode('ls /sys/module/zfs')
# NOTE: fallback to zfs-fuse if needed # NOTE: fallback to zfs-fuse if needed
if not on_supported_platform: if not on_supported_platform and salt.utils.path.which('zfs-fuse'):
_zfs_fuse = lambda f: __salt__['service.' + f]('zfs-fuse') on_supported_platform = True
if _zfs_fuse('available') and (_zfs_fuse('status') or _zfs_fuse('start')):
on_supported_platform = True
# Additional check for the zpool command # Additional check for the zpool command
if on_supported_platform and salt.utils.path.which('zpool'): if on_supported_platform and salt.utils.path.which('zpool'):

View File

@ -1244,7 +1244,7 @@ class Minion(MinionBase):
) )
modules_max_memory = True modules_max_memory = True
old_mem_limit = resource.getrlimit(resource.RLIMIT_AS) old_mem_limit = resource.getrlimit(resource.RLIMIT_AS)
rss, vms = psutil.Process(os.getpid()).memory_info() rss, vms = psutil.Process(os.getpid()).memory_info()[:2]
mem_limit = rss + vms + self.opts[u'modules_max_memory'] mem_limit = rss + vms + self.opts[u'modules_max_memory']
resource.setrlimit(resource.RLIMIT_AS, (mem_limit, mem_limit)) resource.setrlimit(resource.RLIMIT_AS, (mem_limit, mem_limit))
elif self.opts.get(u'modules_max_memory', -1) > 0: elif self.opts.get(u'modules_max_memory', -1) > 0:
@ -1665,7 +1665,25 @@ class Minion(MinionBase):
This method should be used as a threading target, start the actual This method should be used as a threading target, start the actual
minion side execution. minion side execution.
''' '''
fn_ = os.path.join(minion_instance.proc_dir, data[u'jid'])
if opts[u'multiprocessing'] and not salt.utils.platform.is_windows():
# Shutdown the multiprocessing before daemonizing
salt.log.setup.shutdown_multiprocessing_logging()
salt.utils.process.daemonize_if(opts)
# Reconfigure multiprocessing logging after daemonizing
salt.log.setup.setup_multiprocessing_logging()
salt.utils.process.appendproctitle(u'{0}._thread_multi_return {1}'.format(cls.__name__, data[u'jid'])) salt.utils.process.appendproctitle(u'{0}._thread_multi_return {1}'.format(cls.__name__, data[u'jid']))
sdata = {u'pid': os.getpid()}
sdata.update(data)
log.info(u'Starting a new job with PID %s', sdata[u'pid'])
with salt.utils.files.fopen(fn_, u'w+b') as fp_:
fp_.write(minion_instance.serial.dumps(sdata))
multifunc_ordered = opts.get(u'multifunc_ordered', False) multifunc_ordered = opts.get(u'multifunc_ordered', False)
num_funcs = len(data[u'fun']) num_funcs = len(data[u'fun'])
if multifunc_ordered: if multifunc_ordered:

View File

@ -1392,7 +1392,7 @@ def list_pkgs(versions_as_list=False,
version_num) version_num)
# Check for virtual packages. We need dctrl-tools for this. # Check for virtual packages. We need dctrl-tools for this.
if not removed: if not removed and not HAS_APT:
try: try:
virtpkgs_all = _get_virtual() virtpkgs_all = _get_virtual()
except CommandExecutionError as cee: except CommandExecutionError as cee:

View File

@ -513,16 +513,16 @@ def tar(options, tarfile, sources=None, dest=None,
.. code-block:: bash .. code-block:: bash
salt '*' archive.tar -cjvf /tmp/salt.tar.bz2 {{grains.saltpath}} template=jinja salt '*' archive.tar cjvf /tmp/salt.tar.bz2 {{grains.saltpath}} template=jinja
CLI Examples: CLI Examples:
.. code-block:: bash .. code-block:: bash
# Create a tarfile # Create a tarfile
salt '*' archive.tar -cjvf /tmp/tarfile.tar.bz2 /tmp/file_1,/tmp/file_2 salt '*' archive.tar cjvf /tmp/tarfile.tar.bz2 /tmp/file_1,/tmp/file_2
# Create a tarfile using globbing (2017.7.0 and later) # Create a tarfile using globbing (2017.7.0 and later)
salt '*' archive.tar -cjvf /tmp/tarfile.tar.bz2 '/tmp/file_*' salt '*' archive.tar cjvf /tmp/tarfile.tar.bz2 '/tmp/file_*'
# Unpack a tarfile # Unpack a tarfile
salt '*' archive.tar xf foo.tar dest=/target/directory salt '*' archive.tar xf foo.tar dest=/target/directory
''' '''

View File

@ -131,7 +131,8 @@ _TERM_FIELDS = {
'flattened': False, 'flattened': False,
'flattened_addr': None, 'flattened_addr': None,
'flattened_saddr': None, 'flattened_saddr': None,
'flattened_daddr': None 'flattened_daddr': None,
'priority': None
} }
# IP-type fields # IP-type fields
@ -746,6 +747,7 @@ def get_term_config(platform,
- flattened_addr - flattened_addr
- flattened_saddr - flattened_saddr
- flattened_daddr - flattened_daddr
- priority
.. note:: .. note::
The following fields can be also a single value and a list of values: The following fields can be also a single value and a list of values:

View File

@ -1645,12 +1645,12 @@ def _mkstemp_copy(path,
def _starts_till(src, probe, strip_comments=True): def _starts_till(src, probe, strip_comments=True):
''' '''
Returns True if src and probe at least begins till some point. Returns True if src and probe at least matches at the beginning till some point.
''' '''
def _strip_comments(txt): def _strip_comments(txt):
''' '''
Strip possible comments. Strip possible comments.
Usually commends are one or two symbols Usually comments are one or two symbols at the beginning of the line, separated with space
''' '''
buff = txt.split(" ", 1) buff = txt.split(" ", 1)
return len(buff) == 2 and len(buff[0]) < 2 and buff[1] or txt return len(buff) == 2 and len(buff[0]) < 2 and buff[1] or txt
@ -1714,6 +1714,8 @@ def _assert_occurrence(src, probe, target, amount=1):
if msg: if msg:
raise CommandExecutionError('Found {0} expected occurrences in "{1}" expression'.format(msg, target)) raise CommandExecutionError('Found {0} expected occurrences in "{1}" expression'.format(msg, target))
return occ
def _get_line_indent(src, line, indent): def _get_line_indent(src, line, indent):
''' '''
@ -1777,9 +1779,9 @@ def line(path, content=None, match=None, mode=None, location=None,
location location
Defines where to place content in the line. Note this option is only Defines where to place content in the line. Note this option is only
used when ``mode=insert`` or ``mode=ensure`` is specified. If a location used when ``mode=insert`` is specified. If a location is passed in, it
is passed in, it takes precedence over both the ``before`` and ``after`` takes precedence over both the ``before`` and ``after`` kwargs. Valid
kwargs. Valid locations are:`` locations are:
- start - start
Place the content at the beginning of the file. Place the content at the beginning of the file.
@ -1849,10 +1851,10 @@ def line(path, content=None, match=None, mode=None, location=None,
# We've set the content to be empty in the function params but we want to make sure # We've set the content to be empty in the function params but we want to make sure
# it gets passed when needed. Feature #37092 # it gets passed when needed. Feature #37092
modeswithemptycontent = ['delete'] empty_content_modes = ['delete']
if mode not in modeswithemptycontent and content is None: if mode not in empty_content_modes and content is None:
raise CommandExecutionError('Content can only be empty if mode is {0}'.format(modeswithemptycontent)) raise CommandExecutionError('Content can only be empty if mode is "{0}"'.format(', '.join(empty_content_modes)))
del modeswithemptycontent del empty_content_modes
# Before/after has privilege. If nothing defined, match is used by content. # Before/after has privilege. If nothing defined, match is used by content.
if before is None and after is None and not match: if before is None and after is None and not match:
@ -1884,13 +1886,13 @@ def line(path, content=None, match=None, mode=None, location=None,
_assert_occurrence(body, after, 'after') _assert_occurrence(body, after, 'after')
out = [] out = []
lines = body.split(os.linesep) lines = body.split(os.linesep)
for idx in range(len(lines)): in_range = False
_line = lines[idx] for line in lines:
if _line.find(before) > -1 and idx <= len(lines) and lines[idx - 1].find(after) > -1: if line.find(after) > -1:
out.append(_get_line_indent(_line, content, indent)) in_range = True
out.append(_line) elif line.find(before) > -1 and in_range:
else: out.append(_get_line_indent(line, content, indent))
out.append(_line) out.append(line)
body = os.linesep.join(out) body = os.linesep.join(out)
if before and not after: if before and not after:
@ -1910,103 +1912,79 @@ def line(path, content=None, match=None, mode=None, location=None,
_assert_occurrence(body, after, 'after') _assert_occurrence(body, after, 'after')
out = [] out = []
lines = body.split(os.linesep) lines = body.split(os.linesep)
for idx in range(len(lines)): for idx, _line in enumerate(lines):
_line = lines[idx]
out.append(_line) out.append(_line)
cnd = _get_line_indent(_line, content, indent) cnd = _get_line_indent(_line, content, indent)
if _line.find(after) > -1: # No duplicates or append, if "after" is the last line
# No dupes or append, if "after" is the last line if (_line.find(after) > -1 and
if (idx < len(lines) and _starts_till(lines[idx + 1], cnd) < 0) or idx + 1 == len(lines): (lines[((idx + 1) < len(lines)) and idx + 1 or idx].strip() != cnd or
out.append(cnd) idx + 1 == len(lines))):
out.append(cnd)
body = os.linesep.join(out) body = os.linesep.join(out)
else: else:
if location == 'start': if location == 'start':
body = ''.join([content, body]) body = os.linesep.join((content, body))
elif location == 'end': elif location == 'end':
body = ''.join([body, _get_line_indent(body[-1], content, indent) if body else content]) body = os.linesep.join((body, _get_line_indent(body[-1], content, indent) if body else content))
elif mode == 'ensure': elif mode == 'ensure':
after = after and after.strip() after = after and after.strip()
before = before and before.strip() before = before and before.strip()
if location: if before and after:
found = False _assert_occurrence(body, before, 'before')
_assert_occurrence(body, after, 'after')
is_there = bool(body.count(content))
if not is_there:
out = []
body = body.split(os.linesep)
for idx, line in enumerate(body):
out.append(line)
if line.find(content) > -1:
is_there = True
if not is_there:
if idx < (len(body) - 1) and line.find(after) > -1 and body[idx + 1].find(before) > -1:
out.append(content)
elif line.find(after) > -1:
raise CommandExecutionError('Found more than one line between '
'boundaries "before" and "after".')
body = os.linesep.join(out)
elif before and not after:
_assert_occurrence(body, before, 'before')
body = body.split(os.linesep)
out = [] out = []
if body: for idx in range(len(body)):
for file_line in body.split(os.linesep): if body[idx].find(before) > -1:
if file_line.find(match) > -1 and not file_line == content: prev = (idx > 0 and idx or 1) - 1
out.append(_get_line_indent(file_line, content, indent)) out.append(_get_line_indent(body[idx], content, indent))
found = True if _starts_till(out[prev], content) > -1:
elif file_line == content: del out[prev]
out.append(file_line) out.append(body[idx])
found = True body = os.linesep.join(out)
else:
out.append(file_line)
body = os.linesep.join(out)
if not found:
if location == 'start':
body = os.linesep.join([content, body])
elif location == 'end':
body = os.linesep.join([body, _get_line_indent(body[-1], content, indent) if body else content])
else:
if before and after:
_assert_occurrence(body, before, 'before')
_assert_occurrence(body, after, 'after')
a_idx = b_idx = -1 elif not before and after:
idx = 0 _assert_occurrence(body, after, 'after')
body = body.split(os.linesep) body = body.split(os.linesep)
for _line in body: skip = None
idx += 1 out = []
if _line.find(before) > -1 and b_idx < 0: for idx in range(len(body)):
b_idx = idx if skip != body[idx]:
if _line.find(after) > -1 and a_idx < 0:
a_idx = idx
# Add
if not b_idx - a_idx - 1:
body = body[:a_idx] + [content] + body[b_idx - 1:]
elif b_idx - a_idx - 1 == 1:
if _starts_till(body[a_idx:b_idx - 1][0], content) > -1:
body[a_idx] = _get_line_indent(body[a_idx - 1], content, indent)
else:
raise CommandExecutionError('Found more than one line between boundaries "before" and "after".')
body = os.linesep.join(body)
elif before and not after:
_assert_occurrence(body, before, 'before')
body = body.split(os.linesep)
out = []
for idx in range(len(body)):
if body[idx].find(before) > -1:
prev = (idx > 0 and idx or 1) - 1
out.append(_get_line_indent(body[prev], content, indent))
if _starts_till(out[prev], content) > -1:
del out[prev]
out.append(body[idx]) out.append(body[idx])
body = os.linesep.join(out)
elif not before and after: if body[idx].find(after) > -1:
_assert_occurrence(body, after, 'after') next_line = idx + 1 < len(body) and body[idx + 1] or None
body = body.split(os.linesep) if next_line is not None and _starts_till(next_line, content) > -1:
skip = None skip = next_line
out = [] out.append(_get_line_indent(body[idx], content, indent))
for idx in range(len(body)): body = os.linesep.join(out)
if skip != body[idx]:
out.append(body[idx])
if body[idx].find(after) > -1: else:
next_line = idx + 1 < len(body) and body[idx + 1] or None raise CommandExecutionError("Wrong conditions? "
if next_line is not None and _starts_till(next_line, content) > -1: "Unable to ensure line without knowing "
skip = next_line "where to put it before and/or after.")
out.append(_get_line_indent(body[idx], content, indent))
body = os.linesep.join(out)
else:
raise CommandExecutionError("Wrong conditions? "
"Unable to ensure line without knowing "
"where to put it before and/or after.")
changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest() changed = body_before != hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest()

View File

@ -585,7 +585,8 @@ def _parse_members(settype, members):
def _parse_member(settype, member, strict=False): def _parse_member(settype, member, strict=False):
subtypes = settype.split(':')[1].split(',') subtypes = settype.split(':')[1].split(',')
parts = member.split(' ') all_parts = member.split(' ', 1)
parts = all_parts[0].split(',')
parsed_member = [] parsed_member = []
for i in range(len(subtypes)): for i in range(len(subtypes)):
@ -610,8 +611,8 @@ def _parse_member(settype, member, strict=False):
parsed_member.append(part) parsed_member.append(part)
if len(parts) > len(subtypes): if len(all_parts) > 1:
parsed_member.append(' '.join(parts[len(subtypes):])) parsed_member.append(all_parts[1])
return parsed_member return parsed_member

View File

@ -282,6 +282,7 @@ def load_term_config(filter_name,
- flattened_addr - flattened_addr
- flattened_saddr - flattened_saddr
- flattened_daddr - flattened_daddr
- priority
.. note:: .. note::
The following fields can be also a single value and a list of values: The following fields can be also a single value and a list of values:

View File

@ -551,19 +551,19 @@ def _check_onlyif_unless(onlyif, unless):
if onlyif is not None: if onlyif is not None:
if not isinstance(onlyif, six.string_types): if not isinstance(onlyif, six.string_types):
if not onlyif: if not onlyif:
ret = {'comment': 'onlyif execution failed', 'result': True} ret = {'comment': 'onlyif condition is false', 'result': True}
elif isinstance(onlyif, six.string_types): elif isinstance(onlyif, six.string_types):
if retcode(onlyif) != 0: if retcode(onlyif) != 0:
ret = {'comment': 'onlyif execution failed', 'result': True} ret = {'comment': 'onlyif condition is false', 'result': True}
log.debug('onlyif execution failed') log.debug('onlyif condition is false')
if unless is not None: if unless is not None:
if not isinstance(unless, six.string_types): if not isinstance(unless, six.string_types):
if unless: if unless:
ret = {'comment': 'unless execution succeeded', 'result': True} ret = {'comment': 'unless condition is true', 'result': True}
elif isinstance(unless, six.string_types): elif isinstance(unless, six.string_types):
if retcode(unless) == 0: if retcode(unless) == 0:
ret = {'comment': 'unless execution succeeded', 'result': True} ret = {'comment': 'unless condition is true', 'result': True}
log.debug('unless execution succeeded') log.debug('unless condition is true')
return ret return ret

View File

@ -39,6 +39,7 @@ Current known limitations
''' '''
# Import Python libs # Import Python libs
from __future__ import absolute_import from __future__ import absolute_import
from __future__ import unicode_literals
import io import io
import os import os
import logging import logging
@ -48,10 +49,11 @@ import ctypes
import time import time
# Import Salt libs # Import Salt libs
from salt.exceptions import CommandExecutionError, SaltInvocationError
import salt.utils.dictupdate as dictupdate
import salt.utils.files import salt.utils.files
import salt.utils.platform import salt.utils.platform
import salt.utils.dictupdate as dictupdate import salt.utils.stringutils
from salt.exceptions import CommandExecutionError, SaltInvocationError
# Import 3rd-party libs # Import 3rd-party libs
from salt.ext import six from salt.ext import six
@ -4081,7 +4083,7 @@ def _write_regpol_data(data_to_write,
gpt_ini_data = '' gpt_ini_data = ''
if os.path.exists(gpt_ini_path): if os.path.exists(gpt_ini_path):
with salt.utils.files.fopen(gpt_ini_path, 'rb') as gpt_file: with salt.utils.files.fopen(gpt_ini_path, 'rb') as gpt_file:
gpt_ini_data = gpt_file.read() gpt_ini_data = salt.utils.stringutils.to_str(gpt_file.read())
if not _regexSearchRegPolData(r'\[General\]\r\n', gpt_ini_data): if not _regexSearchRegPolData(r'\[General\]\r\n', gpt_ini_data):
gpt_ini_data = '[General]\r\n' + gpt_ini_data gpt_ini_data = '[General]\r\n' + gpt_ini_data
if _regexSearchRegPolData(r'{0}='.format(re.escape(gpt_extension)), gpt_ini_data): if _regexSearchRegPolData(r'{0}='.format(re.escape(gpt_extension)), gpt_ini_data):
@ -4136,7 +4138,7 @@ def _write_regpol_data(data_to_write,
gpt_ini_data[general_location.end():]) gpt_ini_data[general_location.end():])
if gpt_ini_data: if gpt_ini_data:
with salt.utils.files.fopen(gpt_ini_path, 'wb') as gpt_file: with salt.utils.files.fopen(gpt_ini_path, 'wb') as gpt_file:
gpt_file.write(gpt_ini_data) gpt_file.write(salt.utils.stringutils.to_bytes(gpt_ini_data))
except Exception as e: except Exception as e:
msg = 'An error occurred attempting to write to {0}, the exception was {1}'.format( msg = 'An error occurred attempting to write to {0}, the exception was {1}'.format(
gpt_ini_path, e) gpt_ini_path, e)

View File

@ -448,8 +448,9 @@ def stop(name):
try: try:
win32serviceutil.StopService(name) win32serviceutil.StopService(name)
except pywintypes.error as exc: except pywintypes.error as exc:
raise CommandExecutionError( if exc[0] != 1062:
'Failed To Stop {0}: {1}'.format(name, exc[2])) raise CommandExecutionError(
'Failed To Stop {0}: {1}'.format(name, exc[2]))
attempts = 0 attempts = 0
while info(name)['Status'] in ['Running', 'Stop Pending'] \ while info(name)['Status'] in ['Running', 'Stop Pending'] \

View File

@ -4,20 +4,20 @@ This module is used to manage Wordpress installations
:depends: wp binary from http://wp-cli.org/ :depends: wp binary from http://wp-cli.org/
''' '''
from __future__ import absolute_import
# Import Python Modules # Import Python Modules
from __future__ import absolute_import
import collections import collections
# Import Salt Modules # Import Salt Modules
import salt.utils import salt.utils.path
from salt.ext.six.moves import map from salt.ext.six.moves import map
Plugin = collections.namedtuple('Plugin', 'name status update versino') Plugin = collections.namedtuple('Plugin', 'name status update versino')
def __virtual__(): def __virtual__():
if salt.utils.which('wp'): if salt.utils.path.which('wp'):
return True return True
return False return False

View File

@ -1031,17 +1031,17 @@ def _check_onlyif_unless(onlyif, unless, directory, runas=None, env=()):
if onlyif is not None: if onlyif is not None:
if not isinstance(onlyif, six.string_types): if not isinstance(onlyif, six.string_types):
if not onlyif: if not onlyif:
_valid(status, 'onlyif execution failed') _valid(status, 'onlyif condition is false')
elif isinstance(onlyif, six.string_types): elif isinstance(onlyif, six.string_types):
if retcode(onlyif, cwd=directory, runas=runas, env=env) != 0: if retcode(onlyif, cwd=directory, runas=runas, env=env) != 0:
_valid(status, 'onlyif execution failed') _valid(status, 'onlyif condition is false')
if unless is not None: if unless is not None:
if not isinstance(unless, six.string_types): if not isinstance(unless, six.string_types):
if unless: if unless:
_valid(status, 'unless execution succeeded') _valid(status, 'unless condition is true')
elif isinstance(unless, six.string_types): elif isinstance(unless, six.string_types):
if retcode(unless, cwd=directory, runas=runas, env=env, python_shell=False) == 0: if retcode(unless, cwd=directory, runas=runas, env=env, python_shell=False) == 0:
_valid(status, 'unless execution succeeded') _valid(status, 'unless condition is true')
if status['status']: if status['status']:
ret = status ret = status
return ret return ret

View File

@ -23,7 +23,14 @@ def output(ret, bar, **kwargs): # pylint: disable=unused-argument
Update the progress bar Update the progress bar
''' '''
if 'return_count' in ret: if 'return_count' in ret:
bar.update(ret['return_count']) val = ret['return_count']
# Avoid to fail if targets are behind a syndic. In this case actual return count will be
# higher than targeted by MoM itself.
# TODO: implement a way to get the proper target minions count and remove this workaround.
# Details are in #44239.
if val > bar.maxval:
bar.maxval = val
bar.update(val)
return '' return ''

View File

@ -9,6 +9,9 @@ import os
import logging import logging
import pickle import pickle
# Import Salt libs
import salt.utils.files
# This must be present or the Salt loader won't load this module # This must be present or the Salt loader won't load this module
__proxyenabled__ = ['dummy'] __proxyenabled__ = ['dummy']
@ -35,16 +38,14 @@ def __virtual__():
def _save_state(details): def _save_state(details):
pck = open(FILENAME, 'wb') # pylint: disable=W8470 with salt.utils.files.fopen(FILENAME, 'wb') as pck:
pickle.dump(details, pck) pickle.dump(details, pck)
pck.close()
def _load_state(): def _load_state():
try: try:
pck = open(FILENAME, 'r') # pylint: disable=W8470 with salt.utils.files.fopen(FILENAME, 'r') as pck:
DETAILS = pickle.load(pck) DETAILS = pickle.load(pck)
pck.close()
except IOError: except IOError:
DETAILS = {} DETAILS = {}
DETAILS['initialized'] = False DETAILS['initialized'] = False

View File

@ -0,0 +1,155 @@
# -*- coding: utf-8 -*-
'''
Return salt data via Telegram.
The following fields can be set in the minion conf file::
telegram.chat_id (required)
telegram.token (required)
Telegram settings may also be configured as:
.. code-block:: yaml
telegram:
chat_id: 000000000
token: 000000000:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
To use the Telegram return, append '--return telegram' to the salt command.
.. code-block:: bash
salt '*' test.ping --return telegram
'''
from __future__ import absolute_import
# Import Python libs
import logging
# Import 3rd-party libs
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
# Import Salt Libs
import salt.returners
log = logging.getLogger(__name__)
__virtualname__ = 'telegram'
def __virtual__():
    '''
    Return virtual name of the module.

    :return: The virtual name of the module.
    '''
    # The returner is only usable when the ``requests`` library imported.
    return __virtualname__ if HAS_REQUESTS else False
def _get_options(ret=None):
    '''
    Get the Telegram options from salt.

    :param ret: The data to be sent.
    :return: Dictionary containing the data and options needed to send
             them to telegram.
    '''
    option_attrs = {
        'chat_id': 'chat_id',
        'token': 'token',
    }

    options = salt.returners.get_returner_options(
        __virtualname__,
        ret,
        option_attrs,
        __salt__=__salt__,
        __opts__=__opts__,
    )
    log.debug('Options: {0}'.format(options))
    return options
def returner(ret):
    '''
    Send a Telegram message with the data.

    :param ret: The data to be sent.
    :return: Boolean if message was sent successfully.
    '''
    options = _get_options(ret)
    chat_id = options.get('chat_id')
    token = options.get('token')

    # Log (but do not abort on) missing required configuration; the
    # post attempt below will simply fail without them.
    if not chat_id:
        log.error('telegram.chat_id not defined in salt config')
    if not token:
        log.error('telegram.token not defined in salt config')

    message = ('id: {0}\r\n'
               'function: {1}\r\n'
               'function args: {2}\r\n'
               'jid: {3}\r\n'
               'return: {4}\r\n').format(ret.get('id'),
                                         ret.get('fun'),
                                         ret.get('fun_args'),
                                         ret.get('jid'),
                                         ret.get('return'))

    return _post_message(chat_id, message, token)
def _post_message(chat_id, message, token):
    '''
    Send a message to a Telegram chat.

    :param chat_id: The chat id.
    :param message: The message to send to the telegram chat.
    :param token: The Telegram API token.
    :return: Boolean if message was sent successfully.
    '''
    url = 'https://api.telegram.org/bot{0}/sendMessage'.format(token)

    parameters = dict()
    if chat_id:
        parameters['chat_id'] = chat_id
    if message:
        parameters['text'] = message

    # Pre-initialize so a failed request cannot leave these names unbound:
    # previously an exception in requests.post() made the later
    # `if response ...` raise NameError, and an exception in .json() made
    # `'message_id' in result` raise TypeError (result was False).
    response = None
    result = None
    try:
        response = requests.post(
            url,
            data=parameters
        )
        result = response.json()
        log.debug(
            'Raw response of the telegram request is {0}'.format(response))
    except Exception:
        log.exception(
            'Sending telegram api request failed'
        )

    # `result` is the decoded JSON payload on success; a message_id in it
    # confirms Telegram accepted the message.
    if response and result and 'message_id' in result:
        success = True
    else:
        success = False

    log.debug('result {0}'.format(success))
    return bool(success)

View File

@ -66,12 +66,16 @@ log = logging.getLogger(__name__)
# by salt in this state module and not on the actual state module function # by salt in this state module and not on the actual state module function
STATE_REQUISITE_KEYWORDS = frozenset([ STATE_REQUISITE_KEYWORDS = frozenset([
u'onchanges', u'onchanges',
u'onchanges_any',
u'onfail', u'onfail',
u'onfail_any',
u'onfail_stop', u'onfail_stop',
u'prereq', u'prereq',
u'prerequired', u'prerequired',
u'watch', u'watch',
u'watch_any',
u'require', u'require',
u'require_any',
u'listen', u'listen',
]) ])
STATE_REQUISITE_IN_KEYWORDS = frozenset([ STATE_REQUISITE_IN_KEYWORDS = frozenset([
@ -826,12 +830,12 @@ class State(object):
entry, ignore_retcode=True, python_shell=True, **cmd_opts) entry, ignore_retcode=True, python_shell=True, **cmd_opts)
log.debug(u'Last command return code: %s', cmd) log.debug(u'Last command return code: %s', cmd)
if cmd != 0 and ret[u'result'] is False: if cmd != 0 and ret[u'result'] is False:
ret.update({u'comment': u'onlyif execution failed', ret.update({u'comment': u'onlyif condition is false',
u'skip_watch': True, u'skip_watch': True,
u'result': True}) u'result': True})
return ret return ret
elif cmd == 0: elif cmd == 0:
ret.update({u'comment': u'onlyif execution succeeded', u'result': False}) ret.update({u'comment': u'onlyif condition is true', u'result': False})
return ret return ret
if u'unless' in low_data: if u'unless' in low_data:
@ -841,17 +845,17 @@ class State(object):
low_data_unless = low_data[u'unless'] low_data_unless = low_data[u'unless']
for entry in low_data_unless: for entry in low_data_unless:
if not isinstance(entry, six.string_types): if not isinstance(entry, six.string_types):
ret.update({u'comment': u'unless execution failed, bad type passed', u'result': False}) ret.update({u'comment': u'unless condition is false, bad type passed', u'result': False})
return ret return ret
cmd = self.functions[u'cmd.retcode']( cmd = self.functions[u'cmd.retcode'](
entry, ignore_retcode=True, python_shell=True, **cmd_opts) entry, ignore_retcode=True, python_shell=True, **cmd_opts)
log.debug(u'Last command return code: %s', cmd) log.debug(u'Last command return code: %s', cmd)
if cmd == 0 and ret[u'result'] is False: if cmd == 0 and ret[u'result'] is False:
ret.update({u'comment': u'unless execution succeeded', ret.update({u'comment': u'unless condition is true',
u'skip_watch': True, u'skip_watch': True,
u'result': True}) u'result': True})
elif cmd != 0: elif cmd != 0:
ret.update({u'comment': u'unless execution failed', u'result': False}) ret.update({u'comment': u'unless condition is false', u'result': False})
return ret return ret
# No reason to stop, return ret # No reason to stop, return ret
@ -2150,25 +2154,43 @@ class State(object):
low[u'require'] = low.pop(u'watch') low[u'require'] = low.pop(u'watch')
else: else:
present = True present = True
if u'watch_any' in low:
if u'{0}.mod_watch'.format(low[u'state']) not in self.states:
if u'require_any' in low:
low[u'require_any'].extend(low.pop(u'watch_any'))
else:
low[u'require_any'] = low.pop(u'watch_any')
else:
present = True
if u'require' in low: if u'require' in low:
present = True present = True
if u'require_any' in low:
present = True
if u'prerequired' in low: if u'prerequired' in low:
present = True present = True
if u'prereq' in low: if u'prereq' in low:
present = True present = True
if u'onfail' in low: if u'onfail' in low:
present = True present = True
if u'onfail_any' in low:
present = True
if u'onchanges' in low: if u'onchanges' in low:
present = True present = True
if u'onchanges_any' in low:
present = True
if not present: if not present:
return u'met', () return u'met', ()
self.reconcile_procs(running) self.reconcile_procs(running)
reqs = { reqs = {
u'require': [], u'require': [],
u'require_any': [],
u'watch': [], u'watch': [],
u'watch_any': [],
u'prereq': [], u'prereq': [],
u'onfail': [], u'onfail': [],
u'onchanges': []} u'onfail_any': [],
u'onchanges': [],
u'onchanges_any': []}
if pre: if pre:
reqs[u'prerequired'] = [] reqs[u'prerequired'] = []
for r_state in reqs: for r_state in reqs:
@ -2213,42 +2235,58 @@ class State(object):
return u'unmet', () return u'unmet', ()
fun_stats = set() fun_stats = set()
for r_state, chunks in six.iteritems(reqs): for r_state, chunks in six.iteritems(reqs):
if r_state == u'prereq': req_stats = set()
if r_state.startswith(u'prereq') and not r_state.startswith(u'prerequired'):
run_dict = self.pre run_dict = self.pre
else: else:
run_dict = running run_dict = running
for chunk in chunks: for chunk in chunks:
tag = _gen_tag(chunk) tag = _gen_tag(chunk)
if tag not in run_dict: if tag not in run_dict:
fun_stats.add(u'unmet') req_stats.add(u'unmet')
continue continue
if run_dict[tag].get(u'proc'): if run_dict[tag].get(u'proc'):
# Run in parallel, first wait for a touch and then recheck # Run in parallel, first wait for a touch and then recheck
time.sleep(0.01) time.sleep(0.01)
return self.check_requisite(low, running, chunks, pre) return self.check_requisite(low, running, chunks, pre)
if r_state == u'onfail': if r_state.startswith(u'onfail'):
if run_dict[tag][u'result'] is True: if run_dict[tag][u'result'] is True:
fun_stats.add(u'onfail') # At least one state is OK req_stats.add(u'onfail') # At least one state is OK
continue continue
else: else:
if run_dict[tag][u'result'] is False: if run_dict[tag][u'result'] is False:
fun_stats.add(u'fail') req_stats.add(u'fail')
continue continue
if r_state == u'onchanges': if r_state.startswith(u'onchanges'):
if not run_dict[tag][u'changes']: if not run_dict[tag][u'changes']:
fun_stats.add(u'onchanges') req_stats.add(u'onchanges')
else: else:
fun_stats.add(u'onchangesmet') req_stats.add(u'onchangesmet')
continue continue
if r_state == u'watch' and run_dict[tag][u'changes']: if r_state.startswith(u'watch') and run_dict[tag][u'changes']:
fun_stats.add(u'change') req_stats.add(u'change')
continue continue
if r_state == u'prereq' and run_dict[tag][u'result'] is None: if r_state.startswith(u'prereq') and run_dict[tag][u'result'] is None:
fun_stats.add(u'premet') if not r_state.startswith(u'prerequired'):
if r_state == u'prereq' and not run_dict[tag][u'result'] is None: req_stats.add(u'premet')
fun_stats.add(u'pre') if r_state.startswith(u'prereq') and not run_dict[tag][u'result'] is None:
if not r_state.startswith(u'prerequired'):
req_stats.add(u'pre')
else: else:
fun_stats.add(u'met') req_stats.add(u'met')
if r_state.endswith(u'_any'):
if u'met' in req_stats or u'change' in req_stats:
if u'fail' in req_stats:
req_stats.remove(u'fail')
if u'onchangesmet' in req_stats:
if u'onchanges' in req_stats:
req_stats.remove(u'onchanges')
if u'fail' in req_stats:
req_stats.remove(u'fail')
if u'onfail' in req_stats:
if u'fail' in req_stats:
req_stats.remove(u'onfail')
fun_stats.update(req_stats)
if u'unmet' in fun_stats: if u'unmet' in fun_stats:
status = u'unmet' status = u'unmet'
@ -2319,7 +2357,15 @@ class State(object):
tag = _gen_tag(low) tag = _gen_tag(low)
if not low.get(u'prerequired'): if not low.get(u'prerequired'):
self.active.add(tag) self.active.add(tag)
requisites = [u'require', u'watch', u'prereq', u'onfail', u'onchanges'] requisites = [u'require',
u'require_any',
u'watch',
u'watch_any',
u'prereq',
u'onfail',
u'onfail_any',
u'onchanges',
u'onchanges_any']
if not low.get(u'__prereq__'): if not low.get(u'__prereq__'):
requisites.append(u'prerequired') requisites.append(u'prerequired')
status, reqs = self.check_requisite(low, running, chunks, pre=True) status, reqs = self.check_requisite(low, running, chunks, pre=True)

View File

@ -100,17 +100,17 @@ def present(name, cloud_provider, onlyif=None, unless=None, opts=None, **kwargs)
if onlyif is not None: if onlyif is not None:
if not isinstance(onlyif, six.string_types): if not isinstance(onlyif, six.string_types):
if not onlyif: if not onlyif:
return _valid(name, comment='onlyif execution failed') return _valid(name, comment='onlyif condition is false')
elif isinstance(onlyif, six.string_types): elif isinstance(onlyif, six.string_types):
if retcode(onlyif, python_shell=True) != 0: if retcode(onlyif, python_shell=True) != 0:
return _valid(name, comment='onlyif execution failed') return _valid(name, comment='onlyif condition is false')
if unless is not None: if unless is not None:
if not isinstance(unless, six.string_types): if not isinstance(unless, six.string_types):
if unless: if unless:
return _valid(name, comment='unless execution succeeded') return _valid(name, comment='unless condition is true')
elif isinstance(unless, six.string_types): elif isinstance(unless, six.string_types):
if retcode(unless, python_shell=True) == 0: if retcode(unless, python_shell=True) == 0:
return _valid(name, comment='unless execution succeeded') return _valid(name, comment='unless condition is true')
# provider=None not cloud_provider because # provider=None not cloud_provider because
# need to ensure ALL providers don't have the instance # need to ensure ALL providers don't have the instance
@ -177,17 +177,17 @@ def absent(name, onlyif=None, unless=None):
if onlyif is not None: if onlyif is not None:
if not isinstance(onlyif, six.string_types): if not isinstance(onlyif, six.string_types):
if not onlyif: if not onlyif:
return _valid(name, comment='onlyif execution failed') return _valid(name, comment='onlyif condition is false')
elif isinstance(onlyif, six.string_types): elif isinstance(onlyif, six.string_types):
if retcode(onlyif, python_shell=True) != 0: if retcode(onlyif, python_shell=True) != 0:
return _valid(name, comment='onlyif execution failed') return _valid(name, comment='onlyif condition is false')
if unless is not None: if unless is not None:
if not isinstance(unless, six.string_types): if not isinstance(unless, six.string_types):
if unless: if unless:
return _valid(name, comment='unless execution succeeded') return _valid(name, comment='unless condition is true')
elif isinstance(unless, six.string_types): elif isinstance(unless, six.string_types):
if retcode(unless, python_shell=True) == 0: if retcode(unless, python_shell=True) == 0:
return _valid(name, comment='unless execution succeeded') return _valid(name, comment='unless condition is true')
if not __salt__['cloud.has_instance'](name=name, provider=None): if not __salt__['cloud.has_instance'](name=name, provider=None):
ret['result'] = True ret['result'] = True
@ -253,17 +253,17 @@ def profile(name, profile, onlyif=None, unless=None, opts=None, **kwargs):
if onlyif is not None: if onlyif is not None:
if not isinstance(onlyif, six.string_types): if not isinstance(onlyif, six.string_types):
if not onlyif: if not onlyif:
return _valid(name, comment='onlyif execution failed') return _valid(name, comment='onlyif condition is false')
elif isinstance(onlyif, six.string_types): elif isinstance(onlyif, six.string_types):
if retcode(onlyif, python_shell=True) != 0: if retcode(onlyif, python_shell=True) != 0:
return _valid(name, comment='onlyif execution failed') return _valid(name, comment='onlyif condition is false')
if unless is not None: if unless is not None:
if not isinstance(unless, six.string_types): if not isinstance(unless, six.string_types):
if unless: if unless:
return _valid(name, comment='unless execution succeeded') return _valid(name, comment='unless condition is true')
elif isinstance(unless, six.string_types): elif isinstance(unless, six.string_types):
if retcode(unless, python_shell=True) == 0: if retcode(unless, python_shell=True) == 0:
return _valid(name, comment='unless execution succeeded') return _valid(name, comment='unless condition is true')
instance = _get_instance([name]) instance = _get_instance([name])
if instance and not any('Not Actioned' in key for key in instance): if instance and not any('Not Actioned' in key for key in instance):
ret['result'] = True ret['result'] = True

View File

@ -343,7 +343,7 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
cmd = __salt__['cmd.retcode'](onlyif, ignore_retcode=True, python_shell=True, **cmd_kwargs) cmd = __salt__['cmd.retcode'](onlyif, ignore_retcode=True, python_shell=True, **cmd_kwargs)
log.debug('Last command return code: {0}'.format(cmd)) log.debug('Last command return code: {0}'.format(cmd))
if cmd != 0: if cmd != 0:
return {'comment': 'onlyif execution failed', return {'comment': 'onlyif condition is false',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}
elif isinstance(onlyif, list): elif isinstance(onlyif, list):
@ -351,13 +351,13 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
cmd = __salt__['cmd.retcode'](entry, ignore_retcode=True, python_shell=True, **cmd_kwargs) cmd = __salt__['cmd.retcode'](entry, ignore_retcode=True, python_shell=True, **cmd_kwargs)
log.debug('Last command \'{0}\' return code: {1}'.format(entry, cmd)) log.debug('Last command \'{0}\' return code: {1}'.format(entry, cmd))
if cmd != 0: if cmd != 0:
return {'comment': 'onlyif execution failed: {0}'.format(entry), return {'comment': 'onlyif condition is false: {0}'.format(entry),
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}
elif not isinstance(onlyif, string_types): elif not isinstance(onlyif, string_types):
if not onlyif: if not onlyif:
log.debug('Command not run: onlyif did not evaluate to string_type') log.debug('Command not run: onlyif did not evaluate to string_type')
return {'comment': 'onlyif execution failed', return {'comment': 'onlyif condition is false',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}
@ -366,7 +366,7 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
cmd = __salt__['cmd.retcode'](unless, ignore_retcode=True, python_shell=True, **cmd_kwargs) cmd = __salt__['cmd.retcode'](unless, ignore_retcode=True, python_shell=True, **cmd_kwargs)
log.debug('Last command return code: {0}'.format(cmd)) log.debug('Last command return code: {0}'.format(cmd))
if cmd == 0: if cmd == 0:
return {'comment': 'unless execution succeeded', return {'comment': 'unless condition is true',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}
elif isinstance(unless, list): elif isinstance(unless, list):
@ -375,13 +375,13 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
cmd.append(__salt__['cmd.retcode'](entry, ignore_retcode=True, python_shell=True, **cmd_kwargs)) cmd.append(__salt__['cmd.retcode'](entry, ignore_retcode=True, python_shell=True, **cmd_kwargs))
log.debug('Last command return code: {0}'.format(cmd)) log.debug('Last command return code: {0}'.format(cmd))
if all([c == 0 for c in cmd]): if all([c == 0 for c in cmd]):
return {'comment': 'unless execution succeeded', return {'comment': 'unless condition is true',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}
elif not isinstance(unless, string_types): elif not isinstance(unless, string_types):
if unless: if unless:
log.debug('Command not run: unless did not evaluate to string_type') log.debug('Command not run: unless did not evaluate to string_type')
return {'comment': 'unless execution succeeded', return {'comment': 'unless condition is true',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}

View File

@ -2884,6 +2884,7 @@ def directory(name,
if __opts__['test']: if __opts__['test']:
ret['result'] = presult ret['result'] = presult
ret['comment'] = pcomment ret['comment'] = pcomment
ret['changes'] = ret['pchanges']
return ret return ret
if not os.path.isdir(name): if not os.path.isdir(name):

View File

@ -3002,13 +3002,13 @@ def mod_run_check(cmd_kwargs, onlyif, unless):
cmd_kwargs['python_shell'] = True cmd_kwargs['python_shell'] = True
if onlyif: if onlyif:
if __salt__['cmd.retcode'](onlyif, **cmd_kwargs) != 0: if __salt__['cmd.retcode'](onlyif, **cmd_kwargs) != 0:
return {'comment': 'onlyif execution failed', return {'comment': 'onlyif condition is false',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}
if unless: if unless:
if __salt__['cmd.retcode'](unless, **cmd_kwargs) == 0: if __salt__['cmd.retcode'](unless, **cmd_kwargs) == 0:
return {'comment': 'unless execution succeeded', return {'comment': 'unless condition is true',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}

View File

@ -581,7 +581,7 @@ def secret_present(
ret['changes'] = { ret['changes'] = {
# Omit values from the return. They are unencrypted # Omit values from the return. They are unencrypted
# and can contain sensitive data. # and can contain sensitive data.
'data': res['data'].keys() 'data': list(res['data'])
} }
ret['result'] = True ret['result'] = True
@ -933,7 +933,10 @@ def node_label_folder_absent(name, node, **kwargs):
ret['result'] = True ret['result'] = True
ret['changes'] = { ret['changes'] = {
'kubernetes.node_label_folder_absent': { 'kubernetes.node_label_folder_absent': {
'new': new_labels, 'old': labels.keys()}} 'old': list(labels),
'new': new_labels,
}
}
ret['comment'] = 'Label folder removed from node' ret['comment'] = 'Label folder removed from node'
return ret return ret

View File

@ -254,13 +254,13 @@ def _mod_run_check(cmd_kwargs, onlyif, unless):
''' '''
if onlyif: if onlyif:
if __salt__['cmd.retcode'](onlyif, **cmd_kwargs) != 0: if __salt__['cmd.retcode'](onlyif, **cmd_kwargs) != 0:
return {'comment': 'onlyif execution failed', return {'comment': 'onlyif condition is false',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}
if unless: if unless:
if __salt__['cmd.retcode'](unless, **cmd_kwargs) == 0: if __salt__['cmd.retcode'](unless, **cmd_kwargs) == 0:
return {'comment': 'unless execution succeeded', return {'comment': 'unless condition is true',
'skip_watch': True, 'skip_watch': True,
'result': True} 'result': True}

View File

@ -111,8 +111,6 @@ def present(name,
# check if user exists # check if user exists
users = __salt__['mongodb.user_find'](name, user, password, host, port, database, authdb) users = __salt__['mongodb.user_find'](name, user, password, host, port, database, authdb)
if len(users) > 0: if len(users) > 0:
# check each user occurrence
users = __salt__['mongodb.user_find'](name, user, password, host, port, database, authdb)
# check each user occurrence # check each user occurrence
for usr in users: for usr in users:
# prepare empty list for current roles # prepare empty list for current roles

View File

@ -239,6 +239,7 @@ def term(name,
- flattened_addr - flattened_addr
- flattened_saddr - flattened_saddr
- flattened_daddr - flattened_daddr
- priority
.. note:: .. note::
The following fields can be also a single value and a list of values: The following fields can be also a single value and a list of values:

View File

@ -44,7 +44,7 @@ from __future__ import absolute_import
# Import Python libs # Import Python libs
import logging import logging
from time import strftime, strptime, gmtime from time import strftime, strptime, localtime
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@ -717,6 +717,10 @@ def scheduled_snapshot(name, prefix, recursive=True, schedule=None):
a schedule must be setup to automatically run the state. this means that if a schedule must be setup to automatically run the state. this means that if
you run the state daily the hourly snapshot will only be made once per day! you run the state daily the hourly snapshot will only be made once per day!
.. versionchanged:: Oxygen
switched to localtime from gmtime so times now take into account timezones.
''' '''
ret = {'name': name, ret = {'name': name,
'changes': {}, 'changes': {},
@ -789,7 +793,7 @@ def scheduled_snapshot(name, prefix, recursive=True, schedule=None):
# create snapshot # create snapshot
needed_holds = [] needed_holds = []
current_timestamp = gmtime() current_timestamp = localtime()
for hold in snapshots: for hold in snapshots:
# check if we need need to consider hold # check if we need need to consider hold
if schedule[hold] == 0: if schedule[hold] == 0:

File diff suppressed because it is too large Load Diff

View File

@ -320,10 +320,14 @@ def fopen(*args, **kwargs):
if len(args) > 1: if len(args) > 1:
args = list(args) args = list(args)
if 'b' not in args[1]: if 'b' not in args[1]:
args[1] += 'b' args[1] = args[1].replace('t', 'b')
elif kwargs.get('mode', None): if 'b' not in args[1]:
args[1] += 'b'
elif kwargs.get('mode'):
if 'b' not in kwargs['mode']: if 'b' not in kwargs['mode']:
kwargs['mode'] += 'b' kwargs['mode'] = kwargs['mode'].replace('t', 'b')
if 'b' not in kwargs['mode']:
kwargs['mode'] += 'b'
else: else:
# the default is to read # the default is to read
kwargs['mode'] = 'rb' kwargs['mode'] = 'rb'

View File

@ -537,7 +537,7 @@ class GitProvider(object):
return root_dir return root_dir
log.error( log.error(
'Root path \'%s\' not present in %s remote \'%s\', ' 'Root path \'%s\' not present in %s remote \'%s\', '
'skipping.', self.root, self.role, self.id 'skipping.', self.root(), self.role, self.id
) )
return None return None

View File

@ -192,6 +192,16 @@ def get_entry_multi(dict_, pairs, raise_error=True):
return {} return {}
def get_endpoint_url_v3(catalog, service_type, region_name):
for service_entry in catalog:
if service_entry['type'] == service_type:
for endpoint_entry in service_entry['endpoints']:
if (endpoint_entry['region'] == region_name and
endpoint_entry['interface'] == 'public'):
return endpoint_entry['url']
return None
def sanatize_novaclient(kwargs): def sanatize_novaclient(kwargs):
variables = ( variables = (
'username', 'api_key', 'project_id', 'auth_url', 'insecure', 'username', 'api_key', 'project_id', 'auth_url', 'insecure',
@ -355,21 +365,16 @@ class SaltNova(object):
def _v3_setup(self, region_name): def _v3_setup(self, region_name):
if region_name is not None: if region_name is not None:
servers_endpoints = get_entry(self.catalog, 'type', 'compute')['endpoints'] self.client_kwargs['bypass_url'] = get_endpoint_url_v3(self.catalog, 'compute', region_name)
self.kwargs['bypass_url'] = get_entry_multi( log.debug('Using Nova bypass_url: %s', self.client_kwargs['bypass_url'])
servers_endpoints,
[('region', region_name), ('interface', 'public')]
)['url']
self.compute_conn = client.Client(version=self.version, session=self.session, **self.client_kwargs) self.compute_conn = client.Client(version=self.version, session=self.session, **self.client_kwargs)
volume_endpoints = get_entry(self.catalog, 'type', 'volume', raise_error=False).get('endpoints', {}) volume_endpoints = get_entry(self.catalog, 'type', 'volume', raise_error=False).get('endpoints', {})
if volume_endpoints: if volume_endpoints:
if region_name is not None: if region_name is not None:
self.kwargs['bypass_url'] = get_entry_multi( self.client_kwargs['bypass_url'] = get_endpoint_url_v3(self.catalog, 'volume', region_name)
volume_endpoints, log.debug('Using Cinder bypass_url: %s', self.client_kwargs['bypass_url'])
[('region', region_name), ('interface', 'public')]
)['url']
self.volume_conn = client.Client(version=self.version, session=self.session, **self.client_kwargs) self.volume_conn = client.Client(version=self.version, session=self.session, **self.client_kwargs)
if hasattr(self, 'extensions'): if hasattr(self, 'extensions'):

View File

@ -134,7 +134,11 @@ def check_result(running, recurse=False, highstate=None):
ret = True ret = True
for state_id, state_result in six.iteritems(running): for state_id, state_result in six.iteritems(running):
if not recurse and not isinstance(state_result, dict): expected_type = dict
# The __extend__ state is a list
if "__extend__" == state_id:
expected_type = list
if not recurse and not isinstance(state_result, expected_type):
ret = False ret = False
if ret and isinstance(state_result, dict): if ret and isinstance(state_result, dict):
result = state_result.get('result', _empty) result = state_result.get('result', _empty)

View File

@ -55,50 +55,52 @@ def parse_gitlog(filename=None):
else: else:
fh = open(filename, 'r+') fh = open(filename, 'r+')
commitcount = 0 try:
for line in fh.readlines(): commitcount = 0
line = line.rstrip() for line in fh.readlines():
if line.startswith('commit '): line = line.rstrip()
new_commit = True if line.startswith('commit '):
commitcount += 1 new_commit = True
continue commitcount += 1
continue
if line.startswith('Author:'): if line.startswith('Author:'):
author = re.match(r'Author:\s+(.*)\s+<(.*)>', line) author = re.match(r'Author:\s+(.*)\s+<(.*)>', line)
if author: if author:
email = author.group(2) email = author.group(2)
continue continue
if line.startswith('Date:'): if line.startswith('Date:'):
isodate = re.match(r'Date:\s+(.*)', line) isodate = re.match(r'Date:\s+(.*)', line)
d = parse_date(isodate.group(1)) d = parse_date(isodate.group(1))
continue continue
if len(line) < 2 and new_commit: if len(line) < 2 and new_commit:
new_commit = False new_commit = False
key = '{0}-{1}'.format(d.year, str(d.month).zfill(2)) key = '{0}-{1}'.format(d.year, str(d.month).zfill(2))
if key not in results: if key not in results:
results[key] = [] results[key] = []
if key not in commits: if key not in commits:
commits[key] = 0 commits[key] = 0
if email not in commits_by_contributor: if email not in commits_by_contributor:
commits_by_contributor[email] = {} commits_by_contributor[email] = {}
if key not in commits_by_contributor[email]: if key not in commits_by_contributor[email]:
commits_by_contributor[email][key] = 1 commits_by_contributor[email][key] = 1
else: else:
commits_by_contributor[email][key] += 1 commits_by_contributor[email][key] += 1
if email not in results[key]: if email not in results[key]:
results[key].append(email) results[key].append(email)
commits[key] += commitcount commits[key] += commitcount
commitcount = 0 commitcount = 0
fh.close() finally:
fh.close()
return (results, commits, commits_by_contributor) return (results, commits, commits_by_contributor)

View File

@ -10,6 +10,7 @@ import optparse
# Import Salt libs # Import Salt libs
import salt.utils.color import salt.utils.color
import salt.utils.files
# Import 3rd-party libs # Import 3rd-party libs
import yaml import yaml
@ -45,7 +46,8 @@ def run(command):
''' '''
cmd = r'salt \* {0} --yaml-out -t 500 > high'.format(command) cmd = r'salt \* {0} --yaml-out -t 500 > high'.format(command)
subprocess.call(cmd, shell=True) subprocess.call(cmd, shell=True)
data = yaml.load(open('high')) with salt.utils.files.fopen('high') as fp_:
data = yaml.load(fp_)
hashes = set() hashes = set()
for key, val in six.iteritems(data): for key, val in six.iteritems(data):
has = hashlib.md5(str(val)).hexdigest() has = hashlib.md5(str(val)).hexdigest()

View File

@ -803,7 +803,10 @@ class TestDaemon(object):
# Set up config options that require internal data # Set up config options that require internal data
master_opts['pillar_roots'] = syndic_master_opts['pillar_roots'] = { master_opts['pillar_roots'] = syndic_master_opts['pillar_roots'] = {
'base': [os.path.join(FILES, 'pillar', 'base')] 'base': [
RUNTIME_VARS.TMP_PILLAR_TREE,
os.path.join(FILES, 'pillar', 'base'),
]
} }
master_opts['file_roots'] = syndic_master_opts['file_roots'] = { master_opts['file_roots'] = syndic_master_opts['file_roots'] = {
'base': [ 'base': [
@ -979,6 +982,7 @@ class TestDaemon(object):
sub_minion_opts['sock_dir'], sub_minion_opts['sock_dir'],
minion_opts['sock_dir'], minion_opts['sock_dir'],
RUNTIME_VARS.TMP_STATE_TREE, RUNTIME_VARS.TMP_STATE_TREE,
RUNTIME_VARS.TMP_PILLAR_TREE,
RUNTIME_VARS.TMP_PRODENV_STATE_TREE, RUNTIME_VARS.TMP_PRODENV_STATE_TREE,
TMP, TMP,
], ],
@ -1090,7 +1094,8 @@ class TestDaemon(object):
os.chmod(path, stat.S_IRWXU) os.chmod(path, stat.S_IRWXU)
func(path) func(path)
for dirname in (TMP, RUNTIME_VARS.TMP_STATE_TREE, RUNTIME_VARS.TMP_PRODENV_STATE_TREE): for dirname in (TMP, RUNTIME_VARS.TMP_STATE_TREE,
RUNTIME_VARS.TMP_PILLAR_TREE, RUNTIME_VARS.TMP_PRODENV_STATE_TREE):
if os.path.isdir(dirname): if os.path.isdir(dirname):
shutil.rmtree(dirname, onerror=remove_readonly) shutil.rmtree(dirname, onerror=remove_readonly)

View File

@ -0,0 +1,34 @@
changing_state:
cmd.run:
- name: echo "Changed!"
another_changing_state:
cmd.run:
- name: echo "Changed!"
# mock is installed with salttesting, so it should already be
# present on the system, resulting in no changes
non_changing_state:
pip.installed:
- name: mock
another_non_changing_state:
pip.installed:
- name: mock
# Should succeed since at least one will have changes
test_one_changing_states:
cmd.run:
- name: echo "Success!"
- onchanges_any:
- cmd: changing_state
- cmd: another_changing_state
- pip: non_changing_state
- pip: another_non_changing_state
test_two_non_changing_states:
cmd.run:
- name: echo "Should not run"
- onchanges_any:
- pip: non_changing_state
- pip: another_non_changing_state

View File

@ -0,0 +1,39 @@
a:
cmd.run:
- name: exit 0
b:
cmd.run:
- name: exit 1
c:
cmd.run:
- name: exit 0
d:
cmd.run:
- name: echo itworked
- onfail_any:
- cmd: a
- cmd: b
- cmd: c
e:
cmd.run:
- name: exit 0
f:
cmd.run:
- name: exit 0
g:
cmd.run:
- name: exit 0
h:
cmd.run:
- name: echo itworked
- onfail_any:
- cmd: e
- cmd: f
- cmd: g

View File

@ -0,0 +1,34 @@
# Complex require/require_in graph
#
# Relative order of C>E is given by the definition order
#
# D (1) <--+
# |
# B (2) ---+ <-+ <-+ <-+
# | | |
# C (3) <--+ --|---|---+
# | | |
# E (4) ---|---|---+ <-+
# | | |
# A (5) ---+ --+ ------+
#
# A should success since B succeeds even though C fails.
A:
cmd.run:
- name: echo A
- require_any:
- cmd: B
- cmd: C
- cmd: D
B:
cmd.run:
- name: echo B
C:
cmd.run:
- name: /bin/false
D:
cmd.run:
- name: echo D

View File

@ -0,0 +1,15 @@
# D should fail since both E & F fail
E:
cmd.run:
- name: 'false'
F:
cmd.run:
- name: 'false'
D:
cmd.run:
- name: echo D
- require_any:
- cmd: E
- cmd: F

View File

@ -0,0 +1,39 @@
A:
cmd.wait:
- name: 'true'
- watch_any:
- cmd: B
- cmd: C
- cmd: D
B:
cmd.run:
- name: 'true'
C:
cmd.run:
- name: 'false'
D:
cmd.run:
- name: 'true'
E:
cmd.wait:
- name: 'true'
- watch_any:
- cmd: F
- cmd: G
- cmd: H
F:
cmd.run:
- name: 'true'
G:
cmd.run:
- name: 'false'
H:
cmd.run:
- name: 'false'

View File

@ -0,0 +1,14 @@
A:
cmd.wait:
- name: 'true'
- watch_any:
- cmd: B
- cmd: C
B:
cmd.run:
- name: 'false'
C:
cmd.run:
- name: 'false'

View File

@ -19,6 +19,10 @@ if salt.utils.platform.is_windows():
pass pass
def _freebsd_or_openbsd():
return salt.utils.platform.is_freebsd() or salt.utils.platform.is_openbsd()
class TestGrainsCore(ModuleCase): class TestGrainsCore(ModuleCase):
''' '''
Test the core grains grains Test the core grains grains
@ -28,7 +32,6 @@ class TestGrainsCore(ModuleCase):
''' '''
test grains['cpu_model'] test grains['cpu_model']
''' '''
opts = self.minion_opts
cpu_model_text = salt.modules.reg.read_value( cpu_model_text = salt.modules.reg.read_value(
'HKEY_LOCAL_MACHINE', 'HKEY_LOCAL_MACHINE',
'HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0', 'HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0',
@ -37,3 +40,25 @@ class TestGrainsCore(ModuleCase):
self.run_function('grains.items')['cpu_model'], self.run_function('grains.items')['cpu_model'],
cpu_model_text cpu_model_text
) )
@skipIf(not _freebsd_or_openbsd(), 'Only run on FreeBSD or OpenBSD')
def test_freebsd_openbsd_mem_total(self):
'''
test grains['mem_total']
'''
physmem = self.run_function('sysctl.get', ['hw.physmem'])
self.assertEqual(
self.run_function('grains.items')['mem_total'],
int(physmem) / 1048576
)
@skipIf(not salt.utils.platform.is_openbsd(), 'Only run on OpenBSD')
def test_openbsd_swap_total(self):
'''
test grains['swap_total']
'''
swapmem = self.run_function('cmd.run', ['swapctl -sk']).split(' ')[1]
self.assertEqual(
self.run_function('grains.items')['swap_total'],
int(swapmem) / 1048576
)

View File

@ -5,10 +5,16 @@ Integration tests for the saltutil module.
# Import Python libs # Import Python libs
from __future__ import absolute_import from __future__ import absolute_import
import os
import time import time
import textwrap
# Import Salt Testing libs # Import Salt Testing libs
from tests.support.case import ModuleCase from tests.support.case import ModuleCase
from tests.support.paths import TMP_PILLAR_TREE
# Import Salt Libs
import salt.utils.files
class SaltUtilModuleTest(ModuleCase): class SaltUtilModuleTest(ModuleCase):
@ -153,3 +159,38 @@ class SaltUtilSyncModuleTest(ModuleCase):
ret = self.run_function('saltutil.sync_all', extmod_whitelist={'modules': ['runtests_decorators']}, ret = self.run_function('saltutil.sync_all', extmod_whitelist={'modules': ['runtests_decorators']},
extmod_blacklist={'modules': ['runtests_decorators']}) extmod_blacklist={'modules': ['runtests_decorators']})
self.assertEqual(ret, expected_return) self.assertEqual(ret, expected_return)
class SaltUtilSyncPillarTest(ModuleCase):
    '''
    Testcase for the saltutil sync pillar module
    '''

    def test_pillar_refresh(self):
        '''
        test pillar refresh module
        '''
        pillar_key = 'itworked'

        # The key must be absent before the new pillar SLS is dropped in.
        pre_pillar = self.run_function('pillar.raw')
        self.assertNotIn(pillar_key, pre_pillar.get(pillar_key, 'didnotwork'))

        # Write a pillar SLS and a top file pointing at it into the
        # temporary pillar tree used by the test suite.
        with salt.utils.files.fopen(os.path.join(TMP_PILLAR_TREE, 'add_pillar.sls'), 'w') as fp:
            fp.write('{0}: itworked'.format(pillar_key))

        with salt.utils.files.fopen(os.path.join(TMP_PILLAR_TREE, 'top.sls'), 'w') as fp:
            fp.write(textwrap.dedent('''\
                base:
                  '*':
                    - add_pillar
                '''))

        self.run_function('saltutil.refresh_pillar')
        # refresh_pillar is asynchronous; give the minion a moment to
        # re-render before checking the result.
        self.run_function('test.sleep', [1])

        post_pillar = self.run_function('pillar.raw')
        self.assertIn(pillar_key, post_pillar.get(pillar_key, 'didnotwork'))

    def tearDown(self):
        # Leave the shared pillar tree clean for other tests.
        for filename in os.listdir(TMP_PILLAR_TREE):
            os.remove(os.path.join(TMP_PILLAR_TREE, filename))

View File

@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.path
@destructiveTest
class ServiceModuleTest(ModuleCase):
    '''
    Module testing the service module
    '''
    def setUp(self):
        # Pick a service/command pair appropriate for this platform.
        os_family = self.run_function('grains.get', ['os_family'])
        if os_family == 'RedHat':
            self.service_name, cmd_name = 'crond', 'crontab'
        elif os_family == 'Arch':
            self.service_name, cmd_name = 'systemd-journald', 'systemctl'
        else:
            self.service_name, cmd_name = 'cron', 'crontab'

        if salt.utils.path.which(cmd_name) is None:
            self.skipTest('{0} is not installed'.format(cmd_name))

    def test_service_status_running(self):
        '''
        service.status returns True for a service that was just started.
        '''
        self.run_function('service.start', [self.service_name])
        self.assertTrue(self.run_function('service.status', [self.service_name]))

    def test_service_status_dead(self):
        '''
        service.status returns False for a service that was just stopped.
        '''
        self.run_function('service.stop', [self.service_name])
        self.assertFalse(self.run_function('service.status', [self.service_name]))

View File

@ -23,6 +23,9 @@ from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
# Import 3rd-party libs # Import 3rd-party libs
from salt.ext import six from salt.ext import six
import logging
log = logging.getLogger(__name__)
class StateModuleTest(ModuleCase, SaltReturnAssertsMixin): class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
''' '''
@ -699,6 +702,237 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
['A recursive requisite was found, SLS "requisites.require_recursion_error1" ID "B" ID "A"'] ['A recursive requisite was found, SLS "requisites.require_recursion_error1" ID "B" ID "A"']
) )
def test_requisites_require_any(self):
    '''
    Call sls file containing several require_in and require.
    Ensure that some of them are failing and that the order is right.
    '''
    # Expected run order: B (0) first, C (1) fails, D (2) succeeds,
    # then A (3) still runs because require_any is satisfied when at
    # least one of its requisites (B, C, D) succeeded.
    expected_result = {
        'cmd_|-A_|-echo A_|-run': {
            '__run_num__': 3,
            'comment': 'Command "echo A" run',
            'result': True,
            'changes': True,
        },
        'cmd_|-B_|-echo B_|-run': {
            '__run_num__': 0,
            'comment': 'Command "echo B" run',
            'result': True,
            'changes': True,
        },
        'cmd_|-C_|-/bin/false_|-run': {
            # Deliberate failure: /bin/false exits non-zero.
            '__run_num__': 1,
            'comment': 'Command "/bin/false" run',
            'result': False,
            'changes': True,
        },
        'cmd_|-D_|-echo D_|-run': {
            '__run_num__': 2,
            'comment': 'Command "echo D" run',
            'result': True,
            'changes': True,
        },
    }
    ret = self.run_function('state.sls', mods='requisites.require_any')
    # normalize_ret reduces the raw state return to the fields compared above.
    result = self.normalize_ret(ret)
    self.assertReturnNonEmptySaltType(ret)
    self.assertEqual(expected_result, result)
def test_requisites_require_any_fail(self):
    '''
    Run the requisites.require_any_fail sls and verify that a state
    whose require_any requisites all fail reports the requisite failure.
    '''
    state_ret = self.run_function('state.sls', mods='requisites.require_any_fail')
    normalized = self.normalize_ret(state_ret)
    self.assertReturnNonEmptySaltType(state_ret)
    failed_comment = normalized['cmd_|-D_|-echo D_|-run']['comment']
    self.assertIn('One or more requisite failed', failed_comment)
def test_requisites_watch_any(self):
    '''
    Call sls file containing several require_in and require.
    Ensure that some of them are failing and that the order is right.
    '''
    # Two watch_any groups: A watches B/C/D (one of them, B and D,
    # succeeds, so A runs); E watches F/G/H (F succeeds, so E runs).
    # Run numbers 3 and 8 are taken by other states in the sls that are
    # not part of the comparison here.
    expected_result = {
        'cmd_|-A_|-true_|-wait': {
            '__run_num__': 4,
            'comment': 'Command "true" run',
            'result': True,
            'changes': True,
        },
        'cmd_|-B_|-true_|-run': {
            '__run_num__': 0,
            'comment': 'Command "true" run',
            'result': True,
            'changes': True,
        },
        'cmd_|-C_|-false_|-run': {
            # Deliberate failure.
            '__run_num__': 1,
            'comment': 'Command "false" run',
            'result': False,
            'changes': True,
        },
        'cmd_|-D_|-true_|-run': {
            '__run_num__': 2,
            'comment': 'Command "true" run',
            'result': True,
            'changes': True,
        },
        'cmd_|-E_|-true_|-wait': {
            '__run_num__': 9,
            'comment': 'Command "true" run',
            'result': True,
            'changes': True,
        },
        'cmd_|-F_|-true_|-run': {
            '__run_num__': 5,
            'comment': 'Command "true" run',
            'result': True,
            'changes': True,
        },
        'cmd_|-G_|-false_|-run': {
            # Deliberate failure.
            '__run_num__': 6,
            'comment': 'Command "false" run',
            'result': False,
            'changes': True,
        },
        'cmd_|-H_|-false_|-run': {
            # Deliberate failure.
            '__run_num__': 7,
            'comment': 'Command "false" run',
            'result': False,
            'changes': True,
        },
    }
    ret = self.run_function('state.sls', mods='requisites.watch_any')
    result = self.normalize_ret(ret)
    self.assertReturnNonEmptySaltType(ret)
    self.assertEqual(expected_result, result)
def test_requisites_watch_any_fail(self):
    '''
    Run the requisites.watch_any_fail sls and verify that a wait state
    whose watch_any requisites all fail reports the requisite failure.
    '''
    state_ret = self.run_function('state.sls', mods='requisites.watch_any_fail')
    normalized = self.normalize_ret(state_ret)
    self.assertReturnNonEmptySaltType(state_ret)
    failed_comment = normalized['cmd_|-A_|-true_|-wait']['comment']
    self.assertIn('One or more requisite failed', failed_comment)
def test_requisites_onchanges_any(self):
    '''
    Call sls file containing several require_in and require.
    Ensure that some of them are failing and that the order is right.
    '''
    # onchanges_any: the dependent state runs if at least one watched
    # state reported changes. test_one (4) runs because the two cmd
    # states changed; test_two (5) is skipped because neither pip
    # state reported changes.
    expected_result = {
        'cmd_|-another_changing_state_|-echo "Changed!"_|-run': {
            '__run_num__': 1,
            'changes': True,
            'comment': 'Command "echo "Changed!"" run',
            'result': True
        },
        'cmd_|-changing_state_|-echo "Changed!"_|-run': {
            '__run_num__': 0,
            'changes': True,
            'comment': 'Command "echo "Changed!"" run',
            'result': True
        },
        'cmd_|-test_one_changing_states_|-echo "Success!"_|-run': {
            '__run_num__': 4,
            'changes': True,
            'comment': 'Command "echo "Success!"" run',
            'result': True
        },
        'cmd_|-test_two_non_changing_states_|-echo "Should not run"_|-run': {
            '__run_num__': 5,
            'changes': False,
            'comment': 'State was not run because none of the onchanges reqs changed',
            'result': True
        },
        'pip_|-another_non_changing_state_|-mock_|-installed': {
            # Already-installed package: no changes reported.
            '__run_num__': 3,
            'changes': False,
            'comment': 'Python package mock was already installed\nAll packages were successfully installed',
            'result': True
        },
        'pip_|-non_changing_state_|-mock_|-installed': {
            # Already-installed package: no changes reported.
            '__run_num__': 2,
            'changes': False,
            'comment': 'Python package mock was already installed\nAll packages were successfully installed',
            'result': True
        }
    }
    ret = self.run_function('state.sls', mods='requisites.onchanges_any')
    result = self.normalize_ret(ret)
    self.assertReturnNonEmptySaltType(ret)
    self.assertEqual(expected_result, result)
def test_requisites_onfail_any(self):
    '''
    Call sls file containing several require_in and require.
    Ensure that some of them are failing and that the order is right.
    '''
    # onfail_any: d (3) runs because one of its watched states (b)
    # failed; h (7) is skipped because none of its watched states
    # (e, f, g) failed.
    expected_result = {
        'cmd_|-a_|-exit 0_|-run': {
            '__run_num__': 0,
            'changes': True,
            'comment': 'Command "exit 0" run',
            'result': True
        },
        'cmd_|-b_|-exit 1_|-run': {
            # Deliberate failure: exit code 1.
            '__run_num__': 1,
            'changes': True,
            'comment': 'Command "exit 1" run',
            'result': False
        },
        'cmd_|-c_|-exit 0_|-run': {
            '__run_num__': 2,
            'changes': True,
            'comment': 'Command "exit 0" run',
            'result': True
        },
        'cmd_|-d_|-echo itworked_|-run': {
            '__run_num__': 3,
            'changes': True,
            'comment': 'Command "echo itworked" run',
            'result': True},
        'cmd_|-e_|-exit 0_|-run': {
            '__run_num__': 4,
            'changes': True,
            'comment': 'Command "exit 0" run',
            'result': True
        },
        'cmd_|-f_|-exit 0_|-run': {
            '__run_num__': 5,
            'changes': True,
            'comment': 'Command "exit 0" run',
            'result': True
        },
        'cmd_|-g_|-exit 0_|-run': {
            '__run_num__': 6,
            'changes': True,
            'comment': 'Command "exit 0" run',
            'result': True
        },
        'cmd_|-h_|-echo itworked_|-run': {
            '__run_num__': 7,
            'changes': False,
            'comment': 'State was not run because onfail req did not change',
            'result': True
        }
    }
    ret = self.run_function('state.sls', mods='requisites.onfail_any')
    result = self.normalize_ret(ret)
    self.assertReturnNonEmptySaltType(ret)
    self.assertEqual(expected_result, result)
def test_requisites_full_sls(self): def test_requisites_full_sls(self):
''' '''
Teste the sls special command in requisites Teste the sls special command in requisites
@ -910,7 +1144,7 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
ret = self.run_function('state.sls', mods='requisites.use') ret = self.run_function('state.sls', mods='requisites.use')
self.assertReturnNonEmptySaltType(ret) self.assertReturnNonEmptySaltType(ret)
for item, descr in six.iteritems(ret): for item, descr in six.iteritems(ret):
self.assertEqual(descr['comment'], 'onlyif execution failed') self.assertEqual(descr['comment'], 'onlyif condition is false')
# TODO: issue #8802 : use recursions undetected # TODO: issue #8802 : use recursions undetected
# issue is closed as use does not actually inherit requisites # issue is closed as use does not actually inherit requisites
@ -1201,7 +1435,8 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
''' '''
testfile = os.path.join(TMP, 'retry_file') testfile = os.path.join(TMP, 'retry_file')
time.sleep(30) time.sleep(30)
open(testfile, 'a').close() # pylint: disable=resource-leakage with salt.utils.files.fopen(testfile, 'a'):
pass
def test_retry_option_eventual_success(self): def test_retry_option_eventual_success(self):
''' '''

View File

@ -6,60 +6,81 @@ Tests for the spm build utility
from __future__ import absolute_import from __future__ import absolute_import
import os import os
import shutil import shutil
import textwrap
# Import Salt libs
import salt.utils.files
import salt.utils.path
# Import Salt Testing libs # Import Salt Testing libs
from tests.support.case import SPMCase from tests.support.case import SPMCase, ModuleCase
from tests.support.helpers import destructiveTest from tests.support.helpers import destructiveTest
from tests.support.unit import skipIf
# Import Salt Libraries
import salt.utils.files
@destructiveTest @destructiveTest
class SPMBuildTest(SPMCase): class SPMBuildTest(SPMCase, ModuleCase):
''' '''
Validate the spm build command Validate the spm build command
''' '''
def setUp(self): def setUp(self):
self.config = self._spm_config() self.config = self._spm_config()
self.formula_dir = os.path.join(' '.join(self.config['file_roots']['base']), 'formulas') self._spm_build_files(self.config)
self.formula_sls_dir = os.path.join(self.formula_dir, 'apache')
self.formula_sls = os.path.join(self.formula_sls_dir, 'apache.sls')
self.formula_file = os.path.join(self.formula_dir, 'FORMULA')
dirs = [self.formula_dir, self.formula_sls_dir]
for formula_dir in dirs:
os.makedirs(formula_dir)
with salt.utils.files.fopen(self.formula_sls, 'w') as fp:
fp.write(textwrap.dedent('''\
install-apache:
pkg.installed:
- name: apache2
'''))
with salt.utils.files.fopen(self.formula_file, 'w') as fp:
fp.write(textwrap.dedent('''\
name: apache
os: RedHat, Debian, Ubuntu, Suse, FreeBSD
os_family: RedHat, Debian, Suse, FreeBSD
version: 201506
release: 2
summary: Formula for installing Apache
description: Formula for installing Apache
'''))
def test_spm_build(self): def test_spm_build(self):
''' '''
test spm build test spm build
''' '''
build_spm = self.run_spm('build', self.config, self.formula_dir) self.run_spm('build', self.config, self.formula_dir)
spm_file = os.path.join(self.config['spm_build_dir'], 'apache-201506-2.spm') spm_file = os.path.join(self.config['spm_build_dir'], 'apache-201506-2.spm')
# Make sure .spm file gets created # Make sure .spm file gets created
self.assertTrue(os.path.exists(spm_file)) self.assertTrue(os.path.exists(spm_file))
# Make sure formula path dir is created # Make sure formula path dir is created
self.assertTrue(os.path.isdir(self.config['formula_path'])) self.assertTrue(os.path.isdir(self.config['formula_path']))
@skipIf(salt.utils.path.which('fallocate') is None, 'fallocate not installed')
def test_spm_build_big_file(self):
    '''
    test spm build with a big file
    '''
    # check to make sure there is enough space to run this test
    check_space = self.run_function('status.diskusage', ['/'])
    space = check_space['/']['available']
    if space < 2000000:
        self.skipTest('Not enough space on host to run this test')
    # fallocate drops a 1 GiB file into the formula dir so the
    # resulting .spm package is large.
    self.run_function('cmd.run',
                      ['fallocate -l 1G {0}'.format(os.path.join(self.formula_sls_dir,
                                                                 'bigfile.txt'))])
    self.run_spm('build', self.config, self.formula_dir)
    spm_file = os.path.join(self.config['spm_build_dir'], 'apache-201506-2.spm')
    self.run_spm('install', self.config, spm_file)
    # Both the formula and the big file must survive the
    # build/install round trip.
    get_files = self.run_spm('files', self.config, 'apache')
    files = ['apache.sls', 'bigfile.txt']
    for sls in files:
        self.assertIn(sls, ' '.join(get_files))
def test_spm_build_exclude(self):
    '''
    Verify that spm build honours spm_build_exclude: files placed in
    the formula's .git directory must not end up in the package.
    '''
    excluded_dir = os.path.join(self.formula_sls_dir, '.git')
    os.makedirs(excluded_dir)
    excluded_names = ['donotbuild1', 'donotbuild2', 'donotbuild3']
    for name in excluded_names:
        with salt.utils.files.fopen(os.path.join(excluded_dir, name), 'w') as fp:
            fp.write('Please do not include me in build')

    # Build, install, then list the packaged files.
    self.run_spm('build', self.config, self.formula_dir)
    spm_file = os.path.join(self.config['spm_build_dir'], 'apache-201506-2.spm')
    self.run_spm('install', self.config, spm_file)
    packaged = ' '.join(self.run_spm('files', self.config, 'apache'))

    for name in excluded_names:
        self.assertNotIn(name, packaged)
def tearDown(self): def tearDown(self):
shutil.rmtree(self._tmp_spm) shutil.rmtree(self._tmp_spm)

View File

@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
'''
Tests for the spm remove utility
'''
# Import python libs
from __future__ import absolute_import
import os
import shutil
# Import Salt Testing libs
from tests.support.case import SPMCase
from tests.support.helpers import destructiveTest
@destructiveTest
class SPMRemoveTest(SPMCase):
    '''
    Validate the spm remove command
    '''
    def setUp(self):
        self.config = self._spm_config()
        self._spm_build_files(self.config)

    def test_spm_remove(self):
        '''
        Install the apache formula from an initial repo, then verify
        that ``spm remove`` deletes its files and reports the removal.
        '''
        # First install the apache package from a freshly created repo.
        self._spm_create_update_repo(self.config)
        self.run_spm('install', self.config, 'apache')
        sls_path = os.path.join(self.config['formula_path'], 'apache', 'apache.sls')
        self.assertTrue(os.path.exists(sls_path))

        # Now remove it and make sure the file is gone.
        remove_output = self.run_spm('remove', self.config, 'apache')
        self.assertFalse(os.path.exists(sls_path))
        self.assertIn('... removing apache', remove_output)

    def tearDown(self):
        shutil.rmtree(self._tmp_spm)

View File

@ -105,7 +105,7 @@ class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin):
self.assertSaltTrueReturn(sls) self.assertSaltTrueReturn(sls)
# We must assert against the comment here to make sure the comment reads that the # We must assert against the comment here to make sure the comment reads that the
# command "echo "hello"" was run. This ensures that we made it to the last unless # command "echo "hello"" was run. This ensures that we made it to the last unless
# command in the state. If the comment reads "unless execution succeeded", or similar, # command in the state. If the comment reads "unless condition is true", or similar,
# then the unless state run bailed out after the first unless command succeeded, # then the unless state run bailed out after the first unless command succeeded,
# which is the bug we're regression testing for. # which is the bug we're regression testing for.
self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo "hello"_|-run']['comment'], self.assertEqual(sls['cmd_|-cmd_run_unless_multiple_|-echo "hello"_|-run']['comment'],

View File

@ -23,6 +23,7 @@ import uuid
# Import salt libs # Import salt libs
import salt import salt
import salt.utils.files
# Import third party libs # Import third party libs
import yaml import yaml
@ -255,7 +256,8 @@ class Swarm(object):
pidfile = '{0}.pid'.format(path) pidfile = '{0}.pid'.format(path)
try: try:
try: try:
pid = int(open(pidfile).read().strip()) with salt.utils.files.fopen(pidfile) as fp_:
pid = int(fp_.read().strip())
os.kill(pid, signal.SIGTERM) os.kill(pid, signal.SIGTERM)
except ValueError: except ValueError:
pass pass
@ -298,7 +300,7 @@ class MinionSwarm(Swarm):
data = {} data = {}
if self.opts['config_dir']: if self.opts['config_dir']:
spath = os.path.join(self.opts['config_dir'], 'minion') spath = os.path.join(self.opts['config_dir'], 'minion')
with open(spath) as conf: with salt.utils.files.fopen(spath) as conf:
data = yaml.load(conf) or {} data = yaml.load(conf) or {}
minion_id = '{0}-{1}'.format( minion_id = '{0}-{1}'.format(
self.opts['name'], self.opts['name'],
@ -357,7 +359,7 @@ class MinionSwarm(Swarm):
if self.opts['rand_uuid']: if self.opts['rand_uuid']:
data['grains']['uuid'] = str(uuid.uuid4()) data['grains']['uuid'] = str(uuid.uuid4())
with open(path, 'w+') as fp_: with salt.utils.files.fopen(path, 'w+') as fp_:
yaml.dump(data, fp_) yaml.dump(data, fp_)
self.confs.add(dpath) self.confs.add(dpath)
@ -411,7 +413,7 @@ class MasterSwarm(Swarm):
data = {} data = {}
if self.opts['config_dir']: if self.opts['config_dir']:
spath = os.path.join(self.opts['config_dir'], 'master') spath = os.path.join(self.opts['config_dir'], 'master')
with open(spath) as conf: with salt.utils.files.fopen(spath) as conf:
data = yaml.load(conf) data = yaml.load(conf)
data.update({ data.update({
'log_file': os.path.join(self.conf, 'master.log'), 'log_file': os.path.join(self.conf, 'master.log'),
@ -421,7 +423,7 @@ class MasterSwarm(Swarm):
os.makedirs(self.conf) os.makedirs(self.conf)
path = os.path.join(self.conf, 'master') path = os.path.join(self.conf, 'master')
with open(path, 'w+') as fp_: with salt.utils.files.fopen(path, 'w+') as fp_:
yaml.dump(data, fp_) yaml.dump(data, fp_)
def shutdown(self): def shutdown(self):

View File

@ -627,7 +627,7 @@ class SPMCase(TestCase, AdaptedConfigurationTestCaseMixin):
'spm_repos_config': os.path.join(self._tmp_spm, 'etc', 'spm.repos'), 'spm_repos_config': os.path.join(self._tmp_spm, 'etc', 'spm.repos'),
'spm_cache_dir': os.path.join(self._tmp_spm, 'cache'), 'spm_cache_dir': os.path.join(self._tmp_spm, 'cache'),
'spm_build_dir': os.path.join(self._tmp_spm, 'build'), 'spm_build_dir': os.path.join(self._tmp_spm, 'build'),
'spm_build_exclude': ['.git'], 'spm_build_exclude': ['apache/.git'],
'spm_db_provider': 'sqlite3', 'spm_db_provider': 'sqlite3',
'spm_files_provider': 'local', 'spm_files_provider': 'local',
'spm_db': os.path.join(self._tmp_spm, 'packages.db'), 'spm_db': os.path.join(self._tmp_spm, 'packages.db'),

View File

@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
'''
Script for copying back xml junit files from tests
'''
from __future__ import absolute_import, print_function
import argparse # pylint: disable=minimum-python-version
import os
import paramiko
import subprocess
import yaml
class DownloadArtifacts(object):
    '''
    Download test artifacts (e.g. junit XML files) from a Test Kitchen
    instance back to the local machine over SFTP.
    '''
    def __init__(self, instance, artifacts):
        # Name of the kitchen instance to pull from.
        self.instance = instance
        # Sequence of (remote_path, local_path) pairs to download.
        self.artifacts = artifacts
        self.client = self.setup_transport()

    def setup_transport(self):
        '''
        Build an SFTP client from the instance's kitchen diagnose output.
        '''
        # `kitchen diagnose` dumps the instance configuration as YAML.
        # pylint: disable=minimum-python-version
        config = yaml.load(subprocess.check_output(['bundle', 'exec', 'kitchen', 'diagnose', self.instance]))
        # pylint: enable=minimum-python-version
        state = config['instances'][self.instance]['state_file']
        tport = config['instances'][self.instance]['transport']
        # Prefer live state values, falling back to the transport
        # config, then to conventional defaults.
        transport = paramiko.Transport((
            state['hostname'],
            state.get('port', tport.get('port', 22))
        ))
        pkey = paramiko.rsakey.RSAKey(
            filename=state.get('ssh_key', tport.get('ssh_key', '~/.ssh/id_rsa'))
        )
        transport.connect(
            username=state.get('username', tport.get('username', 'root')),
            pkey=pkey
        )
        return paramiko.SFTPClient.from_transport(transport)

    def download(self):
        '''
        Fetch every configured artifact. A trailing '/' on the remote
        path means "every file directly inside that directory".
        '''
        for remote, local in self.artifacts:
            if remote.endswith('/'):
                for fxml in self.client.listdir(remote):
                    self._do_download(os.path.join(remote, fxml), os.path.join(local, os.path.basename(fxml)))
            else:
                self._do_download(remote, os.path.join(local, os.path.basename(remote)))

    def _do_download(self, remote, local):
        # Copy a single remote file to the given local path.
        print('Copying from {0} to {1}'.format(remote, local))
        self.client.get(remote, local)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Jenkins Artifact Download Helper')
    parser.add_argument(
        '--instance',
        required=True,
        action='store',
        help='Instance on Test Kitchen to pull from',
    )
    parser.add_argument(
        '--download-artifacts',
        dest='artifacts',
        nargs=2,
        action='append',
        metavar=('REMOTE_PATH', 'LOCAL_PATH'),
        help='Download remote artifacts',
    )
    args = parser.parse_args()
    # argparse leaves ``artifacts`` as None when --download-artifacts is
    # never passed; fall back to an empty list so download() is a no-op
    # instead of raising TypeError while iterating None.
    downloader = DownloadArtifacts(args.instance, args.artifacts or [])
    downloader.download()

View File

@ -52,6 +52,7 @@ PYEXEC = 'python{0}.{1}'.format(*sys.version_info)
MOCKBIN = os.path.join(INTEGRATION_TEST_DIR, 'mockbin') MOCKBIN = os.path.join(INTEGRATION_TEST_DIR, 'mockbin')
SCRIPT_DIR = os.path.join(CODE_DIR, 'scripts') SCRIPT_DIR = os.path.join(CODE_DIR, 'scripts')
TMP_STATE_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-state-tree') TMP_STATE_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-state-tree')
TMP_PILLAR_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-pillar-tree')
TMP_PRODENV_STATE_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-prodenv-state-tree') TMP_PRODENV_STATE_TREE = os.path.join(SYS_TMP_DIR, 'salt-temp-prodenv-state-tree')
TMP_CONF_DIR = os.path.join(TMP, 'config') TMP_CONF_DIR = os.path.join(TMP, 'config')
TMP_SUB_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, 'sub-minion') TMP_SUB_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, 'sub-minion')

View File

@ -215,6 +215,7 @@ RUNTIME_VARS = RuntimeVars(
TMP_SYNDIC_MINION_CONF_DIR=paths.TMP_SYNDIC_MINION_CONF_DIR, TMP_SYNDIC_MINION_CONF_DIR=paths.TMP_SYNDIC_MINION_CONF_DIR,
TMP_SCRIPT_DIR=paths.TMP_SCRIPT_DIR, TMP_SCRIPT_DIR=paths.TMP_SCRIPT_DIR,
TMP_STATE_TREE=paths.TMP_STATE_TREE, TMP_STATE_TREE=paths.TMP_STATE_TREE,
TMP_PILLAR_TREE=paths.TMP_PILLAR_TREE,
TMP_PRODENV_STATE_TREE=paths.TMP_PRODENV_STATE_TREE, TMP_PRODENV_STATE_TREE=paths.TMP_PRODENV_STATE_TREE,
RUNNING_TESTS_USER=RUNNING_TESTS_USER, RUNNING_TESTS_USER=RUNNING_TESTS_USER,
RUNTIME_CONFIGS={} RUNTIME_CONFIGS={}

View File

@ -2,6 +2,8 @@
# Import Python libs # Import Python libs
from __future__ import absolute_import from __future__ import absolute_import
from functools import wraps
import stat
# Import Salt libs # Import Salt libs
import salt.config import salt.config
@ -17,6 +19,123 @@ from tests.support.mock import (
) )
def gen_permissions(owner='', group='', others=''):
    '''
    Helper method to generate file permission bits.

    Each argument is a string of permission letters ('r', 'w', 'x')
    for the corresponding class; unknown letters are ignored.
    Usage: gen_permissions('rw', 'r', 'r')
    '''
    perms = 0
    # Map each class onto the stat constant suffix (S_I<flag><suffix>).
    for flags, suffix in ((owner, 'USR'), (group, 'GRP'), (others, 'OTH')):
        for flag in flags:
            perms |= getattr(stat, 'S_I{}{}'.format(flag.upper(), suffix), 0)
    return perms
def patch_check_permissions(uid=1, groups=None, is_windows=False, permissive_pki=False):
    '''
    Decorator factory for AutoKeyTest methods.

    Patches os.stat, the current uid, the gid list and the Windows
    check so that AutoKey.check_permissions() runs against a fully
    controlled environment, and sets the permissive_pki_access option
    on the test's AutoKey instance.
    '''
    # Default: the process belongs only to the group matching its uid.
    if not groups:
        groups = [uid]

    def decorator(func):
        @wraps(func)
        def wrapper(self):
            self.auto_key.opts['permissive_pki_access'] = permissive_pki
            # os.stat is served by the test's os_stat_mock so each test
            # can stage arbitrary file modes/gids via self.stats.
            with patch('os.stat', self.os_stat_mock), \
                    patch('os.getuid', MagicMock(return_value=uid)), \
                    patch('salt.utils.user.get_gid_list', MagicMock(return_value=groups)), \
                    patch('salt.utils.platform.is_windows', MagicMock(return_value=is_windows)):
                func(self)
        return wrapper
    return decorator
@skipIf(NO_MOCK, NO_MOCK_REASON)
class AutoKeyTest(TestCase):
    '''
    Tests for the salt.daemons.masterapi.AutoKey class
    '''

    def setUp(self):
        opts = {'user': 'test_user'}
        self.auto_key = masterapi.AutoKey(opts)
        # Per-test staged stat results, keyed by filename; consumed by
        # os_stat_mock below.
        self.stats = {}

    def os_stat_mock(self, filename):
        '''
        Stand-in for os.stat that serves the mode/gid staged in self.stats.
        '''
        fmode = MagicMock()
        fstats = self.stats.get(filename, {})
        fmode.st_mode = fstats.get('mode', 0)
        fmode.st_gid = fstats.get('gid', 0)
        return fmode

    @patch_check_permissions(uid=0, is_windows=True)
    def test_check_permissions_windows(self):
        '''
        Assert that all files are accepted on windows
        '''
        self.stats['testfile'] = {'mode': gen_permissions('rwx', 'rwx', 'rwx'), 'gid': 2}
        self.assertTrue(self.auto_key.check_permissions('testfile'))

    @patch_check_permissions(permissive_pki=True)
    def test_check_permissions_others_can_write(self):
        '''
        Assert that no file is accepted, when others can write to it
        '''
        self.stats['testfile'] = {'mode': gen_permissions('', '', 'w'), 'gid': 1}
        self.assertFalse(self.auto_key.check_permissions('testfile'))

    @patch_check_permissions()
    def test_check_permissions_group_can_write_not_permissive(self):
        '''
        Assert that no file is accepted, when group can write to it and
        permissive_pki_access=False
        '''
        self.stats['testfile'] = {'mode': gen_permissions('w', 'w', ''), 'gid': 1}
        self.assertFalse(self.auto_key.check_permissions('testfile'))

    @patch_check_permissions(permissive_pki=True)
    def test_check_permissions_group_can_write_permissive(self):
        '''
        Assert that a file is accepted, when group can write to it and
        permissive_pki_access=True
        '''
        self.stats['testfile'] = {'mode': gen_permissions('w', 'w', ''), 'gid': 1}
        self.assertTrue(self.auto_key.check_permissions('testfile'))

    @patch_check_permissions(uid=0, permissive_pki=True)
    def test_check_permissions_group_can_write_permissive_root_in_group(self):
        '''
        Assert that a file is accepted, when group can write to it,
        permissive_pki_access=True, salt is root and in the file owning group
        '''
        self.stats['testfile'] = {'mode': gen_permissions('w', 'w', ''), 'gid': 0}
        self.assertTrue(self.auto_key.check_permissions('testfile'))

    @patch_check_permissions(uid=0, permissive_pki=True)
    def test_check_permissions_group_can_write_permissive_root_not_in_group(self):
        '''
        Assert that no file is accepted, when group can write to it,
        permissive_pki_access=True, salt is root and **not** in the file
        owning group
        '''
        self.stats['testfile'] = {'mode': gen_permissions('w', 'w', ''), 'gid': 1}
        self.assertFalse(self.auto_key.check_permissions('testfile'))

    @patch_check_permissions()
    def test_check_permissions_only_owner_can_write(self):
        '''
        Assert that a file is accepted, when only the owner can write to it
        '''
        self.stats['testfile'] = {'mode': gen_permissions('w', '', ''), 'gid': 1}
        self.assertTrue(self.auto_key.check_permissions('testfile'))

    @patch_check_permissions(uid=0)
    def test_check_permissions_only_owner_can_write_root(self):
        '''
        Assert that a file is accepted, when only the owner can write to it
        and salt is root
        '''
        self.stats['testfile'] = {'mode': gen_permissions('w', '', ''), 'gid': 0}
        self.assertTrue(self.auto_key.check_permissions('testfile'))
@skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(NO_MOCK, NO_MOCK_REASON)
class LocalFuncsTestCase(TestCase): class LocalFuncsTestCase(TestCase):
''' '''

View File

@ -11,7 +11,12 @@ import textwrap
from tests.support.mixins import LoaderModuleMockMixin from tests.support.mixins import LoaderModuleMockMixin
from tests.support.paths import TMP from tests.support.paths import TMP
from tests.support.unit import TestCase, skipIf from tests.support.unit import TestCase, skipIf
from tests.support.mock import MagicMock, patch from tests.support.mock import MagicMock, patch, mock_open
try:
import pytest
except ImportError:
pytest = None
# Import Salt libs # Import Salt libs
import salt.config import salt.config
@ -738,94 +743,45 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
saltenv='base') saltenv='base')
self.assertEqual(ret, 'This is a templated file.') self.assertEqual(ret, 'This is a templated file.')
def test_line_ensure_location_start(self):
'''
Check that file.line uses ``location=start`` if a
match is not found and replaces content if it is.
'''
# File DOESN'T contain the match
with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile:
tfile.write(salt.utils.to_bytes('first=foo' + os.linesep))
tfile.flush()
filemod.line(tfile.name,
content='second=bar',
match='second=',
mode='ensure',
location='start')
expected = os.linesep.join(['second=bar', 'first=foo']) + os.linesep
with salt.utils.files.fopen(tfile.name) as tfile2:
self.assertEqual(tfile2.read(), expected)
# File DOES contain the match @skipIf(pytest is None, 'PyTest required for this set of tests')
with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile: class FilemodLineTests(TestCase, LoaderModuleMockMixin):
tfile.write(salt.utils.to_bytes(os.linesep.join(['first=foo', 'second=foo']) + os.linesep)) '''
tfile.flush() Unit tests for file.line
filemod.line(tfile.name, '''
content='second=bar', def setUp(self):
match='second=', class AnyAttr(object):
mode='ensure', def __getattr__(self, item):
location='start') return 0
expected = os.linesep.join(['first=foo', 'second=bar']) + os.linesep
with salt.utils.files.fopen(tfile.name) as tfile2:
self.assertEqual(tfile2.read(), expected)
def test_line_ensure_location_end(self): def __call__(self, *args, **kwargs):
''' return self
Check that file.line uses ``location=end`` if a self._anyattr = AnyAttr()
match is not found and replaces content if it is.
'''
# File DOESN'T contain the match
with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile:
tfile.write(salt.utils.to_bytes('first=foo'))
tfile.flush()
filemod.line(tfile.name,
content='second=bar',
match='second=',
mode='ensure',
location='end')
expected = os.linesep.join(['first=foo', 'second=bar'])
with salt.utils.files.fopen(tfile.name) as tfile2:
self.assertEqual(tfile2.read(), expected)
# File DOES contain the match def tearDown(self):
with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile: del self._anyattr
tfile.write(salt.utils.to_bytes(os.linesep.join(['second=foo', 'first=foo']) + os.linesep))
tfile.flush()
filemod.line(tfile.name,
content='second=bar',
match='second=',
mode='ensure',
location='end')
expected = os.linesep.join(['second=bar', 'first=foo']) + os.linesep
with salt.utils.files.fopen(tfile.name) as tfile2:
self.assertEqual(tfile2.read(), expected)
def test_replace_line_in_empty_file(self): def setup_loader_modules(self):
''' return {
Tests that when calling file.line with ``mode=replace``, filemod: {
the function doesn't stack trace if the file is empty. '__salt__': {
Should return ``False``. 'config.manage_mode': configmod.manage_mode,
'cmd.run': cmdmod.run,
See Issue #31135. 'cmd.run_all': cmdmod.run_all
''' },
# Create an empty temporary named file '__opts__': {
empty_file = tempfile.NamedTemporaryFile(delete=False, 'test': False,
mode='w+') 'file_roots': {'base': 'tmp'},
'pillar_roots': {'base': 'tmp'},
# Assert that the file was created and is empty 'cachedir': 'tmp',
self.assertEqual(os.stat(empty_file.name).st_size, 0) 'grains': {},
},
# Now call the function on the empty file and assert '__grains__': {'kernel': 'Linux'}
# the return is False instead of stack-tracing }
self.assertFalse(filemod.line(empty_file.name, }
content='foo',
match='bar',
mode='replace'))
# Close and remove the file
empty_file.close()
os.remove(empty_file.name)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
def test_delete_line_in_empty_file(self): def test_delete_line_in_empty_file(self):
''' '''
Tests that when calling file.line with ``mode=delete``, Tests that when calling file.line with ``mode=delete``,
@ -834,23 +790,337 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
See Issue #38438. See Issue #38438.
''' '''
# Create an empty temporary named file for mode in ['delete', 'replace']:
empty_file = tempfile.NamedTemporaryFile(delete=False, _log = MagicMock()
mode='w+') with patch('salt.utils.files.fopen', mock_open(read_data='')):
with patch('os.stat', self._anyattr):
with patch('salt.modules.file.log', _log):
assert not filemod.line('/dummy/path', content='foo', match='bar', mode=mode)
assert 'Cannot find text to {0}'.format(mode) in _log.warning.call_args_list[0][0][0]
# Assert that the file was created and is empty @patch('os.path.realpath', MagicMock())
self.assertEqual(os.stat(empty_file.name).st_size, 0) @patch('os.path.isfile', MagicMock(return_value=True))
def test_line_modecheck_failure(self):
'''
Test for file.line for empty or wrong mode.
Calls unknown or empty mode and expects failure.
:return:
'''
for mode, err_msg in [(None, 'How to process the file'), ('nonsense', 'Unknown mode')]:
with pytest.raises(CommandExecutionError) as cmd_err:
filemod.line('foo', mode=mode)
assert err_msg in str(cmd_err)
# Now call the function on the empty file and assert @patch('os.path.realpath', MagicMock())
# the return is False instead of stack-tracing @patch('os.path.isfile', MagicMock(return_value=True))
self.assertFalse(filemod.line(empty_file.name, def test_line_no_content(self):
content='foo', '''
match='bar', Test for file.line for an empty content when not deleting anything.
mode='delete')) :return:
'''
for mode in ['insert', 'ensure', 'replace']:
with pytest.raises(CommandExecutionError) as cmd_err:
filemod.line('foo', mode=mode)
assert 'Content can only be empty if mode is "delete"' in str(cmd_err)
# Close and remove the file @patch('os.path.realpath', MagicMock())
empty_file.close() @patch('os.path.isfile', MagicMock(return_value=True))
os.remove(empty_file.name) @patch('os.stat', MagicMock())
def test_line_insert_no_location_no_before_no_after(self):
    '''
    Test for file.line in insert mode with no anchor given.

    Without ``location``, ``before`` or ``after`` there is nowhere to
    put the content, so a CommandExecutionError is expected.
    :return:
    '''
    files_fopen = mock_open(read_data='test data')
    with patch('salt.utils.files.fopen', files_fopen):
        with pytest.raises(CommandExecutionError) as cmd_err:
            filemod.line('foo', content='test content', mode='insert')
        # Inspect the raised exception directly; str(cmd_err) includes
        # position information and is pytest-version dependent.
        assert '"location" or "before/after"' in str(cmd_err.value)
def test_util_starts_till(self):
    '''
    Test for the file._starts_till helper.

    Covers the three possible outcomes: the probe diverges part-way
    through (positive offset), matches exactly (0), or shares no
    common start at all (-1).
    :return:
    '''
    src = 'here is something'
    cases = [
        ('here quite something else', 1),   # diverges after the start
        ('here is something', 0),           # identical strings
        ('and here is something', -1),      # no shared beginning
    ]
    for probe, expected in cases:
        assert expected == filemod._starts_till(src=src, probe=probe)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_after_no_pattern(self):
    '''
    Test for file.line inserting after a specific line when the anchor
    is given literally (no regex pattern).
    See issue #38670
    :return:
    '''
    file_content = 'file_roots:\n base:\n - /srv/salt'
    file_modified = 'file_roots:\n base:\n - /srv/salt\n - /srv/custom'
    cfg_content = '- /srv/custom'
    reader = mock_open(read_data=file_content)
    writer = mock_open()
    with patch('salt.utils.files.fopen', reader), \
            patch('salt.utils.atomicfile.atomic_open', writer):
        filemod.line('foo', content=cfg_content, after='- /srv/salt', mode='insert')
    writes = writer().write.call_args_list
    # A single atomic write must carry the whole modified file body.
    assert len(writes) == 1
    assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_after_pattern(self):
    '''
    Test for file.line inserting after a specific line when the anchor
    is given as a regular expression.
    See issue #38670
    :return:
    '''
    file_content = 'file_boots:\n - /rusty\nfile_roots:\n base:\n - /srv/salt\n - /srv/sugar'
    file_modified = ('file_boots:\n - /rusty\nfile_roots:\n custom:\n '
                     '- /srv/custom\n base:\n - /srv/salt\n - /srv/sugar')
    cfg_content = ' custom:\n - /srv/custom'
    for after_line in ('file_r.*', '.*roots'):
        reader = mock_open(read_data=file_content)
        writer = mock_open()
        with patch('salt.utils.files.fopen', reader), \
                patch('salt.utils.atomicfile.atomic_open', writer):
            filemod.line('foo', content=cfg_content, after=after_line, mode='insert', indent=False)
        writes = writer().write.call_args_list
        # One atomic write per run, containing the full modified body.
        assert len(writes) == 1
        assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_before(self):
    '''
    Test for file.line inserting before a specific line, anchored both
    literally and via regular expressions.
    See issue #38670
    :return:
    '''
    file_content = 'file_roots:\n base:\n - /srv/salt\n - /srv/sugar'
    file_modified = 'file_roots:\n base:\n - /srv/custom\n - /srv/salt\n - /srv/sugar'
    cfg_content = '- /srv/custom'
    for before_line in ('/srv/salt', '/srv/sa.*t', '/sr.*'):
        reader = mock_open(read_data=file_content)
        writer = mock_open()
        with patch('salt.utils.files.fopen', reader), \
                patch('salt.utils.atomicfile.atomic_open', writer):
            filemod.line('foo', content=cfg_content, before=before_line, mode='insert')
        writes = writer().write.call_args_list
        # One atomic write per run, containing the full modified body.
        assert len(writes) == 1
        assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_before_after(self):
    '''
    Test for file.line inserting between a before/after anchor pair.
    See issue #38670
    :return:
    '''
    file_content = 'file_roots:\n base:\n - /srv/salt\n - /srv/pepper\n - /srv/sugar'
    file_modified = ('file_roots:\n base:\n - /srv/salt\n '
                     '- /srv/pepper\n - /srv/coriander\n - /srv/sugar')
    cfg_content = '- /srv/coriander'
    for b_line, a_line in [('/srv/sugar', '/srv/salt')]:
        reader = mock_open(read_data=file_content)
        writer = mock_open()
        with patch('salt.utils.files.fopen', reader), \
                patch('salt.utils.atomicfile.atomic_open', writer):
            filemod.line('foo', content=cfg_content, before=b_line, after=a_line, mode='insert')
        writes = writer().write.call_args_list
        # One atomic write per run, containing the full modified body.
        assert len(writes) == 1
        assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_start(self):
    '''
    Test for file.line inserting content at the very beginning of the
    file (location='start').
    :return:
    '''
    cfg_content = 'everything: fantastic'
    file_content = 'file_roots:\n base:\n - /srv/salt\n - /srv/sugar'
    # The new line is prepended, followed by the untouched body.
    file_modified = cfg_content + '\n' + file_content
    reader = mock_open(read_data=file_content)
    writer = mock_open()
    with patch('salt.utils.files.fopen', reader), \
            patch('salt.utils.atomicfile.atomic_open', writer):
        filemod.line('foo', content=cfg_content, location='start', mode='insert')
    writes = writer().write.call_args_list
    assert len(writes) == 1
    assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_end(self):
    '''
    Test for file.line appending content at the end of the file
    (location='end').
    :return:
    '''
    cfg_content = 'everything: fantastic'
    file_content = 'file_roots:\n base:\n - /srv/salt\n - /srv/sugar'
    # The untouched body comes first, the new line is appended.
    file_modified = file_content + '\n' + cfg_content
    reader = mock_open(read_data=file_content)
    writer = mock_open()
    with patch('salt.utils.files.fopen', reader), \
            patch('salt.utils.atomicfile.atomic_open', writer):
        filemod.line('foo', content=cfg_content, location='end', mode='insert')
    writes = writer().write.call_args_list
    assert len(writes) == 1
    assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_before(self):
    '''
    Test for file.line in ensure mode placing the content immediately
    before a given anchor line.
    :return:
    '''
    cfg_content = '/etc/init.d/someservice restart'
    file_content = '#!/bin/bash\n\nexit 0'
    file_modified = '#!/bin/bash\n\n{0}\nexit 0'.format(cfg_content)
    reader = mock_open(read_data=file_content)
    writer = mock_open()
    with patch('salt.utils.files.fopen', reader), \
            patch('salt.utils.atomicfile.atomic_open', writer):
        filemod.line('foo', content=cfg_content, before='exit 0', mode='ensure')
    writes = writer().write.call_args_list
    assert len(writes) == 1
    assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_after(self):
    '''
    Test for file.line in ensure mode placing the content immediately
    after a given anchor line.
    :return:
    '''
    cfg_content = 'exit 0'
    file_content = '#!/bin/bash\n/etc/init.d/someservice restart'
    file_modified = '#!/bin/bash\n/etc/init.d/someservice restart\n{0}'.format(cfg_content)
    reader = mock_open(read_data=file_content)
    writer = mock_open()
    with patch('salt.utils.files.fopen', reader), \
            patch('salt.utils.atomicfile.atomic_open', writer):
        filemod.line('foo', content=cfg_content, after='/etc/init.d/someservice restart', mode='ensure')
    writes = writer().write.call_args_list
    assert len(writes) == 1
    assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_beforeafter_twolines(self):
    '''
    Test for file.line in ensure mode inserting the content between two
    anchor lines (both literal and regex anchors).
    :return:
    '''
    cfg_content = 'EXTRA_GROUPS="dialout cdrom floppy audio video plugdev users"'
    # pylint: disable=W1401
    file_content = 'NAME_REGEX="^[a-z][-a-z0-9_]*\$"\nSKEL_IGNORE_REGEX="dpkg-(old|new|dist|save)"'
    # pylint: enable=W1401
    # The fixture is joined with '\n' literals, so split/join on '\n'
    # as well.  Splitting on os.linesep would break on Windows, where
    # os.linesep is '\r\n' and never occurs in file_content (the
    # two-value unpacking below would then fail).
    after, before = file_content.split('\n')
    file_modified = '\n'.join([after, cfg_content, before])
    for (_after, _before) in [(after, before), ('NAME_.*', 'SKEL_.*')]:
        files_fopen = mock_open(read_data=file_content)
        with patch('salt.utils.files.fopen', files_fopen):
            atomic_opener = mock_open()
            with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
                filemod.line('foo', content=cfg_content, after=_after, before=_before, mode='ensure')
            assert 1 == len(atomic_opener().write.call_args_list)
            assert file_modified == atomic_opener().write.call_args_list[0][0][0]
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_beforeafter_twolines_exists(self):
    '''
    Test for file.line in ensure mode when the content already exists
    between the two anchor lines: nothing may be written and the call
    must report no change.
    :return:
    '''
    cfg_content = 'EXTRA_GROUPS="dialout"'
    # pylint: disable=W1401
    file_content = 'NAME_REGEX="^[a-z][-a-z0-9_]*\$"\nEXTRA_GROUPS="dialout"' \
                   '\nSKEL_IGNORE_REGEX="dpkg-(old|new|dist|save)"'
    # pylint: enable=W1401
    # Split on '\n', not os.linesep: the fixture uses '\n' literals, so
    # splitting on os.linesep ('\r\n' on Windows) would not split at all
    # and the index [2] below would raise IndexError.
    after, before = file_content.split('\n')[0], file_content.split('\n')[2]
    for (_after, _before) in [(after, before), ('NAME_.*', 'SKEL_.*')]:
        files_fopen = mock_open(read_data=file_content)
        with patch('salt.utils.files.fopen', files_fopen):
            atomic_opener = mock_open()
            with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
                result = filemod.line('foo', content=cfg_content, after=_after, before=_before, mode='ensure')
            # No write and a falsy result: the line was already in place.
            assert 0 == len(atomic_opener().write.call_args_list)
            assert not result
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_insert_ensure_beforeafter_rangelines(self):
    '''
    Test for file.line in ensure mode when more than one line sits
    between the before/after anchors: the call must refuse to guess and
    raise instead of changing anything.
    :return:
    '''
    cfg_content = 'EXTRA_GROUPS="dialout cdrom floppy audio video plugdev users"'
    # pylint: disable=W1401
    file_content = 'NAME_REGEX="^[a-z][-a-z0-9_]*\$"\nSETGID_HOME=no\nADD_EXTRA_GROUPS=1\n' \
                   'SKEL_IGNORE_REGEX="dpkg-(old|new|dist|save)"'
    # pylint: enable=W1401
    # Split on '\n', not os.linesep: the fixture uses '\n' literals, so
    # splitting on os.linesep ('\r\n' on Windows) would yield a single
    # element and the anchors below would be wrong.
    after, before = file_content.split('\n')[0], file_content.split('\n')[-1]
    for (_after, _before) in [(after, before), ('NAME_.*', 'SKEL_.*')]:
        files_fopen = mock_open(read_data=file_content)
        with patch('salt.utils.files.fopen', files_fopen):
            atomic_opener = mock_open()
            with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
                with pytest.raises(CommandExecutionError) as cmd_err:
                    filemod.line('foo', content=cfg_content, after=_after, before=_before, mode='ensure')
                # Check the exception itself, not the ExceptionInfo
                # wrapper, whose str() is pytest-version dependent.
                assert 'Found more than one line between boundaries "before" and "after"' in str(cmd_err.value)
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_delete(self):
    '''
    Test for file.line deleting a specific line, matched literally and
    through several regex variants.
    :return:
    '''
    file_content = 'file_roots:\n base:\n - /srv/salt\n - /srv/pepper\n - /srv/sugar'
    file_modified = 'file_roots:\n base:\n - /srv/salt\n - /srv/sugar'
    for content in ('/srv/pepper', '/srv/pepp*', '/srv/p.*', '/sr.*pe.*'):
        reader = mock_open(read_data=file_content)
        writer = mock_open()
        with patch('salt.utils.files.fopen', reader), \
                patch('salt.utils.atomicfile.atomic_open', writer):
            filemod.line('foo', content=content, mode='delete')
        writes = writer().write.call_args_list
        # One atomic write per run, body written without the target line.
        assert len(writes) == 1
        assert writes[0][0][0] == file_modified
@patch('os.path.realpath', MagicMock())
@patch('os.path.isfile', MagicMock(return_value=True))
@patch('os.stat', MagicMock())
def test_line_replace(self):
    '''
    Test for file.line replacing a specific line, matched literally and
    through several regex variants.
    :return:
    '''
    file_content = 'file_roots:\n base:\n - /srv/salt\n - /srv/pepper\n - /srv/sugar'
    file_modified = 'file_roots:\n base:\n - /srv/salt\n - /srv/natrium-chloride\n - /srv/sugar'
    for match in ('/srv/pepper', '/srv/pepp*', '/srv/p.*', '/sr.*pe.*'):
        reader = mock_open(read_data=file_content)
        writer = mock_open()
        with patch('salt.utils.files.fopen', reader), \
                patch('salt.utils.atomicfile.atomic_open', writer):
            filemod.line('foo', content='- /srv/natrium-chloride', match=match, mode='replace')
        writes = writer().write.call_args_list
        # One atomic write per run, with the matched line substituted.
        assert len(writes) == 1
        assert writes[0][0][0] == file_modified
class FileBasicsTestCase(TestCase, LoaderModuleMockMixin): class FileBasicsTestCase(TestCase, LoaderModuleMockMixin):

View File

@ -130,10 +130,10 @@ class BuildoutTestCase(Base):
def test_onlyif_unless(self): def test_onlyif_unless(self):
b_dir = os.path.join(self.tdir, 'b') b_dir = os.path.join(self.tdir, 'b')
ret = buildout.buildout(b_dir, onlyif='/bin/false') ret = buildout.buildout(b_dir, onlyif='/bin/false')
self.assertTrue(ret['comment'] == 'onlyif execution failed') self.assertTrue(ret['comment'] == 'onlyif condition is false')
self.assertTrue(ret['status'] is True) self.assertTrue(ret['status'] is True)
ret = buildout.buildout(b_dir, unless='/bin/true') ret = buildout.buildout(b_dir, unless='/bin/true')
self.assertTrue(ret['comment'] == 'unless execution succeeded') self.assertTrue(ret['comment'] == 'unless condition is true')
self.assertTrue(ret['status'] is True) self.assertTrue(ret['status'] is True)
@requires_network() @requires_network()

View File

@ -0,0 +1,53 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Roald Nefs (info@roaldnefs.com)`
tests.unit.returners.telegram_return_test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch
# Import salt libs
import salt.returners.telegram_return as telegram
@skipIf(NO_MOCK, NO_MOCK_REASON)
class TelegramReturnerTestCase(TestCase, LoaderModuleMockMixin):
'''
Test Telegram Returner
'''
def setup_loader_modules(self):
return {telegram: {}}
def test_returner(self):
'''
Test to see if the Telegram returner sends a message
'''
ret = {'id': '12345',
'fun': 'mytest.func',
'fun_args': 'myfunc args',
'jid': '54321',
'return': 'The room is on fire as shes fixing her hair'}
options = {'chat_id': '',
'token': ''}
class MockRequest(object):
"""
Mock of requests response
"""
def json(self):
return {'message_id': ''}
with patch('salt.returners.telegram_return._get_options',
MagicMock(return_value=options)), \
patch('salt.returners.telegram_return.requests.post',
MagicMock(return_value=MockRequest())):
self.assertTrue(telegram.returner(ret))

View File

@ -47,7 +47,7 @@ class CloudTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(cloud.__salt__, {'cmd.retcode': mock, with patch.dict(cloud.__salt__, {'cmd.retcode': mock,
'cloud.has_instance': mock_bool, 'cloud.has_instance': mock_bool,
'cloud.create': mock_dict}): 'cloud.create': mock_dict}):
comt = ('onlyif execution failed') comt = ('onlyif condition is false')
ret.update({'comment': comt}) ret.update({'comment': comt})
self.assertDictEqual(cloud.present(name, cloud_provider, self.assertDictEqual(cloud.present(name, cloud_provider,
onlyif=False), ret) onlyif=False), ret)
@ -55,7 +55,7 @@ class CloudTestCase(TestCase, LoaderModuleMockMixin):
self.assertDictEqual(cloud.present(name, cloud_provider, onlyif=''), self.assertDictEqual(cloud.present(name, cloud_provider, onlyif=''),
ret) ret)
comt = ('unless execution succeeded') comt = ('unless condition is true')
ret.update({'comment': comt}) ret.update({'comment': comt})
self.assertDictEqual(cloud.present(name, cloud_provider, self.assertDictEqual(cloud.present(name, cloud_provider,
unless=True), ret) unless=True), ret)
@ -98,13 +98,13 @@ class CloudTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(cloud.__salt__, {'cmd.retcode': mock, with patch.dict(cloud.__salt__, {'cmd.retcode': mock,
'cloud.has_instance': mock_bool, 'cloud.has_instance': mock_bool,
'cloud.destroy': mock_dict}): 'cloud.destroy': mock_dict}):
comt = ('onlyif execution failed') comt = ('onlyif condition is false')
ret.update({'comment': comt}) ret.update({'comment': comt})
self.assertDictEqual(cloud.absent(name, onlyif=False), ret) self.assertDictEqual(cloud.absent(name, onlyif=False), ret)
self.assertDictEqual(cloud.absent(name, onlyif=''), ret) self.assertDictEqual(cloud.absent(name, onlyif=''), ret)
comt = ('unless execution succeeded') comt = ('unless condition is true')
ret.update({'comment': comt}) ret.update({'comment': comt})
self.assertDictEqual(cloud.absent(name, unless=True), ret) self.assertDictEqual(cloud.absent(name, unless=True), ret)
@ -152,14 +152,14 @@ class CloudTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(cloud.__salt__, {'cmd.retcode': mock, with patch.dict(cloud.__salt__, {'cmd.retcode': mock,
'cloud.profile': mock_d, 'cloud.profile': mock_d,
'cloud.action': mock_dict}): 'cloud.action': mock_dict}):
comt = ('onlyif execution failed') comt = ('onlyif condition is false')
ret.update({'comment': comt}) ret.update({'comment': comt})
self.assertDictEqual(cloud.profile(name, profile, onlyif=False), self.assertDictEqual(cloud.profile(name, profile, onlyif=False),
ret) ret)
self.assertDictEqual(cloud.profile(name, profile, onlyif=''), ret) self.assertDictEqual(cloud.profile(name, profile, onlyif=''), ret)
comt = ('unless execution succeeded') comt = ('unless condition is true')
ret.update({'comment': comt}) ret.update({'comment': comt})
self.assertDictEqual(cloud.profile(name, profile, unless=True), ret) self.assertDictEqual(cloud.profile(name, profile, unless=True), ret)

View File

@ -41,7 +41,7 @@ class CmdTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=1) mock = MagicMock(return_value=1)
with patch.dict(cmd.__salt__, {'cmd.retcode': mock}): with patch.dict(cmd.__salt__, {'cmd.retcode': mock}):
with patch.dict(cmd.__opts__, {'test': True}): with patch.dict(cmd.__opts__, {'test': True}):
ret = {'comment': 'onlyif execution failed', 'result': True, ret = {'comment': 'onlyif condition is false', 'result': True,
'skip_watch': True} 'skip_watch': True}
self.assertDictEqual(cmd.mod_run_check(cmd_kwargs, '', '', creates), ret) self.assertDictEqual(cmd.mod_run_check(cmd_kwargs, '', '', creates), ret)
@ -50,13 +50,13 @@ class CmdTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=1) mock = MagicMock(return_value=1)
with patch.dict(cmd.__salt__, {'cmd.retcode': mock}): with patch.dict(cmd.__salt__, {'cmd.retcode': mock}):
with patch.dict(cmd.__opts__, {'test': True}): with patch.dict(cmd.__opts__, {'test': True}):
ret = {'comment': 'onlyif execution failed: ', 'result': True, ret = {'comment': 'onlyif condition is false: ', 'result': True,
'skip_watch': True} 'skip_watch': True}
self.assertDictEqual(cmd.mod_run_check(cmd_kwargs, [''], '', creates), ret) self.assertDictEqual(cmd.mod_run_check(cmd_kwargs, [''], '', creates), ret)
mock = MagicMock(return_value=0) mock = MagicMock(return_value=0)
with patch.dict(cmd.__salt__, {'cmd.retcode': mock}): with patch.dict(cmd.__salt__, {'cmd.retcode': mock}):
ret = {'comment': 'unless execution succeeded', 'result': True, ret = {'comment': 'unless condition is true', 'result': True,
'skip_watch': True} 'skip_watch': True}
self.assertDictEqual(cmd.mod_run_check(cmd_kwargs, None, '', creates), ret) self.assertDictEqual(cmd.mod_run_check(cmd_kwargs, None, '', creates), ret)
@ -143,7 +143,7 @@ class CmdTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=1) mock = MagicMock(return_value=1)
with patch.dict(cmd.__salt__, {'cmd.retcode': mock}): with patch.dict(cmd.__salt__, {'cmd.retcode': mock}):
with patch.dict(cmd.__opts__, {'test': False}): with patch.dict(cmd.__opts__, {'test': False}):
comt = ('onlyif execution failed') comt = ('onlyif condition is false')
ret.update({'comment': comt, 'result': True, ret.update({'comment': comt, 'result': True,
'skip_watch': True}) 'skip_watch': True})
self.assertDictEqual(cmd.run(name, onlyif=''), ret) self.assertDictEqual(cmd.run(name, onlyif=''), ret)
@ -186,7 +186,7 @@ class CmdTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=1) mock = MagicMock(return_value=1)
with patch.dict(cmd.__salt__, {'cmd.retcode': mock}): with patch.dict(cmd.__salt__, {'cmd.retcode': mock}):
with patch.dict(cmd.__opts__, {'test': False}): with patch.dict(cmd.__opts__, {'test': False}):
comt = ('onlyif execution failed') comt = ('onlyif condition is false')
ret.update({'comment': comt, 'result': True, ret.update({'comment': comt, 'result': True,
'skip_watch': True, 'changes': {}}) 'skip_watch': True, 'changes': {}})
self.assertDictEqual(cmd.script(name, onlyif=''), ret) self.assertDictEqual(cmd.script(name, onlyif=''), ret)
@ -222,7 +222,7 @@ class CmdTestCase(TestCase, LoaderModuleMockMixin):
self.assertDictEqual(cmd.call(name, func), ret) self.assertDictEqual(cmd.call(name, func), ret)
flag = False flag = False
comt = ('onlyif execution failed') comt = ('onlyif condition is false')
ret.update({'comment': '', 'result': False, ret.update({'comment': '', 'result': False,
'changes': {'retval': []}}) 'changes': {'retval': []}})
self.assertDictEqual(cmd.call(name, func), ret) self.assertDictEqual(cmd.call(name, func), ret)
@ -230,7 +230,7 @@ class CmdTestCase(TestCase, LoaderModuleMockMixin):
mock = MagicMock(return_value=1) mock = MagicMock(return_value=1)
with patch.dict(cmd.__salt__, {'cmd.retcode': mock}): with patch.dict(cmd.__salt__, {'cmd.retcode': mock}):
with patch.dict(cmd.__opts__, {'test': True}): with patch.dict(cmd.__opts__, {'test': True}):
comt = ('onlyif execution failed') comt = ('onlyif condition is false')
ret.update({'comment': comt, 'skip_watch': True, ret.update({'comment': comt, 'skip_watch': True,
'result': True, 'changes': {}}) 'result': True, 'changes': {}})
self.assertDictEqual(cmd.call(name, func, onlyif=''), ret) self.assertDictEqual(cmd.call(name, func, onlyif=''), ret)

View File

@ -814,7 +814,8 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
ret.update({ ret.update({
'comment': comt, 'comment': comt,
'result': None, 'result': None,
'pchanges': p_chg 'pchanges': p_chg,
'changes': {'/etc/grub.conf': {'directory': 'new'}}
}) })
self.assertDictEqual(filestate.directory(name, self.assertDictEqual(filestate.directory(name,
user=user, user=user,
@ -825,7 +826,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
with patch.object(os.path, 'isdir', mock_f): with patch.object(os.path, 'isdir', mock_f):
comt = ('No directory to create {0} in' comt = ('No directory to create {0} in'
.format(name)) .format(name))
ret.update({'comment': comt, 'result': False}) ret.update({'comment': comt, 'result': False, 'changes': {}})
self.assertDictEqual(filestate.directory self.assertDictEqual(filestate.directory
(name, user=user, group=group), (name, user=user, group=group),
ret) ret)

View File

@ -19,8 +19,9 @@ from tests.support.mock import (
patch) patch)
# Import Salt Libs # Import Salt Libs
import salt.utils.stringutils
from salt.states import kubernetes from salt.states import kubernetes
from salt.ext.six import iteritems from salt.ext import six
@skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(NO_MOCK, NO_MOCK_REASON)
@ -61,8 +62,10 @@ class KubernetesTestCase(TestCase, LoaderModuleMockMixin):
data=data, data=data,
) )
# Base64 all of the values just like kubectl does # Base64 all of the values just like kubectl does
for key, value in iteritems(secret_data['data']): for key, value in six.iteritems(secret_data['data']):
secret_data['data'][key] = base64.b64encode(value) secret_data['data'][key] = base64.b64encode(
salt.utils.stringutils.to_bytes(value)
)
return secret_data return secret_data

View File

@ -337,7 +337,7 @@ class MacPackageTestCase(TestCase, LoaderModuleMockMixin):
''' '''
expected = { expected = {
'changes': {}, 'changes': {},
'comment': 'onlyif execution failed', 'comment': 'onlyif condition is false',
'skip_watch': True, 'skip_watch': True,
'result': True, 'result': True,
'name': '/path/to/file.pkg', 'name': '/path/to/file.pkg',
@ -355,7 +355,7 @@ class MacPackageTestCase(TestCase, LoaderModuleMockMixin):
''' '''
expected = { expected = {
'changes': {}, 'changes': {},
'comment': 'unless execution succeeded', 'comment': 'unless condition is true',
'skip_watch': True, 'skip_watch': True,
'result': True, 'result': True,
'name': '/path/to/file.pkg', 'name': '/path/to/file.pkg',

View File

@ -64,14 +64,14 @@ class BuildoutTestCase(Base):
ret = buildout.installed(b_dir, ret = buildout.installed(b_dir,
python=self.py_st, python=self.py_st,
onlyif='/bin/false') onlyif='/bin/false')
self.assertEqual(ret['comment'], '\nonlyif execution failed') self.assertEqual(ret['comment'], '\nonlyif condition is false')
self.assertEqual(ret['result'], True) self.assertEqual(ret['result'], True)
self.assertTrue('/b' in ret['name']) self.assertTrue('/b' in ret['name'])
b_dir = os.path.join(self.tdir, 'b') b_dir = os.path.join(self.tdir, 'b')
ret = buildout.installed(b_dir, ret = buildout.installed(b_dir,
python=self.py_st, python=self.py_st,
unless='/bin/true') unless='/bin/true')
self.assertEqual(ret['comment'], '\nunless execution succeeded') self.assertEqual(ret['comment'], '\nunless condition is true')
self.assertEqual(ret['result'], True) self.assertEqual(ret['result'], True)
self.assertTrue('/b' in ret['name']) self.assertTrue('/b' in ret['name'])
ret = buildout.installed(b_dir, python=self.py_st) ret = buildout.installed(b_dir, python=self.py_st)

View File

@ -309,21 +309,21 @@ class PyDSLRendererTestCase(CommonTestCaseBoilerplate):
- cwd: / - cwd: /
.Y: .Y:
cmd.run: cmd.run:
- name: echo Y >> {1} - name: echo Y >> {0}
- cwd: / - cwd: /
.Z: .Z:
cmd.run: cmd.run:
- name: echo Z >> {2} - name: echo Z >> {0}
- cwd: / - cwd: /
'''.format(output, output, output))) '''.format(output.replace('\\', '/'))))
write_to(os.path.join(dirpath, 'yyy.sls'), textwrap.dedent('''\ write_to(os.path.join(dirpath, 'yyy.sls'), textwrap.dedent('''\
#!pydsl|stateconf -ps #!pydsl|stateconf -ps
__pydsl__.set(ordered=True) __pydsl__.set(ordered=True)
state('.D').cmd.run('echo D >> {0}', cwd='/') state('.D').cmd.run('echo D >> {0}', cwd='/')
state('.E').cmd.run('echo E >> {1}', cwd='/') state('.E').cmd.run('echo E >> {0}', cwd='/')
state('.F').cmd.run('echo F >> {2}', cwd='/') state('.F').cmd.run('echo F >> {0}', cwd='/')
'''.format(output, output, output))) '''.format(output.replace('\\', '/'))))
write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\ write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl|stateconf -ps #!pydsl|stateconf -ps
@ -339,9 +339,9 @@ class PyDSLRendererTestCase(CommonTestCaseBoilerplate):
__pydsl__.set(ordered=True) __pydsl__.set(ordered=True)
state('.A').cmd.run('echo A >> {0}', cwd='/') state('.A').cmd.run('echo A >> {0}', cwd='/')
state('.B').cmd.run('echo B >> {1}', cwd='/') state('.B').cmd.run('echo B >> {0}', cwd='/')
state('.C').cmd.run('echo C >> {2}', cwd='/') state('.C').cmd.run('echo C >> {0}', cwd='/')
'''.format(output, output, output))) '''.format(output.replace('\\', '/'))))
self.state_highstate({'base': ['aaa']}, dirpath) self.state_highstate({'base': ['aaa']}, dirpath)
with salt.utils.files.fopen(output, 'r') as f: with salt.utils.files.fopen(output, 'r') as f:
@ -361,26 +361,29 @@ class PyDSLRendererTestCase(CommonTestCaseBoilerplate):
) )
) )
try: try:
# The Windows shell will include any spaces before the redirect
# in the text that is redirected.
# For example: echo hello > test.txt will contain "hello "
write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\ write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl #!pydsl
__pydsl__.set(ordered=True) __pydsl__.set(ordered=True)
A = state('A') A = state('A')
A.cmd.run('echo hehe > {0}/zzz.txt', cwd='/') A.cmd.run('echo hehe>{0}/zzz.txt', cwd='/')
A.file.managed('{1}/yyy.txt', source='salt://zzz.txt') A.file.managed('{0}/yyy.txt', source='salt://zzz.txt')
A() A()
A() A()
state().cmd.run('echo hoho >> {2}/yyy.txt', cwd='/') state().cmd.run('echo hoho>>{0}/yyy.txt', cwd='/')
A.file.managed('{3}/xxx.txt', source='salt://zzz.txt') A.file.managed('{0}/xxx.txt', source='salt://zzz.txt')
A() A()
'''.format(dirpath, dirpath, dirpath, dirpath))) '''.format(dirpath.replace('\\', '/'))))
self.state_highstate({'base': ['aaa']}, dirpath) self.state_highstate({'base': ['aaa']}, dirpath)
with salt.utils.files.fopen(os.path.join(dirpath, 'yyy.txt'), 'rt') as f: with salt.utils.files.fopen(os.path.join(dirpath, 'yyy.txt'), 'rt') as f:
self.assertEqual(f.read(), 'hehe\nhoho\n') self.assertEqual(f.read(), 'hehe' + os.linesep + 'hoho' + os.linesep)
with salt.utils.files.fopen(os.path.join(dirpath, 'xxx.txt'), 'rt') as f: with salt.utils.files.fopen(os.path.join(dirpath, 'xxx.txt'), 'rt') as f:
self.assertEqual(f.read(), 'hehe\n') self.assertEqual(f.read(), 'hehe' + os.linesep)
finally: finally:
shutil.rmtree(dirpath, ignore_errors=True) shutil.rmtree(dirpath, ignore_errors=True)

View File

@ -250,11 +250,11 @@ class TestVerify(TestCase):
self.skipTest('We\'ve hit the max open files setting') self.skipTest('We\'ve hit the max open files setting')
raise raise
finally: finally:
shutil.rmtree(tempdir)
if sys.platform.startswith('win'): if sys.platform.startswith('win'):
win32file._setmaxstdio(mof_h) win32file._setmaxstdio(mof_h)
else: else:
resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h)) resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h))
shutil.rmtree(tempdir)
@skipIf(NO_MOCK, NO_MOCK_REASON) @skipIf(NO_MOCK, NO_MOCK_REASON)
def test_verify_log(self): def test_verify_log(self):