diff --git a/.github/stale.yml b/.github/stale.yml index 40831d75bd..35c146c138 100644 --- a/.github/stale.yml +++ b/.github/stale.yml @@ -1,8 +1,8 @@ # Probot Stale configuration file # Number of days of inactivity before an issue becomes stale -# 760 is approximately 2 years and 1 month -daysUntilStale: 760 +# 750 is approximately 2 years and 1 month +daysUntilStale: 750 # Number of days of inactivity before a stale issue is closed daysUntilClose: 7 diff --git a/.kitchen.yml b/.kitchen.yml index c6f8d5cbe9..5236888405 100644 --- a/.kitchen.yml +++ b/.kitchen.yml @@ -31,7 +31,7 @@ provisioner: salt_version: latest salt_bootstrap_url: https://bootstrap.saltstack.com salt_bootstrap_options: -X -p rsync stable <%= version %> - log_level: info + log_level: debug sudo: true require_chef: false retry_on_exit_code: @@ -189,7 +189,6 @@ suites: verifier: name: runtests sudo: true - verbose: true run_destructive: true transport: zeromq types: diff --git a/doc/conf.py b/doc/conf.py index 699a46de64..2556ad8a33 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -263,8 +263,8 @@ if on_saltstack: copyright = time.strftime("%Y") # < --- START do not merge these settings to other branches START ---> # -build_type = 'develop' # latest, previous, develop, next -release = version # version, latest_release, previous_release +build_type = 'latest' # latest, previous, develop, next +release = latest_release # version, latest_release, previous_release # < --- END do not merge these settings to other branches END ---> # # Set google custom search engine diff --git a/doc/faq.rst b/doc/faq.rst index 9b75fd307d..28c298cda3 100644 --- a/doc/faq.rst +++ b/doc/faq.rst @@ -254,17 +254,19 @@ specifying the pillar variable is the same one used for :py:func:`pillar.get ` state is only supported in Salt 2015.8.4 and newer. +.. _faq-restart-salt-minion: + What is the best way to restart a Salt Minion daemon using Salt after upgrade? ------------------------------------------------------------------------------ Updating the ``salt-minion`` package requires a restart of the ``salt-minion`` service. But restarting the service while in the middle of a state run interrupts the process of the Minion running states and sending results back to -the Master. A common way to workaround that is to schedule restarting of the -Minion service using :ref:`masterless mode ` after all -other states have been applied. This allows the minion to keep Minion to Master -connection alive for the Minion to report the final results to the Master, while -the service is restarting in the background. +the Master. A common way to workaround that is to schedule restarting the +Minion service in the background by issuing a ``salt-call`` command calling +``service.restart`` function. This prevents the Minion being disconnected from +the Master immediately. Otherwise you would get +``Minion did not return. [Not connected]`` message as the result of a state run. 
Upgrade without automatic restart ********************************* @@ -328,7 +330,7 @@ The following example works on UNIX-like operating systems: {%- if grains['os'] != 'Windows' %} Restart Salt Minion: cmd.run: - - name: 'salt-call --local service.restart salt-minion' + - name: 'salt-call service.restart salt-minion' - bg: True - onchanges: - pkg: Upgrade Salt Minion @@ -348,9 +350,9 @@ as follows: Restart Salt Minion: cmd.run: {%- if grains['kernel'] == 'Windows' %} - - name: 'C:\salt\salt-call.bat --local service.restart salt-minion' + - name: 'C:\salt\salt-call.bat service.restart salt-minion' {%- else %} - - name: 'salt-call --local service.restart salt-minion' + - name: 'salt-call service.restart salt-minion' {%- endif %} - bg: True - onchanges: @@ -358,7 +360,13 @@ as follows: However, it requires more advanced tricks to upgrade from legacy version of Salt (before ``2016.3.0``) on UNIX-like operating systems, where executing -commands in the background is not supported: +commands in the background is not supported. You also may need to schedule +restarting the Minion service using :ref:`masterless mode +` after all other states have been applied for Salt +versions earlier than ``2016.11.0``. This allows the Minion to keep the +connection to the Master alive for being able to report the final results back +to the Master, while the service is restarting in the background. This state +should run last or watch for the ``pkg`` state changes: .. code-block:: jinja @@ -382,8 +390,8 @@ Restart the Minion from the command line: .. code-block:: bash - salt -G kernel:Windows cmd.run_bg 'C:\salt\salt-call.bat --local service.restart salt-minion' - salt -C 'not G@kernel:Windows' cmd.run_bg 'salt-call --local service.restart salt-minion' + salt -G kernel:Windows cmd.run_bg 'C:\salt\salt-call.bat service.restart salt-minion' + salt -C 'not G@kernel:Windows' cmd.run_bg 'salt-call service.restart salt-minion' Salting the Salt Master ----------------------- @@ -409,6 +417,10 @@ for salt itself: https://github.com/saltstack-formulas/salt-formula +Restarting the ``salt-master`` service using execution module or application of +state could be done the same way as for the Salt minion described :ref:`above +`. + .. _faq-grain-security: Is Targeting using Grain Data Secure? @@ -443,4 +455,3 @@ the grain and values that you want to change / set.) You should also `file an issue `_ describing the change so it can be fixed in Salt. - diff --git a/doc/man/salt.7 b/doc/man/salt.7 index 93c4b073cd..4275c61255 100644 --- a/doc/man/salt.7 +++ b/doc/man/salt.7 @@ -946,6 +946,7 @@ Now go to the Configuring Salt page. .sp \fBLatest stable build from the selected branch\fP: + .sp The output of \fBmd5 \fP should match the contents of the corresponding md5 file. @@ -1397,7 +1398,7 @@ sudo python setup.py install \-\-force \fBNOTE:\fP .INDENT 0.0 .INDENT 3.5 -SaltStack does offer commerical support for Solaris which includes packages. +SaltStack does offer commercial support for Solaris which includes packages. .UNINDENT .UNINDENT .SS Ubuntu @@ -5363,7 +5364,7 @@ Default: \fB0\fP .sp Memcache is an additional cache layer that keeps a limited amount of data fetched from the minion data cache for a limited period of time in memory that -makes cache operations faster. It doesn\(aqt make much sence for the \fBlocalfs\fP +makes cache operations faster. It doesn\(aqt make much sense for the \fBlocalfs\fP cache driver but helps for more complex drivers like \fBconsul\fP\&. 
.sp This option sets the memcache items expiration time. By default is set to \fB0\fP @@ -10793,11 +10794,10 @@ branch/tag. .ft C winrepo_branch: winrepo -ext_pillar: - \- git: - \- https://mygitserver/winrepo1.git - \- https://mygitserver/winrepo2.git: - \- foo https://mygitserver/winrepo3.git +winrepo_remotes: + \- https://mygitserver/winrepo1.git + \- https://mygitserver/winrepo2.git: + \- foo https://mygitserver/winrepo3.git .ft P .fi .UNINDENT @@ -14988,7 +14988,7 @@ New in version 2017.7.0. .sp Default: \fBTrue\fP .sp -Wheter the proxy should maintain the connection with the remote +Whether the proxy should maintain the connection with the remote device. Similarly to \fI\%proxy_keep_alive\fP, this option is very specific to the design of the proxy module. When \fI\%proxy_always_alive\fP is set to \fBFalse\fP, @@ -15011,7 +15011,7 @@ New in version 2017.7.3. .sp Default: \fBFalse\fP\&. .sp -Wheter the pillar data to be merged into the proxy configuration options. +Whether the pillar data to be merged into the proxy configuration options. As multiple proxies can run on the same server, we may need different configuration options for each, while there\(aqs one single configuration file. The solution is merging the pillar data of each proxy minion into the opts. @@ -19623,7 +19623,7 @@ Master Job Cache The major difference between these two mechanism is from where results are returned (from the Salt Master or Salt Minion). Configuring either of these options will also make the \fBJobs Runner functions\fP -to automatically query the remote stores for infomation. +to automatically query the remote stores for information. .SS External Job Cache \- Minion\-Side Returner .sp When an External Job Cache is configured, data is returned to the Default Job @@ -22991,7 +22991,7 @@ important to use this marker whenever declaring \fBpre\fP or \fBpost\fP states, that the text following it can be evaluated properly. .SS local States .sp -\fBlocal\fP states are evaluated locally; this is analagous to issuing a state +\fBlocal\fP states are evaluated locally; this is analogous to issuing a state run using a \fBsalt\-call \-\-local\fP command. These commands will be issued on the local machine running the \fBspm\fP command, whether that machine is a master or a minion. @@ -31871,7 +31871,7 @@ in the above example, Ubuntu minions will match both the \fBDebian\fP and \fBUbuntu\fP classes, since Ubuntu has an \fBos_family\fP grain of \fBDebian\fP an an \fBos\fP grain of \fBUbuntu\fP\&. As of the 2017.7.0 release, the order is dictated by the order of declaration, with classes defined later overriding -earlier ones. Addtionally, 2017.7.0 adds support for explicitly defining +earlier ones. Additionally, 2017.7.0 adds support for explicitly defining the ordering using an optional attribute called \fBpriority\fP\&. .sp Given the above example, \fBos_family\fP matches will be processed first, @@ -33040,6 +33040,32 @@ the pillar environment takes the place of \fB{{ saltenv }}\fP in the jinja context. .UNINDENT .UNINDENT +.SS Dynamic Pillar Environments +.sp +If environment \fB__env__\fP is specified in \fBpillar_roots\fP, all +environments that are not explicitly specified in \fBpillar_roots\fP +will map to the directories from \fB__env__\fP\&. This allows one to use dynamic +git branch based environments for state/pillar files with the same file\-based +pillar applying to all environments. 
For example: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pillar_roots: + __env__: + \- /srv/pillar + +ext_pillar: + \- git: + \- __env__ https://example.com/git\-pillar.git +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +New in version 2017.7.5,2018.3.1. + .SS Pillar Namespace Flattening .sp The separate pillar SLS files all merge down into a single dictionary of @@ -37703,7 +37729,7 @@ Example: .sp .nf .ft C -{{ \(aqwww.google.com\(aq | dns_check }} +{{ \(aqwww.google.com\(aq | dns_check(port=443) }} .ft P .fi .UNINDENT @@ -45846,7 +45872,7 @@ nginx: .UNINDENT .UNINDENT .sp -Now \fBnginx\fP will be started in a seperate process from the normal state run +Now \fBnginx\fP will be started in a separate process from the normal state run and will therefore not block additional states. .SS Parallel States and Requisites .sp @@ -50489,7 +50515,7 @@ the querystring syntax (e.g. Reactor SLS files are similar to State and Pillar SLS files. They are by default YAML + Jinja templates and are passed familiar context variables. Click \fI\%here\fP for more detailed information on the -variables availble in Jinja templating. +variables available in Jinja templating. .sp Here is the SLS for a simple reaction: .INDENT 0.0 @@ -50661,7 +50687,7 @@ T} _ .TE .sp -This reaction would be equvalent to running the following Salt command: +This reaction would be equivalent to running the following Salt command: .INDENT 0.0 .INDENT 3.5 .sp @@ -50754,7 +50780,7 @@ _ .TE .sp Assuming that the event tag is \fBfoo\fP, and the data passed to the event is -\fB{\(aqbar\(aq: \(aqbaz\(aq}\fP, then this reaction is equvalent to running the following +\fB{\(aqbar\(aq: \(aqbaz\(aq}\fP, then this reaction is equivalent to running the following Salt command: .INDENT 0.0 .INDENT 3.5 @@ -50881,7 +50907,7 @@ T} _ .TE .sp -This reaction is equvalent to running the following Salt command: +This reaction is equivalent to running the following Salt command: .INDENT 0.0 .INDENT 3.5 .sp @@ -57455,7 +57481,7 @@ exoscale: New in version next\-release. .sp -You can specifiy a list of security groups (by name or id) that should be +You can specify a list of security groups (by name or id) that should be assigned to the VM. .INDENT 0.0 .INDENT 3.5 @@ -59283,7 +59309,7 @@ profile\-id: .UNINDENT .sp Note that \(aqsubnetid\(aq takes precedence over \(aqsubnetname\(aq, but \(aqsecuritygroupid\(aq -and \(aqsecuritygroupname\(aq are merged toghether to generate a single list for +and \(aqsecuritygroupname\(aq are merged together to generate a single list for SecurityGroups of instances. .SS Specifying interface properties .sp @@ -76804,7 +76830,7 @@ The first one will match all mounted disks beginning with "/", except /home The second one will match disks from A:to Z:on a Windows system .sp Note that if a regular expression are evaluated after static mount points, -which means that if a regular expression matches an other defined mount point, +which means that if a regular expression matches another defined mount point, it will override the previously defined threshold. .UNINDENT .INDENT 0.0 @@ -78365,7 +78391,7 @@ a very long list of sub\-banks, the number of requests to build the sub\-tree ma An improvement for this would be loading a custom Lua script in the Redis instance of the user (using the \fBregister_script\fP feature) and call it whenever we flush. This script would only need to build this sub\-tree causing problems. 
It can be added later and the behaviour -should not change as the user needs to explicitely allow Salt inject scripts in their Redis instance. +should not change as the user needs to explicitly allow Salt inject scripts in their Redis instance. .UNINDENT .INDENT 0.0 .TP @@ -92675,14 +92701,14 @@ The type of the output. Choose bewteen: .UNINDENT .UNINDENT .sp -This can be overriden when executing a command, using the \fB\-\-out\-type\fP argument. +This can be overridden when executing a command, using the \fB\-\-out\-type\fP argument. .sp New in version 2017.7.0. .TP .B outputter: \fBnested\fP The format to display the data, using the outputters available on the CLI. -This argument can also be overriden when executing a command, using the \fB\-\-out\fP option. +This argument can also be overridden when executing a command, using the \fB\-\-out\fP option. .sp New in version 2017.7.0. @@ -94787,7 +94813,7 @@ value configured in the pillar: \fBNAPALM proxy module\fP\&. \fBNOTE:\fP .INDENT 7.0 .INDENT 3.5 -The diference betwen \fBhost\fP and \fBhostname\fP is that +The diference between \fBhost\fP and \fBhostname\fP is that \fBhost\fP provides the physical location \- either domain name or IP address, while \fBhostname\fP provides the hostname as configured on the device. They are not necessarily the same. @@ -139792,7 +139818,7 @@ New in version 2018.3.0. .TP .B dryrun: False -when set to True the container will not be commited at the end of +when set to True the container will not be committed at the end of the build. The dryrun succeed also when the state contains errors. .UNINDENT .sp @@ -144228,7 +144254,7 @@ salt myminion etcd.ls /path/to/dir/ host=127.0.0.1 port=2379 New in version 2014.7.0. .sp -Delete a key from etcd. Returns True if the key was deleted, False if it wasn +Delete a key from etcd. Returns True if the key was deleted, False if it was not and None if there was a failure. .sp CLI Example: @@ -145058,18 +145084,34 @@ The file extension to use for a backup of the file if any edit is made. Set to \fBFalse\fP to skip making a backup. .TP .B dry_run -Don\(aqt make any edits to the file. +False +If \fBTrue\fP, do not make any edits to the file and simply return the +changes that \fIwould\fP be made. .TP .B show_changes -Output a unified diff of the old file and the new file. If \fBFalse\fP, -return a boolean if any changes were made. +True +Controls how changes are presented. If \fBTrue\fP, this function will +return a unified diff of the changes made. If False, then it will +return a boolean (\fBTrue\fP if any changes were made, otherwise +\fBFalse\fP). .TP -.B append_newline: -Append a newline to the content block. For more information see: -\fI\%https://github.com/saltstack/salt/issues/33686\fP +.B append_newline +False +Controls whether or not a newline is appended to the content block. If +the value of this argument is \fBTrue\fP then a newline will be added to +the content block. If it is \fBFalse\fP, then a newline will \fInot\fP be +added to the content block. If it is \fBNone\fP then a newline will only +be added to the content block if it does not already end in a newline. .sp New in version 2016.3.4. +.sp +Changed in version 2017.7.5,2018.3.1: New behavior added when value is \fBNone\fP\&. 
+ +.sp +Changed in version Fluorine: The default value of this argument will change to \fBNone\fP to match +the behavior of the \fBfile.blockreplace state\fP + .UNINDENT .sp CLI Example: @@ -156761,7 +156803,7 @@ Return peer status information The return value is a dictionary with peer UUIDs as keys and dicts of peer information as values. Hostnames are listed in one list. GlusterFS separates one of the hostnames but the only reason for this seems to be which hostname -happens to be used firts in peering. +happens to be used first in peering. .sp CLI Example: .INDENT 7.0 @@ -160604,7 +160646,7 @@ salt \(aq*\(aq heat.flavor_list profile=openstack1 .UNINDENT .INDENT 0.0 .TP -.B salt.modules.heat.create_stack(name=None, template_file=None, enviroment=None, parameters=None, poll=0, rollback=False, timeout=60, profile=None) +.B salt.modules.heat.create_stack(name=None, template_file=None, environment=None, parameters=None, poll=0, rollback=False, timeout=60, profile=None, enviroment=None) Create a stack (heat stack\-create) .INDENT 7.0 .TP @@ -160614,8 +160656,8 @@ Name of the new stack .B template_file File of template .TP -.B enviroment -File of enviroment +.B environment +File of environment .TP .B parameters Parameter dict used to create the stack @@ -160641,13 +160683,18 @@ CLI Example: .ft C salt \(aq*\(aq heat.create_stack name=mystack \e template_file=salt://template.yaml \e - enviroment=salt://enviroment.yaml \e + environment=salt://environment.yaml \e parameters="{"image": "Debian 8", "flavor": "m1.small"}" \e poll=5 rollback=False timeout=60 profile=openstack1 .ft P .fi .UNINDENT .UNINDENT +.sp +New in version 2017.7.5,2018.3.1: The spelling mistake in parameter \fIenviroment\fP was corrected to \fIenvironment\fP\&. +The misspelled version is still supported for backward compatibility, but will +be removed in Salt Neon. + .UNINDENT .INDENT 0.0 .TP @@ -160755,7 +160802,7 @@ salt \(aq*\(aq heat.template_stack name=mystack profile=openstack1 .UNINDENT .INDENT 0.0 .TP -.B salt.modules.heat.update_stack(name=None, template_file=None, enviroment=None, parameters=None, poll=0, rollback=False, timeout=60, profile=None) +.B salt.modules.heat.update_stack(name=None, template_file=None, environment=None, parameters=None, poll=0, rollback=False, timeout=60, profile=None, enviroment=None) Update a stack (heat stack\-template) .INDENT 7.0 .TP @@ -160765,8 +160812,8 @@ Name of the stack .B template_file File of template .TP -.B enviroment -File of enviroment +.B environment +File of environment .TP .B parameters Parameter dict used to update the stack @@ -160792,13 +160839,18 @@ CLI Example: .ft C salt \(aq*\(aq heat.update_stack name=mystack \e template_file=salt://template.yaml \e - enviroment=salt://enviroment.yaml \e + environment=salt://environment.yaml \e parameters="{"image": "Debian 8", "flavor": "m1.small"}" \e poll=5 rollback=False timeout=60 profile=openstack1 .ft P .fi .UNINDENT .UNINDENT +.sp +New in version 2017.7.5,2018.3.1: The spelling mistake in parameter \fIenviroment\fP was corrected to \fIenvironment\fP\&. +The misspelled version is still supported for backward compatibility, but will +be removed in Salt Neon. 
+ .UNINDENT .SS salt.modules.hg .sp @@ -171059,7 +171111,7 @@ kubernetes.client\-key\-file: \(aq/path/to/client.key\(aq .UNINDENT .UNINDENT .sp -These settings can be also overrided by adding \fIapi_url\fP, \fIapi_user\fP, +These settings can be also overridden by adding \fIapi_url\fP, \fIapi_user\fP, \fIapi_password\fP, \fIapi_certificate_authority_file\fP, \fIapi_client_certificate_file\fP or \fIapi_client_key_file\fP parameters when calling a function: .sp @@ -171067,7 +171119,20 @@ The data format for \fIkubernetes.*\-data\fP values is the same as provided in \ It\(aqs base64 encoded certificates/keys in one line. .sp For an item only one field should be provided. Either a \fIdata\fP or a \fIfile\fP entry. -In case both are provided the \fIfile\fP entry is prefered. +In case both are provided the \fIfile\fP entry is preferred. +.sp +\fBWARNING:\fP +.INDENT 0.0 +.INDENT 3.5 +Configuration options will change in Flourine. All options above will be replaced by: +.INDENT 0.0 +.IP \(bu 2 +kubernetes.kubeconfig or kubernetes.kubeconfig\-data +.IP \(bu 2 +kubernetes.context +.UNINDENT +.UNINDENT +.UNINDENT .INDENT 0.0 .INDENT 3.5 .sp @@ -188396,7 +188461,7 @@ salt minion mssql.db_exists database_name=\(aqDBNAME\(aq .INDENT 0.0 .TP .B salt.modules.mssql.db_list(**kwargs) -Return the databse list created on a MS SQL server. +Return the database list created on a MS SQL server. .sp CLI Example: .INDENT 7.0 @@ -192101,7 +192166,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .UNINDENT .sp The output is a dictionary having the same form as \fBnet.load_config\fP\&. @@ -192284,7 +192349,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .UNINDENT .sp The output is a dictionary having the same form as \fBnet.load_config\fP\&. @@ -192460,7 +192525,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .TP .B source_service A special service to choose from. This is a helper so the user is able to @@ -193593,14 +193658,14 @@ running (string): Representation of the native running configuration. .INDENT 2.0 .TP .B candidate (string): Representation of the native candidate configuration. -If the device doesnt differentiate between running and startup +If the device doesn\(aqt differentiate between running and startup configuration this will an empty string. .UNINDENT .IP \(bu 2 .INDENT 2.0 .TP .B startup (string): Representation of the native startup configuration. -If the device doesnt differentiate between running and startup +If the device doesn\(aqt differentiate between running and startup configuration this will an empty string. .UNINDENT .UNINDENT @@ -195457,7 +195522,7 @@ This function return the routes from the RIB. In case the destination prefix is too short, there may be too many routes matched. 
Therefore in cases of devices having a very high number of routes -it may be necessary to adjust the prefix lenght and request +it may be necessary to adjust the prefix length and request using a longer prefix. .UNINDENT .UNINDENT @@ -196139,7 +196204,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .TP .B replace: \fBFalse\fP Should replace the config with the new generate one? @@ -209979,7 +210044,7 @@ specify user account control properties \fBNOTE:\fP .INDENT 7.0 .INDENT 3.5 -Only the follwing can be set: +Only the following can be set: \- N: No password required \- D: Account disabled \- H: Home directory required @@ -210201,8 +210266,8 @@ New in version 0.14. .sp Attempt to retrieve the named value from pillar, if the named value is not available return the passed default. The default return is an empty string -except __opts__[\(aqpillar_raise_on_missing\(aq] is set to True, in which case a -KeyError will be raised. +except \fB__opts__[\(aqpillar_raise_on_missing\(aq]\fP is set to True, in which +case a \fBKeyError\fP exception will be raised. .sp If the merge parameter is set to \fBTrue\fP, the default will be recursively merged into the returned pillar data. @@ -210220,8 +210285,8 @@ for the dict. This means that if a dict in pillar looks like this: .UNINDENT .UNINDENT .sp -To retrieve the value associated with the apache key in the pkg dict this -key can be passed: +To retrieve the value associated with the \fBapache\fP key in the \fBpkg\fP +dict this key can be passed as: .INDENT 7.0 .INDENT 3.5 .sp @@ -210394,6 +210459,201 @@ The pillar value can be a list. The function will return the \fBlookup_dict\fP value for a first found item in the list matching one of the \fBlookup_dict\fP keys. +.IP \(bu 2 +\fBmerge\fP \-\- A dictionary to merge with the results of the pillar +selection from \fBlookup_dict\fP\&. This allows another dictionary to +override the values in the \fBlookup_dict\fP\&. +.IP \(bu 2 +\fBdefault\fP \-\- default lookup_dict\(aqs key used if the pillar does not exist +or if the pillar value has no match on lookup_dict. If unspecified +the value is "default". +.IP \(bu 2 +\fBbase\fP \-\- A lookup_dict key to use for a base dictionary. The +pillar\-selected \fBlookup_dict\fP is merged over this and then finally +the \fBmerge\fP dictionary is merged. This allows common values for +each case to be collected in the base and overridden by the pillar +selection dictionary and the merge dictionary. Default is unset. +.UNINDENT +.UNINDENT +.sp +CLI Example: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +salt \(aq*\(aq pillar.filter_by \(aq{web: Serve it up, db: I query, default: x_x}\(aq role +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B default +If specified, return this value in case when named pillar value does +not exist. +.TP +.B merge +\fBFalse\fP +If \fBTrue\fP, the retrieved values will be merged into the passed +default. When the default and the retrieved value are both +dictionaries, the dictionaries will be recursively merged. +.sp +New in version 2014.7.0. + +.sp +Changed in version 2016.3.7,2016.11.4,2017.7.0: If the default and the retrieved value are not of the same type, +then merging will be skipped and the retrieved value will be +returned. Earlier releases raised an error in these cases. 
+ +.TP +.B merge_nested_lists +If set to \fBFalse\fP, lists nested within the retrieved pillar +dictionary will \fIoverwrite\fP lists in \fBdefault\fP\&. If set to \fBTrue\fP, +nested lists will be \fImerged\fP into lists in \fBdefault\fP\&. If unspecified +(the default), this option is inherited from the +\fBpillar_merge_lists\fP minion config option. +.sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +This option is ignored when \fBmerge\fP is set to \fBFalse\fP\&. +.UNINDENT +.UNINDENT +.sp +New in version 2016.11.6. + +.TP +.B delimiter +Specify an alternate delimiter to use when traversing a nested dict. +This is useful for when the desired key contains a colon. See CLI +example below for usage. +.sp +New in version 2014.7.0. + +.TP +.B pillarenv +If specified, this function will query the master to generate fresh +pillar data on the fly, specifically from the requested pillar +environment. Note that this can produce different pillar data than +executing this function without an environment, as its normal behavior +is just to return a value from minion\(aqs pillar data in memory (which +can be sourced from more than one pillar environment). +.sp +Using this argument will not affect the pillar data in memory. It will +however be slightly slower and use more resources on the master due to +the need for the master to generate and send the minion fresh pillar +data. This tradeoff in performance however allows for the use case +where pillar data is desired only from a single environment. +.sp +New in version 2017.7.0. + +.TP +.B saltenv +Included only for compatibility with +\fBpillarenv_from_saltenv\fP, and is otherwise ignored. +.sp +New in version 2017.7.0. + +.UNINDENT +.sp +CLI Example: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +salt \(aq*\(aq pillar.get pkg:apache +salt \(aq*\(aq pillar.get abc::def|ghi delimiter=\(aq|\(aq +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.modules.pillar.file_exists(path, saltenv=None) +New in version 2016.3.0. + +.sp +This is a master\-only function. Calling from the minion is not supported. +.sp +Use the given path and search relative to the pillar environments to see if +a file exists at that path. +.sp +If the \fBsaltenv\fP argument is given, restrict search to that environment +only. +.sp +Will only work with \fBpillar_roots\fP, not external pillars. +.sp +Returns True if the file is found, and False otherwise. +.INDENT 7.0 +.TP +.B path +The path to the file in question. Will be treated as a relative path +.TP +.B saltenv +Optional argument to restrict the search to a specific saltenv +.UNINDENT +.sp +CLI Example: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +salt \(aq*\(aq pillar.file_exists foo/bar.sls +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.modules.pillar.filter_by(lookup_dict, pillar, merge=None, default=\(aqdefault\(aq, base=None) +New in version 2017.7.0. + +.sp +Look up the given pillar in a given dictionary and return the result +.INDENT 7.0 +.TP +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBlookup_dict\fP \-\- +.sp +A dictionary, keyed by a pillar, containing a value or +values relevant to systems matching that pillar. For example, a key +could be a pillar for a role and the value could the name of a package +on that particular OS. +.sp +The dictionary key can be a globbing pattern. The function will return +the corresponding \fBlookup_dict\fP value where the pilalr value matches +the pattern. 
For example: +.INDENT 2.0 +.INDENT 3.5 +.sp +.nf +.ft C +# this will render \(aqgot some salt\(aq if \(ga\(garole\(ga\(ga begins with \(aqsalt\(aq +salt \(aq*\(aq pillar.filter_by \(aq{salt*: got some salt, default: salt is not here}\(aq role +.ft P +.fi +.UNINDENT +.UNINDENT + +.IP \(bu 2 +\fBpillar\fP \-\- +.sp +The name of a pillar to match with the system\(aqs pillar. For +example, the value of the "role" pillar could be used to pull values +from the \fBlookup_dict\fP dictionary. +.sp +The pillar value can be a list. The function will return the +\fBlookup_dict\fP value for a first found item in the list matching +one of the \fBlookup_dict\fP keys. + .IP \(bu 2 \fBmerge\fP \-\- A dictionary to merge with the results of the pillar selection from \fBlookup_dict\fP\&. This allows another dictionary to @@ -210431,8 +210691,8 @@ New in version 0.14. .sp Attempt to retrieve the named value from pillar, if the named value is not available return the passed default. The default return is an empty string -except __opts__[\(aqpillar_raise_on_missing\(aq] is set to True, in which case a -KeyError will be raised. +except \fB__opts__[\(aqpillar_raise_on_missing\(aq]\fP is set to True, in which +case a \fBKeyError\fP exception will be raised. .sp If the merge parameter is set to \fBTrue\fP, the default will be recursively merged into the returned pillar data. @@ -210450,8 +210710,8 @@ for the dict. This means that if a dict in pillar looks like this: .UNINDENT .UNINDENT .sp -To retrieve the value associated with the apache key in the pkg dict this -key can be passed: +To retrieve the value associated with the \fBapache\fP key in the \fBpkg\fP +dict this key can be passed as: .INDENT 7.0 .INDENT 3.5 .sp @@ -210464,6 +210724,13 @@ pkg:apache .UNINDENT .INDENT 7.0 .TP +.B key +The pillar key to get value from +.TP +.B default +If specified, return this value in case when named pillar value does +not exist. +.TP .B merge \fBFalse\fP If \fBTrue\fP, the retrieved values will be merged into the passed @@ -210865,7 +211132,7 @@ back slash is an escape character. .UNINDENT .INDENT 0.0 .TP -.B salt.modules.pip.freeze(bin_env=None, user=None, cwd=None, use_vt=False) +.B salt.modules.pip.freeze(bin_env=None, user=None, cwd=None, use_vt=False, env_vars=None) Return a list of installed packages either globally or in the specified virtualenv .INDENT 7.0 @@ -227844,7 +228111,7 @@ New in version 2017.7.0. .sp Applies SElinux policies to filespec using \fIrestorecon [\-R] -filespec\fP\&. Returns dict with changes if succesful, the output of +filespec\fP\&. Returns dict with changes if successful, the output of the restorecon command otherwise. .INDENT 7.0 .TP @@ -228426,7 +228693,7 @@ Displays a message on the LED matrix. The message to display .TP .B msg_type -The type of the message. Changes the appearence of the message. +The type of the message. Changes the appearance of the message. .sp Available types are: .INDENT 7.0 @@ -234558,7 +234825,7 @@ Reference: \fI\%https://cwiki.apache.org/confluence/display/solr/Collections+API .INDENT 0.0 .TP .B salt.modules.solrcloud.alias_exists(alias_name, **kwargs) -Check alias existance +Check alias existence .sp Additional parameters (kwargs) may be passed, they will be proxied to http.query .sp @@ -236862,8 +237129,8 @@ multiple states. 
.sp .nf .ft C -salt \(aq*\(aq state.higstate exclude=bar,baz -salt \(aq*\(aq state.higstate exclude=foo* +salt \(aq*\(aq state.highstate exclude=bar,baz +salt \(aq*\(aq state.highstate exclude=foo* salt \(aq*\(aq state.highstate exclude="[{\(aqid\(aq: \(aqid_to_exclude\(aq}, {\(aqsls\(aq: \(aqsls_to_exclude\(aq}]" .ft P .fi @@ -253319,7 +253586,7 @@ To apply a config that already exists on the the system .sp .nf .ft C -salt \(aq*\(aq dsc.run_config C:\e\eDSC\e\eWebSiteConfiguration +salt \(aq*\(aq dsc.apply_config C:\e\eDSC\e\eWebSiteConfiguration .ft P .fi .UNINDENT @@ -253331,7 +253598,7 @@ To cache a configuration from the master and apply it: .sp .nf .ft C -salt \(aq*\(aq dsc.run_config C:\e\eDSC\e\eWebSiteConfiguration salt://dsc/configs/WebSiteConfiguration +salt \(aq*\(aq dsc.apply_config C:\e\eDSC\e\eWebSiteConfiguration salt://dsc/configs/WebSiteConfiguration .ft P .fi .UNINDENT @@ -253441,6 +253708,9 @@ A dictionary representing the DSC Configuration on the machine .TP .B Return type \fI\%dict\fP +.TP +.B Raises +\fBCommandExecutionError\fP \-\- On failure .UNINDENT .sp CLI Example: @@ -253524,8 +253794,8 @@ apply it. The PowerShell script can be cached from the master using the script, the desired configuration can be applied by passing the name in the \fBconfig\fP option. .sp -This command would be the equivalent of running \fBdsc.compile_config\fP and -\fBdsc.apply_config\fP separately. +This command would be the equivalent of running \fBdsc.compile_config\fP +followed by \fBdsc.apply_config\fP\&. .INDENT 7.0 .TP .B Parameters @@ -253582,7 +253852,7 @@ Default is \(aqbase\(aq .UNINDENT .TP .B Returns -True if successfully compiled and applied, False if not +True if successfully compiled and applied, otherwise False .TP .B Return type \fI\%bool\fP @@ -253596,7 +253866,7 @@ To compile a config from a script that already exists on the system: .sp .nf .ft C -salt \(aq*\(aq dsc.compile_apply_config C:\e\eDSC\e\eWebsiteConfig.ps1 +salt \(aq*\(aq dsc.run_config C:\e\eDSC\e\eWebsiteConfig.ps1 .ft P .fi .UNINDENT @@ -253608,7 +253878,7 @@ To cache a config script to the system from the master and compile it: .sp .nf .ft C -salt \(aq*\(aq dsc.compile_apply_config C:\e\eDSC\e\eWebsiteConfig.ps1 salt://dsc/configs/WebsiteConfig.ps1 +salt \(aq*\(aq dsc.run_config C:\e\eDSC\e\eWebsiteConfig.ps1 salt://dsc/configs/WebsiteConfig.ps1 .ft P .fi .UNINDENT @@ -255634,7 +255904,7 @@ Return info on all groups .TP .B Parameters \fBrefresh\fP (\fI\%bool\fP) \-\- Refresh the info for all groups in \fB__context__\fP\&. If False only -the groups in \fB__context__\fP wil be returned. If True the +the groups in \fB__context__\fP will be returned. If True the \fB__context__\fP will be refreshed with current data and returned. Default is False .TP @@ -255690,7 +255960,7 @@ Return a list of groups .TP .B Parameters \fBrefresh\fP (\fI\%bool\fP) \-\- Refresh the info for all groups in \fB__context__\fP\&. If False only -the groups in \fB__context__\fP wil be returned. If True, the +the groups in \fB__context__\fP will be returned. If True, the \fB__context__\fP will be refreshed with current data and returned. Default is False .TP @@ -258380,6 +258650,17 @@ containing the full results will be returned. .UNINDENT .UNINDENT .UNINDENT +.sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +.INDENT 0.0 +.IP \(bu 2 +Hidden directories (directories beginning with \(aq\fI\&.\fP\(aq, such as +\(aq\fI\&.git\fP\(aq) will be ignored. 
+.UNINDENT +.UNINDENT +.UNINDENT .INDENT 7.0 .TP .B Returns @@ -258899,7 +259180,80 @@ salt \(aq*\(aq pkg.purge pkgs=\(aq["foo", "bar"]\(aq .INDENT 0.0 .TP .B salt.modules.win_pkg.refresh_db(**kwargs) -Fetches metadata files and calls \fI\%pkg.genrepo\fP to compile updated repository metadata. +Generates the local software metadata database (\fIwinrepo.p\fP) on the minion. +The database is stored in a serialized format located by default at the +following location: +.sp +\fIC:saltvarcachesaltminionfilesbasewinrepo\-ngwinrepo.p\fP +.sp +This module performs the following steps to generate the software metadata +database: +.INDENT 7.0 +.IP \(bu 2 +Fetch the package definition files (.sls) from \fIwinrepo_source_dir\fP +(default \fIsalt://win/repo\-ng\fP) and cache them in +\fIfiles\fP +(default: \fIC:saltvarcachesaltminionfilesbasewinrepo\-ng\fP) +.IP \(bu 2 +Call \fI\%pkg.genrepo\fP to parse the +package definition files and generate the repository metadata database +file (\fIwinrepo.p\fP) +.IP \(bu 2 +Return the report received from +\fI\%pkg.genrepo\fP +.UNINDENT +.sp +The default winrepo directory on the master is \fI/srv/salt/win/repo\-ng\fP\&. All +files that end with \fI\&.sls\fP in this and all subdirectories will be used to +generate the repository metadata database (\fIwinrepo.p\fP). +.sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +.INDENT 0.0 +.IP \(bu 2 +Hidden directories (directories beginning with \(aq\fI\&.\fP\(aq, such as +\(aq\fI\&.git\fP\(aq) will be ignored. +.UNINDENT +.UNINDENT +.UNINDENT +.sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +There is no need to call \fIpkg.refresh_db\fP every time you work with the +pkg module. Automatic refresh will occur based on the following minion +configuration settings: +.INDENT 0.0 +.INDENT 3.5 +.INDENT 0.0 +.IP \(bu 2 +\fIwinrepo_cache_expire_min\fP +.IP \(bu 2 +\fIwinrepo_cache_expire_max\fP +.UNINDENT +.UNINDENT +.UNINDENT +.sp +However, if the package definition files have changed, as would be the +case if you are developing a new package definition, this function +should be called to ensure the minion has the latest information about +packages available to it. +.UNINDENT +.UNINDENT +.sp +\fBWARNING:\fP +.INDENT 7.0 +.INDENT 3.5 +Directories and files fetched from +(\fI/srv/salt/win/repo\-ng\fP) will be processed in alphabetical order. If +two or more software definition files contain the same name, the last +one processed replaces all data from the files processed before it. +.UNINDENT +.UNINDENT +.sp +For more information see +Windows Software Repository .sp Kwargs: .INDENT 7.0 @@ -258908,12 +259262,12 @@ saltenv (str): Salt environment. Default: \fBbase\fP .INDENT 0.0 .TP .B verbose (bool): -Return verbose data structure which includes \(aqsuccess_list\(aq, a list -of all sls files and the package names contained within. Default -\(aqFalse\(aq +Return a verbose data structure which includes \(aqsuccess_list\(aq, a +list of all sls files and the package names contained within. +Default is \(aqFalse\(aq .TP .B failhard (bool): -If \fBTrue\fP, an error will be raised if any repo SLS files failed to +If \fBTrue\fP, an error will be raised if any repo SLS files fails to process. If \fBFalse\fP, no error will be raised, and a dictionary containing the full results will be returned. .UNINDENT @@ -258928,6 +259282,15 @@ A dictionary containing the results of the database refresh. 
\fI\%dict\fP .UNINDENT .sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +A result with a \fItotal: 0\fP generally means that the files are in the +wrong location on the master. Try running the following command on the +minion: \fIsalt\-call \-l debug pkg.refresh saltenv=base\fP +.UNINDENT +.UNINDENT +.sp \fBWARNING:\fP .INDENT 7.0 .INDENT 3.5 @@ -267367,7 +267730,7 @@ CLI Example: .sp .nf .ft C -salt \(aq*\(aq x509.read_pem_entries "/etc/pki/*.crt" +salt \(aq*\(aq x509.get_pem_entries "/etc/pki/*.crt" .ft P .fi .UNINDENT @@ -276464,10 +276827,10 @@ be created, so long as the following values are specified: .INDENT 7.0 .TP .B repo or alias -alias by which the zypper refers to the repo +alias by which Zypper refers to the repo .TP .B url, mirrorlist or baseurl -the URL for zypper to reference +the URL for Zypper to reference .TP .B enabled Enable or disable (True or False) repository, @@ -276483,12 +276846,14 @@ Enable or disable (True or False) RPM files caching. Enable or disable (True or False) GPG check for this repository. .TP .B gpgautoimport -Automatically trust and import new repository. +False +If set to True, automatically trust and import public GPG key for +the repository. .UNINDENT .sp Key/Value pairs may also be removed from a repo\(aqs configuration by setting a key to a blank value. Bear in mind that a name cannot be deleted, and a -url can only be deleted if a mirrorlist is specified (or vice versa). +URL can only be deleted if a \fBmirrorlist\fP is specified (or vice versa). .sp CLI Examples: .INDENT 7.0 @@ -278471,7 +278836,7 @@ return: The /run enpoint can also be used to issue commands using the salt\-ssh subsystem. .sp -When using salt\-ssh, eauth credentials should not be supplied. Instad, +When using salt\-ssh, eauth credentials should not be supplied. Instead, authentication should be handled by the SSH layer itself. The use of the salt\-ssh client does not require a salt master to be running. Instead, only a roster file must be present in the salt configuration @@ -278678,7 +279043,7 @@ very busy and can quickly overwhelm the memory allocated to a browser tab. .UNINDENT .sp -A full, working proof\-of\-concept JavaScript appliction is available +A full, working proof\-of\-concept JavaScript application is available \fI\%adjacent to this file\fP\&. It can be viewed by pointing a browser at the \fB/app\fP endpoint in a running \fBrest_cherrypy\fP instance. @@ -280442,7 +280807,7 @@ _ T{ \fBec2_pillar\fP T} T{ -Retrieve EC2 instance data for minions. +Retrieve EC2 instance data for minions for ec2_tags and ec2_tags_list T} _ T{ @@ -280454,7 +280819,7 @@ _ T{ \fBfile_tree\fP T} T{ -\fBFile_tree\fP is an external pillar that allows +The \fBfile_tree\fP external pillar allows values from all files in a directory tree to be imported as Pillar data. T} _ T{ @@ -281256,15 +281621,29 @@ file is run in a subprocess and the changed variables are then added .UNINDENT .SS salt.pillar.ec2_pillar .sp -Retrieve EC2 instance data for minions. +Retrieve EC2 instance data for minions for ec2_tags and ec2_tags_list .sp -The minion id must be the instance\-id retrieved from AWS. As an -option, use_grain can be set to True. This allows the use of an +The minion id must be the AWS instance\-id or value in \(aqtag_match_key\(aq. +For example set \(aqtag_match_key\(aq to \(aqName\(aq, to have the minion\-id matched against the +tag \(aqName\(aq. The tag contents must be unique. The value of tag_match_value can +be \(aquqdn\(aq or \(aqasis\(aq. if \(aquqdn\(aq strips any domain before comparison. 
+.sp +The option use_grain can be set to True. This allows the use of an instance\-id grain instead of the minion\-id. Since this is a potential security risk, the configuration can be further expanded to include a list of minions that are trusted to only allow the alternate id of the instances to specific hosts. There is no glob matching at this time. +.sp +The optional \(aqtag_list_key\(aq indicates which keys should be added to +\(aqec2_tags_list\(aq and be split by tag_list_sep (default \fI;\fP). If a tag key is +included in \(aqtag_list_key\(aq it is removed from ec2_tags. If a tag does not +exist it is still included as an empty list. +.INDENT 0.0 +.INDENT 3.5 +Note: restart the salt\-master for changes to take effect. +.UNINDENT +.UNINDENT .INDENT 0.0 .INDENT 3.5 .sp @@ -281272,6 +281651,11 @@ this time. .ft C ext_pillar: \- ec2_pillar: + tag_match_key: \(aqName\(aq + tag_match_value: \(aqasis\(aq + tag_list_key: + \- Role + tag_list_sep: \(aq;\(aq use_grain: True minion_ids: \- trusted\-minion\-1 @@ -281288,7 +281672,7 @@ returns a list of key/value pairs for all of the EC2 tags assigned to the instance. .INDENT 0.0 .TP -.B salt.pillar.ec2_pillar.ext_pillar(minion_id, pillar, use_grain=False, minion_ids=None) +.B salt.pillar.ec2_pillar.ext_pillar(minion_id, pillar, use_grain=False, minion_ids=None, tag_match_key=None, tag_match_value=\(aqasis\(aq, tag_list_key=None, tag_list_sep=\(aq;\(aq) Execute a command and read the output as YAML .UNINDENT .SS salt.pillar.etcd_pillar @@ -281401,15 +281785,27 @@ Check etcd for all data .UNINDENT .SS salt.pillar.file_tree .sp -\fBFile_tree\fP is an external pillar that allows -values from all files in a directory tree to be imported as Pillar data. +The \fBfile_tree\fP external pillar allows values from all files in a directory +tree to be imported as Pillar data. .sp -Note this is an external pillar, and is subject to the rules and constraints -governing external pillars detailed here: external\-pillars\&. +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +This is an external pillar and is subject to the rules and +constraints governing external pillars. +.UNINDENT +.UNINDENT .sp New in version 2015.5.0. -.SS Example Configuration +.sp +In this pillar, data is organized by either Minion ID or Nodegroup name. To +setup pillar data for a specific Minion, place it in +\fB/hosts/\fP\&. To setup pillar data for an entire +Nodegroup, place it in \fB/nodegroups/\fP where +\fB\fP is the Nodegroup\(aqs name. +.SS Example \fBfile_tree\fP Pillar +.SS Master Configuration .INDENT 0.0 .INDENT 3.5 .sp @@ -281417,24 +281813,47 @@ New in version 2015.5.0. .ft C ext_pillar: \- file_tree: - root_dir: /path/to/root/directory + root_dir: /srv/ext_pillar follow_dir_links: False keep_newline: True + +node_groups: + internal_servers: \(aqL@bob,stuart,kevin\(aq .ft P .fi .UNINDENT .UNINDENT -.sp -The \fBroot_dir\fP parameter is required and points to the directory where files -for each host are stored. The \fBfollow_dir_links\fP parameter is optional and -defaults to False. If \fBfollow_dir_links\fP is set to True, this external pillar -will follow symbolic links to other directories. -.sp -\fBWARNING:\fP +.SS Pillar Configuration .INDENT 0.0 .INDENT 3.5 -Be careful when using \fBfollow_dir_links\fP, as a recursive symlink chain -will result in unexpected results. 
+.sp +.nf +.ft C +(salt\-master) # tree /srv/ext_pillar +/srv/ext_pillar/ +|\-\- hosts +| |\-\- bob +| | |\-\- apache +| | | \(ga\-\- config.d +| | | |\-\- 00_important.conf +| | | \(ga\-\- 20_bob_extra.conf +| | \(ga\-\- corporate_app +| | \(ga\-\- settings +| | \(ga\-\- bob_settings.cfg +| \(ga\-\- kevin +| |\-\- apache +| | \(ga\-\- config.d +| | \(ga\-\- 00_important.conf +| \(ga\-\- corporate_app +| \(ga\-\- settings +| \(ga\-\- kevin_settings.cfg +\(ga\-\- nodegroups + \(ga\-\- internal_servers + \(ga\-\- corporate_app + \(ga\-\- settings + \(ga\-\- common_settings.cfg +.ft P +.fi .UNINDENT .UNINDENT .sp @@ -281462,31 +281881,58 @@ directories named for minion IDs and nodegroups underneath the \fBroot_dir\fP .sp .nf .ft C -ext_pillar: - \- file_tree: - root_dir: /path/to/root/directory - keep_newline: - \- files/testdir/* +(salt\-master) # salt bob pillar.items +bob: + \-\-\-\-\-\-\-\-\-\- + apache: + \-\-\-\-\-\-\-\-\-\- + config.d: + \-\-\-\-\-\-\-\-\-\- + 00_important.conf: + + 20_bob_extra.conf: + + corporate_app: + \-\-\-\-\-\-\-\-\-\- + settings: + \-\-\-\-\-\-\-\-\-\- + common_settings: + // This is the main settings file for the corporate + // internal web app + main_setting: probably + bob_settings: + role: bob .ft P .fi .UNINDENT .UNINDENT - .sp \fBNOTE:\fP .INDENT 0.0 .INDENT 3.5 -In earlier releases, this documentation incorrectly stated that binary -files would not affected by the \fBkeep_newline\fP configuration. However, -this module does not actually distinguish between binary and text files. +The leaf data in the example shown is the contents of the pillar files. .UNINDENT .UNINDENT -.sp -Changed in version 2017.7.0: Templating/rendering has been added. You can now specify a default render -pipeline and a black\- and whitelist of (dis)allowed renderers. -.sp -\fBtemplate\fP must be set to \fBTrue\fP for templating to happen. .INDENT 0.0 +.TP +.B salt.pillar.file_tree.ext_pillar(minion_id, pillar, root_dir=None, follow_dir_links=False, debug=False, keep_newline=False, render_default=None, renderer_blacklist=None, renderer_whitelist=None, template=False) +Compile pillar data from the given \fBroot_dir\fP specific to Nodegroup names +and Minion IDs. +.sp +If a Minion\(aqs ID is not found at \fB/host/\fP or if it +is not included in any Nodegroups named at +\fB/nodegroups/\fP, no pillar data provided by this +pillar module will be available for that Minion. +.sp +Changed in version 2017.7.0: Templating/rendering has been added. You can now specify a default +render pipeline and a black\- and whitelist of (dis)allowed renderers. +.sp + +.nf +:param:\(gatemplate\(ga +.fi + must be set to \fBTrue\fP for templating to happen. +.INDENT 7.0 .INDENT 3.5 .sp .nf @@ -281506,133 +281952,148 @@ ext_pillar: .UNINDENT .UNINDENT -.SS Assigning Pillar Data to Individual Hosts +.INDENT 7.0 +.TP +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBminion_id\fP \-\- The ID of the Minion whose pillar data is to be collected +.IP \(bu 2 +\fBpillar\fP \-\- Unused by the \fBfile_tree\fP pillar module +.IP \(bu 2 +\fBroot_dir\fP \-\- Filesystem directory used as the root for pillar data (e.g. +\fB/srv/ext_pillar\fP) +.IP \(bu 2 +\fBfollow_dir_links\fP \-\- .sp -To configure pillar data for each host, this external pillar will recursively -iterate over \fBroot_dir\fP/hosts/\fBid\fP (where \fBid\fP is a minion ID), and -compile pillar data with each subdirectory as a dictionary key and each file -as a value. +Follow symbolic links to directories while collecting pillar files. +Defaults to \fBFalse\fP\&. 
.sp -For example, the following \fBroot_dir\fP tree: -.INDENT 0.0 +\fBWARNING:\fP +.INDENT 2.0 +.INDENT 3.5 +Care should be exercised when enabling this option as it will +follow links that point outside of +.nf +:param:\(garoot_dir\(ga +.fi +\&. +.UNINDENT +.UNINDENT +.sp +\fBWARNING:\fP +.INDENT 2.0 +.INDENT 3.5 +Symbolic links that lead to infinite recursion are not filtered. +.UNINDENT +.UNINDENT + +.IP \(bu 2 +\fBdebug\fP \-\- Enable debug information at log level \fBdebug\fP\&. Defaults to +\fBFalse\fP\&. This option may be useful to help debug errors when setting +up the \fBfile_tree\fP pillar module. +.IP \(bu 2 +\fBkeep_newline\fP \-\- +.sp +Preserve the end\-of\-file newline in files. Defaults to \fBFalse\fP\&. +This option may either be a boolean or a list of file globs (as defined +by the \fI\%Python fnmatch package\fP) for which end\-of\-file +newlines are to be kept. +.sp +\fBkeep_newline\fP should be turned on if the pillar data is intended to +be used to deploy a file using \fBcontents_pillar\fP with a +\fBfile.managed\fP state. +.sp +Changed in version 2015.8.4: The \fBraw_data\fP parameter has been renamed to \fBkeep_newline\fP\&. In +earlier releases, \fBraw_data\fP must be used. Also, this parameter +can now be a list of globs, allowing for more granular control over +which pillar values keep their end\-of\-file newline. The globs match +paths relative to the directories named for Minion IDs and +Nodegroup namess underneath the +.nf +:param:\(garoot_dir\(ga +.fi +\&. +.INDENT 2.0 .INDENT 3.5 .sp .nf .ft C -\&./hosts/ -\&./hosts/test\-host/ -\&./hosts/test\-host/files/ -\&./hosts/test\-host/files/testdir/ -\&./hosts/test\-host/files/testdir/file1.txt -\&./hosts/test\-host/files/testdir/file2.txt -\&./hosts/test\-host/files/another\-testdir/ -\&./hosts/test\-host/files/another\-testdir/symlink\-to\-file1.txt +ext_pillar: + \- file_tree: + root_dir: /srv/ext_pillar + keep_newline: + \- apache/config.d/* + \- corporate_app/settings/* .ft P .fi .UNINDENT .UNINDENT -.sp -will result in the following pillar tree for minion with ID \fBtest\-host\fP: -.INDENT 0.0 -.INDENT 3.5 -.sp -.nf -.ft C -test\-host: - \-\-\-\-\-\-\-\-\-\- - files: - \-\-\-\-\-\-\-\-\-\- - another\-testdir: - \-\-\-\-\-\-\-\-\-\- - symlink\-to\-file1.txt: - Contents of file #1. - testdir: - \-\-\-\-\-\-\-\-\-\- - file1.txt: - Contents of file #1. - - file2.txt: - Contents of file #2. -.ft P -.fi -.UNINDENT -.UNINDENT .sp \fBNOTE:\fP -.INDENT 0.0 +.INDENT 2.0 .INDENT 3.5 -Subdirectories underneath \fBroot_dir\fP/hosts/\fBid\fP become nested -dictionaries, as shown above. +In earlier releases, this documentation incorrectly stated that +binary files would not affected by the \fBkeep_newline\fP\&. However, +this module does not actually distinguish between binary and text +files. .UNINDENT .UNINDENT -.SS Assigning Pillar Data to Entire Nodegroups + +.IP \(bu 2 +\fBrender_default\fP \-\- .sp -To assign Pillar data to all minions in a given nodegroup, this external pillar -recursively iterates over \fBroot_dir\fP/nodegroups/\fBnodegroup\fP (where -\fBnodegroup\fP is the name of a nodegroup), and like for individual hosts, -compiles pillar data with each subdirectory as a dictionary key and each file -as a value. -.sp -\fBIMPORTANT:\fP -.INDENT 0.0 -.INDENT 3.5 -If the same Pillar key is set for a minion both by nodegroup and by -individual host, then the value set for the individual host will take -precedence. 
-.UNINDENT -.UNINDENT -.sp -For example, the following \fBroot_dir\fP tree: -.INDENT 0.0 +Override Salt\(aqs \fBdefault global renderer\fP for +the \fBfile_tree\fP pillar. +.INDENT 2.0 .INDENT 3.5 .sp .nf .ft C -\&./nodegroups/ -\&./nodegroups/test\-group/ -\&./nodegroups/test\-group/files/ -\&./nodegroups/test\-group/files/testdir/ -\&./nodegroups/test\-group/files/testdir/file1.txt -\&./nodegroups/test\-group/files/testdir/file2.txt -\&./nodegroups/test\-group/files/another\-testdir/ -\&./nodegroups/test\-group/files/another\-testdir/symlink\-to\-file1.txt +render_default: jinja .ft P .fi .UNINDENT .UNINDENT + +.IP \(bu 2 +\fBrenderer_blacklist\fP \-\- .sp -will result in the following pillar data for minions in the node group -\fBtest\-group\fP: -.INDENT 0.0 +Disallow renderers for pillar files. +.INDENT 2.0 .INDENT 3.5 .sp .nf .ft C -test\-host: - \-\-\-\-\-\-\-\-\-\- - files: - \-\-\-\-\-\-\-\-\-\- - another\-testdir: - \-\-\-\-\-\-\-\-\-\- - symlink\-to\-file1.txt: - Contents of file #1. - - testdir: - \-\-\-\-\-\-\-\-\-\- - file1.txt: - Contents of file #1. - - file2.txt: - Contents of file #2. +renderer_blacklist: + \- json .ft P .fi .UNINDENT .UNINDENT -.INDENT 0.0 -.TP -.B salt.pillar.file_tree.ext_pillar(minion_id, pillar, root_dir=None, follow_dir_links=False, debug=False, keep_newline=False, render_default=None, renderer_blacklist=None, renderer_whitelist=None, template=False) -Compile pillar data for the specified minion ID + +.IP \(bu 2 +\fBrenderer_whitelist\fP \-\- +.sp +Allow renderers for pillar files. +.INDENT 2.0 +.INDENT 3.5 +.sp +.nf +.ft C +renderer_whitelist: + \- yaml + \- jinja +.ft P +.fi +.UNINDENT +.UNINDENT + +.IP \(bu 2 +\fBtemplate\fP \-\- Enable templating of pillar files. Defaults to \fBFalse\fP\&. +.UNINDENT +.UNINDENT .UNINDENT .SS salt.pillar.foreman .sp @@ -289543,7 +290004,7 @@ Some fields cannot be removed: By default, the following extra fields are returned (displayed): .INDENT 7.0 .IP \(bu 2 -\fBconnection_stats\fP: connection stats, as descibed below +\fBconnection_stats\fP: connection stats, as described below .IP \(bu 2 \fBimport_policy\fP: the name of the import policy .IP \(bu 2 @@ -314315,8 +314776,7 @@ mycustompkg: \fBcmd.wait\fP itself does not do anything; all functionality is inside its \fBmod_watch\fP function, which is called by \fBwatch\fP on changes. .sp -\fBcmd.wait\fP will be deprecated in future due to the confusion it causes. The -preferred format is using the onchanges Requisite, which +The preferred format is using the onchanges Requisite, which works on \fBcmd.run\fP as well as on any other state. The example would then look as follows: .INDENT 0.0 .INDENT 3.5 @@ -317232,7 +317692,7 @@ using \fBdocker.stop\fP\&. If a were set, then a timeout of 10 seconds will be used. .sp Changed in version 2017.7.0: This option was renamed from \fBstop_timeout\fP to -\fBshutdown_timeout\fP to acommodate the \fBstop_timeout\fP container +\fBshutdown_timeout\fP to accommodate the \fBstop_timeout\fP container configuration setting. .TP @@ -319098,7 +319558,7 @@ The below two examples are equivalent: .nf .ft C foo: - dockerng.running: + docker_container.running: \- image: bar/baz:latest \- ulimits: nofile=1024:1024,nproc=60 .ft P @@ -319111,7 +319571,7 @@ foo: .nf .ft C foo: - dockerng.running: + docker_container.running: \- image: bar/baz:latest \- ulimits: \- nofile=1024:1024 @@ -322717,6 +323177,15 @@ Maintain an edit in a file in a zone delimited by two line markers .sp New in version 2014.1.0. 
+.sp +Changed in version 2017.7.5,2018.3.1: \fBappend_newline\fP argument added. Additionally, to improve +idempotence, if the string represented by \fBmarker_end\fP is found in +the middle of the line, the content preceding the marker will be +removed when the block is replaced. This allows one to remove +\fBappend_newline: False\fP from the SLS and have the block properly +replaced if the end of the content block is immediately followed by the +\fBmarker_end\fP (i.e. no newline before the marker). + .sp A block of content delimited by comments can help you manage several lines entries without worrying about old entries removal. This can help you @@ -322821,8 +323290,9 @@ md5 32 See the \fBsource_hash\fP parameter description for \fI\%file.managed\fP function for more details and examples. .TP .B template -The named templating engine will be used to render the downloaded file. -Defaults to \fBjinja\fP\&. The following templates are supported: +jinja +Templating engine to be used to render the downloaded file. The +following engines are supported: .INDENT 7.0 .IP \(bu 2 \fBcheetah\fP @@ -322839,29 +323309,47 @@ Defaults to \fBjinja\fP\&. The following templates are supported: .UNINDENT .TP .B context -Overrides default context variables passed to the template. +Overrides default context variables passed to the template .TP .B defaults -Default context passed to the template. +Default context passed to the template .TP .B append_if_not_found -If markers are not found and set to True then the markers and content -will be appended to the file. Default is \fBFalse\fP +False +If markers are not found and this option is set to \fBTrue\fP, the +content block will be appended to the file. .TP .B prepend_if_not_found -If markers are not found and set to True then the markers and content -will be prepended to the file. Default is \fBFalse\fP +False +If markers are not found and this option is set to \fBTrue\fP, the +content block will be prepended to the file. .TP .B backup The file extension to use for a backup of the file if any edit is made. Set this to \fBFalse\fP to skip making a backup. .TP .B dry_run -Don\(aqt make any edits to the file +False +If \fBTrue\fP, do not make any edits to the file and simply return the +changes that \fIwould\fP be made. .TP .B show_changes -Output a unified diff of the old file and the new file. If \fBFalse\fP -return a boolean if any changes were made +True +Controls how changes are presented. If \fBTrue\fP, the \fBChanges\fP +section of the state return will contain a unified diff of the changes +made. If False, then it will contain a boolean (\fBTrue\fP if any changes +were made, otherwise \fBFalse\fP). +.TP +.B append_newline +Controls whether or not a newline is appended to the content block. If +the value of this argument is \fBTrue\fP then a newline will be added to +the content block. If it is \fBFalse\fP, then a newline will \fInot\fP be +added to the content block. If it is unspecified, then a newline will +only be added to the content block if it does not already end in a +newline. +.sp +New in version 2017.7.5,2018.3.1. + .UNINDENT .sp Example of usage with an accumulator and with a variable: @@ -323644,6 +324132,21 @@ If an equal sign (\fB=\fP) appears in an argument to a Salt command, it is interpreted as a keyword argument in the format of \fBkey=val\fP\&. 
That processing can be bypassed in order to pass an equal sign through to the remote shell command by manually specifying the kwarg: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +update_config: + file.line: + \- name: /etc/myconfig.conf + \- mode: ensure + \- content: my key = my value + \- before: somekey.*? +.ft P +.fi +.UNINDENT +.UNINDENT .UNINDENT .INDENT 0.0 .TP @@ -325079,6 +325582,28 @@ This is only used when datetime is pulled from \fBos.path.getmtime()\fP\&. Defaults to \fBNone\fP which uses the timezone from the locale. .UNINDENT .UNINDENT +.sp +Usage example: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +/var/backups/example_directory: + file.retention_schedule: + \- retain: + most_recent: 5 + first_of_hour: 4 + first_of_day: 7 + first_of_week: 6 # NotImplemented yet. + first_of_month: 6 + first_of_year: all + \- strptime_format: example_name_%Y%m%dT%H%M%S.tar.bz2 + \- timezone: None +.ft P +.fi +.UNINDENT +.UNINDENT .UNINDENT .INDENT 0.0 .TP @@ -327340,25 +327865,55 @@ Manage Grafana v4.0 Dashboards New in version 2017.7.0. .INDENT 0.0 +.TP +.B configuration +This state requires a configuration profile to be configured +in the minion config, minion pillar, or master config. The module will use +the \(aqgrafana\(aq key by default, if defined. +.sp +Example configuration using basic authentication: +.INDENT 7.0 .INDENT 3.5 .sp .nf .ft C grafana: + grafana_url: http://grafana.localhost + grafana_user: admin + grafana_password: admin grafana_timeout: 3 - grafana_token: qwertyuiop - grafana_url: \(aqhttps://url.com\(aq .ft P .fi .UNINDENT .UNINDENT +.sp +Example configuration using token based authentication: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +grafana: + grafana_url: http://grafana.localhost + grafana_token: token + grafana_timeout: 3 +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.sp +The behavior of this module is to create dashboards if they do not exist, to +add rows if they do not exist in existing dashboards, and to update rows if +they exist in dashboards. The module will not manage rows that are not defined, +allowing users to manage their own custom rows. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C Ensure minimum dashboard is managed: - grafana_dashboard.present: + grafana4_dashboard.present: \- name: insightful\-dashboard \- base_dashboards_from_pillar: \- default_dashboard @@ -327378,11 +327933,6 @@ Ensure minimum dashboard is managed: .fi .UNINDENT .UNINDENT -.sp -The behavior of this module is to create dashboards if they do not exist, to -add rows if they do not exist in existing dashboards, and to update rows if -they exist in dashboards. The module will not manage rows that are not defined, -allowing users to manage their own custom rows. .INDENT 0.0 .TP .B salt.states.grafana4_dashboard.absent(name, orgname=None, profile=u\(aqgrafana\(aq) @@ -327435,30 +327985,473 @@ Manage Grafana v4.0 data sources .sp New in version 2017.7.0. +.INDENT 0.0 +.TP +.B configuration +This state requires a configuration profile to be configured +in the minion config, minion pillar, or master config. The module will use +the \(aqgrafana\(aq key by default, if defined. 
.sp -Token auth setup +Example configuration using basic authentication: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +grafana: + grafana_url: http://grafana.localhost + grafana_user: admin + grafana_password: admin + grafana_timeout: 3 +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +Example configuration using token based authentication: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +grafana: + grafana_url: http://grafana.localhost + grafana_token: token + grafana_timeout: 3 +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.sp +The behavior of this module is to create data sources if the do not exists, and +to update data sources if the already exists. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -grafana_version: 4 +Ensure influxdb data source is present: + grafana4_datasource.present: + \- name: influxdb + \- type: influxdb + \- url: http://localhost:8086 + \- access: proxy + \- basic_auth: true + \- basic_auth_user: myuser + \- basic_auth_password: mypass + \- is_default: true +.ft P +.fi +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.states.grafana4_datasource.absent(name, orgname=None, profile=\(aqgrafana\(aq) +Ensure that a data source is present. +.INDENT 7.0 +.TP +.B name +Name of the data source to remove. +.TP +.B orgname +Name of the organization from which the data source should be absent. +.TP +.B profile +Configuration profile used to connect to the Grafana instance. +Default is \(aqgrafana\(aq. +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.states.grafana4_datasource.present(name, type, url, access=None, user=None, password=None, database=None, basic_auth=None, basic_auth_user=None, basic_auth_password=None, tls_auth=None, json_data=None, is_default=None, with_credentials=None, type_logo_url=None, orgname=None, profile=\(aqgrafana\(aq) +Ensure that a data source is present. +.INDENT 7.0 +.TP +.B name +Name of the data source. +.TP +.B type +Type of the datasource (\(aqgraphite\(aq, \(aqinfluxdb\(aq etc.). +.TP +.B access +Use proxy or direct. Default: proxy +.TP +.B url +The URL to the data source API. +.TP +.B user +Optional \- user to authenticate with the data source. +.TP +.B password +Optional \- password to authenticate with the data source. +.TP +.B database +Optional \- database to use with the data source. +.TP +.B basic_auth +Optional \- set to True to use HTTP basic auth to authenticate with the +data source. +.TP +.B basic_auth_user +Optional \- HTTP basic auth username. +.TP +.B basic_auth_password +Optional \- HTTP basic auth password. +.TP +.B json_data +Optional \- additional json data to post (eg. "timeInterval"). +.TP +.B is_default +Optional \- set data source as default. +.TP +.B with_credentials +Optional \- Whether credentials such as cookies or auth headers should +be sent with cross\-site requests. +.TP +.B type_logo_url +Optional \- Logo to use for this datasource. +.TP +.B orgname +Name of the organization in which the data source should be present. +.TP +.B profile +Configuration profile used to connect to the Grafana instance. +Default is \(aqgrafana\(aq. +.UNINDENT +.UNINDENT +.SS salt.states.grafana4_org module +.sp +Manage Grafana v4.0 orgs +.sp +New in version 2017.7.0. + +.INDENT 0.0 +.TP +.B configuration +This state requires a configuration profile to be configured +in the minion config, minion pillar, or master config. The module will use +the \(aqgrafana\(aq key by default, if defined. 
+.sp +Example configuration using basic authentication: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C grafana: - grafana_timeout: 5 + grafana_url: http://grafana.localhost + grafana_user: admin + grafana_password: admin + grafana_timeout: 3 +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +Example configuration using token based authentication: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +grafana: + grafana_url: http://grafana.localhost + grafana_token: token + grafana_timeout: 3 +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +Ensure foobar org is present: + grafana4_org.present: + \- name: foobar + \- theme: "" + \- home_dashboard_id: 0 + \- timezone: "utc" + \- address1: "" + \- address2: "" + \- city: "" + \- zip_code: "" + \- state: "" + \- country: "" +.ft P +.fi +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.states.grafana4_org.absent(name, profile=\(aqgrafana\(aq) +Ensure that a org is present. +.INDENT 7.0 +.TP +.B name +Name of the org to remove. +.TP +.B profile +Configuration profile used to connect to the Grafana instance. +Default is \(aqgrafana\(aq. +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.states.grafana4_org.present(name, users=None, theme=None, home_dashboard_id=None, timezone=None, address1=None, address2=None, city=None, zip_code=None, address_state=None, country=None, profile=\(aqgrafana\(aq) +Ensure that an organization is present. +.INDENT 7.0 +.TP +.B name +Name of the org. +.TP +.B users +Optional \- Dict of user/role associated with the org. Example: +users: +.INDENT 7.0 +.INDENT 3.5 +foo: Viewer +bar: Editor +.UNINDENT +.UNINDENT +.TP +.B theme +Optional \- Selected theme for the org. +.TP +.B home_dashboard_id +Optional \- Home dashboard for the org. +.TP +.B timezone +Optional \- Timezone for the org (one of: "browser", "utc", or ""). +.TP +.B address1 +Optional \- address1 of the org. +.TP +.B address2 +Optional \- address2 of the org. +.TP +.B city +Optional \- city of the org. +.TP +.B zip_code +Optional \- zip_code of the org. +.TP +.B address_state +Optional \- state of the org. +.TP +.B country +Optional \- country of the org. +.TP +.B profile +Configuration profile used to connect to the Grafana instance. +Default is \(aqgrafana\(aq. +.UNINDENT +.UNINDENT +.SS salt.states.grafana4_user module +.sp +Manage Grafana v4.0 users +.sp +New in version 2017.7.0. + +.INDENT 0.0 +.TP +.B configuration +This state requires a configuration profile to be configured +in the minion config, minion pillar, or master config. The module will use +the \(aqgrafana\(aq key by default, if defined. +.sp +Example configuration using basic authentication: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +grafana: + grafana_url: http://grafana.localhost + grafana_user: admin + grafana_password: admin + grafana_timeout: 3 +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +Example configuration using token based authentication: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +grafana: + grafana_url: http://grafana.localhost + grafana_token: token + grafana_timeout: 3 +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +Ensure foobar user is present: + grafana4_user.present: + \- name: foobar + \- password: mypass + \- email: "foobar@localhost" + \- fullname: Foo Bar + \- is_admin: true +.ft P +.fi +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.states.grafana4_user.absent(name, profile=\(aqgrafana\(aq) +Ensure that a user is present. +.INDENT 7.0 +.TP +.B name +Name of the user to remove. 
+.TP +.B profile +Configuration profile used to connect to the Grafana instance. +Default is \(aqgrafana\(aq. +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.states.grafana4_user.present(name, password, email, is_admin=False, fullname=None, theme=None, profile=\(aqgrafana\(aq) +Ensure that a user is present. +.INDENT 7.0 +.TP +.B name +Name of the user. +.TP +.B password +Password of the user. +.TP +.B email +Email of the user. +.TP +.B is_admin +Optional \- Set user as admin user. Default: False +.TP +.B fullname +Optional \- Full name of the user. +.TP +.B theme +Optional \- Selected theme of the user. +.TP +.B profile +Configuration profile used to connect to the Grafana instance. +Default is \(aqgrafana\(aq. +.UNINDENT +.UNINDENT +.SS salt.states.grafana_dashboard module +.sp +Manage Grafana v2.0 Dashboards +.sp +New in version 2016.3.0. + +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +grafana: + grafana_timeout: 3 grafana_token: qwertyuiop grafana_url: \(aqhttps://url.com\(aq .ft P .fi .UNINDENT .UNINDENT -.sp -Basic auth setup .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -grafana_version: 4 +Ensure minimum dashboard is managed: + grafana_dashboard.present: + \- name: insightful\-dashboard + \- base_dashboards_from_pillar: + \- default_dashboard + \- base_rows_from_pillar: + \- default_row + \- base_panels_from_pillar: + \- default_panel + \- dashboard: + rows: + \- title: Usage + panels: + \- targets: + \- target: alias(constantLine(50), \(aqmax\(aq) + title: Imaginary + type: graph +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +The behavior of this module is to create dashboards if they do not exist, to +add rows if they do not exist in existing dashboards, and to update rows if +they exist in dashboards. The module will not manage rows that are not defined, +allowing users to manage their own custom rows. +.INDENT 0.0 +.TP +.B salt.states.grafana_dashboard.absent(name, profile=\(aqgrafana\(aq) +Ensure the named grafana dashboard is absent. +.INDENT 7.0 +.TP +.B name +Name of the grafana dashboard. +.TP +.B profile +A pillar key or dict that contains grafana information +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP +.B salt.states.grafana_dashboard.present(name, base_dashboards_from_pillar=None, base_panels_from_pillar=None, base_rows_from_pillar=None, dashboard=None, profile=\(aqgrafana\(aq) +Ensure the grafana dashboard exists and is managed. +.INDENT 7.0 +.TP +.B name +Name of the grafana dashboard. +.TP +.B base_dashboards_from_pillar +A pillar key that contains a list of dashboards to inherit from +.TP +.B base_panels_from_pillar +A pillar key that contains a list of panels to inherit from +.TP +.B base_rows_from_pillar +A pillar key that contains a list of rows to inherit from +.TP +.B dashboard +A dict that defines a dashboard that should be managed. +.TP +.B profile +A pillar key or dict that contains grafana information +.UNINDENT +.UNINDENT +.SS salt.states.grafana_datasource module +.sp +Manage Grafana v2.0 data sources +.sp +New in version 2016.3.0. + +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C grafana: grafana_timeout: 5 grafana_user: grafana @@ -328392,7 +329385,7 @@ Stack can be set as either absent or deploy. heat.deployed: \- name: \- template: #Required - \- enviroment: + \- environment: \- params: {} \- poll: 5 \- rollback: False @@ -328422,6 +329415,11 @@ rollback: True .UNINDENT .UNINDENT .UNINDENT +.sp +New in version 2017.7.5,2018.3.1: The spelling mistake in parameter \fIenviroment\fP was corrected to \fIenvironment\fP\&. 
+The misspelled version is still supported for backward compatibility, but will +be removed in Salt Neon. + .INDENT 0.0 .TP .B salt.states.heat.absent(name, poll=5, timeout=60, profile=None) @@ -328443,7 +329441,7 @@ Profile to use .UNINDENT .INDENT 0.0 .TP -.B salt.states.heat.deployed(name, template=None, enviroment=None, params=None, poll=5, rollback=False, timeout=60, update=False, profile=None, **connection_args) +.B salt.states.heat.deployed(name, template=None, environment=None, params=None, poll=5, rollback=False, timeout=60, update=False, profile=None, **connection_args) Deploy stack with the specified properties .INDENT 7.0 .TP @@ -328453,14 +329451,14 @@ The name of the stack .B template File of template .TP -.B enviroment -File of enviroment +.B environment +File of environment .TP .B params Parameter dict used to create the stack .TP .B poll -Poll(in sec.) and report events until stack complete +Poll (in sec.) and report events until stack complete .TP .B rollback Enable rollback on create failure @@ -328471,6 +329469,11 @@ Stack creation timeout in minutes .B profile Profile to use .UNINDENT +.sp +New in version 2017.7.5,2018.3.1: The spelling mistake in parameter \fIenviroment\fP was corrected to \fIenvironment\fP\&. +The misspelled version is still supported for backward compatibility, but will +be removed in Salt Neon. + .UNINDENT .SS salt.states.hg .SS Interaction with Mercurial repositories @@ -328590,7 +329593,7 @@ hipchat\-message: \- api_url: https://hipchat.myteam.com \- api_key: peWcBiMOS9HrZG15peWcBiMOS9HrZG15 \- api_version: v1 - \- color: green + \- message_color: green \- notify: True .ft P .fi @@ -328630,7 +329633,7 @@ if not specified in the configuration options of master or minion. The api version for Hipchat to use, if not specified in the configuration options of master or minion. .TP -.B color +.B message_color The color the Hipchat message should be displayed in. One of the following, default: yellow "yellow", "red", "green", "purple", "gray", or "random". .TP @@ -328925,7 +329928,7 @@ Optional interval to delay requests by N seconds to reduce the number of request \fBNOTE:\fP .INDENT 7.0 .INDENT 3.5 -All other arguements are passed to the http.query state. +All other arguments are passed to the http.query state. .UNINDENT .UNINDENT .UNINDENT @@ -330903,7 +331906,7 @@ If this option is specified, the commit will be rollbacked in the .IP \(bu 2 diffs_file: Path to the file where the diff (difference in old configuration -and the commited configuration) will be stored.(default = None) +and the committed configuration) will be stored.(default = None) Note that the file will be stored on the proxy minion. To push the files to the master use the salt\(aqs following execution module: \fBcp.push\fP .UNINDENT @@ -331166,7 +332169,7 @@ The rpc to be executed. (default = None) \fBOptional\fP \-\- .INDENT 2.0 .IP \(bu 2 dest: -Destination file where the rpc ouput is stored. (default = None) +Destination file where the rpc output is stored. (default = None) Note that the file will be stored on the proxy minion. To push the files to the master use the salt\(aqs following execution module: \fBcp.push\fP .IP \(bu 2 @@ -332122,6 +333125,13 @@ New in version 2016.3.0. NOTE: This module requires the proper pillar values set. See salt.modules.kubernetes for more information. .sp +\fBWARNING:\fP +.INDENT 0.0 +.INDENT 3.5 +Configuration options will change in Flourine. +.UNINDENT +.UNINDENT +.sp The kubernetes module is used to manage different kubernetes resources. 
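.sp
For illustration only, a minimal sketch of a deployment managed through this
module\(aqs \fBdeployment_present\fP function might look like the following; the
resource name, namespace, labels and image are placeholders, and the pillar
connection settings mentioned above are assumed to be in place:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
# illustrative sketch only \-\- names, namespace and image are placeholders
frontend\-deployment:
  kubernetes.deployment_present:
    \- namespace: default
    \- spec:
        replicas: 1
        template:
          metadata:
            labels:
              app: frontend
          spec:
            containers:
              \- name: nginx
                image: nginx:latest
                ports:
                  \- containerPort: 80
.ft P
.fi
.UNINDENT
.UNINDENT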
.INDENT 0.0 .INDENT 3.5 @@ -336338,7 +337348,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .UNINDENT .sp CLI Example: @@ -336481,7 +337491,7 @@ IPv4 only (from \fBsource_address\fP field). .INDENT 3.5 The first method allows the user to eventually apply complex manipulation and / or retrieve the data from external services before passing the -data to the state. The second one is more straighforward, for less +data to the state. The second one is more straightforward, for less complex cases when loading the data directly from the pillar is sufficient. .UNINDENT .UNINDENT @@ -336524,7 +337534,7 @@ Included only for compatibility with \fBpillarenv_from_saltenv\fP, and is otherwise ignored. .TP .B merge_pillar: \fBFalse\fP -Merge the \fBfilters\fP wil the corresponding values from the pillar. Default: \fBFalse\fP\&. +Merge the \fBfilters\fP will the corresponding values from the pillar. Default: \fBFalse\fP\&. .sp \fBNOTE:\fP .INDENT 7.0 @@ -336564,7 +337574,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .UNINDENT .sp CLI Example: @@ -336784,7 +337794,7 @@ netacl_example: .INDENT 3.5 The first method allows the user to eventually apply complex manipulation and / or retrieve the data from external services before passing the -data to the state. The second one is more straighforward, for less +data to the state. The second one is more straightforward, for less complex cases when loading the data directly from the pillar is sufficient. .UNINDENT .UNINDENT @@ -336851,7 +337861,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .TP .B source_service A special service to choose from. This is a helper so the user is able to @@ -337267,7 +338277,7 @@ update_icmp_first_term: .INDENT 3.5 The first method allows the user to eventually apply complex manipulation and / or retrieve the data from external services before passing the -data to the state. The second one is more straighforward, for less +data to the state. The second one is more straightforward, for less complex cases when loading the data directly from the pillar is sufficient. .UNINDENT .UNINDENT @@ -337330,8 +338340,8 @@ To replace the config, set \fBreplace\fP to \fBTrue\fP\&. This option is recomme \fBWARNING:\fP .INDENT 7.0 .INDENT 3.5 -The spport for NAPALM native templates will be dropped beginning with Salt Fluorine. -Implicitly, the \fBtemplate_path\fP argument will be depreacted and removed. +The support for NAPALM native templates will be dropped beginning with Salt Fluorine. +Implicitly, the \fBtemplate_path\fP argument will be deprecated and removed. .UNINDENT .UNINDENT .INDENT 7.0 @@ -337356,7 +338366,7 @@ file_roots: .sp Placing the template under \fB/etc/salt/states/templates/example.jinja\fP, it can be used as \fBsalt://templates/example.jinja\fP\&. -Alternatively, for local files, the user can specify the abolute path. 
+Alternatively, for local files, the user can specify the absolute path. If remotely, the source can be retrieved via \fBhttp\fP, \fBhttps\fP or \fBftp\fP\&. .sp Examples: @@ -337437,7 +338447,7 @@ Dry run? If set to \fBTrue\fP, will apply the config, discard and return the cha Commit? Default: \fBTrue\fP\&. .TP .B debug: False -Debug mode. Will insert a new key under the output dictionary, as \fBloaded_config\fP contaning the raw +Debug mode. Will insert a new key under the output dictionary, as \fBloaded_config\fP containing the raw result after the template was rendered. .TP .B replace: False @@ -337451,7 +338461,7 @@ Default variables/context passed to the template. ** .fi template_vars -Dictionary with the arguments/context to be used when the template is rendered. Do not explicitely specify this +Dictionary with the arguments/context to be used when the template is rendered. Do not explicitly specify this argument. This represents any other variable that will be sent to the template rendering system. Please see an example below! In both \fBntp_peers_example_using_pillar\fP and \fBntp_peers_example\fP, \fBpeers\fP is sent as template variable. @@ -338296,7 +339306,7 @@ only when there are differences between the existing configuration on the device and the expected configuration. Depending on the platform and hardware capabilities, one could be more optimal than the other. -Additionally, the output of the \fBmanaged\fP is diferent, +Additionally, the output of the \fBmanaged\fP is different, in such a way that the \fBpchange\fP field in the output contains structured data, rather than text. .UNINDENT @@ -338323,7 +339333,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .TP .B replace: \fBFalse\fP Should replace the config with the new generate one? @@ -338404,7 +339414,7 @@ Commit? Default: \fBTrue\fP\&. .TP .B debug: \fBFalse\fP Debug mode. Will insert a new key under the output dictionary, -as \fBloaded_config\fP contaning the raw configuration loaded on the device. +as \fBloaded_config\fP containing the raw configuration loaded on the device. .TP .B replace: \fBFalse\fP Should replace the config with the new generate one? @@ -340713,7 +341723,7 @@ specify user account control properties \fBNOTE:\fP .INDENT 7.0 .INDENT 3.5 -Only the follwing can be set: +Only the following can be set: \- N: No password required \- D: Account disabled \- H: Home directory required @@ -343217,6 +344227,11 @@ A URL which points to a file containing a collection of baseurls Sometimes you want to supply additional information, but not as enabled configuration. Anything supplied for this list will be saved in the repo configuration with a comment marker (#) in front. +.TP +.B gpgautoimport +Only valid for Zypper package manager. If set to True, automatically +trust and import public GPG key for the repository. The key should be +specified with \fBgpgkey\fP parameter. See details below. .UNINDENT .sp Additional configuration values seen in repo files, such as \fBgpgkey\fP or @@ -343321,7 +344336,7 @@ and/or installing packages. .TP .B enabled True -Included to reduce confusion due to yum/dnf/zypper\(aqs use of the +Included to reduce confusion due to YUM/DNF/Zypper\(aqs use of the \fBenabled\fP argument. 
If this is passed for an APT\-based distro, then the reverse will be passed as \fBdisabled\fP\&. For example, passing \fBenabled=False\fP will assume \fBdisabled=False\fP\&. @@ -353218,7 +354233,7 @@ Ensure Account Lockout Duration: .sp .nf .ft C -Acount lockout duration: +Account lockout duration: gpo.set: \- setting: 120 \- policy_class: Machine @@ -356679,7 +357694,7 @@ solaris New in version 2017.7.0. .sp -Bellow are some examples of how to use this state. +Below are some examples of how to use this state. Lets start with creating a zone and installing it. .INDENT 0.0 .INDENT 3.5 @@ -356723,7 +357738,7 @@ omipkg1_running: .sp A zone without network access is not very useful. We could update the zone.present state in the example above to add a network interface -or we could use a seperate state for this. +or we could use a separate state for this. .INDENT 0.0 .INDENT 3.5 .sp @@ -356968,7 +357983,7 @@ brand specific options to pass .INDENT 0.0 .TP .B salt.states.zone.present(name, brand, zonepath, properties=None, resources=None) -Ensure a zone with certain properties and resouces +Ensure a zone with certain properties and resources .INDENT 7.0 .TP .B name @@ -363388,8 +364403,8 @@ to install that package. \fBWARNING:\fP .INDENT 0.0 .INDENT 3.5 -The package name and the \fBfull_name\fP must be unique to all -other packages in the software repository. +The package name and the \fBfull_name\fP must be unique to all other packages +in the software repository. .UNINDENT .UNINDENT .sp @@ -363399,18 +364414,20 @@ you need to install a specific version of a piece of software. \fBWARNING:\fP .INDENT 0.0 .INDENT 3.5 -The version must be enclosed in quotes, otherwise the yaml parser -will remove trailing zeros. +The version must be enclosed in quotes, otherwise the yaml parser will +remove trailing zeros. .UNINDENT .UNINDENT .sp \fBNOTE:\fP .INDENT 0.0 .INDENT 3.5 -There are unique situations where previous versions are unavailable. -Take Google Chrome for example. There is only one url provided for a -standalone installation of Google Chrome. +There are unique situations where previous versions are unavailable. Take +Google Chrome for example. There is only one url provided for a standalone +installation of Google Chrome. +.sp (\fI\%https://dl.google.com/edgedl/chrome/install/GoogleChromeStandaloneEnterprise.msi\fP) +.sp When a new version is released, the url just points to the new version. To handle situations such as these, set the version to \fIlatest\fP\&. Salt will install the version of Chrome at the URL and report that version. Here\(aqs an @@ -363441,12 +364458,11 @@ Available parameters are as follows: .INDENT 0.0 .TP .B param str full_name -The Full Name for the software as shown in "Programs and -Features" in the control panel. You can also get this information by -installing the package manually and then running \fBpkg.list_pkgs\fP\&. Here\(aqs -an example of the output from \fBpkg.list_pkgs\fP: -.UNINDENT -.INDENT 0.0 +The Full Name for the software as shown in "Programs and Features" in the +control panel. You can also get this information by installing the package +manually and then running \fBpkg.list_pkgs\fP\&. Here\(aqs an example of the output +from \fBpkg.list_pkgs\fP: +.INDENT 7.0 .INDENT 3.5 .sp .nf @@ -363477,14 +364493,15 @@ test\-2008 .UNINDENT .UNINDENT .sp -Notice the Full Name for Firefox: Mozilla Firefox 17.0.0 (x86 en\-US). That\(aqs -exactly what\(aqs in the \fBfull_name\fP parameter in the software definition file. 
+Notice the Full Name for Firefox: \fBMozilla Firefox 17.0.0 (x86 en\-US)\fP\&. +That\(aqs exactly what\(aqs in the \fBfull_name\fP parameter in the software +definition file. .sp -If any of the software insalled on the machine matches one of the software -definition files in the repository the full_name will be automatically renamed -to the package name. The example below shows the \fBpkg.list_pkgs\fP for a -machine that already has Mozilla Firefox 17.0.1 installed. -.INDENT 0.0 +If any of the software installed on the machine matches one of the software +definition files in the repository, the full_name will be automatically +renamed to the package name. The example below shows the \fBpkg.list_pkgs\fP +for a machine that already has Mozilla Firefox 17.0.1 installed. +.INDENT 7.0 .INDENT 3.5 .sp .nf @@ -363515,66 +364532,97 @@ test\-2008: .UNINDENT .sp \fBIMPORTANT:\fP -.INDENT 0.0 +.INDENT 7.0 .INDENT 3.5 -The version number and \fBfull_name\fP need to match the output -from \fBpkg.list_pkgs\fP so that the status can be verified when running +The version number and \fBfull_name\fP need to match the output from +\fBpkg.list_pkgs\fP so that the status can be verified when running a highstate. .UNINDENT .UNINDENT .sp \fBNOTE:\fP -.INDENT 0.0 +.INDENT 7.0 .INDENT 3.5 It is still possible to successfully install packages using -\fBpkg.install\fP even if they don\(aqt match. This can make troubleshooting -difficult so be careful. +\fBpkg.install\fP, even if the \fBfull_name\fP or the version number don\(aqt +match. However, this can make troubleshooting issues difficult, so be +careful. .UNINDENT .UNINDENT -.INDENT 0.0 -.TP -.B param str installer -The path to the \fB\&.exe\fP or \fB\&.msi\fP to use to install the -package. This can be a path or a URL. If it is a URL or a salt path -(salt://), the package will be cached locally and then executed. If it is a -path to a file on disk or a file share, it will be executed directly. -.TP -.B param str install_flags -Any flags that need to be passed to the installer to -make it perform a silent install. These can often be found by adding \fB/?\fP -or \fB/h\fP when running the installer from the command\-line. A great resource -for finding these silent install flags can be found on the WPKG project\(aqs \fI\%wiki\fP: -.UNINDENT .sp -Salt will not return if the installer is waiting for user input so these are -important. -.INDENT 0.0 +\fBTIP:\fP +.INDENT 7.0 +.INDENT 3.5 +To force salt to display the full name when there\(aqs already an existing +package definition file on the system, you can pass a bogus \fBsaltenv\fP +parameter to the command like so: \fBpkg.list_pkgs saltenv=NotARealEnv\fP +.UNINDENT +.UNINDENT +.TP +.B param str installer +The path to the \fB\&.exe\fP or \fB\&.msi\fP to use to install the package. This can +be a path or a URL. If it is a URL or a salt path (\fBsalt://\fP), the package +will be cached locally and then executed. If it is a path to a file on disk +or a file share, it will be executed directly. +.sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +If storing software in the same location as the winrepo it is best +practice to place each installer in its own directory rather than the +root of winrepo. Then you can place your package definition file in the +same directory. It is best practice to name the file \fBinit.sls\fP\&. This +will be picked up by \fBpkg.refresh_db\fP and processed properly. 
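.sp
For example, an illustrative layout (the package and installer names are
placeholders) might look like:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
/srv/salt/win/repo\-ng/
    mypackage/
        init.sls
        mypackage\-1.0\-setup.exe
.ft P
.fi
.UNINDENT
.UNINDENT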
+.UNINDENT +.UNINDENT +.TP +.B param str install_flags +Any flags that need to be passed to the installer to make it perform a +silent install. These can often be found by adding \fB/?\fP or \fB/h\fP when +running the installer from the command\-line. A great resource for finding +these silent install flags can be found on the WPKG project\(aqs \fI\%wiki\fP: +.sp +\fBWARNING:\fP +.INDENT 7.0 +.INDENT 3.5 +Salt will not return if the installer is waiting for user input so it is +imperative that the software package being installed has the ability to +install silently. +.UNINDENT +.UNINDENT .TP .B param str uninstaller -The path to the program used to uninstall this software. -This can be the path to the same \fIexe\fP or \fImsi\fP used to install the -software. It can also be a GUID. You can find this value in the registry -under the following keys: +The path to the program used to uninstall this software. This can be the +path to the same \fIexe\fP or \fImsi\fP used to install the software. It can also be +a GUID. You can find this value in the registry under the following keys: .INDENT 7.0 +.INDENT 3.5 +.INDENT 0.0 .IP \(bu 2 Software\eMicrosoft\eWindows\eCurrentVersion\eUninstall .IP \(bu 2 Software\eWow6432None\eMicrosoft\eWindows\eCurrentVersion\eUninstall .UNINDENT +.UNINDENT +.UNINDENT .TP .B param str uninstall_flags -Any flags that need to be passed to the uninstaller -to make it perform a silent uninstall. These can often be found by adding -\fB/?\fP or \fB/h\fP when running the uninstaller from the command\-line. A great -resource for finding these silent install flags can be found on the WPKG -project\(aqs \fI\%wiki\fP: +Any flags that need to be passed to the uninstaller to make it perform a +silent uninstall. These can often be found by adding \fB/?\fP or \fB/h\fP when +running the uninstaller from the command\-line. A great resource for finding +these silent install flags can be found on the WPKG project\(aqs \fI\%wiki\fP: +.sp +\fBWARNING:\fP +.INDENT 7.0 +.INDENT 3.5 +Salt will not return if the uninstaller is waiting for user input so it +is imperative that the software package being uninstalled has the +ability to uninstall silently. +.UNINDENT .UNINDENT .sp -Salt will not return if the uninstaller is waiting for user input so these are -important. -.sp Here are some examples of installer and uninstaller settings: -.INDENT 0.0 +.INDENT 7.0 .INDENT 3.5 .sp .nf @@ -363593,8 +364641,9 @@ Here are some examples of installer and uninstaller settings: .UNINDENT .UNINDENT .sp -Alternatively the \fBuninstaller\fP can also simply repeat the URL of the msi file. -.INDENT 0.0 +Alternatively the \fBuninstaller\fP can also simply repeat the URL of an msi +file: +.INDENT 7.0 .INDENT 3.5 .sp .nf @@ -363612,52 +364661,61 @@ Alternatively the \fBuninstaller\fP can also simply repeat the URL of the msi fi .fi .UNINDENT .UNINDENT -.INDENT 0.0 .TP .B param msiexec -This tells salt to use \fBmsiexec /i\fP to install the -package and \fBmsiexec /x\fP to uninstall. This is for \fI\&.msi\fP installations. -Possible options are: True, False or path to msiexec on your system +This tells salt to use \fBmsiexec /i\fP to install the package and +\fBmsiexec /x\fP to uninstall. This is for \fB\&.msi\fP installations. 
Possible +options are: True, False or the path to \fBmsiexec.exe\fP on your system .INDENT 7.0 -.TP -.B 7zip: -.INDENT 7.0 -.TP -.B \(aq9.20.00.0\(aq: -installer: salt://win/repo/7zip/7z920\-x64.msi -full_name: 7\-Zip 9.20 (x64 edition) -reboot: False -install_flags: \(aq/qn /norestart\(aq -msiexec: \(aqC:WindowsSystem32msiexec.exe\(aq -uninstaller: salt://win/repo/7zip/7z920\-x64.msi -uninstall_flags: \(aq/qn /norestart\(aq +.INDENT 3.5 +.sp +.nf +.ft C +7zip: + \(aq9.20.00.0\(aq: + installer: salt://win/repo/7zip/7z920\-x64.msi + full_name: 7\-Zip 9.20 (x64 edition) + reboot: False + install_flags: \(aq/qn /norestart\(aq + msiexec: \(aqC:\eWindows\eSystem32\emsiexec.exe\(aq + uninstaller: salt://win/repo/7zip/7z920\-x64.msi + uninstall_flags: \(aq/qn /norestart\(aq +.ft P +.fi .UNINDENT .UNINDENT .TP -.B param str arch -This selects which \fBmsiexec.exe\fP to use. Possible values: -\fBx86\fP, \fBx64\fP -.TP .B param bool allusers -This parameter is specific to \fI\&.msi\fP installations. It -tells \fImsiexec\fP to install the software for all users. The default is True. +This parameter is specific to \fB\&.msi\fP installations. It tells \fBmsiexec\fP +to install the software for all users. The default is \fBTrue\fP\&. .TP .B param bool cache_dir -If true when installer URL begins with salt://, the -entire directory where the installer resides will be recursively cached. -This is useful for installers that depend on other files in the same -directory for installation. +If \fBTrue\fP and the installer URL begins with \fBsalt://\fP, the entire +directory where the installer resides will be recursively cached. This is +useful for installers that depend on other files in the same directory for +installation. +.sp +\fBWARNING:\fP +.INDENT 7.0 +.INDENT 3.5 +Be aware that all files and directories in the same location as the +installer file will be copied down to the minion. If you place your +installer file in the root of winrepo (\fB/srv/salt/win/repo\-ng\fP) and +\fBcache_dir: True\fP the entire contents of winrepo will be cached to +the minion. Therefore, it is best practice to place your installer files +in a subdirectory if they are to be stored in winrepo. +.UNINDENT +.UNINDENT .TP .B param str cache_file -When installer URL begins with salt://, this indicates single file to copy -down for use with the installer. Copied to the same location as the -installer. Use this over \fBcache_dir\fP if there are many files in the +When the installer URL begins with \fBsalt://\fP, this indicates a single file +to copy down for use with the installer. It is copied to the same location +as the installer. Use this over \fBcache_dir\fP if there are many files in the directory and you only need a specific file and don\(aqt want to cache additional files that may reside in the installer directory. -.UNINDENT .sp Here\(aqs an example for a software package that has dependent files: -.INDENT 0.0 +.INDENT 7.0 .INDENT 3.5 .sp .nf @@ -363673,24 +364731,22 @@ sqlexpress: .fi .UNINDENT .UNINDENT -.INDENT 0.0 .TP .B param bool use_scheduler -If true, windows will use the task scheduler to run -the installation. This is useful for running the salt installation itself as -the installation process kills any currently running instances of salt. +If \fBTrue\fP, Windows will use the task scheduler to run the installation. +This is useful for running the Salt installation itself as the installation +process kills any currently running instances of Salt. 
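.sp
As a hypothetical sketch, a definition that upgrades Salt itself would
typically set this flag; the version, installer URL, flags and uninstaller
path below are placeholders and should be adjusted for your repository:
.INDENT 7.0
.INDENT 3.5
.sp
.nf
.ft C
# hypothetical example \-\- version, URL and flags are placeholders
saltminion:
  \(aq2017.7.5\(aq:
    full_name: Salt Minion 2017.7.5
    installer: salt://win/repo\-ng/salt/Salt\-Minion\-2017.7.5\-AMD64\-Setup.exe
    install_flags: \(aq/S\(aq
    uninstaller: \(aq%PROGRAMFILES%\eSalt\euninst.exe\(aq
    uninstall_flags: \(aq/S\(aq
    msiexec: False
    use_scheduler: True
.ft P
.fi
.UNINDENT
.UNINDENT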
.TP .B param str source_hash -This tells salt to compare a hash sum of the installer -.UNINDENT +This tells Salt to compare a hash sum of the installer to the provided hash +sum before execution. The value can be formatted as +\fB=\fP, or it can be a URI to a file containing the +hash sum. .sp -to the provided hash sum before execution. The value can be formatted as -\fBhash_algorithm=hash_sum\fP, or it can be a URI to a file containing the hash -sum. For a list of supported algorithms, see the \fI\%hashlib documentation\fP\&. .sp Here\(aqs an example of source_hash usage: -.INDENT 0.0 +.INDENT 7.0 .INDENT 3.5 .sp .nf @@ -363708,7 +364764,6 @@ messageanalyzer: .fi .UNINDENT .UNINDENT -.INDENT 0.0 .TP .B param bool reboot Not implemented @@ -364687,7 +365742,7 @@ year, month, and version. Examples include \fB2016.11.8\fP and \fB2017.7.2\fP\&. .INDENT 0.0 .INDENT 3.5 GitHub will open pull requests against Salt\(aqs main branch, \fBdevelop\fP, -byndefault. Be sure to check which branch is selected when creating the +by default. Be sure to check which branch is selected when creating the pull request. .UNINDENT .UNINDENT @@ -368104,7 +369159,7 @@ external resource, like a cloud virtual machine. This decorator is not normally used by developers outside of the Salt core team. .sp \fI@destructiveTest\fP \-\- Marks a test as potentially destructive. It will not be run -by the test runner unles the \fB\-run\-destructive\fP test is expressly passed. +by the test runner unless the \fB\-run\-destructive\fP test is expressly passed. .sp \fI@requires_network\fP \-\- Requires a network connection for the test to operate successfully. If a network connection is not detected, the test will not run. @@ -370362,10 +371417,10 @@ foo Install and configure the FOO service. -\&.. note:: +**NOTE** - See the full \(gaSalt Formulas installation and usage instructions - \(ga_. +See the full \(gaSalt Formulas installation and usage instructions +\(ga_. Available states ================ @@ -371752,7 +372807,7 @@ Get human readable message from Python Exception .SS The Salt Fileserver and Client .SS Introduction .sp -Salt has a modular fileserver, and mulitple client classes which are used to +Salt has a modular fileserver, and multiple client classes which are used to interact with it. This page serves as a developer\(aqs reference, to help explain how the fileserver and clients both work. .SS Fileserver @@ -393798,7 +394853,7 @@ Added module execution support for disk.iostat Added Memcache booster for the minion data cache. Memcache is an additional cache layer that keeps a limited amount of data fetched from the minion data cache for a limited period of time in memory that -makes cache operations faster. It doesn\(aqt make much sence for the \fBlocalfs\fP +makes cache operations faster. It doesn\(aqt make much sense for the \fBlocalfs\fP cache driver but helps for more complex drivers like \fBconsul\fP\&. For more details see \fBmemcache_expire_seconds\fP and other \fBmemcache_*\fP options in the master config reverence. 
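.sp
For instance (illustrative value only), memcache can be switched on in the
master configuration by giving cached entries a non\-zero lifetime:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
# illustrative value; see the memcache_* options in the master
# configuration reference for the full list
memcache_expire_seconds: 60
.ft P
.fi
.UNINDENT
.UNINDENT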
@@ -399206,7 +400261,7 @@ d146fd029c Update win_pki.py ef8e3ef569 Update win_pki.py .UNINDENT .IP \(bu 2 -\fBPR\fP \fI\%#41557\fP: (\fIdmurphy18\fP) Add symbolic link for salt\-proxy service similar to other serivce files +\fBPR\fP \fI\%#41557\fP: (\fIdmurphy18\fP) Add symbolic link for salt\-proxy service similar to other service files @ \fI2017\-06\-06T17:13:52Z\fP .INDENT 2.0 .IP \(bu 2 @@ -399560,7 +400615,7 @@ c93f112c9b Updating Nova Module to include use_keystone Auth .IP \(bu 2 66ab1e5184 Re\-adding neutron dependency check .IP \(bu 2 -cce07eefc2 Updating Neutron module to suport KeystoneAuth +cce07eefc2 Updating Neutron module to support KeystoneAuth .UNINDENT .IP \(bu 2 \fBPR\fP \fI\%#41409\fP: (\fIgarethgreenaway\fP) Fixes to ipc transport @@ -399826,7 +400881,7 @@ e1a88e8bf7 Allowing test=True to be passed for salt.runner and salt.wheel when u \fBISSUE\fP \fI\%#41306\fP: (\fIlomeroe\fP) win_lgpo does not properly pack group policy version number in gpt.ini | refs: \fI\%#41319\fP \fI\%#41307\fP .IP \(bu 2 -\fBPR\fP \fI\%#41307\fP: (\fIlomeroe\fP) properly pack/unpack the verison numbers into a number +\fBPR\fP \fI\%#41307\fP: (\fIlomeroe\fP) properly pack/unpack the version numbers into a number | refs: \fI\%#41319\fP .UNINDENT .INDENT 2.0 @@ -401334,7 +402389,7 @@ da85326 Merge pull request \fI\%#42694\fP from gtmanfred/2016.11 1e13383 tornado api .UNINDENT .IP \(bu 2 -\fBPR\fP \fI\%#42655\fP: (\fIwhiteinge\fP) Reenable cpstats for rest_cherrypy +\fBPR\fP \fI\%#42655\fP: (\fIwhiteinge\fP) Re\-enable cpstats for rest_cherrypy @ \fI2017\-08\-03T20:44:10Z\fP .INDENT 2.0 .IP \(bu 2 @@ -401347,7 +402402,7 @@ f0f00fc Merge pull request \fI\%#42655\fP from whiteinge/rest_cherrypy\-reenable .IP \(bu 2 deb6316 Fix lint errors .IP \(bu 2 -6bd91c8 Reenable cpstats for rest_cherrypy +6bd91c8 Re\-enable cpstats for rest_cherrypy .UNINDENT .IP \(bu 2 \fBPR\fP \fI\%#42693\fP: (\fIgilbsgilbs\fP) Fix RabbitMQ tags not properly set. @@ -401677,13 +402732,13 @@ d9df97e Merge pull request \fI\%#42424\fP from goten4/2016.11 8c04840 Detect Server OS with a desktop release name .UNINDENT .IP \(bu 2 -\fBPR\fP \fI\%#42356\fP: (\fImeaksh\fP) Allow to check whether a function is available on the AliasesLoader wrapper +\fBPR\fP \fI\%#42356\fP: (\fImeaksh\fP) Allow checking whether a function is available on the AliasesLoader wrapper @ \fI2017\-07\-19T16:56:41Z\fP .INDENT 2.0 .IP \(bu 2 0a72e56 Merge pull request \fI\%#42356\fP from meaksh/2016.11\-AliasesLoader\-wrapper\-fix .IP \(bu 2 -915d942 Allow to check whether a function is available on the AliasesLoader wrapper +915d942 Allow checking whether a function is available on the AliasesLoader wrapper .UNINDENT .IP \(bu 2 \fBPR\fP \fI\%#42368\fP: (\fItwangboy\fP) Remove build and dist directories before install (2016.11) @@ -402553,7 +403608,7 @@ c337d52 Fix test data for test_get_serial, and a typo .IP \(bu 2 7f69613 test and lint fixes .IP \(bu 2 -8ee4843 Suppress output of crypt context and be more specifc with whitespace vs. serial +8ee4843 Suppress output of crypt context and be more specific with whitespace vs. serial .IP \(bu 2 61f817d Match serials based on output position (fix for non\-English languages) .UNINDENT @@ -402602,7 +403657,7 @@ e206c38 Fix master side scheduled jobs to return events .UNINDENT .SS Salt 2016.11.9 Release Notes .sp -Version 2016.11.9 is a bugfix release for 2016.11.0\&.] +Version 2016.11.9 is a bugfix release for 2016.11.0\&. 
.SS Changes for v2016.11.8..v2016.11.9 .sp Extended changelog courtesy of Todd Stansell (\fI\%https://github.com/tjstansell/salt\-changelogs\fP): @@ -402634,7 +403689,7 @@ Significate changes (PR #43708 & #45390, damon\-atkins) have been made to the pk .IP \(bu 2 \fBpkg.list_pkgs\fP now returns \(aqNot Found\(aq when the version is not found instead of \(aq(value not set)\(aq which matches the contents of the sls definitions. .IP \(bu 2 -\fBpkg.remove()\fP will wait upto 3 seconds (normally about a second) to detect changes in the registry after removing software, improving reporting of version changes. +\fBpkg.remove()\fP will wait up to 3 seconds (normally about a second) to detect changes in the registry after removing software, improving reporting of version changes. .IP \(bu 2 \fBpkg.remove()\fP can remove \fBlatest\fP software, if \fBlatest\fP is defined in sls definition. .IP \(bu 2 @@ -403245,7 +404300,7 @@ ead3c569e1 Bump deprecation warnings from Oxygen to Fluorine .IP \(bu 2 998d714ee7 Merge pull request \fI\%#44517\fP from whytewolf/publish_port_doc_missing .IP \(bu 2 -4b5855283a missed one place where i didnt chanbge master_port from my copy to publish_port +4b5855283a missed one place where i didn\(aqt change master_port from my copy to publish_port .IP \(bu 2 e4610baea5 update doc to have publish port .UNINDENT @@ -403518,7 +404573,7 @@ b65f4ea4ea switch salt\-jenkins over to saltstack .IP \(bu 2 cab54e34b5 Merge pull request \fI\%#44173\fP from twangboy/win_system_docs .IP \(bu 2 -8e111b413d Fix some of the wording and grammer errors +8e111b413d Fix some of the wording and grammar errors .IP \(bu 2 a12bc5ae41 Use google style docstrings .UNINDENT @@ -403885,7 +404940,7 @@ bbd9db4d00 One more encoding \fBISSUE\fP \fI\%#43581\fP: (\fIjcourington\fP) cherrypy stats issue | refs: \fI\%#44021\fP .IP \(bu 2 -\fBPR\fP \fI\%#42655\fP: (\fIwhiteinge\fP) Reenable cpstats for rest_cherrypy +\fBPR\fP \fI\%#42655\fP: (\fIwhiteinge\fP) Re\-enable cpstats for rest_cherrypy | refs: \fI\%#44021\fP .IP \(bu 2 \fBPR\fP \fI\%#33806\fP: (\fIcachedout\fP) Work around upstream cherrypy bug @@ -404142,7 +405197,7 @@ ea8d273c2b Merge pull request \fI\%#43768\fP from vutny/fix\-pylint\-deprecation f8b3fa9da1 Merge branch \(aq2016.11\(aq into fix\-pylint\-deprecation\-warnings .UNINDENT .IP \(bu 2 -\fBPR\fP \fI\%#43772\fP: (\fIgtmanfred\fP) dont print Minion not responding with quiet +\fBPR\fP \fI\%#43772\fP: (\fIgtmanfred\fP) don\(aqt print Minion not responding with quiet @ \fI2017\-09\-27T15:39:18Z\fP .INDENT 2.0 .IP \(bu 2 @@ -404153,7 +405208,7 @@ f8b3fa9da1 Merge branch \(aq2016.11\(aq into fix\-pylint\-deprecation\-warnings .IP \(bu 2 1a8cc60bb4 Merge pull request \fI\%#43772\fP from gtmanfred/2016.11 .IP \(bu 2 -0194c60960 dont print Minion not responding with quiet +0194c60960 don\(aqt print Minion not responding with quiet .UNINDENT .IP \(bu 2 \fBPR\fP \fI\%#43747\fP: (\fIrallytime\fP) Add GPG Verification section to Contributing Docs @@ -425998,7 +427053,7 @@ a793c19 Avoid extraneous newline character added in last environment variable @ \fI2015\-06\-05T22:32:25Z\fP .INDENT 2.0 .IP \(bu 2 -\fBPR\fP \fI\%#24441\fP: (\fIarthurlogilab\fP) [doc] Alignement fix on external_auth documentation +\fBPR\fP \fI\%#24441\fP: (\fIarthurlogilab\fP) [doc] Alignment fix on external_auth documentation | refs: \fI\%#24456\fP .UNINDENT .INDENT 2.0 @@ -426007,7 +427062,7 @@ ced558a Merge pull request \fI\%#24456\fP from rallytime/\fI\%bp\-24441\fP .IP \(bu 2 7002855 yaml indentations should be 2 spaces .IP 
\(bu 2 -21b51ab [doc] Alignement fix on external_auth documentation +21b51ab [doc] Alignment fix on external_auth documentation .UNINDENT .IP \(bu 2 \fBPR\fP \fI\%#24398\fP: (\fIkiorky\fP) VirtualName for states.apt diff --git a/doc/ref/clouds/all/index.rst b/doc/ref/clouds/all/index.rst index 3ec40b0ed7..cdd3985537 100644 --- a/doc/ref/clouds/all/index.rst +++ b/doc/ref/clouds/all/index.rst @@ -1,8 +1,8 @@ .. _all-salt.clouds: -=============================== -Full list of Salt Cloud modules -=============================== +============= +cloud modules +============= .. currentmodule:: salt.cloud.clouds diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index 4b98cb8861..003d6eb047 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -4515,6 +4515,25 @@ Recursively merge lists by aggregating them instead of replacing them. pillar_merge_lists: False +.. conf_master:: pillar_includes_override_sls + +``pillar_includes_override_sls`` +******************************** + +.. versionadded:: 2017.7.6,2018.3.1 + +Default: ``False`` + +Prior to version 2017.7.3, keys from :ref:`pillar includes ` +would be merged on top of the pillar SLS. Since 2017.7.3, the includes are +merged together and then the pillar SLS is merged on top of that. + +Set this option to ``True`` to return to the old behavior. + +.. code-block:: yaml + + pillar_includes_override_sls: True + .. _pillar-cache-opts: Pillar Cache Options diff --git a/doc/ref/proxy/all/index.rst b/doc/ref/proxy/all/index.rst index b03409bfe2..d986cd5b67 100644 --- a/doc/ref/proxy/all/index.rst +++ b/doc/ref/proxy/all/index.rst @@ -12,6 +12,7 @@ proxy modules cimc chronos + cimc cisconso dummy esxi diff --git a/doc/ref/proxy/all/salt.proxy.cimc.rst b/doc/ref/proxy/all/salt.proxy.cimc.rst index 2c76fb3440..ec4b862633 100644 --- a/doc/ref/proxy/all/salt.proxy.cimc.rst +++ b/doc/ref/proxy/all/salt.proxy.cimc.rst @@ -1,6 +1,5 @@ -=================== -salt.proxy.cimc -=================== +salt.proxy.cimc module +====================== .. automodule:: salt.proxy.cimc :members: diff --git a/doc/ref/proxy/all/salt.proxy.panos.rst b/doc/ref/proxy/all/salt.proxy.panos.rst index a2db497eba..695bcd4a0b 100644 --- a/doc/ref/proxy/all/salt.proxy.panos.rst +++ b/doc/ref/proxy/all/salt.proxy.panos.rst @@ -1,5 +1,5 @@ salt.proxy.panos module -========================= +======================= .. automodule:: salt.proxy.panos :members: diff --git a/doc/topics/installation/rhel.rst b/doc/topics/installation/rhel.rst index 2372efa7c8..d3370afe3c 100644 --- a/doc/topics/installation/rhel.rst +++ b/doc/topics/installation/rhel.rst @@ -49,17 +49,13 @@ the SaltStack Repository. Installation from the Community-Maintained Repository ===================================================== -Beginning with version 0.9.4, Salt has been available in `EPEL`_. For -RHEL/CentOS 5, `Fedora COPR`_ is a single community repository that provides -Salt packages due to the removal from EPEL5. +Beginning with version 0.9.4, Salt has been available in `EPEL`_. .. note:: - Packages in these repositories are built by community, and it can - take a little while until the latest stable SaltStack release become - available. + Packages in this repository are built by community, and it can take a little + while until the latest stable SaltStack release become available. .. _`EPEL`: http://fedoraproject.org/wiki/EPEL -.. 
_`Fedora COPR`: https://copr.fedorainfracloud.org/coprs/saltstack/salt-el5/ RHEL/CentOS 6 and 7, Scientific Linux, etc. ------------------------------------------- @@ -146,26 +142,13 @@ ZeroMQ 4 ======== We recommend using ZeroMQ 4 where available. SaltStack provides ZeroMQ 4.0.5 -and pyzmq 14.5.0 in the :ref:`SaltStack Repository ` -as well as a separate `zeromq4 COPR`_ repository. - -.. _`zeromq4 COPR`: http://copr.fedorainfracloud.org/coprs/saltstack/zeromq4/ +and ``pyzmq`` 14.5.0 in the :ref:`SaltStack Repository +`. If this repository is added *before* Salt is installed, then installing either ``salt-master`` or ``salt-minion`` will automatically pull in ZeroMQ 4.0.5, and additional steps to upgrade ZeroMQ and pyzmq are unnecessary. -.. warning:: RHEL/CentOS 5 Users - Using COPR repos on RHEL/CentOS 5 requires that the ``python-hashlib`` - package be installed. Not having it present will result in checksum errors - because YUM will not be able to process the SHA256 checksums used by COPR. - -.. note:: - For RHEL/CentOS 5 installations, if using the SaltStack repo or Fedora COPR - to install Salt (as described :ref:`above `), - then it is not necessary to enable the `zeromq4 COPR`_, because those - repositories already include ZeroMQ 4. - Package Management ================== diff --git a/doc/topics/pillar/index.rst b/doc/topics/pillar/index.rst index 1a3ee1b3c5..f2936256da 100644 --- a/doc/topics/pillar/index.rst +++ b/doc/topics/pillar/index.rst @@ -312,6 +312,8 @@ Since both pillar SLS files contained a ``bind`` key which contained a nested dictionary, the pillar dictionary's ``bind`` key contains the combined contents of both SLS files' ``bind`` keys. +.. _pillar-include: + Including Other Pillars ======================= diff --git a/doc/topics/releases/2017.7.5.rst b/doc/topics/releases/2017.7.5.rst index 05401a001a..d0676d873c 100644 --- a/doc/topics/releases/2017.7.5.rst +++ b/doc/topics/releases/2017.7.5.rst @@ -3,3 +3,2056 @@ Salt 2017.7.5 Release Notes =========================== Version 2017.7.5 is a bugfix release for :ref:`2017.7.0 `. 
+ +Changes for v2017.7.4..v2017.7.5 +---------------------------------------------------------------- + +Extended changelog courtesy of Todd Stansell (https://github.com/tjstansell/salt-changelogs): + +*Generated at: 2018-03-19T20:32:02Z* + +Statistics: + +- Total Merges: **211** +- Total Issue references: **64** +- Total PR references: **253** + +Changes: + + +- **PR** `#46577`_: (*gtmanfred*) Fix npm issue + @ *2018-03-19T11:51:04Z* + + - **PR** `#884`_: (*dcolish*) Resolve `#789`_, `#670`_ + | refs: `#46577`_ + * cdd768fa4d Merge pull request `#46577`_ from gtmanfred/2017.7.5 + * 78cbf7b5cd Fix npm issue + + * c76f7eb028 enable debug logging on the minionlog + +- **PR** `#46551`_: (*terminalmage*) Fix failing pkg integration test on OpenSUSE + @ *2018-03-19T11:50:12Z* + + * e6682c660c Merge pull request `#46551`_ from terminalmage/salt-jenkins-885 + * 703b5e7e65 Change versionadded to show that 2018.3.0 will not have this function + + * 010d260d06 Rewrite failing Suse pkg integration test + + * f3f5dec239 zypper.py: fix version argument being ignored + + * 214f2d6ad3 Add pkg.list_repo_pkgs to zypper.py + +- **PR** `#46563`_: (*gtmanfred*) virtualenv version too old for python3.6 + @ *2018-03-15T20:17:16Z* + + - **ISSUE** `#886`_: (*j0nes2k*) Add MAILTO command to cron state + | refs: `#46563`_ + * bd62699ccb Merge pull request `#46563`_ from gtmanfred/2017.7.5 + * 8d5ab72983 virtualenv version too old for python3.6 + +- **PR** `#46561`_: (*gtmanfred*) disable verbose + @ *2018-03-15T16:36:41Z* + + * 2916708124 Merge pull request `#46561`_ from gtmanfred/2017.7.5 + * 2c39ac6dfb disable verbose + +- **PR** `#46537`_: (*rallytime*) Back-port `#46529`_ to 2017.7.5 + @ *2018-03-14T14:47:28Z* + + - **PR** `#46529`_: (*gtmanfred*) retry if there is a segfault + | refs: `#46537`_ + * ee3bff6e32 Merge pull request `#46537`_ from rallytime/`bp-46529`_ + * 289c7a228f retry if there is a segfault + +- **PR** `#46519`_: (*rallytime*) Update man pages for 2017.7.5 + @ *2018-03-13T20:00:51Z* + + * 1271536a89 Merge pull request `#46519`_ from rallytime/man-pages-2017.7.5 + * 782a5584f5 Update man pages for 2017.7.5 + +- **PR** `#46493`_: (*terminalmage*) salt-call: don't re-use initial pillar if CLI overrides passed + @ *2018-03-12T20:41:52Z* + + - **ISSUE** `#46207`_: (*seanjnkns*) Issue `#44034`_ still unresolved + | refs: `#46493`_ + - **ISSUE** `#44034`_: (*seanjnkns*) salt-call pillar overrides broken in 2016.11.8 and 2017.7.2 + | refs: `#44483`_ + - **PR** `#44483`_: (*terminalmage*) salt-call: account for instances where __pillar__ is empty + | refs: `#46493`_ + * 0e90c8ca6f Merge pull request `#46493`_ from terminalmage/issue46207 + * f06ff68f10 salt-call: don't re-use initial pillar if CLI overrides passed + +- **PR** `#46450`_: (*gtmanfred*) load grains for salt.cmd runner + @ *2018-03-12T18:52:22Z* + + * b11a8fc8e0 Merge pull request `#46450`_ from gtmanfred/salt_runner + * 7974ff7264 load grains for salt.cmd runner + +- **PR** `#46337`_: (*gtmanfred*) Fix using names with listen and listen_in + @ *2018-03-12T18:50:00Z* + + - **ISSUE** `#30115`_: (*gtmanfred*) [BUG] listen does not appear to respect the special names directive + | refs: `#46337`_ + * 22d753364b Merge pull request `#46337`_ from gtmanfred/2017.7 + * d6d9e36359 add tests for names and listen/listen_in + + * 3f8e0db572 let listen_in work with names + + * 7161f4d4df fix listen to be able to handle names + +- **PR** `#46413`_: (*meaksh*) Explore 'module.run' state module output in depth to catch "result" properly + @ 
*2018-03-12T18:49:07Z* + + * b7191b8782 Merge pull request `#46413`_ from meaksh/2017.7-explore-result-in-depth + * 885751634e Add new unit test to check state.apply within module.run + + * 9f19ad5264 Rename and fix recursive method + + * 1476ace558 Fix Python3 and pylint issue + + * 726ca3044d Explore 'module.run' response to catch the 'result' in depth + +- **PR** `#46496`_: (*gtmanfred*) more test kitchen clean up + @ *2018-03-12T18:28:34Z* + + * 02a79a2014 Merge pull request `#46496`_ from gtmanfred/kitchen + * da002f78d0 include virtualenv path for py3 windows + + * fe2efe03ea remove duplicate setup + +- **PR** `#46330`_: (*bdrung*) Fix ValueError for template in AppsV1beta1DeploymentSpec + @ *2018-03-12T16:56:18Z* + + - **ISSUE** `#46329`_: (*bdrung*) test_create_deployments fails with python-kubernetes 4.0.0 + | refs: `#46330`_ + * 5c4c182d75 Merge pull request `#46330`_ from bdrung/fix_kubernetes_test_create_deployments + * 5008c53c44 Fix ValueError for template in AppsV1beta1DeploymentSpec + +- **PR** `#46482`_: (*rongshengfang*) Fix KeyError in salt/states/boto_ec2.py + @ *2018-03-12T15:13:13Z* + + - **ISSUE** `#46479`_: (*rongshengfang*) boto_ec2.instance_present throwing KeyError exception when associating EIP to an existing instance + | refs: `#46482`_ + * c7e05d3ff4 Merge pull request `#46482`_ from rongshengfang/fix-keyerror-in-instance_present + * ed8c83e89a Fix KeyError in salt/states/boto_ec2.py when an EIP is being associated to an existing instance with the instance_present state. + +- **PR** `#46463`_: (*terminalmage*) Update requirements files to depend on mock>=2.0.0 + @ *2018-03-09T19:24:41Z* + + * 573d51afec Merge pull request `#46463`_ from terminalmage/mock-2.0 + * b958b4699c Update requirements files to depend on mock>=2.0.0 + +- **PR** `#46422`_: (*rallytime*) Back-port `#46300`_ to 2017.7 + @ *2018-03-09T19:19:25Z* + + - **ISSUE** `#46299`_: (*gclinch*) debconf module fails on Python 3 + | refs: `#46300`_ + - **PR** `#46300`_: (*gclinch*) Python 3 support for debconfmod (fixes `#46299`_) + | refs: `#46422`_ + * a154d35fc7 Merge pull request `#46422`_ from rallytime/`bp-46300`_ + * 829dfde8e8 Change stringutils path to old utils path for 2017.7 + + * 91db2e0782 Python 3 support + +- **PR** `#46320`_: (*mcalmer*) add warning about future config option change + @ *2018-03-09T17:48:29Z* + + * 2afaca17a1 Merge pull request `#46320`_ from mcalmer/warn-kubernetes + * c493ced415 add warning about future config option change + +- **PR** `#46449`_: (*bdrung*) Make documentation theme configurable + @ *2018-03-09T17:47:15Z* + + * c7f95581e3 Merge pull request `#46449`_ from bdrung/make-doc-theme-configurable + * 4a5da2d144 Make documentation theme configurable + +- **PR** `#46162`_: (*rallytime*) Add team-suse to CODEOWNERS file for zypper files + @ *2018-03-09T17:46:13Z* + + * 10ce0e9e20 Merge pull request `#46162`_ from rallytime/team-suse-zypper-owner + * 13a295a3b7 Add *pkg* and *snapper* to team-suse + + * 35c7b7b0d3 Add btrfs, xfs, yumpkg, and kubernetes file to team-suse + + * 485d777ac0 Add team-suse to CODEOWNERS file for zypper files + +- **PR** `#46434`_: (*gtmanfred*) split return key value correctly + @ *2018-03-09T17:45:21Z* + + * cac096b311 Merge pull request `#46434`_ from gtmanfred/highstate_return + * d18f1a55a7 fix pylint + + * 9e2c3f7991 split return key value correctly + +- **PR** `#46455`_: (*whytewolf*) .format remove fix for `#44452`_ + @ *2018-03-09T17:37:19Z* + + - **ISSUE** `#44452`_: (*konstest*) salt-cloud can't create snapshots, because 
there is a bug in the Unicode name of the virtual machine + | refs: `#46455`_ `#46455`_ + * 7dd71101ce Merge pull request `#46455`_ from whytewolf/Issue_44452_unicode_cloud + * 5fe474b1a8 .format remove fix for `#44452`_ + +- **PR** `#46428`_: (*twangboy*) Fix issue with dev env install on Windows + @ *2018-03-09T14:52:46Z* + + * 4c8d9026d3 Merge pull request `#46428`_ from twangboy/win_fix_reqs + * e7ab97cc17 Remove six as a hard dep for Salt + + * cc67e5c2ef Set six to 1.11.0 + +- **PR** `#46454`_: (*gtmanfred*) fix windows for kitchen + @ *2018-03-08T21:19:31Z* + + * e834d9a63b Merge pull request `#46454`_ from gtmanfred/kitchen + * b8ab8434a5 fix windows for kitchen + +- **PR** `#46452`_: (*gtmanfred*) make spm cache_dir instead of all cachedirs + @ *2018-03-08T21:12:20Z* + + - **ISSUE** `#46451`_: (*gmacon*) SPM fails to start with customized cache location + | refs: `#46452`_ + * 2886dca88f Merge pull request `#46452`_ from gtmanfred/spm_cache_dir + * 169cf7a4e2 make spm cache_dir instead of all cachedirs + +- **PR** `#46446`_: (*bdrung*) Fix various typos + @ *2018-03-08T21:11:47Z* + + * a188984cd9 Merge pull request `#46446`_ from bdrung/fix-typos + * 7e6e80be87 heat: Fix spelling mistake of environment + + * a3c54b50f6 Fix various spelling mistakes + +- **PR** `#46309`_: (*bdrung*) Support dynamic pillar_root environment + @ *2018-03-08T19:15:35Z* + + - **ISSUE** `#20581`_: (*notpeter*) Many environments: one pillar_root (all your envs are belong to base) + | refs: `#46309`_ + * e35fc5263c Merge pull request `#46309`_ from bdrung/dynamic-pillarenv + * 584b451fd1 Support dynamic pillar_root environment + +- **PR** `#46430`_: (*terminalmage*) Improve reliability/idempotence of file.blockreplace state + @ *2018-03-08T15:41:38Z* + + - **ISSUE** `#44032`_: (*PhilippeAB*) blockreplace marker_end isn't applied with newline + | refs: `#46430`_ + * 35fe9827fe Merge pull request `#46430`_ from terminalmage/issue44032 + * f9f187e915 Improve reliability/idempotence of file.blockreplace state + +- **PR** `#46429`_: (*twangboy*) Fix problem with __virtual__ in win_snmp + @ *2018-03-07T23:26:46Z* + + * 2bad0a21c0 Merge pull request `#46429`_ from twangboy/win_fix_snmp + * 8995a9b8de Fix problem with __virtual__ in win_snmp + +- **PR** `#46100`_: (*jfindlay*) Handle IPv6 scope parameter in resolv.conf + @ *2018-03-07T19:51:20Z* + + * 93a572f229 Merge pull request `#46100`_ from jfindlay/resolv_scope + * d5561bedaf tests.unit.grains.core add scoped IPv6 nameserver + + * 4e2e62d508 salt.utils.dns parse scope param for ipv6 servers + +- **PR** `#46420`_: (*bdrung*) Fix SSH client exception if SSH is not found + @ *2018-03-07T17:49:00Z* + + * 5acc1d5c54 Merge pull request `#46420`_ from bdrung/2017.7 + * e48c13d9e0 Fix SSH client exception if SSH is not found + +- **PR** `#46379`_: (*angeloudy*) TypeError: a bytes-like object is required, not 'str' + @ *2018-03-07T15:00:47Z* + + * ca6a76e317 Merge pull request `#46379`_ from angeloudy/2017.7 + * 3acb59c74c Merge branch '2017.7' into 2017.7 + + * d971e0c08b Fix indent + + * 269514683f Update http.py + + * 908c040ac3 Update http.py + + * 51ba3c135b Update http.py + + * 14aba24111 fix bytes-object required error in python 3 + +- **PR** `#46404`_: (*gtmanfred*) get 2017.7 ready to switch over to the new jenkins + @ *2018-03-07T14:29:30Z* + + * 73f9233557 Merge pull request `#46404`_ from gtmanfred/kitchen + * c56baa95a8 clone .git for the version tests + + * 3620611b5b fix unhold package for debian + + * 5219f7d2ba fix minion log path + +- **PR** 
`#46310`_: (*twangboy*) Update the Windows installer build scripts + @ *2018-03-06T20:21:58Z* + + - **ISSUE** `#46192`_: (*asymetrixs*) salt-log-setup: AttributeError 'NoneType' object has no attribute 'flush' + | refs: `#46310`_ `#46310`_ + * ca28cfd4e4 Merge pull request `#46310`_ from twangboy/win_update_installer_build + * bcf8b19566 Update the installer build + +- **PR** `#46316`_: (*twangboy*) Fix issues with the DSC module + @ *2018-03-06T20:16:18Z* + + * decccbeca3 Merge pull request `#46316`_ from twangboy/win_fix_dsc + * 2042d33d59 Fix issues with the DSC module + +- **PR** `#46394`_: (*Ch3LL*) Add mac py2 and py3 packages to mac installation docs + @ *2018-03-06T16:45:30Z* + + * 95586678c3 Merge pull request `#46394`_ from Ch3LL/mac_doc + * 158add6661 change oxdownload to oxdownload-{python_version} + + * 21aa848c89 Add mac py2 and py3 packages to mac installation docs + +- **PR** `#46338`_: (*rallytime*) Remove cmd.wait deprecation reference in docs + @ *2018-03-05T21:48:52Z* + + - **ISSUE** `#44831`_: (*kivoli*) cmd.wait deprecated but cannot replicate conditional execution with onchanges + | refs: `#46338`_ + * 07b5d09ac1 Merge pull request `#46338`_ from rallytime/`fix-44831`_ + * 90771da999 Remove cmd.wait deprecation reference in docs + +- **PR** `#46333`_: (*danlsgiga*) Fixes color parameter mismatch and handles 204 responses correctly + @ *2018-03-05T19:42:26Z* + + - **ISSUE** `#42438`_: (*ajoaugustine*) Failed to send message: hipchat-message + | refs: `#46333`_ + * 3849e7a085 Merge pull request `#46333`_ from danlsgiga/issue-42438 + * 3b13f37b44 Revert changes in the code and change docs instead + + * 38114a65d8 Fixes color parameter mismatch and handles 204 responses correctly + +- **PR** `#46322`_: (*terminalmage*) yamlify_arg: don't treat leading dashes as lists + @ *2018-03-05T15:40:17Z* + + - **ISSUE** `#44935`_: (*grinapo*) module.file.replace string seems to be mutated into arrays + | refs: `#46322`_ + * a8f2f1b063 Merge pull request `#46322`_ from terminalmage/issue44935 + * 85ac6a9893 yamlify_arg: don't treat leading dashes as lists + +- **PR** `#46327`_: (*samilaine*) Modify the way a FQDN is handled in the vmware cloud provider. + @ *2018-03-05T15:35:37Z* + + * da5c282cb2 Merge pull request `#46327`_ from samilaine/fix-vmware-cloud-fqdn + * 4b8dfb326f Modify the way a FQDN is handled in the vmware cloud provider. 
+ +- **PR** `#46318`_: (*terminalmage*) Skip type-checking for several gitfs/git_pillar/winrepo params + @ *2018-03-05T15:04:27Z* + + * 78c45d3786 Merge pull request `#46318`_ from terminalmage/squelch-warnings + * 5889b36646 Skip type-checking for several gitfs/git_pillar/winrepo params + +- **PR** `#46312`_: (*gtmanfred*) add module_dirs to salt ssh thin tarball + @ *2018-03-05T15:00:48Z* + + - **ISSUE** `#45535`_: (*whytewolf*) module_dirs left out salt-ssh, leaving custom ext_pillars and modules out of salt-ssh + | refs: `#46312`_ + * bb0d6fc263 Merge pull request `#46312`_ from gtmanfred/2017.7 + * 749ae580ed add module_dirs to salt ssh thin tarball + +- **PR** `#46242`_: (*redbaron4*) Pass env_vars to pip.freeze + @ *2018-03-05T14:53:13Z* + + - **ISSUE** `#46127`_: (*redbaron4*) pip.installed does not pass env_vars when calling freeze to check if package is already installed + | refs: `#46242`_ + * 88b5f7383d Merge pull request `#46242`_ from redbaron4/`fix-46127`_ + * 06dba51617 Make changes from review + + * 727ebe1056 Merge branch '2017.7' into `fix-46127`_ + + * 08d1ee8baf Fix Python3 test errors + + * aa9d709015 Pass env_vars to pip.freeze + +- **PR** `#46265`_: (*Ch3LL*) Add username/password to profitbricks conf for cloud tests + @ *2018-03-02T21:40:22Z* + + * a0716643e4 Merge pull request `#46265`_ from Ch3LL/profit_cloud + * d4893eab4c Add username/password to profitbricks conf for cloud tests + +- **PR** `#46306`_: (*rallytime*) Back-port `#46256`_ to 2017.7 + @ *2018-03-02T21:37:26Z* + + - **PR** `#46256`_: (*rallytime*) Don't install msgpack 0.5.5 + | refs: `#46306`_ + * ed7bffa7e0 Merge pull request `#46306`_ from rallytime/`bp-46256`_ + * 6439bce4a8 Don't install msgpack 0.5.5 + +- **PR** `#46208`_: (*terminalmage*) Blacklist os.umask + @ *2018-03-02T18:46:07Z* + + * 8c2c4e3316 Merge pull request `#46208`_ from terminalmage/audit-umask-usage + * 9c92aadce8 Disable blacklisted-function check for legitimate uses + + * 58a11aaa26 Disable pylint check in salt-ssh shim + + * ecadf67659 Blacklist os.umask + + * 31b1d98fcb Replace direct use of os.umask with use of existing context manager + + * 82ce546e18 Prevent failed os.makedirs from leaving modified umask in place + +- **PR** `#46293`_: (*eliasp*) Fix Python3 comparison `TypeError` in `salt.modules.upstart` + @ *2018-03-02T16:36:10Z* + + - **PR** `#44624`_: (*eliasp*) Fix Traceback when using the `service.enabled` state on non-booted systems + | refs: `#46293`_ + * 978e869490 Merge pull request `#46293`_ from eliasp/2017.7-44624-py3-compat + * 2e08b0d9c8 Fix Python3 comparison `TypeError` in `salt.modules.upstart` + +- **PR** `#46264`_: (*terminalmage*) Fix incorrect merge conflict resolution + @ *2018-03-02T14:21:13Z* + + - **ISSUE** `#46128`_: (*Boulet-*) Mountpoint in git_pillar + | refs: `#46264`_ + * bee4a66d0c Merge pull request `#46264`_ from terminalmage/issue46128 + * 68000b7211 Fix incorrect merge conflict resolution + +- **PR** `#46296`_: (*vutny*) [DOC] Add missing params to `pillar.get` docstring + @ *2018-03-02T14:19:41Z* + + * 1e0b3aa348 Merge pull request `#46296`_ from vutny/doc-pillar-get + * 1faa8331e1 [DOC] Add missing params to `pillar.get` docstring + +- **PR** `#45874`_: (*GwiYeong*) fix for local client timeout bug + @ *2018-03-01T19:39:35Z* + + * c490a50452 Merge pull request `#45874`_ from GwiYeong/2017.7-local-client-hotfix + * 949aefc82b Merge branch '2017.7' into 2017.7-local-client-hotfix + + * 45d663f435 fix for local client timeout bug + +- **PR** `#46261`_: (*rallytime*) [2017.7] Merge 
forward from 2016.11 to 2017.7 + @ *2018-03-01T17:55:23Z* + + - **ISSUE** `#46178`_: (*wedge-jarrad*) mount.mounted forces remount when 'credentials=file' is specified as an option + | refs: `#46179`_ + - **ISSUE** `#45136`_: (*etfeet*) salt state mount.mounted remounts cephfs every time when setting secretfile=path/to/secretfile option + | refs: `#46179`_ + - **PR** `#46253`_: (*rallytime*) Update docbanner for SaltConf18 + - **PR** `#46179`_: (*wedge-jarrad*) Add credentials and secretfile to mount.mounted mount_invisible_keys + * 8e8a3a2897 Merge pull request `#46261`_ from rallytime/merge-2017.7 + * 8256ae5ee5 Merge branch '2016.11' into '2017.7' + + * 140ef4d6b9 Merge pull request `#46253`_ from rallytime/doc-banners + + * 07ed8c7db3 Update docbanner for SaltConf18 + + * 9fe86ee520 Merge pull request `#46179`_ from wedge-jarrad/cifs-remount-fix + + * 9ca25c4313 Add credentials and secretfile to mount.mounted mount_invisible_keys + +- **PR** `#46276`_: (*terminalmage*) salt.utils.docker.translate_input: operate on deepcopy of kwargs + @ *2018-03-01T15:37:44Z* + + - **ISSUE** `#44046`_: (*t2b*) docker_container.running states fail if the argument ulimits is set and a watch requisite is triggered + | refs: `#46276`_ + * 88a3166589 Merge pull request `#46276`_ from terminalmage/issue44046 + * a14d4daf8c salt.utils.docker.translate_input: operate on deepcopy of kwargs + +- **PR** `#46183`_: (*oeuftete*) Fix docker_container.running HostConfig Ulimits comparison + @ *2018-02-28T22:22:11Z* + + - **ISSUE** `#46182`_: (*oeuftete*) docker_container.running is sensitive to HostConfig Ulimits ordering + | refs: `#46183`_ + * da60399b8f Merge pull request `#46183`_ from oeuftete/fix-docker-container-running-host-config-ulimits + * 5b09644429 Sort lists from Ulimits before comparing + + * 0b80f02226 Update old dockerng doc ref + +- **PR** `#46260`_: (*terminalmage*) Normalize global git_pillar/winrepo config items + @ *2018-02-28T22:05:26Z* + + - **ISSUE** `#46259`_: (*terminalmage*) git_pillar_branch overrides branch defined in git_pillar configuration + | refs: `#46260`_ + - **ISSUE** `#46258`_: (*terminalmage*) git_pillar_base doesn't work for values when PyYAML loads them as int/float + | refs: `#46260`_ + * 509429f08c Merge pull request `#46260`_ from terminalmage/git_pillar + * b1ce2501fd Normalize global git_pillar/winrepo config items + +- **PR** `#46101`_: (*jfindlay*) In OpenRC exec module, make sure to ignore retcode on status + @ *2018-02-28T20:01:37Z* + + * a97a3e6fb0 Merge pull request `#46101`_ from jfindlay/openrc_ret + * 2eef3c65a6 tests.unit.modules.gentoo_service add retcode arg + + * 81ec66fd8b modules.gentoo_service handle stopped retcode + +- **PR** `#46254`_: (*rallytime*) Update enterprise banner + @ *2018-02-28T19:54:03Z* + + * 1a17593c05 Merge pull request `#46254`_ from rallytime/enterprise-banner + * f5fae3dedf Update enterprise banner + +- **PR** `#46250`_: (*terminalmage*) Add documentation to the fileserver runner + @ *2018-02-28T18:53:49Z* + + * 8c50ff32bd Merge pull request `#46250`_ from terminalmage/runner-docs + * 91b4895087 Add documentation to the fileserver runner + +- **PR** `#46243`_: (*racker-markh*) Don't ignore 'private_ips' unnecessarily + @ *2018-02-28T15:28:29Z* + + - **ISSUE** `#46215`_: (*racker-markh*) salt-cloud will only intermittently build rackspace cloud instances with purely private networks + | refs: `#46243`_ + * 53067cca43 Merge pull request `#46243`_ from racker-markh/fix-openstack-private-network-issue + * 50c1e140f0 Don't check deny 
private_ips already in the original list of private_ips + +- **PR** `#46239`_: (*terminalmage*) archive.extracted: don't check source file when if_missing path exists + @ *2018-02-28T15:01:36Z* + + - **ISSUE** `#46109`_: (*rombert*) archive.extracted takes a long time (> 4 minutes) even though directory exists + | refs: `#46239`_ + * 15405c8760 Merge pull request `#46239`_ from terminalmage/issue46109 + * 586d8b0dcf archive.extracted: don't check source file when if_missing path exists + +- **PR** `#46221`_: (*terminalmage*) Fix hanging tests in integration suite + @ *2018-02-27T21:32:25Z* + + * 633e1208e4 Merge pull request `#46221`_ from terminalmage/salt-jenkins-854 + * 0eb012659c Fix hanging tests in integration suite + +- **PR** `#46214`_: (*vutny*) [DOC] Replace `note` rST block for GitHub + @ *2018-02-27T17:42:37Z* + + * 7917277345 Merge pull request `#46214`_ from vutny/formulas-readme-formatting + * d702846961 [DOC] Replace `note` rST block for GitHub + +- **PR** `#46203`_: (*Ch3LL*) Add 2017.7.5 Release Notes File + @ *2018-02-26T21:17:48Z* + + * a2e099b744 Merge pull request `#46203`_ from Ch3LL/7.5_release + * 6ddf3246ce Add 2017.7.5 Release Notes File + +- **PR** `#46201`_: (*rallytime*) [2017.7] Merge forward from 2016.11 to 2017.7 + @ *2018-02-26T18:56:47Z* + + - **PR** `#46132`_: (*rallytime*) Update release versions for the 2016.11 branch + * 973b227818 Merge pull request `#46201`_ from rallytime/merge-2017.7 + * 9ac2101baa Merge branch '2016.11' into '2017.7' + + * a4c5417d23 Merge pull request `#46132`_ from rallytime/2016.11_update_version_doc + + * d2196b6df3 Update release versions for the 2016.11 branch + +- **PR** `#46139`_: (*bdrung*) Add os grains test cases for Debian/Ubuntu and fix oscodename on Ubuntu + @ *2018-02-26T16:44:04Z* + + - **ISSUE** `#34423`_: (*bdrung*) oscodename wrong on Debian 8 (jessie) + | refs: `#46139`_ + * 89cf2e5061 Merge pull request `#46139`_ from bdrung/os-grains + * 0b445f2a37 tests: Add unit tests for _parse_os_release() + + * f6069b77ed Fix osfinger grain on Debian + + * 8dde55a761 tests: Add os_grains test cases for Debian + + * ff02ab9937 tests: Add Ubuntu 17.10 (artful) os_grains test case + + * 77d5356aba Fix incorrect oscodename grain on Ubuntu + + * 7e62dc9fd2 tests: Support reading os-release files from disk + + * a92ec0db1b Make _parse_os_release() always callable + + * eee1fe5b38 tests: Dissolve _run_ubuntu_os_grains_tests + + * 1d6ef731fe tests: Deduplicate _run_os_grains_tests() + +- **PR** `#46133`_: (*rallytime*) Update release versions for the 2017.7 branch + @ *2018-02-26T16:42:43Z* + + * c8c71e75ca Merge pull request `#46133`_ from rallytime/2017.7_update_version_doc + * 0ed338e643 Update release versions for the 2017.7 branch + +- **PR** `#46185`_: (*terminalmage*) gitfs: Fix detection of base env when its ref is also mapped to a different env + @ *2018-02-26T14:52:16Z* + + - **ISSUE** `#46124`_: (*moremo*) GitFS saltenv ref won't pick up multiple of the same ref + | refs: `#46185`_ + * 390d592aa6 Merge pull request `#46185`_ from terminalmage/issue46124 + * 3b58dd0da0 gitfs: Fix detection of base env when its ref is also mapped to a different env + +- **PR** `#46148`_: (*rallytime*) [2017.7] Merge forward from 2017.7.3 to 2017.7 + @ *2018-02-23T19:21:38Z* + + * 705caa8cca Merge pull request `#46148`_ from rallytime/merge-2017.7 + * 25deebf7a6 Merge branch '2017.7.3' into '2017.7' + +- **PR** `#46137`_: (*damon-atkins*) [2017.7] update ec2 pillar arguments with better names + @ *2018-02-23T13:32:04Z* + + - **PR** 
`#45878`_: (*damon-atkins*) ec2_pillar update to fix finding instance-id + | refs: `#46137`_ `#46137`_ `#46137`_ + * 10a47dcbc4 Merge pull request `#46137`_ from damon-atkins/2017.7_fix_ec2_pillar2 + * 99e7f6a7d3 update ec2 pillar arguments with better names + +- **PR** `#46145`_: (*terminalmage*) 3 small fixes for runners/orchestration + @ *2018-02-22T22:11:11Z* + + - **ISSUE** `#46004`_: (*github-abcde*) opts file_roots gets overwritten with pillar_roots in orchestration run + | refs: `#46145`_ + * d74cb14557 Merge pull request `#46145`_ from terminalmage/issue46004 + * 467ff841cd pillarenv argument should default to None and not the value from opts + + * 2a185855ea Better solution for fixing the opts munging in pillar.show_pillar runner + + * e2c4702e0c Update tests to reflect changes to the SaltCacheLoader + + * f9301fcc34 Document behavior when orchestration runnner invoked with non-orch states + + * 9644579cd0 Instantiate the SaltCacheLoader's fileclient in the __init__ + + * f9a6c86e21 salt.runners.pillar.show_pillar: don't modify master opts + + * e0940a9fc4 Properly detect use of the state.orch alias and add orch jid to kwargs + +- **PR** `#46135`_: (*rallytime*) Back-port `#46088`_ to 2017.7 + @ *2018-02-22T15:11:14Z* + + - **PR** `#46088`_: (*rongzeng54*) fix kernel subpackages install bug + | refs: `#46135`_ + * 0398ce0482 Merge pull request `#46135`_ from rallytime/`bp-46088`_ + * 57a60f62a3 fix kernel subpackages install bug + +- **PR** `#46136`_: (*rallytime*) Back-port `#46115`_ to 2017.7 + @ *2018-02-21T19:17:23Z* + + - **ISSUE** `#45837`_: (*johje349*) Salt Cloud does not recognise all Digitalocean sizes + | refs: `#46115`_ + - **PR** `#46115`_: (*samodid*) update digitalocean salt-cloud driver + | refs: `#46136`_ + * 1fcbbd1e02 Merge pull request `#46136`_ from rallytime/`bp-46115`_ + * 0a481d707f update digitalocean salt-cloud driver + +- **PR** `#45911`_: (*twangboy*) LGPO Module: Convert reg values to unicode for debug + @ *2018-02-21T19:02:17Z* + + * 11e5e8eb86 Merge pull request `#45911`_ from twangboy/win_fix_lgpo_unicode + * bcde5cc625 Update log statement + + * e9fa53d3b7 Change the Invalid Data Message + + * c818d4b791 Convert reg values to unicode for debug + +- **PR** `#46123`_: (*gtmanfred*) If no pubkey is passed in openmode fail + @ *2018-02-21T19:01:47Z* + + - **ISSUE** `#46085`_: (*zmedico*) 2017.7.3 salt master with "open_mode: True" becomes unresponsive if minion submits empty public key + | refs: `#46123`_ + * 524a6a72a0 Merge pull request `#46123`_ from gtmanfred/2017.7 + * 8d36730ef7 If no pubkey is passed in openmode fail + +- **PR** `#46131`_: (*vutny*) [DOC] Fix code-blocks for reStructuredText + @ *2018-02-21T15:47:05Z* + + * e48fa58012 Merge pull request `#46131`_ from vutny/doc-formula-formatting + * d8fb051e44 [DOC] Fix code-blocks for reStructuredText + +- **PR** `#46118`_: (*rallytime*) Back-port `#44603`_ to 2017.7 + @ *2018-02-21T15:21:42Z* + + - **ISSUE** `#42763`_: (*xuhcc*) acme.cert state falsely reports about renewed certificate + | refs: `#44603`_ + - **ISSUE** `#40208`_: (*bewing*) Inconsistent state return when test=True + | refs: `#44603`_ + - **PR** `#44603`_: (*oarmstrong*) Fix acme state to correctly return on test + | refs: `#46118`_ + * 6cea44ee95 Merge pull request `#46118`_ from rallytime/`bp-44603`_ + * 2a2c23c66b Fix acme state to correctly return on test + +- **PR** `#46121`_: (*rallytime*) [2017.7] Merge forward from 2016.11 to 2017.7 + @ *2018-02-21T10:07:18Z* + + - **ISSUE** `#45910`_: (*lorengordon*) 2016.11.9: 
UnicodeDecodeError traceback in reg.present + | refs: `#46000`_ + - **ISSUE** `#45790`_: (*bdarnell*) Test with Tornado 5.0b1 + | refs: `#46066`_ + - **PR** `#46093`_: (*wedge-jarrad*) Fix contributing doc typo + - **PR** `#46076`_: (*rallytime*) Back-port `#46066`_ to 2016.11 + - **PR** `#46066`_: (*rallytime*) Pin tornado version in requirements file + | refs: `#46076`_ + - **PR** `#46011`_: (*terminalmage*) cmdmod.py: runas workaround for platforms that don't set a USER env var + - **PR** `#46000`_: (*terminalmage*) salt.states.reg.present: Prevent traceback when reg data is binary + - **PR** `#45992`_: (*bgridley*) Add vpc_peering_connection_id to describe_route_tables route_keys + - **PR** `#45467`_: (*twangboy*) Exclude hidden directories in pkg.refresh_db + * 16c382b55b Merge pull request `#46121`_ from rallytime/merge-2017.7 + * 4c2f504a85 Merge branch '2016.11' into '2017.7' + + * e197a0fbc5 Merge pull request `#46076`_ from rallytime/`bp-46066`_ + + * b94d73c53e Pin tornado version in requirements file + + * c72c1bde5f Merge pull request `#46093`_ from wedge-jarrad/contributing-doc-typo + + * 5a0fe104f7 Fix contributing doc typo + + * 3cb83ea87e Merge pull request `#45992`_ from bgridley/fix-routes-present-state + + * 679787699c Add vpc_peering_connection_id to describe_route_tables route_keys + + * 8a60635da0 Merge pull request `#46000`_ from terminalmage/issue45910 + + * 8cf13325ee salt.states.reg.present: Prevent traceback when reg data is binary + + * 1f44e285dc Merge pull request `#46011`_ from terminalmage/fix-solaris-runas + + * 8ee0a3a28b Move Solaris USER workaround up a bit + + * 13cdb52690 cmdmod.py: runas workaround for platforms that don't set a USER env var + + * 30fb8f7be0 Merge pull request `#45467`_ from twangboy/win_exclude_hidden + + * ea41215646 Make the regex pattern less greedy + + * 6d223cffa7 Add tip about passing bogus saltenv + + * 1282ae3a93 Skip hidden first + + * 437a457911 Skip hidden dirs in genrepo + + * 87dc554dc3 Add final updates to docs + + * 3646d5c897 Fix some docs formatting, add some warnings + + * 35c81faf5a Log the source_dir when caching the files + + * 91c3da8dfd Improve docs for pkg.refresh_db + + * 4803d92707 Add some documentation + + * 08b82e0875 Fix lint error, use raw + + * 2f712691cf Exclude hidden directories in pkg.refresh_db + +- **PR** `#46107`_: (*amendlik*) Add --assumeyes on YUM/DNF commands + @ *2018-02-20T22:52:06Z* + + - **ISSUE** `#46106`_: (*amendlik*) yumpkg.refresh_db hangs + | refs: `#46107`_ + * b92346645b Merge pull request `#46107`_ from amendlik/yumpkg-assumeyes + * 8d9a432fb2 Add --assumeyes to yum/dnf commands in yumpkg.refresh_db + +- **PR** `#46094`_: (*kstreee*) Fix memory leak + @ *2018-02-20T21:36:02Z* + + * 14fe423e0c Merge pull request `#46094`_ from kstreee/fix-memory-leak + * 48080a1bae Fixes memory leak, saltclients should be cleaned after used. + + * aba00805f4 Adds set_close_callback function to removes stream instance after closed from a set streams. 
+ +- **PR** `#46097`_: (*vutny*) [DOC] Put https link to the formulas doc page + @ *2018-02-20T17:07:39Z* + + - **ISSUE** `#13`_: (*thatch45*) Expand the stats module + | refs: `#46097`_ + * 320c2037e1 Merge pull request `#46097`_ from vutny/fix-https-link + * 2062fd0e5c [DOC] Put https link to the formulas doc page + +- **PR** `#46103`_: (*bdrung*) Fix skipping Kubernetes tests if client is not installed + @ *2018-02-20T16:33:42Z* + + * 0eb137fb4e Merge pull request `#46103`_ from bdrung/2017.7 + * dd3f936557 Fix skipping Kubernetes tests if client is not installed + +- **PR** `#46070`_: (*Ch3LL*) add required arg to dns_check jinja doc example + @ *2018-02-16T20:00:44Z* + + * c3a938e994 Merge pull request `#46070`_ from Ch3LL/fix-doc-dns + * 2a5d855d97 add required arg to dns_check jinja doc example + +- **PR** `#46067`_: (*rallytime*) Back-port `#45994`_ to 2017.7 + @ *2018-02-16T19:55:27Z* + + - **PR** `#45994`_: (*nullify005*) Fix hosted zone Comment updates & quote TXT entries correctly + | refs: `#46067`_ + * 01042e9d77 Merge pull request `#46067`_ from rallytime/`bp-45994`_ + * a07bb48726 Correct formatting for lint + + * e8678f633d Fix Comment being None not '' and inject quotes into the TXT ChangeRecords + +- **PR** `#45932`_: (*The-Loeki*) Fix cmd run_all bg error + @ *2018-02-16T14:53:15Z* + + - **ISSUE** `#42932`_: (*bobrik*) cmd.run with bg: true doesn't fail properly + | refs: `#45932`_ + - **PR** `#39980`_: (*vutny*) [2016.3] Allow to use `bg` kwarg for `cmd.run` state function + | refs: `#45932`_ + * 5e0e2a30e2 Merge pull request `#45932`_ from The-Loeki/fix_cmd_run_all_bg + * f83da27ca5 Merge branch '2017.7' into fix_cmd_run_all_bg + + * 771758fbca Merge branch '2017.7' into fix_cmd_run_all_bg + + * c54fcf7a2d cmd: move separate DRY logging blocks into _run, prevent logging on bg=True, don't use_vt on bg + + * ebb1f81a9b cmd run: when running in bg, force ignore_retcode=True + +- **PR** `#46062`_: (*vutny*) Fix typo in postgres_user.present state function + @ *2018-02-16T14:44:29Z* + + * 45ace39961 Merge pull request `#46062`_ from vutny/pg-user-state-fix-typo + * a5fbe4e95e Fix typo in postgres_user.present state function + +- **PR** `#45763`_: (*twangboy*) Fix rehash function in win_path.py + @ *2018-02-15T20:05:16Z* + + * edcb64de76 Merge pull request `#45763`_ from twangboy/win_fix_path_rehash + * b9a2bc7b29 Fix hyperlinks + + * 29912adc15 Move the test_rehash test to test_win_functions + + * adc594c183 Remove duplicate link + + * e84628c1eb Add some comments to the code + + * d50d5f582f Add additional info to docs for `broadcast_setting_change` + + * 3a54e09cd9 Rename setting to message + + * a3f9e99bc0 Change to a generic function to broadcast change + + * 79299361c3 Create refresh_environment salt util + + * 967b83940c Fix rehash function + +- **PR** `#46042`_: (*jfindlay*) Revise file_tree pillar module documentation + @ *2018-02-15T19:29:52Z* + + - **PR** `#46027`_: (*jfindlay*) Revise file_tree pillar module documentation + | refs: `#46042`_ + * a46fbc546c Merge pull request `#46042`_ from jfindlay/file_tree_doc + * 0ba4954a4b salt.pillar.file_tree revise module documentation + + * 3c6a5bf967 salt.pillar.file_tree provide better debug info + + * bb1cdc451e salt.pillar.file_tree no stack trace when nodegroups undefined + +- **PR** `#46013`_: (*rallytime*) Back-port `#45598`_ to 2017.7 + @ *2018-02-15T16:11:05Z* + + - **PR** `#45598`_: (*nullify005*) Patch around ResourceRecords needing to be present for AliasTarget + | refs: `#46013`_ + * de86126dd8 Merge pull 
request `#46013`_ from rallytime/`bp-45598`_ + * 2ea3fef543 No lazy logging + + * f427b0febc Change formatting style of logging lines per review + + * ebb244396b Patch around ResourceRecords needing to be present for AliasTarget entries to work + +- **PR** `#46016`_: (*rallytime*) Back-port `#45826`_ to 2017.7 + @ *2018-02-14T18:16:24Z* + + - **ISSUE** `#45825`_: (*philpep*) selinux.fcontext_policy_present doesn't work on Centos 6 with filetype = all files + | refs: `#45826`_ + - **PR** `#45826`_: (*philpep*) Fix selinux.fcontext_policy_present for Centos 6 + | refs: `#46016`_ + * 07e5735471 Merge pull request `#46016`_ from rallytime/`bp-45826`_ + * 1916e5c4a4 Fix selinux.fcontext_policy_present for Centos 6 + +- **PR** `#46015`_: (*rallytime*) Back-port `#45785`_ to 2017.7 + @ *2018-02-14T18:16:09Z* + + - **ISSUE** `#45784`_: (*oarmstrong*) SELinux module fcontext_get_policy fails with long regex + | refs: `#45785`_ `#45785`_ `#45785`_ + - **PR** `#45785`_: (*oarmstrong*) m/selinux.fcontext_get_policy allow long filespecs + | refs: `#46015`_ + * a1f4092811 Merge pull request `#46015`_ from rallytime/`bp-45785`_ + * ef6ffb1492 Resolve linting errors + + * 8047066c46 Remove unused import + + * 8f7c45935a Add tests for salt.modules.selinux.fcontext_get_policy + + * bafb7b4e6e Ensure parsed fields are stripped + + * a830a6e819 m/selinux.fcontext_get_policy allow long filespecs + +- **PR** `#46012`_: (*rallytime*) Back-port `#45462`_ to 2017.7 + @ *2018-02-14T18:14:56Z* + + - **PR** `#45462`_: (*aphor*) emit port cli version, variants as separate args + | refs: `#46012`_ + * 96097c037e Merge pull request `#46012`_ from rallytime/`bp-45462`_ + * 9f76836a6c emit port cli version, variants as separate args + +- **PR** `#45991`_: (*terminalmage*) yumpkg: Fix a couple issues with _get_extra_opts + @ *2018-02-14T16:48:28Z* + + * 1279924f5f Merge pull request `#45991`_ from terminalmage/fix-duplicate-extra-opts + * 916766f651 yumpkg: Fix a couple issues with _get_extra_opts + +- **PR** `#46017`_: (*rallytime*) [2017.7] Merge forward from 2017.7.3 to 2017.7 + @ *2018-02-13T21:43:15Z* + + * 8b9adc258e Merge pull request `#46017`_ from rallytime/merge-2017.7 + * a06645ce71 Merge branch '2017.7.3' into '2017.7' + +- **PR** `#45988`_: (*rallytime*) Back-port `#45797`_ to 2017.7 + @ *2018-02-13T17:49:02Z* + + - **ISSUE** `#45796`_: (*L4rS6*) aliases module doesn't follow symlinks + | refs: `#45797`_ + - **PR** `#45797`_: (*L4rS6*) follow symlinks in aliases module (close `#45796`_) + | refs: `#45988`_ + * d20ff89414 Merge pull request `#45988`_ from rallytime/`bp-45797`_ + * 953a400d79 follow symlinks + +- **PR** `#45711`_: (*bdrung*) Fix Unicode tests when run with LC_ALL=POSIX + @ *2018-02-13T17:42:07Z* + + * b18087cee0 Merge pull request `#45711`_ from bdrung/fix-unicode-tests + * b6181b5ed6 Fix Unicode tests when run with LC_ALL=POSIX + +- **PR** `#45878`_: (*damon-atkins*) ec2_pillar update to fix finding instance-id + | refs: `#46137`_ `#46137`_ `#46137`_ + @ *2018-02-13T17:34:14Z* + + * 5271fb1d40 Merge pull request `#45878`_ from damon-atkins/2017.7_fix_ec2_pillar + * 0e74025714 Merge branch '2017.7' into 2017.7_fix_ec2_pillar + + * b4d0b23891 py3 fix + + * 75d9e20d8a Add ignoring 'terminated', 'stopped' instances, to improve changes of a single match + + * 0093472a37 added tag_key_list and tag_key_sep to create ec2_tags_list + + * afb3968aa7 ec2_pillar could not find instance-id, resolved. add support to use any tag to compare minion id against. 
+ +- **PR** `#45942`_: (*terminalmage*) Fix incorrect translation of docker port_bindings -> ports (2017.7 branch) + @ *2018-02-13T16:10:03Z* + + * cf367dbd04 Merge pull request `#45942`_ from terminalmage/issue45679-2017.7 + * 89cbd72a0d Don't try to sort ports when translating docker input + + * 9cd47b39dd Fix incorrect translation of docker port_bindings -> ports + +- **PR** `#45959`_: (*rallytime*) A couple of grammar updates for the state compiler docs + @ *2018-02-12T22:17:49Z* + + * dae41de7a8 Merge pull request `#45959`_ from rallytime/state-doc-update + * 6f781cb95d A couple of grammar updates for the state compiler docs + +- **PR** `#45908`_: (*tintoy*) Fix for `#45884`_ ("TypeError: can't serialize `. + +Option to Return to Previous Pillar Include Behavior +---------------------------------------------------- + +Prior to version 2017.7.3, keys from :ref:`pillar includes ` +would be merged on top of the pillar SLS. Since 2017.7.3, the includes are +merged together and then the pillar SLS is merged on top of that. + +The :conf_master:`pillar_includes_override_sls` option has been added allow +users to switch back to the pre-2017.7.3 behavior. diff --git a/doc/topics/releases/2018.3.0.rst b/doc/topics/releases/2018.3.0.rst index b0a39db028..3b94f3ae1a 100644 --- a/doc/topics/releases/2018.3.0.rst +++ b/doc/topics/releases/2018.3.0.rst @@ -30,7 +30,7 @@ The :py:func:`docker_network.present ` state has undergone a full rewrite, which includes the following improvements: Full API Support for Network Management ---------------------------------------- +======================================= The improvements made to input handling in the :py:func:`docker_container.running ` @@ -39,14 +39,14 @@ state for 2017.7.0 have now been expanded to :py:func:`docker_network.present tunable configuration arguments. Custom Subnets --------------- +============== Custom subnets can now be configured. Both IPv4 and mixed IPv4/IPv6 networks are supported. See :ref:`here ` for more information. Network Configuration in :py:func:`docker_container.running` States -------------------------------------------------------------------- +=================================================================== A long-requested feature has finally been added! It is now possible to configure static IPv4/IPv6 addresses, as well as links and labels. See @@ -85,174 +85,6 @@ Additionally, the ``tag`` argument must now be explicitly passed to the :py:func:`docker_image.present ` state, unless the image is being pulled from a docker registry. -``utils`` functions moved into separate modules -=============================================== - -The Salt utility functions from ``salt.utils`` have been moved into different -modules, grouped logically based on their functionality. This change is -backwards compatible, but the old imports will no longer be supported starting -with release Neon. - -The functions have been moved as follows: - -- ``salt.utils.appendproctitle``: use ``salt.utils.process.appendproctitle`` - instead. -- ``salt.utils.daemonize``: use ``salt.utils.process.daemonize`` instead. -- ``salt.utils.daemonize_if``: use ``salt.utils.process.daemonize_if`` instead. -- ``salt.utils.reinit_crypto``: use ``salt.utils.crypt.reinit_crypto`` instead. -- ``salt.utils.pem_finger``: use ``salt.utils.crypt.pem_finger`` instead. -- ``salt.utils.to_bytes``: use ``salt.utils.stringutils.to_bytes`` instead. -- ``salt.utils.to_str``: use ``salt.utils.stringutils.to_str`` instead. 
-- ``salt.utils.to_unicode``: use ``salt.utils.stringutils.to_unicode`` instead. -- ``salt.utils.str_to_num``: use ``salt.utils.stringutils.to_num`` instead. -- ``salt.utils.is_quoted``: use ``salt.utils.stringutils.is_quoted`` instead. -- ``salt.utils.dequote``: use ``salt.utils.stringutils.dequote`` instead. -- ``salt.utils.is_hex``: use ``salt.utils.stringutils.is_hex`` instead. -- ``salt.utils.is_bin_str``: use ``salt.utils.stringutils.is_bin_str`` instead. -- ``salt.utils.rand_string``: use ``salt.utils.stringutils.random`` instead. -- ``salt.utils.contains_whitespace``: use - ``salt.utils.stringutils.contains_whitespace`` instead. -- ``salt.utils.build_whitespace_split_regex``: use - ``salt.utils.stringutils.build_whitespace_split_regex`` instead. -- ``salt.utils.expr_match``: use ``salt.utils.stringutils.expr_match`` instead. -- ``salt.utils.check_whitelist_blacklist``: use - ``salt.utils.stringutils.check_whitelist_blacklist`` instead. -- ``salt.utils.check_include_exclude``: use - ``salt.utils.stringutils.check_include_exclude`` instead. -- ``salt.utils.print_cli``: use ``salt.utils.stringutils.print_cli`` instead. -- ``salt.utils.clean_kwargs``: use ``salt.utils.args.clean_kwargs`` instead. -- ``salt.utils.invalid_kwargs``: use ``salt.utils.args.invalid_kwargs`` - instead. -- ``salt.utils.shlex_split``: use ``salt.utils.args.shlex_split`` instead. -- ``salt.utils.arg_lookup``: use ``salt.utils.args.arg_lookup`` instead. -- ``salt.utils.argspec_report``: use ``salt.utils.args.argspec_report`` - instead. -- ``salt.utils.split_input``: use ``salt.utils.args.split_input`` instead. -- ``salt.utils.test_mode``: use ``salt.utils.args.test_mode`` instead. -- ``salt.utils.format_call``: use ``salt.utils.args.format_call`` instead. -- ``salt.utils.which``: use ``salt.utils.path.which`` instead. -- ``salt.utils.which_bin``: use ``salt.utils.path.which_bin`` instead. -- ``salt.utils.path_join``: use ``salt.utils.path.join`` instead. -- ``salt.utils.check_or_die``: use ``salt.utils.path.check_or_die`` instead. -- ``salt.utils.sanitize_win_path_string``: use - ``salt.utils.path.sanitize_win_path`` instead. -- ``salt.utils.rand_str``: use ``salt.utils.hashutils.random_hash`` instead. -- ``salt.utils.get_hash``: use ``salt.utils.hashutils.get_hash`` instead. -- ``salt.utils.is_windows``: use ``salt.utils.platform.is_windows`` instead. -- ``salt.utils.is_proxy``: use ``salt.utils.platform.is_proxy`` instead. -- ``salt.utils.is_linux``: use ``salt.utils.platform.is_linux`` instead. -- ``salt.utils.is_darwin``: use ``salt.utils.platform.is_darwin`` instead. -- ``salt.utils.is_sunos``: use ``salt.utils.platform.is_sunos`` instead. -- ``salt.utils.is_smartos``: use ``salt.utils.platform.is_smartos`` instead. -- ``salt.utils.is_smartos_globalzone``: use - ``salt.utils.platform.is_smartos_globalzone`` instead. -- ``salt.utils.is_smartos_zone``: use ``salt.utils.platform.is_smartos_zone`` - instead. -- ``salt.utils.is_freebsd``: use ``salt.utils.platform.is_freebsd`` instead. -- ``salt.utils.is_netbsd``: use ``salt.utils.platform.is_netbsd`` instead. -- ``salt.utils.is_openbsd``: use ``salt.utils.platform.is_openbsd`` instead. -- ``salt.utils.is_aix``: use ``salt.utils.platform.is_aix`` instead. -- ``salt.utils.safe_rm``: use ``salt.utils.files.safe_rm`` instead. -- ``salt.utils.is_empty``: use ``salt.utils.files.is_empty`` instead. -- ``salt.utils.fopen``: use ``salt.utils.files.fopen`` instead. -- ``salt.utils.flopen``: use ``salt.utils.files.flopen`` instead. 
-- ``salt.utils.fpopen``: use ``salt.utils.files.fpopen`` instead. -- ``salt.utils.rm_rf``: use ``salt.utils.files.rm_rf`` instead. -- ``salt.utils.mkstemp``: use ``salt.utils.files.mkstemp`` instead. -- ``salt.utils.istextfile``: use ``salt.utils.files.is_text_file`` instead. -- ``salt.utils.is_bin_file``: use ``salt.utils.files.is_binary`` instead. -- ``salt.utils.list_files``: use ``salt.utils.files.list_files`` instead. -- ``salt.utils.safe_walk``: use ``salt.utils.files.safe_walk`` instead. -- ``salt.utils.st_mode_to_octal``: use ``salt.utils.files.st_mode_to_octal`` - instead. -- ``salt.utils.normalize_mode``: use ``salt.utils.files.normalize_mode`` - instead. -- ``salt.utils.human_size_to_bytes``: use - ``salt.utils.files.human_size_to_bytes`` instead. -- ``salt.utils.backup_minion``: use ``salt.utils.files.backup_minion`` instead. -- ``salt.utils.str_version_to_evr``: use ``salt.utils.pkg.rpm.version_to_evr`` - instead. -- ``salt.utils.parse_docstring``: use ``salt.utils.doc.parse_docstring`` - instead. -- ``salt.utils.compare_versions``: use ``salt.utils.versions.compare`` instead. -- ``salt.utils.version_cmp``: use ``salt.utils.versions.version_cmp`` instead. -- ``salt.utils.warn_until``: use ``salt.utils.versions.warn_until`` instead. -- ``salt.utils.kwargs_warn_until``: use - ``salt.utils.versions.kwargs_warn_until`` instead. -- ``salt.utils.get_color_theme``: use ``salt.utils.color.get_color_theme`` - instead. -- ``salt.utils.get_colors``: use ``salt.utils.color.get_colors`` instead. -- ``salt.utils.gen_state_tag``: use ``salt.utils.state.gen_tag`` instead. -- ``salt.utils.search_onfail_requisites``: use - ``salt.utils.state.search_onfail_requisites`` instead. -- ``salt.utils.check_state_result``: use ``salt.utils.state.check_result`` - instead. -- ``salt.utils.get_user``: use ``salt.utils.user.get_user`` instead. -- ``salt.utils.get_uid``: use ``salt.utils.user.get_uid`` instead. -- ``salt.utils.get_specific_user``: use ``salt.utils.user.get_specific_user`` - instead. -- ``salt.utils.chugid``: use ``salt.utils.user.chugid`` instead. -- ``salt.utils.chugid_and_umask``: use ``salt.utils.user.chugid_and_umask`` - instead. -- ``salt.utils.get_default_group``: use ``salt.utils.user.get_default_group`` - instead. -- ``salt.utils.get_group_list``: use ``salt.utils.user.get_group_list`` - instead. -- ``salt.utils.get_group_dict``: use ``salt.utils.user.get_group_dict`` - instead. -- ``salt.utils.get_gid_list``: use ``salt.utils.user.get_gid_list`` instead. -- ``salt.utils.get_gid``: use ``salt.utils.user.get_gid`` instead. -- ``salt.utils.enable_ctrl_logoff_handler``: use - ``salt.utils.win_functions.enable_ctrl_logoff_handler`` instead. -- ``salt.utils.traverse_dict``: use ``salt.utils.data.traverse_dict`` instead. -- ``salt.utils.traverse_dict_and_list``: use - ``salt.utils.data.traverse_dict_and_list`` instead. -- ``salt.utils.filter_by``: use ``salt.utils.data.filter_by`` instead. -- ``salt.utils.subdict_match``: use ``salt.utils.data.subdict_match`` instead. -- ``salt.utils.substr_in_list``: use ``salt.utils.data.substr_in_list`` instead. -- ``salt.utils.is_dictlist``: use ``salt.utils.data.is_dictlist``. -- ``salt.utils.repack_dictlist``: use ``salt.utils.data.repack_dictlist`` - instead. -- ``salt.utils.compare_dicts``: use ``salt.utils.data.compare_dicts`` instead. -- ``salt.utils.compare_lists``: use ``salt.utils.data.compare_lists`` instead. -- ``salt.utils.decode_dict``: use ``salt.utils.data.encode_dict`` instead. 
-- ``salt.utils.decode_list``: use ``salt.utils.data.encode_list`` instead. -- ``salt.utils.exactly_n``: use ``salt.utils.data.exactly_n`` instead. -- ``salt.utils.exactly_one``: use ``salt.utils.data.exactly_one`` instead. -- ``salt.utils.is_list``: use ``salt.utils.data.is_list`` instead. -- ``salt.utils.is_iter``: use ``salt.utils.data.is_iter`` instead. -- ``salt.utils.isorted``: use ``salt.utils.data.sorted_ignorecase`` instead. -- ``salt.utils.is_true``: use ``salt.utils.data.is_true`` instead. -- ``salt.utils.mysql_to_dict``: use ``salt.utils.data.mysql_to_dict`` instead. -- ``salt.utils.simple_types_filter``: use - ``salt.utils.data.simple_types_filter`` instead. -- ``salt.utils.ip_bracket``: use ``salt.utils.zeromq.ip_bracket`` instead. -- ``salt.utils.gen_mac``: use ``salt.utils.network.gen_mac`` instead. -- ``salt.utils.mac_str_to_bytes``: use ``salt.utils.network.mac_str_to_bytes`` - instead. -- ``salt.utils.refresh_dns``: use ``salt.utils.network.refresh_dns`` instead. -- ``salt.utils.dns_check``: use ``salt.utils.network.dns_check`` instead. -- ``salt.utils.get_context``: use ``salt.utils.stringutils.get_context`` instead. -- ``salt.utils.get_master_key``: use ``salt.utils.master.get_master_key`` - instead. -- ``salt.utils.get_values_of_matching_keys``: use - ``salt.utils.master.get_values_of_matching_keys`` instead. -- ``salt.utils.date_cast``: use ``salt.utils.dateutils.date_cast`` instead. -- ``salt.utils.date_format``: use ``salt.utils.dateutils.strftime`` instead. -- ``salt.utils.total_seconds``: use ``salt.utils.dateutils.total_seconds`` - instead. -- ``salt.utils.find_json``: use ``salt.utils.json.find_json`` instead. -- ``salt.utils.import_json``: use ``salt.utils.json.import_json`` instead. -- ``salt.utils.namespaced_function``: use - ``salt.utils.functools.namespaced_function`` instead. -- ``salt.utils.alias_function``: use ``salt.utils.functools.alias_function`` - instead. -- ``salt.utils.profile_func``: use ``salt.utils.profile.profile_func`` instead. -- ``salt.utils.activate_profile``: use ``salt.utils.profile.activate_profile`` - instead. -- ``salt.utils.output_profile``: use ``salt.utils.profile.output_profile`` - instead. - State and Execution Module Support for ``docker run`` Functionality =================================================================== @@ -456,16 +288,14 @@ programatically `. The failed returns in these cases are now included in the changes dictionary, making for much easier parsing. -New Grains ----------- - -New core grains have been added to expose any storage inititator setting. - -The new grains added are: - -* ``fc_wwn``: Show all fibre channel world wide port names for a host +Grains +------ +* ``fc_wwn``: Show all fibre channel world wide port names for a host, must be enabled with `fibre_channel_grains` * ``iscsi_iqn``: Show the iSCSI IQN name for a host * ``swap_total``: Show the configured swap_total for Linux, \*BSD, OS X and Solaris/SunOS +* ``virtual``: + * identifies reports KVM and VMM hypervisors when running an OpenBSD guest + * for detecting Solaris Logical Domains (LDOMs) running on T-Series SPARC hardware. The ``virtual_subtype`` grain is populated as a list of domain roles. Salt Minion Auto-discovery ------------------------ @@ -482,7 +312,7 @@ Configuration By default, automatic discovery is disabled. .. 
warning:: - Due to the current limitations that will be changing in a future, before you turn on auto-discovery, + Due to the current limitations that will be changing in a future release, before you turn on auto-discovery, make sure your network is secured and trusted. Auto-discovery is configured on Master and Minion. Both of them are configured via the ``discovery`` option @@ -589,12 +419,6 @@ This feature has a couple of _temporary_ limitations that are subject to change security applied (priv/pub key check, signature, fingerprint etc). That implies that administrator is expected to know his network and make sure it is clean. -Grains Changes --------------- - -* The ``virtual`` grain identifies reports KVM and VMM hypervisors when running - an OpenBSD guest - New Modules ----------- @@ -655,31 +479,96 @@ The ``state_output`` parameter now supports ``full_id``, ``changes_id`` and ``te Just like ``mixed_id``, these use the state ID as name in the highstate output. For more information on these output modes, see the docs for the :mod:`Highstate Outputter `. -Windows Installer: Changes to existing config handling ------------------------------------------------------- -Behavior with existing configuration has changed. With previous installers the +Windows +------- +Python Version +============== +The Python 2 Windows API was designed when Windows did not support Unicode. Windows now supports +Unicode; however, to keep backwards compatibility, the Python 2 Windows API has not been changed. +The Python 3 Windows API supports Unicode. The Salt Python 3 installer is the recommended choice for +users who need characters outside the ASCII (7-bit) range. + +Execution module changes +======================== +pkg +*** +Significant changes have been made to the :mod:`win_pkg ` execution module. Users should test this release against their existing package sls definition files. These changes are also in 2016.11.9 & 2017.7.3. + +- ``pkg.list_available`` no longer defaults to refreshing the winrepo meta database. +- ``pkg.install`` without a ``version`` parameter no longer upgrades software if the software is already installed. Use ``pkg.install version=latest`` or in a state use ``pkg.latest`` to get the old behavior. +- ``pkg.list_pkgs`` now returns multiple versions if the software is installed more than once. +- ``pkg.list_pkgs`` now returns 'Not Found' when the version is not found instead of '(value not set)', which matches the contents of the sls definitions. +- ``pkg.remove()`` will wait up to 3 seconds (normally about a second) to detect changes in the registry after removing software, improving reporting of version changes. +- ``pkg.remove()`` can remove ``latest`` software, if ``latest`` is defined in the sls definition. +- Documentation was updated for the execution module to match the style of newer versions, with some corrections as well. +- All install/remove commands are prefixed with the cmd.exe shell, and cmdmod is called with a command-line string instead of a list. Some sls files in saltstack/salt-winrepo-ng expected the commands to be prefixed with cmd.exe (i.e. the use of ``&``). +- Some execution module function results now behave more like their Unix/Linux counterparts. + +cmdmod +****** +Linux/Unix OS commands & arguments require a Python list. Windows was being treated +the same, but Windows requires commands & arguments to be a string. These changes are +also in 2016.11.9 & 2017.7.3. + +Installer +========= +Changes to config handling +************************** +Behavior with existing configuration has changed.
With previous windows installers the existing config was used and the master and minion id could be modified via the installer. It was problematic in that it didn't account for configuration that may be defined in the ``minion.d`` directory. This change gives you the option -via a checkbox to either use the existing config with out changes or the default -config using values you pass to the installer. If you choose to use the existing -config then no changes are made. If not, the existing config is deleted, to -include the ``minion.d`` directory, and the default config is used. A -command-line switch (``/use-existing-config``) has also been added to control -this behavior. +via a drop-down list to use one of the following: -Windows Installer: Multi-master configuration ---------------------------------------------- +- Default Config: Use the config that comes with the installer +- Existing Config: Use the current config without changes +- Custom Config: Select a custom config using the file picker + +The existing config option will only be available if the installer detects an +existing config. If there is an existing config, and you choose ``Default`` or ``Custom``, the existing config will be deleted, including the ``minion.d`` +directory, and replaced by your selection. + +The ``Default Config`` and ``Custom Config`` options will allow you to modify +the Master and the Minion ID. ``Existing Config`` will leave the existing +configuration unchanged. + +These settings can be defined on the command line using the following switches: + +- ``/default-config`` +- ``/custom-config=C:\Path\To\Custom\Config\minion`` + +If neither option is passed and there is an existing config, the default is to +use the existing config. If there is no existing config (new install), the +default config will be used. + +Multi-master configuration +************************** The installer now has the ability to apply a multi-master configuration either from the gui or the command line. The ``master`` field in the gui can accept either a single master or a comma-separated list of masters. The command-line switch (``/master=``) can accept the same. -Windows Installer: Command-line help ------------------------------------- +Command-line help +***************** The Windows installer will now display command-line help when a help switch (``/?``) is passed. +utils.pkg.win preview +===================== +A new utils python module has been added, which gathers information about installed +Windows software. This is currently not used by any Salt execution module or state. +Users are encouraged to run this and report any issues. Running the command with the +``detail`` option will be useful for anyone developing Windows package definitions. +With Salt installed in the default location, the following command will print the help +message. + +.. code-block:: text + + chcp 65001 + c:\salt\bin\python.exe c:\salt\bin\lib\site-packages\salt\utils\pkg\win.py + c:\salt\bin\python.exe c:\salt\bin\lib\site-packages\salt\utils\pkg\win.py detail system + Salt Cloud Features ------------------- @@ -784,6 +673,21 @@ which designate a ``Vagrantfile`` on the host machine. The master can be a very limited machine, such as a Raspberry Pi, or a small VagrantBox VM. +Python PyWinRM Module +===================== +Versions of ``pywinrm>=0.2.1`` are finally able to disable validation of self +signed certificates. :ref:`Here` for more information. + +DigitalOcean +============ +The DigitalOcean driver has been renamed to conform to the company name.
The +new driver name is ``digitalocean``. The old name ``digital_ocean`` and a +short one ``do`` will still be supported through virtual aliases, this is mostly +cosmetic. + +Azure Cloud +=========== +The azure sdk used for the ``azurearm`` cloud driver now depends on ``azure-cli>=2.0.12`` New pillar/master_tops module called saltclass ---------------------------------------------- @@ -973,27 +877,6 @@ Not using ``^`` as the first entry will simply merge the lists Currently you can't have both a variable and an escaped variable in the same string as the escaped one will not be correctly rendered - '\${xx}' will stay as is instead of being rendered as '${xx}' -Newer PyWinRM Versions ----------------------- - -Versions of ``pywinrm>=0.2.1`` are finally able to disable validation of self -signed certificates. :ref:`Here` for more information. - -DigitalOcean ------------- - -The DigitalOcean driver has been renamed to conform to the company name. The -new driver name is ``digitalocean``. The old name ``digital_ocean`` and a -short one ``do`` will still be supported through virtual aliases, this is mostly -cosmetic. - -Solaris Logical Domains In Virtual Grain ----------------------------------------- - -Support has been added to the ``virtual`` grain for detecting Solaris LDOMs -running on T-Series SPARC hardware. The ``virtual_subtype`` grain is -populated as a list of domain roles. - Lists of comments in state returns ---------------------------------- @@ -1610,9 +1493,177 @@ NaCL Module and Runner changes ------------------------------ In addition to argument changes in both the NaCL module and runner for future -deprecation in the Fluorine release, the default box_type has changed from -`secretbox` to `sealedbox`. SecretBox is data encrypted using private key -`sk` and Sealedbox is encrypted using public key `pk` +removal in the Neon release, the default "box_type" has changed from +``secretbox`` to ``sealedbox``. SecretBox is data encrypted using private key +``sk`` and Sealedbox is encrypted using public key ``pk``. + +``utils`` functions moved into separate modules +----------------------------------------------- + +The Salt utility functions from ``salt.utils`` have been moved into different +modules, grouped logically based on their functionality. This change is +backwards compatible, but the old imports will no longer be supported starting +with release Neon. + +The functions have been moved as follows: + +- ``salt.utils.appendproctitle``: use ``salt.utils.process.appendproctitle`` + instead. +- ``salt.utils.daemonize``: use ``salt.utils.process.daemonize`` instead. +- ``salt.utils.daemonize_if``: use ``salt.utils.process.daemonize_if`` instead. +- ``salt.utils.reinit_crypto``: use ``salt.utils.crypt.reinit_crypto`` instead. +- ``salt.utils.pem_finger``: use ``salt.utils.crypt.pem_finger`` instead. +- ``salt.utils.to_bytes``: use ``salt.utils.stringutils.to_bytes`` instead. +- ``salt.utils.to_str``: use ``salt.utils.stringutils.to_str`` instead. +- ``salt.utils.to_unicode``: use ``salt.utils.stringutils.to_unicode`` instead. +- ``salt.utils.str_to_num``: use ``salt.utils.stringutils.to_num`` instead. +- ``salt.utils.is_quoted``: use ``salt.utils.stringutils.is_quoted`` instead. +- ``salt.utils.dequote``: use ``salt.utils.stringutils.dequote`` instead. +- ``salt.utils.is_hex``: use ``salt.utils.stringutils.is_hex`` instead. +- ``salt.utils.is_bin_str``: use ``salt.utils.stringutils.is_bin_str`` instead. +- ``salt.utils.rand_string``: use ``salt.utils.stringutils.random`` instead. 
+- ``salt.utils.contains_whitespace``: use + ``salt.utils.stringutils.contains_whitespace`` instead. +- ``salt.utils.build_whitespace_split_regex``: use + ``salt.utils.stringutils.build_whitespace_split_regex`` instead. +- ``salt.utils.expr_match``: use ``salt.utils.stringutils.expr_match`` instead. +- ``salt.utils.check_whitelist_blacklist``: use + ``salt.utils.stringutils.check_whitelist_blacklist`` instead. +- ``salt.utils.check_include_exclude``: use + ``salt.utils.stringutils.check_include_exclude`` instead. +- ``salt.utils.print_cli``: use ``salt.utils.stringutils.print_cli`` instead. +- ``salt.utils.clean_kwargs``: use ``salt.utils.args.clean_kwargs`` instead. +- ``salt.utils.invalid_kwargs``: use ``salt.utils.args.invalid_kwargs`` + instead. +- ``salt.utils.shlex_split``: use ``salt.utils.args.shlex_split`` instead. +- ``salt.utils.arg_lookup``: use ``salt.utils.args.arg_lookup`` instead. +- ``salt.utils.argspec_report``: use ``salt.utils.args.argspec_report`` + instead. +- ``salt.utils.split_input``: use ``salt.utils.args.split_input`` instead. +- ``salt.utils.test_mode``: use ``salt.utils.args.test_mode`` instead. +- ``salt.utils.format_call``: use ``salt.utils.args.format_call`` instead. +- ``salt.utils.which``: use ``salt.utils.path.which`` instead. +- ``salt.utils.which_bin``: use ``salt.utils.path.which_bin`` instead. +- ``salt.utils.path_join``: use ``salt.utils.path.join`` instead. +- ``salt.utils.check_or_die``: use ``salt.utils.path.check_or_die`` instead. +- ``salt.utils.sanitize_win_path_string``: use + ``salt.utils.path.sanitize_win_path`` instead. +- ``salt.utils.rand_str``: use ``salt.utils.hashutils.random_hash`` instead. +- ``salt.utils.get_hash``: use ``salt.utils.hashutils.get_hash`` instead. +- ``salt.utils.is_windows``: use ``salt.utils.platform.is_windows`` instead. +- ``salt.utils.is_proxy``: use ``salt.utils.platform.is_proxy`` instead. +- ``salt.utils.is_linux``: use ``salt.utils.platform.is_linux`` instead. +- ``salt.utils.is_darwin``: use ``salt.utils.platform.is_darwin`` instead. +- ``salt.utils.is_sunos``: use ``salt.utils.platform.is_sunos`` instead. +- ``salt.utils.is_smartos``: use ``salt.utils.platform.is_smartos`` instead. +- ``salt.utils.is_smartos_globalzone``: use + ``salt.utils.platform.is_smartos_globalzone`` instead. +- ``salt.utils.is_smartos_zone``: use ``salt.utils.platform.is_smartos_zone`` + instead. +- ``salt.utils.is_freebsd``: use ``salt.utils.platform.is_freebsd`` instead. +- ``salt.utils.is_netbsd``: use ``salt.utils.platform.is_netbsd`` instead. +- ``salt.utils.is_openbsd``: use ``salt.utils.platform.is_openbsd`` instead. +- ``salt.utils.is_aix``: use ``salt.utils.platform.is_aix`` instead. +- ``salt.utils.safe_rm``: use ``salt.utils.files.safe_rm`` instead. +- ``salt.utils.is_empty``: use ``salt.utils.files.is_empty`` instead. +- ``salt.utils.fopen``: use ``salt.utils.files.fopen`` instead. +- ``salt.utils.flopen``: use ``salt.utils.files.flopen`` instead. +- ``salt.utils.fpopen``: use ``salt.utils.files.fpopen`` instead. +- ``salt.utils.rm_rf``: use ``salt.utils.files.rm_rf`` instead. +- ``salt.utils.mkstemp``: use ``salt.utils.files.mkstemp`` instead. +- ``salt.utils.istextfile``: use ``salt.utils.files.is_text_file`` instead. +- ``salt.utils.is_bin_file``: use ``salt.utils.files.is_binary`` instead. +- ``salt.utils.list_files``: use ``salt.utils.files.list_files`` instead. +- ``salt.utils.safe_walk``: use ``salt.utils.files.safe_walk`` instead. 
+- ``salt.utils.st_mode_to_octal``: use ``salt.utils.files.st_mode_to_octal`` + instead. +- ``salt.utils.normalize_mode``: use ``salt.utils.files.normalize_mode`` + instead. +- ``salt.utils.human_size_to_bytes``: use + ``salt.utils.files.human_size_to_bytes`` instead. +- ``salt.utils.backup_minion``: use ``salt.utils.files.backup_minion`` instead. +- ``salt.utils.str_version_to_evr``: use ``salt.utils.pkg.rpm.version_to_evr`` + instead. +- ``salt.utils.parse_docstring``: use ``salt.utils.doc.parse_docstring`` + instead. +- ``salt.utils.compare_versions``: use ``salt.utils.versions.compare`` instead. +- ``salt.utils.version_cmp``: use ``salt.utils.versions.version_cmp`` instead. +- ``salt.utils.warn_until``: use ``salt.utils.versions.warn_until`` instead. +- ``salt.utils.kwargs_warn_until``: use + ``salt.utils.versions.kwargs_warn_until`` instead. +- ``salt.utils.get_color_theme``: use ``salt.utils.color.get_color_theme`` + instead. +- ``salt.utils.get_colors``: use ``salt.utils.color.get_colors`` instead. +- ``salt.utils.gen_state_tag``: use ``salt.utils.state.gen_tag`` instead. +- ``salt.utils.search_onfail_requisites``: use + ``salt.utils.state.search_onfail_requisites`` instead. +- ``salt.utils.check_state_result``: use ``salt.utils.state.check_result`` + instead. +- ``salt.utils.get_user``: use ``salt.utils.user.get_user`` instead. +- ``salt.utils.get_uid``: use ``salt.utils.user.get_uid`` instead. +- ``salt.utils.get_specific_user``: use ``salt.utils.user.get_specific_user`` + instead. +- ``salt.utils.chugid``: use ``salt.utils.user.chugid`` instead. +- ``salt.utils.chugid_and_umask``: use ``salt.utils.user.chugid_and_umask`` + instead. +- ``salt.utils.get_default_group``: use ``salt.utils.user.get_default_group`` + instead. +- ``salt.utils.get_group_list``: use ``salt.utils.user.get_group_list`` + instead. +- ``salt.utils.get_group_dict``: use ``salt.utils.user.get_group_dict`` + instead. +- ``salt.utils.get_gid_list``: use ``salt.utils.user.get_gid_list`` instead. +- ``salt.utils.get_gid``: use ``salt.utils.user.get_gid`` instead. +- ``salt.utils.enable_ctrl_logoff_handler``: use + ``salt.utils.win_functions.enable_ctrl_logoff_handler`` instead. +- ``salt.utils.traverse_dict``: use ``salt.utils.data.traverse_dict`` instead. +- ``salt.utils.traverse_dict_and_list``: use + ``salt.utils.data.traverse_dict_and_list`` instead. +- ``salt.utils.filter_by``: use ``salt.utils.data.filter_by`` instead. +- ``salt.utils.subdict_match``: use ``salt.utils.data.subdict_match`` instead. +- ``salt.utils.substr_in_list``: use ``salt.utils.data.substr_in_list`` instead. +- ``salt.utils.is_dictlist``: use ``salt.utils.data.is_dictlist``. +- ``salt.utils.repack_dictlist``: use ``salt.utils.data.repack_dictlist`` + instead. +- ``salt.utils.compare_dicts``: use ``salt.utils.data.compare_dicts`` instead. +- ``salt.utils.compare_lists``: use ``salt.utils.data.compare_lists`` instead. +- ``salt.utils.decode_dict``: use ``salt.utils.data.encode_dict`` instead. +- ``salt.utils.decode_list``: use ``salt.utils.data.encode_list`` instead. +- ``salt.utils.exactly_n``: use ``salt.utils.data.exactly_n`` instead. +- ``salt.utils.exactly_one``: use ``salt.utils.data.exactly_one`` instead. +- ``salt.utils.is_list``: use ``salt.utils.data.is_list`` instead. +- ``salt.utils.is_iter``: use ``salt.utils.data.is_iter`` instead. +- ``salt.utils.isorted``: use ``salt.utils.data.sorted_ignorecase`` instead. +- ``salt.utils.is_true``: use ``salt.utils.data.is_true`` instead. 
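As a quick aside on the pattern behind these moves, here is a minimal, hypothetical sketch of what migrating a call site looks like. The functions picked below are arbitrary examples from this list, and the ``try``/``except`` fallback is only useful for code that also has to run on releases from before the move:

.. code-block:: python

    # New-style imports: each helper now lives in a purpose-specific module.
    import salt.utils.platform
    import salt.utils.stringutils

    if salt.utils.platform.is_windows():                    # was salt.utils.is_windows()
        text = salt.utils.stringutils.to_unicode(b'data')   # was salt.utils.to_unicode()

    # Transitional pattern for code that must still import on older releases,
    # where the function has not yet moved out of salt.utils itself.
    try:
        from salt.utils.path import which   # new location
    except ImportError:
        from salt.utils import which        # pre-move location

    print(which('git'))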
+- ``salt.utils.mysql_to_dict``: use ``salt.utils.data.mysql_to_dict`` instead. +- ``salt.utils.simple_types_filter``: use + ``salt.utils.data.simple_types_filter`` instead. +- ``salt.utils.ip_bracket``: use ``salt.utils.zeromq.ip_bracket`` instead. +- ``salt.utils.gen_mac``: use ``salt.utils.network.gen_mac`` instead. +- ``salt.utils.mac_str_to_bytes``: use ``salt.utils.network.mac_str_to_bytes`` + instead. +- ``salt.utils.refresh_dns``: use ``salt.utils.network.refresh_dns`` instead. +- ``salt.utils.dns_check``: use ``salt.utils.network.dns_check`` instead. +- ``salt.utils.get_context``: use ``salt.utils.stringutils.get_context`` instead. +- ``salt.utils.get_master_key``: use ``salt.utils.master.get_master_key`` + instead. +- ``salt.utils.get_values_of_matching_keys``: use + ``salt.utils.master.get_values_of_matching_keys`` instead. +- ``salt.utils.date_cast``: use ``salt.utils.dateutils.date_cast`` instead. +- ``salt.utils.date_format``: use ``salt.utils.dateutils.strftime`` instead. +- ``salt.utils.total_seconds``: use ``salt.utils.dateutils.total_seconds`` + instead. +- ``salt.utils.find_json``: use ``salt.utils.json.find_json`` instead. +- ``salt.utils.import_json``: use ``salt.utils.json.import_json`` instead. +- ``salt.utils.namespaced_function``: use + ``salt.utils.functools.namespaced_function`` instead. +- ``salt.utils.alias_function``: use ``salt.utils.functools.alias_function`` + instead. +- ``salt.utils.profile_func``: use ``salt.utils.profile.profile_func`` instead. +- ``salt.utils.activate_profile``: use ``salt.utils.profile.activate_profile`` + instead. +- ``salt.utils.output_profile``: use ``salt.utils.profile.output_profile`` + instead. Deprecations ------------ @@ -1629,10 +1680,7 @@ Profitbricks Cloud Updated Dependency The minimum version of the ``profitbrick`` python package for the ``profitbricks`` cloud driver has changed from 3.0.0 to 3.1.0. -Azure Cloud Updated Dependency ------------------------------- -The azure sdk used for the ``azurearm`` cloud driver now depends on ``azure-cli>=2.0.12`` Module Deprecations =================== diff --git a/pkg/osx/req.txt b/pkg/osx/req.txt index 241a4e63a0..2eb8249e9c 100644 --- a/pkg/osx/req.txt +++ b/pkg/osx/req.txt @@ -21,7 +21,7 @@ pycrypto==2.6.1 python-dateutil==2.6.1 python-gnupg==0.4.1 PyYAML==3.12 -pyzmq==17.0.0b3 +pyzmq==17.0.0 requests==2.18.4 singledispatch==3.4.0.3 six==1.11.0 diff --git a/pkg/windows/installer/Salt-Minion-Setup.nsi b/pkg/windows/installer/Salt-Minion-Setup.nsi index ee019d9626..bbcdea86f2 100644 --- a/pkg/windows/installer/Salt-Minion-Setup.nsi +++ b/pkg/windows/installer/Salt-Minion-Setup.nsi @@ -1056,7 +1056,7 @@ Function AddToPath # Make sure the new length isn't over the NSIS_MAX_STRLEN IntCmp $2 ${NSIS_MAX_STRLEN} +4 +4 0 - DetailPrint "AddToPath: new length $2 > ${NSIS_MAX_STRLEN}" + DetailPrint "AddToPath Failed: new length $2 > ${NSIS_MAX_STRLEN}" MessageBox MB_OK \ "You may add C:\salt to the %PATH% for convenience when issuing local salt commands from the command line." 
\ /SD IDOK diff --git a/pkg/windows/req.txt b/pkg/windows/req.txt index 1881f4c99b..21e5a7de57 100644 --- a/pkg/windows/req.txt +++ b/pkg/windows/req.txt @@ -3,7 +3,7 @@ backports.ssl-match-hostname==3.5.0.1 certifi cffi==1.10.0 CherryPy==10.2.1 -cryptography==1.8.1 +cryptography==2.1.4 enum34==1.1.6 futures==3.1.1 gitdb==0.6.4 @@ -23,13 +23,14 @@ pycparser==2.17 pycrypto==2.6.1 pycurl==7.43.0 PyMySQL==0.7.11 -pyOpenSSL==17.0.0 -python-dateutil==2.6.0 -python-gnupg==0.4.0 +pyOpenSSL==17.5.0 +python-dateutil==2.6.1 +python-gnupg==0.4.1 +pythonnet==2.3.0 pywin32==223 PyYAML==3.12 -pyzmq==16.0.2 -requests==2.13.0 +pyzmq==16.0.3 +requests==2.18.4 singledispatch==3.4.0.3 smmap==0.9.0 timelib==0.2.4 diff --git a/salt/auth/sharedsecret.py b/salt/auth/sharedsecret.py index fcd8f4b947..ff3411af43 100644 --- a/salt/auth/sharedsecret.py +++ b/salt/auth/sharedsecret.py @@ -37,8 +37,8 @@ import logging log = logging.getLogger(__name__) -def auth(username, sharedsecret, **kwargs): +def auth(username, password): ''' Shared secret authentication ''' - return sharedsecret == __opts__.get('sharedsecret') + return password == __opts__.get('sharedsecret') diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py index fe7cabdffb..f2f4b5a14c 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -57,7 +57,7 @@ from salt.ext import six from salt.ext.six.moves import input # pylint: disable=import-error,redefined-builtin try: import saltwinshell - HAS_WINSHELL = False + HAS_WINSHELL = True except ImportError: HAS_WINSHELL = False from salt.utils.zeromq import zmq @@ -560,6 +560,19 @@ class SSH(object): self.targets[host][default] = self.defaults[default] if 'host' not in self.targets[host]: self.targets[host]['host'] = host + if self.targets[host].get('winrm') and not HAS_WINSHELL: + returned.add(host) + rets.add(host) + log_msg = 'Please contact sales@saltstack.com for access to the enterprise saltwinshell module.' + log.debug(log_msg) + no_ret = {'fun_args': [], + 'jid': None, + 'return': log_msg, + 'retcode': 1, + 'fun': '', + 'id': host} + yield {host: no_ret} + continue args = ( que, self.opts, diff --git a/salt/cloud/clouds/openstack.py b/salt/cloud/clouds/openstack.py index 5646dc64e2..645b968ab3 100644 --- a/salt/cloud/clouds/openstack.py +++ b/salt/cloud/clouds/openstack.py @@ -708,7 +708,7 @@ def create(vm_): try: ip_address = __utils__['cloud.wait_for_fun']( __query_node, - update_args=(vm_,) + vm_=vm_ ) except (SaltCloudExecutionTimeout, SaltCloudExecutionFailure) as exc: try: diff --git a/salt/config/__init__.py b/salt/config/__init__.py index 6cd1ef2ecf..585930e16b 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -739,6 +739,9 @@ VALID_OPTS = { # Recursively merge lists by aggregating them instead of replacing them. 
'pillar_merge_lists': bool, + # If True, values from included pillar SLS targets will override + 'pillar_includes_override_sls': bool, + # How to merge multiple top files from multiple salt environments # (saltenvs); can be 'merge' or 'same' 'top_file_merging_strategy': six.string_types, @@ -1238,6 +1241,9 @@ DEFAULT_MINION_OPTS = { 'pillarenv': None, 'pillarenv_from_saltenv': False, 'pillar_opts': False, + 'pillar_source_merging_strategy': 'smart', + 'pillar_merge_lists': False, + 'pillar_includes_override_sls': False, # ``pillar_cache``, ``pillar_cache_ttl`` and ``pillar_cache_backend`` # are not used on the minion but are unavoidably in the code path 'pillar_cache': False, @@ -1616,6 +1622,7 @@ DEFAULT_MASTER_OPTS = { 'pillar_safe_render_error': True, 'pillar_source_merging_strategy': 'smart', 'pillar_merge_lists': False, + 'pillar_includes_override_sls': False, 'pillar_cache': False, 'pillar_cache_ttl': 3600, 'pillar_cache_backend': 'disk', diff --git a/salt/engines/libvirt_events.py b/salt/engines/libvirt_events.py index ca773ef5b5..0977efc61f 100644 --- a/salt/engines/libvirt_events.py +++ b/salt/engines/libvirt_events.py @@ -214,7 +214,8 @@ def _salt_send_domain_event(opaque, conn, domain, event, event_data): data = { 'domain': { 'name': domain.name(), - 'id': domain.ID() + 'id': domain.ID(), + 'uuid': domain.UUIDString() }, 'event': event } @@ -228,7 +229,8 @@ def _domain_event_lifecycle_cb(conn, domain, event, detail, opaque): ''' event_str, detail_str = _get_domain_event_detail(event, detail) - _salt_send_domain_event(opaque, conn, domain, event_str, { + _salt_send_domain_event(opaque, conn, domain, opaque['event'], { + 'event': event_str, 'detail': detail_str }) @@ -468,7 +470,10 @@ def _network_event_lifecycle_cb(conn, net, event, detail, opaque): ''' _salt_send_event(opaque, conn, { - 'network': net.name(), + 'network': { + 'name': net.name(), + 'uuid': net.UUIDString() + }, 'event': _get_libvirt_enum_string('VIR_NETWORK_EVENT_', event), 'detail': 'unknown' # currently unused }) @@ -479,7 +484,10 @@ def _pool_event_lifecycle_cb(conn, pool, event, detail, opaque): Storage pool lifecycle events handler ''' _salt_send_event(opaque, conn, { - 'pool': pool.name(), + 'pool': { + 'name': pool.name(), + 'uuid': pool.UUIDString() + }, 'event': _get_libvirt_enum_string('VIR_STORAGE_POOL_EVENT_', event), 'detail': 'unknown' # currently unused }) @@ -490,7 +498,10 @@ def _pool_event_refresh_cb(conn, pool, opaque): Storage pool refresh events handler ''' _salt_send_event(opaque, conn, { - 'pool': pool.name(), + 'pool': { + 'name': pool.name(), + 'uuid': pool.UUIDString() + }, 'event': opaque['event'] }) @@ -500,7 +511,9 @@ def _nodedev_event_lifecycle_cb(conn, dev, event, detail, opaque): Node device lifecycle events handler ''' _salt_send_event(opaque, conn, { - 'nodedev': dev.name(), + 'nodedev': { + 'name': dev.name() + }, 'event': _get_libvirt_enum_string('VIR_NODE_DEVICE_EVENT_', event), 'detail': 'unknown' # currently unused }) @@ -511,7 +524,9 @@ def _nodedev_event_update_cb(conn, dev, opaque): Node device update events handler ''' _salt_send_event(opaque, conn, { - 'nodedev': dev.name(), + 'nodedev': { + 'name': dev.name() + }, 'event': opaque['event'] }) @@ -521,7 +536,9 @@ def _secret_event_lifecycle_cb(conn, secret, event, detail, opaque): Secret lifecycle events handler ''' _salt_send_event(opaque, conn, { - 'secret': secret.UUIDString(), + 'secret': { + 'uuid': secret.UUIDString() + }, 'event': _get_libvirt_enum_string('VIR_SECRET_EVENT_', event), 'detail': 'unknown' # 
currently unused }) @@ -532,7 +549,9 @@ def _secret_event_value_changed_cb(conn, secret, opaque): Secret value change events handler ''' _salt_send_event(opaque, conn, { - 'secret': secret.UUIDString(), + 'secret': { + 'uuid': secret.UUIDString() + }, 'event': opaque['event'] }) diff --git a/salt/grains/core.py b/salt/grains/core.py index d1f29a3891..44ef31883f 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -2449,7 +2449,30 @@ def get_server_id(): if salt.utils.platform.is_proxy(): return {} - return {'server_id': abs(hash(__opts__.get('id', '')) % (2 ** 31))} + id_ = __opts__.get('id', '') + id_hash = None + py_ver = sys.version_info[:2] + if py_ver >= (3, 3): + # Python 3.3 enabled hash randomization, so we need to shell out to get + # a reliable hash. + py_bin = 'python{0}.{1}'.format(*py_ver) + id_hash = __salt__['cmd.run']( + [py_bin, '-c', 'print(hash("{0}"))'.format(id_)], + env={'PYTHONHASHSEED': '0'} + ) + try: + id_hash = int(id_hash) + except (TypeError, ValueError): + log.debug( + 'Failed to hash the ID to get the server_id grain. Result of ' + 'hash command: %s', id_hash + ) + id_hash = None + if id_hash is None: + # Python < 3.3 or error encountered above + id_hash = hash(id_) + + return {'server_id': abs(id_hash % (2 ** 31))} def get_master(): diff --git a/salt/grains/metadata.py b/salt/grains/metadata.py index d75c827789..c452fa8274 100644 --- a/salt/grains/metadata.py +++ b/salt/grains/metadata.py @@ -48,9 +48,11 @@ def _search(prefix="latest/"): Recursively look up all grains in the metadata server ''' ret = {} - linedata = http.query(os.path.join(HOST, prefix)) + linedata = http.query(os.path.join(HOST, prefix), headers=True) if 'body' not in linedata: return ret + if linedata['headers'].get('Content-Type', 'text/plain') == 'application/octet-stream': + return linedata['body'] for line in linedata['body'].split('\n'): if line.endswith('/'): ret[line[:-1]] = _search(prefix=os.path.join(prefix, line)) diff --git a/salt/loader.py b/salt/loader.py index 3fbc062c89..56b4b038ea 100644 --- a/salt/loader.py +++ b/salt/loader.py @@ -14,6 +14,7 @@ import logging import inspect import tempfile import functools +import threading import types from collections import MutableMapping from zipimport import zipimporter @@ -1124,7 +1125,8 @@ class LazyLoader(salt.utils.lazy.LazyDict): ) ) - self.refresh_file_mapping() + self._lock = threading.RLock() + self._refresh_file_mapping() super(LazyLoader, self).__init__() # late init the lazy loader # create all of the import namespaces @@ -1191,7 +1193,7 @@ class LazyLoader(salt.utils.lazy.LazyDict): else: return '\'{0}\' __virtual__ returned False'.format(mod_name) - def refresh_file_mapping(self): + def _refresh_file_mapping(self): ''' refresh the mapping of the FS on disk ''' @@ -1308,15 +1310,16 @@ class LazyLoader(salt.utils.lazy.LazyDict): ''' Clear the dict ''' - super(LazyLoader, self).clear() # clear the lazy loader - self.loaded_files = set() - self.missing_modules = {} - self.loaded_modules = {} - # if we have been loaded before, lets clear the file mapping since - # we obviously want a re-do - if hasattr(self, 'opts'): - self.refresh_file_mapping() - self.initial_load = False + with self._lock: + super(LazyLoader, self).clear() # clear the lazy loader + self.loaded_files = set() + self.missing_modules = {} + self.loaded_modules = {} + # if we have been loaded before, lets clear the file mapping since + # we obviously want a re-do + if hasattr(self, 'opts'): + self._refresh_file_mapping() + self.initial_load = False def 
__prep_mod_opts(self, opts): ''' @@ -1520,14 +1523,14 @@ class LazyLoader(salt.utils.lazy.LazyDict): virtual_funcs_to_process = ['__virtual__'] + self.virtual_funcs for virtual_func in virtual_funcs_to_process: virtual_ret, module_name, virtual_err, virtual_aliases = \ - self.process_virtual(mod, module_name, virtual_func) + self._process_virtual(mod, module_name, virtual_func) if virtual_err is not None: log.trace( 'Error loading %s.%s: %s', self.tag, module_name, virtual_err ) - # if process_virtual returned a non-True value then we are + # if _process_virtual returned a non-True value then we are # supposed to not process this module if virtual_ret is not True and module_name not in self.missing_modules: # If a module has information about why it could not be loaded, record it @@ -1619,39 +1622,42 @@ class LazyLoader(salt.utils.lazy.LazyDict): if '.' not in key: raise KeyError('The key \'%s\' should contain a \'.\'', key) mod_name, _ = key.split('.', 1) - if mod_name in self.missing_modules: - return True - # if the modulename isn't in the whitelist, don't bother - if self.whitelist and mod_name not in self.whitelist: - raise KeyError + with self._lock: + # It is possible that the key is in the dictionary after + # acquiring the lock due to another thread loading it. + if mod_name in self.missing_modules or key in self._dict: + return True + # if the modulename isn't in the whitelist, don't bother + if self.whitelist and mod_name not in self.whitelist: + raise KeyError - def _inner_load(mod_name): - for name in self._iter_files(mod_name): - if name in self.loaded_files: - continue - # if we got what we wanted, we are done - if self._load_module(name) and key in self._dict: - return True - return False + def _inner_load(mod_name): + for name in self._iter_files(mod_name): + if name in self.loaded_files: + continue + # if we got what we wanted, we are done + if self._load_module(name) and key in self._dict: + return True + return False - # try to load the module - ret = None - reloaded = False - # re-scan up to once, IOErrors or a failed load cause re-scans of the - # filesystem - while True: - try: - ret = _inner_load(mod_name) - if not reloaded and ret is not True: - self.refresh_file_mapping() - reloaded = True + # try to load the module + ret = None + reloaded = False + # re-scan up to once, IOErrors or a failed load cause re-scans of the + # filesystem + while True: + try: + ret = _inner_load(mod_name) + if not reloaded and ret is not True: + self._refresh_file_mapping() + reloaded = True + continue + break + except IOError: + if not reloaded: + self._refresh_file_mapping() + reloaded = True continue - break - except IOError: - if not reloaded: - self.refresh_file_mapping() - reloaded = True - continue return ret @@ -1659,16 +1665,18 @@ class LazyLoader(salt.utils.lazy.LazyDict): ''' Load all of them ''' - for name in self.file_mapping: - if name in self.loaded_files or name in self.missing_modules: - continue - self._load_module(name) + with self._lock: + for name in self.file_mapping: + if name in self.loaded_files or name in self.missing_modules: + continue + self._load_module(name) - self.loaded = True + self.loaded = True def reload_modules(self): - self.loaded_files = set() - self._load_all() + with self._lock: + self.loaded_files = set() + self._load_all() def _apply_outputter(self, func, mod): ''' @@ -1679,7 +1687,7 @@ class LazyLoader(salt.utils.lazy.LazyDict): if func.__name__ in outp: func.__outputter__ = outp[func.__name__] - def process_virtual(self, mod, module_name, 
virtual_func='__virtual__'): + def _process_virtual(self, mod, module_name, virtual_func='__virtual__'): ''' Given a loaded module and its default name determine its virtual name diff --git a/salt/minion.py b/salt/minion.py index 0ed7b9fc92..c2ebe805a9 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -2524,6 +2524,7 @@ class Minion(MinionBase): self.opts, self.functions, self.returners, + utils=self.utils, cleanup=[master_event(type='alive')]) try: diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py index 581a24ae82..c972972d60 100644 --- a/salt/modules/cmdmod.py +++ b/salt/modules/cmdmod.py @@ -437,10 +437,16 @@ def _run(cmd, if runas or group: try: # Getting the environment for the runas user + # Use markers to thwart any stdout noise # There must be a better way to do this. + import uuid + marker = '<<<' + str(uuid.uuid4()) + '>>>' + marker_b = marker.encode(__salt_system_encoding__) py_code = ( 'import sys, os, itertools; ' - 'sys.stdout.write(\"\\0\".join(itertools.chain(*os.environ.items())))' + 'sys.stdout.write(\"' + marker + '\"); ' + 'sys.stdout.write(\"\\0\".join(itertools.chain(*os.environ.items()))); ' + 'sys.stdout.write(\"' + marker + '\");' ) if use_sudo or __grains__['os'] in ['MacOS', 'Darwin']: @@ -466,11 +472,34 @@ def _run(cmd, env_cmd = ('su', '-s', shell, '-', runas, '-c', sys.executable) msg = 'env command: {0}'.format(env_cmd) log.debug(log_callback(msg)) - env_bytes = salt.utils.stringutils.to_bytes(subprocess.Popen( + + env_bytes, env_encoded_err = subprocess.Popen( env_cmd, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE - ).communicate(salt.utils.stringutils.to_bytes(py_code))[0]) + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + stdin=subprocess.PIPE + ).communicate(salt.utils.stringutils.to_bytes(py_code)) + marker_count = env_bytes.count(marker_b) + if marker_count == 0: + # Possibly PAM prevented the login + log.error( + 'Environment could not be retrieved for user \'%s\': ' + 'stderr=%r stdout=%r', + runas, env_encoded_err, env_bytes + ) + # Ensure that we get an empty env_runas dict below since we + # were not able to get the environment. 
+ env_bytes = b'' + elif marker_count != 2: + raise CommandExecutionError( + 'Environment could not be retrieved for user \'{0}\'', + info={'stderr': repr(env_encoded_err), + 'stdout': repr(env_bytes)} + ) + else: + # Strip the marker + env_bytes = env_bytes.split(marker_b)[1] + if six.PY2: import itertools env_runas = dict(itertools.izip(*[iter(env_bytes.split(b'\0'))]*2)) @@ -488,10 +517,11 @@ def _run(cmd, if env_runas.get('USER') != runas: env_runas['USER'] = runas env = env_runas - except ValueError: + except ValueError as exc: + log.exception('Error raised retrieving environment for user %s', runas) raise CommandExecutionError( - 'Environment could not be retrieved for User \'{0}\''.format( - runas + 'Environment could not be retrieved for user \'{0}\': {1}'.format( + runas, exc ) ) @@ -630,7 +660,7 @@ def _run(cmd, ret['retcode'] = 1 return ret - if output_encoding is not None: + if output_loglevel != 'quiet' and output_encoding is not None: log.debug('Decoding output from command %s using %s encoding', cmd, output_encoding) @@ -646,10 +676,11 @@ def _run(cmd, proc.stdout, encoding=output_encoding, errors='replace') - log.error( - 'Failed to decode stdout from command %s, non-decodable ' - 'characters have been replaced', cmd - ) + if output_loglevel != 'quiet': + log.error( + 'Failed to decode stdout from command %s, non-decodable ' + 'characters have been replaced', cmd + ) try: err = salt.utils.stringutils.to_unicode( @@ -663,10 +694,11 @@ def _run(cmd, proc.stderr, encoding=output_encoding, errors='replace') - log.error( - 'Failed to decode stderr from command %s, non-decodable ' - 'characters have been replaced', cmd - ) + if output_loglevel != 'quiet': + log.error( + 'Failed to decode stderr from command %s, non-decodable ' + 'characters have been replaced', cmd + ) if rstrip: if out is not None: diff --git a/salt/modules/debbuild.py b/salt/modules/debbuild.py index aa105b0dc3..18cc47f3b7 100644 --- a/salt/modules/debbuild.py +++ b/salt/modules/debbuild.py @@ -426,6 +426,9 @@ def build(runas, # use default /var/cache/pbuilder/result results_dir = '/var/cache/pbuilder/result' + ## ensure clean + __salt__['cmd.run']('rm -fR {0}'.format(results_dir)) + # dscs should only contain salt orig and debian tarballs and dsc file for dsc in dscs: afile = os.path.basename(dsc) @@ -436,10 +439,10 @@ def build(runas, try: __salt__['cmd.run']('chown {0} -R {1}'.format(runas, dbase)) - cmd = 'pbuilder --update --override-config' + cmd = 'pbuilder update --override-config' __salt__['cmd.run'](cmd, runas=runas, python_shell=True) - cmd = 'pbuilder --build {0}'.format(dsc) + cmd = 'pbuilder build --debbuildopts "-sa" {0}'.format(dsc) __salt__['cmd.run'](cmd, runas=runas, python_shell=True) # ignore local deps generated package file diff --git a/salt/modules/git.py b/salt/modules/git.py index 59a1c64bc9..f44a6fb40a 100644 --- a/salt/modules/git.py +++ b/salt/modules/git.py @@ -221,9 +221,6 @@ def _git_run(command, cwd=None, user=None, password=None, identity=None, 'cmd.run_all', and used as an alternative to 'cmd.run_all'. Some commands don't return proper retcodes, so this can't replace 'cmd.run_all'. ''' - if salt.utils.platform.is_windows() and output_encoding is None: - output_encoding = 'utf-8' - env = {} if identity: @@ -516,21 +513,9 @@ def add(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. 
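Stepping back to the ``cmdmod`` change earlier in this patch: the marker technique it introduces is easy to demonstrate in isolation. The sketch below is a standalone approximation, not the Salt code itself; it spawns a child interpreter, wraps the NUL-delimited environment dump in a unique marker, and discards anything (PAM banners, shell-profile noise, and so on) that lands outside the markers:

.. code-block:: python

    import subprocess
    import sys
    import uuid

    marker = '<<<' + str(uuid.uuid4()) + '>>>'
    py_code = (
        'import sys, os, itertools; '
        'sys.stdout.write("' + marker + '"); '
        'sys.stdout.write("\\0".join(itertools.chain(*os.environ.items()))); '
        'sys.stdout.write("' + marker + '")'
    )
    out = subprocess.Popen(
        [sys.executable, '-c', py_code],
        stdout=subprocess.PIPE,
    ).communicate()[0]

    # Only the text between the two markers is treated as the environment
    # dump; anything printed before or after it is ignored.
    payload = out.split(marker.encode())[1]
    pairs = payload.split(b'\0')
    env = dict(zip(pairs[::2], pairs[1::2]))
    print(sorted(env)[:5])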
The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -664,21 +649,9 @@ def archive(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -790,21 +763,9 @@ def branch(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -903,21 +864,9 @@ def checkout(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. 
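In the same spirit, the ``LazyLoader`` changes earlier in this patch (the new ``threading.RLock`` and the re-check of ``self._dict`` after acquiring it) follow the familiar check/lock/re-check pattern. A simplified, self-contained sketch of that pattern, not the loader code itself:

.. code-block:: python

    import threading

    class LazyCache(object):
        '''
        Check without the lock first, then re-check under the lock, so two
        threads racing on the same key cannot both run the expensive load
        or clobber a load already in progress.
        '''
        def __init__(self, loader):
            self._data = {}
            self._loader = loader
            self._lock = threading.RLock()

        def get(self, key):
            if key in self._data:          # fast path, no locking
                return self._data[key]
            with self._lock:
                if key in self._data:      # another thread loaded it first
                    return self._data[key]
                self._data[key] = self._loader(key)
                return self._data[key]

    cache = LazyCache(lambda key: key.upper())   # stand-in for a module load
    print(cache.get('test.ping'))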
- - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1069,21 +1018,9 @@ def clone(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1210,21 +1147,9 @@ def commit(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1309,21 +1234,9 @@ def config_get(key, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1420,21 +1333,9 @@ def config_get_regexp(key, cases. .. 
note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1545,21 +1446,9 @@ def config_set(key, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1696,21 +1585,9 @@ def config_unset(key, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1819,21 +1696,9 @@ def current_branch(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. 
- Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -1891,21 +1756,9 @@ def describe(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2015,21 +1868,9 @@ def diff(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2115,6 +1956,64 @@ def diff(cwd, output_encoding=output_encoding)['stdout'] +def discard_local_changes(cwd, + path='.', + user=None, + password=None, + ignore_retcode=False, + output_encoding=None): + ''' + .. versionadded:: Fluorine + + Runs a ``git checkout -- `` from the directory specified by ``cwd``. + + cwd + The path to the git checkout + + path + path relative to cwd (defaults to ``.``) + + user + User under which to run the git command. By default, the command is run + by the user under which the minion is running. + + password + Windows only. Required when specifying ``user``. This parameter will be + ignored on non-Windows platforms. + + ignore_retcode : False + If ``True``, do not log an error to the minion log if the git command + returns a nonzero exit status. + + output_encoding + Use this option to specify which encoding to use to decode the output + from any git commands which are run. 
This should not be needed in most + cases. + + .. note:: + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. + + CLI Example: + + .. code-block:: bash + + salt myminion git.discard_local_changes /path/to/repo + salt myminion git.discard_local_changes /path/to/repo path=foo + ''' + cwd = _expand_path(cwd, user) + command = ['git', 'checkout', '--', path] + # Checkout message goes to stderr + return _git_run(command, + cwd=cwd, + user=user, + password=password, + ignore_retcode=ignore_retcode, + redirect_stderr=True, + output_encoding=output_encoding)['stdout'] + + def fetch(cwd, remote=None, force=False, @@ -2222,21 +2121,9 @@ def fetch(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2382,21 +2269,9 @@ def init(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2471,21 +2346,9 @@ def is_worktree(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. 
the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2571,21 +2434,9 @@ def list_branches(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2642,21 +2493,9 @@ def list_tags(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2730,21 +2569,9 @@ def list_worktrees(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3116,21 +2943,9 @@ def ls_remote(cwd=None, cases. .. 
note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3237,21 +3052,9 @@ def merge(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3390,21 +3193,9 @@ def merge_base(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3559,21 +3350,9 @@ def merge_tree(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. 
- Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3689,21 +3468,9 @@ def pull(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3827,21 +3594,9 @@ def push(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3935,21 +3690,9 @@ def rebase(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). 
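One more aside on an earlier hunk: the ``get_server_id`` change in ``salt/grains/core.py`` above works around hash randomization, which Python 3.3+ enables by default and which would otherwise yield a different ``server_id`` on every minion restart. A standalone sketch of the same idea, with a made-up minion ID standing in for ``__opts__['id']``:

.. code-block:: python

    import subprocess
    import sys

    minion_id = 'web01.example.com'   # example value only

    if sys.version_info >= (3, 3):
        # hash() of a str changes between interpreter runs once hash
        # randomization is enabled, so compute it in a child process with
        # PYTHONHASHSEED pinned to get a stable number.
        out = subprocess.check_output(
            [sys.executable, '-c', 'print(hash("{0}"))'.format(minion_id)],
            env={'PYTHONHASHSEED': '0'},
        )
        id_hash = int(out)
    else:
        id_hash = hash(minion_id)

    print({'server_id': abs(id_hash % (2 ** 31))})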
+ This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4032,21 +3775,9 @@ def remote_get(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4151,21 +3882,9 @@ def remote_refs(url, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4275,21 +3994,9 @@ def remote_set(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4402,21 +4109,9 @@ def remotes(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. 
The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4511,21 +4206,9 @@ def reset(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4609,21 +4292,9 @@ def rev_parse(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4701,21 +4372,9 @@ def revision(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. 
- - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4799,21 +4458,9 @@ def rm_(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4893,21 +4540,9 @@ def stash(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -4970,21 +4605,9 @@ def status(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -5125,21 +4748,9 @@ def submodule(cwd, cases. .. 
note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -5252,21 +4863,9 @@ def symbolic_ref(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -5447,21 +5046,9 @@ def worktree_add(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -5593,21 +5180,9 @@ def worktree_prune(cwd, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. 
- Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -5672,21 +5247,9 @@ def worktree_rm(cwd, user=None, output_encoding=None): cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 diff --git a/salt/modules/localemod.py b/salt/modules/localemod.py index e97af32647..066d85d0ca 100644 --- a/salt/modules/localemod.py +++ b/salt/modules/localemod.py @@ -87,7 +87,7 @@ def _localectl_status(): ret[ctl_key] = {} ret[ctl_key][loc_set[0]] = loc_set[1] else: - ret[ctl_key] = ctl_data + ret[ctl_key] = {'data': ctl_data} if not ret: log.debug("Unable to find any locale information inside the following data:\n%s", locale_ctl_out) raise CommandExecutionError('Unable to parse result of "localectl"') diff --git a/salt/modules/mysql.py b/salt/modules/mysql.py index 0625b02a96..833a766a97 100644 --- a/salt/modules/mysql.py +++ b/salt/modules/mysql.py @@ -1691,11 +1691,11 @@ def __grant_generate(grant, table = db_part[2] if escape: - if dbc is not '*': + if dbc != '*': # _ and % are authorized on GRANT queries and should get escaped # on the db name, but only if not requesting a table level grant - dbc = quote_identifier(dbc, for_grants=(table is '*')) - if table is not '*': + dbc = quote_identifier(dbc, for_grants=(table == '*')) + if table != '*': table = quote_identifier(table) # identifiers cannot be used as values, and same thing for grants qry = 'GRANT {0} ON {1}.{2} TO %(user)s@%(host)s'.format(grant, dbc, table) @@ -1790,8 +1790,10 @@ def grant_exists(grant, if not target_tokens: # Avoid the overhead of re-calc in loop target_tokens = _grant_to_tokens(target) grant_tokens = _grant_to_tokens(grant) + grant_tokens_database = grant_tokens['database'].replace('"', '').replace('\\', '').replace('`', '') + target_tokens_database = target_tokens['database'].replace('"', '').replace('\\', '').replace('`', '') if grant_tokens['user'] == target_tokens['user'] and \ - grant_tokens['database'] == target_tokens['database'] and \ + grant_tokens_database == target_tokens_database and \ grant_tokens['host'] == target_tokens['host'] and \ set(grant_tokens['grant']) >= set(target_tokens['grant']): return True @@ 
-1893,16 +1895,16 @@ def grant_revoke(grant, db_part = database.rpartition('.') dbc = db_part[0] table = db_part[2] - if dbc is not '*': + if dbc != '*': # _ and % are authorized on GRANT queries and should get escaped # on the db name, but only if not requesting a table level grant - s_database = quote_identifier(dbc, for_grants=(table is '*')) - if dbc is '*': + s_database = quote_identifier(dbc, for_grants=(table == '*')) + if dbc == '*': # add revoke for *.* # before the modification query send to mysql will looks like # REVOKE SELECT ON `*`.* FROM %(user)s@%(host)s s_database = dbc - if table is not '*': + if table != '*': table = quote_identifier(table) # identifiers cannot be used as values, same thing for grants qry = 'REVOKE {0} ON {1}.{2} FROM %(user)s@%(host)s;'.format( diff --git a/salt/modules/nacl.py b/salt/modules/nacl.py index 5a5da2a095..4cf2b33b54 100644 --- a/salt/modules/nacl.py +++ b/salt/modules/nacl.py @@ -161,6 +161,7 @@ import salt.syspaths import salt.utils.files import salt.utils.platform import salt.utils.stringutils +import salt.utils.versions import salt.utils.win_functions import salt.utils.win_dacl @@ -254,7 +255,7 @@ def keygen(sk_file=None, pk_file=None, **kwargs): ''' if 'keyfile' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'keyfile\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk_file\' argument instead.' ) @@ -322,7 +323,7 @@ def enc(data, **kwargs): ''' if 'keyfile' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'keyfile\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk_file\' argument instead.' ) @@ -330,7 +331,7 @@ def enc(data, **kwargs): if 'key' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'key\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk\' argument instead.' ) @@ -388,7 +389,7 @@ def dec(data, **kwargs): ''' if 'keyfile' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'keyfile\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk_file\' argument instead.' ) @@ -399,7 +400,7 @@ def dec(data, **kwargs): if 'key' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'key\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk\' argument instead.' 
) diff --git a/salt/modules/npm.py b/salt/modules/npm.py index 86f9fe86a1..76fb0d0344 100644 --- a/salt/modules/npm.py +++ b/salt/modules/npm.py @@ -159,7 +159,7 @@ def install(pkg=None, if runas: uid = salt.utils.user.get_uid(runas) if uid: - env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''}) + env.update({'SUDO_UID': uid, 'SUDO_USER': ''}) cmd = ' '.join(cmd) result = __salt__['cmd.run_all'](cmd, python_shell=True, cwd=dir, runas=runas, env=env) @@ -238,7 +238,7 @@ def uninstall(pkg, dir=None, runas=None, env=None): if runas: uid = salt.utils.user.get_uid(runas) if uid: - env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''}) + env.update({'SUDO_UID': uid, 'SUDO_USER': ''}) cmd = ['npm', 'uninstall', '"{0}"'.format(pkg)] if not dir: @@ -297,7 +297,7 @@ def list_(pkg=None, dir=None, runas=None, env=None, depth=None): if runas: uid = salt.utils.user.get_uid(runas) if uid: - env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''}) + env.update({'SUDO_UID': uid, 'SUDO_USER': ''}) cmd = ['npm', 'list', '--json', '--silent'] @@ -360,7 +360,7 @@ def cache_clean(path=None, runas=None, env=None, force=False): if runas: uid = salt.utils.user.get_uid(runas) if uid: - env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''}) + env.update({'SUDO_UID': uid, 'SUDO_USER': ''}) cmd = ['npm', 'cache', 'clean'] if path: @@ -407,7 +407,7 @@ def cache_list(path=None, runas=None, env=None): if runas: uid = salt.utils.user.get_uid(runas) if uid: - env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''}) + env.update({'SUDO_UID': uid, 'SUDO_USER': ''}) cmd = ['npm', 'cache', 'ls'] if path: @@ -447,7 +447,7 @@ def cache_path(runas=None, env=None): if runas: uid = salt.utils.user.get_uid(runas) if uid: - env.update({'SUDO_UID': b'{0}'.format(uid), 'SUDO_USER': b''}) + env.update({'SUDO_UID': uid, 'SUDO_USER': ''}) cmd = 'npm config get cache' diff --git a/salt/modules/pillar.py b/salt/modules/pillar.py index d320e46754..0c704a11f8 100644 --- a/salt/modules/pillar.py +++ b/salt/modules/pillar.py @@ -366,6 +366,28 @@ def item(*args, **kwargs): .. versionadded:: 2015.8.0 + pillarenv + If specified, this function will query the master to generate fresh + pillar data on the fly, specifically from the requested pillar + environment. Note that this can produce different pillar data than + executing this function without an environment, as its normal behavior + is just to return a value from minion's pillar data in memory (which + can be sourced from more than one pillar environment). + + Using this argument will not affect the pillar data in memory. It will + however be slightly slower and use more resources on the master due to + the need for the master to generate and send the minion fresh pillar + data. This tradeoff in performance however allows for the use case + where pillar data is desired only from a single environment. + + .. versionadded:: 2017.7.6,2018.3.1 + + saltenv + Included only for compatibility with + :conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored. + + .. versionadded:: 2017.7.6,2018.3.1 + CLI Examples: .. 
code-block:: bash @@ -377,11 +399,17 @@ def item(*args, **kwargs): ret = {} default = kwargs.get('default', '') delimiter = kwargs.get('delimiter', DEFAULT_TARGET_DELIM) + pillarenv = kwargs.get('pillarenv', None) + saltenv = kwargs.get('saltenv', None) + + pillar_dict = __pillar__ \ + if all(x is None for x in (saltenv, pillarenv)) \ + else items(saltenv=saltenv, pillarenv=pillarenv) try: for arg in args: ret[arg] = salt.utils.data.traverse_dict_and_list( - __pillar__, + pillar_dict, arg, default, delimiter) diff --git a/salt/modules/sensehat.py b/salt/modules/sensehat.py index b879303ff7..e9e1a7d6a0 100644 --- a/salt/modules/sensehat.py +++ b/salt/modules/sensehat.py @@ -25,7 +25,6 @@ import logging try: from sense_hat import SenseHat - _sensehat = SenseHat() has_sense_hat = True except (ImportError, NameError): _sensehat = None @@ -39,15 +38,20 @@ def __virtual__(): Only load the module if SenseHat is available ''' if has_sense_hat: + try: + _sensehat = SenseHat() + except OSError: + return False, 'This module can only be used on a Raspberry Pi with a SenseHat.' + rotation = __salt__['pillar.get']('sensehat:rotation', 0) if rotation in [0, 90, 180, 270]: _sensehat.set_rotation(rotation, False) else: - log.error("%s is not a valid rotation. Using default rotation.", + log.error('%s is not a valid rotation. Using default rotation.', rotation) return True - else: - return False, "The SenseHat excecution module can not be loaded: SenseHat unavailable.\nThis module can only be used on a Raspberry Pi with a SenseHat. Also make sure that the sense_hat python library is installed!" + + return False, 'The SenseHat execution module cannot be loaded: \'sense_hat\' python library unavailable.' def set_pixels(pixels): diff --git a/salt/modules/state.py b/salt/modules/state.py index d17515747b..1c0d01007b 100644 --- a/salt/modules/state.py +++ b/salt/modules/state.py @@ -2344,9 +2344,10 @@ def event(tagmatch='*', ) sys.stdout.flush() - if count > -1: + if count > 0: count -= 1 log.debug('Remaining event matches: %s', count) + if count == 0: break else: diff --git a/salt/modules/vault.py b/salt/modules/vault.py index cb1277b2e9..50cc94a206 100644 --- a/salt/modules/vault.py +++ b/salt/modules/vault.py @@ -108,10 +108,6 @@ Functions to interact with Hashicorp Vault. from __future__ import absolute_import, print_function, unicode_literals import logging -# Import Salt libs -import salt.crypt -import salt.exceptions - log = logging.getLogger(__name__) @@ -146,7 +142,7 @@ def read_secret(path, key=None): return data except Exception as err: log.error('Failed to read secret! %s: %s', type(err).__name__, err) - raise salt.exceptions.CommandExecutionError(err) + return None def write_secret(path, **kwargs): @@ -169,7 +165,7 @@ def write_secret(path, **kwargs): return True except Exception as err: log.error('Failed to write secret! %s: %s', type(err).__name__, err) - raise salt.exceptions.CommandExecutionError(err) + return False def delete_secret(path): @@ -191,7 +187,7 @@ def delete_secret(path): return True except Exception as err: log.error('Failed to delete secret! %s: %s', type(err).__name__, err) - raise salt.exceptions.CommandExecutionError(err) + return False def list_secrets(path): @@ -214,4 +210,4 @@ def list_secrets(path): return response.json()['data'] except Exception as err: log.error('Failed to list secrets! 
%s: %s', type(err).__name__, err) - raise salt.exceptions.CommandExecutionError(err) + return None diff --git a/salt/modules/win_dism.py b/salt/modules/win_dism.py index c2ce283fd9..a5f7a97978 100644 --- a/salt/modules/win_dism.py +++ b/salt/modules/win_dism.py @@ -430,16 +430,27 @@ def add_package(package, Install a package using DISM Args: - package (str): The package to install. Can be a .cab file, a .msu file, - or a folder - ignore_check (Optional[bool]): Skip installation of the package if the - applicability checks fail - prevent_pending (Optional[bool]): Skip the installation of the package - if there are pending online actions - image (Optional[str]): The path to the root directory of an offline - Windows image. If `None` is passed, the running operating system is - targeted. Default is None. - restart (Optional[bool]): Reboot the machine if required by the install + package (str): + The package to install. Can be a .cab file, a .msu file, or a folder + + .. note:: + An `.msu` package is supported only when the target image is + offline, either mounted or applied. + + ignore_check (Optional[bool]): + Skip installation of the package if the applicability checks fail + + prevent_pending (Optional[bool]): + Skip the installation of the package if there are pending online + actions + + image (Optional[str]): + The path to the root directory of an offline Windows image. If + ``None`` is passed, the running operating system is targeted. + Default is None. + + restart (Optional[bool]): + Reboot the machine if required by the install Returns: dict: A dictionary containing the results of the command diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py index cbf4cc079b..cfefa30cf5 100644 --- a/salt/modules/win_lgpo.py +++ b/salt/modules/win_lgpo.py @@ -3440,7 +3440,7 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element, this_element_value = b''.join([this_element_value.encode('utf-16-le'), encoded_null]) elif etree.QName(element).localname == 'multiText': - this_vtype = 'REG_MULTI_SZ' + this_vtype = 'REG_MULTI_SZ' if not check_deleted else 'REG_SZ' if this_element_value is not None: this_element_value = '{0}{1}{1}'.format(chr(0).join(this_element_value), chr(0)) elif etree.QName(element).localname == 'list': @@ -3449,7 +3449,7 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element, element_valuenames = [] element_values = this_element_value if this_element_value is not None: - element_valuenames = list(range(1, len(this_element_value) + 1)) + element_valuenames = list([str(z) for z in range(1, len(this_element_value) + 1)]) if 'additive' in element.attrib: if element.attrib['additive'].lower() == 'false': # a delete values will be added before all the other @@ -3474,11 +3474,18 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element, if this_element_value is not None: element_valuenames = this_element_value.keys() element_values = this_element_value.values() - - if 'valuePrefix' in element.attrib and element.attrib['valuePrefix'] != '': - if this_element_value is not None: - element_valuenames = ['{0}{1}'.format(element.attrib['valuePrefix'], - k) for k in element_valuenames] + if 'valuePrefix' in element.attrib: + # if the valuePrefix attribute exists, the valuenames are + # most prefixes attributes are empty in the admx files, so the valuenames + # end up being just numbers + if element.attrib['valuePrefix'] != '': + if this_element_value is not None: + element_valuenames = 
['{0}{1}'.format(element.attrib['valuePrefix'], + k) for k in element_valuenames] + else: + # if there is no valuePrefix attribute, the valuename is the value + if element_values is not None: + element_valuenames = [str(z) for z in element_values] if not check_deleted: if this_element_value is not None: log.debug('_processValueItem has an explicit ' diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py index 5dad3b061d..5aa356c975 100644 --- a/salt/modules/win_pkg.py +++ b/salt/modules/win_pkg.py @@ -1882,7 +1882,7 @@ def get_repo_data(saltenv='base'): serial = salt.payload.Serial(__opts__) with salt.utils.files.fopen(repo_details.winrepo_file, 'rb') as repofile: try: - repodata = salt.utils.data.decode(serial.loads(repofile.read(), encoding='utf-8') or {}) + repodata = salt.utils.data.decode(serial.loads(repofile.read()) or {}) __context__['winrepo.data'] = repodata return repodata except Exception as exc: diff --git a/salt/modules/x509.py b/salt/modules/x509.py index d1c0bf2892..cafbbe8cc7 100644 --- a/salt/modules/x509.py +++ b/salt/modules/x509.py @@ -20,6 +20,7 @@ import tempfile import re import datetime import ast +import sys # Import salt libs import salt.utils.files @@ -132,8 +133,8 @@ def _new_extension(name, value, critical=0, issuer=None, _pyfree=1): 'value must be precomputed hash') # ensure name and value are bytes - name = salt.utils.stringutils.to_bytes(name) - value = salt.utils.stringutils.to_bytes(value) + name = salt.utils.stringutils.to_str(name) + value = salt.utils.stringutils.to_str(value) try: ctx = M2Crypto.m2.x509v3_set_nconf() @@ -320,9 +321,9 @@ def _text_or_file(input_): ''' if os.path.isfile(input_): with salt.utils.files.fopen(input_) as fp_: - return salt.utils.stringutils.to_bytes(fp_.read()) + return salt.utils.stringutils.to_str(fp_.read()) else: - return input_ + return salt.utils.stringutils.to_str(input_) def _parse_subject(subject): @@ -497,7 +498,7 @@ def get_pem_entry(text, pem_type=None): ret += pem_body[i:i + 64] + '\n' ret += pem_footer + '\n' - return ret.encode('ascii') + return salt.utils.stringutils.to_bytes(ret, encoding='ascii') def get_pem_entries(glob_path): @@ -682,12 +683,12 @@ def get_public_key(key, passphrase=None, asObj=False): if isinstance(key, M2Crypto.X509.X509): rsa = key.get_pubkey().get_rsa() - text = '' + text = b'' else: text = _text_or_file(key) text = get_pem_entry(text) - if text.startswith('-----BEGIN PUBLIC KEY-----'): + if text.startswith(b'-----BEGIN PUBLIC KEY-----'): if not asObj: return text bio = M2Crypto.BIO.MemoryBuffer() @@ -695,14 +696,14 @@ def get_public_key(key, passphrase=None, asObj=False): rsa = M2Crypto.RSA.load_pub_key_bio(bio) bio = M2Crypto.BIO.MemoryBuffer() - if text.startswith('-----BEGIN CERTIFICATE-----'): + if text.startswith(b'-----BEGIN CERTIFICATE-----'): cert = M2Crypto.X509.load_cert_string(text) rsa = cert.get_pubkey().get_rsa() - if text.startswith('-----BEGIN CERTIFICATE REQUEST-----'): + if text.startswith(b'-----BEGIN CERTIFICATE REQUEST-----'): csr = M2Crypto.X509.load_request_string(text) rsa = csr.get_pubkey().get_rsa() - if (text.startswith('-----BEGIN PRIVATE KEY-----') or - text.startswith('-----BEGIN RSA PRIVATE KEY-----')): + if (text.startswith(b'-----BEGIN PRIVATE KEY-----') or + text.startswith(b'-----BEGIN RSA PRIVATE KEY-----')): rsa = M2Crypto.RSA.load_key_string( text, callback=_passphrase_callback(passphrase)) @@ -851,7 +852,7 @@ def create_private_key(path=None, pem_type='(?:RSA )?PRIVATE KEY' ) else: - return bio.read_all() + return 
salt.utils.stringutils.to_str(bio.read_all()) def create_crl( # pylint: disable=too-many-arguments,too-many-locals @@ -1432,7 +1433,14 @@ def create_certificate( if 'serial_number' not in kwargs: kwargs['serial_number'] = _dec2hex( random.getrandbits(kwargs['serial_bits'])) - cert.set_serial_number(int(kwargs['serial_number'].replace(':', ''), 16)) + serial_number = int(kwargs['serial_number'].replace(':', ''), 16) + # With Python3 we occasionally end up with an INT + # that is too large because Python3 no longer supports long INTs. + # If we're larger than the maxsize value + # then we adjust the serial number. + if serial_number > sys.maxsize: + serial_number = serial_number - sys.maxsize + cert.set_serial_number(serial_number) # Set validity dates # pylint: disable=no-member diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index 434dafdbdf..2400dfda42 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -2829,20 +2829,18 @@ def mod_repo(repo, basedir=None, **kwargs): filerepos[repo].update(repo_opts) content = header for stanza in six.iterkeys(filerepos): - comments = '' - if 'comments' in six.iterkeys(filerepos[stanza]): - comments = salt.utils.pkg.rpm.combine_comments( - filerepos[stanza]['comments']) - del filerepos[stanza]['comments'] - content += '\n[{0}]'.format(stanza) + comments = salt.utils.pkg.rpm.combine_comments( + filerepos[stanza].pop('comments', []) + ) + content += '[{0}]\n'.format(stanza) for line in six.iterkeys(filerepos[stanza]): - content += '\n{0}={1}'.format( + content += '{0}={1}\n'.format( line, filerepos[stanza][line] if not isinstance(filerepos[stanza][line], bool) else _bool_to_str(filerepos[stanza][line]) ) - content += '\n{0}\n'.format(comments) + content += comments + '\n' with salt.utils.files.fopen(repofile, 'w') as fileout: fileout.write(salt.utils.stringutils.to_str(content)) @@ -2871,15 +2869,30 @@ def _parse_repo_file(filename): section_dict.pop('__name__', None) config[section] = section_dict - # Try to extract leading comments + # Try to extract header comments, as well as comments for each repo. Read + # from the beginning of the file and assume any leading comments are + # header comments. Continue to read each section header and then find the + # comments for each repo. 
headers = '' - with salt.utils.files.fopen(filename, 'r') as rawfile: - for line in rawfile: + section = None + with salt.utils.files.fopen(filename, 'r') as repofile: + for line in repofile: line = salt.utils.stringutils.to_unicode(line) - if line.strip().startswith('#'): - headers += '{0}\n'.format(line.strip()) - else: - break + line = line.strip() + if line.startswith('#'): + if section is None: + headers += line + '\n' + else: + try: + comments = config[section].setdefault('comments', []) + comments.append(line[1:].lstrip()) + except KeyError: + log.debug( + 'Found comment in %s which does not appear to ' + 'belong to any repo section: %s', filename, line + ) + elif line.startswith('[') and line.endswith(']'): + section = line[1:-1] return (headers, salt.utils.data.decode(config)) diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py index f0d7aaafab..668143bdd9 100644 --- a/salt/modules/zypper.py +++ b/salt/modules/zypper.py @@ -309,7 +309,11 @@ class _Zypper(object): if self.error_msg and not self.__no_raise and not self.__ignore_repo_failure: raise CommandExecutionError('Zypper command failure: {0}'.format(self.error_msg)) - return self._is_xml_mode() and dom.parseString(self.__call_result['stdout']) or self.__call_result['stdout'] + return ( + self._is_xml_mode() and + dom.parseString(salt.utils.stringutils.to_str(self.__call_result['stdout'])) or + self.__call_result['stdout'] + ) __zypper__ = _Zypper() @@ -726,6 +730,123 @@ def list_pkgs(versions_as_list=False, **kwargs): attr) +def list_repo_pkgs(*args, **kwargs): + ''' + .. versionadded:: 2017.7.5,2018.3.1 + + Returns all available packages. Optionally, package names (and name globs) + can be passed and the results will be filtered to packages matching those + names. This is recommended as it speeds up the function considerably. + + This function can be helpful in discovering the version or repo to specify + in a :mod:`pkg.installed ` state. + + The return data will be a dictionary mapping package names to a list of + version numbers, ordered from newest to oldest. If ``byrepo`` is set to + ``True``, then the return dictionary will contain repository names at the + top level, and each repository will map packages to lists of version + numbers. For example: + + .. code-block:: python + + # With byrepo=False (default) + { + 'bash': ['4.3-83.3.1', + '4.3-82.6'], + 'vim': ['7.4.326-12.1'] + } + { + 'OSS': { + 'bash': ['4.3-82.6'], + 'vim': ['7.4.326-12.1'] + }, + 'OSS Update': { + 'bash': ['4.3-83.3.1'] + } + } + + fromrepo : None + Only include results from the specified repo(s). Multiple repos can be + specified, comma-separated. + + byrepo : False + When ``True``, the return data for each package will be organized by + repository. + + CLI Examples: + + .. code-block:: bash + + salt '*' pkg.list_repo_pkgs + salt '*' pkg.list_repo_pkgs foo bar baz + salt '*' pkg.list_repo_pkgs 'python2-*' byrepo=True + salt '*' pkg.list_repo_pkgs 'python2-*' fromrepo='OSS Updates' + ''' + byrepo = kwargs.pop('byrepo', False) + fromrepo = kwargs.pop('fromrepo', '') or '' + ret = {} + + targets = [ + arg if isinstance(arg, six.string_types) else six.text_type(arg) + for arg in args + ] + + def _is_match(pkgname): + ''' + When package names are passed to a zypper search, they will be matched + anywhere in the package name. This makes sure that only exact or + fnmatch matches are identified. + ''' + if not args: + # No package names passed, everyone's a winner! 
+ return True + for target in targets: + if fnmatch.fnmatch(pkgname, target): + return True + return False + + for node in __zypper__.xml.call('se', '-s', *targets).getElementsByTagName('solvable'): + pkginfo = dict(node.attributes.items()) + try: + if pkginfo['kind'] != 'package': + continue + reponame = pkginfo['repository'] + if fromrepo and reponame != fromrepo: + continue + pkgname = pkginfo['name'] + pkgversion = pkginfo['edition'] + except KeyError: + continue + else: + if _is_match(pkgname): + repo_dict = ret.setdefault(reponame, {}) + version_list = repo_dict.setdefault(pkgname, set()) + version_list.add(pkgversion) + + if byrepo: + for reponame in ret: + # Sort versions newest to oldest + for pkgname in ret[reponame]: + sorted_versions = sorted( + [LooseVersion(x) for x in ret[reponame][pkgname]], + reverse=True + ) + ret[reponame][pkgname] = [x.vstring for x in sorted_versions] + return ret + else: + byrepo_ret = {} + for reponame in ret: + for pkgname in ret[reponame]: + byrepo_ret.setdefault(pkgname, []).extend(ret[reponame][pkgname]) + for pkgname in byrepo_ret: + sorted_versions = sorted( + [LooseVersion(x) for x in byrepo_ret[pkgname]], + reverse=True + ) + byrepo_ret[pkgname] = [x.vstring for x in sorted_versions] + return byrepo_ret + + def _get_configured_repos(): ''' Get all the info about repositories from the configurations. @@ -1144,6 +1265,15 @@ def install(name=None, return {} version_num = Wildcard(__zypper__)(name, version) + + if version_num: + if pkgs is None and sources is None: + # Allow "version" to work for single package target + pkg_params = {name: version_num} + else: + log.warning('"version" parameter will be ignored for multiple ' + 'package targets') + if pkg_type == 'repository': targets = [] for param, version_num in six.iteritems(pkg_params): diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py index beb6689e6f..adc1d56b2b 100644 --- a/salt/pillar/__init__.py +++ b/salt/pillar/__init__.py @@ -776,11 +776,22 @@ class Pillar(object): nstate = { key_fragment: nstate } - include_states.append(nstate) + if not self.opts.get('pillar_includes_override_sls', False): + include_states.append(nstate) + else: + state = merge( + state, + nstate, + self.merge_strategy, + self.opts.get('renderer', 'yaml'), + self.opts.get('pillar_merge_lists', False)) if err: errors += err - if include_states: - # merge included state(s) with the current state merged last + + if not self.opts.get('pillar_includes_override_sls', False): + # merge included state(s) with the current state + # merged last to ensure that its values are + # authoritative. include_states.append(state) state = None for s in include_states: diff --git a/salt/proxy/cimc.py b/salt/proxy/cimc.py index 56254157d5..c539559f5c 100644 --- a/salt/proxy/cimc.py +++ b/salt/proxy/cimc.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- ''' +Proxy Minion interface module for managing Cisco Integrated Management Controller devices +========================================================================================= -Proxy Minion interface module for managing Cisco Integrated Management Controller devices. +.. versionadded:: 2018.3.0 :codeauthor: :email:`Spencer Ervin ` :maturity: new @@ -21,6 +23,7 @@ documentation. Configuration ============= + To use this integration proxy module, please configure the following: Pillar @@ -40,6 +43,7 @@ the ID. 
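A minimal pillar sketch for this proxy (the host, username, and password values below are placeholders; the required keys are described in the sections that follow, and ``proxytype`` is assumed to match the module name) might look like:

.. code-block:: yaml

    proxy:
      proxytype: cimc
      host: cimc.example.com
      username: admin
      password: rack_password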
proxytype ^^^^^^^^^ + The ``proxytype`` key and value pair is critical, as it tells Salt which interface to load from the ``proxy`` directory in Salt's install hierarchy, or from ``/srv/salt/_proxy`` on the Salt Master (if you have created your @@ -48,16 +52,18 @@ own proxy module, for example). To use this cimc Proxy Module, set this to host ^^^^ + The location, or ip/dns, of the cimc host. Required. username ^^^^^^^^ + The username used to login to the cimc host. Required. password ^^^^^^^^ -The password used to login to the cimc host. Required. +The password used to login to the cimc host. Required. ''' from __future__ import absolute_import, print_function, unicode_literals diff --git a/salt/proxy/panos.py b/salt/proxy/panos.py index 5e7c9c9689..5071ad7e92 100644 --- a/salt/proxy/panos.py +++ b/salt/proxy/panos.py @@ -1,7 +1,9 @@ # -*- coding: utf-8 -*- ''' +Proxy Minion interface module for managing Palo Alto firewall devices +===================================================================== -Proxy Minion interface module for managing Palo Alto firewall devices. +.. versionadded:: 2018.3.0 :codeauthor: :email:`Spencer Ervin ` :maturity: new @@ -22,6 +24,7 @@ documentation. Configuration ============= + To use this integration proxy module, please configure the following: Pillar @@ -53,6 +56,7 @@ the device with username and password. proxytype ^^^^^^^^^ + The ``proxytype`` key and value pair is critical, as it tells Salt which interface to load from the ``proxy`` directory in Salt's install hierarchy, or from ``/srv/salt/_proxy`` on the Salt Master (if you have created your @@ -61,14 +65,17 @@ own proxy module, for example). To use this panos Proxy Module, set this to host ^^^^ + The location, or ip/dns, of the panos host. Required. username ^^^^^^^^ + The username used to login to the panos host. Required. password ^^^^^^^^ + The password used to login to the panos host. Required. Direct Device (API Key) @@ -88,6 +95,7 @@ instead of username and password. proxytype ^^^^^^^^^ + The ``proxytype`` key and value pair is critical, as it tells Salt which interface to load from the ``proxy`` directory in Salt's install hierarchy, or from ``/srv/salt/_proxy`` on the Salt Master (if you have created your @@ -96,14 +104,16 @@ own proxy module, for example). To use this panos Proxy Module, set this to host ^^^^ + The location, or ip/dns, of the panos host. Required. apikey -^^^^^^^^ +^^^^^^ + The generated XML API key for the panos host. Required. Panorama Pass-Through (Password) ------------------------- +-------------------------------- The Panorama pass-through method sends all connections through the Panorama management system. It passes the connections to the appropriate device using @@ -126,6 +136,7 @@ not the panos device. proxytype ^^^^^^^^^ + The ``proxytype`` key and value pair is critical, as it tells Salt which interface to load from the ``proxy`` directory in Salt's install hierarchy, or from ``/srv/salt/_proxy`` on the Salt Master (if you have created your @@ -134,22 +145,26 @@ own proxy module, for example). To use this panos Proxy Module, set this to serial ^^^^^^ + The serial number of the panos host. Required. host ^^^^ + The location, or ip/dns, of the Panorama server. Required. username ^^^^^^^^ + The username used to login to the Panorama server. Required. password ^^^^^^^^ + The password used to login to the Panorama server. Required. 
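A pillar sketch for this Panorama pass-through (password) method, using placeholder values for the device serial number and the Panorama connection details, might look like:

.. code-block:: yaml

    proxy:
      proxytype: panos
      serial: 012345678901
      host: panorama.example.com
      username: username
      password: passwd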
Panorama Pass-Through (API Key) ------------------------- +------------------------------- The Panorama server can also utilize a generated 'API key'_ for authentication. @@ -165,6 +180,7 @@ The Panorama server can also utilize a generated 'API key'_ for authentication. proxytype ^^^^^^^^^ + The ``proxytype`` key and value pair is critical, as it tells Salt which interface to load from the ``proxy`` directory in Salt's install hierarchy, or from ``/srv/salt/_proxy`` on the Salt Master (if you have created your @@ -173,16 +189,18 @@ own proxy module, for example). To use this panos Proxy Module, set this to serial ^^^^^^ + The serial number of the panos host. Required. host ^^^^ + The location, or ip/dns, of the Panorama server. Required. apikey ^^^^^^^^ -The generated XML API key for the Panorama server. Required. +The generated XML API key for the Panorama server. Required. ''' from __future__ import absolute_import, print_function, unicode_literals diff --git a/salt/returners/local_cache.py b/salt/returners/local_cache.py index 70c641af19..9bf6b948c9 100644 --- a/salt/returners/local_cache.py +++ b/salt/returners/local_cache.py @@ -441,7 +441,7 @@ def clean_old_jobs(): shutil.rmtree(f_path) elif os.path.isfile(jid_file): jid_ctime = os.stat(jid_file).st_ctime - hours_difference = (time.time()- jid_ctime) / 3600.0 + hours_difference = (time.time() - jid_ctime) / 3600.0 if hours_difference > __opts__['keep_jobs'] and os.path.exists(t_path): # Remove the entire f_path from the original JID dir shutil.rmtree(f_path) diff --git a/salt/runners/nacl.py b/salt/runners/nacl.py index 5727356258..2fae5915ab 100644 --- a/salt/runners/nacl.py +++ b/salt/runners/nacl.py @@ -121,6 +121,7 @@ import os import salt.utils.files import salt.utils.platform import salt.utils.stringutils +import salt.utils.versions import salt.utils.win_functions import salt.utils.win_dacl import salt.syspaths @@ -219,7 +220,7 @@ def keygen(sk_file=None, pk_file=None, **kwargs): if 'keyfile' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'keyfile\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk_file\' argument instead.' ) @@ -288,7 +289,7 @@ def enc(data, **kwargs): if 'keyfile' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'keyfile\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk_file\' argument instead.' ) @@ -296,7 +297,7 @@ def enc(data, **kwargs): if 'key' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'key\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk\' argument instead.' ) @@ -353,7 +354,7 @@ def dec(data, **kwargs): ''' if 'keyfile' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'keyfile\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk_file\' argument instead.' ) @@ -364,7 +365,7 @@ def dec(data, **kwargs): if 'key' in kwargs: salt.utils.versions.warn_until( - 'Fluorine', + 'Neon', 'The \'key\' argument has been deprecated and will be removed in Salt ' '{version}. Please use \'sk\' argument instead.' ) diff --git a/salt/runners/vault.py b/salt/runners/vault.py index 75268e7460..c2f30068ae 100644 --- a/salt/runners/vault.py +++ b/salt/runners/vault.py @@ -40,7 +40,7 @@ def generate_token(minion_id, signature, impersonated_by_master=False): True. This happens when the master generates minion pillars. 
''' log.debug( - 'Token generation request for %s (impersonated by master: %s)'. + 'Token generation request for %s (impersonated by master: %s)', minion_id, impersonated_by_master ) _validate_signature(minion_id, signature, impersonated_by_master) diff --git a/salt/state.py b/salt/state.py index 5d47cb47e6..ca6dfd146d 100644 --- a/salt/state.py +++ b/salt/state.py @@ -2371,7 +2371,8 @@ class State(object): if not r_state.startswith('prerequired'): req_stats.add('pre') else: - req_stats.add('met') + if run_dict[tag].get('__state_ran__', True): + req_stats.add('met') if r_state.endswith('_any'): if 'met' in req_stats or 'change' in req_stats: if 'fail' in req_stats: @@ -2620,6 +2621,7 @@ class State(object): '__run_num__': self.__run_num, '__sls__': low['__sls__'] } + self.pre[tag] = running[tag] self.__run_num += 1 elif status == 'change' and not low.get('__prereq__'): ret = self.call(low, chunks, running) @@ -2649,6 +2651,7 @@ class State(object): 'duration': duration, 'start_time': start_time, 'comment': 'State was not run because onfail req did not change', + '__state_ran__': False, '__run_num__': self.__run_num, '__sls__': low['__sls__']} self.__run_num += 1 @@ -2659,6 +2662,7 @@ class State(object): 'duration': duration, 'start_time': start_time, 'comment': 'State was not run because none of the onchanges reqs changed', + '__state_ran__': False, '__run_num__': self.__run_num, '__sls__': low['__sls__']} self.__run_num += 1 @@ -3014,6 +3018,7 @@ class BaseHighState(object): 'top_file_merging_strategy set to \'same\', but no ' 'default_top configuration option was set' ) + self.opts['environment'] = self.opts['default_top'] if self.opts['saltenv']: contents = self.client.cache_file( diff --git a/salt/states/environ.py b/salt/states/environ.py index 7f1589a56a..5d87a93f3c 100644 --- a/salt/states/environ.py +++ b/salt/states/environ.py @@ -22,6 +22,15 @@ def __virtual__(): return True +def _norm_key(key): + ''' + Normalize windows environment keys + ''' + if salt.utils.platform.is_windows(): + return key.upper() + return key + + def setenv(name, value, false_unsets=False, @@ -126,12 +135,11 @@ def setenv(name, permanent_hive = 'HKLM' permanent_key = r'SYSTEM\CurrentControlSet\Control\Session Manager\Environment' - out = __salt__['reg.read_value'](permanent_hive, permanent_key, key) + out = __salt__['reg.read_value'](permanent_hive, permanent_key, _norm_key(key)) return out['success'] is True else: return False - - if current_environ.get(key, None) is None and not key_exists(): + if current_environ.get(_norm_key(key), None) is None and not key_exists(): # The key does not exist in environment if false_unsets is not True: # This key will be added with value '' @@ -140,13 +148,13 @@ def setenv(name, # The key exists. 
if false_unsets is not True: # Check to see if the value will change - if current_environ.get(key, None) != '': + if current_environ.get(_norm_key(key), None) != '': # This key value will change to '' ret['changes'].update({key: ''}) else: # We're going to delete the key ret['changes'].update({key: None}) - elif current_environ.get(key, '') == val: + elif current_environ.get(_norm_key(key), '') == val: already_set.append(key) else: ret['changes'].update({key: val}) diff --git a/salt/states/file.py b/salt/states/file.py index 932d17d85b..5cd62da1e9 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -619,28 +619,34 @@ def _check_file(name): return ret, msg -def _clean_dir(root, keep, exclude_pat): +def _find_keep_files(root, keep): ''' - Clean out all of the files and directories in a directory (root) while - preserving the files in a list (keep) and part of exclude_pat + Compile a list of valid keep files (and directories). ''' - removed = set() real_keep = set() real_keep.add(root) if isinstance(keep, list): for fn_ in keep: if not os.path.isabs(fn_): continue + fn_ = os.path.normcase(os.path.abspath(fn_)) real_keep.add(fn_) while True: - fn_ = os.path.dirname(fn_) + fn_ = os.path.abspath(os.path.dirname(fn_)) real_keep.add(fn_) - if fn_ in [ - os.sep, - ''.join([os.path.splitdrive(fn_)[0], os.sep]), - ''.join([os.path.splitdrive(fn_)[0], os.sep, os.sep]) - ]: + drive, path = os.path.splitdrive(fn_) + if not path.lstrip(os.sep): break + return real_keep + + +def _clean_dir(root, keep, exclude_pat): + ''' + Clean out all of the files and directories in a directory (root) while + preserving the files in a list (keep) and part of exclude_pat + ''' + real_keep = _find_keep_files(root, keep) + removed = set() def _delete_not_kept(nfn): if nfn not in real_keep: @@ -5351,9 +5357,14 @@ def copy( subdir=False, **kwargs): ''' - If the source file exists on the system, copy it to the named file. The - named file will not be overwritten if it already exists unless the force - option is set to True. + If the file defined by the ``source`` option exists on the minion, copy it + to the named path. The file will not be overwritten if it already exists, + unless the ``force`` option is set to ``True``. + + .. note:: + This state only copies files from one location on a minion to another + location on the same minion. For copying files from the master, use a + :py:func:`file.managed ` state. name The location of the file to copy to diff --git a/salt/states/git.py b/salt/states/git.py index 63c987d35d..a32d7914f1 100644 --- a/salt/states/git.py +++ b/salt/states/git.py @@ -152,8 +152,10 @@ def _strip_exc(exc): def _uptodate(ret, target, comments=None, local_changes=False): ret['comment'] = 'Repository {0} is up-to-date'.format(target) if local_changes: - ret['comment'] += ', but with local changes. Set \'force_reset\' to ' \ - 'True to purge local changes.' + ret['comment'] += ( + ', but with uncommitted changes. Set \'force_reset\' to True to ' + 'purge uncommitted changes.' + ) if comments: # Shouldn't be making any changes if the repo was up to date, but # report on them so we are alerted to potential problems with our @@ -223,7 +225,7 @@ def _not_fast_forward(ret, rev, pre, post, branch, local_branch, return _fail( ret, 'Repository would be updated {0}{1}, but {2}. 
Set \'force_reset\' to ' - 'True to force this update{3}.{4}'.format( + 'True{3} to force this update{4}.{5}'.format( 'from {0} to {1}'.format(pre, post) if local_changes and pre != post else 'to {0}'.format(post), @@ -233,6 +235,7 @@ def _not_fast_forward(ret, rev, pre, post, branch, local_branch, 'this is not a fast-forward merge' if not local_changes else 'there are uncommitted changes', + ' (or \'remote-changes\')' if local_changes else '', ' and discard these changes' if local_changes else '', branch_msg, ), @@ -421,6 +424,11 @@ def latest(name, argument to ``True`` to force a hard-reset to the remote revision in these cases. + .. versionchanged:: Fluorine + This option can now be set to ``remote-changes``, which will + instruct Salt not to discard local changes if the repo is + up-to-date with the remote repository. + submodules : False Update submodules on clone or branch change @@ -532,21 +540,9 @@ def latest(name, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -622,6 +618,12 @@ def latest(name, '\'{0}\' is not a valid value for the \'rev\' argument'.format(rev) ) + if force_reset not in (True, False, 'remote-changes'): + return _fail( + ret, + '\'force_reset\' must be one of True, False, or \'remote-changes\'' + ) + # Ensure that certain arguments are strings to ensure that comparisons work if not isinstance(rev, six.string_types): rev = six.text_type(rev) @@ -936,11 +938,13 @@ def latest(name, local_changes = False if local_changes and revs_match: - if force_reset: + if force_reset is True: msg = ( - '{0} is up-to-date, but with local changes. Since ' - '\'force_reset\' is enabled, these local changes ' - 'would be reset.'.format(target) + '{0} is up-to-date, but with uncommitted changes. ' + 'Since \'force_reset\' is set to True, these local ' + 'changes would be reset. To only reset when there are ' + 'changes in the remote repository, set ' + '\'force_reset\' to \'remote-changes\'.'.format(target) ) if __opts__['test']: ret['changes']['forced update'] = True @@ -950,9 +954,9 @@ def latest(name, log.debug(msg.replace('would', 'will')) else: log.debug( - '%s up-to-date, but with local changes. Since ' - '\'force_reset\' is disabled, no changes will be ' - 'made.', target + '%s up-to-date, but with uncommitted changes. 
Since ' + '\'force_reset\' is set to %s, no changes will be ' + 'made.', target, force_reset ) return _uptodate(ret, target, @@ -1054,20 +1058,23 @@ def latest(name, elif remote_rev_type == 'sha1': has_remote_rev = True - # If fast_forward is not boolean, then we don't know if this will - # be a fast forward or not, because a fetch is required. - fast_forward = None if not local_changes else False + # If fast_forward is not boolean, then we don't yet know if this + # will be a fast forward or not, because a fetch is required. + fast_forward = False \ + if (local_changes and force_reset != 'remote-changes') \ + else None if has_remote_rev: if (not revs_match and not update_head) \ and (branch is None or branch == local_branch): - ret['comment'] = remote_loc.capitalize() \ - if rev == 'HEAD' \ - else remote_loc - ret['comment'] += ( - ' is already present and local HEAD ({0}) does not ' + ret['comment'] = ( + '{0} is already present and local HEAD ({1}) does not ' 'match, but update_head=False. HEAD has not been ' - 'updated locally.'.format(local_rev[:7]) + 'updated locally.'.format( + remote_loc.capitalize() if rev == 'HEAD' + else remote_loc, + local_rev[:7] + ) ) return ret @@ -1081,9 +1088,8 @@ def latest(name, # existed there and a remote was added and fetched, but # the repository was not fast-forwarded. Regardless, # going from no HEAD to a locally-present rev is - # considered a fast-forward update, unless there are - # local changes. - fast_forward = not bool(local_changes) + # considered a fast-forward update. + fast_forward = True else: fast_forward = __salt__['git.merge_base']( target, @@ -1095,7 +1101,7 @@ def latest(name, output_encoding=output_encoding) if fast_forward is False: - if not force_reset: + if force_reset is False: return _not_fast_forward( ret, rev, @@ -1439,7 +1445,10 @@ def latest(name, password=password, output_encoding=output_encoding) - if fast_forward is False and not force_reset: + if fast_forward is force_reset is False \ + or (fast_forward is True + and local_changes + and force_reset is False): return _not_fast_forward( ret, rev, @@ -1495,9 +1504,6 @@ def latest(name, '\'{0}\' was checked out'.format(checkout_rev) ) - if local_changes: - comments.append('Local changes were discarded') - if fast_forward is False: __salt__['git.reset']( target, @@ -1506,9 +1512,20 @@ def latest(name, password=password, output_encoding=output_encoding) ret['changes']['forced update'] = True + if local_changes: + comments.append('Uncommitted changes were discarded') comments.append( 'Repository was hard-reset to {0}'.format(remote_loc) ) + elif fast_forward is True \ + and local_changes \ + and force_reset is not False: + __salt__['git.discard_local_changes']( + target, + user=user, + password=password, + output_encoding=output_encoding) + comments.append('Uncommitted changes were discarded') if branch_opts is not None: __salt__['git.branch']( @@ -2032,21 +2049,9 @@ def present(name, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. 
- - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -2246,21 +2251,9 @@ def detached(name, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 ''' @@ -2736,21 +2729,9 @@ def config_unset(name, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. versionadded:: 2018.3.1 @@ -3009,21 +2990,9 @@ def config_set(name, cases. .. note:: - - On Windows, this option works slightly differently in the git state - and execution module than it does in the :mod:`"cmd" execution - module `. The filenames in most git - repositories are created using a UTF-8 locale, and the system - encoding on Windows (CP1252) will successfully (but incorrectly) - decode many UTF-8 characters. This makes interacting with - repositories containing UTF-8 filenames on Windows unreliable. - Therefore, Windows will default to decoding the output from git - commands using UTF-8 unless this option is explicitly used to - specify the encoding. - - On non-Windows platforms, the default output decoding behavior will - be observed (i.e. the encoding specified by the locale will be - tried first, and if that fails, UTF-8 will be used as a fallback). + This should only be needed if the files in the repository were + created with filenames using an encoding other than UTF-8 to handle + Unicode characters. .. 
versionadded:: 2018.3.1 diff --git a/salt/states/mysql_query.py b/salt/states/mysql_query.py index a8d1916901..bcdc72fc1b 100644 --- a/salt/states/mysql_query.py +++ b/salt/states/mysql_query.py @@ -57,6 +57,7 @@ def run_file(name, grain=None, key=None, overwrite=True, + check_db_exists=True, **connection_args): ''' Execute an arbitrary query on the specified database @@ -85,6 +86,10 @@ def run_file(name, overwrite: The file or grain will be overwritten if it already exists (default) + check_db_exists: + The state run will check that the specified database exists (default=True) + before running any queries + .. versionadded:: 2017.7.0 ''' ret = {'name': name, @@ -98,7 +103,7 @@ def run_file(name, return ret # check if database exists - if not __salt__['mysql.db_exists'](database, **connection_args): + if check_db_exists and not __salt__['mysql.db_exists'](database, **connection_args): err = _get_mysql_error() if err is not None: ret['comment'] = err @@ -216,6 +221,7 @@ def run(name, grain=None, key=None, overwrite=True, + check_db_exists=False, **connection_args): ''' Execute an arbitrary query on the specified database @@ -243,13 +249,17 @@ def run(name, overwrite: The file or grain will be overwritten if it already exists (default) + + check_db_exists: + The state run will check that the specified database exists (default=True) + before running any queries ''' ret = {'name': name, 'changes': {}, 'result': True, 'comment': 'Database {0} is already present'.format(database)} # check if database exists - if not __salt__['mysql.db_exists'](database, **connection_args): + if check_db_exists and not __salt__['mysql.db_exists'](database, **connection_args): err = _get_mysql_error() if err is not None: ret['comment'] = err diff --git a/salt/states/pkg.py b/salt/states/pkg.py index 45dcbf33f2..d131fe2145 100644 --- a/salt/states/pkg.py +++ b/salt/states/pkg.py @@ -1009,11 +1009,11 @@ def installed( **WILDCARD VERSIONS** As of the 2017.7.0 release, this state now supports wildcards in - package versions for SUSE SLES/Leap/Tumbleweed, Debian/Ubuntu, RHEL/CentOS, - Arch Linux, and their derivatives. Using wildcards can be useful for - packages where the release name is built into the version in some way, - such as for RHEL/CentOS which typically has version numbers like - ``1.2.34-5.el7``. An example of the usage for this would be: + package versions for SUSE SLES/Leap/Tumbleweed, Debian/Ubuntu, + RHEL/CentOS, Arch Linux, and their derivatives. Using wildcards can be + useful for packages where the release name is built into the version in + some way, such as for RHEL/CentOS which typically has version numbers + like ``1.2.34-5.el7``. An example of the usage for this would be: .. code-block:: yaml @@ -1021,6 +1021,11 @@ def installed( pkg.installed: - version: '1.2.34*' + Keep in mind that using wildcard versions will result in a slower state + run since Salt must gather the available versions of the specified + packages and figure out which of them match the specified wildcard + expression. + :param bool refresh: This parameter controls whether or not the package repo database is updated prior to installing the requested package(s). @@ -2668,7 +2673,7 @@ def removed(name, .. code-block:: yaml vim-enhanced: - pkg.installed: + pkg.removed: - version: 2:7.4.160-1.el7 In version 2015.8.9, an **ignore_epoch** argument has been added to @@ -2774,7 +2779,7 @@ def purged(name, .. 
code-block:: yaml vim-enhanced: - pkg.installed: + pkg.purged: - version: 2:7.4.160-1.el7 In version 2015.8.9, an **ignore_epoch** argument has been added to diff --git a/salt/states/win_network.py b/salt/states/win_network.py index ec0975144e..e94abf2199 100644 --- a/salt/states/win_network.py +++ b/salt/states/win_network.py @@ -267,8 +267,13 @@ def managed(name, ret['comment'] = ' '.join(errors) return ret + try: + currently_enabled = __salt__['ip.is_enabled'](name) + except CommandExecutionError: + currently_enabled = False + if not enabled: - if __salt__['ip.is_enabled'](name): + if currently_enabled: if __opts__['test']: ret['result'] = None ret['comment'] = ('Interface \'{0}\' will be disabled' @@ -282,18 +287,13 @@ def managed(name, ret['comment'] += ' (already disabled)' return ret else: - try: - currently_enabled = __salt__['ip.is_disabled'](name) - except CommandExecutionError: - currently_enabled = False if not currently_enabled: if __opts__['test']: ret['result'] = None ret['comment'] = ('Interface \'{0}\' will be enabled' .format(name)) else: - result = __salt__['ip.enable'](name) - if not result: + if not __salt__['ip.enable'](name): ret['result'] = False ret['comment'] = ('Failed to enable interface \'{0}\' to ' 'make changes'.format(name)) diff --git a/salt/states/x509.py b/salt/states/x509.py index b2de03d281..832f74168c 100644 --- a/salt/states/x509.py +++ b/salt/states/x509.py @@ -308,6 +308,9 @@ def private_key_managed(name, file_args['contents'] = __salt__['x509.create_private_key']( text=True, bits=bits, passphrase=passphrase, cipher=cipher, verbose=verbose) + # Ensure the key contents are a string before passing it along + file_args['contents'] = salt.utils.stringutils.to_str(file_args['contents']) + ret = __states__['file.managed'](**file_args) if ret['changes'] and new_key: ret['changes'] = {'new': 'New private key generated'} diff --git a/salt/templates/debian_ip/route_eth.jinja b/salt/templates/debian_ip/route_eth.jinja index a5379b1964..21d73f0d5d 100644 --- a/salt/templates/debian_ip/route_eth.jinja +++ b/salt/templates/debian_ip/route_eth.jinja @@ -2,5 +2,5 @@ # {{route_type}} test "${IFACE}" = "{{iface}}" || exit 0 {% for route in routes %}{% if route.name %}# {{route.name}} -{%endif%}ip route {{route_type}} {% if route.ipaddr %}{{route.ipaddr}}{%endif%}{% if route.netmask %}/{{route.netmask}}{%endif%} {% if route.gateway %}via {{route.gateway}}{%endif%} dev {{iface}} +{%endif%}ip route {{route_type}} {% if route.ipaddr %}{{route.ipaddr}}{%endif%}{% if route.netmask %}/{{route.netmask}}{%endif%} {% if route.gateway %}via {{route.gateway}}{%endif%} dev {{iface}}{% if route.metric %} metric {{route.metric}}{%endif%} {% endfor %} diff --git a/salt/templates/rh_ip/rh6_route_eth.jinja b/salt/templates/rh_ip/rh6_route_eth.jinja index 4b0bc7ede1..3bde01b6e0 100644 --- a/salt/templates/rh_ip/rh6_route_eth.jinja +++ b/salt/templates/rh_ip/rh6_route_eth.jinja @@ -7,4 +7,6 @@ {%- if route.gateway %} via {{route.gateway}} {%- else %} dev {{iface}} {%- endif %} +{%- if route.metric %} metric {{route.metric}} +{%- endif %} {% endfor -%} diff --git a/salt/utils/event.py b/salt/utils/event.py index 8b40ab6f47..2bd9e6a029 100644 --- a/salt/utils/event.py +++ b/salt/utils/event.py @@ -439,7 +439,7 @@ class SaltEvent(object): if six.PY2: mtag, sep, mdata = raw.partition(TAGEND) # split tag from data - data = serial.loads(mdata) + data = serial.loads(mdata, encoding='utf-8') else: mtag, sep, mdata = raw.partition(salt.utils.stringutils.to_bytes(TAGEND)) # split tag from data 
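The ``encoding='utf-8'`` argument in the Python 2 branch above matters because msgpack otherwise hands event payload values back as byte strings. A minimal sketch of the difference, assuming a pre-1.0 msgpack-python release that still accepts the ``encoding`` keyword (Salt's event code reaches this call through a ``salt.payload`` serializer wrapping msgpack, but the underlying behaviour is the same):

.. code-block:: python

    # -*- coding: utf-8 -*-
    import msgpack

    packed = msgpack.dumps({u'comment': u'This is Æ test!'})

    # Python 2, no encoding: values come back as UTF-8 encoded bytes
    msgpack.loads(packed)                    # {'comment': 'This is \xc3\x86 test!'}

    # Python 2, encoding='utf-8': values are decoded back to unicode
    msgpack.loads(packed, encoding='utf-8')  # {u'comment': u'This is Æ test!'}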
mtag = salt.utils.stringutils.to_str(mtag) diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py index b99e7597a5..7e46c7789a 100644 --- a/salt/utils/parsers.py +++ b/salt/utils/parsers.py @@ -38,6 +38,7 @@ import salt.utils.platform import salt.utils.process import salt.utils.stringutils import salt.utils.user +import salt.utils.win_functions import salt.utils.xdg import salt.utils.yaml from salt.defaults import DEFAULT_TARGET_DELIM @@ -1020,11 +1021,11 @@ class DaemonMixIn(six.with_metaclass(MixInMeta, object)): if self.check_pidfile(): pid = self.get_pidfile() if not salt.utils.platform.is_windows(): - if self.check_pidfile() and self.is_daemonized(pid) and not os.getppid() == pid: + if self.check_pidfile() and self.is_daemonized(pid) and os.getppid() != pid: return True else: - # We have no os.getppid() on Windows. Best effort. - if self.check_pidfile() and self.is_daemonized(pid): + # We have no os.getppid() on Windows. Use salt.utils.win_functions.get_parent_pid + if self.check_pidfile() and self.is_daemonized(pid) and salt.utils.win_functions.get_parent_pid() != pid: return True return False diff --git a/salt/utils/pkg/rpm.py b/salt/utils/pkg/rpm.py index a8f4cd44db..987edab894 100644 --- a/salt/utils/pkg/rpm.py +++ b/salt/utils/pkg/rpm.py @@ -12,7 +12,6 @@ import subprocess # Import 3rd-party libs from salt.ext import six -from salt.ext.six.moves import range # pylint: disable=redefined-builtin log = logging.getLogger(__name__) @@ -122,13 +121,13 @@ def combine_comments(comments): ''' if not isinstance(comments, list): comments = [comments] - for idx in range(len(comments)): - if not isinstance(comments[idx], six.string_types): - comments[idx] = six.text_type(comments[idx]) - comments[idx] = comments[idx].strip() - if not comments[idx].startswith('#'): - comments[idx] = '#' + comments[idx] - return '\n'.join(comments) + ret = [] + for comment in comments: + if not isinstance(comment, six.string_types): + comment = str(comment) + # Normalize for any spaces (or lack thereof) after the # + ret.append('# {0}\n'.format(comment.lstrip('#').lstrip())) + return ''.join(ret) def version_to_evr(verstring): diff --git a/salt/utils/process.py b/salt/utils/process.py index 72d3c6825d..817b1bc630 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -231,7 +231,7 @@ def get_pidfile(pidfile): pid = pdf.read().strip() return int(pid) except (OSError, IOError, TypeError, ValueError): - return None + return -1 def clean_proc(proc, wait_for_kill=10): diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py index d93d48e142..4cd5e9bc91 100644 --- a/salt/utils/schedule.py +++ b/salt/utils/schedule.py @@ -110,13 +110,15 @@ class Schedule(object): pass # an init for the singleton instance to call - def __singleton_init__(self, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None, standalone=False): + def __singleton_init__(self, opts, functions, returners=None, intervals=None, cleanup=None, proxy=None, utils=None, standalone=False): self.opts = opts self.proxy = proxy self.functions = functions + self.utils = utils self.standalone = standalone self.skip_function = None self.skip_during_range = None + self.splay = None self.enabled = True if isinstance(intervals, dict): self.intervals = intervals @@ -283,6 +285,7 @@ class Schedule(object): self.skip_function = None self.skip_during_range = None self.enabled = True + self.splay = None self.opts['schedule'] = {} def delete_job_prefix(self, name, persist=True): @@ -595,10 +598,11 @@ class Schedule(object): # This 
also needed for ZeroMQ transport to reset all functions # context data that could keep paretns connections. ZeroMQ will # hang on polling parents connections from the child process. + utils = self.utils or salt.loader.utils(self.opts) if self.opts['__role'] == 'master': - self.functions = salt.loader.runner(self.opts) + self.functions = salt.loader.runner(self.opts, utils=utils) else: - self.functions = salt.loader.minion_mods(self.opts, proxy=self.proxy) + self.functions = salt.loader.minion_mods(self.opts, proxy=self.proxy, utils=utils) self.returners = salt.loader.returners(self.opts, self.functions, proxy=self.proxy) ret = {'id': self.opts.get('id', 'master'), 'fun': func, @@ -1297,10 +1301,13 @@ class Schedule(object): self.skip_during_range = schedule['skip_during_range'] if 'enabled' in schedule: self.enabled = schedule['enabled'] + if 'splay' in schedule: + self.splay = schedule['splay'] _hidden = ['enabled', 'skip_function', - 'skip_during_range'] + 'skip_during_range', + 'splay'] for job, data in six.iteritems(schedule): # Skip anything that is a global setting @@ -1404,7 +1411,12 @@ class Schedule(object): seconds = int((data['_next_fire_time'] - now).total_seconds()) - if 'splay' in data: + # If there is no job specific splay available, + # grab the global which defaults to None. + if 'splay' not in data: + data['splay'] = self.splay + + if 'splay' in data and data['splay']: # Got "splay" configured, make decision to run a job based on that if not data['_splay']: # Try to add "splay" time only if next job fire time is @@ -1613,6 +1625,7 @@ class Schedule(object): # Restore our function references. self.functions = functions self.returners = returners + self.utils = utils def clean_proc_dir(opts): diff --git a/salt/utils/stringutils.py b/salt/utils/stringutils.py index df056f6459..e53804907b 100644 --- a/salt/utils/stringutils.py +++ b/salt/utils/stringutils.py @@ -41,10 +41,11 @@ def to_bytes(s, encoding=None, errors='strict'): return s.encode(encoding, errors) else: try: - return s.encode(__salt_system_encoding__, errors) - except UnicodeEncodeError: - # Fall back to UTF-8 + # Try UTF-8 first return s.encode('utf-8', errors) + except UnicodeEncodeError: + # Fall back to detected encoding + return s.encode(__salt_system_encoding__, errors) raise TypeError('expected bytes, bytearray, or str') else: return to_str(s, encoding, errors) @@ -64,10 +65,11 @@ def to_str(s, encoding=None, errors='strict'): return s.decode(encoding, errors) else: try: - return s.decode(__salt_system_encoding__, errors) - except UnicodeDecodeError: - # Fall back to UTF-8 + # Try UTF-8 first return s.decode('utf-8', errors) + except UnicodeDecodeError: + # Fall back to detected encoding + return s.decode(__salt_system_encoding__, errors) raise TypeError('expected str, bytes, or bytearray not {}'.format(type(s))) else: if isinstance(s, bytearray): @@ -77,10 +79,11 @@ def to_str(s, encoding=None, errors='strict'): return s.encode(encoding, errors) else: try: - return s.encode(__salt_system_encoding__, errors) - except UnicodeEncodeError: - # Fall back to UTF-8 + # Try UTF-8 first return s.encode('utf-8', errors) + except UnicodeEncodeError: + # Fall back to detected encoding + return s.encode(__salt_system_encoding__, errors) raise TypeError('expected str, bytearray, or unicode') @@ -108,10 +111,11 @@ def to_unicode(s, encoding=None, errors='strict', normalize=False): return _normalize(s.decode(encoding, errors)) else: try: - return _normalize(s.decode(__salt_system_encoding__, errors)) - except 
UnicodeDecodeError: - # Fall back to UTF-8 + # Try UTF-8 first return _normalize(s.decode('utf-8', errors)) + except UnicodeDecodeError: + # Fall back to detected encoding + return _normalize(s.decode(__salt_system_encoding__, errors)) raise TypeError('expected str or bytearray') @@ -294,20 +298,29 @@ def build_whitespace_split_regex(text): def expr_match(line, expr): ''' - Evaluate a line of text against an expression. First try a full-string - match, next try globbing, and then try to match assuming expr is a regular - expression. Originally designed to match minion IDs for - whitelists/blacklists. + Checks whether or not the passed value matches the specified expression. + Tries to match expr first as a glob using fnmatch.fnmatch(), and then tries + to match expr as a regular expression. Originally designed to match minion + IDs for whitelists/blacklists. + + Note that this also does exact matches, as fnmatch.fnmatch() will return + ``True`` when no glob characters are used and the string is an exact match: + + .. code-block:: python + + >>> fnmatch.fnmatch('foo', 'foo') + True ''' - if line == expr: - return True - if fnmatch.fnmatch(line, expr): - return True try: - if re.match(r'\A{0}\Z'.format(expr), line): + if fnmatch.fnmatch(line, expr): return True - except re.error: - pass + try: + if re.match(r'\A{0}\Z'.format(expr), line): + return True + except re.error: + pass + except TypeError: + log.exception('Value %r or expression %r is not a string', line, expr) return False @@ -337,22 +350,16 @@ def check_whitelist_blacklist(value, whitelist=None, blacklist=None): if blacklist is not None: if not hasattr(blacklist, '__iter__'): blacklist = [blacklist] - try: - for expr in blacklist: - if expr_match(value, expr): - return False - except TypeError: - log.error('Non-iterable blacklist %s', blacklist) + for expr in blacklist: + if expr_match(value, expr): + return False if whitelist: if not hasattr(whitelist, '__iter__'): whitelist = [whitelist] - try: - for expr in whitelist: - if expr_match(value, expr): - return True - except TypeError: - log.error('Non-iterable whitelist %s', whitelist) + for expr in whitelist: + if expr_match(value, expr): + return True else: return True diff --git a/salt/utils/templates.py b/salt/utils/templates.py index 09984c9594..05b1646a40 100644 --- a/salt/utils/templates.py +++ b/salt/utils/templates.py @@ -28,6 +28,7 @@ else: # Import Salt libs import salt.utils.data +import salt.utils.dateutils import salt.utils.http import salt.utils.files import salt.utils.platform @@ -372,7 +373,14 @@ def render_jinja_tmpl(tmplstr, context, tmplpath=None): decoded_context[key] = value continue - decoded_context[key] = salt.utils.locales.sdecode(value) + try: + decoded_context[key] = salt.utils.stringutils.to_unicode(value, encoding=SLS_ENCODING) + except UnicodeDecodeError as ex: + log.debug( + "Failed to decode using default encoding (%s), trying system encoding", + SLS_ENCODING, + ) + decoded_context[key] = salt.utils.locales.sdecode(value) try: template = jinja_env.from_string(tmplstr) diff --git a/tests/integration/cli/test_batch.py b/tests/integration/cli/test_batch.py index 4faaf8ec39..1ca8049504 100644 --- a/tests/integration/cli/test_batch.py +++ b/tests/integration/cli/test_batch.py @@ -18,7 +18,7 @@ class BatchTest(ShellCase): ''' Tests executing a simple batch command to help catch regressions ''' - ret = 'Executing run on [\'sub_minion\']' + ret = 'Executing run on [{0}]'.format(repr('sub_minion')) cmd = self.run_salt('\'*minion\' test.echo \'batch testing\' 
-b 50%')
        self.assertIn(ret, cmd)

@@ -28,7 +28,7 @@ class BatchTest(ShellCase):
         Tests executing a simple batch command using a number division instead of
         a percentage with full batch CLI call.
         '''
-        ret = "Executing run on ['minion', 'sub_minion']"
+        ret = "Executing run on [{0}, {1}]".format(repr('minion'), repr('sub_minion'))
         cmd = self.run_salt('\'*minion\' test.ping --batch-size 2')
         self.assertIn(ret, cmd)

@@ -38,8 +38,8 @@ class BatchTest(ShellCase):
         targeting.
         '''
         os_grain = ''
-        sub_min_ret = "Executing run on ['sub_minion']"
-        min_ret = "Executing run on ['minion']"
+        sub_min_ret = "Executing run on [{0}]".format(repr('sub_minion'))
+        min_ret = "Executing run on [{0}]".format(repr('minion'))

         for item in self.run_salt('minion grains.get os'):
             if item != 'minion':
diff --git a/tests/integration/cloud/providers/test_ec2.py b/tests/integration/cloud/providers/test_ec2.py
index 2758809d8e..efc395058d 100644
--- a/tests/integration/cloud/providers/test_ec2.py
+++ b/tests/integration/cloud/providers/test_ec2.py
@@ -6,14 +6,18 @@
 # Import Python Libs
 from __future__ import absolute_import, print_function, unicode_literals
 import os
+import yaml

 # Import Salt Libs
 from salt.config import cloud_providers_config
+import salt.utils.files

 # Import Salt Testing Libs
 from tests.support.case import ShellCase
 from tests.support.paths import FILES
 from tests.support.helpers import expensiveTest, generate_random_name
+from tests.support.unit import expectedFailure
+from tests.support import win_installer

 # Create the cloud instance name to be used throughout the tests
 INSTANCE_NAME = generate_random_name('CLOUD-TEST-')
@@ -26,6 +30,38 @@ class EC2Test(ShellCase):
     '''
     Integration tests for the EC2 cloud provider in Salt-Cloud
     '''
+    TIMEOUT = 1000
+
+    def _installer_name(self):
+        '''
+        Determine the downloaded installer name by searching the files
+        directory for the first file that looks like an installer.
+        '''
+        for path, dirs, files in os.walk(FILES):
+            for file in files:
+                if file.startswith(win_installer.PREFIX):
+                    return file
+            break
+        return
+
+    def _fetch_latest_installer(self):
+        '''
+        Download the latest Windows installer executable
+        '''
+        name = win_installer.latest_installer_name()
+        path = os.path.join(FILES, name)
+        with salt.utils.files.fopen(path, 'wb') as fp:
+            win_installer.download_and_verify(fp, name)
+        return name
+
+    def _ensure_installer(self):
+        '''
+        Make sure the testing environment has a Windows installer executable.
+        '''
+        name = self._installer_name()
+        if name:
+            return name
+        return self._fetch_latest_installer()

     @expensiveTest
     def setUp(self):
@@ -77,27 +113,51 @@ class EC2Test(ShellCase):
             'missing. Check tests/integration/files/conf/cloud.providers.d/{0}.conf'
             .format(PROVIDER_NAME)
         )
+        self.INSTALLER = self._ensure_installer()

-    def test_instance(self):
+    def override_profile_config(self, name, data):
+        conf_path = os.path.join(self.get_config_dir(), 'cloud.profiles.d', 'ec2.conf')
+        with salt.utils.files.fopen(conf_path, 'r') as fp:
+            conf = yaml.safe_load(fp)
+        conf[name].update(data)
+        with salt.utils.files.fopen(conf_path, 'w') as fp:
+            yaml.dump(conf, fp)
+
+    def copy_file(self, name):
+        '''
+        Copy a file from tests/integration/files to a test's temporary
+        configuration directory. The path to the file which is created will be
+        returned.
+ ''' + src = os.path.join(FILES, name) + dst = os.path.join(self.get_config_dir(), name) + with salt.utils.files.fopen(src, 'rb') as sfp: + with salt.utils.files.fopen(dst, 'wb') as dfp: + dfp.write(sfp.read()) + return dst + + def _test_instance(self, profile='ec2-test', debug=False, timeout=TIMEOUT): ''' Tests creating and deleting an instance on EC2 (classic) ''' + # create the instance - instance = self.run_cloud('-p ec2-test {0}'.format(INSTANCE_NAME), - timeout=EC2_TIMEOUT) + cmd = '-p {0}'.format(profile) + if debug: + cmd += ' -l debug' + cmd += ' {0}'.format(INSTANCE_NAME) + instance = self.run_cloud(cmd, timeout=timeout) ret_str = '{0}:'.format(INSTANCE_NAME) # check if instance returned with salt installed try: self.assertIn(ret_str, instance) except AssertionError: - self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), - timeout=EC2_TIMEOUT) + self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), timeout=timeout) raise # delete the instance - delete = self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), - timeout=EC2_TIMEOUT) + delete = self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), timeout=timeout) ret_str = ' shutting-down' # check if deletion was performed appropriately @@ -145,6 +205,80 @@ class EC2Test(ShellCase): # check if deletion was performed appropriately self.assertIn(ret_str, delete) + def test_instance(self): + ''' + Tests creating and deleting an instance on EC2 (classic) + ''' + self._test_instance('ec2-test') + + @expectedFailure + def test_win2012r2_winexe(self): + ''' + Tests creating and deleting a Windows 2012r2instance on EC2 using + winexe (classic) + ''' + # TODO: winexe calls hang and the test fails by timing out. The same + # same calls succeed when run outside of the test environment. + self.override_profile_config( + 'ec2-win2012-test', + { + 'use_winrm': False, + 'user_data': self.copy_file('windows-firewall-winexe.ps1'), + 'win_installer': self.copy_file(self.INSTALLER), + }, + ) + self._test_instance('ec2-win2012r2-test', debug=True, timeout=500) + + def test_win2012r2_winrm(self): + ''' + Tests creating and deleting a Windows 2012r2 instance on EC2 using + winrm (classic) + ''' + self.override_profile_config( + 'ec2-win2016-test', + { + 'user_data': self.copy_file('windows-firewall.ps1'), + 'win_installer': self.copy_file(self.INSTALLER), + 'winrm_ssl_verify': False, + } + + ) + self._test_instance('ec2-win2012r2-test', debug=True, timeout=500) + + @expectedFailure + def test_win2016_winexe(self): + ''' + Tests creating and deleting a Windows 2016 instance on EC2 using winrm + (classic) + ''' + # TODO: winexe calls hang and the test fails by timing out. The same + # same calls succeed when run outside of the test environment. 
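Each public test in this module now just selects a profile and drives the shared ``_test_instance`` helper, optionally raising the log level and tightening the timeout for the slower Windows deployments. An illustrative sketch of the calling pattern used by the tests in this file:

.. code-block:: python

    # Linux profile: defaults are fine
    self._test_instance('ec2-test')

    # Windows profiles: merge test-specific settings into the rendered
    # cloud.profiles.d/ec2.conf first, then deploy with debug logging
    self.override_profile_config(
        'ec2-win2016-test',
        {
            'user_data': self.copy_file('windows-firewall.ps1'),
            'win_installer': self.copy_file(self.INSTALLER),
            'winrm_ssl_verify': False,
        },
    )
    self._test_instance('ec2-win2016-test', debug=True, timeout=500)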
+ self.override_profile_config( + 'ec2-win2016-test', + { + 'use_winrm': False, + 'user_data': self.copy_file('windows-firewall-winexe.ps1'), + 'win_installer': self.copy_file(self.INSTALLER), + }, + ) + self._test_instance('ec2-win2016-test', debug=True, timeout=500) + + def test_win2016_winrm(self): + ''' + Tests creating and deleting a Windows 2016 instance on EC2 using winrm + (classic) + ''' + self.override_profile_config( + 'ec2-win2016-test', + { + 'user_data': self.copy_file('windows-firewall.ps1'), + 'win_installer': self.copy_file(self.INSTALLER), + 'winrm_ssl_verify': False, + } + + ) + self._test_instance('ec2-win2016-test', debug=True, timeout=500) + def tearDown(self): ''' Clean up after tests @@ -154,5 +288,4 @@ class EC2Test(ShellCase): # if test instance is still present, delete it if ret_str in query: - self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), - timeout=EC2_TIMEOUT) + self.run_cloud('-d {0} --assume-yes'.format(INSTANCE_NAME), timeout=self.TIMEOUT) diff --git a/tests/integration/doc/__init__.py b/tests/integration/doc/__init__.py new file mode 100644 index 0000000000..40a96afc6f --- /dev/null +++ b/tests/integration/doc/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/tests/integration/doc/test_man.py b/tests/integration/doc/test_man.py new file mode 100644 index 0000000000..6e2bb15ac6 --- /dev/null +++ b/tests/integration/doc/test_man.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +''' +Tests for existence of manpages +''' +# Import python libs +from __future__ import absolute_import, print_function, unicode_literals +import os +import shutil + +# Import Salt libs +import salt.utils.platform + +# Import Salt Testing libs +from tests.support.case import ModuleCase +from tests.support.paths import TMP +from tests.support.unit import skipIf + + +@skipIf(salt.utils.platform.is_windows(), 'minion is windows') +class ManTest(ModuleCase): + rootdir = os.path.join(TMP, 'mantest') + # Map filenames to search strings which should be in the manpage + manpages = { + 'salt-cp.1': [ + 'salt-cp Documentation', + 'copies files from the master', + ], + 'salt-cloud.1': [ + 'Salt Cloud Command', + 'Provision virtual machines in the cloud', + ], + 'salt-call.1': [ + 'salt-call Documentation', + 'run module functions locally', + ], + 'salt-api.1': [ + 'salt-api Command', + 'Start interfaces used to remotely connect', + ], + 'salt-unity.1': [ + 'salt-unity Command', + 'unified invocation wrapper', + ], + 'salt-syndic.1': [ + 'salt-syndic Documentation', + 'Salt syndic daemon', + ], + 'salt-ssh.1': [ + 'salt-ssh Documentation', + 'executed using only SSH', + ], + 'salt-run.1': [ + 'salt-run Documentation', + 'frontend command for executing', + ], + 'salt-proxy.1': [ + 'salt-proxy Documentation', + 'proxies these commands', + ], + 'salt-minion.1': [ + 'salt-minion Documentation', + 'Salt minion daemon', + ], + 'salt-master.1': [ + 'salt-master Documentation', + 'Salt master daemon', + ], + 'salt-key.1': [ + 'salt-key Documentation', + 'management of Salt server public keys', + ], + 'salt.1': [ + 'allows for commands to be executed', + ], + 'salt.7': [ + 'Salt Documentation', + ], + 'spm.1': [ + 'Salt Package Manager Command', + 'command for managing Salt packages', + ], + } + + def setUp(self): + if not self.run_function('mantest.install', [self.rootdir]): + self.fail('Failed to install salt to {0}'.format(self.rootdir)) + + @classmethod + def tearDownClass(cls): + try: + shutil.rmtree(cls.rootdir) + except OSError: + pass + + def test_man(self): + ''' + Make 
sure that man pages are installed + ''' + ret = self.run_function('mantest.search', [self.manpages, self.rootdir]) + # The above function returns True if successful and an exception (which + # will manifest in the return as a stringified exception) if + # unsuccessful. Therefore, a simple assertTrue is not sufficient. + if ret is not True: + self.fail(ret) diff --git a/tests/integration/files/conf/cloud.profiles.d/ec2.conf b/tests/integration/files/conf/cloud.profiles.d/ec2.conf index c544cf3b88..cd8aba1364 100644 --- a/tests/integration/files/conf/cloud.profiles.d/ec2.conf +++ b/tests/integration/files/conf/cloud.profiles.d/ec2.conf @@ -4,3 +4,31 @@ ec2-test: size: t1.micro sh_username: ec2-user script_args: '-P -Z' +ec2-win2012r2-test: + provider: ec2-config + size: t2.micro + image: ami-eb1ecd96 + smb_port: 445 + win_installer: '' + win_username: Administrator + win_password: auto + userdata_file: '' + userdata_template: False + use_winrm: True + winrm_verify_ssl: False + ssh_interface: private_ips + deploy: True +ec2-win2016-test: + provider: ec2-config + size: t2.micro + image: ami-ed14c790 + smb_port: 445 + win_installer: '' + win_username: Administrator + win_password: auto + userdata_file: '' + userdata_template: False + use_winrm: True + winrm_verify_ssl: False + ssh_interface: private_ips + deploy: True diff --git a/tests/integration/files/file/base/_modules/mantest.py b/tests/integration/files/file/base/_modules/mantest.py new file mode 100644 index 0000000000..bc7f73e7b6 --- /dev/null +++ b/tests/integration/files/file/base/_modules/mantest.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +''' +Helpers for testing man pages +''' +# Import python libs +from __future__ import absolute_import, print_function, unicode_literals +import os +import sys + +# Import Salt libs +import salt.utils.files +import salt.utils.path +import salt.utils.stringutils +from salt.exceptions import CommandExecutionError + +# Import Salt Tesing libs +from tests.support.paths import CODE_DIR + + +def install(rootdir): + if not os.path.exists(rootdir): + os.makedirs(rootdir) + return __salt__['cmd.retcode']( + [sys.executable, + os.path.join(CODE_DIR, 'setup.py'), + 'install', '--root={0}'.format(rootdir)]) == 0 + return True + + +def search(manpages, rootdir): + manpage_fns = set(manpages) + manpage_paths = {} + for root, _, files in os.walk(rootdir): + if not manpage_fns: + # All manpages found, no need to keep walking + break + # Using list because we will be modifying the set during iteration + for manpage_fn in list(manpage_fns): + if manpage_fn in files: + manpage_path = salt.utils.path.join(root, manpage_fn) + manpage_paths[manpage_fn] = manpage_path + manpage_fns.remove(manpage_fn) + + if manpage_fns: + raise CommandExecutionError( + 'The following manpages were not found under {0}: {1}'.format( + rootdir, + ', '.join(sorted(manpage_fns)) + ) + ) + + failed = {} + for manpage in sorted(manpages): + with salt.utils.files.fopen(manpage_paths[manpage]) as fp_: + contents = salt.utils.stringutils.to_unicode(fp_.read()) + # Check for search string in contents + for search_string in manpages[manpage]: + if search_string not in contents: + failed.setdefault(manpage, []).append( + 'No match for search string \'{0}\' found in {1}'.format( + search_string, manpage_paths[manpage] + ) + ) + # Check for correct install dir + path = '/man{0}/'.format(manpage.rsplit('.', 1)[-1]) + if path not in manpage_paths[manpage]: + failed.setdefault(manpage, []).append( + '{0} not found in manpage path {1}'.format( + path, 
manpage_paths[manpage] + ) + ) + + if failed: + raise CommandExecutionError('One or more manpages failed', info=failed) + + return True diff --git a/tests/integration/files/file/base/_modules/runtests_helpers.py b/tests/integration/files/file/base/_modules/runtests_helpers.py index 84ec92e044..89f1f78c78 100644 --- a/tests/integration/files/file/base/_modules/runtests_helpers.py +++ b/tests/integration/files/file/base/_modules/runtests_helpers.py @@ -66,6 +66,8 @@ def get_invalid_docs(): 'log.warning', 'lowpkg.bin_pkg_info', 'lxc.run_cmd', + 'mantest.install', + 'mantest.search', 'nspawn.restart', 'nspawn.stop', 'pkg.expand_repo_def', diff --git a/tests/integration/files/file/base/issue-46672.sls b/tests/integration/files/file/base/issue-46672.sls new file mode 100644 index 0000000000..49840a029d --- /dev/null +++ b/tests/integration/files/file/base/issue-46672.sls @@ -0,0 +1,3 @@ +echo1: + cmd.run: + - name: "echo 'This is Æ test!'" diff --git a/tests/integration/files/file/base/requisites/onfail_multiple_required.sls b/tests/integration/files/file/base/requisites/onfail_multiple_required.sls new file mode 100644 index 0000000000..d4c49518e9 --- /dev/null +++ b/tests/integration/files/file/base/requisites/onfail_multiple_required.sls @@ -0,0 +1,25 @@ +a: + cmd.run: + - name: exit 1 + +b: + cmd.run: + - name: echo b + - onfail: + - cmd: a + +c: + cmd.run: + - name: echo c + - onfail: + - cmd: a + - require: + - cmd: b + +d: + cmd.run: + - name: echo d + - onfail: + - cmd: a + - require: + - cmd: c diff --git a/tests/integration/files/file/base/requisites/onfail_multiple_required_no_run.sls b/tests/integration/files/file/base/requisites/onfail_multiple_required_no_run.sls new file mode 100644 index 0000000000..fbf43b6618 --- /dev/null +++ b/tests/integration/files/file/base/requisites/onfail_multiple_required_no_run.sls @@ -0,0 +1,25 @@ +a: + cmd.run: + - name: exit 0 + +b: + cmd.run: + - name: echo b + - onfail: + - cmd: a + +c: + cmd.run: + - name: echo c + - onfail: + - cmd: a + - require: + - cmd: b + +d: + cmd.run: + - name: echo d + - onfail: + - cmd: a + - require: + - cmd: c diff --git a/tests/integration/files/windows-firewall-winexe.ps1 b/tests/integration/files/windows-firewall-winexe.ps1 new file mode 100644 index 0000000000..e3318aa561 --- /dev/null +++ b/tests/integration/files/windows-firewall-winexe.ps1 @@ -0,0 +1,5 @@ + +New-NetFirewallRule -Name "SMB445" -DisplayName "SMB445" -Protocol TCP -LocalPort 445 +Set-Item (dir wsman:\localhost\Listener\*\Port -Recurse).pspath 445 -Force +Restart-Service winrm + diff --git a/tests/integration/files/windows-firewall.ps1 b/tests/integration/files/windows-firewall.ps1 new file mode 100644 index 0000000000..8e3d6a63f2 --- /dev/null +++ b/tests/integration/files/windows-firewall.ps1 @@ -0,0 +1,33 @@ + +New-NetFirewallRule -Name "SMB445" -DisplayName "SMB445" -Protocol TCP -LocalPort 445 +New-NetFirewallRule -Name "WINRM5986" -DisplayName "WINRM5986" -Protocol TCP -LocalPort 5986 + +winrm quickconfig -q +winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="300"}' +winrm set winrm/config '@{MaxTimeoutms="1800000"}' +winrm set winrm/config/service/auth '@{Basic="true"}' + +$SourceStoreScope = 'LocalMachine' +$SourceStorename = 'Remote Desktop' + +$SourceStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $SourceStorename, $SourceStoreScope +$SourceStore.Open([System.Security.Cryptography.X509Certificates.OpenFlags]::ReadOnly) + +$cert = $SourceStore.Certificates | Where-Object -FilterScript { 
+ $_.subject -like '*' +} + +$DestStoreScope = 'LocalMachine' +$DestStoreName = 'My' + +$DestStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $DestStoreName, $DestStoreScope +$DestStore.Open([System.Security.Cryptography.X509Certificates.OpenFlags]::ReadWrite) +$DestStore.Add($cert) + +$SourceStore.Close() +$DestStore.Close() + +winrm create winrm/config/listener?Address=*+Transport=HTTPS `@`{Hostname=`"($certId)`"`;CertificateThumbprint=`"($cert.Thumbprint)`"`} + +Restart-Service winrm + diff --git a/tests/integration/modules/test_cmdmod.py b/tests/integration/modules/test_cmdmod.py index 5701dd7f43..5a5c24a776 100644 --- a/tests/integration/modules/test_cmdmod.py +++ b/tests/integration/modules/test_cmdmod.py @@ -15,9 +15,11 @@ from tests.support.helpers import ( skip_if_not_root ) from tests.support.paths import TMP +from tests.support.unit import skipIf # Import salt libs import salt.utils.path +import salt.utils.platform # Import 3rd-party libs from salt.ext import six @@ -289,6 +291,15 @@ class CMDModuleTest(ModuleCase): runas=runas).strip() self.assertEqual(result, expected_result) + @skipIf(salt.utils.platform.is_windows(), 'minion is windows') + @skip_if_not_root + def test_runas(self): + ''' + Ensure that the env is the runas user's + ''' + out = self.run_function('cmd.run', ['env'], runas='nobody').splitlines() + self.assertIn('USER=nobody', out) + def test_timeout(self): ''' cmd.run trigger timeout diff --git a/tests/integration/modules/test_cp.py b/tests/integration/modules/test_cp.py index f473283507..713190b32e 100644 --- a/tests/integration/modules/test_cp.py +++ b/tests/integration/modules/test_cp.py @@ -14,7 +14,10 @@ import textwrap # Import Salt Testing libs from tests.support.case import ModuleCase -from tests.support.helpers import get_unused_localhost_port, skip_if_not_root +from tests.support.helpers import ( + get_unused_localhost_port, + skip_if_not_root, + with_tempfile) from tests.support.unit import skipIf import tests.support.paths as paths @@ -41,11 +44,11 @@ class CPModuleTest(ModuleCase): super(CPModuleTest, self).run_function(*args, **kwargs) ) - def test_get_file(self): + @with_tempfile + def test_get_file(self, tgt): ''' cp.get_file ''' - tgt = os.path.join(paths.TMP, 'scene33') self.run_function( 'cp.get_file', [ @@ -73,11 +76,11 @@ class CPModuleTest(ModuleCase): self.assertIn('KNIGHT: They\'re nervous, sire.', data) self.assertNotIn('bacon', data) - def test_get_file_templated_paths(self): + @with_tempfile + def test_get_file_templated_paths(self, tgt): ''' cp.get_file ''' - tgt = os.path.join(paths.TMP, 'cheese') self.run_function( 'cp.get_file', [ @@ -91,11 +94,11 @@ class CPModuleTest(ModuleCase): self.assertIn('Gromit', data) self.assertNotIn('bacon', data) - def test_get_file_gzipped(self): + @with_tempfile + def test_get_file_gzipped(self, tgt): ''' cp.get_file ''' - tgt = os.path.join(paths.TMP, 'file.big') src = os.path.join(paths.FILES, 'file', 'base', 'file.big') with salt.utils.files.fopen(src, 'rb') as fp_: hash_str = hashlib.md5(fp_.read()).hexdigest() @@ -134,11 +137,11 @@ class CPModuleTest(ModuleCase): self.assertIn('KNIGHT: They\'re nervous, sire.', data) self.assertNotIn('bacon', data) - def test_get_template(self): + @with_tempfile + def test_get_template(self, tgt): ''' cp.get_template ''' - tgt = os.path.join(paths.TMP, 'scene33') self.run_function( 'cp.get_template', ['salt://grail/scene33', tgt], @@ -183,11 +186,11 @@ class CPModuleTest(ModuleCase): # cp.get_url tests - def 
test_get_url(self): + @with_tempfile + def test_get_url(self, tgt): ''' cp.get_url with salt:// source given ''' - tgt = os.path.join(paths.TMP, 'scene33') self.run_function( 'cp.get_url', [ @@ -274,11 +277,11 @@ class CPModuleTest(ModuleCase): self.assertIn('KNIGHT: They\'re nervous, sire.', data) self.assertNotIn('bacon', data) - def test_get_url_https(self): + @with_tempfile + def test_get_url_https(self, tgt): ''' cp.get_url with https:// source given ''' - tgt = os.path.join(paths.TMP, 'test_get_url_https') self.run_function( 'cp.get_url', [ @@ -616,7 +619,8 @@ class CPModuleTest(ModuleCase): self.assertEqual( sha256_hash['hsum'], hashlib.sha256(data).hexdigest()) - def test_get_file_from_env_predefined(self): + @with_tempfile + def test_get_file_from_env_predefined(self, tgt): ''' cp.get_file ''' @@ -630,7 +634,8 @@ class CPModuleTest(ModuleCase): finally: os.unlink(tgt) - def test_get_file_from_env_in_url(self): + @with_tempfile + def test_get_file_from_env_in_url(self, tgt): tgt = os.path.join(paths.TMP, 'cheese') try: self.run_function('cp.get_file', ['salt://cheese?saltenv=prod', tgt]) diff --git a/tests/integration/modules/test_git.py b/tests/integration/modules/test_git.py index 1172e70332..6ede0f96f1 100644 --- a/tests/integration/modules/test_git.py +++ b/tests/integration/modules/test_git.py @@ -147,8 +147,7 @@ class GitModuleTest(ModuleCase): TODO: maybe move this behavior to ModuleCase itself? ''' return salt.utils.data.decode( - super(GitModuleTest, self).run_function(*args, **kwargs), - encoding='utf-8' + super(GitModuleTest, self).run_function(*args, **kwargs) ) def tearDown(self): @@ -207,8 +206,7 @@ class GitModuleTest(ModuleCase): self.run_function('cmd.run', ['cp ' + tar_archive + ' /root/']) with closing(tarfile.open(tar_archive, 'r')) as tar_obj: self.assertEqual( - sorted(salt.utils.data.decode(tar_obj.getnames(), - encoding='utf-8')), + sorted(salt.utils.data.decode(tar_obj.getnames())), sorted([ 'foo', 'foo/bar', 'foo/baz', 'foo/foo', 'foo/питон', 'foo/qux', 'foo/qux/bar', 'foo/qux/baz', 'foo/qux/foo', @@ -238,8 +236,7 @@ class GitModuleTest(ModuleCase): self.assertTrue(tarfile.is_tarfile(tar_archive)) with closing(tarfile.open(tar_archive, 'r')) as tar_obj: self.assertEqual( - sorted(salt.utils.data.decode(tar_obj.getnames(), - encoding='utf-8')), + sorted(salt.utils.data.decode(tar_obj.getnames())), sorted(['foo', 'foo/bar', 'foo/baz', 'foo/foo', 'foo/питон']) ) finally: diff --git a/tests/integration/modules/test_groupadd.py b/tests/integration/modules/test_groupadd.py index 46c9ee4394..f917c0314e 100644 --- a/tests/integration/modules/test_groupadd.py +++ b/tests/integration/modules/test_groupadd.py @@ -9,11 +9,13 @@ import string # Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.helpers import destructiveTest, skip_if_not_root +from tests.support.unit import skipIf # Import Salt libs from salt.ext import six from salt.ext.six.moves import range import salt.utils.files +import salt.utils.platform import salt.utils.stringutils @@ -34,13 +36,13 @@ class GroupModuleTest(ModuleCase): self._no_user = self.__random_string() self._group = self.__random_string() self._no_group = self.__random_string() - self._gid = 64989 - self._new_gid = 64998 - os_grain = self.run_function('grains.item', ['kernel']) - if os_grain['kernel'] not in 'Linux': + self.os_grain = self.run_function('grains.item', ['kernel']) + self._gid = 64989 if 'Windows' not in self.os_grain['kernel'] else None + self._new_gid = 64998 if 'Windows' not in 
self.os_grain['kernel'] else None + if self.os_grain['kernel'] not in ('Linux', 'Windows'): self.skipTest( 'Test not applicable to \'{kernel}\' kernel'.format( - **os_grain + **self.os_grain ) ) @@ -110,12 +112,18 @@ class GroupModuleTest(ModuleCase): Test the add group function ''' # add a new group - self.assertTrue(self.run_function('group.add', [self._group, self._gid])) + self.assertTrue(self.run_function('group.add', [self._group], gid=self._gid)) group_info = self.run_function('group.info', [self._group]) - self.assertEqual(group_info['name'], self._group) self.assertEqual(group_info['gid'], self._gid) + self.assertEqual(group_info['name'], self._group) # try adding the group again - self.assertFalse(self.run_function('group.add', [self._group, self._gid])) + if self.os_grain['kernel'] == 'Windows': + add_group = self.run_function('group.add', [self._group], gid=self._gid) + self.assertEqual(add_group['result'], None) + self.assertEqual(add_group['comment'], 'The group {0} already exists.'.format(self._group)) + self.assertEqual(add_group['changes'], []) + else: + self.assertFalse(self.run_function('group.add', [self._group], gid=self._gid)) @destructiveTest def test_add_system_group(self): @@ -164,26 +172,32 @@ class GroupModuleTest(ModuleCase): self.assertTrue(self.run_function('group.delete', [self._group])) # group does not exist - self.assertFalse(self.run_function('group.delete', [self._no_group])) + if self.os_grain['kernel'] == 'Windows': + del_group = self.run_function('group.delete', [self._no_group]) + self.assertEqual(del_group['changes'], []) + self.assertEqual(del_group['comment'], 'The group {0} does not exists.'.format(self._no_group)) + else: + self.assertFalse(self.run_function('group.delete', [self._no_group])) def test_info(self): ''' Test the info group function ''' - self.run_function('group.add', [self._group, self._gid]) + self.run_function('group.add', [self._group], gid=self._gid) self.run_function('user.add', [self._user]) self.run_function('group.adduser', [self._group, self._user]) group_info = self.run_function('group.info', [self._group]) self.assertEqual(group_info['name'], self._group) self.assertEqual(group_info['gid'], self._gid) - self.assertIn(self._user, group_info['members']) + self.assertIn(self._user, str(group_info['members'])) + @skipIf(salt.utils.platform.is_windows(), 'gid test skipped on windows') def test_chgid(self): ''' Test the change gid function ''' - self.run_function('group.add', [self._group, self._gid]) + self.run_function('group.add', [self._group], gid=self._gid) self.assertTrue(self.run_function('group.chgid', [self._group, self._new_gid])) group_info = self.run_function('group.info', [self._group]) self.assertEqual(group_info['gid'], self._new_gid) @@ -192,47 +206,55 @@ class GroupModuleTest(ModuleCase): ''' Test the add user to group function ''' - self.run_function('group.add', [self._group, self._gid]) + self.run_function('group.add', [self._group], gid=self._gid) self.run_function('user.add', [self._user]) self.assertTrue(self.run_function('group.adduser', [self._group, self._user])) group_info = self.run_function('group.info', [self._group]) - self.assertIn(self._user, group_info['members']) - # try to add a non existing user - self.assertFalse(self.run_function('group.adduser', [self._group, self._no_user])) - # try to add a user to non existing group - self.assertFalse(self.run_function('group.adduser', [self._no_group, self._user])) - # try to add a non existing user to a non existing group - 
self.assertFalse(self.run_function('group.adduser', [self._no_group, self._no_user])) + self.assertIn(self._user, str(group_info['members'])) + if self.os_grain['kernel'] == 'Windows': + no_group = self.run_function('group.adduser', [self._no_group, self._no_user]) + no_user = self.run_function('group.adduser', [self._group, self._no_user]) + funcs = [no_group, no_user] + for func in funcs: + self.assertIn('Fail', func['comment']) + self.assertFalse(func['result']) + else: + # try add a non existing user + self.assertFalse(self.run_function('group.adduser', [self._group, self._no_user])) + # try add a user to non existing group + self.assertFalse(self.run_function('group.adduser', [self._no_group, self._user])) + # try add a non existing user to a non existing group + self.assertFalse(self.run_function('group.adduser', [self._no_group, self._no_user])) def test_deluser(self): ''' Test the delete user from group function ''' - self.run_function('group.add', [self._group, self._gid]) + self.run_function('group.add', [self._group], gid=self._gid) self.run_function('user.add', [self._user]) self.run_function('group.adduser', [self._group, self._user]) self.assertTrue(self.run_function('group.deluser', [self._group, self._user])) group_info = self.run_function('group.info', [self._group]) - self.assertNotIn(self._user, group_info['members']) + self.assertNotIn(self._user, str(group_info['members'])) def test_members(self): ''' Test the members function ''' - self.run_function('group.add', [self._group, self._gid]) + self.run_function('group.add', [self._group], gid=self._gid) self.run_function('user.add', [self._user]) self.run_function('user.add', [self._user1]) m = '{0},{1}'.format(self._user, self._user1) self.assertTrue(self.run_function('group.members', [self._group, m])) group_info = self.run_function('group.info', [self._group]) - self.assertIn(self._user, group_info['members']) - self.assertIn(self._user1, group_info['members']) + self.assertIn(self._user, str(group_info['members'])) + self.assertIn(self._user1, str(group_info['members'])) def test_getent(self): ''' Test the getent function ''' - self.run_function('group.add', [self._group, self._gid]) + self.run_function('group.add', [self._group], gid=self._gid) self.run_function('user.add', [self._user]) self.run_function('group.adduser', [self._group, self._user]) ginfo = self.run_function('user.getent') diff --git a/tests/integration/modules/test_pkg.py b/tests/integration/modules/test_pkg.py index aa10617dfd..4168bff1c8 100644 --- a/tests/integration/modules/test_pkg.py +++ b/tests/integration/modules/test_pkg.py @@ -272,38 +272,53 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin): self.run_function('pkg.refresh_db') if os_family == 'Suse': - # pkg.latest version returns empty if the latest version is already installed - vim_version_dict = self.run_function('pkg.latest_version', ['vim']) - vim_info = self.run_function('pkg.info_available', ['vim'])['vim'] - if vim_version_dict == {}: - # Latest version is installed, get its version and construct - # a version selector so the immediately previous version is selected - vim_version = 'version=<'+vim_info['version'] - else: - # Vim was not installed, so pkg.latest_version returns the latest one. - # Construct a version selector so immediately previous version is selected - vim_version = 'version=<'+vim_version_dict + # This test assumes that there are multiple possible versions of a + # package available. 
That makes it brittle if you pick just one + # target, as changes in the available packages will break the test. + # Therefore, we'll choose from several packages to make sure we get + # one that is suitable for this test. + packages = ('hwinfo', 'avrdude', 'diffoscope', 'vim') - # Only install a new version of vim if vim is up-to-date, otherwise we don't - # need this check. (And the test will fail when we check for the empty dict - # since vim gets upgraded in the install step.) - if 'out-of-date' not in vim_info['status']: - # Install a version of vim that should need upgrading - ret = self.run_function('pkg.install', ['vim', vim_version]) - if not isinstance(ret, dict): - if ret.startswith('ERROR'): - self.skipTest('Could not install earlier vim to complete test.') + available = self.run_function('pkg.list_repo_pkgs', packages) + versions = self.run_function('pkg.version', packages) + + for package in packages: + try: + new, old = available[package][:2] + except (KeyError, ValueError): + # Package not available, or less than 2 versions + # available. This is not a suitable target. + continue else: - self.assertNotEqual(ret, {}) + target = package + current = versions[target] + break + else: + # None of the packages have more than one version available, so + # we need to find new package(s). pkg.list_repo_pkgs can be + # used to get an overview of the available packages. We should + # try to find packages with few dependencies and small download + # sizes, to keep this test from taking longer than necessary. + self.fail('No suitable package found for this test') - # Run a system upgrade, which should catch the fact that Vim needs upgrading, and upgrade it. + # Make sure we have the 2nd-oldest available version installed + ret = self.run_function('pkg.install', [target], version=old) + if not isinstance(ret, dict): + if ret.startswith('ERROR'): + self.skipTest( + 'Could not install older {0} to complete ' + 'test.'.format(target) + ) + + # Run a system upgrade, which should catch the fact that the + # targeted package needs upgrading, and upgrade it. ret = self.run_function(func) # The changes dictionary should not be empty. 
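The package-selection loop above depends on the shape of the ``pkg.list_repo_pkgs`` return data: a dictionary mapping each package name to its available versions, newest first. A minimal sketch of the selection with illustrative data (the real values depend on the configured repositories):

.. code-block:: python

    # Illustrative pkg.list_repo_pkgs output (newest version first)
    available = {
        'hwinfo': ['21.23-1.3', '21.23-1.1'],
        'avrdude': ['6.3-1.2'],
    }
    # Illustrative pkg.version output (empty string means not installed)
    versions = {'hwinfo': '', 'avrdude': '', 'diffoscope': '', 'vim': '8.0'}

    for package in ('hwinfo', 'avrdude', 'diffoscope', 'vim'):
        try:
            new, old = available[package][:2]   # need at least two versions
        except (KeyError, ValueError):
            continue                            # unsuitable target, try the next one
        target, current = package, versions[package]
        break
    else:
        raise RuntimeError('No suitable package found for this test')

    # 'old' is installed first so the subsequent upgrade has something to do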
if 'changes' in ret: - self.assertIn('vim', ret['changes']) + self.assertIn(target, ret['changes']) else: - self.assertIn('vim', ret) + self.assertIn(target, ret) else: ret = self.run_function('pkg.list_upgrades') if ret == '' or ret == {}: diff --git a/tests/integration/modules/test_saltutil.py b/tests/integration/modules/test_saltutil.py index 09d24bf948..a018340a1f 100644 --- a/tests/integration/modules/test_saltutil.py +++ b/tests/integration/modules/test_saltutil.py @@ -84,7 +84,8 @@ class SaltUtilSyncModuleTest(ModuleCase): 'beacons': [], 'utils': [], 'returners': [], - 'modules': ['modules.override_test', + 'modules': ['modules.mantest', + 'modules.override_test', 'modules.runtests_decorators', 'modules.runtests_helpers', 'modules.salttest'], @@ -131,7 +132,8 @@ class SaltUtilSyncModuleTest(ModuleCase): 'beacons': [], 'utils': [], 'returners': [], - 'modules': ['modules.override_test', + 'modules': ['modules.mantest', + 'modules.override_test', 'modules.runtests_helpers', 'modules.salttest'], 'renderers': [], diff --git a/tests/integration/modules/test_state.py b/tests/integration/modules/test_state.py index b0664ac321..b3c13ddbb8 100644 --- a/tests/integration/modules/test_state.py +++ b/tests/integration/modules/test_state.py @@ -1476,6 +1476,56 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin): test_data = state_run['cmd_|-test_non_failing_state_|-echo "Should not run"_|-run'] self.assertIn('duration', test_data) + def test_multiple_onfail_requisite_with_required(self): + ''' + test to ensure multiple states are run + when specified as onfails for a single state. + This is a test for the issue: + https://github.com/saltstack/salt/issues/46552 + ''' + + state_run = self.run_function('state.sls', mods='requisites.onfail_multiple_required') + + retcode = state_run['cmd_|-b_|-echo b_|-run']['changes']['retcode'] + self.assertEqual(retcode, 0) + + retcode = state_run['cmd_|-c_|-echo c_|-run']['changes']['retcode'] + self.assertEqual(retcode, 0) + + retcode = state_run['cmd_|-d_|-echo d_|-run']['changes']['retcode'] + self.assertEqual(retcode, 0) + + stdout = state_run['cmd_|-b_|-echo b_|-run']['changes']['stdout'] + self.assertEqual(stdout, 'b') + + stdout = state_run['cmd_|-c_|-echo c_|-run']['changes']['stdout'] + self.assertEqual(stdout, 'c') + + stdout = state_run['cmd_|-d_|-echo d_|-run']['changes']['stdout'] + self.assertEqual(stdout, 'd') + + def test_multiple_onfail_requisite_with_required_no_run(self): + ''' + test to ensure multiple states are not run + when specified as onfails for a single state + which fails. 
+ This is a test for the issue: + https://github.com/saltstack/salt/issues/46552 + ''' + + state_run = self.run_function('state.sls', mods='requisites.onfail_multiple_required_no_run') + + expected = 'State was not run because onfail req did not change' + + stdout = state_run['cmd_|-b_|-echo b_|-run']['comment'] + self.assertEqual(stdout, expected) + + stdout = state_run['cmd_|-c_|-echo c_|-run']['comment'] + self.assertEqual(stdout, expected) + + stdout = state_run['cmd_|-d_|-echo d_|-run']['comment'] + self.assertEqual(stdout, expected) + # listen tests def test_listen_requisite(self): @@ -1855,6 +1905,16 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin): for id in _expected: self.assertEqual(sls[id]['comment'], _expected[id]['comment']) + def test_state_sls_unicode_characters(self): + ''' + test state.sls when state file contains non-ascii characters + ''' + ret = self.run_function('state.sls', ['issue-46672']) + log.debug('== ret %s ==', type(ret)) + + _expected = "cmd_|-echo1_|-echo 'This is Æ test!'_|-run" + self.assertIn(_expected, ret) + def tearDown(self): nonbase_file = os.path.join(TMP, 'nonbase_env') if os.path.isfile(nonbase_file): diff --git a/tests/integration/scheduler/test_eval.py b/tests/integration/scheduler/test_eval.py index 9e7fe4f899..7891366866 100644 --- a/tests/integration/scheduler/test_eval.py +++ b/tests/integration/scheduler/test_eval.py @@ -469,4 +469,90 @@ class SchedulerEvalTest(ModuleCase, SaltReturnAssertsMixin): run_time = dateutil_parser.parse('11/29/2017 3:00pm') self.schedule.eval(now=run_time) ret = self.schedule.job_status('job1') - self.assertEqual(ret['_last_run'], run_time) + + def test_eval_splay(self): + ''' + verify that scheduled job runs with splayed time + ''' + job = { + 'schedule': { + 'job_eval_splay': { + 'function': 'test.ping', + 'seconds': '30', + 'splay': '10' + } + } + } + + # Add job to schedule + self.schedule.opts.update(job) + + with patch('random.randint', MagicMock(return_value=10)): + # eval at 2:00pm to prime, simulate minion start up. + run_time = dateutil_parser.parse('11/29/2017 2:00pm') + self.schedule.eval(now=run_time) + ret = self.schedule.job_status('job_eval_splay') + + # eval at 2:00:40pm, will run. + run_time = dateutil_parser.parse('11/29/2017 2:00:40pm') + self.schedule.eval(now=run_time) + ret = self.schedule.job_status('job_eval_splay') + self.assertEqual(ret['_last_run'], run_time) + + def test_eval_splay_range(self): + ''' + verify that scheduled job runs with splayed time + ''' + job = { + 'schedule': { + 'job_eval_splay': { + 'function': 'test.ping', + 'seconds': '30', + 'splay': {'start': 5, 'end': 10} + } + } + } + + # Add job to schedule + self.schedule.opts.update(job) + + with patch('random.randint', MagicMock(return_value=10)): + # eval at 2:00pm to prime, simulate minion start up. + run_time = dateutil_parser.parse('11/29/2017 2:00pm') + self.schedule.eval(now=run_time) + ret = self.schedule.job_status('job_eval_splay') + + # eval at 2:00:40pm, will run. 
+ run_time = dateutil_parser.parse('11/29/2017 2:00:40pm') + self.schedule.eval(now=run_time) + ret = self.schedule.job_status('job_eval_splay') + self.assertEqual(ret['_last_run'], run_time) + + def test_eval_splay_global(self): + ''' + verify that scheduled job runs with splayed time + ''' + job = { + 'schedule': { + 'splay': {'start': 5, 'end': 10}, + 'job_eval_splay': { + 'function': 'test.ping', + 'seconds': '30', + } + } + } + + # Add job to schedule + self.schedule.opts.update(job) + + with patch('random.randint', MagicMock(return_value=10)): + # eval at 2:00pm to prime, simulate minion start up. + run_time = dateutil_parser.parse('11/29/2017 2:00pm') + self.schedule.eval(now=run_time) + ret = self.schedule.job_status('job_eval_splay') + + # eval at 2:00:40pm, will run. + run_time = dateutil_parser.parse('11/29/2017 2:00:40pm') + self.schedule.eval(now=run_time) + ret = self.schedule.job_status('job_eval_splay') + self.assertEqual(ret['_last_run'], run_time) diff --git a/tests/integration/shell/test_auth.py b/tests/integration/shell/test_auth.py index a22f5fc8c5..bc85f0b061 100644 --- a/tests/integration/shell/test_auth.py +++ b/tests/integration/shell/test_auth.py @@ -59,12 +59,18 @@ class AuthTest(ShellCase): def setUp(self): for user in (self.userA, self.userB): try: + if salt.utils.platform.is_darwin() and user not in str(self.run_call('user.list_users')): + # workaround for https://github.com/saltstack/salt-jenkins/issues/504 + raise KeyError pwd.getpwnam(user) except KeyError: self.run_call('user.add {0} createhome=False'.format(user)) # only put userB into the group for the group auth test try: + if salt.utils.platform.is_darwin() and self.group not in str(self.run_call('group.info {0}'.format(self.group))): + # workaround for https://github.com/saltstack/salt-jenkins/issues/504 + raise KeyError grp.getgrnam(self.group) except KeyError: self.run_call('group.add {0}'.format(self.group)) diff --git a/tests/integration/spm/test_man_spm.py b/tests/integration/spm/test_man_spm.py deleted file mode 100644 index 51b9806c34..0000000000 --- a/tests/integration/spm/test_man_spm.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -''' -Tests man spm -''' -# Import python libs -from __future__ import absolute_import -import os -import shutil -import sys -import tempfile - -# Import Salt Testing libs -from tests.support.case import ModuleCase -from tests.support.helpers import destructiveTest, flaky -from tests.support.paths import CODE_DIR - - -@destructiveTest -class SPMManTest(ModuleCase): - ''' - Validate man spm - ''' - - def setUp(self): - self.tmpdir = tempfile.mktemp() - os.mkdir(self.tmpdir) - self.run_function('cmd.run', ['{0} {1} install --root={2}'.format( - sys.executable, - os.path.join(CODE_DIR, 'setup.py'), - self.tmpdir - )]) - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - @flaky - def test_man_spm(self): - ''' - test man spm - ''' - manpath = self.run_function('cmd.run', ['find {0} -name spm.1'.format(self.tmpdir)]) - self.assertIn('/man1/', manpath) - cmd = self.run_function('cmd.run', ['man {0}'.format(manpath)]) - self.assertIn('Salt Package Manager', cmd) - self.assertIn('command for managing Salt packages', cmd) diff --git a/tests/integration/states/test_file.py b/tests/integration/states/test_file.py index f7858615b6..fb785ab545 100644 --- a/tests/integration/states/test_file.py +++ b/tests/integration/states/test_file.py @@ -2182,7 +2182,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): 'salt_utf8_tests', '{0}.txt'.format(korean_1) ) - 
test_file_encoded = salt.utils.stringutils.to_str(test_file) + test_file_encoded = test_file template_path = os.path.join(TMP_STATE_TREE, 'issue-8947.sls') # create the sls template template_lines = [ @@ -2247,46 +2247,45 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): ' 한국어 시험\n' '+마지막 행\n' ) - diff = salt.utils.stringutils.to_str(diff) # future_lint: disable=blacklisted-function expected = { - str('file_|-some-utf8-file-create_|-{0}_|-managed').format(test_file_encoded): { + 'file_|-some-utf8-file-create_|-{0}_|-managed'.format(test_file_encoded): { 'name': test_file_encoded, '__run_num__': 0, - 'comment': str('File {0} updated').format(test_file_encoded), + 'comment': 'File {0} updated'.format(test_file_encoded), 'diff': 'New file' }, - str('file_|-some-utf8-file-create2_|-{0}_|-managed').format(test_file_encoded): { + 'file_|-some-utf8-file-create2_|-{0}_|-managed'.format(test_file_encoded): { 'name': test_file_encoded, '__run_num__': 1, - 'comment': str('File {0} updated').format(test_file_encoded), + 'comment': 'File {0} updated'.format(test_file_encoded), 'diff': diff }, - str('file_|-some-utf8-file-exists_|-{0}_|-exists').format(test_file_encoded): { + 'file_|-some-utf8-file-exists_|-{0}_|-exists'.format(test_file_encoded): { 'name': test_file_encoded, '__run_num__': 2, - 'comment': str('Path {0} exists').format(test_file_encoded) + 'comment': 'Path {0} exists'.format(test_file_encoded) }, - str('cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run').format(test_file_encoded): { - 'name': str('cat "{0}"').format(test_file_encoded), + 'cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run'.format(test_file_encoded): { + 'name': 'cat "{0}"'.format(test_file_encoded), '__run_num__': 3, - 'comment': str('Command "cat "{0}"" run').format(test_file_encoded), - 'stdout': str('{0}\n{1}\n{2}').format( - salt.utils.stringutils.to_str(korean_2), - salt.utils.stringutils.to_str(korean_1), - salt.utils.stringutils.to_str(korean_3), + 'comment': 'Command "cat "{0}"" run'.format(test_file_encoded), + 'stdout': '{0}\n{1}\n{2}'.format( + korean_2, + korean_1, + korean_3, ) }, - str('cmd_|-some-utf8-file-content-remove_|-rm -f "{0}"_|-run').format(test_file_encoded): { - 'name': str('rm -f "{0}"').format(test_file_encoded), + 'cmd_|-some-utf8-file-content-remove_|-rm -f "{0}"_|-run'.format(test_file_encoded): { + 'name': 'rm -f "{0}"'.format(test_file_encoded), '__run_num__': 4, - 'comment': str('Command "rm -f "{0}"" run').format(test_file_encoded), + 'comment': 'Command "rm -f "{0}"" run'.format(test_file_encoded), 'stdout': '' }, - str('file_|-some-utf8-file-removed_|-{0}_|-missing').format(test_file_encoded): { + 'file_|-some-utf8-file-removed_|-{0}_|-missing'.format(test_file_encoded): { 'name': test_file_encoded, '__run_num__': 5, - 'comment': str('Path {0} is missing').format(test_file_encoded), + 'comment': 'Path {0} is missing'.format(test_file_encoded), } } # future_lint: enable=blacklisted-function @@ -2308,7 +2307,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin): self.assertEqual(expected, result) # future_lint: disable=blacklisted-function - cat_id = str('cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run').format(test_file_encoded) + cat_id = 'cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run'.format(test_file_encoded) # future_lint: enable=blacklisted-function self.assertEqual( salt.utils.stringutils.to_unicode(result[cat_id]['stdout']), diff --git a/tests/integration/states/test_git.py b/tests/integration/states/test_git.py index bfb39502c8..d1e79a9ca1 100644 --- 
a/tests/integration/states/test_git.py +++ b/tests/integration/states/test_git.py @@ -244,8 +244,9 @@ class GitTest(ModuleCase, SaltReturnAssertsMixin): self.assertSaltTrueReturn(ret) self.assertEqual( ret[next(iter(ret))]['comment'], - ('Repository {0} is up-to-date, but with local changes. Set ' - '\'force_reset\' to True to purge local changes.'.format(name)) + ('Repository {0} is up-to-date, but with uncommitted changes. ' + 'Set \'force_reset\' to True to purge uncommitted changes.' + .format(name)) ) # Now run the state with force_reset=True @@ -560,43 +561,228 @@ class LocalRepoGitTest(ModuleCase, SaltReturnAssertsMixin): ''' Tests which do no require connectivity to github.com ''' + def setUp(self): + self.repo = tempfile.mkdtemp(dir=TMP) + self.admin = tempfile.mkdtemp(dir=TMP) + self.target = tempfile.mkdtemp(dir=TMP) + for dirname in (self.repo, self.admin, self.target): + self.addCleanup(shutil.rmtree, dirname, ignore_errors=True) + + # Create bare repo + self.run_function('git.init', [self.repo], bare=True) + # Clone bare repo + self.run_function('git.clone', [self.admin], url=self.repo) + self._commit(self.admin, '', message='initial commit') + self._push(self.admin) + + def _commit(self, repo_path, content, message): + with salt.utils.files.fopen(os.path.join(repo_path, 'foo'), 'a') as fp_: + fp_.write(content) + self.run_function('git.add', [repo_path, '.']) + self.run_function( + 'git.commit', [repo_path, message], + git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com', + ) + + def _push(self, repo_path, remote='origin', ref='master'): + self.run_function('git.push', [repo_path], remote=remote, ref=ref) + + def _test_latest_force_reset_setup(self): + # Perform the initial clone + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target) + self.assertSaltTrueReturn(ret) + + # Make and push changes to remote repo + self._commit(self.admin, + content='Hello world!\n', + message='added a line') + self._push(self.admin) + + # Make local changes to clone, but don't commit them + with salt.utils.files.fopen(os.path.join(self.target, 'foo'), 'a') as fp_: + fp_.write('Local changes!\n') + + def test_latest_force_reset_remote_changes(self): + ''' + This tests that an otherwise fast-forward change with local changes + will not reset local changes when force_reset='remote_changes' + ''' + self._test_latest_force_reset_setup() + + # This should fail because of the local changes + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target) + self.assertSaltFalseReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('there are uncommitted changes', ret['comment']) + self.assertIn( + 'Set \'force_reset\' to True (or \'remote-changes\')', + ret['comment'] + ) + self.assertEqual(ret['changes'], {}) + + # Now run again with force_reset='remote_changes', the state should + # succeed and discard the local changes + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target, + force_reset='remote-changes') + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('Uncommitted changes were discarded', ret['comment']) + self.assertIn('Repository was fast-forwarded', ret['comment']) + self.assertNotIn('forced update', ret['changes']) + self.assertIn('revision', ret['changes']) + + # Add new local changes, but don't commit them + with salt.utils.files.fopen(os.path.join(self.target, 'foo'), 'a') as fp_: + fp_.write('More local changes!\n') + + # Now run again with force_reset='remote_changes', the state should + #
succeed with an up-to-date message and mention that there are local + changes, telling the user how to discard them. + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target, + force_reset='remote-changes') + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('up-to-date, but with uncommitted changes', ret['comment']) + self.assertIn( + 'Set \'force_reset\' to True to purge uncommitted changes', + ret['comment'] + ) + self.assertEqual(ret['changes'], {}) + + def test_latest_force_reset_true_fast_forward(self): + ''' + This tests that an otherwise fast-forward change with local changes + does reset local changes when force_reset=True + ''' + self._test_latest_force_reset_setup() + + # Test that local changes are discarded and that we fast-forward + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target, + force_reset=True) + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('Uncommitted changes were discarded', ret['comment']) + self.assertIn('Repository was fast-forwarded', ret['comment']) + + # Add new local changes + with salt.utils.files.fopen(os.path.join(self.target, 'foo'), 'a') as fp_: + fp_.write('More local changes!\n') + + # Running without setting force_reset should mention uncommitted changes + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target) + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('up-to-date, but with uncommitted changes', ret['comment']) + self.assertIn( + 'Set \'force_reset\' to True to purge uncommitted changes', + ret['comment'] + ) + self.assertEqual(ret['changes'], {}) + + # Test that local changes are discarded + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target, + force_reset=True) + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('Uncommitted changes were discarded', ret['comment']) + self.assertIn('Repository was hard-reset', ret['comment']) + self.assertIn('forced update', ret['changes']) + + def test_latest_force_reset_true_non_fast_forward(self): + ''' + This tests that a non fast-forward change with divergent commits fails + unless force_reset=True.
+ ''' + self._test_latest_force_reset_setup() + + # Reset to remote HEAD + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target, + force_reset=True) + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('Uncommitted changes were discarded', ret['comment']) + self.assertIn('Repository was fast-forwarded', ret['comment']) + + # Make and push changes to remote repo + self._commit(self.admin, + content='New line\n', + message='added another line') + self._push(self.admin) + + # Make different changes to local file and commit locally + self._commit(self.target, + content='Different new line\n', + message='added a different line') + + # This should fail since the local clone has diverged and cannot + # fast-forward to the remote rev + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target) + self.assertSaltFalseReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('this is not a fast-forward merge', ret['comment']) + self.assertIn( + 'Set \'force_reset\' to True to force this update', + ret['comment'] + ) + self.assertEqual(ret['changes'], {}) + + # Repeat the state with force_reset=True and confirm that the hard + # reset was performed + ret = self.run_state( + 'git.latest', + name=self.repo, + target=self.target, + force_reset=True) + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertIn('Repository was hard-reset', ret['comment']) + self.assertIn('forced update', ret['changes']) + self.assertIn('revision', ret['changes']) + def test_renamed_default_branch(self): ''' Test the case where the remote branch has been removed https://github.com/saltstack/salt/issues/36242 ''' - repo = tempfile.mkdtemp(dir=TMP) - admin = tempfile.mkdtemp(dir=TMP) - name = tempfile.mkdtemp(dir=TMP) - for dirname in (repo, admin, name): - self.addCleanup(shutil.rmtree, dirname, ignore_errors=True) - - # Create bare repo - self.run_function('git.init', [repo], bare=True) - # Clone bare repo - self.run_function('git.clone', [admin], url=repo) - # Create, add, commit, and push file - with salt.utils.files.fopen(os.path.join(admin, 'foo'), 'w'): - pass - self.run_function('git.add', [admin, '.']) - self.run_function( - 'git.commit', [admin, 'initial commit'], - git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com', - ) - self.run_function('git.push', [admin], remote='origin', ref='master') - # Rename remote 'master' branch to 'develop' os.rename( - os.path.join(repo, 'refs', 'heads', 'master'), - os.path.join(repo, 'refs', 'heads', 'develop') + os.path.join(self.repo, 'refs', 'heads', 'master'), + os.path.join(self.repo, 'refs', 'heads', 'develop') ) # Run git.latest state. This should successfully clone and fail with a # specific error in the comment field. ret = self.run_state( 'git.latest', - name=repo, - target=name, + name=self.repo, + target=self.target, rev='develop', ) self.assertSaltFalseReturn(ret) @@ -610,19 +796,19 @@ class LocalRepoGitTest(ModuleCase, SaltReturnAssertsMixin): '(which will ensure that the named branch is created ' 'if it does not already exist).\n\n' 'Changes already made: {0} cloned to {1}' - .format(repo, name) + .format(self.repo, self.target) ) self.assertEqual( ret[next(iter(ret))]['changes'], - {'new': '{0} => {1}'.format(repo, name)} + {'new': '{0} => {1}'.format(self.repo, self.target)} ) # Run git.latest state again. This should fail again, with a different # error in the comment field, and should not change anything. 
ret = self.run_state( 'git.latest', - name=repo, - target=name, + name=self.repo, + target=self.target, rev='develop', ) self.assertSaltFalseReturn(ret) @@ -643,8 +829,8 @@ class LocalRepoGitTest(ModuleCase, SaltReturnAssertsMixin): # checkout a new branch and the state should pass. ret = self.run_state( 'git.latest', - name=repo, - target=name, + name=self.repo, + target=self.target, rev='develop', branch='develop', ) diff --git a/tests/integration/states/test_npm.py b/tests/integration/states/test_npm.py index 4fe59a21b4..4c6bccb95f 100644 --- a/tests/integration/states/test_npm.py +++ b/tests/integration/states/test_npm.py @@ -6,12 +6,14 @@ ''' # Import Python libs from __future__ import absolute_import, unicode_literals, print_function +import os # Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.unit import skipIf from tests.support.helpers import destructiveTest, requires_network from tests.support.mixins import SaltReturnAssertsMixin +from tests.support.runtests import RUNTIME_VARS # Import salt libs import salt.modules.cmdmod as cmd @@ -31,7 +33,7 @@ class NpmStateTest(ModuleCase, SaltReturnAssertsMixin): Basic test to determine if NPM module was successfully installed and removed. ''' - ret = self.run_state('npm.installed', name='pm2') + ret = self.run_state('npm.installed', name='pm2', registry="http://registry.npmjs.org/") self.assertSaltTrueReturn(ret) ret = self.run_state('npm.removed', name='pm2') self.assertSaltTrueReturn(ret) @@ -42,10 +44,23 @@ class NpmStateTest(ModuleCase, SaltReturnAssertsMixin): ''' Determine if URL-referenced NPM module can be successfully installed. ''' - ret = self.run_state('npm.installed', name='request/request#v2.81.1') + if LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION): + user = os.environ.get('SUDO_USER', 'root') + npm_dir = os.path.join(RUNTIME_VARS.TMP, 'git-install-npm') + self.run_state('file.directory', name=npm_dir, user=user, dir_mode='755') + else: + user = None + npm_dir = None + ret = self.run_state('npm.installed', + name='request/request#v2.81.1', + runas=user, + dir=npm_dir, + registry="http://registry.npmjs.org/") self.assertSaltTrueReturn(ret) - ret = self.run_state('npm.removed', name='git://github.com/request/request') + ret = self.run_state('npm.removed', name='git://github.com/request/request', runas=user, dir=npm_dir) self.assertSaltTrueReturn(ret) + if npm_dir is not None: + self.run_state('file.absent', name=npm_dir) @requires_network() @destructiveTest @@ -54,7 +69,7 @@ class NpmStateTest(ModuleCase, SaltReturnAssertsMixin): Basic test to determine if NPM module successfully installs multiple packages. 
''' - ret = self.run_state('npm.installed', name='unused', pkgs=['pm2', 'grunt']) + ret = self.run_state('npm.installed', name='unused', pkgs=['pm2', 'grunt'], registry="http://registry.npmjs.org/") self.assertSaltTrueReturn(ret) @skipIf(salt.utils.path.which('npm') and LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION), diff --git a/tests/integration/states/test_pip.py b/tests/integration/states/test_pip.py index 692067eb7f..6237b6f753 100644 --- a/tests/integration/states/test_pip.py +++ b/tests/integration/states/test_pip.py @@ -14,6 +14,7 @@ import os import pwd import glob import shutil +import sys # Import Salt Testing libs from tests.support.mixins import SaltReturnAssertsMixin @@ -525,6 +526,7 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin): if os.path.isdir(venv_dir): shutil.rmtree(venv_dir) + @skipIf(sys.version_info[:2] >= (3, 6), 'Old version of virtualenv too old for python3.6') def test_46127_pip_env_vars(self): ''' Test that checks if env_vars passed to pip.installed are also passed diff --git a/tests/integration/states/test_pkgrepo.py b/tests/integration/states/test_pkgrepo.py index 2aea0187c2..9a06d16208 100644 --- a/tests/integration/states/test_pkgrepo.py +++ b/tests/integration/states/test_pkgrepo.py @@ -22,16 +22,16 @@ import salt.utils.platform from salt.ext import six +@destructiveTest +@skipIf(salt.utils.platform.is_windows(), 'minion is windows') class PkgrepoTest(ModuleCase, SaltReturnAssertsMixin): ''' pkgrepo state tests ''' - @destructiveTest - @skipIf(salt.utils.platform.is_windows(), 'minion is windows') @requires_system_grains def test_pkgrepo_01_managed(self, grains): ''' - This is a destructive test as it adds a repository. + Test adding a repo ''' os_grain = self.run_function('grains.item', ['os'])['os'] os_release_info = tuple(self.run_function('grains.item', ['osrelease_info'])['osrelease_info']) @@ -56,12 +56,9 @@ class PkgrepoTest(ModuleCase, SaltReturnAssertsMixin): for state_id, state_result in six.iteritems(ret): self.assertSaltTrueReturn(dict([(state_id, state_result)])) - @destructiveTest - @skipIf(salt.utils.platform.is_windows(), 'minion is windows') def test_pkgrepo_02_absent(self): ''' - This is a destructive test as it removes the repository added in the - above test. 
+ Test removing the repo from the above test ''' os_grain = self.run_function('grains.item', ['os'])['os'] os_release_info = tuple(self.run_function('grains.item', ['osrelease_info'])['osrelease_info']) @@ -78,3 +75,56 @@ class PkgrepoTest(ModuleCase, SaltReturnAssertsMixin): self.assertReturnNonEmptySaltType(ret) for state_id, state_result in six.iteritems(ret): self.assertSaltTrueReturn(dict([(state_id, state_result)])) + + @requires_system_grains + def test_pkgrepo_03_with_comments(self, grains): + ''' + Test adding a repo with comments + ''' + os_family = grains['os_family'].lower() + + if os_family in ('redhat', 'suse'): + kwargs = { + 'name': 'examplerepo', + 'baseurl': 'http://example.com/repo', + 'enabled': False, + 'comments': ['This is a comment'] + } + elif os_family in ('debian',): + self.skipTest('Debian/Ubuntu test case needed') + else: + self.skipTest("No test case for os_family '{0}'".format(os_family)) + + try: + # Run the state to add the repo + ret = self.run_state('pkgrepo.managed', **kwargs) + self.assertSaltTrueReturn(ret) + + # Run again with modified comments + kwargs['comments'].append('This is another comment') + ret = self.run_state('pkgrepo.managed', **kwargs) + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertEqual( + ret['changes'], + { + 'comments': { + 'old': ['This is a comment'], + 'new': ['This is a comment', + 'This is another comment'] + } + } + ) + + # Run a third time, no changes should be made + ret = self.run_state('pkgrepo.managed', **kwargs) + self.assertSaltTrueReturn(ret) + ret = ret[next(iter(ret))] + self.assertFalse(ret['changes']) + self.assertEqual( + ret['comment'], + "Package repo '{0}' already configured".format(kwargs['name']) + ) + finally: + # Clean up + self.run_state('pkgrepo.absent', name=kwargs['name']) diff --git a/tests/runtests.py b/tests/runtests.py index 2329db44cd..eb973e64de 100755 --- a/tests/runtests.py +++ b/tests/runtests.py @@ -115,6 +115,9 @@ TEST_SUITES = { 'client': {'display_name': 'Client', 'path': 'integration/client'}, + 'doc': + {'display_name': 'Documentation', + 'path': 'integration/doc'}, 'ext_pillar': {'display_name': 'External Pillar', 'path': 'integration/pillar'}, @@ -292,6 +295,15 @@ class SaltTestsuiteParser(SaltCoverageTestingParser): action='store_true', help='Run tests for client' ) + self.test_selection_group.add_option( + '-d', + '--doc', + '--doc-tests', + dest='doc', + default=False, + action='store_true', + help='Run tests for documentation' + ) self.test_selection_group.add_option( '-I', '--ext-pillar', @@ -743,6 +755,7 @@ class SaltTestsuiteParser(SaltCoverageTestingParser): failfast=self.options.failfast, ) status.append(results) + return status for suite in TEST_SUITES: if suite != 'unit' and getattr(self.options, suite): status.append(self.run_integration_suite(**TEST_SUITES[suite])) diff --git a/tests/support/win_installer.py b/tests/support/win_installer.py new file mode 100644 index 0000000000..6aa139243f --- /dev/null +++ b/tests/support/win_installer.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +''' + :copyright: Copyright 2013-2017 by the SaltStack Team, see AUTHORS for more details. + :license: Apache 2.0, see LICENSE for more details. 
+ + + tests.support.win_installer + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Fetches the binary Windows installer +''' +from __future__ import absolute_import +import hashlib +import requests +import re + +PREFIX = 'Salt-Minion-' +REPO = "https://repo.saltstack.com/windows" + + +def iter_installers(content): + ''' + Parse a list of windows installer links and their corresponding md5 + checksum links. + ''' + HREF_RE = "<a href=\"(.*?)\">" + installer, md5 = None, None + for m in re.finditer(HREF_RE, content): + x = m.groups()[0] + if not x.startswith(PREFIX): + continue + if x.endswith('zip'): + continue + if installer: + if x != installer + '.md5': + raise Exception("Unable to parse response") + md5 = x + yield installer, md5 + installer, md5 = None, None + else: + installer = x + + +def split_installer(name): + ''' + Return a tuple of the salt version, python version and architecture from an + installer name. + ''' + x = name[len(PREFIX):] + return x.split('-')[:3] + + +def latest_version(repo=REPO): + ''' + Return the latest version found on the salt repository webpage. + ''' + for name, md5 in iter_installers(requests.get(repo).content): + pass + return split_installer(name)[0] + + +def installer_name(salt_ver, py_ver='Py2', arch='AMD64'): + ''' + Create an installer file name + ''' + return "Salt-Minion-{}-{}-{}-Setup.exe".format(salt_ver, py_ver, arch) + + +def latest_installer_name(repo=REPO, **kwargs): + ''' + Fetch the latest installer name + ''' + return installer_name(latest_version(repo), **kwargs) + + +def download_and_verify(fp, name, repo=REPO): + ''' + Download an installer and verify its contents. + ''' + md5 = "{}.md5".format(name) + url = lambda x: "{}/{}".format(repo, x) + resp = requests.get(url(md5)) + if resp.status_code != 200: + raise Exception("Unable to fetch installer md5") + installer_md5 = resp.text.strip().split()[0].lower() + resp = requests.get(url(name), stream=True) + if resp.status_code != 200: + raise Exception("Unable to fetch installer") + md5hsh = hashlib.md5() + for chunk in resp.iter_content(chunk_size=1024): + md5hsh.update(chunk) + fp.write(chunk) + if md5hsh.hexdigest() != installer_md5: + raise Exception("Installer's hash does not match {} != {}".format( + md5hsh.hexdigest(), installer_md5 + )) diff --git a/tests/unit/modules/test_cmdmod.py b/tests/unit/modules/test_cmdmod.py index 89acfb81e5..a04d171700 100644 --- a/tests/unit/modules/test_cmdmod.py +++ b/tests/unit/modules/test_cmdmod.py @@ -303,7 +303,7 @@ class CMDMODTestCase(TestCase, LoaderModuleMockMixin): environment = os.environ.copy() popen_mock.return_value = Mock( - communicate=lambda *args, **kwags: ['{}', None], + communicate=lambda *args, **kwags: [b'', None], pid=lambda: 1, retcode=0 ) diff --git a/tests/unit/modules/test_inspect_collector.py b/tests/unit/modules/test_inspect_collector.py index 94c2a9181d..e177a58a72 100644 --- a/tests/unit/modules/test_inspect_collector.py +++ b/tests/unit/modules/test_inspect_collector.py @@ -19,6 +19,8 @@ # Import Python Libs from __future__ import absolute_import, print_function, unicode_literals import os +import errno +import subprocess # Import Salt Testing Libs from tests.support.unit import TestCase, skipIf @@ -33,7 +35,33 @@ from tests.support.mock import ( from salt.modules.inspectlib.collector import Inspector +HAS_SYMLINKS = None + + +def no_symlinks(): + ''' + Check if git is installed and has symlinks enabled in the configuration.
+ ''' + global HAS_SYMLINKS + if HAS_SYMLINKS is not None: + return not HAS_SYMLINKS + output = '' + try: + output = subprocess.check_output('git config --get core.symlinks', shell=True) + except OSError as exc: + if exc.errno != errno.ENOENT: + raise + except subprocess.CalledProcessError: + # git returned non-zero status + pass + HAS_SYMLINKS = False + if output.strip() == 'true': + HAS_SYMLINKS = True + return not HAS_SYMLINKS + + @skipIf(NO_MOCK, NO_MOCK_REASON) +@skipIf(no_symlinks(), "Git missing 'core.symlinks=true' config") class InspectorCollectorTestCase(TestCase): ''' Test inspectlib:collector:Inspector diff --git a/tests/unit/modules/test_localemod.py b/tests/unit/modules/test_localemod.py index 05d3cf4c7e..ba942e181a 100644 --- a/tests/unit/modules/test_localemod.py +++ b/tests/unit/modules/test_localemod.py @@ -40,6 +40,11 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin): X11 Layout: us X11 Model: pc105 ''' + locale_ctl_notset = ''' + System Locale: n/a + VC Keymap: n/a + X11 Layout: n/a + ''' locale_ctl_out_empty = '' locale_ctl_out_broken = ''' System error:Recursive traversal of loopback mount points @@ -79,9 +84,36 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin): assert 'LANG' in out['system_locale'] assert 'LANGUAGE' in out['system_locale'] assert out['system_locale']['LANG'] == out['system_locale']['LANGUAGE'] == 'de_DE.utf8' - assert out['vc_keymap'] == 'n/a' - assert out['x11_layout'] == 'us' - assert out['x11_model'] == 'pc105' + assert isinstance(out['vc_keymap'], dict) + assert 'data' in out['vc_keymap'] + assert out['vc_keymap']['data'] == 'n/a' + assert isinstance(out['x11_layout'], dict) + assert 'data' in out['x11_layout'] + assert out['x11_layout']['data'] == 'us' + assert isinstance(out['x11_model'], dict) + assert 'data' in out['x11_model'] + assert out['x11_model']['data'] == 'pc105' + + @patch('salt.utils.path.which', MagicMock(return_value="/usr/bin/localctl")) + @patch('salt.modules.localemod.__salt__', {'cmd.run': MagicMock(return_value=locale_ctl_notset)}) + def test_localectl_status_parser_notset(self): + ''' + Test localectl status parser. 
+ :return: + ''' + out = localemod._localectl_status() + assert isinstance(out, dict) + for key in ['system_locale', 'vc_keymap', 'x11_layout']: + assert key in out + assert isinstance(out['system_locale'], dict) + assert 'data' in out['system_locale'] + assert out['system_locale']['data'] == 'n/a' + assert isinstance(out['vc_keymap'], dict) + assert 'data' in out['vc_keymap'] + assert out['vc_keymap']['data'] == 'n/a' + assert isinstance(out['x11_layout'], dict) + assert 'data' in out['x11_layout'] + assert out['x11_layout']['data'] == 'n/a' @patch('salt.modules.localemod.dbus', MagicMock()) def test_dbus_locale_parser_matches(self): @@ -154,7 +186,7 @@ class LocalemodTestCase(TestCase, LoaderModuleMockMixin): assert isinstance(out[key], dict) for in_key in out[key]: assert isinstance(out[key][in_key], six.text_type) - assert isinstance(out['reason'], six.text_type) + assert isinstance(out['reason']['data'], six.text_type) @patch('salt.utils.path.which', MagicMock(return_value="/usr/bin/localctl")) @patch('salt.modules.localemod.__grains__', {'os_family': 'Ubuntu', 'osmajorrelease': 42}) diff --git a/tests/unit/modules/test_x509.py b/tests/unit/modules/test_x509.py index a56cc4a9da..98fc84146e 100644 --- a/tests/unit/modules/test_x509.py +++ b/tests/unit/modules/test_x509.py @@ -78,7 +78,7 @@ class X509TestCase(TestCase, LoaderModuleMockMixin): Test private function _parse_subject(subject) it handles a missing fields :return: ''' - ca_key = '''-----BEGIN RSA PRIVATE KEY----- + ca_key = b'''-----BEGIN RSA PRIVATE KEY----- MIICWwIBAAKBgQCjdjbgL4kQ8Lu73xeRRM1q3C3K3ptfCLpyfw38LRnymxaoJ6ls pNSx2dU1uJ89YKFlYLo1QcEk4rJ2fdIjarV0kuNCY3rC8jYUp9BpAU5Z6p9HKeT1 2rTPH81JyjbQDR5PyfCyzYOQtpwpB4zIUUK/Go7tTm409xGKbbUFugJNgQIDAQAB @@ -163,4 +163,4 @@ c9bcgp7D7xD+TxWWNj4CSXEccJgGr91StV+gFg4ARQ== authorityKeyIdentifier='keyid,issuer:always', days_valid=3650, days_remaining=0) - self.assertIn('BEGIN CERTIFICATE', ret) + self.assertIn(b'BEGIN CERTIFICATE', ret) diff --git a/tests/unit/returners/test_local_cache.py b/tests/unit/returners/test_local_cache.py index 8d88d3d1b8..9c987d9b83 100644 --- a/tests/unit/returners/test_local_cache.py +++ b/tests/unit/returners/test_local_cache.py @@ -10,6 +10,7 @@ Unit tests for the Default Job Cache (local_cache). from __future__ import absolute_import, print_function, unicode_literals import os import shutil +import time import logging import tempfile import time @@ -82,6 +83,11 @@ class LocalCacheCleanOldJobsTestCase(TestCase, LoaderModuleMockMixin): # Call clean_old_jobs function, patching the keep_jobs value with a # very small value to force the call to clean the job. with patch.dict(local_cache.__opts__, {'keep_jobs': 0.00000001}): + # Sleep on Windows because time.time is only precise to 3 decimal + # points, and therefore subtracting the jid_ctime from time.time + # will result in a negative number + if salt.utils.platform.is_windows(): + time.sleep(0.25) local_cache.clean_old_jobs() # Assert that the JID dir was removed @@ -154,6 +160,11 @@ class LocalCacheCleanOldJobsTestCase(TestCase, LoaderModuleMockMixin): # Call clean_old_jobs function, patching the keep_jobs value with a # very small value to force the call to clean the job. 
with patch.dict(local_cache.__opts__, {'keep_jobs': 0.00000001}): + # Sleep on Windows because time.time is only precise to 3 decimal + # points, and therefore subtracting the jid_ctime from time.time + # will result in a negative number + if salt.utils.platform.is_windows(): + time.sleep(0.25) local_cache.clean_old_jobs() # there should be only 1 dir in TMP_JID_DIR diff --git a/tests/unit/returners/test_smtp_return.py b/tests/unit/returners/test_smtp_return.py index bce7d96c9a..1d1065613e 100644 --- a/tests/unit/returners/test_smtp_return.py +++ b/tests/unit/returners/test_smtp_return.py @@ -66,8 +66,8 @@ class SMTPReturnerTestCase(TestCase, LoaderModuleMockMixin): 'renderer': 'jinja|yaml', 'renderer_blacklist': [], 'renderer_whitelist': [], - 'file_roots': [], - 'pillar_roots': [], + 'file_roots': {}, + 'pillar_roots': {}, 'cachedir': '/'}), \ patch('salt.returners.smtp_return.gnupg'), \ patch('salt.returners.smtp_return.smtplib.SMTP') as mocked_smtplib: @@ -79,8 +79,8 @@ class SMTPReturnerTestCase(TestCase, LoaderModuleMockMixin): 'renderer': 'jinja|yaml', 'renderer_blacklist': [], 'renderer_whitelist': [], - 'file_roots': [], - 'pillar_roots': [], + 'file_roots': {}, + 'pillar_roots': {}, 'cachedir': '/'}), \ patch('salt.returners.smtp_return.smtplib.SMTP') as mocked_smtplib: self._test_returner(mocked_smtplib) diff --git a/tests/unit/states/test_environ.py b/tests/unit/states/test_environ.py index a895531702..994bb7eac5 100644 --- a/tests/unit/states/test_environ.py +++ b/tests/unit/states/test_environ.py @@ -6,7 +6,7 @@ import os # Import Salt Testing libs from tests.support.mixins import LoaderModuleMockMixin -from tests.support.unit import TestCase +from tests.support.unit import TestCase, skipIf from tests.support.mock import ( MagicMock, patch @@ -14,6 +14,8 @@ from tests.support.mock import ( # Import salt libs import salt.states.environ as envstate import salt.modules.environ as envmodule +import salt.modules.reg +import salt.utils.platform class TestEnvironState(TestCase, LoaderModuleMockMixin): @@ -22,17 +24,21 @@ loader_globals = { '__env__': 'base', '__opts__': {'test': False}, - '__salt__': {'environ.setenv': envmodule.setenv} + '__salt__': { + 'environ.setenv': envmodule.setenv, + 'reg.read_value': salt.modules.reg.read_value, + } } return {envstate: loader_globals, envmodule: loader_globals} def setUp(self): - original_environ = os.environ.copy() - os.environ = {'INITIAL': 'initial'} + patcher = patch.dict(os.environ, {'INITIAL': 'initial'}, clear=True) + patcher.start() - def reset_environ(original_environ): - os.environ = original_environ - self.addCleanup(reset_environ, original_environ) + def reset_environ(patcher): + patcher.stop() + + self.addCleanup(reset_environ, patcher) def test_setenv(self): ''' @@ -48,9 +54,12 @@ ret = envstate.setenv('test', 'other') self.assertEqual(ret['changes'], {}) + @skipIf(not salt.utils.platform.is_windows(), 'Windows only') def test_setenv_permanent(self): - with patch.dict(envmodule.__salt__, {'reg.set_value': MagicMock(), 'reg.delete_value': MagicMock()}), \ - patch('salt.utils.platform.is_windows', MagicMock(return_value=True)): + ''' + test that we can set permanent environment variables (requires pywin32) + ''' + with patch.dict(envmodule.__salt__, {'reg.set_value': MagicMock(), 'reg.delete_value': MagicMock()}): ret = envstate.setenv('test', 'value', permanent=True) self.assertEqual(ret['changes'], {'test':
'value'}) envmodule.__salt__['reg.set_value'].assert_called_with("HKCU", "Environment", 'test', 'value') @@ -91,7 +100,10 @@ class TestEnvironState(TestCase, LoaderModuleMockMixin): ''' ret = envstate.setenv('test', 'value', clear_all=True) self.assertEqual(ret['changes'], {'test': 'value', 'INITIAL': ''}) - self.assertEqual(envstate.os.environ, {'test': 'value', 'INITIAL': ''}) + if salt.utils.platform.is_windows(): + self.assertEqual(envstate.os.environ, {'TEST': 'value', 'INITIAL': ''}) + else: + self.assertEqual(envstate.os.environ, {'test': 'value', 'INITIAL': ''}) def test_setenv_clearall_with_unset(self): ''' @@ -100,7 +112,10 @@ class TestEnvironState(TestCase, LoaderModuleMockMixin): ''' ret = envstate.setenv('test', 'value', false_unsets=True, clear_all=True) self.assertEqual(ret['changes'], {'test': 'value', 'INITIAL': None}) - self.assertEqual(envstate.os.environ, {'test': 'value'}) + if salt.utils.platform.is_windows(): + self.assertEqual(envstate.os.environ, {'TEST': 'value'}) + else: + self.assertEqual(envstate.os.environ, {'test': 'value'}) def test_setenv_unset_multi(self): ''' @@ -113,12 +128,18 @@ class TestEnvironState(TestCase, LoaderModuleMockMixin): ret = envstate.setenv( 'notimportant', {'test': False, 'foo': 'baz'}, false_unsets=True) self.assertEqual(ret['changes'], {'test': None, 'foo': 'baz'}) - self.assertEqual(envstate.os.environ, {'INITIAL': 'initial', 'foo': 'baz'}) + if salt.utils.platform.is_windows(): + self.assertEqual(envstate.os.environ, {'INITIAL': 'initial', 'FOO': 'baz'}) + else: + self.assertEqual(envstate.os.environ, {'INITIAL': 'initial', 'foo': 'baz'}) with patch.dict(envstate.__salt__, {'reg.read_value': MagicMock()}): ret = envstate.setenv('notimportant', {'test': False, 'foo': 'bax'}) self.assertEqual(ret['changes'], {'test': '', 'foo': 'bax'}) - self.assertEqual(envstate.os.environ, {'INITIAL': 'initial', 'foo': 'bax', 'test': ''}) + if salt.utils.platform.is_windows(): + self.assertEqual(envstate.os.environ, {'INITIAL': 'initial', 'FOO': 'bax', 'TEST': ''}) + else: + self.assertEqual(envstate.os.environ, {'INITIAL': 'initial', 'foo': 'bax', 'test': ''}) def test_setenv_test_mode(self): ''' diff --git a/tests/unit/states/test_file.py b/tests/unit/states/test_file.py index d5226f19ae..61be8418d1 100644 --- a/tests/unit/states/test_file.py +++ b/tests/unit/states/test_file.py @@ -1861,3 +1861,36 @@ class TestFileState(TestCase, LoaderModuleMockMixin): run_checks(test=True) run_checks(strptime_format=fake_strptime_format) run_checks(strptime_format=fake_strptime_format, test=True) + + +class TestFindKeepFiles(TestCase): + + @skipIf(salt.utils.platform.is_windows(), 'Do not run on Windows') + def test__find_keep_files_unix(self): + keep = filestate._find_keep_files( + '/test/parent_folder', + ['/test/parent_folder/meh.txt'] + ) + expected = [ + '/', + '/test', + '/test/parent_folder', + '/test/parent_folder/meh.txt', + ] + actual = sorted(list(keep)) + assert actual == expected, actual + + @skipIf(not salt.utils.platform.is_windows(), 'Only run on Windows') + def test__find_keep_files_win32(self): + keep = filestate._find_keep_files( + 'c:\\test\\parent_folder', + ['C:\\test\\parent_folder\\meh-2.txt'] + ) + expected = [ + 'c:\\', + 'c:\\test', + 'c:\\test\\parent_folder', + 'c:\\test\\parent_folder\\meh-2.txt' + ] + actual = sorted(list(keep)) + assert actual == expected, actual diff --git a/tests/unit/states/test_win_network.py b/tests/unit/states/test_win_network.py index a21cbac4b0..16a2e77d66 100644 --- 
a/tests/unit/states/test_win_network.py +++ b/tests/unit/states/test_win_network.py @@ -41,17 +41,18 @@ class WinNetworkTestCase(TestCase, LoaderModuleMockMixin): ' static, dhcp.'}) self.assertDictEqual(win_network.managed('salt'), ret) - mock = MagicMock(return_value=False) + mock_false = MagicMock(return_value=False) + mock_true = MagicMock(return_value=True) mock1 = MagicMock(side_effect=[False, True, True, True, True, True, True]) mock2 = MagicMock(side_effect=[False, True, True, {'salt': 'True'}, {'salt': 'True'}]) - with patch.dict(win_network.__salt__, {"ip.is_enabled": mock, + with patch.dict(win_network.__salt__, {"ip.is_enabled": mock_false, "ip.is_disabled": mock1, - "ip.enable": mock, + "ip.enable": mock_false, "ip.get_interface": mock2, - "ip.set_dhcp_dns": mock, - "ip.set_dhcp_ip": mock}): + "ip.set_dhcp_dns": mock_false, + "ip.set_dhcp_ip": mock_false}): ret.update({'comment': "Interface 'salt' is up to date." " (already disabled)", 'result': True}) self.assertDictEqual(win_network.managed('salt', @@ -66,52 +67,54 @@ class WinNetworkTestCase(TestCase, LoaderModuleMockMixin): dns_proto='static', ip_proto='static'), ret) - mock = MagicMock(side_effect=['True', False, False, False, False, + mock_false = MagicMock(side_effect=['True', False, False, False, False, False]) - with patch.object(win_network, '_validate', mock): - ret.update({'comment': 'The following SLS configuration' - ' errors were detected: T r u e'}) - self.assertDictEqual(win_network.managed('salt', - dns_proto='static', - ip_proto='static'), - ret) - ret.update({'comment': "Unable to get current" - " configuration for interface 'salt'", - 'result': False}) - self.assertDictEqual(win_network.managed('salt', - dns_proto='dhcp', - ip_proto='dhcp'), - ret) + with patch.dict(win_network.__salt__, {"ip.is_enabled": mock_true}): + with patch.object(win_network, '_validate', mock_false): + ret.update({'comment': 'The following SLS configuration' + ' errors were detected: T r u e'}) + self.assertDictEqual(win_network.managed('salt', + dns_proto='static', + ip_proto='static'), + ret) - mock = MagicMock(side_effect=[False, [''], - {'dns_proto': 'dhcp', - 'ip_proto': 'dhcp'}, - {'dns_proto': 'dhcp', - 'ip_proto': 'dhcp'}]) - ret.update({'comment': "Interface 'salt' is up to date.", - 'result': True}) - with patch.object(win_network, '_changes', mock): + ret.update({'comment': "Unable to get current" + " configuration for interface 'salt'", + 'result': False}) self.assertDictEqual(win_network.managed('salt', dns_proto='dhcp', - ip_proto='dhcp' - ), ret) + ip_proto='dhcp'), + ret) - ret.update({'comment': "The following changes will be made" - " to interface 'salt': ", 'result': None}) - with patch.dict(win_network.__opts__, {"test": True}): + mock_false = MagicMock(side_effect=[False, [''], + {'dns_proto': 'dhcp', + 'ip_proto': 'dhcp'}, + {'dns_proto': 'dhcp', + 'ip_proto': 'dhcp'}]) + ret.update({'comment': "Interface 'salt' is up to date.", + 'result': True}) + with patch.object(win_network, '_changes', mock_false): self.assertDictEqual(win_network.managed('salt', - dns_proto='dh' - 'cp', + dns_proto='dhcp', ip_proto='dhcp' ), ret) - with patch.dict(win_network.__opts__, {"test": False}): - ret.update({'comment': "Failed to set desired" - " configuration settings for interface" - " 'salt'", 'result': False}) - self.assertDictEqual(win_network.managed('salt', - dns_proto='dh' - 'cp', - ip_proto='dhcp' - ), ret) + ret.update({'comment': "The following changes will be made" + " to interface 'salt': ", 'result': None}) + with 
patch.dict(win_network.__opts__, {"test": True}): + self.assertDictEqual(win_network.managed('salt', + dns_proto='dh' + 'cp', + ip_proto='dhcp' + ), ret) + + with patch.dict(win_network.__opts__, {"test": False}): + ret.update({'comment': "Failed to set desired" + " configuration settings for interface" + " 'salt'", 'result': False}) + self.assertDictEqual(win_network.managed('salt', + dns_proto='dh' + 'cp', + ip_proto='dhcp' + ), ret) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 1d94a71a30..0759a12b5f 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -13,7 +13,10 @@ from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON # Import Salt libs from salt import client -from salt.exceptions import EauthAuthenticationError, SaltInvocationError, SaltClientError +import salt.utils.platform +from salt.exceptions import ( + EauthAuthenticationError, SaltInvocationError, SaltClientError, SaltReqTimeoutError +) @skipIf(NO_MOCK, NO_MOCK_REASON) @@ -67,7 +70,13 @@ class LocalClientTestCase(TestCase, kwarg=None, tgt_type='list', ret='') + @skipIf(salt.utils.platform.is_windows(), 'Not supported on Windows') def test_pub(self): + ''' + Tests that the client cleanly returns when the publisher is not running + + Note: Requires ZeroMQ's IPC transport which is not supported on windows. + ''' if self.get_config('minion')['transport'] != 'zeromq': self.skipTest('This test only works with ZeroMQ') # Make sure we cleanly return if the publisher isn't running @@ -83,3 +92,27 @@ class LocalClientTestCase(TestCase, self.assertRaises(SaltInvocationError, self.client.pub, 'non_existent_group', 'test.ping', tgt_type='nodegroup') + + @skipIf(not salt.utils.platform.is_windows(), 'Windows only test') + def test_pub_win32(self): + ''' + Tests that the client raises a timeout error when using ZeroMQ's TCP + transport and publisher is not running. + + Note: Requires ZeroMQ's TCP transport, this is only the default on Windows. + ''' + if self.get_config('minion')['transport'] != 'zeromq': + self.skipTest('This test only works with ZeroMQ') + # Make sure we cleanly return if the publisher isn't running + with patch('os.path.exists', return_value=False): + self.assertRaises(SaltReqTimeoutError, lambda: self.client.pub('*', 'test.ping')) + + # Check nodegroups behavior + with patch('os.path.exists', return_value=True): + with patch.dict(self.client.opts, + {'nodegroups': + {'group1': 'L@foo.domain.com,bar.domain.com,baz.domain.com or bl*.domain.com'}}): + # Do we raise an exception if the nodegroup can't be matched? 
+ self.assertRaises(SaltInvocationError, + self.client.pub, + 'non_existent_group', 'test.ping', tgt_type='nodegroup') diff --git a/tests/unit/test_pillar.py b/tests/unit/test_pillar.py index b6457bff97..174525ab73 100644 --- a/tests/unit/test_pillar.py +++ b/tests/unit/test_pillar.py @@ -436,6 +436,66 @@ class PillarTestCase(TestCase): ({'foo': 'bar', 'nested': {'level': {'foo': 'bar2'}}}, []) ) + def test_includes_override_sls(self): + opts = { + 'renderer': 'json', + 'renderer_blacklist': [], + 'renderer_whitelist': [], + 'state_top': '', + 'pillar_roots': [], + 'file_roots': [], + 'extension_modules': '' + } + grains = { + 'os': 'Ubuntu', + 'os_family': 'Debian', + 'oscodename': 'raring', + 'osfullname': 'Ubuntu', + 'osrelease': '13.04', + 'kernel': 'Linux' + } + with patch('salt.pillar.compile_template') as compile_template: + + # Test with option set to True + opts['pillar_includes_override_sls'] = True + pillar = salt.pillar.Pillar(opts, grains, 'mocked-minion', 'base') + # Mock getting the proper template files + pillar.client.get_state = MagicMock( + return_value={ + 'dest': '/path/to/pillar/files/foo.sls', + 'source': 'salt://foo.sls' + } + ) + + compile_template.side_effect = [ + {'foo': 'bar', 'include': ['blah']}, + {'foo': 'bar2'} + ] + self.assertEqual( + pillar.render_pillar({'base': ['foo.sls']}), + ({'foo': 'bar2'}, []) + ) + + # Test with option set to False + opts['pillar_includes_override_sls'] = False + pillar = salt.pillar.Pillar(opts, grains, 'mocked-minion', 'base') + # Mock getting the proper template files + pillar.client.get_state = MagicMock( + return_value={ + 'dest': '/path/to/pillar/files/foo.sls', + 'source': 'salt://foo.sls' + } + ) + + compile_template.side_effect = [ + {'foo': 'bar', 'include': ['blah']}, + {'foo': 'bar2'} + ] + self.assertEqual( + pillar.render_pillar({'base': ['foo.sls']}), + ({'foo': 'bar'}, []) + ) + def test_topfile_order(self): with patch('salt.pillar.salt.fileclient.get_file_client', autospec=True) as get_file_client, \ patch('salt.pillar.salt.minion.Matcher') as Matcher: # autospec=True disabled due to py3 mock bug diff --git a/tests/unit/tops/__init__.py b/tests/unit/tops/__init__.py new file mode 100644 index 0000000000..40a96afc6f --- /dev/null +++ b/tests/unit/tops/__init__.py @@ -0,0 +1 @@ +# -*- coding: utf-8 -*- diff --git a/tests/unit/tops/test_ext_nodes.py b/tests/unit/tops/test_ext_nodes.py new file mode 100644 index 0000000000..e74c87a5ca --- /dev/null +++ b/tests/unit/tops/test_ext_nodes.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +''' +Test ext_nodes master_tops module +''' + +# Import python libs +from __future__ import absolute_import, print_function, unicode_literals +import subprocess +import textwrap + +# Import Salt Testing libs +from tests.support.unit import TestCase, skipIf +from tests.support.mixins import LoaderModuleMockMixin +from tests.support.mock import patch, MagicMock, NO_MOCK, NO_MOCK_REASON + +# Import Salt libs +import salt.utils.stringutils +import salt.tops.ext_nodes as ext_nodes + + +@skipIf(NO_MOCK, NO_MOCK_REASON) +class ExtNodesTestCase(TestCase, LoaderModuleMockMixin): + def setup_loader_modules(self): + return { + ext_nodes: { + '__opts__': { + 'master_tops': { + # Since ext_nodes runs the command with shell=True, + # this will keep "command not found" errors from + # showing up on the console. We'll be mocking the + # communicate results anyway. 
+ 'ext_nodes': 'echo', + } + } + } + } + + def test_ext_nodes(self): + ''' + Confirm that subprocess.Popen works as expected and does not raise an + exception (see https://github.com/saltstack/salt/pull/46863). + ''' + stdout = salt.utils.stringutils.to_bytes(textwrap.dedent('''\ + classes: + - one + - two''')) + communicate_mock = MagicMock(return_value=(stdout, None)) + with patch.object(subprocess.Popen, 'communicate', communicate_mock): + ret = ext_nodes.top(opts={'id': 'foo'}) + self.assertEqual(ret, {'base': ['one', 'two']}) + + def test_ext_nodes_with_environment(self): + ''' + Same as above, but also tests that the matches are assigned to the proper + environment if one is returned by the ext_nodes command. + ''' + stdout = salt.utils.stringutils.to_bytes(textwrap.dedent('''\ + classes: + - one + - two + environment: dev''')) + communicate_mock = MagicMock(return_value=(stdout, None)) + with patch.object(subprocess.Popen, 'communicate', communicate_mock): + ret = ext_nodes.top(opts={'id': 'foo'}) + self.assertEqual(ret, {'dev': ['one', 'two']}) diff --git a/tests/unit/utils/test_data.py b/tests/unit/utils/test_data.py index ea87bcf679..e0d319eb25 100644 --- a/tests/unit/utils/test_data.py +++ b/tests/unit/utils/test_data.py @@ -240,7 +240,6 @@ class DataTestCase(TestCase): ret = salt.utils.data.decode( self.test_data, - encoding='utf-8', keep=True, normalize=True, preserve_dict_class=True, @@ -253,7 +252,6 @@ class DataTestCase(TestCase): UnicodeDecodeError, salt.utils.data.decode, self.test_data, - encoding='utf-8', keep=False, normalize=True, preserve_dict_class=True, @@ -267,7 +265,6 @@ class DataTestCase(TestCase): ret = salt.utils.data.decode( self.test_data, - encoding='utf-8', keep=True, normalize=True, preserve_dict_class=False, diff --git a/tests/unit/utils/test_stringutils.py b/tests/unit/utils/test_stringutils.py index 36ff9308a4..fd1f63a5ee 100644 --- a/tests/unit/utils/test_stringutils.py +++ b/tests/unit/utils/test_stringutils.py @@ -97,7 +97,6 @@ class StringutilsTestCase(TestCase): self.assertEqual( salt.utils.stringutils.to_unicode( EGGS, - encoding='utf=8', normalize=True ), 'яйца' @@ -105,7 +104,6 @@ class StringutilsTestCase(TestCase): self.assertNotEqual( salt.utils.stringutils.to_unicode( EGGS, - encoding='utf=8', normalize=False ), 'яйца' @@ -120,9 +118,12 @@ class StringutilsTestCase(TestCase): self.assertEqual(salt.utils.stringutils.to_unicode(str('xyzzy'), 'utf-8'), 'xyzzy') # future lint: disable=blacklisted-function self.assertEqual(salt.utils.stringutils.to_unicode(BYTES, 'utf-8'), UNICODE) - # Test utf-8 fallback with ascii default encoding + # Test that unicode chars are decoded properly even when using + # locales which are not UTF-8 compatible with patch.object(builtins, '__salt_system_encoding__', 'ascii'): - self.assertEqual(salt.utils.stringutils.to_unicode(u'Ψ'.encode('utf-8')), u'Ψ') + self.assertEqual(salt.utils.stringutils.to_unicode('Ψ'.encode('utf-8')), 'Ψ') + with patch.object(builtins, '__salt_system_encoding__', 'CP1252'): + self.assertEqual(salt.utils.stringutils.to_unicode('Ψ'.encode('utf-8')), 'Ψ') def test_build_whitespace_split_regex(self): expected_regex = '(?m)^(?:[\\s]+)?Lorem(?:[\\s]+)?ipsum(?:[\\s]+)?dolor(?:[\\s]+)?sit(?:[\\s]+)?amet\\,' \ @@ -169,3 +170,12 @@ class StringutilsTestCase(TestCase): context = salt.utils.stringutils.get_context(template, 8, num_lines=2, marker=' <---') expected = '---\n[...]\n6\n7\n8 <---\n9\na\n[...]\n---' self.assertEqual(expected, context) + + def test_expr_match(self): + val = 'foo/bar/baz' + 
# Exact match + self.assertTrue(salt.utils.stringutils.expr_match(val, val)) + # Glob match + self.assertTrue(salt.utils.stringutils.expr_match(val, 'foo/*/baz')) + # Glob non-match + self.assertFalse(salt.utils.stringutils.expr_match(val, 'foo/*/bar')) diff --git a/tests/whitelist.txt b/tests/whitelist.txt index 9cc6f551df..65c45e0cea 100644 --- a/tests/whitelist.txt +++ b/tests/whitelist.txt @@ -10,6 +10,7 @@ integration.modules.test_data integration.modules.test_disk integration.modules.test_git integration.modules.test_grains +integration.modules.test_groupadd integration.modules.test_hosts integration.modules.test_mine integration.modules.test_pillar