Merge branch 'develop' into ssh_private_key_pass

Commit: f3cecd560c
.github/stale.yml (4 lines changed)
@@ -1,8 +1,8 @@
# Probot Stale configuration file

# Number of days of inactivity before an issue becomes stale
# 740 is approximately 2 years and 1 month
daysUntilStale: 740
# 730 is approximately 2 years
daysUntilStale: 730

# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
@@ -46,6 +46,7 @@ provisioner:
- .bundle
- .kitchen
- .kitchen.yml
- artifacts
- Gemfile
- Gemfile.lock
- README.rst
@@ -176,6 +176,14 @@
# master event bus. The value is expressed in bytes.
#max_event_size: 1048576

# Windows platforms lack posix IPC and must rely on slower TCP based inter-
# process communications. Set ipc_mode to 'tcp' on such systems
#ipc_mode: ipc

# Overwrite the default tcp ports used by the minion when ipc_mode is set to 'tcp'
#tcp_master_pub_port: 4510
#tcp_master_pull_port: 4511

# By default, the master AES key rotates every 24 hours. The next command
# following a key rotation will trigger a key refresh from the minion which may
# result in minions which do not respond to the first command after a key refresh.
@@ -380,7 +380,7 @@
# process communications. Set ipc_mode to 'tcp' on such systems
#ipc_mode: ipc

# Overwrite the default tcp ports used by the minion when in tcp mode
# Overwrite the default tcp ports used by the minion when ipc_mode is set to 'tcp'
#tcp_pub_port: 4510
#tcp_pull_port: 4511
@@ -263,8 +263,8 @@ if on_saltstack:
copyright = time.strftime("%Y")

# < --- START do not merge these settings to other branches START ---> #
build_type = 'latest' # latest, previous, develop, next
release = latest_release # version, latest_release, previous_release
build_type = 'develop' # latest, previous, develop, next
release = version # version, latest_release, previous_release
# < --- END do not merge these settings to other branches END ---> #

# Set google custom search engine
@@ -189,7 +189,7 @@ execution modules
iwtools
jboss7
jboss7_cli
jenkins
jenkinsmod
junos
k8s
kapacitor
@@ -1,5 +0,0 @@
salt.modules.jenkins module
===========================

.. automodule:: salt.modules.jenkins
:members:
doc/ref/modules/all/salt.modules.jenkinsmod.rst (new file, 5 lines)
@@ -0,0 +1,5 @@
salt.modules.jenkinsmod module
==============================

.. automodule:: salt.modules.jenkinsmod
:members:
@@ -142,5 +142,23 @@ wait_for_timeout
The timeout to wait in seconds for provisioning resources such as servers.
The default wait_for_timeout is 15 minutes.

public_key_ids
List of public key IDs (ssh key).

Functions
=========

* Create an SSH key

.. code-block:: bash

sudo salt-cloud -f create_ssh_key my-oneandone-config name='SaltTest' description='SaltTestDescription'

* Create a block storage

.. code-block:: bash

sudo salt-cloud -f create_block_storage my-oneandone-config name='SaltTest2' description='SaltTestDescription' size=50 datacenter_id='5091F6D8CBFEF9C26ACE957C652D5D49'

For more information concerning cloud profiles, see :ref:`here
<salt-cloud-profiles>`.
@@ -136,24 +136,24 @@ the following userdata example:
$SourceStoreScope = 'LocalMachine'
$SourceStorename = 'Remote Desktop'

$SourceStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $SourceStorename, $SourceStoreScope
$SourceStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $SourceStorename, $SourceStoreScope
$SourceStore.Open([System.Security.Cryptography.X509Certificates.OpenFlags]::ReadOnly)

$cert = $SourceStore.Certificates | Where-Object -FilterScript {
$cert = $SourceStore.Certificates | Where-Object -FilterScript {
$_.subject -like '*'
}

$DestStoreScope = 'LocalMachine'
$DestStoreName = 'My'

$DestStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $DestStoreName, $DestStoreScope
$DestStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $DestStoreName, $DestStoreScope
$DestStore.Open([System.Security.Cryptography.X509Certificates.OpenFlags]::ReadWrite)
$DestStore.Add($cert)

$SourceStore.Close()
$DestStore.Close()

winrm create winrm/config/listener?Address=*+Transport=HTTPS `@`{Hostname=`"($certId)`"`;CertificateThumbprint=`"($cert.Thumbprint)`"`}
winrm create winrm/config/listener?Address=*+Transport=HTTPS `@`{CertificateThumbprint=`"($cert.Thumbprint)`"`}

Restart-Service winrm
</powershell>
@@ -75,6 +75,67 @@ Example ``thorium_roots`` configuration:
base:
- /etc/salt/thorium

It is also possible to use gitfs with Thorium,
using the ``thoriumenv`` or ``thorium_top`` settings.

Example using ``thorium_top``:

.. code-block:: yaml

thorium_top: salt://thorium/top.sls
gitfs_provider: pygit2

gitfs_remotes:
- git@github.com:user/repo.git:
- name: salt-backend
- root: salt
- base: master
- git@github.com:user/repo.git:
- name: thorium-backend
- root: thorium
- base: master
- mountpoint: salt://thorium

.. note::

When using this method don't forget to prepend the mountpoint to files served by this repo,
for example ``top.sls``:

.. code-block:: yaml

base:
'*':
- thorium.key_clean

Example using ``thoriumenv``:

.. code-block:: yaml

thoriumenv: thorium
gitfs_provider: pygit2

gitfs_remotes:
- git@github.com:user/repo.git:
- name: salt-backend
- root: salt
- base: master
- git@github.com:user/repo.git:
- name: thorium-backend
- root: thorium
- saltenv:
- thorium:
- ref: master

.. note::

When using this method all state will run under the defined environment,
for example ``top.sls``:

.. code-block:: yaml

thorium:
'*':
- key_clean

The Thorium top.sls File
------------------------
@@ -157,7 +157,7 @@ If (Test-Path "$($ini[$bitPaths]['VCforPythonDir'])\vcvarsall.bat") {
# Install Microsoft Visual C++ for Python2.7
Write-Output " - Installing $($ini['Prerequisites']['VCforPython']) . . ."
$file = "$($ini['Settings']['DownloadDir'])\$($ini['Prerequisites']['VCforPython'])"
$p = Start-Process msiexec.exe -ArgumentList "/i $file /qb ALLUSERS=1" -Wait -NoNewWindow -PassThru
$p = Start-Process msiexec.exe -ArgumentList "/i $file /quiet ALLUSERS=1" -Wait -NoNewWindow -PassThru
}

#------------------------------------------------------------------------------
@@ -175,7 +175,7 @@ If (Test-Path "$($ini['Settings']['Python2Dir'])\python.exe") {
DownloadFileWithProgress $url $file

Write-Output " - $script_name :: Installing $($ini[$bitPrograms]['Python2']) . . ."
$p = Start-Process msiexec -ArgumentList "/i $file /qb ADDLOCAL=DefaultFeature,SharedCRT,Extensions,pip_feature,PrependPath TARGETDIR=`"$($ini['Settings']['Python2Dir'])`"" -Wait -NoNewWindow -PassThru
$p = Start-Process msiexec -ArgumentList "/i $file /quiet ADDLOCAL=DefaultFeature,SharedCRT,Extensions,pip_feature,PrependPath TARGETDIR=`"$($ini['Settings']['Python2Dir'])`"" -Wait -NoNewWindow -PassThru
}

#------------------------------------------------------------------------------
@@ -197,17 +197,17 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Updating PIP and SetupTools . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_PIP_LOCAL_CACHE) {
Start_Process_and_test_exitcode "$($ini['Settings']['Python2Dir'])\python.exe" "-m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
} else {
$p = New-Item $Env:SALT_PIP_LOCAL_CACHE -ItemType Directory -Force # Ensure directory exists
if ( (Get-ChildItem $Env:SALT_PIP_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req_pip.txt into empty local cache SALT_REQ_PIP $Env:SALT_PIP_LOCAL_CACHE"
Start_Process_and_test_exitcode "$($ini['Settings']['Python2Dir'])\python.exe" "-m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_PIP_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "$($ini['Settings']['Python2Dir'])\python.exe" "-m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
}

#==============================================================================
@@ -218,16 +218,16 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Installing pypi resources using pip . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_REQ_LOCAL_CACHE) {
Start_Process_and_test_exitcode "$($ini['Settings']['Scripts2Dir'])\pip.exe" "--no-cache-dir install -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req.txt" "pip install"
} else {
if ( (Get-ChildItem $Env:SALT_REQ_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req.txt into empty local cache SALT_REQ $Env:SALT_REQ_LOCAL_CACHE"
Start_Process_and_test_exitcode "$($ini['Settings']['Python2Dir'])\python.exe" "-m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_REQ_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "$($ini['Settings']['Python2Dir'])\python.exe" "-m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python2Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
}

#==============================================================================
@@ -157,7 +157,7 @@ If (Test-Path "$($ini[$bitPaths]['VCppBuildToolsDir'])\vcbuildtools.bat") {
# Install Microsoft Visual C++ Build Tools
Write-Output " - Installing $($ini['Prerequisites']['VCppBuildTools']) . . ."
$file = "$($ini['Settings']['DownloadDir'])\$($ini['Prerequisites']['VCppBuildTools'])"
$p = Start-Process $file -ArgumentList '/Passive' -Wait -NoNewWindow -PassThru
$p = Start-Process $file -ArgumentList '/Quiet' -Wait -NoNewWindow -PassThru
}

#------------------------------------------------------------------------------
@@ -175,7 +175,7 @@ If (Test-Path "$($ini['Settings']['Python3Dir'])\python.exe") {
DownloadFileWithProgress $url $file

Write-Output " - $script_name :: Installing $($ini[$bitPrograms]['Python3']) . . ."
$p = Start-Process $file -ArgumentList "/passive InstallAllUsers=1 TargetDir=`"$($ini['Settings']['Python3Dir'])`" Include_doc=0 Include_tcltk=0 Include_test=0 Include_launcher=0 PrependPath=1 Shortcuts=0" -Wait -NoNewWindow -PassThru
$p = Start-Process $file -ArgumentList "/Quiet InstallAllUsers=1 TargetDir=`"$($ini['Settings']['Python3Dir'])`" Include_doc=0 Include_tcltk=0 Include_test=0 Include_launcher=0 PrependPath=1 Shortcuts=0" -Wait -NoNewWindow -PassThru
}

#------------------------------------------------------------------------------
@@ -197,17 +197,17 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Updating PIP and SetupTools . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_PIP_LOCAL_CACHE) {
Start_Process_and_test_exitcode "$($ini['Settings']['Python3Dir'])\python.exe" "-m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req_pip.txt" "python pip"
} else {
$p = New-Item $Env:SALT_PIP_LOCAL_CACHE -ItemType Directory -Force # Ensure directory exists
if ( (Get-ChildItem $Env:SALT_PIP_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req_pip.txt into empty local cache SALT_REQ_PIP $Env:SALT_PIP_LOCAL_CACHE"
Start_Process_and_test_exitcode "$($ini['Settings']['Python3Dir'])\python.exe" "-m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip download --dest $Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_PIP_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "$($ini['Settings']['Python3Dir'])\python.exe" "-m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_PIP_LOCAL_CACHE -r $($script_path)\req_pip.txt" "pip install"
}

#==============================================================================
@@ -218,16 +218,16 @@ Write-Output " ----------------------------------------------------------------"
Write-Output " - $script_name :: Installing pypi resources using pip . . ."
Write-Output " ----------------------------------------------------------------"
if ( ! [bool]$Env:SALT_REQ_LOCAL_CACHE) {
Start_Process_and_test_exitcode "$($ini['Settings']['Scripts3Dir'])\pip.exe" "--no-cache-dir install -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip --no-cache-dir install -r $($script_path)\req.txt" "pip install"
} else {
if ( (Get-ChildItem $Env:SALT_REQ_LOCAL_CACHE | Measure-Object).Count -eq 0 ) {
# folder empty
Write-Output " pip download from req.txt into empty local cache SALT_REQ $Env:SALT_REQ_LOCAL_CACHE"
Start_Process_and_test_exitcode "$($ini['Settings']['Python3Dir'])\python.exe" "-m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip download --dest $Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip download"
}
Write-Output " reading from local pip cache $Env:SALT_REQ_LOCAL_CACHE"
Write-Output " If a (new) resource is missing, please delete all files in this cache, go online and repeat"
Start_Process_and_test_exitcode "$($ini['Settings']['Python3Dir'])\python.exe" "-m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
Start_Process_and_test_exitcode "cmd" "/c $($ini['Settings']['Python3Dir'])\python.exe -m pip install --no-index --find-links=$Env:SALT_REQ_LOCAL_CACHE -r $($script_path)\req.txt" "pip install"
}

#==============================================================================
@@ -50,8 +50,6 @@ goto CheckPython3
echo Failed, please remove manually
)

goto eof

:CheckPython3
if exist "\Python35" goto RemovePython3

@@ -63,13 +61,13 @@ goto eof
:: 64 bit
if exist "%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}" (
echo %0 :: - 3.5.3 64bit
"%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}\python-3.5.3-amd64.exe" /uninstall /passive
"%LOCALAPPDATA%\Package Cache\{b94f45d6-8461-440c-aa4d-bf197b2c2499}\python-3.5.3-amd64.exe" /uninstall /quiet
)

:: 32 bit
if exist "%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}" (
echo %0 :: - 3.5.3 32bit
"%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}\python-3.5.3" /uninstall /passive
"%LOCALAPPDATA%\Package Cache\{a10037e1-4247-47c9-935b-c5ca049d0299}\python-3.5.3" /uninstall /quiet
)

rem wipe the Python directory
@@ -449,6 +449,7 @@ class LocalClient(object):
sub=3,
cli=False,
progress=False,
full_return=False,
**kwargs):
'''
Execute a command on a random subset of the targeted systems

@@ -488,6 +489,7 @@ class LocalClient(object):
ret=ret,
kwarg=kwarg,
progress=progress,
full_return=full_return,
**kwargs)

def cmd_batch(
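The hunk above threads a new ``full_return`` flag through the LocalClient subset call. A brief, hedged illustration of how a caller might use it, assuming this is the ``cmd_subset`` method shown above (target and function are hypothetical):

.. code-block:: python

    import salt.client

    client = salt.client.LocalClient()

    # With full_return=True the subset call is expected to hand back the
    # complete job return structure rather than just each minion's return data.
    result = client.cmd_subset('*', 'test.ping', sub=3, full_return=True)
    print(result)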
@@ -354,6 +354,9 @@ class SSH(object):
needs_expansion = '*' not in hostname and salt.utils.network.is_reachable_host(hostname)
if needs_expansion:
hostname = salt.utils.network.ip_to_host(hostname)
if hostname is None:
# Reverse lookup failed
return
self._get_roster()
for roster_filename in self.__parsed_rosters:
roster_data = self.__parsed_rosters[roster_filename]
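A condensed sketch of the guard added above, using the same ``salt.utils.network`` helpers; the wrapper function name here is illustrative only:

.. code-block:: python

    import salt.utils.network

    def _expand_target_hostname(hostname):
        # Globs are left alone; concrete, reachable addresses are reverse-resolved.
        if '*' not in hostname and salt.utils.network.is_reachable_host(hostname):
            hostname = salt.utils.network.ip_to_host(hostname)
            if hostname is None:
                # Reverse lookup failed, so there is nothing to match in the roster.
                return None
        return hostname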
@@ -2043,9 +2043,10 @@ def list_input_endpoints(kwargs=None, conn=None, call=None):

ret = {}
for item in data:
if 'Role' not in item:
continue
for role in item['Role']:
if 'Role' in item:
role = item['Role']
if not isinstance(role, dict):
return ret
input_endpoint = role['ConfigurationSets']['ConfigurationSet'].get('InputEndpoints', {}).get('InputEndpoint')
if not input_endpoint:
continue

@@ -2053,6 +2054,7 @@ def list_input_endpoints(kwargs=None, conn=None, call=None):
input_endpoint = [input_endpoint]
for endpoint in input_endpoint:
ret[endpoint['Name']] = endpoint
return ret
return ret
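For readability, a standalone sketch of the corrected endpoint collection above, assuming input data shaped like the Azure hosted-service response (the function name and sample shapes are hypothetical):

.. code-block:: python

    def collect_input_endpoints(data):
        '''Gather InputEndpoint entries keyed by name (mirrors the fixed loop above).'''
        ret = {}
        for item in data:
            if 'Role' not in item:
                continue
            role = item['Role']           # use the role mapping directly ...
            if not isinstance(role, dict):
                return ret                # ... and bail out on unexpected shapes
            input_endpoint = role['ConfigurationSets']['ConfigurationSet'] \
                .get('InputEndpoints', {}).get('InputEndpoint')
            if not input_endpoint:
                continue
            if not isinstance(input_endpoint, list):
                input_endpoint = [input_endpoint]
            for endpoint in input_endpoint:
                ret[endpoint['Name']] = endpoint
        return ret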
@@ -6,6 +6,8 @@
=======
The 1&1 SaltStack cloud module allows a 1&1 server to
be automatically deployed and bootstrapped with Salt.
It also has functions to create block storages
and ssh keys.

:depends: 1and1 >= 1.2.0

@@ -77,6 +79,20 @@ Set ``deploy`` to False if Salt should not be installed on the node.

my-oneandone-profile:
deploy: False

Create an SSH key

.. code-block:: bash

sudo salt-cloud -f create_ssh_key my-oneandone-config name='SaltTest' description='SaltTestDescription'

Create a block storage

.. code-block:: bash

sudo salt-cloud -f create_block_storage my-oneandone-config name='SaltTest2'
description='SaltTestDescription' size=50 datacenter_id='5091F6D8CBFEF9C26ACE957C652D5D49'

'''

# Import python libs
@@ -104,7 +120,7 @@ from salt.ext import six

try:
from oneandone.client import (
OneAndOneService, Server, Hdd
OneAndOneService, Server, Hdd, BlockStorage, SshKey
)
HAS_ONEANDONE = True
except ImportError:
@@ -227,6 +243,90 @@ def avail_locations(conn=None, call=None):
return {'Locations': datacenters}


def create_block_storage(kwargs=None, call=None):
'''
Create a block storage
'''
if call == 'action':
raise SaltCloudSystemExit(
'The avail_locations function must be called with '
'-f or --function, or with the --list-locations option'
)

conn = get_conn()

# Assemble the composite block storage object.
block_storage = _get_block_storage(kwargs)

data = conn.create_block_storage(block_storage=block_storage)

return {'BlockStorage': data}


def _get_block_storage(kwargs):
'''
Construct a block storage instance from passed arguments
'''
if kwargs is None:
kwargs = {}

block_storage_name = kwargs.get('name', None)
block_storage_size = kwargs.get('size', None)
block_storage_description = kwargs.get('description', None)
datacenter_id = kwargs.get('datacenter_id', None)
server_id = kwargs.get('server_id', None)

block_storage = BlockStorage(
name=block_storage_name,
size=block_storage_size)

if block_storage_description:
block_storage.description = block_storage_description

if datacenter_id:
block_storage.datacenter_id = datacenter_id

if server_id:
block_storage.server_id = server_id

return block_storage


def _get_ssh_key(kwargs):
'''
Construct an SshKey instance from passed arguments
'''
ssh_key_name = kwargs.get('name', None)
ssh_key_description = kwargs.get('description', None)
public_key = kwargs.get('public_key', None)

return SshKey(
name=ssh_key_name,
description=ssh_key_description,
public_key=public_key
)


def create_ssh_key(kwargs=None, call=None):
'''
Create an ssh key
'''
if call == 'action':
raise SaltCloudSystemExit(
'The avail_locations function must be called with '
'-f or --function, or with the --list-locations option'
)

conn = get_conn()

# Assemble the composite SshKey object.
ssh_key = _get_ssh_key(kwargs)

data = conn.create_ssh_key(ssh_key=ssh_key)

return {'SshKey': data}


def avail_images(conn=None, call=None):
'''
Return a list of the server appliances that are on the provider
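A short usage sketch for the helpers added above, reusing the hypothetical kwargs the documentation's ``salt-cloud -f create_block_storage`` example passes:

.. code-block:: python

    # kwargs as they would arrive from the -f/--function call shown in the docs
    kwargs = {
        'name': 'SaltTest2',
        'description': 'SaltTestDescription',
        'size': 50,
        'datacenter_id': '5091F6D8CBFEF9C26ACE957C652D5D49',
    }

    block_storage = _get_block_storage(kwargs)  # builds the 1and1 BlockStorage object
    # conn = get_conn()                         # authenticated OneAndOneService client
    # data = conn.create_block_storage(block_storage=block_storage)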
@@ -460,6 +560,11 @@ def _get_server(vm_):
search_global=False
)

public_key = config.get_cloud_config_value(
'public_key_ids', vm_, __opts__, default=True,
search_global=False
)

# Contruct server object
return Server(
name=vm_['name'],

@@ -477,7 +582,8 @@ def _get_server(vm_):
monitoring_policy_id=monitoring_policy_id,
datacenter_id=datacenter_id,
rsa_key=ssh_key,
private_network_id=private_network_id
private_network_id=private_network_id,
public_key=public_key
)
@@ -4570,7 +4570,7 @@ def _list_nodes(full=False):
pass

vms[name]['id'] = vm.find('ID').text
if vm.find('TEMPLATE').find('TEMPLATE_ID'):
if 'TEMPLATE_ID' in vm.find('TEMPLATE'):
vms[name]['image'] = vm.find('TEMPLATE').find('TEMPLATE_ID').text
vms[name]['name'] = name
vms[name]['size'] = {'cpu': cpu_size, 'memory': memory_size}
@@ -1192,6 +1192,12 @@ VALID_OPTS = {

# Enable calling ssh minions from the salt master
'enable_ssh_minions': bool,

# Thorium saltenv
'thoriumenv': (type(None), six.string_types),

# Thorium top file location
'thorium_top': six.string_types,
}

# default configurations

@@ -1255,6 +1261,8 @@ DEFAULT_MINION_OPTS = {
'startup_states': '',
'sls_list': [],
'top_file': '',
'thoriumenv': None,
'thorium_top': 'top.sls',
'thorium_interval': 0.5,
'thorium_roots': {
'base': [salt.syspaths.BASE_THORIUM_ROOTS_DIR],

@@ -1530,6 +1538,8 @@ DEFAULT_MASTER_OPTS = {
'decrypt_pillar_delimiter': ':',
'decrypt_pillar_default': 'gpg',
'decrypt_pillar_renderers': ['gpg'],
'thoriumenv': None,
'thorium_top': 'top.sls',
'thorium_interval': 0.5,
'thorium_roots': {
'base': [salt.syspaths.BASE_THORIUM_ROOTS_DIR],

@@ -3895,6 +3905,10 @@ def apply_master_config(overrides=None, defaults=None):
if overrides:
opts.update(overrides)

opts['__cli'] = salt.utils.stringutils.to_unicode(
os.path.basename(sys.argv[0])
)

if 'environment' in opts:
if opts['saltenv'] is not None:
log.warning(
@@ -57,6 +57,25 @@ structure::

.. note:: This fileserver back-end requires the use of the MD5 hashing algorithm.
MD5 may not be compliant with all security policies.

.. note:: This fileserver back-end is only compatible with MD5 ETag hashes in
the S3 metadata. This means that you must use SSE-S3 or plaintext for
bucket encryption, and that you must not use multipart upload when
uploading to your bucket. More information here:
https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html

Objects without an MD5 ETag will be fetched on every fileserver update.

If you deal with objects greater than 8MB, then you should use the
following AWS CLI config to avoid mutipart upload:

.. code-block::

s3 =
multipart_threshold = 1024MB

More info here:
https://docs.aws.amazon.com/cli/latest/topic/s3-config.html
'''

# Import python libs
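To make the ETag constraint in the note above concrete: for a single-part, SSE-S3 or plaintext upload, the ETag S3 reports is simply the hex MD5 of the object body, which is what lets s3fs compare its cache against the bucket metadata. A minimal illustration:

.. code-block:: python

    import hashlib

    body = b'example object contents'
    local_md5 = hashlib.md5(body).hexdigest()
    # For a single-part SSE-S3/plaintext upload this equals the ETag S3 reports;
    # multipart uploads instead get an '<md5>-<parts>' style ETag that can no
    # longer be compared against a locally computed MD5.
    print(local_md5)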
@@ -110,13 +129,14 @@ def update():
# sync the buckets to the local cache
log.info('Syncing local cache from S3...')
for saltenv, env_meta in six.iteritems(metadata):
for bucket, files in six.iteritems(_find_files(env_meta)):
for file_path in files:
cached_file_path = _get_cached_file_name(bucket, saltenv, file_path)
log.info('%s - %s : %s', bucket, saltenv, file_path)
for bucket_files in _find_files(env_meta):
for bucket, files in six.iteritems(bucket_files):
for file_path in files:
cached_file_path = _get_cached_file_name(bucket, saltenv, file_path)
log.info('%s - %s : %s', bucket, saltenv, file_path)

# load the file from S3 if it's not in the cache or it's old
_get_file_from_s3(metadata, saltenv, bucket, file_path, cached_file_path)
# load the file from S3 if it's not in the cache or it's old
_get_file_from_s3(metadata, saltenv, bucket, file_path, cached_file_path)

log.info('Sync local cache from S3 completed.')
@@ -144,10 +164,15 @@ def find_file(path, saltenv='base', **kwargs):
path = os.path.join(saltenv, path)

# look for the files and check if they're ignored globally
for bucket_name, files in six.iteritems(env_files):
if path in files and not fs.is_file_ignored(__opts__, path):
fnd['bucket'] = bucket_name
fnd['path'] = path
for bucket in env_files:
for bucket_name, files in six.iteritems(bucket):
if path in files and not fs.is_file_ignored(__opts__, path):
fnd['bucket'] = bucket_name
fnd['path'] = path
break
else:
continue # only executes if we didn't break
break

if not fnd['path'] or not fnd['bucket']:
return fnd
@@ -245,10 +270,10 @@ def file_list(load):

if not metadata or saltenv not in metadata:
return ret

for buckets in six.itervalues(_find_files(metadata[saltenv])):
files = [f for f in buckets if not fs.is_file_ignored(__opts__, f)]
ret += _trim_env_off_path(files, saltenv)
for bucket in _find_files(metadata[saltenv]):
for buckets in six.itervalues(bucket):
files = [f for f in buckets if not fs.is_file_ignored(__opts__, f)]
ret += _trim_env_off_path(files, saltenv)

return ret
@@ -283,11 +308,12 @@ def dir_list(load):
return ret

# grab all the dirs from the buckets cache file
for dirs in six.itervalues(_find_dirs(metadata[saltenv])):
# trim env and trailing slash
dirs = _trim_env_off_path(dirs, saltenv, trim_slash=True)
# remove empty string left by the base env dir in single bucket mode
ret += [_f for _f in dirs if _f]
for bucket in _find_dirs(metadata[saltenv]):
for dirs in six.itervalues(bucket):
# trim env and trailing slash
dirs = _trim_env_off_path(dirs, saltenv, trim_slash=True)
# remove empty string left by the base env dir in single bucket mode
ret += [_f for _f in dirs if _f]

return ret
@@ -418,8 +444,9 @@ def _refresh_buckets_cache_file(cache_file):
if _is_env_per_bucket():
# Single environment per bucket
for saltenv, buckets in six.iteritems(_get_buckets()):
bucket_files = {}
bucket_files_list = []
for bucket_name in buckets:
bucket_files = {}
s3_meta = __get_s3_meta(bucket_name)

# s3 query returned nothing

@@ -428,6 +455,7 @@ def _refresh_buckets_cache_file(cache_file):

# grab only the files/dirs
bucket_files[bucket_name] = [k for k in s3_meta if 'Key' in k]
bucket_files_list.append(bucket_files)

# check to see if we added any keys, otherwise investigate possible error conditions
if len(bucket_files[bucket_name]) == 0:
@@ -454,7 +482,7 @@ def _refresh_buckets_cache_file(cache_file):
'in your S3 bucket?')
return {}

metadata[saltenv] = bucket_files
metadata[saltenv] = bucket_files_list

else:
# Multiple environments per buckets
@@ -502,12 +530,16 @@ def _refresh_buckets_cache_file(cache_file):
env_files = [k for k in files if k['Key'].startswith(saltenv)]

if saltenv not in metadata:
metadata[saltenv] = {}
metadata[saltenv] = []

if bucket_name not in metadata[saltenv]:
metadata[saltenv][bucket_name] = []

metadata[saltenv][bucket_name] += env_files
found = False
for bucket_files in metadata[saltenv]:
if bucket_name in bucket_files:
bucket_files[bucket_name] += env_files
found = True
break
if not found:
metadata[saltenv].append({bucket_name: env_files})

# write the metadata to disk
if os.path.isfile(cache_file):
@@ -543,16 +575,21 @@ def _find_files(metadata):
Looks for all the files in the S3 bucket cache metadata
'''

ret = {}

for bucket_name, data in six.iteritems(metadata):
if bucket_name not in ret:
ret[bucket_name] = []

filePaths = [k['Key'] for k in data]
# filter out the dirs
ret[bucket_name] += [k for k in filePaths if not k.endswith('/')]
ret = []
found = {}

for bucket_dict in metadata:
for bucket_name, data in six.iteritems(bucket_dict):
filepaths = [k['Key'] for k in data]
filepaths = [k for k in filepaths if not k.endswith('/')]
if bucket_name not in found:
found[bucket_name] = True
ret.append({bucket_name: filepaths})
else:
for bucket in ret:
if bucket_name in bucket:
bucket[bucket_name] += filepaths
break
return ret
@@ -564,19 +601,27 @@ def _find_dirs(metadata):
directories discovered in the path of file keys.
'''

ret = {}

for bucket_name, data in six.iteritems(metadata):
if bucket_name not in ret:
ret[bucket_name] = set()

for path in [k['Key'] for k in data]:
prefix = ''
for part in path.split('/')[:-1]:
directory = prefix + part + '/'
ret[bucket_name].add(directory)
prefix = directory
ret = []
found = {}

for bucket_dict in metadata:
for bucket_name, data in six.iteritems(bucket_dict):
dirpaths = set()
for path in [k['Key'] for k in data]:
prefix = ''
for part in path.split('/')[:-1]:
directory = prefix + part + '/'
dirpaths.add(directory)
prefix = directory
if bucket_name not in found:
found[bucket_name] = True
ret.append({bucket_name: list(dirpaths)})
else:
for bucket in ret:
if bucket_name in bucket:
bucket[bucket_name] += list(dirpaths)
bucket[bucket_name] = list(set(bucket[bucket_name]))
break
return ret
@@ -585,7 +630,10 @@ def _find_file_meta(metadata, bucket_name, saltenv, path):
Looks for a file's metadata in the S3 bucket cache file
'''
env_meta = metadata[saltenv] if saltenv in metadata else {}
bucket_meta = env_meta[bucket_name] if bucket_name in env_meta else {}
bucket_meta = {}
for bucket in env_meta:
if bucket_name in bucket:
bucket_meta = bucket[bucket_name]
files_meta = list(list(filter((lambda k: 'Key' in k), bucket_meta)))

for item_meta in files_meta:
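As a summary of the data-structure change running through the s3fs hunks above: per saltenv, the cached bucket metadata moves from a single dict keyed by bucket name to a list of one-bucket dicts. A small sketch of the two shapes, with made-up bucket and key names:

.. code-block:: python

    # Old layout: one dict per saltenv, bucket name -> list of key metadata
    old_metadata = {
        'base': {
            'bucket-a': [{'Key': 'base/top.sls'}, {'Key': 'base/webserver/init.sls'}],
        },
    }

    # New layout: one list per saltenv, each entry a single-bucket dict, so the
    # same bucket may legitimately contribute more than one configured entry.
    new_metadata = {
        'base': [
            {'bucket-a': [{'Key': 'base/top.sls'}]},
            {'bucket-b': [{'Key': 'base/webserver/init.sls'}]},
        ],
    }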
@ -21,8 +21,11 @@ import os
|
||||
import socket
|
||||
|
||||
# Import salt libs
|
||||
import salt.ext.six as six
|
||||
import salt.utils.data
|
||||
import salt.utils.http as http
|
||||
import salt.utils.json
|
||||
import salt.utils.stringutils
|
||||
|
||||
|
||||
# metadata server information
|
||||
@ -51,9 +54,10 @@ def _search(prefix="latest/"):
|
||||
linedata = http.query(os.path.join(HOST, prefix), headers=True)
|
||||
if 'body' not in linedata:
|
||||
return ret
|
||||
body = salt.utils.stringutils.to_unicode(linedata['body'])
|
||||
if linedata['headers'].get('Content-Type', 'text/plain') == 'application/octet-stream':
|
||||
return linedata['body']
|
||||
for line in linedata['body'].split('\n'):
|
||||
return body
|
||||
for line in body.split('\n'):
|
||||
if line.endswith('/'):
|
||||
ret[line[:-1]] = _search(prefix=os.path.join(prefix, line))
|
||||
elif prefix == 'latest/':
|
||||
@ -70,11 +74,14 @@ def _search(prefix="latest/"):
|
||||
retdata = http.query(os.path.join(HOST, prefix, line)).get('body', None)
|
||||
# (gtmanfred) This try except block is slightly faster than
|
||||
# checking if the string starts with a curly brace
|
||||
try:
|
||||
ret[line] = salt.utils.json.loads(retdata)
|
||||
except ValueError:
|
||||
if isinstance(retdata, six.binary_type):
|
||||
try:
|
||||
ret[line] = salt.utils.json.loads(salt.utils.stringutils.to_unicode(retdata))
|
||||
except ValueError:
|
||||
ret[line] = salt.utils.stringutils.to_unicode(retdata)
|
||||
else:
|
||||
ret[line] = retdata
|
||||
return ret
|
||||
return salt.utils.data.decode(ret)
|
||||
|
||||
|
||||
def metadata():
|
||||
|
@ -629,10 +629,16 @@ class MinionBase(object):
|
||||
break
|
||||
except SaltClientError as exc:
|
||||
last_exc = exc
|
||||
log.info(
|
||||
'Master %s could not be reached, trying next '
|
||||
'next master (if any)', opts['master']
|
||||
)
|
||||
if exc.strerror.startswith('Could not access'):
|
||||
msg = (
|
||||
'Failed to initiate connection with Master '
|
||||
'%s: check ownership/permissions. Error '
|
||||
'message: %s', opts['master'], exc
|
||||
)
|
||||
else:
|
||||
msg = ('Master %s could not be reached, trying next '
|
||||
'next master (if any)', opts['master'])
|
||||
log.info(msg)
|
||||
continue
|
||||
|
||||
if not conn:
|
||||
|
@ -132,7 +132,8 @@ _TERM_FIELDS = {
|
||||
'flattened_addr': None,
|
||||
'flattened_saddr': None,
|
||||
'flattened_daddr': None,
|
||||
'priority': None
|
||||
'priority': None,
|
||||
'ttl': None
|
||||
}
|
||||
|
||||
# IP-type fields
|
||||
|
@ -511,6 +511,9 @@ def _cmp_attrs(path, attrs):
|
||||
Returns a pair (list) where first item are attributes to
|
||||
add and second item are to be removed.
|
||||
|
||||
Please take into account when using this function that some minions will
|
||||
not have lsattr installed.
|
||||
|
||||
path
|
||||
path to file to compare attributes with.
|
||||
|
||||
@ -519,7 +522,11 @@ def _cmp_attrs(path, attrs):
|
||||
'''
|
||||
diff = [None, None]
|
||||
|
||||
lattrs = lsattr(path).get(path, '')
|
||||
try:
|
||||
lattrs = lsattr(path).get(path, '')
|
||||
except AttributeError:
|
||||
# lsattr not installed
|
||||
return None
|
||||
|
||||
old = [chr for chr in lattrs if chr not in attrs]
|
||||
if len(old) > 0:
|
||||
@ -535,6 +542,8 @@ def _cmp_attrs(path, attrs):
|
||||
def lsattr(path):
|
||||
'''
|
||||
.. versionadded:: 2018.3.0
|
||||
.. versionchanged:: 2018.3.1
|
||||
If ``lsattr`` is not installed on the system, ``None`` is returned.
|
||||
|
||||
Obtain the modifiable attributes of the given file. If path
|
||||
is to a directory, an empty list is returned.
|
||||
@ -548,6 +557,9 @@ def lsattr(path):
|
||||
|
||||
salt '*' file.lsattr foo1.txt
|
||||
'''
|
||||
if not salt.utils.path.which('lsattr'):
|
||||
return None
|
||||
|
||||
if not os.path.exists(path):
|
||||
raise SaltInvocationError("File or directory does not exist.")
|
||||
|
||||
@ -4425,7 +4437,6 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
|
||||
``follow_symlinks`` option added
|
||||
'''
|
||||
name = os.path.expanduser(name)
|
||||
lsattr_cmd = salt.utils.path.which('lsattr')
|
||||
|
||||
if not ret:
|
||||
ret = {'name': name,
|
||||
@ -4445,12 +4456,14 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
|
||||
perms['lmode'] = salt.utils.files.normalize_mode(cur['mode'])
|
||||
|
||||
is_dir = os.path.isdir(name)
|
||||
if not salt.utils.platform.is_windows() and not is_dir and lsattr_cmd:
|
||||
# List attributes on file
|
||||
perms['lattrs'] = ''.join(lsattr(name).get('name', ''))
|
||||
# Remove attributes on file so changes can be enforced.
|
||||
if perms['lattrs']:
|
||||
chattr(name, operator='remove', attributes=perms['lattrs'])
|
||||
if not salt.utils.platform.is_windows() and not is_dir:
|
||||
lattrs = lsattr(name)
|
||||
if lattrs is not None:
|
||||
# List attributes on file
|
||||
perms['lattrs'] = ''.join(lattrs.get('name', ''))
|
||||
# Remove attributes on file so changes can be enforced.
|
||||
if perms['lattrs']:
|
||||
chattr(name, operator='remove', attributes=perms['lattrs'])
|
||||
|
||||
# Mode changes if needed
|
||||
if mode is not None:
|
||||
@ -4559,9 +4572,9 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
|
||||
if __opts__['test'] is True and ret['changes']:
|
||||
ret['result'] = None
|
||||
|
||||
if not salt.utils.platform.is_windows() and not is_dir and lsattr_cmd:
|
||||
if not salt.utils.platform.is_windows() and not is_dir:
|
||||
# Replace attributes on file if it had been removed
|
||||
if perms['lattrs']:
|
||||
if perms.get('lattrs', ''):
|
||||
chattr(name, operator='add', attributes=perms['lattrs'])
|
||||
|
||||
# Modify attributes of file if needed
|
||||
@ -4572,22 +4585,23 @@ def check_perms(name, ret, user, group, mode, attrs=None, follow_symlinks=False)
|
||||
pass
|
||||
else:
|
||||
diff_attrs = _cmp_attrs(name, attrs)
|
||||
if diff_attrs[0] is not None or diff_attrs[1] is not None:
|
||||
if __opts__['test'] is True:
|
||||
ret['changes']['attrs'] = attrs
|
||||
else:
|
||||
if diff_attrs[0] is not None:
|
||||
chattr(name, operator="add", attributes=diff_attrs[0])
|
||||
if diff_attrs[1] is not None:
|
||||
chattr(name, operator="remove", attributes=diff_attrs[1])
|
||||
cmp_attrs = _cmp_attrs(name, attrs)
|
||||
if cmp_attrs[0] is not None or cmp_attrs[1] is not None:
|
||||
ret['result'] = False
|
||||
ret['comment'].append(
|
||||
'Failed to change attributes to {0}'.format(attrs)
|
||||
)
|
||||
else:
|
||||
if diff_attrs is not None:
|
||||
if diff_attrs[0] is not None or diff_attrs[1] is not None:
|
||||
if __opts__['test'] is True:
|
||||
ret['changes']['attrs'] = attrs
|
||||
else:
|
||||
if diff_attrs[0] is not None:
|
||||
chattr(name, operator="add", attributes=diff_attrs[0])
|
||||
if diff_attrs[1] is not None:
|
||||
chattr(name, operator="remove", attributes=diff_attrs[1])
|
||||
cmp_attrs = _cmp_attrs(name, attrs)
|
||||
if cmp_attrs[0] is not None or cmp_attrs[1] is not None:
|
||||
ret['result'] = False
|
||||
ret['comment'].append(
|
||||
'Failed to change attributes to {0}'.format(attrs)
|
||||
)
|
||||
else:
|
||||
ret['changes']['attrs'] = attrs
|
||||
|
||||
return ret, perms
|
||||
|
||||
@ -4790,7 +4804,6 @@ def check_file_meta(
|
||||
contents
|
||||
File contents
|
||||
'''
|
||||
lsattr_cmd = salt.utils.path.which('lsattr')
|
||||
changes = {}
|
||||
if not source_sum:
|
||||
source_sum = dict()
|
||||
@ -4862,14 +4875,13 @@ def check_file_meta(
|
||||
if mode is not None and mode != smode:
|
||||
changes['mode'] = mode
|
||||
|
||||
if lsattr_cmd and attrs:
|
||||
if attrs:
|
||||
diff_attrs = _cmp_attrs(name, attrs)
|
||||
if (
|
||||
attrs is not None and
|
||||
diff_attrs[0] is not None or
|
||||
diff_attrs[1] is not None
|
||||
):
|
||||
changes['attrs'] = attrs
|
||||
if diff_attrs is not None:
|
||||
if attrs is not None \
|
||||
and (diff_attrs[0] is not None
|
||||
or diff_attrs[1] is not None):
|
||||
changes['attrs'] = attrs
|
||||
|
||||
return changes
|
||||
|
||||
@ -4972,9 +4984,8 @@ def get_diff(file1,
|
||||
args = []
|
||||
for idx, filename in enumerate(files):
|
||||
try:
|
||||
with salt.utils.files.fopen(filename, 'r') as fp_:
|
||||
args.append([salt.utils.stringutils.to_unicode(x)
|
||||
for x in fp_.readlines()])
|
||||
with salt.utils.files.fopen(filename, 'rb') as fp_:
|
||||
args.append(fp_.readlines())
|
||||
except (IOError, OSError) as exc:
|
||||
raise CommandExecutionError(
|
||||
'Failed to read {0}: {1}'.format(
|
||||
@ -4994,15 +5005,13 @@ def get_diff(file1,
|
||||
ret = bdiff
|
||||
else:
|
||||
if show_filenames:
|
||||
args.extend(
|
||||
[salt.utils.stringutils.to_unicode(x) for x in files]
|
||||
args.extend(files)
|
||||
ret = ''.join(
|
||||
difflib.unified_diff(
|
||||
*salt.utils.data.decode(args)
|
||||
)
|
||||
ret = salt.utils.locales.sdecode(
|
||||
''.join(difflib.unified_diff(*args)) # pylint: disable=no-value-for-parameter
|
||||
)
|
||||
return ret
|
||||
|
||||
return ''
|
||||
return ret
|
||||
|
||||
|
||||
def manage_file(name,
|
||||
@ -5233,12 +5242,12 @@ def manage_file(name,
|
||||
if salt.utils.platform.is_windows():
|
||||
contents = os.linesep.join(
|
||||
_splitlines_preserving_trailing_newline(contents))
|
||||
with salt.utils.files.fopen(tmp, 'w') as tmp_:
|
||||
with salt.utils.files.fopen(tmp, 'wb') as tmp_:
|
||||
if encoding:
|
||||
log.debug('File will be encoded with {0}'.format(encoding))
|
||||
log.debug('File will be encoded with %s', encoding)
|
||||
tmp_.write(contents.encode(encoding=encoding, errors=encoding_errors))
|
||||
else:
|
||||
tmp_.write(salt.utils.stringutils.to_str(contents))
|
||||
tmp_.write(salt.utils.stringutils.to_bytes(contents))
|
||||
|
||||
try:
|
||||
differences = get_diff(
|
||||
@ -5441,12 +5450,12 @@ def manage_file(name,
|
||||
if salt.utils.platform.is_windows():
|
||||
contents = os.linesep.join(
|
||||
_splitlines_preserving_trailing_newline(contents))
|
||||
with salt.utils.files.fopen(tmp, 'w') as tmp_:
|
||||
with salt.utils.files.fopen(tmp, 'wb') as tmp_:
|
||||
if encoding:
|
||||
log.debug('File will be encoded with {0}'.format(encoding))
|
||||
log.debug('File will be encoded with %s', encoding)
|
||||
tmp_.write(contents.encode(encoding=encoding, errors=encoding_errors))
|
||||
else:
|
||||
tmp_.write(salt.utils.stringutils.to_str(contents))
|
||||
tmp_.write(salt.utils.stringutils.to_bytes(contents))
|
||||
|
||||
# Copy into place
|
||||
salt.utils.files.copyfile(tmp,
|
||||
|
@ -4632,7 +4632,7 @@ def status(cwd,
|
||||
password=password,
|
||||
ignore_retcode=ignore_retcode,
|
||||
output_encoding=output_encoding)['stdout']
|
||||
for line in output.split('\0'):
|
||||
for line in output.split(str('\0')):
|
||||
try:
|
||||
state, filename = line.split(None, 1)
|
||||
except ValueError:
|
||||
|
@ -86,7 +86,7 @@ def _config(name, key=None, **kwargs):
|
||||
value = kwargs[name]
|
||||
else:
|
||||
value = __salt__['config.option']('ldap.{0}'.format(key))
|
||||
return value
|
||||
return salt.utils.data.decode(value, to_str=True)
|
||||
|
||||
|
||||
def _connect(**kwargs):
|
||||
@ -141,7 +141,7 @@ def search(filter, # pylint: disable=C0103
|
||||
if attrs == '': # Allow command line 'return all' attr override
|
||||
attrs = None
|
||||
elif attrs is None:
|
||||
attrs = salt.utils.data.decode(_config('attrs'), to_str=True)
|
||||
attrs = _config('attrs')
|
||||
_ldap = _connect(**kwargs)
|
||||
start = time.time()
|
||||
log.debug(
|
||||
|
@ -1140,7 +1140,7 @@ def convert_cidr(cidr):
|
||||
ret = {'network': None,
|
||||
'netmask': None}
|
||||
cidr = calc_net(cidr)
|
||||
network_info = salt.ext.ipaddress.ip_network(cidr)
|
||||
network_info = ipaddress.ip_network(cidr)
|
||||
ret['network'] = six.text_type(network_info.network_address)
|
||||
ret['netmask'] = six.text_type(network_info.netmask)
|
||||
return ret
|
||||
|
@ -397,7 +397,8 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
|
||||
use_vt=False,
|
||||
trusted_host=None,
|
||||
no_cache_dir=False,
|
||||
cache_dir=None):
|
||||
cache_dir=None,
|
||||
no_binary=None):
|
||||
'''
|
||||
Install packages with pip
|
||||
|
||||
@ -423,7 +424,12 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
|
||||
Prefer wheel archives (requires pip>=1.4)
|
||||
|
||||
no_use_wheel
|
||||
Force to not use wheel archives (requires pip>=1.4)
|
||||
Force to not use wheel archives (requires pip>=1.4,<10.0.0)
|
||||
|
||||
no_binary
|
||||
Force to not use binary packages (requires pip >= 7.0.0)
|
||||
Accepts either :all: to disable all binary packages, :none: to empty the set,
|
||||
or one or more package names with commas between them
|
||||
|
||||
log
|
||||
Log file where a complete (maximum verbosity) record will be kept
|
||||
@ -595,29 +601,48 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
|
||||
|
||||
if use_wheel:
|
||||
min_version = '1.4'
|
||||
max_version = '9.0.3'
|
||||
cur_version = __salt__['pip.version'](bin_env)
|
||||
if not salt.utils.versions.compare(ver1=cur_version, oper='>=',
|
||||
ver2=min_version):
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
|
||||
if too_low or too_high:
|
||||
logger.error(
|
||||
'The --use-wheel option is only supported in pip %s and '
|
||||
'newer. The version of pip detected is %s. This option '
|
||||
'will be ignored.', min_version, cur_version
|
||||
'The --use-wheel option is only supported in pip between %s and '
|
||||
'%s. The version of pip detected is %s. This option '
|
||||
'will be ignored.', min_version, max_version, cur_version
|
||||
)
|
||||
else:
|
||||
cmd.append('--use-wheel')
|
||||
|
||||
if no_use_wheel:
|
||||
min_version = '1.4'
|
||||
max_version = '9.0.3'
|
||||
cur_version = __salt__['pip.version'](bin_env)
|
||||
if not salt.utils.versions.compare(ver1=cur_version, oper='>=',
|
||||
ver2=min_version):
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
|
||||
if too_low or too_high:
|
||||
logger.error(
|
||||
'The --no-use-wheel option is only supported in pip %s and '
|
||||
'The --no-use-wheel option is only supported in pip between %s and '
|
||||
'%s. The version of pip detected is %s. This option '
|
||||
'will be ignored.', min_version, max_version, cur_version
|
||||
)
|
||||
else:
|
||||
cmd.append('--no-use-wheel')
|
||||
|
||||
if no_binary:
|
||||
min_version = '7.0.0'
|
||||
cur_version = __salt__['pip.version'](bin_env)
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
if too_low:
|
||||
logger.error(
|
||||
'The --no-binary option is only supported in pip %s and '
|
||||
'newer. The version of pip detected is %s. This option '
|
||||
'will be ignored.', min_version, cur_version
|
||||
)
|
||||
else:
|
||||
cmd.append('--no-use-wheel')
|
||||
if isinstance(no_binary, list):
|
||||
no_binary = ','.join(no_binary)
|
||||
cmd.extend(['--no-binary', no_binary])
|
||||
|
||||
if log:
|
||||
if os.path.isdir(log):
|
||||
@ -783,6 +808,11 @@ def install(pkgs=None, # pylint: disable=R0912,R0913,R0914
|
||||
# Put the commas back in while making sure the names are contained in
|
||||
# quotes, this allows for proper version spec passing salt>=0.17.0
|
||||
cmd.extend([p.replace(';', ',') for p in pkgs])
|
||||
elif not any([requirements, editable]):
|
||||
# Starting with pip 10.0.0, if no packages are specified in the
|
||||
# command, it returns a retcode 1. So instead of running the command,
|
||||
# just return the output without running pip.
|
||||
return {'retcode': 0, 'stdout': 'No packages to install.'}
|
||||
|
||||
if editable:
|
||||
egg_match = re.compile(r'(?:#|#.*?&)egg=([^&]*)')
|
||||
|
@ -422,7 +422,7 @@ def _check_queue(queue, kwargs):
|
||||
|
||||
|
||||
def _get_initial_pillar(opts):
|
||||
return __pillar__ if __opts__['__cli'] == 'salt-call' \
|
||||
return __pillar__ if __opts__.get('__cli', None) == 'salt-call' \
|
||||
and opts['pillarenv'] == __opts__['pillarenv'] \
|
||||
else None
|
||||
|
||||
|
@ -22,6 +22,7 @@ import salt.utils.json
|
||||
import salt.utils.platform
|
||||
import salt.utils.powershell
|
||||
import salt.utils.versions
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@ -57,11 +58,29 @@ def _pshell_json(cmd, cwd=None):
|
||||
if 'convertto-json' not in cmd.lower():
|
||||
cmd = '{0} | ConvertTo-Json'.format(cmd)
|
||||
log.debug('PowerShell: %s', cmd)
|
||||
ret = __salt__['cmd.shell'](cmd, shell='powershell', cwd=cwd)
|
||||
ret = __salt__['cmd.run_all'](cmd, shell='powershell', cwd=cwd)
|
||||
|
||||
if 'pid' in ret:
|
||||
del ret['pid']
|
||||
|
||||
if ret.get('stderr', ''):
|
||||
error = ret['stderr'].splitlines()[0]
|
||||
raise CommandExecutionError(error, info=ret)
|
||||
|
||||
if 'retcode' not in ret or ret['retcode'] != 0:
|
||||
# run_all logs an error to log.error, fail hard back to the user
|
||||
raise CommandExecutionError(
|
||||
'Issue executing PowerShell {0}'.format(cmd), info=ret)
|
||||
|
||||
# Sometimes Powershell returns an empty string, which isn't valid JSON
|
||||
if ret['stdout'] == '':
|
||||
ret['stdout'] = '{}'
|
||||
|
||||
try:
|
||||
ret = salt.utils.json.loads(ret, strict=False)
|
||||
ret = salt.utils.json.loads(ret['stdout'], strict=False)
|
||||
except ValueError:
|
||||
log.debug('Json not returned')
|
||||
raise CommandExecutionError(
|
||||
'No JSON results from PowerShell', info=ret)
|
||||
return ret
|
||||
|
||||
|
||||
@ -341,7 +360,12 @@ def remove(feature, remove_payload=False, restart=False):
|
||||
.format(command, _cmd_quote(feature), management_tools,
|
||||
_remove_payload,
|
||||
'-Restart' if restart else '')
|
||||
out = _pshell_json(cmd)
|
||||
try:
|
||||
out = _pshell_json(cmd)
|
||||
except CommandExecutionError as exc:
|
||||
if 'ArgumentNotValid' in exc.message:
|
||||
raise CommandExecutionError('Invalid Feature Name', info=exc.info)
|
||||
raise
|
||||
|
||||
# Results are stored in a list of dictionaries in `FeatureResult`
|
||||
if out['FeatureResult']:
|
||||
|
@ -1355,7 +1355,7 @@ def install(name=None,
|
||||
|
||||
try:
|
||||
pkg_params, pkg_type = __salt__['pkg_resource.parse_targets'](
|
||||
name, pkgs, sources, saltenv=saltenv, normalize=normalize
|
||||
name, pkgs, sources, saltenv=saltenv, normalize=normalize, **kwargs
|
||||
)
|
||||
except MinionError as exc:
|
||||
raise CommandExecutionError(exc)
|
||||
@ -1653,7 +1653,7 @@ def install(name=None,
|
||||
if _yum() == 'dnf':
|
||||
cmd.extend(['--best', '--allowerasing'])
|
||||
_add_common_args(cmd)
|
||||
cmd.append('install' if pkg_type is not 'advisory' else 'update')
|
||||
cmd.append('install' if pkg_type != 'advisory' else 'update')
|
||||
cmd.extend(targets)
|
||||
out = __salt__['cmd.run_all'](
|
||||
cmd,
|
||||
|
@ -301,7 +301,7 @@ class EventListener(object):
|
||||
|
||||
self.event.set_event_handler(self._handle_event_socket_recv)
|
||||
|
||||
def clean_timeout_futures(self, request):
|
||||
def clean_by_request(self, request):
|
||||
'''
|
||||
Remove all futures that were waiting for request `request` since it is done waiting
|
||||
'''
|
||||
@ -493,7 +493,7 @@ class BaseSaltAPIHandler(tornado.web.RequestHandler): # pylint: disable=W0223
|
||||
timeout a session
|
||||
'''
|
||||
# TODO: set a header or something??? so we know it was a timeout
|
||||
self.application.event_listener.clean_timeout_futures(self)
|
||||
self.application.event_listener.clean_by_request(self)
|
||||
|
||||
def on_finish(self):
|
||||
'''
|
||||
@ -969,6 +969,17 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
|
||||
yield job_not_running_future
|
||||
raise tornado.gen.Return((yield all_return_future))
|
||||
|
||||
def subscribe_minion_returns(self, jid, minions):
|
||||
# Subscribe each minion event
|
||||
future_minion_map = {}
|
||||
for minion in minions:
|
||||
tag = tagify([jid, 'ret', minion], 'job')
|
||||
minion_future = self.application.event_listener.get_event(self,
|
||||
tag=tag,
|
||||
matcher=EventListener.exact_matcher)
|
||||
future_minion_map[minion_future] = minion
|
||||
return future_minion_map
|
||||
|
||||
@tornado.gen.coroutine
|
||||
def all_returns(self,
|
||||
jid,
|
||||
@ -1049,8 +1060,7 @@ class SaltAPIHandler(BaseSaltAPIHandler): # pylint: disable=W0223
|
||||
try:
|
||||
event = self.application.event_listener.get_event(self,
|
||||
tag=ping_tag,
|
||||
timeout=self.application.opts['gather_job_timeout'],
|
||||
)
|
||||
timeout=self.application.opts['gather_job_timeout'])
|
||||
f = yield Any([event, is_finished])
|
||||
# When finished entire routine, cleanup other futures and return result
|
||||
if f is is_finished:
|
||||
|
@ -231,6 +231,7 @@ import os
|
||||
import logging
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.data
|
||||
from salt.exceptions import SaltInvocationError
|
||||
|
||||
# Import third party libs
|
||||
@ -271,7 +272,7 @@ def _config(name, conf):
|
||||
Return a value for 'name' from the config file options.
|
||||
'''
|
||||
try:
|
||||
value = conf[name]
|
||||
value = salt.utils.data.decode(conf[name], to_str=True)
|
||||
except KeyError:
|
||||
value = None
|
||||
return value
|
||||
|
@ -2292,9 +2292,9 @@ def managed(name,
|
||||
.format(contents_id)
|
||||
)
|
||||
|
||||
contents_are_binary = \
|
||||
isinstance(use_contents, six.string_types) and '\0' in use_contents
|
||||
if contents_are_binary:
|
||||
if isinstance(use_contents, bytes) and b'\0' in use_contents:
|
||||
contents = use_contents
|
||||
elif isinstance(use_contents, six.string_types) and str('\0') in use_contents:
|
||||
contents = use_contents
|
||||
else:
|
||||
validated_contents = _validate_str_list(use_contents)
|
||||
@ -6287,7 +6287,7 @@ def serialize(name,
|
||||
'result': False}
|
||||
|
||||
with salt.utils.files.fopen(name, 'r') as fhr:
|
||||
existing_data = __serializers__[deserializer_name](fhr)
|
||||
existing_data = __serializers__[deserializer_name](fhr, **options.get(serializer_name, {}))
|
||||
|
||||
if existing_data is not None:
|
||||
merged_data = salt.utils.dictupdate.merge_recurse(existing_data, dataset)
|
||||
|
@ -325,7 +325,8 @@ def installed(name,
|
||||
use_vt=False,
|
||||
trusted_host=None,
|
||||
no_cache_dir=False,
|
||||
cache_dir=None):
|
||||
cache_dir=None,
|
||||
no_binary=None):
|
||||
'''
|
||||
Make sure the package is installed
|
||||
|
||||
@ -360,6 +361,25 @@ def installed(name,
|
||||
    no_use_wheel : False
        Force to not use wheel archives (requires pip>=1.4)

    no_binary
        Force to not use binary packages (requires pip >= 7.0.0)
        Accepts either :all: to disable all binary packages, :none: to empty the set,
        or a list of one or more packages

        Example:

        .. code-block:: yaml

            django:
              pip.installed:
                - no_binary: ':all:'

            flask:
              pip.installed:
                - no_binary:
                  - itsdangerous
                  - click

    log
        Log file where a complete (maximum verbosity) record will be kept

@ -602,23 +622,39 @@ def installed(name,
|
||||
# Check that the pip binary supports the 'use_wheel' option
|
||||
if use_wheel:
|
||||
min_version = '1.4'
|
||||
max_version = '9.0.3'
|
||||
cur_version = __salt__['pip.version'](bin_env)
|
||||
if not salt.utils.versions.compare(ver1=cur_version, oper='>=',
|
||||
ver2=min_version):
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
|
||||
if too_low or too_high:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('The \'use_wheel\' option is only supported in '
|
||||
'pip {0} and newer. The version of pip detected '
|
||||
'was {1}.').format(min_version, cur_version)
|
||||
'pip between {0} and {1}. The version of pip detected '
|
||||
'was {2}.').format(min_version, max_version, cur_version)
|
||||
return ret
|
||||
|
||||
# Check that the pip binary supports the 'no_use_wheel' option
|
||||
if no_use_wheel:
|
||||
min_version = '1.4'
|
||||
max_version = '9.0.3'
|
||||
cur_version = __salt__['pip.version'](bin_env)
|
||||
if not salt.utils.versions.compare(ver1=cur_version, oper='>=',
|
||||
ver2=min_version):
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
|
||||
if too_low or too_high:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('The \'no_use_wheel\' option is only supported in '
|
||||
'pip between {0} and {1}. The version of pip detected '
|
||||
'was {2}.').format(min_version, max_version, cur_version)
|
||||
return ret
|
||||
|
||||
# Check that the pip binary supports the 'no_binary' option
|
||||
if no_binary:
|
||||
min_version = '7.0.0'
|
||||
cur_version = __salt__['pip.version'](bin_env)
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
if too_low:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('The \'no_binary\' option is only supported in '
|
||||
'pip {0} and newer. The version of pip detected '
|
||||
'was {1}.').format(min_version, cur_version)
|
||||
return ret
|
||||
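The three option checks above share one pattern: bound the detected pip version with salt.utils.versions.compare before honouring the flag. A minimal sketch of that pattern, factored into a hypothetical helper that is not part of this changeset:

    import salt.utils.versions

    def _pip_option_supported(cur_version, min_version, max_version=None):
        # Too old for the option at all?
        if salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version):
            return False
        # Option removed again in newer pip releases (e.g. use_wheel after 9.0.3)?
        if max_version is not None and salt.utils.versions.compare(
                ver1=cur_version, oper='>', ver2=max_version):
            return False
        return True
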
@ -735,6 +771,7 @@ def installed(name,
|
||||
bin_env=bin_env,
|
||||
use_wheel=use_wheel,
|
||||
no_use_wheel=no_use_wheel,
|
||||
no_binary=no_binary,
|
||||
log=log,
|
||||
proxy=proxy,
|
||||
timeout=timeout,
|
||||
|
@ -192,11 +192,8 @@ def present(name,
|
||||
salt.utils.stringutils.to_unicode(name, 'utf-8'))
|
||||
return ret
|
||||
|
||||
try:
|
||||
vdata_decoded = salt.utils.stringutils.to_unicode(vdata, 'utf-8')
|
||||
except UnicodeDecodeError:
|
||||
# vdata contains binary data that can't be decoded
|
||||
vdata_decoded = vdata
|
||||
vdata_decoded = __utils__['reg.cast_vdata'](vdata=vdata, vtype=vtype)
|
||||
|
||||
add_change = {'Key': r'{0}\{1}'.format(hive, key),
|
||||
'Entry': '{0}'.format(salt.utils.stringutils.to_unicode(vname, 'utf-8') if vname else '(Default)'),
|
||||
'Value': vdata_decoded}
|
||||
|
@ -60,7 +60,8 @@ def managed(name,
|
||||
pip_pkgs=None,
|
||||
pip_no_cache_dir=False,
|
||||
pip_cache_dir=None,
|
||||
process_dependency_links=False):
|
||||
process_dependency_links=False,
|
||||
no_binary=None):
|
||||
'''
|
||||
Create a virtualenv and optionally manage it with pip
|
||||
|
||||
@ -110,6 +111,11 @@ def managed(name,
|
||||
    no_use_wheel: False
        Force to not use wheel archives (requires pip>=1.4)

    no_binary
        Force to not use binary packages (requires pip >= 7.0.0)
        Accepts either :all: to disable all binary packages, :none: to empty the set,
        or a list of one or more packages

    pip_upgrade: False
        Pass `--upgrade` to `pip install`.

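Unlike the pip.installed docs above, this docstring gains no example; a hypothetical state using the new argument could look like the following (the virtualenv path and package names are placeholders):

    /opt/venv:
      virtualenv.managed:
        - pip_pkgs:
          - flask
        - no_binary:
          - itsdangerous
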
@ -185,7 +191,6 @@ def managed(name,
|
||||
ret['comment'] = 'Virtualenv {0} is set to be cleared'.format(name)
|
||||
return ret
|
||||
if venv_exists and not clear:
|
||||
#ret['result'] = None
|
||||
ret['comment'] = 'Virtualenv {0} is already created'.format(name)
|
||||
return ret
|
||||
ret['result'] = None
|
||||
@ -229,27 +234,44 @@ def managed(name,
|
||||
elif venv_exists:
|
||||
ret['comment'] = 'virtualenv exists'
|
||||
|
||||
# Check that the pip binary supports the 'use_wheel' option
|
||||
if use_wheel:
|
||||
min_version = '1.4'
|
||||
max_version = '9.0.3'
|
||||
cur_version = __salt__['pip.version'](bin_env=name)
|
||||
if not salt.utils.versions.compare(ver1=cur_version, oper='>=',
|
||||
ver2=min_version):
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
|
||||
if too_low or too_high:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('The \'use_wheel\' option is only supported in '
|
||||
'pip {0} and newer. The version of pip detected '
|
||||
'was {1}.').format(min_version, cur_version)
|
||||
'pip between {0} and {1}. The version of pip detected '
|
||||
'was {2}.').format(min_version, max_version, cur_version)
|
||||
return ret
|
||||
|
||||
# Check that the pip binary supports the 'no_use_wheel' option
|
||||
if no_use_wheel:
|
||||
min_version = '1.4'
|
||||
max_version = '9.0.3'
|
||||
cur_version = __salt__['pip.version'](bin_env=name)
|
||||
if not salt.utils.versions.compare(ver1=cur_version, oper='>=',
|
||||
ver2=min_version):
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
too_high = salt.utils.versions.compare(ver1=cur_version, oper='>', ver2=max_version)
|
||||
if too_low or too_high:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('The \'no_use_wheel\' option is only supported '
|
||||
'in pip {0} and newer. The version of pip '
|
||||
'detected was {1}.').format(min_version,
|
||||
cur_version)
|
||||
ret['comment'] = ('The \'no_use_wheel\' option is only supported in '
|
||||
'pip between {0} and {1}. The version of pip detected '
|
||||
'was {2}.').format(min_version, max_version, cur_version)
|
||||
return ret
|
||||
|
||||
# Check that the pip binary supports the 'no_binary' option
|
||||
if no_binary:
|
||||
min_version = '7.0.0'
|
||||
cur_version = __salt__['pip.version'](bin_env=name)
|
||||
too_low = salt.utils.versions.compare(ver1=cur_version, oper='<', ver2=min_version)
|
||||
if too_low:
|
||||
ret['result'] = False
|
||||
ret['comment'] = ('The \'no_binary\' option is only supported in '
|
||||
'pip {0} and newer. The version of pip detected '
|
||||
'was {1}.').format(min_version, cur_version)
|
||||
return ret
|
||||
|
||||
# Populate the venv via a requirements file
|
||||
@ -282,6 +304,7 @@ def managed(name,
|
||||
bin_env=name,
|
||||
use_wheel=use_wheel,
|
||||
no_use_wheel=no_use_wheel,
|
||||
no_binary=no_binary,
|
||||
user=user,
|
||||
cwd=cwd,
|
||||
index_url=index_url,
|
||||
|
@ -111,7 +111,7 @@ def installed(name,
|
||||
- XPS-Viewer
|
||||
- SNMP-Service
|
||||
- exclude:
|
||||
- Web-Service
|
||||
- Web-Server
|
||||
'''
|
||||
if 'force' in kwargs:
|
||||
salt.utils.versions.warn_until(
|
||||
|
@ -454,8 +454,9 @@ def _dataset_present(dataset_type, name, volume_size=None, sparse=False, create_
|
||||
## NOTE: fetch current volume properties
|
||||
properties_current = __salt__['zfs.get'](
|
||||
name,
|
||||
type=dataset_type,
|
||||
fields='value',
|
||||
depth=1,
|
||||
depth=0,
|
||||
parsable=True,
|
||||
).get(name, OrderedDict())
|
||||
|
||||
|
@ -45,6 +45,8 @@ class ThorState(salt.state.HighState):
|
||||
self.pillar = pillar
|
||||
self.pillar_keys = pillar_keys
|
||||
opts['file_roots'] = opts['thorium_roots']
|
||||
opts['saltenv'] = opts['thoriumenv']
|
||||
opts['state_top'] = opts['thorium_top']
|
||||
opts['file_client'] = 'local'
|
||||
self.opts = opts
|
||||
if opts.get('minion_data_cache'):
|
||||
|
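For reference, the options remapped above correspond to Thorium config settings; an illustrative master configuration (paths are placeholders, not taken from this changeset) might be:

    thoriumenv: base
    thorium_top: top.sls
    thorium_roots:
      base:
        - /srv/thorium
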
@ -641,7 +641,7 @@ class TCPReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin, salt.tra
|
||||
|
||||
try:
|
||||
id_ = payload['load'].get('id', '')
|
||||
if '\0' in id_:
|
||||
if str('\0') in id_:
|
||||
log.error('Payload contains an id with a null byte: %s', payload)
|
||||
stream.send(self.serial.dumps('bad load: id contains a null byte'))
|
||||
raise tornado.gen.Return()
|
||||
|
@ -662,7 +662,7 @@ class ZeroMQReqServerChannel(salt.transport.mixins.auth.AESReqServerMixin,
|
||||
|
||||
try:
|
||||
id_ = payload['load'].get('id', '')
|
||||
if '\0' in id_:
|
||||
if str('\0') in id_:
|
||||
log.error('Payload contains an id with a null byte: %s', payload)
|
||||
stream.send(self.serial.dumps('bad load: id contains a null byte'))
|
||||
raise tornado.gen.Return()
|
||||
|
@ -31,6 +31,7 @@ import sys
|
||||
import salt.config
|
||||
import salt.ext.six as six
|
||||
import salt.loader
|
||||
import salt.utils.stringutils
|
||||
import salt.version
|
||||
from salt.exceptions import (
|
||||
SaltInvocationError, SaltSystemExit
|
||||
@ -122,7 +123,7 @@ def _determine_auth(**kwargs):
|
||||
'A subscription_id must be specified'
|
||||
)
|
||||
|
||||
subscription_id = kwargs['subscription_id']
|
||||
subscription_id = salt.utils.stringutils.to_str(kwargs['subscription_id'])
|
||||
|
||||
return credentials, subscription_id, cloud_env
|
||||
|
||||
@ -163,7 +164,6 @@ def get_client(client_type, **kwargs):
|
||||
)
|
||||
|
||||
credentials, subscription_id, cloud_env = _determine_auth(**kwargs)
|
||||
|
||||
if client_type == 'subscription':
|
||||
client = Client(
|
||||
credentials=credentials,
|
||||
|
@ -217,7 +217,8 @@ def ip_to_host(ip):
|
||||
'''
|
||||
try:
|
||||
hostname, aliaslist, ipaddrlist = socket.gethostbyaddr(ip)
|
||||
except Exception:
|
||||
except Exception as exc:
|
||||
log.debug('salt.utils.network.ip_to_host(%r) failed: %s', ip, exc)
|
||||
hostname = None
|
||||
return hostname
|
||||
|
||||
|
@ -406,5 +406,5 @@ def os_walk(top, *args, **kwargs):
|
||||
This is a helper than ensures that all paths returned from os.walk are
|
||||
unicode.
|
||||
'''
|
||||
for item in os.walk(top, *args, **kwargs):
|
||||
for item in os.walk(salt.utils.stringutils.to_str(top), *args, **kwargs):
|
||||
yield salt.utils.data.decode(item, preserve_tuples=True)
|
||||
|
@ -190,19 +190,27 @@ def is_binary(data):
|
||||
'''
|
||||
Detects if the passed string of data is binary or text
|
||||
'''
|
||||
if not data or not isinstance(data, six.string_types):
|
||||
if not data or not isinstance(data, (six.string_types, six.binary_type)):
|
||||
return False
|
||||
if '\0' in data:
|
||||
|
||||
if isinstance(data, six.binary_type):
|
||||
if b'\0' in data:
|
||||
return True
|
||||
elif str('\0') in data:
|
||||
return True
|
||||
|
||||
text_characters = ''.join([chr(x) for x in range(32, 127)] + list('\n\r\t\b'))
|
||||
# Get the non-text characters (map each character to itself then use the
|
||||
# 'remove' option to get rid of the text characters.)
|
||||
if six.PY3:
|
||||
trans = ''.maketrans('', '', text_characters)
|
||||
nontext = data.translate(trans)
|
||||
if isinstance(data, six.binary_type):
|
||||
import salt.utils.data
|
||||
nontext = data.translate(None, salt.utils.data.encode(text_characters))
|
||||
else:
|
||||
trans = ''.maketrans('', '', text_characters)
|
||||
nontext = data.translate(trans)
|
||||
else:
|
||||
if isinstance(data, unicode): # pylint: disable=incompatible-py3-code
|
||||
if isinstance(data, six.text_type):
|
||||
trans_args = ({ord(x): None for x in text_characters},)
|
||||
else:
|
||||
trans_args = (None, str(text_characters)) # future lint: blacklisted-function
|
||||
|
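Assuming this hunk is salt/utils/stringutils.py, the broadened behaviour can be illustrated with a short sketch (not part of the changeset):

    import salt.utils.stringutils

    salt.utils.stringutils.is_binary(b'\x00\x01\x02')  # bytes containing a NUL byte -> True
    salt.utils.stringutils.is_binary('plain text')     # ordinary text -> False
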
@ -497,7 +497,7 @@ def valid_id(opts, id_):
|
||||
Returns if the passed id is valid
|
||||
'''
|
||||
try:
|
||||
if any(x in id_ for x in ('/', '\\', '\0')):
|
||||
if any(x in id_ for x in ('/', '\\', str('\0'))):
|
||||
return False
|
||||
return bool(clean_path(opts['pki_dir'], id_))
|
||||
except (AttributeError, KeyError, TypeError):
|
||||
|
@ -45,6 +45,7 @@ except ImportError:
|
||||
import salt.utils.platform
|
||||
import salt.utils.stringutils
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.ext import six
|
||||
|
||||
PY2 = sys.version_info[0] == 2
|
||||
log = logging.getLogger(__name__)
|
||||
@ -169,7 +170,10 @@ def key_exists(hive, key, use_32bit_registry=False):
|
||||
local_key = _to_unicode(key)
|
||||
|
||||
registry = Registry()
|
||||
hkey = registry.hkeys[local_hive]
|
||||
try:
|
||||
hkey = registry.hkeys[local_hive]
|
||||
except KeyError:
|
||||
raise CommandExecutionError('Invalid Hive: {0}'.format(local_hive))
|
||||
access_mask = registry.registry_32[use_32bit_registry]
|
||||
|
||||
try:
|
||||
@ -231,7 +235,10 @@ def list_keys(hive, key=None, use_32bit_registry=False):
|
||||
local_key = _to_unicode(key)
|
||||
|
||||
registry = Registry()
|
||||
hkey = registry.hkeys[local_hive]
|
||||
try:
|
||||
hkey = registry.hkeys[local_hive]
|
||||
except KeyError:
|
||||
raise CommandExecutionError('Invalid Hive: {0}'.format(local_hive))
|
||||
access_mask = registry.registry_32[use_32bit_registry]
|
||||
|
||||
subkeys = []
|
||||
@ -287,7 +294,10 @@ def list_values(hive, key=None, use_32bit_registry=False, include_default=True):
|
||||
local_key = _to_unicode(key)
|
||||
|
||||
registry = Registry()
|
||||
hkey = registry.hkeys[local_hive]
|
||||
try:
|
||||
hkey = registry.hkeys[local_hive]
|
||||
except KeyError:
|
||||
raise CommandExecutionError('Invalid Hive: {0}'.format(local_hive))
|
||||
access_mask = registry.registry_32[use_32bit_registry]
|
||||
handle = None
|
||||
values = list()
|
||||
@ -378,7 +388,10 @@ def read_value(hive, key, vname=None, use_32bit_registry=False):
|
||||
ret['vname'] = '(Default)'
|
||||
|
||||
registry = Registry()
|
||||
hkey = registry.hkeys[local_hive]
|
||||
try:
|
||||
hkey = registry.hkeys[local_hive]
|
||||
except KeyError:
|
||||
raise CommandExecutionError('Invalid Hive: {0}'.format(local_hive))
|
||||
access_mask = registry.registry_32[use_32bit_registry]
|
||||
|
||||
try:
|
||||
@ -524,26 +537,14 @@ def set_value(hive,
|
||||
local_vtype = _to_unicode(vtype)
|
||||
|
||||
registry = Registry()
|
||||
hkey = registry.hkeys[local_hive]
|
||||
try:
|
||||
hkey = registry.hkeys[local_hive]
|
||||
except KeyError:
|
||||
raise CommandExecutionError('Invalid Hive: {0}'.format(local_hive))
|
||||
vtype_value = registry.vtype[local_vtype]
|
||||
access_mask = registry.registry_32[use_32bit_registry] | win32con.KEY_ALL_ACCESS
|
||||
|
||||
# Check data type and cast to expected type
|
||||
# int will automatically become long on 64bit numbers
|
||||
# https://www.python.org/dev/peps/pep-0237/
|
||||
|
||||
# String Types to Unicode
|
||||
if vtype_value in [win32con.REG_SZ, win32con.REG_EXPAND_SZ]:
|
||||
local_vdata = _to_unicode(vdata)
|
||||
# Don't touch binary...
|
||||
elif vtype_value == win32con.REG_BINARY:
|
||||
local_vdata = vdata
|
||||
# Make sure REG_MULTI_SZ is a list of strings
|
||||
elif vtype_value == win32con.REG_MULTI_SZ:
|
||||
local_vdata = [_to_unicode(i) for i in vdata]
|
||||
# Everything else is int
|
||||
else:
|
||||
local_vdata = int(vdata)
|
||||
local_vdata = cast_vdata(vdata=vdata, vtype=local_vtype)
|
||||
|
||||
if volatile:
|
||||
create_options = registry.opttype['REG_OPTION_VOLATILE']
|
||||
@ -563,6 +564,52 @@ def set_value(hive,
|
||||
return False
|
||||
|
||||
|
||||
def cast_vdata(vdata=None, vtype='REG_SZ'):
    '''
    Cast the ``vdata`` value to the appropriate data type for the registry type
    specified in ``vtype``

    Args:

        vdata (str, list, bin): The data to cast

        vtype (str):
            The type of data to be written to the registry. Must be one of the
            following:
            - REG_BINARY
            - REG_DWORD
            - REG_EXPAND_SZ
            - REG_MULTI_SZ
            - REG_SZ

    Returns:
        The vdata cast to the appropriate type. Will be unicode string, binary,
        list of unicode strings, or int
    '''
    # Check data type and cast to expected type
    # int will automatically become long on 64bit numbers
    # https://www.python.org/dev/peps/pep-0237/

    registry = Registry()
    vtype_value = registry.vtype[vtype]

    # String Types to Unicode
    if vtype_value in [win32con.REG_SZ, win32con.REG_EXPAND_SZ]:
        return _to_unicode(vdata)
    # Don't touch binary... if it's binary
    elif vtype_value == win32con.REG_BINARY:
        if isinstance(vdata, six.text_type):
            # Unicode data must be encoded
            return vdata.encode('utf-8')
        return vdata
    # Make sure REG_MULTI_SZ is a list of strings
    elif vtype_value == win32con.REG_MULTI_SZ:
        return [_to_unicode(i) for i in vdata]
    # Everything else is int
    else:
        return int(vdata)
|
||||
|
||||
|
||||
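A few illustrative calls to the new helper (values are hypothetical, chosen only to show the cast per type):

    cast_vdata(vdata='C:\\salt', vtype='REG_SZ')         # -> u'C:\\salt'
    cast_vdata(vdata=['a', 'b'], vtype='REG_MULTI_SZ')   # -> [u'a', u'b']
    cast_vdata(vdata='1', vtype='REG_DWORD')             # -> 1 (int)
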
def delete_key_recursive(hive, key, use_32bit_registry=False):
|
||||
'''
|
||||
.. versionadded:: 2015.5.4
|
||||
@ -601,7 +648,10 @@ def delete_key_recursive(hive, key, use_32bit_registry=False):
|
||||
|
||||
# Instantiate the registry object
|
||||
registry = Registry()
|
||||
hkey = registry.hkeys[local_hive]
|
||||
try:
|
||||
hkey = registry.hkeys[local_hive]
|
||||
except KeyError:
|
||||
raise CommandExecutionError('Invalid Hive: {0}'.format(local_hive))
|
||||
key_path = local_key
|
||||
access_mask = registry.registry_32[use_32bit_registry] | win32con.KEY_ALL_ACCESS
|
||||
|
||||
@ -699,7 +749,10 @@ def delete_value(hive, key, vname=None, use_32bit_registry=False):
|
||||
local_vname = _to_unicode(vname)
|
||||
|
||||
registry = Registry()
|
||||
hkey = registry.hkeys[local_hive]
|
||||
try:
|
||||
hkey = registry.hkeys[local_hive]
|
||||
except KeyError:
|
||||
raise CommandExecutionError('Invalid Hive: {0}'.format(local_hive))
|
||||
access_mask = registry.registry_32[use_32bit_registry] | win32con.KEY_ALL_ACCESS
|
||||
|
||||
try:
|
||||
|
@ -127,6 +127,47 @@ class SaltYamlSafeLoader(yaml.SafeLoader):
|
||||
        value = self.construct_scalar(node)
        return salt.utils.stringutils.to_unicode(value)

    def fetch_plain(self):
        '''
        Handle unicode literal strings which appear inline in the YAML
        '''
        orig_line = self.line
        orig_column = self.column
        orig_pointer = self.pointer
        try:
            return super(SaltYamlSafeLoader, self).fetch_plain()
        except yaml.scanner.ScannerError as exc:
            problem_line = self.line
            problem_column = self.column
            problem_pointer = self.pointer
            if exc.problem == "found unexpected ':'":
                # Reset to prior position
                self.line = orig_line
                self.column = orig_column
                self.pointer = orig_pointer
                if self.peek(0) == 'u':
                    # Might be a unicode literal string, check for 2nd char and
                    # call the appropriate fetch func if it's a quote
                    quote_char = self.peek(1)
                    if quote_char in ("'", '"'):
                        # Skip the "u" prefix by advancing the column and
                        # pointer by 1
                        self.column += 1
                        self.pointer += 1
                        if quote_char == '\'':
                            return self.fetch_single()
                        else:
                            return self.fetch_double()
                    else:
                        # This wasn't a unicode literal string, so the caught
                        # exception was correct. Restore the old position and
                        # then raise the caught exception.
                        self.line = problem_line
                        self.column = problem_column
                        self.pointer = problem_pointer
            # Raise the caught exception
            raise exc
|
||||
|
||||
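The override above targets inline u-prefixed scalars, i.e. Python 2 unicode reprs that leak into rendered SLS data. A hypothetical fragment of the kind of input it is meant to tolerate (illustrative only, not taken from this changeset):

    some_state:
      test.configurable_test_state:
        - name: u'some rendered value'   # intended to load as the plain string
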
def flatten_mapping(self, node):
|
||||
merge = []
|
||||
index = 0
|
||||
|
@ -4,6 +4,5 @@
|
||||
- user: issue-1959
|
||||
{%- if grains.get('pythonversion')[0] != 2 %}
|
||||
{#- wheels are disabled because the pip cache dir will not be owned by the above issue-1959 user. Need to check this ASAP #}
|
||||
- use_wheel: False
|
||||
- no_use_wheel: True
|
||||
- no_binary: ':all:'
|
||||
{%- endif %}
|
||||
|
@ -1,3 +0,0 @@
|
||||
{{ salt['runtests_helpers.get_salt_temp_dir_for_path']('test.append') }}:
|
||||
file:
|
||||
- touch
|
@ -1,4 +0,0 @@
|
||||
issue-2227:
|
||||
file.append:
|
||||
- name: {{ salt['runtests_helpers.get_salt_temp_dir_for_path']('test.append') }}
|
||||
- text: HISTTIMEFORMAT='%F %T '
|
@ -1,4 +0,0 @@
|
||||
{{ salt['runtests_helpers.get_salt_temp_dir_for_path']('test.append') }}:
|
||||
|
||||
file.append:
|
||||
- source: salt://testappend/firstif
|
@ -1,3 +0,0 @@
|
||||
{{ salt['runtests_helpers.get_salt_temp_dir_for_path']('test.append') }}:
|
||||
file.append:
|
||||
- source: salt://testappend/secondif
|
@ -10,24 +10,24 @@ winrm set winrm/config/service/auth '@{Basic="true"}'
|
||||
$SourceStoreScope = 'LocalMachine'
|
||||
$SourceStorename = 'Remote Desktop'
|
||||
|
||||
$SourceStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $SourceStorename, $SourceStoreScope
|
||||
$SourceStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $SourceStorename, $SourceStoreScope
|
||||
$SourceStore.Open([System.Security.Cryptography.X509Certificates.OpenFlags]::ReadOnly)
|
||||
|
||||
$cert = $SourceStore.Certificates | Where-Object -FilterScript {
|
||||
$cert = $SourceStore.Certificates | Where-Object -FilterScript {
|
||||
$_.subject -like '*'
|
||||
}
|
||||
|
||||
$DestStoreScope = 'LocalMachine'
|
||||
$DestStoreName = 'My'
|
||||
|
||||
$DestStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $DestStoreName, $DestStoreScope
|
||||
$DestStore = New-Object -TypeName System.Security.Cryptography.X509Certificates.X509Store -ArgumentList $DestStoreName, $DestStoreScope
|
||||
$DestStore.Open([System.Security.Cryptography.X509Certificates.OpenFlags]::ReadWrite)
|
||||
$DestStore.Add($cert)
|
||||
|
||||
$SourceStore.Close()
|
||||
$DestStore.Close()
|
||||
|
||||
winrm create winrm/config/listener?Address=*+Transport=HTTPS `@`{Hostname=`"($certId)`"`;CertificateThumbprint=`"($cert.Thumbprint)`"`}
|
||||
winrm create winrm/config/listener?Address=*+Transport=HTTPS `@`{CertificateThumbprint=`"($cert.Thumbprint)`"`}
|
||||
|
||||
Restart-Service winrm
|
||||
</powershell>
|
||||
|
@ -44,7 +44,7 @@ class CPModuleTest(ModuleCase):
|
||||
super(CPModuleTest, self).run_function(*args, **kwargs)
|
||||
)
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_file(self, tgt):
|
||||
'''
|
||||
cp.get_file
|
||||
@ -76,7 +76,7 @@ class CPModuleTest(ModuleCase):
|
||||
self.assertIn('KNIGHT: They\'re nervous, sire.', data)
|
||||
self.assertNotIn('bacon', data)
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_file_templated_paths(self, tgt):
|
||||
'''
|
||||
cp.get_file
|
||||
@ -94,7 +94,7 @@ class CPModuleTest(ModuleCase):
|
||||
self.assertIn('Gromit', data)
|
||||
self.assertNotIn('bacon', data)
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_file_gzipped(self, tgt):
|
||||
'''
|
||||
cp.get_file
|
||||
@ -137,7 +137,7 @@ class CPModuleTest(ModuleCase):
|
||||
self.assertIn('KNIGHT: They\'re nervous, sire.', data)
|
||||
self.assertNotIn('bacon', data)
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_template(self, tgt):
|
||||
'''
|
||||
cp.get_template
|
||||
@ -186,7 +186,7 @@ class CPModuleTest(ModuleCase):
|
||||
|
||||
# cp.get_url tests
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_url(self, tgt):
|
||||
'''
|
||||
cp.get_url with salt:// source given
|
||||
@ -277,7 +277,7 @@ class CPModuleTest(ModuleCase):
|
||||
self.assertIn('KNIGHT: They\'re nervous, sire.', data)
|
||||
self.assertNotIn('bacon', data)
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_url_https(self, tgt):
|
||||
'''
|
||||
cp.get_url with https:// source given
|
||||
@ -619,7 +619,7 @@ class CPModuleTest(ModuleCase):
|
||||
self.assertEqual(
|
||||
sha256_hash['hsum'], hashlib.sha256(data).hexdigest())
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_file_from_env_predefined(self, tgt):
|
||||
'''
|
||||
cp.get_file
|
||||
@ -634,7 +634,7 @@ class CPModuleTest(ModuleCase):
|
||||
finally:
|
||||
os.unlink(tgt)
|
||||
|
||||
@with_tempfile
|
||||
@with_tempfile()
|
||||
def test_get_file_from_env_in_url(self, tgt):
|
||||
tgt = os.path.join(paths.TMP, 'cheese')
|
||||
try:
|
||||
|
@ -11,8 +11,9 @@ import time
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.helpers import with_tempdir
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.paths import TMP, TMP_PILLAR_TREE
|
||||
from tests.support.paths import FILES, TMP, TMP_PILLAR_TREE
|
||||
from tests.support.mixins import SaltReturnAssertsMixin
|
||||
|
||||
# Import Salt libs
|
||||
@ -29,6 +30,37 @@ import logging
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
DEFAULT_ENDING = salt.utils.stringutils.to_bytes(os.linesep)
|
||||
|
||||
|
||||
def trim_line_end(line):
|
||||
'''
|
||||
Remove CRLF or LF from the end of line.
|
||||
'''
|
||||
if line[-2:] == salt.utils.stringutils.to_bytes('\r\n'):
|
||||
return line[:-2]
|
||||
elif line[-1:] == salt.utils.stringutils.to_bytes('\n'):
|
||||
return line[:-1]
|
||||
raise Exception("Invalid line ending")
|
||||
|
||||
|
||||
def reline(source, dest, force=False, ending=DEFAULT_ENDING):
|
||||
'''
|
||||
Normalize the line endings of a file.
|
||||
'''
|
||||
fp, tmp = tempfile.mkstemp()
|
||||
os.close(fp)
|
||||
with salt.utils.files.fopen(tmp, 'wb') as tmp_fd:
|
||||
with salt.utils.files.fopen(source, 'rb') as fd:
|
||||
lines = fd.readlines()
|
||||
for line in lines:
|
||||
line_noend = trim_line_end(line)
|
||||
tmp_fd.write(line_noend + ending)
|
||||
if os.path.exists(dest) and force:
|
||||
os.remove(dest)
|
||||
os.rename(tmp, dest)
|
||||
|
||||
|
||||
class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
'''
|
||||
Validate the state module
|
||||
@ -36,6 +68,13 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
|
||||
maxDiff = None
|
||||
|
||||
def setUp(self):
|
||||
super(StateModuleTest, self).setUp()
|
||||
destpath = os.path.join(FILES, 'file', 'base', 'testappend', 'firstif')
|
||||
reline(destpath, destpath, force=True)
|
||||
destpath = os.path.join(FILES, 'file', 'base', 'testappend', 'secondif')
|
||||
reline(destpath, destpath, force=True)
|
||||
|
||||
def test_show_highstate(self):
|
||||
'''
|
||||
state.show_highstate
|
||||
@ -199,21 +238,24 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
ret = self.run_function('state.run_request')
|
||||
self.assertEqual(ret, {})
|
||||
|
||||
def test_issue_1896_file_append_source(self):
|
||||
@with_tempdir()
|
||||
def test_issue_1896_file_append_source(self, base_dir):
|
||||
'''
|
||||
Verify that we can append a file's contents
|
||||
'''
|
||||
testfile = os.path.join(TMP, 'test.append')
|
||||
if os.path.isfile(testfile):
|
||||
os.unlink(testfile)
|
||||
testfile = os.path.join(base_dir, 'test.append')
|
||||
|
||||
ret = self.run_function('state.sls', mods='testappend')
|
||||
ret = self.run_state('file.touch', name=testfile)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
ret = self.run_function('state.sls', mods='testappend.step-1')
|
||||
ret = self.run_state(
|
||||
'file.append',
|
||||
name=testfile,
|
||||
source='salt://testappend/firstif')
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
ret = self.run_function('state.sls', mods='testappend.step-2')
|
||||
ret = self.run_state(
|
||||
'file.append',
|
||||
name=testfile,
|
||||
source='salt://testappend/secondif')
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
with salt.utils.files.fopen(testfile, 'r') as fp_:
|
||||
@ -236,14 +278,17 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
contents = os.linesep.join(new_contents)
|
||||
contents += os.linesep
|
||||
|
||||
self.assertMultiLineEqual(
|
||||
contents, testfile_contents)
|
||||
self.assertMultiLineEqual(contents, testfile_contents)
|
||||
|
||||
# Re-append switching order
|
||||
ret = self.run_function('state.sls', mods='testappend.step-2')
|
||||
ret = self.run_state(
|
||||
'file.append',
|
||||
name=testfile,
|
||||
source='salt://testappend/secondif')
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
ret = self.run_function('state.sls', mods='testappend.step-1')
|
||||
ret = self.run_state(
|
||||
'file.append',
|
||||
name=testfile,
|
||||
source='salt://testappend/firstif')
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
with salt.utils.files.fopen(testfile, 'r') as fp_:
|
||||
|
@ -9,6 +9,7 @@ from tests.support.case import ModuleCase
|
||||
from tests.support.unit import skipIf
|
||||
|
||||
# Import Salt libs
|
||||
from salt.ext import six
|
||||
import salt.utils.platform
|
||||
|
||||
|
||||
@ -26,3 +27,49 @@ class StatusModuleTest(ModuleCase):
|
||||
random_pid = random.choice(grab_pids)
|
||||
grep_salt = self.run_function('cmd.run', ['ps aux | grep salt'])
|
||||
self.assertIn(random_pid, grep_salt)
|
||||
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'windows only test')
|
||||
def test_status_cpuload(self):
|
||||
'''
|
||||
status.cpuload
|
||||
'''
|
||||
ret = self.run_function('status.cpuload')
|
||||
self.assertTrue(isinstance(ret, float))
|
||||
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'windows only test')
|
||||
def test_status_saltmem(self):
|
||||
'''
|
||||
status.saltmem
|
||||
'''
|
||||
ret = self.run_function('status.saltmem')
|
||||
self.assertTrue(isinstance(ret, int))
|
||||
|
||||
def test_status_diskusage(self):
|
||||
'''
|
||||
status.diskusage
|
||||
'''
|
||||
ret = self.run_function('status.diskusage')
|
||||
if salt.utils.platform.is_windows():
|
||||
self.assertTrue(isinstance(ret['percent'], float))
|
||||
else:
|
||||
self.assertIn('total', str(ret))
|
||||
self.assertIn('available', str(ret))
|
||||
|
||||
def test_status_procs(self):
|
||||
'''
|
||||
status.procs
|
||||
'''
|
||||
ret = self.run_function('status.procs')
|
||||
for x, y in six.iteritems(ret):
|
||||
self.assertIn('cmd', y)
|
||||
|
||||
def test_status_uptime(self):
|
||||
'''
|
||||
status.uptime
|
||||
'''
|
||||
ret = self.run_function('status.uptime')
|
||||
|
||||
if salt.utils.platform.is_windows():
|
||||
self.assertTrue(isinstance(ret, float))
|
||||
else:
|
||||
self.assertTrue(isinstance(ret['days'], int))
|
||||
|
@ -117,6 +117,31 @@ class UseraddModuleTestWindows(ModuleCase):
|
||||
random.choice(string.ascii_uppercase + string.digits)
|
||||
for x in range(size))
|
||||
|
||||
def setUp(self):
|
||||
self.user_name = self.__random_string()
|
||||
self.group_name = self.__random_string()
|
||||
|
||||
def tearDown(self):
|
||||
self.run_function('user.delete', [self.user_name, True, True])
|
||||
self.run_function('group.delete', [self.group_name])
|
||||
|
||||
def _add_user(self):
|
||||
'''
|
||||
helper method to add a user
|
||||
'''
|
||||
if self.run_function('user.add', [self.user_name]) is False:
|
||||
self.run_function('user.delete', [self.user_name, True, True])
|
||||
self.skipTest('Failed to create user')
|
||||
|
||||
def _add_group(self):
|
||||
'''
|
||||
helper method to add a group
|
||||
'''
|
||||
if self.run_function('group.add', [self.group_name]) is False:
|
||||
# Skip because creating is not what we're testing here
|
||||
self.run_function('group.delete', [self.group_name, True, True])
|
||||
self.skipTest('Failed to create group')
|
||||
|
||||
def test_add_user(self):
|
||||
'''
|
||||
Test adding a user
|
||||
@ -185,3 +210,84 @@ class UseraddModuleTestWindows(ModuleCase):
|
||||
finally:
|
||||
self.run_function('user.delete', [user_name, True, True])
|
||||
self.run_function('group.delete', [group_name])
|
||||
|
||||
def test_add_user_addgroup(self):
|
||||
'''
|
||||
Test adding a user to a group with groupadd
|
||||
'''
|
||||
self._add_group()
|
||||
self._add_user()
|
||||
self.run_function('user.addgroup', [self.user_name, self.group_name])
|
||||
info = self.run_function('user.info', [self.user_name])
|
||||
self.assertEqual(info['groups'], [self.group_name])
|
||||
|
||||
def test_user_chhome(self):
|
||||
'''
|
||||
Test changing a users home dir
|
||||
'''
|
||||
self._add_user()
|
||||
user_dir = r'c:\salt'
|
||||
self.run_function('user.chhome', [self.user_name, user_dir])
|
||||
info = self.run_function('user.info', [self.user_name])
|
||||
self.assertEqual(info['home'], user_dir)
|
||||
|
||||
def test_user_chprofile(self):
|
||||
'''
|
||||
Test changing a users profile
|
||||
'''
|
||||
self._add_user()
|
||||
config = r'c:\salt\config'
|
||||
self.run_function('user.chprofile', [self.user_name, config])
|
||||
info = self.run_function('user.info', [self.user_name])
|
||||
self.assertEqual(info['profile'], config)
|
||||
|
||||
def test_user_chfullname(self):
|
||||
'''
|
||||
Test changing a users fullname
|
||||
'''
|
||||
self._add_user()
|
||||
name = 'Salt Test'
|
||||
self.run_function('user.chfullname', [self.user_name, name])
|
||||
info = self.run_function('user.info', [self.user_name])
|
||||
self.assertEqual(info['fullname'], name)
|
||||
|
||||
def test_user_delete(self):
|
||||
'''
|
||||
Test deleting a user
|
||||
'''
|
||||
self._add_user()
|
||||
self.assertTrue(self.run_function('user.info', [self.user_name])['active'])
|
||||
self.run_function('user.delete', [self.user_name])
|
||||
self.assertEqual({}, self.run_function('user.info', [self.user_name]))
|
||||
|
||||
def test_user_removegroup(self):
|
||||
'''
|
||||
Test removing a group
|
||||
'''
|
||||
self._add_user()
|
||||
self._add_group()
|
||||
self.run_function('user.addgroup', [self.user_name, self.group_name])
|
||||
self.assertIn(self.group_name, self.run_function('user.list_groups', [self.user_name]))
|
||||
self.run_function('user.removegroup', [self.user_name, self.group_name])
self.assertNotIn(self.group_name, self.run_function('user.list_groups', [self.user_name]))
|
||||
|
||||
def test_user_rename(self):
|
||||
'''
|
||||
Test changing a users name
|
||||
'''
|
||||
self._add_user()
|
||||
name = 'newuser'
|
||||
self.run_function('user.rename', [self.user_name, name])
|
||||
info = self.run_function('user.info', [name])
|
||||
self.assertTrue(info['active'])
|
||||
|
||||
#delete new user
|
||||
self.run_function('user.delete', [name, True, True])
|
||||
|
||||
def test_user_setpassword(self):
|
||||
'''
|
||||
Test setting a password
|
||||
'''
|
||||
self._add_user()
|
||||
passwd = 'sup3rs3cr3T!'
|
||||
self.assertTrue(self.run_function('user.setpassword', [self.user_name, passwd]))
|
||||
|
File diff suppressed because it is too large
@ -5,7 +5,6 @@ Tests for the Git state
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import errno
|
||||
import functools
|
||||
import inspect
|
||||
import os
|
||||
@ -16,8 +15,9 @@ import tempfile
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.paths import TMP
|
||||
from tests.support.helpers import with_tempdir
|
||||
from tests.support.mixins import SaltReturnAssertsMixin
|
||||
from tests.support.paths import TMP
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.files
|
||||
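The hunks below convert the git state tests from hand-rolled TMP paths with try/finally cleanup to the with_tempdir helper. A minimal sketch of how such a decorator factory can work (an assumption for illustration, not the actual tests.support.helpers code):

    import functools
    import os
    import shutil
    import tempfile

    def with_tempdir(create=True):
        def decorator(func):
            @functools.wraps(func)
            def wrapper(self, *args, **kwargs):
                dirpath = tempfile.mkdtemp()
                if not create:
                    # Hand the test a path that does not exist yet
                    os.rmdir(dirpath)
                try:
                    return func(self, *(args + (dirpath,)), **kwargs)
                finally:
                    shutil.rmtree(dirpath, ignore_errors=True)
            return wrapper
        return decorator
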
@ -98,174 +98,151 @@ class GitTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
# Reset the dns timeout after the test is over
|
||||
socket.setdefaulttimeout(None)
|
||||
|
||||
def test_latest(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_latest(self, target):
|
||||
'''
|
||||
git.latest
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=name
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=target
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
def test_latest_with_rev_and_submodules(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_latest_with_rev_and_submodules(self, target):
|
||||
'''
|
||||
git.latest
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='develop',
|
||||
target=name,
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='develop',
|
||||
target=target,
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
def test_latest_failure(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_latest_failure(self, target):
|
||||
'''
|
||||
git.latest
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://youSpelledGitHubWrong.com/saltstack/salt-test-repo.git',
|
||||
rev='develop',
|
||||
target=name,
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltFalseReturn(ret)
|
||||
self.assertFalse(os.path.isdir(os.path.join(name, '.git')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://youSpelledGitHubWrong.com/saltstack/salt-test-repo.git',
|
||||
rev='develop',
|
||||
target=target,
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltFalseReturn(ret)
|
||||
self.assertFalse(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
def test_latest_empty_dir(self):
|
||||
@with_tempdir()
|
||||
def test_latest_empty_dir(self, target):
|
||||
'''
|
||||
git.latest
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
if not os.path.isdir(name):
|
||||
os.mkdir(name)
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='develop',
|
||||
target=name,
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='develop',
|
||||
target=target,
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
def test_latest_unless_no_cwd_issue_6800(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_latest_unless_no_cwd_issue_6800(self, target):
|
||||
'''
|
||||
cwd=target was being passed to _run_check which blew up if
|
||||
target dir did not already exist.
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
if os.path.isdir(name):
|
||||
shutil.rmtree(name)
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='develop',
|
||||
target=name,
|
||||
unless='test -e {0}'.format(name),
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='develop',
|
||||
target=target,
|
||||
unless='test -e {0}'.format(target),
|
||||
submodules=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
def test_numeric_rev(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_numeric_rev(self, target):
|
||||
'''
|
||||
git.latest with numeric revision
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev=0.11,
|
||||
target=name,
|
||||
submodules=True,
|
||||
timeout=120
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev=0.11,
|
||||
target=target,
|
||||
submodules=True,
|
||||
timeout=120
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
def test_latest_with_local_changes(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_latest_with_local_changes(self, target):
|
||||
'''
|
||||
Ensure that we fail the state when there are local changes and succeed
|
||||
when force_reset is True.
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
# Clone repo
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=name
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
|
||||
# Clone repo
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=target
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
# Make change to LICENSE file.
|
||||
with salt.utils.files.fopen(os.path.join(name, 'LICENSE'), 'a') as fp_:
|
||||
fp_.write('Lorem ipsum dolor blah blah blah....\n')
|
||||
# Make change to LICENSE file.
|
||||
with salt.utils.files.fopen(os.path.join(target, 'LICENSE'), 'a') as fp_:
|
||||
fp_.write('Lorem ipsum dolor blah blah blah....\n')
|
||||
|
||||
# Make sure that we now have uncommitted changes
|
||||
self.assertTrue(self.run_function('git.diff', [name, 'HEAD']))
|
||||
# Make sure that we now have uncommitted changes
|
||||
self.assertTrue(self.run_function('git.diff', [target, 'HEAD']))
|
||||
|
||||
# Re-run state with force_reset=False
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=name,
|
||||
force_reset=False
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertEqual(
|
||||
ret[next(iter(ret))]['comment'],
|
||||
('Repository {0} is up-to-date, but with uncommitted changes. '
|
||||
'Set \'force_reset\' to True to purge uncommitted changes.'
|
||||
.format(name))
|
||||
)
|
||||
# Re-run state with force_reset=False
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=target,
|
||||
force_reset=False
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertEqual(
|
||||
ret[next(iter(ret))]['comment'],
|
||||
('Repository {0} is up-to-date, but with uncommitted changes. '
|
||||
'Set \'force_reset\' to True to purge uncommitted changes.'
|
||||
.format(target))
|
||||
)
|
||||
|
||||
# Now run the state with force_reset=True
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=name,
|
||||
force_reset=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
# Now run the state with force_reset=True
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
target=target,
|
||||
force_reset=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
# Make sure that we no longer have uncommitted changes
|
||||
self.assertFalse(self.run_function('git.diff', [name, 'HEAD']))
|
||||
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
# Make sure that we no longer have uncommitted changes
|
||||
self.assertFalse(self.run_function('git.diff', [target, 'HEAD']))
|
||||
|
||||
@uses_git_opts
|
||||
def test_latest_fast_forward(self):
|
||||
@with_tempdir(create=False)
|
||||
@with_tempdir(create=False)
|
||||
@with_tempdir(create=False)
|
||||
def test_latest_fast_forward(self, mirror_dir, admin_dir, clone_dir):
|
||||
'''
|
||||
Test running git.latest state a second time after changes have been
|
||||
made to the remote repo.
|
||||
@ -274,98 +251,87 @@ class GitTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
return self.run_function('git.rev_parse', [cwd, 'HEAD'])
|
||||
|
||||
repo_url = 'https://{0}/saltstack/salt-test-repo.git'.format(self.__domain)
|
||||
mirror_dir = os.path.join(TMP, 'salt_repo_mirror')
|
||||
mirror_url = 'file://' + mirror_dir
|
||||
admin_dir = os.path.join(TMP, 'salt_repo_admin')
|
||||
clone_dir = os.path.join(TMP, 'salt_repo')
|
||||
|
||||
try:
|
||||
# Mirror the repo
|
||||
self.run_function(
|
||||
'git.clone', [mirror_dir], url=repo_url, opts='--mirror')
|
||||
# Mirror the repo
|
||||
self.run_function(
|
||||
'git.clone', [mirror_dir], url=repo_url, opts='--mirror')
|
||||
|
||||
# Make sure the directory for the mirror now exists
|
||||
self.assertTrue(os.path.exists(mirror_dir))
|
||||
# Make sure the directory for the mirror now exists
|
||||
self.assertTrue(os.path.exists(mirror_dir))
|
||||
|
||||
# Clone the mirror twice, once to the admin location and once to
|
||||
# the clone_dir
|
||||
ret = self.run_state('git.latest', name=mirror_url, target=admin_dir)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
ret = self.run_state('git.latest', name=mirror_url, target=clone_dir)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
# Clone the mirror twice, once to the admin location and once to
|
||||
# the clone_dir
|
||||
ret = self.run_state('git.latest', name=mirror_url, target=admin_dir)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
ret = self.run_state('git.latest', name=mirror_url, target=clone_dir)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
# Make a change to the repo by editing the file in the admin copy
|
||||
# of the repo and committing.
|
||||
head_pre = _head(admin_dir)
|
||||
with salt.utils.files.fopen(os.path.join(admin_dir, 'LICENSE'), 'a') as fp_:
|
||||
fp_.write('Hello world!')
|
||||
self.run_function(
|
||||
'git.commit', [admin_dir, 'added a line'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
opts='-a',
|
||||
)
|
||||
# Make sure HEAD is pointing to a new SHA so we know we properly
|
||||
# committed our change.
|
||||
head_post = _head(admin_dir)
|
||||
self.assertNotEqual(head_pre, head_post)
|
||||
# Make a change to the repo by editing the file in the admin copy
|
||||
# of the repo and committing.
|
||||
head_pre = _head(admin_dir)
|
||||
with salt.utils.files.fopen(os.path.join(admin_dir, 'LICENSE'), 'a') as fp_:
|
||||
fp_.write('Hello world!')
|
||||
self.run_function(
|
||||
'git.commit', [admin_dir, 'added a line'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
opts='-a',
|
||||
)
|
||||
# Make sure HEAD is pointing to a new SHA so we know we properly
|
||||
# committed our change.
|
||||
head_post = _head(admin_dir)
|
||||
self.assertNotEqual(head_pre, head_post)
|
||||
|
||||
# Push the change to the mirror
|
||||
# NOTE: the test will fail if the salt-test-repo's default branch
|
||||
# is changed.
|
||||
self.run_function('git.push', [admin_dir, 'origin', 'develop'])
|
||||
# Push the change to the mirror
|
||||
# NOTE: the test will fail if the salt-test-repo's default branch
|
||||
# is changed.
|
||||
self.run_function('git.push', [admin_dir, 'origin', 'develop'])
|
||||
|
||||
# Re-run the git.latest state on the clone_dir
|
||||
ret = self.run_state('git.latest', name=mirror_url, target=clone_dir)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
# Re-run the git.latest state on the clone_dir
|
||||
ret = self.run_state('git.latest', name=mirror_url, target=clone_dir)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
# Make sure that the clone_dir now has the correct SHA
|
||||
self.assertEqual(head_post, _head(clone_dir))
|
||||
# Make sure that the clone_dir now has the correct SHA
|
||||
self.assertEqual(head_post, _head(clone_dir))
|
||||
|
||||
finally:
|
||||
for path in (mirror_dir, admin_dir, clone_dir):
|
||||
shutil.rmtree(path, ignore_errors=True)
|
||||
|
||||
def _changed_local_branch_helper(self, rev, hint):
|
||||
@with_tempdir(create=False)
|
||||
def _changed_local_branch_helper(self, target, rev, hint):
|
||||
'''
|
||||
We're testing two almost identical cases, the only thing that differs
|
||||
is the rev used for the git.latest state.
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
# Clone repo
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev=rev,
|
||||
target=name
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
# Clone repo
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev=rev,
|
||||
target=target
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
# Check out a new branch in the clone and make a commit, to ensure
|
||||
# that when we re-run the state, it is not a fast-forward change
|
||||
self.run_function('git.checkout', [name, 'new_branch'], opts='-b')
|
||||
with salt.utils.files.fopen(os.path.join(name, 'foo'), 'w'):
|
||||
pass
|
||||
self.run_function('git.add', [name, '.'])
|
||||
self.run_function(
|
||||
'git.commit', [name, 'add file'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
)
|
||||
# Check out a new branch in the clone and make a commit, to ensure
|
||||
# that when we re-run the state, it is not a fast-forward change
|
||||
self.run_function('git.checkout', [target, 'new_branch'], opts='-b')
|
||||
with salt.utils.files.fopen(os.path.join(target, 'foo'), 'w'):
|
||||
pass
|
||||
self.run_function('git.add', [target, '.'])
|
||||
self.run_function(
|
||||
'git.commit', [target, 'add file'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
)
|
||||
|
||||
# Re-run the state, this should fail with a specific hint in the
|
||||
# comment field.
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev=rev,
|
||||
target=name
|
||||
)
|
||||
self.assertSaltFalseReturn(ret)
|
||||
# Re-run the state, this should fail with a specific hint in the
|
||||
# comment field.
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev=rev,
|
||||
target=target
|
||||
)
|
||||
self.assertSaltFalseReturn(ret)
|
||||
|
||||
comment = ret[next(iter(ret))]['comment']
|
||||
self.assertTrue(hint in comment)
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
comment = ret[next(iter(ret))]['comment']
|
||||
self.assertTrue(hint in comment)
|
||||
|
||||
@uses_git_opts
|
||||
def test_latest_changed_local_branch_rev_head(self):
|
||||
@ -376,7 +342,7 @@ class GitTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
This test will fail if the default branch for the salt-test-repo is
|
||||
ever changed.
|
||||
'''
|
||||
self._changed_local_branch_helper(
|
||||
self._changed_local_branch_helper( # pylint: disable=no-value-for-parameter
|
||||
'HEAD',
|
||||
'The default remote branch (develop) differs from the local '
|
||||
'branch (new_branch)'
|
||||
@ -388,162 +354,136 @@ class GitTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
Test for presence of hint in failure message when the local branch has
|
||||
been changed and a non-HEAD rev is specified
|
||||
'''
|
||||
self._changed_local_branch_helper(
|
||||
self._changed_local_branch_helper( # pylint: disable=no-value-for-parameter
|
||||
'develop',
|
||||
'The desired rev (develop) differs from the name of the local '
|
||||
'branch (new_branch)'
|
||||
)
|
||||
|
||||
@uses_git_opts
|
||||
def test_latest_updated_remote_rev(self):
|
||||
@with_tempdir(create=False)
|
||||
@with_tempdir()
|
||||
def test_latest_updated_remote_rev(self, name, target):
|
||||
'''
|
||||
Ensure that we don't exit early when checking for a fast-forward
|
||||
'''
|
||||
name = tempfile.mkdtemp(dir=TMP)
|
||||
target = os.path.join(TMP, 'test_latest_updated_remote_rev')
|
||||
|
||||
# Initialize a new git repository
|
||||
self.run_function('git.init', [name])
|
||||
|
||||
try:
|
||||
# Add and commit a file
|
||||
with salt.utils.files.fopen(os.path.join(name, 'foo.txt'), 'w') as fp_:
|
||||
fp_.write('Hello world\n')
|
||||
self.run_function('git.add', [name, '.'])
|
||||
self.run_function(
|
||||
'git.commit', [name, 'initial commit'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
)
|
||||
# Add and commit a file
|
||||
with salt.utils.files.fopen(os.path.join(name, 'foo.txt'), 'w') as fp_:
|
||||
fp_.write('Hello world\n')
|
||||
self.run_function('git.add', [name, '.'])
|
||||
self.run_function(
|
||||
'git.commit', [name, 'initial commit'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
)
|
||||
|
||||
# Run the state to clone the repo we just created
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name=name,
|
||||
target=target,
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
# Run the state to clone the repo we just created
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name=name,
|
||||
target=target,
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
# Add another commit
|
||||
with salt.utils.files.fopen(os.path.join(name, 'foo.txt'), 'w') as fp_:
|
||||
fp_.write('Added a line\n')
|
||||
self.run_function(
|
||||
'git.commit', [name, 'added a line'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
opts='-a',
|
||||
)
|
||||
# Add another commit
|
||||
with salt.utils.files.fopen(os.path.join(name, 'foo.txt'), 'w') as fp_:
|
||||
fp_.write('Added a line\n')
|
||||
self.run_function(
|
||||
'git.commit', [name, 'added a line'],
|
||||
git_opts='-c user.name="Foo Bar" -c user.email=foo@bar.com',
|
||||
opts='-a',
|
||||
)
|
||||
|
||||
# Run the state again. It should pass, if it doesn't then there was
|
||||
# a problem checking whether or not the change is a fast-forward.
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name=name,
|
||||
target=target,
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
finally:
|
||||
for path in (name, target):
|
||||
try:
|
||||
shutil.rmtree(path)
|
||||
except OSError as exc:
|
||||
if exc.errno != errno.ENOENT:
|
||||
raise exc
|
||||
# Run the state again. It should pass; if it doesn't, then there was
|
||||
# a problem checking whether or not the change is a fast-forward.
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name=name,
|
||||
target=target,
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
|
||||
def test_latest_depth(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_latest_depth(self, target):
|
||||
'''
|
||||
Test running git.latest state using the "depth" argument to limit the
|
||||
history. See #45394.
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='HEAD',
|
||||
target=name,
|
||||
depth=1
|
||||
)
|
||||
# HEAD is not a branch, this should fail
|
||||
self.assertSaltFalseReturn(ret)
|
||||
self.assertIn(
|
||||
'must be set to the name of a branch',
|
||||
ret[next(iter(ret))]['comment']
|
||||
)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='HEAD',
|
||||
target=target,
|
||||
depth=1
|
||||
)
|
||||
# HEAD is not a branch, this should fail
|
||||
self.assertSaltFalseReturn(ret)
|
||||
self.assertIn(
|
||||
'must be set to the name of a branch',
|
||||
ret[next(iter(ret))]['comment']
|
||||
)
|
||||
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='non-default-branch',
|
||||
target=name,
|
||||
depth=1
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.latest',
|
||||
name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
|
||||
rev='non-default-branch',
|
||||
target=target,
|
||||
depth=1
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isdir(os.path.join(target, '.git')))
|
||||
|
||||
def test_present(self):
|
||||
@with_tempdir(create=False)
|
||||
def test_present(self, name):
|
||||
'''
|
||||
git.present
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.present',
|
||||
name=name,
|
||||
bare=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.present',
|
||||
name=name,
|
||||
bare=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
|
||||
|
||||
def test_present_failure(self):
|
||||
@with_tempdir()
|
||||
def test_present_failure(self, name):
|
||||
'''
|
||||
git.present
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
if not os.path.isdir(name):
|
||||
os.mkdir(name)
|
||||
try:
|
||||
fname = os.path.join(name, 'stoptheprocess')
|
||||
fname = os.path.join(name, 'stoptheprocess')
|
||||
|
||||
with salt.utils.files.fopen(fname, 'a'):
|
||||
pass
|
||||
with salt.utils.files.fopen(fname, 'a'):
|
||||
pass
|
||||
|
||||
ret = self.run_state(
|
||||
'git.present',
|
||||
name=name,
|
||||
bare=True
|
||||
)
|
||||
self.assertSaltFalseReturn(ret)
|
||||
self.assertFalse(os.path.isfile(os.path.join(name, 'HEAD')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.present',
|
||||
name=name,
|
||||
bare=True
|
||||
)
|
||||
self.assertSaltFalseReturn(ret)
|
||||
self.assertFalse(os.path.isfile(os.path.join(name, 'HEAD')))
|
||||
|
||||
def test_present_empty_dir(self):
|
||||
@with_tempdir()
|
||||
def test_present_empty_dir(self, name):
|
||||
'''
|
||||
git.present
|
||||
'''
|
||||
name = os.path.join(TMP, 'salt_repo')
|
||||
if not os.path.isdir(name):
|
||||
os.mkdir(name)
|
||||
try:
|
||||
ret = self.run_state(
|
||||
'git.present',
|
||||
name=name,
|
||||
bare=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
|
||||
finally:
|
||||
shutil.rmtree(name, ignore_errors=True)
|
||||
ret = self.run_state(
|
||||
'git.present',
|
||||
name=name,
|
||||
bare=True
|
||||
)
|
||||
self.assertSaltTrueReturn(ret)
|
||||
self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
|
||||
|
||||
def test_config_set_value_with_space_character(self):
|
||||
@with_tempdir()
|
||||
def test_config_set_value_with_space_character(self, name):
|
||||
'''
|
||||
git.config
|
||||
'''
|
||||
name = tempfile.mkdtemp(dir=TMP)
|
||||
self.addCleanup(shutil.rmtree, name, ignore_errors=True)
|
||||
self.run_function('git.init', [name])
|
||||
|
||||
ret = self.run_state(
|
||||
|
262
tests/integration/states/test_reg.py
Normal file
@ -0,0 +1,262 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Tests for the Reg State
|
||||
'''
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import logging
|
||||
|
||||
# Import Salt Testing libs
|
||||
from tests.support.case import ModuleCase
|
||||
from tests.support.mixins import SaltReturnAssertsMixin
|
||||
from tests.support.unit import skipIf
|
||||
from tests.support.helpers import destructiveTest, generate_random_name
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.platform
|
||||
import salt.utils.win_reg as reg
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
__testcontext__ = {}
|
||||
|
||||
UNICODE_VALUE_NAME = 'Unicode Key \N{TRADE MARK SIGN}'
|
||||
UNICODE_VALUE = 'Unicode Value ' \
|
||||
'\N{COPYRIGHT SIGN},\N{TRADE MARK SIGN},\N{REGISTERED SIGN}'
|
||||
FAKE_KEY = 'SOFTWARE\\{0}'.format(generate_random_name('SaltTesting-'))
|
||||
|
||||
|
||||
@destructiveTest
|
||||
@skipIf(not salt.utils.platform.is_windows(), 'Windows Specific Test')
|
||||
class RegTest(ModuleCase, SaltReturnAssertsMixin):
|
||||
'''
|
||||
Reg state module tests
|
||||
These tests are destructive as they modify the registry
|
||||
'''
|
||||
def tearDown(self):
|
||||
reg.delete_key_recursive(hive='HKLM',
|
||||
key=FAKE_KEY)
|
||||
reg.delete_key_recursive(hive='HKLM',
|
||||
key=FAKE_KEY,
|
||||
use_32bit_registry=True)
|
||||
|
||||
def test_present_reg_sz(self):
|
||||
'''
|
||||
Testing reg.present with REG_SZ
|
||||
'''
|
||||
log.debug('Testing reg.present with REG_SZ')
|
||||
# default type is 'REG_SZ'
|
||||
# Does the state return the correct data
|
||||
ret = self.run_state('reg.present',
|
||||
name='HKLM\\{0}'.format(FAKE_KEY),
|
||||
vname='test_reg_sz',
|
||||
vdata='fake string data')
|
||||
expected = {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Entry': 'test_reg_sz',
|
||||
'Key': 'HKLM\\{0}'.format(FAKE_KEY),
|
||||
'Value': 'fake string data'}}}
|
||||
self.assertSaltStateChangesEqual(ret, expected)
|
||||
|
||||
# Is the value actually set
|
||||
ret = reg.read_value(hive='HKLM', key=FAKE_KEY, vname='test_reg_sz')
|
||||
expected = {
|
||||
'vtype': 'REG_SZ',
|
||||
'vname': 'test_reg_sz',
|
||||
'success': True,
|
||||
'hive': 'HKLM',
|
||||
'vdata': 'fake string data',
|
||||
'key': FAKE_KEY}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_present_reg_sz_unicode_value(self):
|
||||
'''
|
||||
Testing reg.present with REG_SZ and a unicode value
|
||||
'''
|
||||
log.debug('Testing reg.present with REG_SZ and a unicode value')
|
||||
# default type is 'REG_SZ'
|
||||
# Does the state return the correct data
|
||||
ret = self.run_state('reg.present',
|
||||
name='HKLM\\{0}'.format(FAKE_KEY),
|
||||
vname='test_reg_sz',
|
||||
vdata=UNICODE_VALUE)
|
||||
expected = {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Entry': 'test_reg_sz',
|
||||
'Key': 'HKLM\\{0}'.format(FAKE_KEY),
|
||||
'Value': UNICODE_VALUE}}}
|
||||
self.assertSaltStateChangesEqual(ret, expected)
|
||||
|
||||
# Is the value actually set
|
||||
ret = reg.read_value(hive='HKLM', key=FAKE_KEY, vname='test_reg_sz')
|
||||
expected = {
|
||||
'vtype': 'REG_SZ',
|
||||
'vname': 'test_reg_sz',
|
||||
'success': True,
|
||||
'hive': 'HKLM',
|
||||
'vdata': UNICODE_VALUE,
|
||||
'key': FAKE_KEY}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_present_reg_sz_unicode_default_value(self):
|
||||
'''
|
||||
Testing reg.present with REG_SZ and a unicode default value
|
||||
'''
|
||||
log.debug('Testing reg.present with REG_SZ and a unicode default value')
|
||||
# default type is 'REG_SZ'
|
||||
# Does the state return the correct data
|
||||
ret = self.run_state('reg.present',
|
||||
name='HKLM\\{0}'.format(FAKE_KEY),
|
||||
vdata=UNICODE_VALUE)
|
||||
expected = {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Entry': '(Default)',
|
||||
'Key': 'HKLM\\{0}'.format(FAKE_KEY),
|
||||
'Value': UNICODE_VALUE}}}
|
||||
self.assertSaltStateChangesEqual(ret, expected)
|
||||
|
||||
# Is the value actually set
|
||||
ret = reg.read_value(hive='HKLM', key=FAKE_KEY)
|
||||
|
||||
expected = {
|
||||
'vtype': 'REG_SZ',
|
||||
'vname': '(Default)',
|
||||
'success': True,
|
||||
'hive': 'HKLM',
|
||||
'vdata': UNICODE_VALUE,
|
||||
'key': FAKE_KEY}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_present_reg_sz_unicode_value_name(self):
|
||||
'''
|
||||
Testing reg.present with REG_SZ and a unicode value name
|
||||
'''
|
||||
log.debug('Testing reg.present with REG_SZ and a unicode value name')
|
||||
# default type is 'REG_SZ'
|
||||
# Does the state return the correct data
|
||||
ret = self.run_state('reg.present',
|
||||
name='HKLM\\{0}'.format(FAKE_KEY),
|
||||
vname=UNICODE_VALUE_NAME,
|
||||
vdata='fake string data')
|
||||
expected = {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Entry': UNICODE_VALUE_NAME,
|
||||
'Key': 'HKLM\\{0}'.format(FAKE_KEY),
|
||||
'Value': 'fake string data'}}}
|
||||
self.assertSaltStateChangesEqual(ret, expected)
|
||||
|
||||
# Is the value actually set
|
||||
ret = reg.read_value(hive='HKLM', key=FAKE_KEY, vname=UNICODE_VALUE_NAME)
|
||||
|
||||
expected = {
|
||||
'vtype': 'REG_SZ',
|
||||
'vname': UNICODE_VALUE_NAME,
|
||||
'success': True,
|
||||
'hive': 'HKLM',
|
||||
'vdata': 'fake string data',
|
||||
'key': FAKE_KEY}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_present_reg_binary(self):
|
||||
'''
|
||||
Testing reg.present with REG_BINARY
|
||||
'''
|
||||
test_data = 'Salty Test'
|
||||
log.debug('Testing reg.present with REG_BINARY')
|
||||
# default type is 'REG_SZ'
|
||||
# Does the state return the correct data
|
||||
ret = self.run_state('reg.present',
|
||||
name='HKLM\\{0}'.format(FAKE_KEY),
|
||||
vname='test_reg_binary',
|
||||
vtype='REG_BINARY',
|
||||
vdata=test_data)
|
||||
expected = {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Entry': 'test_reg_binary',
|
||||
'Key': 'HKLM\\{0}'.format(FAKE_KEY),
|
||||
'Value': test_data}}}
|
||||
self.assertSaltStateChangesEqual(ret, expected)
|
||||
|
||||
# Is the value actually set
|
||||
ret = reg.read_value(hive='HKLM', key=FAKE_KEY, vname='test_reg_binary')
|
||||
expected = {
|
||||
'vtype': 'REG_BINARY',
|
||||
'vname': 'test_reg_binary',
|
||||
'success': True,
|
||||
'hive': 'HKLM',
|
||||
'vdata': test_data.encode('utf-8'),
|
||||
'key': FAKE_KEY}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_present_reg_multi_sz(self):
|
||||
'''
|
||||
Testing reg.present with REG_MULTI_SZ
|
||||
'''
|
||||
log.debug('Testing reg.present with REG_MULTI_SZ')
|
||||
# default type is 'REG_SZ'
|
||||
# Does the state return the correct data
|
||||
ret = self.run_state('reg.present',
|
||||
name='HKLM\\{0}'.format(FAKE_KEY),
|
||||
vname='test_reg_multi_sz',
|
||||
vtype='REG_MULTI_SZ',
|
||||
vdata=['item1', 'item2'])
|
||||
expected = {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Entry': 'test_reg_multi_sz',
|
||||
'Key': 'HKLM\\{0}'.format(FAKE_KEY),
|
||||
'Value': ['item1', 'item2']}}}
|
||||
self.assertSaltStateChangesEqual(ret, expected)
|
||||
|
||||
# Is the value actually set
|
||||
ret = reg.read_value(hive='HKLM',
|
||||
key=FAKE_KEY,
|
||||
vname='test_reg_multi_sz')
|
||||
expected = {
|
||||
'vtype': 'REG_MULTI_SZ',
|
||||
'vname': 'test_reg_multi_sz',
|
||||
'success': True,
|
||||
'hive': 'HKLM',
|
||||
'vdata': ['item1', 'item2'],
|
||||
'key': FAKE_KEY}
|
||||
self.assertEqual(ret, expected)
|
||||
|
||||
def test_present_32_bit(self):
|
||||
'''
|
||||
Testing reg.present with REG_SZ using 32bit registry
|
||||
'''
|
||||
log.debug('Testing reg.present with REG_SZ using 32bit registry')
|
||||
# default type is 'REG_SZ'
|
||||
# Does the state return the correct data
|
||||
ret = self.run_state('reg.present',
|
||||
name='HKLM\\{0}'.format(FAKE_KEY),
|
||||
vname='test_reg_sz',
|
||||
vdata='fake string data',
|
||||
use_32bit_registry=True)
|
||||
|
||||
expected = {
|
||||
'reg': {
|
||||
'Added': {
|
||||
'Entry': 'test_reg_sz',
|
||||
'Key': 'HKLM\\{0}'.format(FAKE_KEY),
|
||||
'Value': 'fake string data'}}}
|
||||
self.assertSaltStateChangesEqual(ret, expected)
|
||||
|
||||
# Is the value actually set
|
||||
ret = reg.read_value(hive='HKLM',
|
||||
key=FAKE_KEY,
|
||||
vname='test_reg_sz',
|
||||
use_32bit_registry=True)
|
||||
expected = {
|
||||
'vtype': 'REG_SZ',
|
||||
'vname': 'test_reg_sz',
|
||||
'success': True,
|
||||
'hive': 'HKLM',
|
||||
'vdata': 'fake string data',
|
||||
'key': FAKE_KEY}
|
||||
self.assertEqual(ret, expected)
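Every test in this new file follows the same two-step pattern: run the reg.present state and compare the returned changes, then read the value straight back out of the hive with salt.utils.win_reg.read_value to confirm it was really written. A minimal sketch of that pattern, built only from calls that appear above; the key and value names are hypothetical:

    ret = self.run_state('reg.present',
                         name='HKLM\\SOFTWARE\\SomeTestKey',  # hypothetical key
                         vname='example',
                         vdata='some data')
    self.assertSaltStateChangesEqual(ret, {'reg': {'Added': {'Entry': 'example',
                                                             'Key': 'HKLM\\SOFTWARE\\SomeTestKey',
                                                             'Value': 'some data'}}})
    verified = reg.read_value(hive='HKLM', key='SOFTWARE\\SomeTestKey', vname='example')
    self.assertEqual(verified['vdata'], 'some data')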
|
@ -10,6 +10,7 @@ from __future__ import absolute_import, print_function
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import warnings
|
||||
|
||||
TESTS_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
|
||||
if os.name == 'nt':
|
||||
@ -517,20 +518,26 @@ class SaltTestsuiteParser(SaltCoverageTestingParser):
|
||||
is_admin = True
|
||||
else:
|
||||
is_admin = salt.utils.win_functions.is_admin(current_user)
|
||||
if self.options.coverage and any((
|
||||
self.options.name,
|
||||
not is_admin,
|
||||
not self.options.run_destructive)) \
|
||||
and self._check_enabled_suites(include_unit=True):
|
||||
warnings.warn("Test suite not running with elevated priviledges")
|
||||
else:
|
||||
is_admin = os.geteuid() == 0
|
||||
|
||||
if self.options.coverage and any((
|
||||
self.options.name,
|
||||
not is_admin,
|
||||
not self.options.run_destructive)) \
|
||||
and self._check_enabled_suites(include_unit=True):
|
||||
self.error(
|
||||
'No sense in generating the tests coverage report when '
|
||||
'not running the full test suite, including the '
|
||||
'destructive tests, as \'root\'. It would only produce '
|
||||
'incorrect results.'
|
||||
)
|
||||
if self.options.coverage and any((
|
||||
self.options.name,
|
||||
not is_admin,
|
||||
not self.options.run_destructive)) \
|
||||
and self._check_enabled_suites(include_unit=True):
|
||||
self.error(
|
||||
'No sense in generating the tests coverage report when '
|
||||
'not running the full test suite, including the '
|
||||
'destructive tests, as \'root\'. It would only produce '
|
||||
'incorrect results.'
|
||||
)
|
||||
|
||||
# When no tests are specifically enumerated on the command line, setup
|
||||
# a default run: +unit -cloud_provider
|
||||
|
@ -122,7 +122,7 @@ class ShellTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
|
||||
self.assertIn('minion', data)
|
||||
'''
|
||||
arg_str = '-c {0} {1}'.format(self.get_config_dir(), arg_str)
|
||||
return self.run_script('salt', arg_str, with_retcode=with_retcode, catch_stderr=catch_stderr)
|
||||
return self.run_script('salt', arg_str, with_retcode=with_retcode, catch_stderr=catch_stderr, timeout=timeout)
|
||||
|
||||
def run_ssh(self, arg_str, with_retcode=False, timeout=25,
|
||||
catch_stderr=False, wipe=False, raw=False):
|
||||
|
@ -12,7 +12,7 @@
|
||||
# pylint: disable=repr-flag-used-in-string,wrong-import-order
|
||||
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import base64
|
||||
import errno
|
||||
import functools
|
||||
@ -20,6 +20,7 @@ import inspect
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import shutil
|
||||
import signal
|
||||
import socket
|
||||
import string
|
||||
@ -53,6 +54,9 @@ from tests.support.unit import skip, _id
|
||||
from tests.support.mock import patch
|
||||
from tests.support.paths import FILES, TMP
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.files
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -955,22 +959,61 @@ def with_system_user_and_group(username, group,
|
||||
return decorator
|
||||
|
||||
|
||||
def with_tempfile(func):
|
||||
'''
|
||||
Generates a tempfile and cleans it up when the test completes.
|
||||
'''
|
||||
@functools.wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
fd_, name = tempfile.mkstemp(prefix='__salt.test.', dir=TMP)
|
||||
os.close(fd_)
|
||||
del fd_
|
||||
ret = func(self, name, *args, **kwargs)
|
||||
try:
|
||||
class WithTempfile(object):
|
||||
def __init__(self, **kwargs):
|
||||
self.create = kwargs.pop('create', True)
|
||||
if 'dir' not in kwargs:
|
||||
kwargs['dir'] = TMP
|
||||
if 'prefix' not in kwargs:
|
||||
kwargs['prefix'] = '__salt.test.'
|
||||
self.kwargs = kwargs
|
||||
|
||||
def __call__(self, func):
|
||||
self.func = func
|
||||
return functools.wraps(func)(
|
||||
lambda testcase, *args, **kwargs: self.wrap(testcase, *args, **kwargs) # pylint: disable=W0108
|
||||
)
|
||||
|
||||
def wrap(self, testcase, *args, **kwargs):
|
||||
name = salt.utils.files.mkstemp(**self.kwargs)
|
||||
if not self.create:
|
||||
os.remove(name)
|
||||
except Exception:
|
||||
pass
|
||||
return ret
|
||||
return wrapper
|
||||
try:
|
||||
return self.func(testcase, name, *args, **kwargs)
|
||||
finally:
|
||||
try:
|
||||
os.remove(name)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
with_tempfile = WithTempfile
|
||||
|
||||
|
||||
class WithTempdir(object):
|
||||
def __init__(self, **kwargs):
|
||||
self.create = kwargs.pop('create', True)
|
||||
if 'dir' not in kwargs:
|
||||
kwargs['dir'] = TMP
|
||||
self.kwargs = kwargs
|
||||
|
||||
def __call__(self, func):
|
||||
self.func = func
|
||||
return functools.wraps(func)(
|
||||
lambda testcase, *args, **kwargs: self.wrap(testcase, *args, **kwargs) # pylint: disable=W0108
|
||||
)
|
||||
|
||||
def wrap(self, testcase, *args, **kwargs):
|
||||
tempdir = tempfile.mkdtemp(**self.kwargs)
|
||||
if not self.create:
|
||||
os.rmdir(tempdir)
|
||||
try:
|
||||
return self.func(testcase, tempdir, *args, **kwargs)
|
||||
finally:
|
||||
shutil.rmtree(tempdir, ignore_errors=True)
|
||||
|
||||
|
||||
with_tempdir = WithTempdir
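These class-based WithTempfile/WithTempdir helpers replace the old function-based with_tempfile decorator shown above: they accept keyword arguments such as create=False, pass the generated path to the wrapped test as an extra positional argument, and clean the path up when the test finishes. A minimal sketch of how a test consumes them, assuming they are imported from tests.support.helpers as elsewhere in this diff; the test class and assertions are illustrative only:

    import os

    from tests.support.helpers import with_tempdir, with_tempfile
    from tests.support.unit import TestCase


    class TempPathExample(TestCase):
        @with_tempdir()                # outermost decorator supplies the second argument
        @with_tempfile(create=False)   # innermost decorator supplies the first argument
        def test_paths(self, file_path, dir_path):
            # create=False removed the file again, so only a free path is handed in
            self.assertFalse(os.path.exists(file_path))
            # the directory exists for the duration of the test and is removed afterwards
            self.assertTrue(os.path.isdir(dir_path))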
def requires_system_grains(func):
|
||||
|
File diff suppressed because it is too large
@ -62,6 +62,14 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
|
||||
cls.test_symlink_list_file_roots = {'base': [root_dir]}
|
||||
else:
|
||||
cls.test_symlink_list_file_roots = None
|
||||
cls.tmp_dir = tempfile.mkdtemp(dir=TMP)
|
||||
full_path_to_file = os.path.join(FILES, 'file', 'base', 'testfile')
|
||||
with salt.utils.files.fopen(full_path_to_file, 'rb') as s_fp:
|
||||
with salt.utils.files.fopen(os.path.join(cls.tmp_dir, 'testfile'), 'wb') as d_fp:
|
||||
for line in s_fp:
|
||||
d_fp.write(
|
||||
line.rstrip(b'\n').rstrip(b'\r') + os.linesep.encode('utf-8')
|
||||
)
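# Note on the copy above (illustrative, not part of the diff): each fixture line is
# rewritten with the platform's native newline so that the serve_file/file_hash
# assertions below compare against bytes that match what the fileserver produces
# on the current OS. For example:
#     line = b'Hello world\r\n'
#     line.rstrip(b'\n').rstrip(b'\r') + os.linesep.encode('utf-8')
#     # -> b'Hello world\r\n' on Windows, b'Hello world\n' on POSIX platforms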
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
@ -73,6 +81,7 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
|
||||
salt.utils.files.rm_rf(cls.test_symlink_list_file_roots['base'][0])
|
||||
except OSError:
|
||||
pass
|
||||
salt.utils.files.rm_rf(cls.tmp_dir)
|
||||
|
||||
def tearDown(self):
|
||||
del self.opts
|
||||
@ -92,10 +101,10 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
|
||||
def test_serve_file(self):
|
||||
with patch.dict(roots.__opts__, {'file_buffer_size': 262144}):
|
||||
load = {'saltenv': 'base',
|
||||
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
'path': os.path.join(self.tmp_dir, 'testfile'),
|
||||
'loc': 0
|
||||
}
|
||||
fnd = {'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
fnd = {'path': os.path.join(self.tmp_dir, 'testfile'),
|
||||
'rel': 'testfile'}
|
||||
ret = roots.serve_file(load, fnd)
|
||||
|
||||
@ -144,10 +153,10 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
|
||||
def test_file_hash(self):
|
||||
load = {
|
||||
'saltenv': 'base',
|
||||
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
'path': os.path.join(self.tmp_dir, 'testfile'),
|
||||
}
|
||||
fnd = {
|
||||
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
|
||||
'path': os.path.join(self.tmp_dir, 'testfile'),
|
||||
'rel': 'testfile'
|
||||
}
|
||||
ret = roots.file_hash(load, fnd)
|
||||
|
@ -138,11 +138,12 @@ class PipTestCase(TestCase, LoaderModuleMockMixin):
|
||||
expected = ['pip', 'install', '--use-mirrors']
|
||||
for item in mirrors:
|
||||
expected.extend(['--mirrors', item])
|
||||
expected.append('pep8')
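# Illustrative note (not part of the diff): after this loop the expected command is
#     ['pip', 'install', '--use-mirrors',
#      '--mirrors', mirrors[0], '--mirrors', mirrors[1], ..., 'pep8']
# i.e. each mirror gets its own '--mirrors' flag, and the explicit 'pep8' package
# matches the pkgs=['pep8'] argument the updated calls below now pass.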
|
||||
|
||||
# Passing mirrors as a list
|
||||
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
|
||||
with patch.dict(pip.__salt__, {'cmd.run_all': mock}):
|
||||
pip.install(mirrors=mirrors)
|
||||
pip.install(pkgs=['pep8'], mirrors=mirrors)
|
||||
mock.assert_called_once_with(
|
||||
expected,
|
||||
saltenv='base',
|
||||
@ -154,7 +155,7 @@ class PipTestCase(TestCase, LoaderModuleMockMixin):
|
||||
# Passing mirrors as a comma separated list
|
||||
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
|
||||
with patch.dict(pip.__salt__, {'cmd.run_all': mock}):
|
||||
pip.install(mirrors=','.join(mirrors))
|
||||
pip.install(pkgs=['pep8'], mirrors=','.join(mirrors))
|
||||
mock.assert_called_once_with(
|
||||
expected,
|
||||
saltenv='base',
|
||||
@ -166,9 +167,9 @@ class PipTestCase(TestCase, LoaderModuleMockMixin):
|
||||
# As single string (just use the first element from mirrors)
|
||||
mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
|
||||
with patch.dict(pip.__salt__, {'cmd.run_all': mock}):
|
||||
pip.install(mirrors=mirrors[0])
|
||||
pip.install(pkgs=['pep8'], mirrors=mirrors[0])
|
||||
mock.assert_called_once_with(
|
||||
['pip', 'install', '--use-mirrors', '--mirrors', mirrors[0]],
|
||||
['pip', 'install', '--use-mirrors', '--mirrors', mirrors[0], 'pep8'],
|
||||
saltenv='base',
|
||||
runas=None,
|
||||
use_vt=False,
|
||||
|
@ -7,9 +7,6 @@ from __future__ import absolute_import, unicode_literals, print_function
|
||||
|
||||
|
||||
# Import Python libs
|
||||
import shutil
|
||||
import tempfile
|
||||
import os
|
||||
import logging
|
||||
try:
|
||||
# We're not going to actually use OpenSSL, we just want to check that
|
||||
@ -20,8 +17,8 @@ except Exception:
|
||||
NO_PYOPENSSL = True
|
||||
|
||||
# Import Salt Testing Libs
|
||||
from tests.support.helpers import with_tempdir
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.paths import TMP
|
||||
from tests.support.unit import TestCase, skipIf
|
||||
from tests.support.mock import (
|
||||
mock_open,
|
||||
@ -309,367 +306,327 @@ class TLSAddTestCase(TestCase, LoaderModuleMockMixin):
|
||||
remove_not_in_result(ret, result)
|
||||
self.assertEqual(result, ret)
|
||||
|
||||
def test_create_ca(self):
|
||||
@with_tempdir()
|
||||
def test_create_ca(self, ca_path):
|
||||
'''
|
||||
Test creating CA cert
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/{2}_ca_cert.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
certk = '{0}/{1}/{2}_ca_cert.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
ret = 'Created Private Key: "{0}." Created CA "{1}": "{2}."'.format(
|
||||
certk, ca_name, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt, 'cmd.retcode': mock_ret}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256', 'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/{2}_ca_cert.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
certk = '{0}/{1}/{2}_ca_cert.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
ret = 'Created Private Key: "{0}." Created CA "{1}": "{2}."'.format(
|
||||
certk, ca_name, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt, 'cmd.retcode': mock_ret}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256', 'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
def test_recreate_ca(self):
|
||||
@with_tempdir()
|
||||
def test_recreate_ca(self, ca_path):
|
||||
'''
|
||||
Test creating CA cert when one already exists
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/{2}_ca_cert.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
certk = '{0}/{1}/{2}_ca_cert.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
ret = 'Created Private Key: "{0}." Created CA "{1}": "{2}."'.format(
|
||||
certk, ca_name, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
with patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)), \
|
||||
patch.dict(tls.__salt__, {'config.option': mock_opt, 'cmd.retcode': mock_ret}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256', 'cachedir': ca_path}), \
|
||||
patch.dict(_TLS_TEST_DATA['create_ca'], {'replace': True}):
|
||||
tls.create_ca(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/{2}_ca_cert.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
certk = '{0}/{1}/{2}_ca_cert.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
ca_name)
|
||||
ret = 'Created Private Key: "{0}." Created CA "{1}": "{2}."'.format(
|
||||
certk, ca_name, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
with patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)), \
|
||||
patch.dict(tls.__salt__, {'config.option': mock_opt, 'cmd.retcode': mock_ret}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256', 'cachedir': ca_path}), \
|
||||
patch.dict(_TLS_TEST_DATA['create_ca'], {'replace': True}):
|
||||
tls.create_ca(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
def test_create_csr(self):
|
||||
@with_tempdir()
|
||||
def test_create_csr(self, ca_path):
|
||||
'''
|
||||
Test creating certificate signing request
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.csr'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created CSR for "{1}": "{2}."').format(
|
||||
certk, _TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt, 'cmd.retcode': mock_ret, 'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256', 'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.csr'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created CSR for "{1}": "{2}."').format(
|
||||
certk, _TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt, 'cmd.retcode': mock_ret, 'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256', 'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
def test_recreate_csr(self):
|
||||
@with_tempdir()
|
||||
def test_recreate_csr(self, ca_path):
|
||||
'''
|
||||
Test creating certificate signing request when one already exists
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.csr'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created CSR for "{1}": "{2}."').format(
|
||||
certk, _TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch.dict(_TLS_TEST_DATA['create_ca'], {'replace': True}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.csr'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created CSR for "{1}": "{2}."').format(
|
||||
certk, _TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch.dict(_TLS_TEST_DATA['create_ca'], {'replace': True}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
def test_create_self_signed_cert(self):
|
||||
@with_tempdir()
|
||||
def test_create_self_signed_cert(self, ca_path):
|
||||
'''
|
||||
Test creating self signed certificate
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
tls_dir = 'test_tls'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created Certificate: "{1}."').format(
|
||||
certk, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
self.assertEqual(
|
||||
tls.create_self_signed_cert(
|
||||
tls_dir=tls_dir,
|
||||
days=365,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
tls_dir = 'test_tls'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created Certificate: "{1}."').format(
|
||||
certk, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
self.assertEqual(
|
||||
tls.create_self_signed_cert(
|
||||
tls_dir=tls_dir,
|
||||
days=365,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
def test_recreate_self_signed_cert(self):
|
||||
@with_tempdir()
|
||||
def test_recreate_self_signed_cert(self, ca_path):
|
||||
'''
|
||||
Test creating self signed certificate when one already exists
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
tls_dir = 'test_tls'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created Certificate: "{1}."').format(
|
||||
certk, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
self.assertEqual(
|
||||
tls.create_self_signed_cert(
|
||||
tls_dir=tls_dir,
|
||||
days=365,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
tls_dir = 'test_tls'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
tls_dir,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created Certificate: "{1}."').format(
|
||||
certk, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
self.assertEqual(
|
||||
tls.create_self_signed_cert(
|
||||
tls_dir=tls_dir,
|
||||
days=365,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
def test_create_ca_signed_cert(self):
|
||||
@with_tempdir()
|
||||
def test_create_ca_signed_cert(self, ca_path):
|
||||
'''
|
||||
Test signing certificate from request
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name, **_TLS_TEST_DATA['create_ca'])
|
||||
self.assertEqual(
|
||||
tls.create_ca_signed_cert(
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name, **_TLS_TEST_DATA['create_ca'])
|
||||
self.assertEqual(
|
||||
tls.create_ca_signed_cert(
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN']),
|
||||
ret)
|
||||
|
||||
def test_recreate_ca_signed_cert(self):
|
||||
@with_tempdir()
|
||||
def test_recreate_ca_signed_cert(self, ca_path):
|
||||
'''
|
||||
Test signing certificate from request when certificate exists
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
tls.create_ca_signed_cert(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
self.assertEqual(
|
||||
tls.create_ca_signed_cert(
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'],
|
||||
replace=True),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.crt'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
tls.create_ca_signed_cert(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
self.assertEqual(
|
||||
tls.create_ca_signed_cert(
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'],
|
||||
replace=True),
|
||||
ret)
|
||||
|
||||
def test_create_pkcs12(self):
|
||||
@with_tempdir()
|
||||
def test_create_pkcs12(self, ca_path):
|
||||
'''
|
||||
Test creating pkcs12
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.p12'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created PKCS#12 Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name, **_TLS_TEST_DATA['create_ca'])
|
||||
tls.create_ca_signed_cert(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
self.assertEqual(
|
||||
tls.create_pkcs12(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'],
|
||||
'password'),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.p12'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created PKCS#12 Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name, **_TLS_TEST_DATA['create_ca'])
|
||||
tls.create_ca_signed_cert(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
self.assertEqual(
|
||||
tls.create_pkcs12(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'],
|
||||
'password'),
|
||||
ret)
|
||||
|
||||
def test_recreate_pkcs12(self):
|
||||
@with_tempdir()
|
||||
def test_recreate_pkcs12(self, ca_path):
|
||||
'''
|
||||
Test creating pkcs12 when it already exists
|
||||
'''
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
try:
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.p12'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created PKCS#12 Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch.dict(_TLS_TEST_DATA['create_ca'], {'replace': True}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
tls.create_ca_signed_cert(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/certs/{2}.p12'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = 'Created PKCS#12 Certificate for "{0}": "{1}"'.format(
|
||||
_TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
mock_pgt = MagicMock(return_value=False)
|
||||
with patch.dict(tls.__salt__, {'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}), \
|
||||
patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}), \
|
||||
patch.dict(_TLS_TEST_DATA['create_ca'], {'replace': True}), \
|
||||
patch('salt.modules.tls.maybe_fix_ssl_version',
|
||||
MagicMock(return_value=True)):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
tls.create_ca_signed_cert(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
tls.create_pkcs12(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'],
|
||||
'password')
|
||||
self.assertEqual(
|
||||
tls.create_pkcs12(ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'],
|
||||
'password')
|
||||
self.assertEqual(
|
||||
tls.create_pkcs12(ca_name,
|
||||
_TLS_TEST_DATA[
|
||||
'create_ca']['CN'],
|
||||
'password',
|
||||
replace=True),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
_TLS_TEST_DATA[
|
||||
'create_ca']['CN'],
|
||||
'password',
|
||||
replace=True),
|
||||
ret)
|
||||
|
||||
def test_pyOpenSSL_version(self):
|
||||
'''
|
||||
@ -701,13 +658,13 @@ class TLSAddTestCase(TestCase, LoaderModuleMockMixin):
|
||||
self.assertEqual(tls.get_extensions('server'), pillarval)
|
||||
self.assertEqual(tls.get_extensions('client'), pillarval)
|
||||
|
||||
def test_pyOpenSSL_version_destructive(self):
|
||||
@with_tempdir()
|
||||
def test_pyOpenSSL_version_destructive(self, ca_path):
|
||||
'''
|
||||
Test extension logic with different pyOpenSSL versions
|
||||
'''
|
||||
pillarval = {'csr': {'extendedKeyUsage': 'serverAuth'}}
|
||||
mock_pgt = MagicMock(return_value=pillarval)
|
||||
ca_path = tempfile.mkdtemp(dir=TMP)
|
||||
ca_name = 'test_ca'
|
||||
certp = '{0}/{1}/{2}_ca_cert.crt'.format(
|
||||
ca_path,
|
||||
@ -721,108 +678,100 @@ class TLSAddTestCase(TestCase, LoaderModuleMockMixin):
|
||||
certk, ca_name, certp)
|
||||
mock_opt = MagicMock(return_value=ca_path)
|
||||
mock_ret = MagicMock(return_value=0)
|
||||
try:
|
||||
with patch.dict(tls.__salt__, {
|
||||
'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret}):
|
||||
with patch.dict(tls.__opts__, {
|
||||
'hash_type': 'sha256',
|
||||
'cachedir': ca_path}):
|
||||
with patch.dict(_TLS_TEST_DATA['create_ca'],
|
||||
{'replace': True}):
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.1.1'),
|
||||
'X509_EXT_ENABLED': False}):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.14.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.15.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
with patch.dict(tls.__salt__, {
|
||||
'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret}):
|
||||
with patch.dict(tls.__opts__, {
|
||||
'hash_type': 'sha256',
|
||||
'cachedir': ca_path}):
|
||||
with patch.dict(_TLS_TEST_DATA['create_ca'],
|
||||
{'replace': True}):
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.1.1'),
|
||||
'X509_EXT_ENABLED': False}):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.14.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.15.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
self.assertEqual(
|
||||
tls.create_ca(
|
||||
ca_name,
|
||||
days=365,
|
||||
fixmode=False,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
try:
|
||||
certp = '{0}/{1}/certs/{2}.csr'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created CSR for "{1}": "{2}."').format(
|
||||
certk, _TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
with patch.dict(tls.__salt__, {
|
||||
'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}):
|
||||
with patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}):
|
||||
with patch.dict(_TLS_TEST_DATA['create_ca'], {
|
||||
'subjectAltName': 'DNS:foo.bar',
|
||||
'replace': True}):
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.1.1'),
|
||||
'X509_EXT_ENABLED': False}):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertRaises(ValueError,
|
||||
tls.create_csr,
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca'])
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.14.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.15.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
finally:
|
||||
if os.path.isdir(ca_path):
|
||||
shutil.rmtree(ca_path)
|
||||
certp = '{0}/{1}/certs/{2}.csr'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
certk = '{0}/{1}/certs/{2}.key'.format(
|
||||
ca_path,
|
||||
ca_name,
|
||||
_TLS_TEST_DATA['create_ca']['CN'])
|
||||
ret = ('Created Private Key: "{0}." '
|
||||
'Created CSR for "{1}": "{2}."').format(
|
||||
certk, _TLS_TEST_DATA['create_ca']['CN'], certp)
|
||||
with patch.dict(tls.__salt__, {
|
||||
'config.option': mock_opt,
|
||||
'cmd.retcode': mock_ret,
|
||||
'pillar.get': mock_pgt}):
|
||||
with patch.dict(tls.__opts__, {'hash_type': 'sha256',
|
||||
'cachedir': ca_path}):
|
||||
with patch.dict(_TLS_TEST_DATA['create_ca'], {
|
||||
'subjectAltName': 'DNS:foo.bar',
|
||||
'replace': True}):
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.1.1'),
|
||||
'X509_EXT_ENABLED': False}):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertRaises(ValueError,
|
||||
tls.create_csr,
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca'])
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.14.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
with patch.dict(tls.__dict__, {
|
||||
'OpenSSL_version':
|
||||
LooseVersion('0.15.1'),
|
||||
'X509_EXT_ENABLED': True}):
|
||||
tls.create_ca(ca_name)
|
||||
tls.create_csr(ca_name)
|
||||
self.assertEqual(
|
||||
tls.create_csr(
|
||||
ca_name,
|
||||
**_TLS_TEST_DATA['create_ca']),
|
||||
ret)
|
||||
|
||||
def test_get_expiration_date(self):
|
||||
with patch('salt.utils.files.fopen',
|
||||
|
@ -140,3 +140,84 @@ class TestEventListener(AsyncTestCase):
|
||||
self.assertTrue(event_future.done())
|
||||
with self.assertRaises(saltnado.TimeoutException):
|
||||
event_future.result()
|
||||
|
||||
def test_clean_by_request(self):
|
||||
'''
|
||||
Make sure the method clean_by_request cleans up all related data in EventListener
|
||||
request_future_1 : will be timed out by clean_by_request(self)
request_future_2 : will be finished by me.fire_event ...
dummy_request_future_1 : will be finished by me.fire_event ...
dummy_request_future_2 : will be timed out by clean_by_request(dummy_request)
|
||||
'''
|
||||
class DummyRequest(object):
|
||||
'''
|
||||
Dummy request object to simulate the request object
|
||||
'''
|
||||
@property
|
||||
def _finished(self):
|
||||
'''
|
||||
Simulate _finished of the request object
|
||||
'''
|
||||
return False
|
||||
|
||||
# Inner functions cannot rebind outer primitive variables directly (no nonlocal on Python 2), so use a one-element list as a mutable counter
|
||||
cnt = [0]
|
||||
|
||||
def stop():
|
||||
'''
|
||||
To realize the scenario of this test, define a custom stop method that calls
self.stop only after two events have finished.
|
||||
'''
|
||||
cnt[0] += 1
|
||||
if cnt[0] == 2:
|
||||
self.stop()
|
||||
|
||||
with eventpublisher_process():
|
||||
me = salt.utils.event.MasterEvent(SOCK_DIR)
|
||||
event_listener = saltnado.EventListener({}, # we don't use mod_opts, don't save?
|
||||
{'sock_dir': SOCK_DIR,
|
||||
'transport': 'zeromq'})
|
||||
|
||||
self.assertEqual(0, len(event_listener.tag_map))
|
||||
self.assertEqual(0, len(event_listener.request_map))
|
||||
|
||||
self._finished = False # fit to event_listener's behavior
|
||||
dummy_request = DummyRequest()
|
||||
request_future_1 = event_listener.get_event(self, tag='evt1')
|
||||
request_future_2 = event_listener.get_event(self, tag='evt2', callback=lambda f: stop())
|
||||
dummy_request_future_1 = event_listener.get_event(dummy_request, tag='evt3', callback=lambda f: stop())
|
||||
dummy_request_future_2 = event_listener.get_event(dummy_request, timeout=10, tag='evt4')
|
||||
|
||||
self.assertEqual(4, len(event_listener.tag_map))
|
||||
self.assertEqual(2, len(event_listener.request_map))
|
||||
|
||||
me.fire_event({'data': 'foo2'}, 'evt2')
|
||||
me.fire_event({'data': 'foo3'}, 'evt3')
|
||||
self.wait()
|
||||
event_listener.clean_by_request(self)
|
||||
me.fire_event({'data': 'foo1'}, 'evt1')
|
||||
|
||||
self.assertTrue(request_future_1.done())
|
||||
with self.assertRaises(saltnado.TimeoutException):
|
||||
request_future_1.result()
|
||||
|
||||
self.assertTrue(request_future_2.done())
|
||||
self.assertEqual(request_future_2.result()['tag'], 'evt2')
|
||||
self.assertEqual(request_future_2.result()['data']['data'], 'foo2')
|
||||
|
||||
self.assertTrue(dummy_request_future_1.done())
|
||||
self.assertEqual(dummy_request_future_1.result()['tag'], 'evt3')
|
||||
self.assertEqual(dummy_request_future_1.result()['data']['data'], 'foo3')
|
||||
|
||||
self.assertFalse(dummy_request_future_2.done())
|
||||
|
||||
self.assertEqual(2, len(event_listener.tag_map))
|
||||
self.assertEqual(1, len(event_listener.request_map))
|
||||
|
||||
event_listener.clean_by_request(dummy_request)
|
||||
|
||||
with self.assertRaises(saltnado.TimeoutException):
|
||||
dummy_request_future_2.result()
|
||||
|
||||
self.assertEqual(0, len(event_listener.tag_map))
|
||||
self.assertEqual(0, len(event_listener.request_map))
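Taken together, the assertions above pin down what clean_by_request is expected to do: every still-pending future registered for the given request is resolved with a TimeoutException and removed from tag_map and request_map, while futures belonging to other requests are untouched. A minimal sketch of that behaviour using only the calls exercised in this test; the tag name is illustrative:

    future = event_listener.get_event(dummy_request, tag='some/tag')
    event_listener.clean_by_request(dummy_request)
    self.assertTrue(future.done())
    with self.assertRaises(saltnado.TimeoutException):
        future.result()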
|
||||
|
@ -75,7 +75,7 @@ class VirtualenvModTestCase(TestCase, LoaderModuleMockMixin):

with patch.dict(virtualenv_mod.__opts__, {"test": False}):
ret.update({'comment': "The 'use_wheel' option is"
" only supported in pip 1.4 and newer."
" only supported in pip between 1.4 and 9.0.3."
" The version of pip detected was 1.1.",
'result': False})
self.assertDictEqual(virtualenv_mod.managed('salt',
@ -20,7 +20,6 @@ from tests.support.paths import TMP_CONF_DIR
# Import Salt libs
import salt.config
import salt.loader
import salt.utils.yaml
from salt.exceptions import SaltRenderError

from salt.ext import six
@ -40,6 +39,7 @@ from salt.utils.templates import JINJA, render_jinja_tmpl
import salt.utils.dateutils # pylint: disable=unused-import
import salt.utils.files
import salt.utils.stringutils
import salt.utils.yaml

# Import 3rd party libs
try:
@ -49,6 +49,7 @@ except ImportError:
HAS_TIMELIB = False

TEMPLATES_DIR = os.path.dirname(os.path.abspath(__file__))
BLINESEP = salt.utils.stringutils.to_bytes(os.linesep)


class MockFileClient(object):
@ -69,18 +70,48 @@ class MockFileClient(object):
})


def _setup_test_dir(src_dir, test_dir):
os.makedirs(test_dir)
salt.utils.files.recursive_copy(src_dir, test_dir)
filename = os.path.join(test_dir, 'non_ascii')
with salt.utils.files.fopen(filename, 'wb') as fp:
fp.write(b'Assun\xc3\xa7\xc3\xa3o' + BLINESEP)
filename = os.path.join(test_dir, 'hello_simple')
with salt.utils.files.fopen(filename, 'wb') as fp:
fp.write(b'world' + BLINESEP)
filename = os.path.join(test_dir, 'hello_import')
lines = [
r"{% from 'macro' import mymacro -%}",
r"{% from 'macro' import mymacro -%}",
r"{{ mymacro('Hey') ~ mymacro(a|default('a'), b|default('b')) }}",
]
with salt.utils.files.fopen(filename, 'wb') as fp:
for line in lines:
fp.write(line.encode('utf-8') + BLINESEP)


class TestSaltCacheLoader(TestCase):
def __init__(self, *args, **kws):
super(TestSaltCacheLoader, self).__init__(*args, **kws)

def setUp(self):
self.TEMPDIR = tempfile.mkdtemp()
self.TEMPLATES_DIR = os.path.join(self.TEMPDIR, 'files', 'test')
_setup_test_dir(
os.path.join(TEMPLATES_DIR, 'files', 'test'),
self.TEMPLATES_DIR
)
self.opts = {
'cachedir': TEMPLATES_DIR,
'cachedir': self.TEMPDIR,
'file_roots': {
'test': [os.path.join(TEMPLATES_DIR, 'files', 'test')]
'test': [self.TEMPLATES_DIR]
},
'pillar_roots': {
'test': [os.path.join(TEMPLATES_DIR, 'files', 'test')]
'test': [self.TEMPLATES_DIR]
}
}
super(TestSaltCacheLoader, self).setUp()

def tearDown(self):
salt.utils.files.rm_rf(self.TEMPDIR)

def test_searchpath(self):
'''
@ -102,7 +133,7 @@ class TestSaltCacheLoader(TestCase):
assert len(res) == 3
# res[0] on Windows is unicode and use os.linesep so it works cross OS
self.assertEqual(six.text_type(res[0]), 'world' + os.linesep)
tmpl_dir = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_simple')
tmpl_dir = os.path.join(self.TEMPLATES_DIR, 'hello_simple')
self.assertEqual(res[1], tmpl_dir)
assert res[2](), 'Template up to date?'
assert len(loader._file_client.requests)
@ -161,18 +192,24 @@ class TestSaltCacheLoader(TestCase):


class TestGetTemplate(TestCase):
def __init__(self, *args, **kws):
super(TestGetTemplate, self).__init__(*args, **kws)

def setUp(self):
self.TEMPDIR = tempfile.mkdtemp()
self.TEMPLATES_DIR = os.path.join(self.TEMPDIR, 'files', 'test')
_setup_test_dir(
os.path.join(TEMPLATES_DIR, 'files', 'test'),
self.TEMPLATES_DIR
)
self.local_opts = {
'cachedir': TEMPLATES_DIR,
'cachedir': self.TEMPDIR,
'file_client': 'local',
'file_ignore_regex': None,
'file_ignore_glob': None,
'file_roots': {
'test': [os.path.join(TEMPLATES_DIR, 'files', 'test')]
'test': [self.TEMPLATES_DIR]
},
'pillar_roots': {
'test': [os.path.join(TEMPLATES_DIR, 'files', 'test')]
'test': [self.TEMPLATES_DIR]
},
'fileserver_backend': ['roots'],
'hash_type': 'md5',
@ -181,13 +218,17 @@ class TestGetTemplate(TestCase):
'extmods'),
}
self.local_salt = {}
super(TestGetTemplate, self).setUp()

def tearDown(self):
salt.utils.files.rm_rf(self.TEMPDIR)

def test_fallback(self):
'''
A Template with a filesystem loader is returned as fallback
if the file is not contained in the searchpath
'''
fn_ = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_simple')
fn_ = os.path.join(self.TEMPLATES_DIR, 'hello_simple')
with salt.utils.files.fopen(fn_) as fp_:
out = render_jinja_tmpl(
salt.utils.stringutils.to_unicode(fp_.read()),
@ -200,7 +241,7 @@ class TestGetTemplate(TestCase):
A Template with a filesystem loader is returned as fallback
if the file is not contained in the searchpath
'''
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
filename = os.path.join(self.TEMPLATES_DIR, 'hello_import')
with salt.utils.files.fopen(filename) as fp_:
out = render_jinja_tmpl(
salt.utils.stringutils.to_unicode(fp_.read()),
@ -217,11 +258,11 @@ class TestGetTemplate(TestCase):
'''
fc = MockFileClient()
with patch.object(SaltCacheLoader, 'file_client', MagicMock(return_value=fc)):
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
filename = os.path.join(self.TEMPLATES_DIR, 'hello_import')
with salt.utils.files.fopen(filename) as fp_:
out = render_jinja_tmpl(
salt.utils.stringutils.to_unicode(fp_.read()),
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
dict(opts={'cachedir': self.TEMPDIR, 'file_client': 'remote',
'file_roots': self.local_opts['file_roots'],
'pillar_roots': self.local_opts['pillar_roots']},
a='Hi', b='Salt', saltenv='test', salt=self.local_salt))
@ -240,8 +281,7 @@ class TestGetTemplate(TestCase):
\{\{ 1/0 \}\} <======================
\{%- endmacro %\}
---.*'''
filename = os.path.join(TEMPLATES_DIR,
'files', 'test', 'hello_import_generalerror')
filename = os.path.join(self.TEMPLATES_DIR, 'hello_import_generalerror')
fc = MockFileClient()
with patch.object(SaltCacheLoader, 'file_client', MagicMock(return_value=fc)):
with salt.utils.files.fopen(filename) as fp_:
@ -264,8 +304,7 @@ class TestGetTemplate(TestCase):
\{\{b.greetee\}\} <-- error is here <======================
\{%- endmacro %\}
---'''
filename = os.path.join(TEMPLATES_DIR,
'files', 'test', 'hello_import_undefined')
filename = os.path.join(self.TEMPLATES_DIR, 'hello_import_undefined')
fc = MockFileClient()
with patch.object(SaltCacheLoader, 'file_client', MagicMock(return_value=fc)):
with salt.utils.files.fopen(filename) as fp_:
@ -288,8 +327,7 @@ class TestGetTemplate(TestCase):
\{\{ greeting ~ ' ' ~ greetee \}\} !
\{%- endmacro %\}
---.*'''
filename = os.path.join(TEMPLATES_DIR,
'files', 'test', 'hello_import_error')
filename = os.path.join(self.TEMPLATES_DIR, 'hello_import_error')
fc = MockFileClient()
with patch.object(SaltCacheLoader, 'file_client', MagicMock(return_value=fc)):
with salt.utils.files.fopen(filename) as fp_:
@ -303,22 +341,22 @@ class TestGetTemplate(TestCase):
def test_non_ascii_encoding(self):
fc = MockFileClient()
with patch.object(SaltCacheLoader, 'file_client', MagicMock(return_value=fc)):
filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'hello_import')
filename = os.path.join(self.TEMPLATES_DIR, 'hello_import')
with salt.utils.files.fopen(filename) as fp_:
out = render_jinja_tmpl(
salt.utils.stringutils.to_unicode(fp_.read()),
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
dict(opts={'cachedir': self.TEMPDIR, 'file_client': 'remote',
'file_roots': self.local_opts['file_roots'],
'pillar_roots': self.local_opts['pillar_roots']},
a='Hi', b='Sàlt', saltenv='test', salt=self.local_salt))
self.assertEqual(out, salt.utils.stringutils.to_unicode('Hey world !Hi Sàlt !' + os.linesep))
self.assertEqual(fc.requests[0]['path'], 'salt://macro')

filename = os.path.join(TEMPLATES_DIR, 'files', 'test', 'non_ascii')
filename = os.path.join(self.TEMPLATES_DIR, 'non_ascii')
with salt.utils.files.fopen(filename) as fp_:
out = render_jinja_tmpl(
salt.utils.stringutils.to_unicode(fp_.read(), 'utf-8'),
dict(opts={'cachedir': TEMPLATES_DIR, 'file_client': 'remote',
dict(opts={'cachedir': self.TEMPDIR, 'file_client': 'remote',
'file_roots': self.local_opts['file_roots'],
'pillar_roots': self.local_opts['pillar_roots']},
a='Hi', b='Sàlt', saltenv='test', salt=self.local_salt))
@ -375,7 +413,7 @@ class TestGetTemplate(TestCase):
self.assertEqual(response, '02')

def test_non_ascii(self):
fn = os.path.join(TEMPLATES_DIR, 'files', 'test', 'non_ascii')
fn = os.path.join(self.TEMPLATES_DIR, 'non_ascii')
out = JINJA(
fn,
opts=self.local_opts,
@ -386,6 +424,36 @@ class TestGetTemplate(TestCase):
result = salt.utils.stringutils.to_unicode(fp.read(), 'utf-8')
self.assertEqual(salt.utils.stringutils.to_unicode('Assunção' + os.linesep), result)

def test_get_context_has_enough_context(self):
template = '1\n2\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\ne\nf'
context = salt.utils.stringutils.get_context(template, 8)
expected = '---\n[...]\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\n[...]\n---'
self.assertEqual(expected, context)

def test_get_context_at_top_of_file(self):
template = '1\n2\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\ne\nf'
context = salt.utils.stringutils.get_context(template, 1)
expected = '---\n1\n2\n3\n4\n5\n6\n[...]\n---'
self.assertEqual(expected, context)

def test_get_context_at_bottom_of_file(self):
template = '1\n2\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\ne\nf'
context = salt.utils.stringutils.get_context(template, 15)
expected = '---\n[...]\na\nb\nc\nd\ne\nf\n---'
self.assertEqual(expected, context)

def test_get_context_2_context_lines(self):
template = '1\n2\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\ne\nf'
context = salt.utils.stringutils.get_context(template, 8, num_lines=2)
expected = '---\n[...]\n6\n7\n8\n9\na\n[...]\n---'
self.assertEqual(expected, context)

def test_get_context_with_marker(self):
template = '1\n2\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\ne\nf'
context = salt.utils.stringutils.get_context(template, 8, num_lines=2, marker=' <---')
expected = '---\n[...]\n6\n7\n8 <---\n9\na\n[...]\n---'
self.assertEqual(expected, context)

def test_render_with_syntax_error(self):
template = 'hello\n\n{{ bad\n\nfoo'
expected = r'.*---\nhello\n\n{{ bad\n\nfoo <======================\n---'
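For readers unfamiliar with the helper exercised by the new `test_get_context_*` cases above, a brief usage sketch: `salt.utils.stringutils.get_context` returns the lines surrounding a given line number of a template, bracketed by `---` markers, with `[...]` standing in for elided lines; `num_lines` controls the window size and `marker` is appended to the target line, exactly as the assertions above expect.

.. code-block:: python

    import salt.utils.stringutils

    template = '1\n2\n3\n4\n5\n6\n7\n8\n9\na\nb\nc\nd\ne\nf'
    # Show two lines of context around line 8 and flag that line.
    print(salt.utils.stringutils.get_context(template, 8, num_lines=2, marker=' <---'))
    # ---
    # [...]
    # 6
    # 7
    # 8 <---
    # 9
    # a
    # [...]
    # ---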
@ -54,20 +54,30 @@ class LocalClientTestCase(TestCase,
self.client.cmd_subset('*', 'first.func', sub=1, cli=True)
try:
cmd_cli_mock.assert_called_with(['minion2'], 'first.func', (), progress=False,
kwarg=None, tgt_type='list',
kwarg=None, tgt_type='list', full_return=False,
ret='')
except AssertionError:
cmd_cli_mock.assert_called_with(['minion1'], 'first.func', (), progress=False,
kwarg=None, tgt_type='list',
kwarg=None, tgt_type='list', full_return=False,
ret='')
self.client.cmd_subset('*', 'first.func', sub=10, cli=True)
try:
cmd_cli_mock.assert_called_with(['minion2', 'minion1'], 'first.func', (), progress=False,
kwarg=None, tgt_type='list',
kwarg=None, tgt_type='list', full_return=False,
ret='')
except AssertionError:
cmd_cli_mock.assert_called_with(['minion1', 'minion2'], 'first.func', (), progress=False,
kwarg=None, tgt_type='list',
kwarg=None, tgt_type='list', full_return=False,
ret='')

ret = self.client.cmd_subset('*', 'first.func', sub=1, cli=True, full_return=True)
try:
cmd_cli_mock.assert_called_with(['minion2'], 'first.func', (), progress=False,
kwarg=None, tgt_type='list', full_return=True,
ret='')
except AssertionError:
cmd_cli_mock.assert_called_with(['minion1'], 'first.func', (), progress=False,
kwarg=None, tgt_type='list', full_return=True,
ret='')

@skipIf(salt.utils.platform.is_windows(), 'Not supported on Windows')
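The hunk above verifies that the new `full_return` keyword is threaded through `LocalClient.cmd_subset`. As a rough usage sketch (assuming a running master and at least one accepted minion), passing `full_return=True` makes each minion's entry carry the full return payload rather than only the bare return value:

.. code-block:: python

    import salt.client

    local = salt.client.LocalClient()
    # Plain form: per-minion bare return values, e.g. {'minion1': True}
    print(local.cmd_subset('*', 'test.ping', sub=2))
    # Full form: per-minion dictionaries that include the 'ret' key,
    # e.g. {'minion1': {'ret': True, ...}}
    print(local.cmd_subset('*', 'test.ping', sub=2, full_return=True))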
@ -120,6 +120,10 @@ class MinionTestCase(TestCase):
try:
event_publisher = event.AsyncEventPublisher(__opts__)
result = True
except ValueError:
# There are rare cases where we operate a closed socket, especially in containers.
# In this case, don't fail the test because we'll catch it down the road.
result = True
except SaltSystemExit:
result = False
self.assertTrue(result)
@ -10,6 +10,7 @@ import textwrap
import copy

# Import Salt Testing libs
from tests.support.helpers import with_tempdir
from tests.support.unit import TestCase
from tests.support.paths import TMP

@ -34,6 +35,7 @@ class CommonTestCaseBoilerplate(TestCase):

def setUp(self):
self.root_dir = tempfile.mkdtemp(dir=TMP)
self.addCleanup(shutil.rmtree, self.root_dir, ignore_errors=True)
self.state_tree_dir = os.path.join(self.root_dir, 'state_tree')
self.cache_dir = os.path.join(self.root_dir, 'cachedir')
if not os.path.isdir(self.root_dir):
@ -291,167 +293,130 @@ class PyDSLRendererTestCase(CommonTestCaseBoilerplate):
self.assertEqual(result['C']['cmd'][1]['require'][0]['cmd'], 'A')
self.assertEqual(result['B']['file'][1]['require'][0]['cmd'], 'C')

def test_pipe_through_stateconf(self):
dirpath = tempfile.mkdtemp(dir=TMP)
if not os.path.isdir(dirpath):
self.skipTest(
'The temporary directory \'{0}\' was not created'.format(
dirpath
)
)
@with_tempdir()
def test_pipe_through_stateconf(self, dirpath):
output = os.path.join(dirpath, 'output')
try:
write_to(os.path.join(dirpath, 'xxx.sls'), textwrap.dedent(
'''#!stateconf -os yaml . jinja
.X:
cmd.run:
- name: echo X >> {0}
- cwd: /
.Y:
cmd.run:
- name: echo Y >> {0}
- cwd: /
.Z:
cmd.run:
- name: echo Z >> {0}
- cwd: /
'''.format(output.replace('\\', '/'))))
write_to(os.path.join(dirpath, 'yyy.sls'), textwrap.dedent('''\
#!pydsl|stateconf -ps
write_to(os.path.join(dirpath, 'xxx.sls'), textwrap.dedent(
'''#!stateconf -os yaml . jinja
.X:
cmd.run:
- name: echo X >> {0}
- cwd: /
.Y:
cmd.run:
- name: echo Y >> {0}
- cwd: /
.Z:
cmd.run:
- name: echo Z >> {0}
- cwd: /
'''.format(output.replace('\\', '/'))))
write_to(os.path.join(dirpath, 'yyy.sls'), textwrap.dedent('''\
#!pydsl|stateconf -ps

__pydsl__.set(ordered=True)
state('.D').cmd.run('echo D >> {0}', cwd='/')
state('.E').cmd.run('echo E >> {0}', cwd='/')
state('.F').cmd.run('echo F >> {0}', cwd='/')
'''.format(output.replace('\\', '/'))))
__pydsl__.set(ordered=True)
state('.D').cmd.run('echo D >> {0}', cwd='/')
state('.E').cmd.run('echo E >> {0}', cwd='/')
state('.F').cmd.run('echo F >> {0}', cwd='/')
'''.format(output.replace('\\', '/'))))

write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl|stateconf -ps
write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl|stateconf -ps

include('xxx', 'yyy')
include('xxx', 'yyy')

# make all states in xxx run BEFORE states in this sls.
extend(state('.start').stateconf.require(stateconf='xxx::goal'))
# make all states in xxx run BEFORE states in this sls.
extend(state('.start').stateconf.require(stateconf='xxx::goal'))

# make all states in yyy run AFTER this sls.
extend(state('.goal').stateconf.require_in(stateconf='yyy::start'))
# make all states in yyy run AFTER this sls.
extend(state('.goal').stateconf.require_in(stateconf='yyy::start'))

__pydsl__.set(ordered=True)
__pydsl__.set(ordered=True)

state('.A').cmd.run('echo A >> {0}', cwd='/')
state('.B').cmd.run('echo B >> {0}', cwd='/')
state('.C').cmd.run('echo C >> {0}', cwd='/')
'''.format(output.replace('\\', '/'))))
state('.A').cmd.run('echo A >> {0}', cwd='/')
state('.B').cmd.run('echo B >> {0}', cwd='/')
state('.C').cmd.run('echo C >> {0}', cwd='/')
'''.format(output.replace('\\', '/'))))

self.state_highstate({'base': ['aaa']}, dirpath)
with salt.utils.files.fopen(output, 'r') as f:
self.assertEqual(''.join(f.read().split()), "XYZABCDEF")
self.state_highstate({'base': ['aaa']}, dirpath)
with salt.utils.files.fopen(output, 'r') as f:
self.assertEqual(''.join(f.read().split()), "XYZABCDEF")

finally:
shutil.rmtree(dirpath, ignore_errors=True)

def test_compile_time_state_execution(self):
@with_tempdir()
def test_compile_time_state_execution(self, dirpath):
if not sys.stdin.isatty():
self.skipTest('Not attached to a TTY')
dirpath = tempfile.mkdtemp(dir=TMP)
if not os.path.isdir(dirpath):
self.skipTest(
'The temporary directory \'{0}\' was not created'.format(
dirpath
)
)
try:
# The Windows shell will include any spaces before the redirect
# in the text that is redirected.
# For example: echo hello > test.txt will contain "hello "
write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl
# The Windows shell will include any spaces before the redirect
# in the text that is redirected.
# For example: echo hello > test.txt will contain "hello "
write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl

__pydsl__.set(ordered=True)
A = state('A')
A.cmd.run('echo hehe>{0}/zzz.txt', cwd='/')
A.file.managed('{0}/yyy.txt', source='salt://zzz.txt')
A()
A()
__pydsl__.set(ordered=True)
A = state('A')
A.cmd.run('echo hehe>{0}/zzz.txt', cwd='/')
A.file.managed('{0}/yyy.txt', source='salt://zzz.txt')
A()
A()

state().cmd.run('echo hoho>>{0}/yyy.txt', cwd='/')
state().cmd.run('echo hoho>>{0}/yyy.txt', cwd='/')

A.file.managed('{0}/xxx.txt', source='salt://zzz.txt')
A()
'''.format(dirpath.replace('\\', '/'))))
self.state_highstate({'base': ['aaa']}, dirpath)
with salt.utils.files.fopen(os.path.join(dirpath, 'yyy.txt'), 'rt') as f:
self.assertEqual(f.read(), 'hehe' + os.linesep + 'hoho' + os.linesep)
with salt.utils.files.fopen(os.path.join(dirpath, 'xxx.txt'), 'rt') as f:
self.assertEqual(f.read(), 'hehe' + os.linesep)
finally:
shutil.rmtree(dirpath, ignore_errors=True)
A.file.managed('{0}/xxx.txt', source='salt://zzz.txt')
A()
'''.format(dirpath.replace('\\', '/'))))
self.state_highstate({'base': ['aaa']}, dirpath)
with salt.utils.files.fopen(os.path.join(dirpath, 'yyy.txt'), 'rt') as f:
self.assertEqual(f.read(), 'hehe' + os.linesep + 'hoho' + os.linesep)
with salt.utils.files.fopen(os.path.join(dirpath, 'xxx.txt'), 'rt') as f:
self.assertEqual(f.read(), 'hehe' + os.linesep)

def test_nested_high_state_execution(self):
dirpath = tempfile.mkdtemp(dir=TMP)
if not os.path.isdir(dirpath):
self.skipTest(
'The temporary directory \'{0}\' was not created'.format(
dirpath
)
)
@with_tempdir()
def test_nested_high_state_execution(self, dirpath):
output = os.path.join(dirpath, 'output')
try:
write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl
__salt__['state.sls']('bbb')
state().cmd.run('echo bbbbbb', cwd='/')
'''))
write_to(os.path.join(dirpath, 'bbb.sls'), textwrap.dedent(
'''
# {{ salt['state.sls']('ccc') }}
test:
cmd.run:
- name: echo bbbbbbb
- cwd: /
'''))
write_to(os.path.join(dirpath, 'ccc.sls'), textwrap.dedent(
'''
#!pydsl
state().cmd.run('echo ccccc', cwd='/')
'''))
self.state_highstate({'base': ['aaa']}, dirpath)
finally:
shutil.rmtree(dirpath, ignore_errors=True)
write_to(os.path.join(dirpath, 'aaa.sls'), textwrap.dedent('''\
#!pydsl
__salt__['state.sls']('bbb')
state().cmd.run('echo bbbbbb', cwd='/')
'''))
write_to(os.path.join(dirpath, 'bbb.sls'), textwrap.dedent(
'''
# {{ salt['state.sls']('ccc') }}
test:
cmd.run:
- name: echo bbbbbbb
- cwd: /
'''))
write_to(os.path.join(dirpath, 'ccc.sls'), textwrap.dedent(
'''
#!pydsl
state().cmd.run('echo ccccc', cwd='/')
'''))
self.state_highstate({'base': ['aaa']}, dirpath)

def test_repeat_includes(self):
dirpath = tempfile.mkdtemp(dir=TMP)
if not os.path.isdir(dirpath):
self.skipTest(
'The temporary directory \'{0}\' was not created'.format(
dirpath
)
)
@with_tempdir()
def test_repeat_includes(self, dirpath):
output = os.path.join(dirpath, 'output')
try:
write_to(os.path.join(dirpath, 'b.sls'), textwrap.dedent('''\
#!pydsl
include('c')
include('d')
'''))
write_to(os.path.join(dirpath, 'c.sls'), textwrap.dedent('''\
#!pydsl
modtest = include('e')
modtest.success
'''))
write_to(os.path.join(dirpath, 'd.sls'), textwrap.dedent('''\
#!pydsl
modtest = include('e')
modtest.success
'''))
write_to(os.path.join(dirpath, 'e.sls'), textwrap.dedent('''\
#!pydsl
success = True
'''))
self.state_highstate({'base': ['b']}, dirpath)
self.state_highstate({'base': ['c', 'd']}, dirpath)
finally:
shutil.rmtree(dirpath, ignore_errors=True)
write_to(os.path.join(dirpath, 'b.sls'), textwrap.dedent('''\
#!pydsl
include('c')
include('d')
'''))
write_to(os.path.join(dirpath, 'c.sls'), textwrap.dedent('''\
#!pydsl
modtest = include('e')
modtest.success
'''))
write_to(os.path.join(dirpath, 'd.sls'), textwrap.dedent('''\
#!pydsl
modtest = include('e')
modtest.success
'''))
write_to(os.path.join(dirpath, 'e.sls'), textwrap.dedent('''\
#!pydsl
success = True
'''))
self.state_highstate({'base': ['b']}, dirpath)
self.state_highstate({'base': ['c', 'd']}, dirpath)


def write_to(fpath, content):
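Several tests above, and in the next hunk, swap manual `tempfile.mkdtemp()`/`shutil.rmtree()` boilerplate for the `with_tempdir()` decorator from `tests.support.helpers`. As a rough sketch of the pattern (not the actual helper's implementation), such a decorator creates a temporary directory, passes its path to the wrapped test as an extra argument, and removes it when the test finishes:

.. code-block:: python

    import functools
    import shutil
    import tempfile

    def with_tempdir():
        def decorator(func):
            @functools.wraps(func)
            def wrapper(self, *args, **kwargs):
                dirpath = tempfile.mkdtemp()
                try:
                    # The temporary directory is injected as the first extra argument.
                    return func(self, dirpath, *args, **kwargs)
                finally:
                    shutil.rmtree(dirpath, ignore_errors=True)
            return wrapper
        return decorator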
@ -6,15 +6,13 @@ Unit Tests for functions located in salt.utils.files.py.
# Import python libs
from __future__ import absolute_import, unicode_literals, print_function
import os
import shutil
import tempfile

# Import Salt libs
import salt.utils.files
from salt.ext import six

# Import Salt Testing libs
from tests.support.paths import TMP
from tests.support.helpers import with_tempdir
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
patch,
@ -43,33 +41,30 @@ class FilesUtilTestCase(TestCase):
error = True
self.assertFalse(error, 'salt.utils.files.safe_rm raised exception when it should not have')

def test_safe_walk_symlink_recursion(self):
tmp = tempfile.mkdtemp(dir=TMP)
try:
if os.stat(tmp).st_ino == 0:
self.skipTest('inodes not supported in {0}'.format(tmp))
os.mkdir(os.path.join(tmp, 'fax'))
os.makedirs(os.path.join(tmp, 'foo/bar'))
os.symlink('../..', os.path.join(tmp, 'foo/bar/baz'))
os.symlink('foo', os.path.join(tmp, 'root'))
expected = [
(os.path.join(tmp, 'root'), ['bar'], []),
(os.path.join(tmp, 'root/bar'), ['baz'], []),
(os.path.join(tmp, 'root/bar/baz'), ['fax', 'foo', 'root'], []),
(os.path.join(tmp, 'root/bar/baz/fax'), [], []),
]
paths = []
for root, dirs, names in salt.utils.files.safe_walk(os.path.join(tmp, 'root')):
paths.append((root, sorted(dirs), names))
if paths != expected:
raise AssertionError(
'\n'.join(
['got:'] + [repr(p) for p in paths] +
['', 'expected:'] + [repr(p) for p in expected]
)
@with_tempdir()
def test_safe_walk_symlink_recursion(self, tmp):
if os.stat(tmp).st_ino == 0:
self.skipTest('inodes not supported in {0}'.format(tmp))
os.mkdir(os.path.join(tmp, 'fax'))
os.makedirs(os.path.join(tmp, 'foo/bar'))
os.symlink('../..', os.path.join(tmp, 'foo/bar/baz'))
os.symlink('foo', os.path.join(tmp, 'root'))
expected = [
(os.path.join(tmp, 'root'), ['bar'], []),
(os.path.join(tmp, 'root/bar'), ['baz'], []),
(os.path.join(tmp, 'root/bar/baz'), ['fax', 'foo', 'root'], []),
(os.path.join(tmp, 'root/bar/baz/fax'), [], []),
]
paths = []
for root, dirs, names in salt.utils.files.safe_walk(os.path.join(tmp, 'root')):
paths.append((root, sorted(dirs), names))
if paths != expected:
raise AssertionError(
'\n'.join(
['got:'] + [repr(p) for p in paths] +
['', 'expected:'] + [repr(p) for p in expected]
)
finally:
shutil.rmtree(tmp)
)

@skipIf(not six.PY3, 'This test only applies to Python 3')
def test_fopen_with_disallowed_fds(self):
@ -132,7 +132,7 @@ class JSONTestCase(TestCase):
# Loading it should be equal to the original data
self.assertEqual(salt.utils.json.loads(ret), self.data)

@with_tempfile
@with_tempfile()
def test_dump_load(self, json_out):
'''
Test dumping to and loading from a file handle
@ -37,19 +37,44 @@ class StringutilsTestCase(TestCase):

def test_is_binary(self):
self.assertFalse(salt.utils.stringutils.is_binary(LOREM_IPSUM))
# Also test bytestring
self.assertFalse(
salt.utils.stringutils.is_binary(
salt.utils.stringutils.is_binary(LOREM_IPSUM)
)
)

zero_str = '{0}{1}'.format(LOREM_IPSUM, '\0')
self.assertTrue(salt.utils.stringutils.is_binary(zero_str))
# Also test bytestring
self.assertTrue(
salt.utils.stringutils.is_binary(
salt.utils.stringutils.to_bytes(zero_str)
)
)

# To ensure a safe exit if the str passed doesn't evaluate to True
self.assertFalse(salt.utils.stringutils.is_binary(''))
self.assertFalse(salt.utils.stringutils.is_binary(b''))

nontext = 3 * (''.join([chr(x) for x in range(1, 32) if x not in (8, 9, 10, 12, 13)]))
almost_bin_str = '{0}{1}'.format(LOREM_IPSUM[:100], nontext[:42])
self.assertFalse(salt.utils.stringutils.is_binary(almost_bin_str))
# Also test bytestring
self.assertFalse(
salt.utils.stringutils.is_binary(
salt.utils.stringutils.to_bytes(almost_bin_str)
)
)

bin_str = almost_bin_str + '\x01'
self.assertTrue(salt.utils.stringutils.is_binary(bin_str))
# Also test bytestring
self.assertTrue(
salt.utils.stringutils.is_binary(
salt.utils.stringutils.to_bytes(bin_str)
)
)

def test_to_str(self):
for x in (123, (1, 2, 3), [1, 2, 3], {1: 23}, None):
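A quick illustration of the behaviour the expanded `test_is_binary` cases pin down: empty input and ordinary text are reported as non-binary, while data containing a NUL byte (or a high proportion of non-text control characters) is treated as binary, for both `str` and `bytes` inputs. A small hedged sketch of how that reads in practice:

.. code-block:: python

    import salt.utils.stringutils as stringutils

    assert not stringutils.is_binary('')                      # empty input is not binary
    assert not stringutils.is_binary('just plain text')       # ordinary text is not binary
    assert stringutils.is_binary('text with a NUL byte\0')    # NUL marks the data as binary
    assert stringutils.is_binary(b'bytes with a NUL byte\0')  # bytestrings behave the same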
@ -149,3 +149,26 @@ class YamlLoaderTestCase(TestCase):
b: {'a': u'\\u0414'}''')),
{'foo': {'a': u'\u0414', 'b': {'a': u'\u0414'}}}
)

def test_yaml_with_colon_in_inline_dict(self):
'''
Test proper loading of unicode literal strings in inline dicts
'''
self.assert_matches(
self.render_yaml(textwrap.dedent('''\
foo:
b: {u'c': u'https://foo.com'}''')),
{'foo': {'b': {'c': 'https://foo.com'}}}
)

def test_yaml_with_plain_scalars(self):
'''
Test that plain (i.e. unquoted) string and non-string scalars are
properly handled
'''
self.assert_matches(
self.render_yaml(textwrap.dedent('''\
foo:
b: {foo: bar, one: 1, list: [1, two, 3]}''')),
{'foo': {'b': {'foo': 'bar', 'one': 1, 'list': [1, 'two', 3]}}}
)
@ -1,5 +1,6 @@
integration.client.test_runner
integration.client.test_standard
integration.grains.test_core
integration.loader.test_ext_grains
integration.loader.test_ext_modules
integration.modules.test_aliases
@ -17,9 +18,11 @@ integration.modules.test_pillar
integration.modules.test_pkg
integration.modules.test_publish
integration.modules.test_state
integration.modules.test_status
integration.modules.test_sysmod
integration.modules.test_test
integration.modules.test_useradd
integration.reactor.test_reactor
integration.renderers.test_pydsl
integration.returners.test_librato_return
integration.runners.test_fileserver
@ -27,6 +30,7 @@ integration.runners.test_jobs
integration.runners.test_salt
integration.sdb.test_env
integration.states.test_host
integration.states.test_reg
integration.states.test_renderers
integration.utils.testprogram
integration.wheel.test_client