Merge branch 'oxygen' into scheduler_fixes_loop_interval

This commit is contained in:
Gareth J. Greenaway 2017-12-21 08:36:09 -08:00 committed by GitHub
commit 98a9ddf10c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 955 additions and 323 deletions

View File

@ -35,7 +35,7 @@ Function Get-Settings {
# Prerequisite software
$Prerequisites = @{
"NSIS" = "nsis-3.0b1-setup.exe"
"NSIS" = "nsis-3.02.1-setup.exe"
"VCforPython" = "VCForPython27.msi"
"VCppBuildTools" = "visualcppbuildtools_full.exe"
}

View File

@ -25,7 +25,7 @@ pyOpenSSL==17.5.0
python-dateutil==2.6.1
python-gnupg==0.4.1
pyyaml==3.12
pyzmq==17.0.0b3
pyzmq==16.0.3
requests==2.18.4
singledispatch==3.4.0.3
six==1.11.0

View File

@ -7,7 +7,6 @@ from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import os
import time
import copy
import json
import logging
@ -101,8 +100,9 @@ def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__
__pillar__.update(kwargs.get('pillar', {}))
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -133,37 +133,37 @@ def sls(mods, saltenv='base', test=None, exclude=None, **kwargs):
chunks,
_merge_extra_filerefs(
kwargs.get('extra_filerefs', ''),
__opts__.get('extra_filerefs', '')
opts.get('extra_filerefs', '')
)
)
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
roster = salt.roster.Roster(opts, opts.get('roster', 'flat'))
roster_grains = roster.opts['grains']
# Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__,
opts,
__context__['fileclient'],
chunks,
file_refs,
__pillar__,
st_kwargs['id_'],
roster_grains)
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, opts['hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'],
opts['thin_dir'],
test,
trans_tar_sum,
__opts__['hash_type'])
opts['hash_type'])
single = salt.client.ssh.Single(
__opts__,
opts,
cmd,
fsclient=__context__['fileclient'],
minion_opts=__salt__.minion_opts,
**st_kwargs)
single.shell.send(
trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
'{0}/salt_state.tgz'.format(opts['thin_dir']))
stdout, stderr, _ = single.cmd_block()
# Clean up our tar
@ -245,32 +245,6 @@ def _check_queue(queue, kwargs):
return conflict
def _get_opts(**kwargs):
'''
Return a copy of the opts for use, optionally load a local config on top
'''
opts = copy.deepcopy(__opts__)
if 'localconfig' in kwargs:
return salt.config.minion_config(kwargs['localconfig'], defaults=opts)
if 'saltenv' in kwargs:
saltenv = kwargs['saltenv']
if saltenv is not None and not isinstance(saltenv, six.string_types):
opts['environment'] = str(kwargs['saltenv'])
else:
opts['environment'] = kwargs['saltenv']
if 'pillarenv' in kwargs:
pillarenv = kwargs['pillarenv']
if pillarenv is not None and not isinstance(pillarenv, six.string_types):
opts['pillarenv'] = str(kwargs['pillarenv'])
else:
opts['pillarenv'] = kwargs['pillarenv']
return opts
def _get_initial_pillar(opts):
return __pillar__ if __opts__['__cli'] == 'salt-call' \
and opts['pillarenv'] == __opts__['pillarenv'] \
@ -382,8 +356,9 @@ def high(data, **kwargs):
__pillar__.update(kwargs.get('pillar', {}))
st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -392,36 +367,36 @@ def high(data, **kwargs):
chunks,
_merge_extra_filerefs(
kwargs.get('extra_filerefs', ''),
__opts__.get('extra_filerefs', '')
opts.get('extra_filerefs', '')
)
)
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
roster = salt.roster.Roster(opts, opts.get('roster', 'flat'))
roster_grains = roster.opts['grains']
# Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__,
opts,
__context__['fileclient'],
chunks,
file_refs,
__pillar__,
st_kwargs['id_'],
roster_grains)
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, opts['hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz pkg_sum={1} hash_type={2}'.format(
__opts__['thin_dir'],
opts['thin_dir'],
trans_tar_sum,
__opts__['hash_type'])
opts['hash_type'])
single = salt.client.ssh.Single(
__opts__,
opts,
cmd,
fsclient=__context__['fileclient'],
minion_opts=__salt__.minion_opts,
**st_kwargs)
single.shell.send(
trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
'{0}/salt_state.tgz'.format(opts['thin_dir']))
stdout, stderr, _ = single.cmd_block()
# Clean up our tar
@ -618,9 +593,9 @@ def highstate(test=None, **kwargs):
__pillar__.update(kwargs.get('pillar', {}))
st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -629,7 +604,7 @@ def highstate(test=None, **kwargs):
chunks,
_merge_extra_filerefs(
kwargs.get('extra_filerefs', ''),
__opts__.get('extra_filerefs', '')
opts.get('extra_filerefs', '')
)
)
# Check for errors
@ -638,33 +613,33 @@ def highstate(test=None, **kwargs):
__context__['retcode'] = 1
return chunks
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
roster = salt.roster.Roster(opts, opts.get('roster', 'flat'))
roster_grains = roster.opts['grains']
# Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__,
opts,
__context__['fileclient'],
chunks,
file_refs,
__pillar__,
st_kwargs['id_'],
roster_grains)
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, opts['hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'],
opts['thin_dir'],
test,
trans_tar_sum,
__opts__['hash_type'])
opts['hash_type'])
single = salt.client.ssh.Single(
__opts__,
opts,
cmd,
fsclient=__context__['fileclient'],
minion_opts=__salt__.minion_opts,
**st_kwargs)
single.shell.send(
trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
'{0}/salt_state.tgz'.format(opts['thin_dir']))
stdout, stderr, _ = single.cmd_block()
# Clean up our tar
@ -699,12 +674,13 @@ def top(topfn, test=None, **kwargs):
__pillar__.update(kwargs.get('pillar', {}))
st_kwargs = __salt__.kwargs
__opts__['grains'] = __grains__
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
if salt.utils.args.test_mode(test=test, **kwargs):
__opts__['test'] = True
opts['test'] = True
else:
__opts__['test'] = __opts__.get('test', None)
opts['test'] = __opts__.get('test', None)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -714,37 +690,37 @@ def top(topfn, test=None, **kwargs):
chunks,
_merge_extra_filerefs(
kwargs.get('extra_filerefs', ''),
__opts__.get('extra_filerefs', '')
opts.get('extra_filerefs', '')
)
)
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
roster = salt.roster.Roster(opts, opts.get('roster', 'flat'))
roster_grains = roster.opts['grains']
# Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__,
opts,
__context__['fileclient'],
chunks,
file_refs,
__pillar__,
st_kwargs['id_'],
roster_grains)
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, opts['hash_type'])
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'],
opts['thin_dir'],
test,
trans_tar_sum,
__opts__['hash_type'])
opts['hash_type'])
single = salt.client.ssh.Single(
__opts__,
opts,
cmd,
fsclient=__context__['fileclient'],
minion_opts=__salt__.minion_opts,
**st_kwargs)
single.shell.send(
trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
'{0}/salt_state.tgz'.format(opts['thin_dir']))
stdout, stderr, _ = single.cmd_block()
# Clean up our tar
@ -764,7 +740,7 @@ def top(topfn, test=None, **kwargs):
return stdout
def show_highstate():
def show_highstate(**kwargs):
'''
Retrieve the highstate data from the salt master and display it
@ -775,15 +751,16 @@ def show_highstate():
salt '*' state.show_highstate
'''
__opts__['grains'] = __grains__
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
return st_.compile_highstate()
def show_lowstate():
def show_lowstate(**kwargs):
'''
List out the low data that will be applied to this minion
@ -794,8 +771,9 @@ def show_lowstate():
salt '*' state.show_lowstate
'''
__opts__['grains'] = __grains__
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -838,13 +816,13 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
if conflict is not None:
return conflict
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
# Since this is running a specific ID within a specific SLS file, fall back
# to the 'base' saltenv if none is configured and none was passed.
if opts['environment'] is None:
opts['environment'] = 'base'
if opts['saltenv'] is None:
opts['saltenv'] = 'base'
try:
st_ = salt.state.HighState(opts,
@ -864,7 +842,7 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
split_mods = mods.split(',')
st_.push_active()
try:
high_, errors = st_.render_highstate({opts['environment']: split_mods})
high_, errors = st_.render_highstate({opts['saltenv']: split_mods})
finally:
st_.pop_active()
errors += st_.state.verify_high(high_)
@ -884,13 +862,10 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
ret.update(st_.state.call_chunk(chunk, {}, chunks))
_set_retcode(ret, highstate=highstate)
# Work around Windows multiprocessing bug, set __opts__['test'] back to
# value from before this function was run.
__opts__['test'] = orig_test
if not ret:
raise SaltInvocationError(
'No matches for ID \'{0}\' found in SLS \'{1}\' within saltenv '
'\'{2}\''.format(id_, mods, opts['environment'])
'\'{2}\''.format(id_, mods, opts['saltenv'])
)
return ret
@ -908,13 +883,13 @@ def show_sls(mods, saltenv='base', test=None, **kwargs):
'''
__pillar__.update(kwargs.get('pillar', {}))
__opts__['grains'] = __grains__
opts = copy.copy(__opts__)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
if salt.utils.args.test_mode(test=test, **kwargs):
opts['test'] = True
else:
opts['test'] = __opts__.get('test', None)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -951,13 +926,13 @@ def show_low_sls(mods, saltenv='base', test=None, **kwargs):
__pillar__.update(kwargs.get('pillar', {}))
__opts__['grains'] = __grains__
opts = copy.copy(__opts__)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
if salt.utils.args.test_mode(test=test, **kwargs):
opts['test'] = True
else:
opts['test'] = __opts__.get('test', None)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -979,7 +954,7 @@ def show_low_sls(mods, saltenv='base', test=None, **kwargs):
return ret
def show_top():
def show_top(**kwargs):
'''
Return the top data that the minion will use for a highstate
@ -990,8 +965,9 @@ def show_top():
salt '*' state.show_top
'''
__opts__['grains'] = __grains__
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
st_ = salt.client.ssh.state.SSHHighState(
__opts__,
opts,
__pillar__,
__salt__,
__context__['fileclient'])
@ -1038,7 +1014,7 @@ def single(fun, name, test=None, **kwargs):
'__id__': name,
'name': name})
opts = copy.deepcopy(__opts__)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
# Set test mode
if salt.utils.args.test_mode(test=test, **kwargs):
@ -1050,7 +1026,7 @@ def single(fun, name, test=None, **kwargs):
__pillar__.update(kwargs.get('pillar', {}))
# Create the State environment
st_ = salt.client.ssh.state.SSHState(__opts__, __pillar__)
st_ = salt.client.ssh.state.SSHState(opts, __pillar__)
# Verify the low chunk
err = st_.verify_data(kwargs)
@ -1067,16 +1043,16 @@ def single(fun, name, test=None, **kwargs):
chunks,
_merge_extra_filerefs(
kwargs.get('extra_filerefs', ''),
__opts__.get('extra_filerefs', '')
opts.get('extra_filerefs', '')
)
)
roster = salt.roster.Roster(__opts__, __opts__.get('roster', 'flat'))
roster = salt.roster.Roster(opts, opts.get('roster', 'flat'))
roster_grains = roster.opts['grains']
# Create the tar containing the state pkg and relevant files.
trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__,
opts,
__context__['fileclient'],
chunks,
file_refs,
@ -1085,18 +1061,18 @@ def single(fun, name, test=None, **kwargs):
roster_grains)
# Create a hash so we can verify the tar on the target system
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__['hash_type'])
trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, opts['hash_type'])
# We use state.pkg to execute the "state package"
cmd = 'state.pkg {0}/salt_state.tgz test={1} pkg_sum={2} hash_type={3}'.format(
__opts__['thin_dir'],
opts['thin_dir'],
test,
trans_tar_sum,
__opts__['hash_type'])
opts['hash_type'])
# Create a salt-ssh Single object to actually do the ssh work
single = salt.client.ssh.Single(
__opts__,
opts,
cmd,
fsclient=__context__['fileclient'],
minion_opts=__salt__.minion_opts,
@ -1105,7 +1081,7 @@ def single(fun, name, test=None, **kwargs):
# Copy the tar down
single.shell.send(
trans_tar,
'{0}/salt_state.tgz'.format(__opts__['thin_dir']))
'{0}/salt_state.tgz'.format(opts['thin_dir']))
# Run the state.pkg command on the target
stdout, stderr, _ = single.cmd_block()

View File

@ -41,6 +41,8 @@ class SaltException(Exception):
def __init__(self, message=''):
# Avoid circular import
import salt.utils.stringutils
if not isinstance(message, six.string_types):
message = six.text_type(message)
if six.PY3 or isinstance(message, unicode): # pylint: disable=incompatible-py3-code
super(SaltException, self).__init__(
salt.utils.stringutils.to_str(message)
@ -120,7 +122,7 @@ class CommandExecutionError(SaltException):
def __init__(self, message='', info=None):
# Avoid circular import
import salt.utils.stringutils
self.error = exc_str_prefix = salt.utils.stringutils.to_unicode(message)
self.error = exc_str_prefix = six.text_type(message)
self.info = info
if self.info:
if exc_str_prefix:

176
salt/modules/boto_s3.py Normal file
View File

@ -0,0 +1,176 @@
# -*- coding: utf-8 -*-
'''
Connection module for Amazon S3 using boto3
.. versionadded:: Oxygen
:configuration: This module accepts explicit AWS credentials but can also
utilize IAM roles assigned to the instance through Instance Profiles or
it can read them from the ~/.aws/credentials file or from these
environment variables: AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY.
Dynamic credentials are then automatically obtained from AWS API and no
further configuration is necessary. More information available at:
.. code-block:: text
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/
iam-roles-for-amazon-ec2.html
http://boto3.readthedocs.io/en/latest/guide/
configuration.html#guide-configuration
If IAM roles are not used you need to specify them either in a pillar or
in the minion's config file:
.. code-block:: yaml
s3.keyid: GKTADJGHEIQSXMKKRBJ08H
s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
A region may also be specified in the configuration:
.. code-block:: yaml
s3.region: us-east-1
If a region is not specified, the default is us-east-1.
It's also possible to specify key, keyid and region via a profile, either
as a passed in dict, or as a string to pull from pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
:depends: boto3
'''
# keep lint from choking on _get_conn and _cache_id
# pylint: disable=E0602
# Import Python libs
from __future__ import absolute_import
import logging
# Import Salt libs
from salt.utils.versions import LooseVersion as _LooseVersion
log = logging.getLogger(__name__)
# pylint: disable=import-error
try:
# pylint: disable=unused-import
import boto3
# pylint: enable=unused-import
import botocore
logging.getLogger('boto3').setLevel(logging.CRITICAL)
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# pylint: enable=import-error
def __virtual__():
'''
Only load if boto libraries exist and if boto libraries are greater than
a given version.
'''
needed_boto3_version = '1.2.1'
msg = 'The boto_s3 module cannot be loaded: {0}.'
if not HAS_BOTO:
return (False, msg.format('boto3 libraries not found'))
if _LooseVersion(boto3.__version__) < _LooseVersion(needed_boto3_version):
submsg = 'boto3 library version {0} is required'.format(
needed_boto3_version,
)
return (False, msg.format(submsg))
return True
def __init__(opts): # pylint: disable=unused-argument
if HAS_BOTO:
__utils__['boto3.assign_funcs'](__name__, 's3')
def get_object_metadata(
name,
extra_args=None,
region=None,
key=None,
keyid=None,
profile=None,
):
'''
Get metadata about an S3 object.
Returns None if the object does not exist.
You can pass AWS SSE-C related args and/or RequestPayer in extra_args.
CLI Example:
.. code-block:: bash
salt myminion boto_s3.get_object_metadata \\
my_bucket/path/to/object \\
region=us-east-1 \\
key=key \\
keyid=keyid \\
profile=profile \\
'''
bucket, _, s3_key = name.partition('/')
if extra_args is None:
extra_args = {}
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
metadata = conn.head_object(
Bucket=bucket,
Key=s3_key,
**extra_args
)
except botocore.exceptions.ClientError as e:
if e.response['Error']['Message'] == 'Not Found':
return {'result': None}
return {'error': __utils__['boto3.get_error'](e)}
return {'result': metadata}
def upload_file(
source,
name,
extra_args=None,
region=None,
key=None,
keyid=None,
profile=None,
):
'''
Upload a local file as an S3 object.
CLI Example:
.. code-block:: bash
salt myminion boto_s3.upload_file \\
/path/to/local/file \\
my_bucket/path/to/object \\
region=us-east-1 \\
key=key \\
keyid=keyid \\
profile=profile \\
'''
bucket, _, s3_key = name.partition('/')
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
conn.upload_file(source, bucket, s3_key, ExtraArgs=extra_args)
except boto3.exceptions.S3UploadFailedError as e:
return {'error': __utils__['boto3.get_error'](e)}
log.info('S3 object uploaded to {0}'.format(name))
return {'result': True}

View File

@ -629,6 +629,7 @@ def _client_wrapper(attr, *args, **kwargs):
)
ret = func(*args, **kwargs)
except docker.errors.APIError as exc:
log.exception('Encountered error running API function %s', attr)
if catch_api_errors:
# Generic handling of Docker API errors
raise CommandExecutionError(
@ -1246,8 +1247,10 @@ def compare_networks(first, second, ignore='Name,Id,Created,Containers'):
if bool(subval1) is bool(subval2) is False:
continue
elif subkey == 'Config':
config1 = sorted(val1['Config'])
config2 = sorted(val2.get('Config', []))
kvsort = lambda x: (list(six.iterkeys(x)),
list(six.itervalues(x)))
config1 = sorted(val1['Config'], key=kvsort)
config2 = sorted(val2.get('Config', []), key=kvsort)
if config1 != config2:
ret.setdefault('IPAM', {})['Config'] = {
'old': config1, 'new': config2
@ -2045,7 +2048,10 @@ def logs(name, **kwargs):
kwargs['since'], exc
)
return _client_wrapper('logs', name, **kwargs)
# logs() returns output as bytestrings
return salt.utils.stringutils.to_unicode(
_client_wrapper('logs', name, **kwargs)
)
def pid(name):
@ -3336,7 +3342,7 @@ def run_container(image,
ret['Id'],
stream=True,
timestamps=False):
output.append(line)
output.append(salt.utils.stringutils.to_unicode(line))
except CommandExecutionError:
msg = (
'Failed to get logs from container. This may be because '
@ -6425,30 +6431,30 @@ def _generate_tmp_path():
'salt.docker.{0}'.format(uuid.uuid4().hex[:6]))
def _prepare_trans_tar(name, mods=None, saltenv='base', pillar=None):
def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None):
'''
Prepares a self contained tarball that has the state
to be applied in the container
'''
chunks = _compile_state(mods, saltenv)
chunks = _compile_state(sls_opts, mods)
# reuse it from salt.ssh, however this function should
# be somewhere else
refs = salt.client.ssh.state.lowstate_file_refs(chunks)
_mk_fileclient()
trans_tar = salt.client.ssh.state.prep_trans_tar(
__opts__,
sls_opts,
__context__['cp.fileclient'],
chunks, refs, pillar, name)
return trans_tar
def _compile_state(mods=None, saltenv='base'):
def _compile_state(sls_opts, mods=None):
'''
Generates the chunks of lowdata from the list of modules
'''
st_ = HighState(__opts__)
st_ = HighState(sls_opts)
high_data, errors = st_.render_highstate({saltenv: mods})
high_data, errors = st_.render_highstate({sls_opts['saltenv']: mods})
high_data, ext_errors = st_.state.reconcile_extend(high_data)
errors += ext_errors
errors += st_.state.verify_high(high_data)
@ -6466,26 +6472,6 @@ def _compile_state(mods=None, saltenv='base'):
return st_.state.compile_high_data(high_data)
def _gather_pillar(pillarenv, pillar_override, **grains):
'''
Gathers pillar with a custom set of grains, which should
be first retrieved from the container
'''
pillar = salt.pillar.get_pillar(
__opts__,
grains,
# Not sure if these two are correct
__opts__['id'],
__opts__['saltenv'],
pillar_override=pillar_override,
pillarenv=pillarenv
)
ret = pillar.compile_pillar()
if pillar_override and isinstance(pillar_override, dict):
ret.update(pillar_override)
return ret
def call(name, function, *args, **kwargs):
'''
Executes a Salt function inside a running container
@ -6574,7 +6560,7 @@ def call(name, function, *args, **kwargs):
run_all(name, subprocess.list2cmdline(rm_thin_argv))
def sls(name, mods=None, saltenv='base', **kwargs):
def sls(name, mods=None, **kwargs):
'''
Apply the states defined by the specified SLS modules to the running
container
@ -6594,6 +6580,24 @@ def sls(name, mods=None, saltenv='base', **kwargs):
Specify the environment from which to retrieve the SLS indicated by the
`mods` parameter.
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
.. versionadded:: Oxygen
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. note::
Values passed this way will override Pillar values set via
``pillar_roots`` or an external Pillar source.
.. versionadded:: Oxygen
CLI Example:
.. code-block:: bash
@ -6603,13 +6607,30 @@ def sls(name, mods=None, saltenv='base', **kwargs):
'''
mods = [item.strip() for item in mods.split(',')] if mods else []
# Figure out the saltenv/pillarenv to use
pillar_override = kwargs.pop('pillar', None)
if 'saltenv' not in kwargs:
kwargs['saltenv'] = 'base'
sls_opts = __utils__['state.get_sls_opts'](__opts__, **kwargs)
# gather grains from the container
grains = call(name, 'grains.items')
# compile pillar with container grains
pillar = _gather_pillar(saltenv, {}, **grains)
pillar = salt.pillar.get_pillar(
__opts__,
grains,
__opts__['id'],
pillar_override=pillar_override,
pillarenv=sls_opts['pillarenv']).compile_pillar()
if pillar_override and isinstance(pillar_override, dict):
pillar.update(pillar_override)
trans_tar = _prepare_trans_tar(name, mods=mods, saltenv=saltenv, pillar=pillar)
trans_tar = _prepare_trans_tar(
name,
sls_opts,
mods=mods,
pillar=pillar)
# where to put the salt trans tar
trans_dest_path = _generate_tmp_path()
@ -6659,7 +6680,6 @@ def sls_build(repository,
tag='latest',
base='opensuse/python',
mods=None,
saltenv='base',
dryrun=False,
**kwargs):
'''
@ -6699,6 +6719,24 @@ def sls_build(repository,
Specify the environment from which to retrieve the SLS indicated by the
`mods` parameter.
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
.. versionadded:: Oxygen
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. note::
Values passed this way will override Pillar values set via
``pillar_roots`` or an external Pillar source.
.. versionadded:: Oxygen
dryrun: False
when set to True the container will not be commited at the end of
the build. The dryrun succeed also when the state contains errors.
@ -6742,7 +6780,7 @@ def sls_build(repository,
start_(id_)
# Now execute the state into the container
ret = sls(id_, mods, saltenv, **kwargs)
ret = sls(id_, mods, **kwargs)
# fail if the state was not successful
if not dryrun and not __utils__['state.check_result'](ret):
raise CommandExecutionError(ret)

View File

@ -1501,8 +1501,9 @@ def comment_line(path,
try:
# Open the file in write mode
mode = 'wb' if six.PY2 and salt.utils.platform.is_windows() else 'w'
with salt.utils.files.fopen(path,
mode='wb',
mode=mode,
buffering=bufsize) as w_file:
try:
# Open the temp file in read mode
@ -1523,7 +1524,10 @@ def comment_line(path,
else:
# Write the existing line (no change)
wline = line
w_file.write(salt.utils.stringutils.to_str(wline))
wline = salt.utils.stringutils.to_bytes(wline) \
if salt.utils.platform.is_windows() \
else salt.utils.stringutils.to_str(wline)
w_file.write(wline)
except (OSError, IOError) as exc:
raise CommandExecutionError(
"Unable to write file '{0}'. Contents may "

View File

@ -13,7 +13,6 @@ states themselves.
# Import python libs
from __future__ import absolute_import, print_function
import copy
import fnmatch
import json
import logging
@ -36,6 +35,7 @@ import salt.utils.functools
import salt.utils.hashutils
import salt.utils.jid
import salt.utils.platform
import salt.utils.state
import salt.utils.url
import salt.utils.versions
from salt.exceptions import CommandExecutionError, SaltInvocationError
@ -420,36 +420,6 @@ def _check_queue(queue, kwargs):
return conflict
def _get_opts(**kwargs):
'''
Return a copy of the opts for use, optionally load a local config on top
'''
opts = copy.deepcopy(__opts__)
if 'localconfig' in kwargs:
return salt.config.minion_config(kwargs['localconfig'], defaults=opts)
if 'saltenv' in kwargs:
saltenv = kwargs['saltenv']
if saltenv is not None:
if not isinstance(saltenv, six.string_types):
saltenv = six.text_type(saltenv)
if opts['lock_saltenv'] and saltenv != opts['saltenv']:
raise CommandExecutionError(
'lock_saltenv is enabled, saltenv cannot be changed'
)
opts['saltenv'] = kwargs['saltenv']
if 'pillarenv' in kwargs or opts.get('pillarenv_from_saltenv', False):
pillarenv = kwargs.get('pillarenv') or kwargs.get('saltenv')
if pillarenv is not None and not isinstance(pillarenv, six.string_types):
opts['pillarenv'] = str(pillarenv)
else:
opts['pillarenv'] = pillarenv
return opts
def _get_initial_pillar(opts):
return __pillar__ if __opts__['__cli'] == 'salt-call' \
and opts['pillarenv'] == __opts__['pillarenv'] \
@ -521,7 +491,7 @@ def high(data, test=None, queue=False, **kwargs):
conflict = _check_queue(queue, kwargs)
if conflict is not None:
return conflict
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
@ -573,7 +543,7 @@ def template(tem, queue=False, **kwargs):
if conflict is not None:
return conflict
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
try:
st_ = salt.state.HighState(opts,
context=__context__,
@ -618,7 +588,7 @@ def template_str(tem, queue=False, **kwargs):
if conflict is not None:
return conflict
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
try:
st_ = salt.state.State(opts,
@ -1022,7 +992,7 @@ def highstate(test=None, queue=False, **kwargs):
return conflict
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
@ -1228,7 +1198,7 @@ def sls(mods, test=None, exclude=None, queue=False, **kwargs):
return disabled
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
@ -1376,7 +1346,7 @@ def top(topfn, test=None, queue=False, **kwargs):
if conflict is not None:
return conflict
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
pillar_override = kwargs.get('pillar')
@ -1455,7 +1425,7 @@ def show_highstate(queue=False, **kwargs):
'is specified.'
)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
try:
st_ = salt.state.HighState(opts,
pillar_override,
@ -1497,7 +1467,7 @@ def show_lowstate(queue=False, **kwargs):
assert False
return conflict
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
try:
st_ = salt.state.HighState(opts,
proxy=__proxy__,
@ -1605,7 +1575,7 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs):
if conflict is not None:
return conflict
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
# Since this is running a specific ID within a specific SLS file, fall back
@ -1716,7 +1686,7 @@ def show_low_sls(mods, test=None, queue=False, **kwargs):
if conflict is not None:
return conflict
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
# Since this is dealing with a specific SLS file (or files), fall back to
@ -1802,7 +1772,7 @@ def show_sls(mods, test=None, queue=False, **kwargs):
if conflict is not None:
return conflict
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
@ -1873,7 +1843,7 @@ def show_top(queue=False, **kwargs):
if conflict is not None:
return conflict
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
try:
st_ = salt.state.HighState(opts,
proxy=__proxy__,
@ -1925,7 +1895,7 @@ def single(fun, name, test=None, queue=False, **kwargs):
'__id__': name,
'name': name})
orig_test = __opts__.get('test', None)
opts = _get_opts(**kwargs)
opts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
opts['test'] = _get_test_value(test, **kwargs)
pillar_override = kwargs.get('pillar')
@ -2008,7 +1978,7 @@ def pkg(pkg_path,
salt '*' state.pkg /tmp/salt_state.tgz 760a9353810e36f6d81416366fc426dc md5
'''
# TODO - Add ability to download from salt master or other source
popts = _get_opts(**kwargs)
popts = salt.utils.state.get_sls_opts(__opts__, **kwargs)
if not os.path.isfile(pkg_path):
return {}
if not salt.utils.hashutils.get_hash(pkg_path, hash_type) == pkg_sum:

View File

@ -280,7 +280,7 @@ def list_available(*names, **kwargs):
refresh = salt.utils.data.is_true(kwargs.get('refresh', False))
_refresh_db_conditional(saltenv, force=refresh)
return_dict_always = \
salt.utils.is_true(kwargs.get('return_dict_always', False))
salt.utils.data.is_true(kwargs.get('return_dict_always', False))
if len(names) == 1 and not return_dict_always:
pkginfo = _get_package_info(names[0], saltenv=saltenv)
if not pkginfo:

View File

@ -618,6 +618,7 @@ import salt.utils.event
import salt.utils.stringutils
import salt.utils.versions
from salt.ext import six
from salt.ext.six import BytesIO
# Import salt-api libs
import salt.netapi
@ -950,18 +951,6 @@ def urlencoded_processor(entity):
:param entity: raw POST data
'''
if six.PY3:
# https://github.com/cherrypy/cherrypy/pull/1572
contents = six.StringIO()
entity.fp.read(fp_out=contents)
contents.seek(0)
body_str = contents.read()
body_bytes = salt.utils.stringutils.to_bytes(body_str)
body_bytes = six.BytesIO(body_bytes)
body_bytes.seek(0)
# Patch fp
entity.fp = body_bytes
del contents
# First call out to CherryPy's default processor
cherrypy._cpreqbody.process_urlencoded(entity)
cherrypy._cpreqbody.process_urlencoded(entity)
@ -980,10 +969,10 @@ def json_processor(entity):
body = entity.fp.read()
else:
# https://github.com/cherrypy/cherrypy/pull/1572
contents = six.StringIO()
contents = BytesIO()
body = entity.fp.read(fp_out=contents)
contents.seek(0)
body = contents.read()
body = salt.utils.stringutils.to_unicode(contents.read())
del contents
try:
cherrypy.serving.request.unserialized_data = json.loads(body)
@ -1004,10 +993,10 @@ def yaml_processor(entity):
body = entity.fp.read()
else:
# https://github.com/cherrypy/cherrypy/pull/1572
contents = six.StringIO()
contents = BytesIO()
body = entity.fp.read(fp_out=contents)
contents.seek(0)
body = contents.read()
body = salt.utils.stringutils.to_unicode(contents.read())
try:
cherrypy.serving.request.unserialized_data = yaml.safe_load(body)
except ValueError:
@ -1030,10 +1019,10 @@ def text_processor(entity):
body = entity.fp.read()
else:
# https://github.com/cherrypy/cherrypy/pull/1572
contents = six.StringIO()
contents = BytesIO()
body = entity.fp.read(fp_out=contents)
contents.seek(0)
body = contents.read()
body = salt.utils.stringutils.to_unicode(contents.read())
try:
cherrypy.serving.request.unserialized_data = json.loads(body)
except ValueError:

63
salt/serializers/toml.py Normal file
View File

@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
'''
salt.serializers.toml
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Implements TOML serializer.
It's just a wrapper around pytoml module.
'''
from __future__ import absolute_import
# Import pytoml
try:
import pytoml as toml
available = True
except ImportError:
available = False
# Import Salt libs
from salt.serializers import DeserializationError, SerializationError
# Import 3rd-party libs
from salt.ext import six
__all__ = ['deserialize', 'serialize', 'available']
def deserialize(stream_or_string, **options):
    '''
    Deserialize from TOML into Python data structure.

    :param stream_or_string: toml stream or string to deserialize.
    :param options: options given to lower pytoml module.
    :raises DeserializationError: if pytoml fails to parse the input.
    '''
    try:
        # File-like objects go straight to pytoml's stream loader.
        if not isinstance(stream_or_string, (bytes, six.string_types)):
            return toml.load(stream_or_string, **options)

        if isinstance(stream_or_string, bytes):
            stream_or_string = stream_or_string.decode('utf-8')

        # Forward options for string input too, matching the stream branch
        # above (previously they were silently dropped here).
        return toml.loads(stream_or_string, **options)
    except Exception as error:
        raise DeserializationError(error)
def serialize(obj, **options):
    '''
    Serialize Python data to TOML.

    :param obj: the data structure to serialize.
    :param options: options given to lower pytoml module. A ``file_out``
        key, if present, is a writable stream to dump into (its return
        value mirrors ``toml.dump``); otherwise a TOML string is returned.
    :raises SerializationError: if pytoml fails to serialize the data.
    '''
    try:
        if 'file_out' in options:
            # Pop 'file_out' so it is not also forwarded as a keyword
            # argument: toml.dump(obj, fout, file_out=fout) would raise
            # TypeError (unexpected keyword argument).
            return toml.dump(obj, options.pop('file_out'), **options)
        else:
            return toml.dumps(obj, **options)
    except Exception as error:
        raise SerializationError(error)

310
salt/states/boto_s3.py Normal file
View File

@ -0,0 +1,310 @@
# -*- coding: utf-8 -*-
'''
Manage S3 Resources
=================
.. versionadded:: Oxygen
Manage S3 resources. Be aware that this interacts with Amazon's services,
and so may incur charges.
This module uses ``boto3``, which can be installed via package, or pip.
This module accepts explicit AWS credentials but can also utilize
IAM roles assigned to the instance through Instance Profiles. Dynamic
credentials are then automatically obtained from AWS API and no further
configuration is necessary. More information available `here
<http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_.
If IAM roles are not used you need to specify them either in a pillar file or
in the minion's config file:
.. code-block:: yaml
s3.keyid: GKTADJGHEIQSXMKKRBJ08H
s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
It's also possible to specify ``key``, ``keyid`` and ``region`` via a profile,
either passed in as a dict, or as a string to pull from pillars or minion
config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
.. code-block:: yaml
Ensure s3 object exists:
boto_s3.object_present:
- name: s3-bucket/s3-key
- source: /path/to/local/file
- region: us-east-1
- keyid: GKTADJGHEIQSXMKKRBJ08H
- key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
- profile: my-profile
:depends: boto3
'''
# Import Python Libs
from __future__ import absolute_import
import copy
import difflib
import logging
import yaml
# Import Salt libs
import salt.ext.six as six
import salt.utils.hashutils
log = logging.getLogger(__name__)
def __virtual__():
    '''
    Only load if boto is available.
    '''
    # The execution module only registers its functions when boto3 loaded.
    has_boto_s3 = 'boto_s3.get_object_metadata' in __salt__
    return 'boto_s3' if has_boto_s3 else False
# Keys for `extra_args` that we support.
# Currently, this excludes the `ACL` and `Grant*` keys.
# Most keys are stored by AWS and returned with the object metadata,
# so they can be compared against the desired state:
STORED_EXTRA_ARGS = frozenset([
    'CacheControl',
    'ContentDisposition',
    'ContentEncoding',
    'ContentLanguage',
    'ContentType',
    'Expires',
    'Metadata',
    'ServerSideEncryption',
    'SSECustomerAlgorithm',
    'SSECustomerKeyMD5',
    'SSEKMSKeyId',
    'StorageClass',
    'WebsiteRedirectLocation',
])

# However, some keys are only specified on upload,
# but won't be stored/returned by AWS as metadata:
UPLOAD_ONLY_EXTRA_ARGS = frozenset([
    # AWS doesn't store customer provided keys,
    # can use SSECustomerKeyMD5 to check for correct key
    'SSECustomerKey',
    'RequestPayer',
])

# Some extra args must also be passed along to retrieve metadata,
# namely SSE-C (customer-provided encryption) and RequestPayer args.
GET_METADATA_EXTRA_ARGS = frozenset([
    'SSECustomerAlgorithm',
    'SSECustomerKey',
    'SSECustomerKeyMD5',
    'RequestPayer',
])
def object_present(
    name,
    source=None,
    hash_type=None,
    extra_args=None,
    extra_args_from_pillar='boto_s3_object_extra_args',
    region=None,
    key=None,
    keyid=None,
    profile=None,
):
    '''
    Ensure object exists in S3.

    name
        The name of the state definition.
        This will be used to determine the location of the object in S3,
        by splitting on the first slash and using the first part
        as the bucket name and the remainder as the S3 key.

    source
        The source file to upload to S3,
        currently this only supports files hosted on the minion's local
        file system (starting with /).

    hash_type
        Hash algorithm to use to check that the object contents are correct.
        Defaults to the value of the `hash_type` config option.

    extra_args
        A dictionary of extra arguments to use when uploading the file.
        Note that these are only enforced if new objects are uploaded,
        and not modified on existing objects.
        The supported args are those in the ALLOWED_UPLOAD_ARGS list at
        http://boto3.readthedocs.io/en/latest/reference/customizations/s3.html.
        However, Note that the 'ACL', 'GrantFullControl', 'GrantRead',
        'GrantReadACP', and 'GrantWriteACL' keys are currently not supported.

    extra_args_from_pillar
        Name of pillar dict that contains extra arguments.
        Extra arguments defined for this specific state will be
        merged over those from the pillar.

    region
        Region to connect to.

    key
        Secret key to be used.

    keyid
        Access key to be used.

    profile
        A dict with region, key and keyid, or a pillar key (string) that
        contains a dict with region, key and keyid.
    '''
    ret = {
        'name': name,
        'comment': '',
        'changes': {},
    }

    if extra_args is None:
        extra_args = {}
    # Pillar-provided args are the base; state-level extra_args are merged
    # over them and win on conflict.
    combined_extra_args = copy.deepcopy(
        __salt__['config.option'](extra_args_from_pillar, {})
    )
    __utils__['dictupdate.update'](combined_extra_args, extra_args)
    if combined_extra_args:
        supported_args = STORED_EXTRA_ARGS | UPLOAD_ONLY_EXTRA_ARGS
        combined_extra_args_keys = frozenset(six.iterkeys(combined_extra_args))
        extra_keys = combined_extra_args_keys - supported_args
        if extra_keys:
            msg = 'extra_args keys {0} are not supported'.format(extra_keys)
            # NOTE(review): returns a bare {'error': ...} rather than the
            # usual state return dict with result=False — confirm callers
            # of this state handle that shape.
            return {'error': msg}

    # Get the hash of the local file
    if not hash_type:
        hash_type = __opts__['hash_type']
    try:
        digest = salt.utils.hashutils.get_hash(source, form=hash_type)
    except IOError as e:
        ret['result'] = False
        ret['comment'] = "Could not read local file {0}: {1}".format(
            source,
            e,
        )
        return ret
    except ValueError as e:
        # Invalid hash type exception from get_hash
        ret['result'] = False
        ret['comment'] = 'Could not hash local file {0}: {1}'.format(
            source,
            e,
        )
        return ret

    # The content hash is stored as object metadata so that a single
    # metadata comparison below covers both content and metadata drift.
    HASH_METADATA_KEY = 'salt_managed_content_hash'
    combined_extra_args.setdefault('Metadata', {})
    if HASH_METADATA_KEY in combined_extra_args['Metadata']:
        # Be lenient, silently allow hash metadata key if digest value matches
        if combined_extra_args['Metadata'][HASH_METADATA_KEY] != digest:
            ret['result'] = False
            ret['comment'] = (
                'Salt uses the {0} metadata key internally,'
                'do not pass it to the boto_s3.object_present state.'
            ).format(HASH_METADATA_KEY)
            return ret
    combined_extra_args['Metadata'][HASH_METADATA_KEY] = digest
    # Remove upload-only keys from full set of extra_args
    # to create desired dict for comparisons
    desired_metadata = dict(
        (k, v) for k, v in six.iteritems(combined_extra_args)
        if k not in UPLOAD_ONLY_EXTRA_ARGS
    )

    # Some args (SSE-C, RequestPayer) must also be passed to get_metadata
    metadata_extra_args = dict(
        (k, v) for k, v in six.iteritems(combined_extra_args)
        if k in GET_METADATA_EXTRA_ARGS
    )
    r = __salt__['boto_s3.get_object_metadata'](
        name,
        extra_args=metadata_extra_args,
        region=region,
        key=key,
        keyid=keyid,
        profile=profile,
    )
    if 'error' in r:
        ret['result'] = False
        ret['comment'] = 'Failed to check if S3 object exists: {0}.'.format(
            r['error'],
        )
        return ret

    if r['result']:
        # Check if content and metadata match
        # A hash of the content is injected into the metadata,
        # so we can combine both checks into one
        # Only check metadata keys specified by the user,
        # ignore other fields that have been set
        s3_metadata = dict(
            (k, r['result'][k]) for k in STORED_EXTRA_ARGS
            if k in desired_metadata and k in r['result']
        )
        if s3_metadata == desired_metadata:
            ret['result'] = True
            ret['comment'] = 'S3 object {0} is present.'.format(name)
            return ret
        action = 'update'
    else:
        s3_metadata = None
        action = 'create'

    def _yaml_safe_dump(attrs):
        '''Safely dump YAML using a readable flow style'''
        dumper_name = 'IndentedSafeOrderedDumper'
        dumper = __utils__['yamldumper.get_dumper'](dumper_name)
        return yaml.dump(
            attrs,
            default_flow_style=False,
            Dumper=dumper,
        )

    # Human-readable diff of current vs desired metadata for the comment
    # and changes output.
    changes_diff = ''.join(difflib.unified_diff(
        _yaml_safe_dump(s3_metadata).splitlines(True),
        _yaml_safe_dump(desired_metadata).splitlines(True),
    ))

    if __opts__['test']:
        # Dry run: report the pending action without uploading.
        ret['result'] = None
        ret['comment'] = 'S3 object {0} set to be {1}d.'.format(name, action)
        ret['comment'] += '\nChanges:\n{0}'.format(changes_diff)
        ret['pchanges'] = {'diff': changes_diff}
        return ret

    r = __salt__['boto_s3.upload_file'](
        source,
        name,
        extra_args=combined_extra_args,
        region=region,
        key=key,
        keyid=keyid,
        profile=profile,
    )

    if 'error' in r:
        ret['result'] = False
        ret['comment'] = 'Failed to {0} S3 object: {1}.'.format(
            action,
            r['error'],
        )
        return ret

    ret['result'] = True
    ret['comment'] = 'S3 object {0} {1}d.'.format(name, action)
    ret['comment'] += '\nChanges:\n{0}'.format(changes_diff)
    ret['changes'] = {'diff': changes_diff}
    return ret

View File

@ -369,7 +369,7 @@ def upgraded(name,
if version:
# If installed version and new version are the same
if salt.utils.compare_versions(
if salt.utils.versions.compare(
ver1=installed_version,
oper="==",
ver2=version):
@ -383,7 +383,7 @@ def upgraded(name,
''.format(name, installed_version)
else:
# If installed version is older than new version
if salt.utils.compare_versions(
if salt.utils.versions.compare(
ver1=installed_version, oper="<", ver2=version):
ret['pchanges'] = {
name: 'Version {0} will be upgraded to Version {1} '
@ -429,6 +429,6 @@ def upgraded(name,
# Get list of installed packages after 'chocolatey.install'
post_install = __salt__['chocolatey.list'](local_only=True)
ret['changes'] = salt.utils.compare_dicts(pre_install, post_install)
ret['changes'] = salt.utils.data.compare_dicts(pre_install, post_install)
return ret

View File

@ -2253,6 +2253,7 @@ def run(name,
force=force,
**kwargs)
except Exception as exc:
log.exception('Encountered error running container')
ret['result'] = False
ret['comment'] = 'Encountered error running container: {0}'.format(exc)
else:

View File

@ -73,6 +73,8 @@ def present(name,
sls=None,
base='opensuse/python',
saltenv='base',
pillarenv=None,
pillar=None,
**kwargs):
'''
.. versionchanged:: Oxygen
@ -178,13 +180,34 @@ def present(name,
Base image with which to start :py:func:`docker.sls_build
<salt.modules.dockermod.sls_build>`
.. versionadded: 2017.7.0
.. versionadded:: 2017.7.0
saltenv
Environment from which to pull SLS files for :py:func:`docker.sls_build
<salt.modules.dockermod.sls_build>`
Specify the environment from which to retrieve the SLS indicated by the
`mods` parameter.
.. versionadded: 2017.7.0
.. versionadded:: 2017.7.0
.. versionchanged:: Oxygen
Now uses the effective saltenv if not explicitly passed. In earlier
versions, ``base`` was assumed as a default.
pillarenv
Specify a Pillar environment to be used when applying states. This
can also be set in the minion config file using the
:conf_minion:`pillarenv` option. When neither the
:conf_minion:`pillarenv` minion config option nor this CLI argument is
used, all Pillar environments will be merged together.
.. versionadded:: Oxygen
pillar
Custom Pillar values, passed as a dictionary of key-value pairs
.. note::
Values passed this way will override Pillar values set via
``pillar_roots`` or an external Pillar source.
.. versionadded:: Oxygen
'''
ret = {'name': name,
'changes': {},
@ -192,7 +215,7 @@ def present(name,
'comment': ''}
if not isinstance(name, six.string_types):
name = six.string_types(name)
name = six.text_type(name)
# At most one of the args that result in an image being built can be used
num_build_args = len([x for x in (build, load, sls) if x is not None])
@ -209,7 +232,7 @@ def present(name,
)
return ret
if not isinstance(tag, six.string_types):
tag = six.string_types(tag)
tag = six.text_type(tag)
full_image = ':'.join((name, tag))
else:
if tag:
@ -258,7 +281,8 @@ def present(name,
try:
# map values passed from the state to the build args
build_args['path'] = build
build_args['image'] = full_image
build_args['repository'] = name
build_args['tag'] = tag
build_args['dockerfile'] = dockerfile
image_update = __salt__['docker.build'](**build_args)
except Exception as exc:
@ -272,12 +296,15 @@ def present(name,
ret['changes'] = image_update
elif sls:
_locals = locals()
sls_build_kwargs = {k: _locals[k] for k in ('saltenv', 'pillarenv', 'pillar')
if _locals[k] is not None}
try:
image_update = __salt__['docker.sls_build'](repository=name,
tag=tag,
base=base,
mods=sls,
saltenv=saltenv)
**sls_build_kwargs)
except Exception as exc:
ret['comment'] = (
'Encountered error using SLS {0} for building {1}: {2}'
@ -325,7 +352,7 @@ def present(name,
try:
__salt__['docker.inspect_image'](full_image)
error = False
except CommandExecutionError:
except CommandExecutionError as exc:
msg = exc.__str__()
if '404' not in msg:
error = 'Failed to inspect image \'{0}\' after it was {1}: {2}'.format(

View File

@ -7,9 +7,11 @@ Utility functions for state functions
# Import Python Libs
from __future__ import absolute_import
import copy
# Import Salt libs
from salt.ext import six
from salt.exceptions import CommandExecutionError
import salt.state
_empty = object()
@ -215,3 +217,33 @@ def merge_subreturn(original_return, sub_return, subkey=None):
original_return['pchanges'][subkey] = sub_return['pchanges']
return original_return
def get_sls_opts(opts, **kwargs):
    '''
    Return a copy of the opts for use, optionally load a local config on top

    :param opts: the minion opts dict; it is deep-copied, never mutated.
    :param kwargs: may contain ``localconfig`` (path to a minion config to
        layer on top), ``saltenv`` and ``pillarenv`` overrides.
    :raises CommandExecutionError: if ``lock_saltenv`` is set and the
        requested saltenv differs from the configured one.
    '''
    opts = copy.deepcopy(opts)

    if 'localconfig' in kwargs:
        return salt.config.minion_config(kwargs['localconfig'], defaults=opts)

    if 'saltenv' in kwargs:
        saltenv = kwargs['saltenv']
        if saltenv is not None:
            if not isinstance(saltenv, six.string_types):
                saltenv = six.text_type(saltenv)
            if opts['lock_saltenv'] and saltenv != opts['saltenv']:
                raise CommandExecutionError(
                    'lock_saltenv is enabled, saltenv cannot be changed'
                )
        # Store the caller-provided value; the conversion above is only
        # used for the lock_saltenv comparison.
        opts['saltenv'] = kwargs['saltenv']

    if 'pillarenv' in kwargs or opts.get('pillarenv_from_saltenv', False):
        pillarenv = kwargs.get('pillarenv') or kwargs.get('saltenv')
        if pillarenv is not None and not isinstance(pillarenv, six.string_types):
            # Use six.text_type for parity with the saltenv handling above;
            # str() would produce bytes instead of unicode on Python 2.
            opts['pillarenv'] = six.text_type(pillarenv)
        else:
            opts['pillarenv'] = pillarenv

    return opts

View File

@ -36,7 +36,7 @@ class GCETest(ShellCase):
provider = 'gce'
providers = self.run_cloud('--list-providers')
# Create the cloud instance name to be used throughout the tests
self.INSTANCE_NAME = generate_random_name('CLOUD-TEST-')
self.INSTANCE_NAME = generate_random_name('cloud-test-')
if profile_str not in providers:
self.skipTest(

View File

@ -29,7 +29,7 @@ setup_test_virtualenv:
carbon-weird-setup:
pip.installed:
- name: carbon
- name: 'carbon < 1.1'
- no_deps: True
- bin_env: {{ virtualenv_test }}
- onchanges:

View File

@ -161,19 +161,19 @@ class PipStateTest(ModuleCase, SaltReturnAssertsMixin):
# some of the state return parts
for key in six.iterkeys(ret):
self.assertTrue(ret[key]['result'])
if ret[key]['name'] != 'carbon':
if ret[key]['name'] != 'carbon < 1.1':
continue
self.assertEqual(
ret[key]['comment'],
'There was no error installing package \'carbon\' '
'There was no error installing package \'carbon < 1.1\' '
'although it does not show when calling \'pip.freeze\'.'
)
break
else:
raise Exception('Expected state did not run')
finally:
if os.path.isdir('/opt/graphite'):
shutil.rmtree('/opt/graphite')
if os.path.isdir(ographite):
shutil.rmtree(ographite)
def test_issue_2028_pip_installed_state(self):
ret = self.run_function('state.sls', mods='issue-2028-pip-installed')

View File

@ -16,6 +16,17 @@ if os.name == 'nt':
TESTS_DIR = TESTS_DIR.replace('\\', '\\\\')
CODE_DIR = os.path.dirname(TESTS_DIR)
if sys.version_info.major == 3:
# Clean up any Python 2 byte-compiled files, as they will cause problems
# when Python 3 tries to import modules.
for root, _, files in os.walk(os.path.dirname(TESTS_DIR)):
if os.path.basename(root) == '__pycache__':
# Ignore byte-compiled files in Python 3 __pycache__ dirs
continue
for filename in files:
if filename.endswith('.pyc'):
os.remove(os.path.join(root, filename))
# Let's inject CODE_DIR so salt is importable if not there already
if '' in sys.path:
sys.path.remove('')

View File

@ -824,6 +824,8 @@ class SSHCase(ShellCase):
'''
ret = self.run_ssh(self._arg_str(function, arg), timeout=timeout,
wipe=wipe, raw=raw)
log.debug('SSHCase run_function executed %s with arg %s', function, arg)
log.debug('SSHCase JSON return: %s', ret)
try:
return json.loads(ret)['localhost']
except Exception:

View File

@ -38,9 +38,11 @@ from tests.support.case import TestCase
# pylint: disable=import-error
import cherrypy # pylint: disable=3rd-party-module-not-gated
from salt.ext import six
from salt.ext.six.moves import StringIO
from salt.ext.six import BytesIO
# pylint: enable=import-error
import salt.utils.stringutils
# Not strictly speaking mandatory but just makes sense
cherrypy.config.update({'environment': "test_suite"})
@ -92,7 +94,7 @@ class BaseCherryPyTestCase(TestCase):
fd = None
if body is not None:
h['content-length'] = '{0}'.format(len(body))
fd = StringIO(body)
fd = BytesIO(salt.utils.stringutils.to_bytes(body))
if headers is not None:
h.update(headers)

View File

@ -437,7 +437,12 @@ class ForceImportErrorOn(object):
def restore_import_funtion(self):
self.patcher.stop()
def __fake_import__(self, name, globals_, locals_, fromlist, level=-1):
def __fake_import__(self,
name,
globals_={} if six.PY2 else None,
locals_={} if six.PY2 else None,
fromlist=[] if six.PY2 else (),
level=-1 if six.PY2 else 0):
if name in self.__module_names:
importerror_fromlist = self.__module_names.get(name)
if importerror_fromlist is None:
@ -453,7 +458,6 @@ class ForceImportErrorOn(object):
)
)
)
return self.__original_import(name, globals_, locals_, fromlist, level)
def __enter__(self):

View File

@ -24,10 +24,14 @@ import salt.grains.core as core
# Import 3rd-party libs
from salt.ext import six
if six.PY3:
import ipaddress
else:
import salt.ext.ipaddress as ipaddress
# Globals
IPv4Address = salt.ext.ipaddress.IPv4Address
IPv6Address = salt.ext.ipaddress.IPv6Address
IPv4Address = ipaddress.IPv4Address
IPv6Address = ipaddress.IPv6Address
IP4_LOCAL = '127.0.0.1'
IP4_ADD1 = '10.0.0.1'
IP4_ADD2 = '10.0.0.2'

View File

@ -49,7 +49,7 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
)
# Force the LazyDict to populate its references. Otherwise the lookup
# will fail inside the unit tests.
utils.keys()
list(utils)
return {docker_mod: {'__context__': {'docker.docker_version': ''},
'__utils__': utils}}
@ -565,7 +565,7 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
cmd='sleep infinity',
image='opensuse/python', interactive=True, tty=True)
docker_start_mock.assert_called_once_with('ID')
docker_sls_mock.assert_called_once_with('ID', 'foo', 'base')
docker_sls_mock.assert_called_once_with('ID', 'foo')
docker_stop_mock.assert_called_once_with('ID')
docker_rm_mock.assert_called_once_with('ID')
docker_commit_mock.assert_called_once_with('ID', 'foo', tag='latest')
@ -619,7 +619,7 @@ class DockerTestCase(TestCase, LoaderModuleMockMixin):
cmd='sleep infinity',
image='opensuse/python', interactive=True, tty=True)
docker_start_mock.assert_called_once_with('ID')
docker_sls_mock.assert_called_once_with('ID', 'foo', 'base')
docker_sls_mock.assert_called_once_with('ID', 'foo')
docker_stop_mock.assert_called_once_with('ID')
docker_rm_mock.assert_called_once_with('ID')
self.assertEqual(

View File

@ -567,8 +567,8 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
newlines at end of file.
'''
# File ending with a newline
with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile:
tfile.write(salt.utils.stringutils.to_bytes('foo' + os.linesep))
with tempfile.NamedTemporaryFile(mode='w', delete=False) as tfile:
tfile.write(salt.utils.stringutils.to_str('foo' + os.linesep))
tfile.flush()
filemod.append(tfile.name, 'bar')
expected = os.linesep.join(['foo', 'bar']) + os.linesep
@ -577,8 +577,8 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
salt.utils.stringutils.to_unicode(tfile2.read()), expected)
# File not ending with a newline
with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile:
tfile.write(salt.utils.stringutils.to_bytes('foo'))
with tempfile.NamedTemporaryFile(mode='w', delete=False) as tfile:
tfile.write(salt.utils.stringutils.to_str('foo'))
tfile.flush()
filemod.append(tfile.name, 'bar')
with salt.utils.files.fopen(tfile.name) as tfile2:
@ -586,8 +586,8 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
salt.utils.stringutils.to_unicode(tfile2.read()), expected)
# A newline should be added in empty files
with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile:
filemod.append(tfile.name, salt.utils.stringutils.to_bytes('bar'))
with tempfile.NamedTemporaryFile(mode='w', delete=False) as tfile:
filemod.append(tfile.name, salt.utils.stringutils.to_str('bar'))
with salt.utils.files.fopen(tfile.name) as tfile2:
self.assertEqual(
salt.utils.stringutils.to_unicode(tfile2.read()),

View File

@ -24,6 +24,7 @@ import salt.loader
import salt.utils.hashutils
import salt.utils.odict
import salt.utils.platform
import salt.utils.state
import salt.modules.state as state
from salt.exceptions import CommandExecutionError, SaltInvocationError
import salt.modules.config as config
@ -423,7 +424,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
self.assertFalse(state.high({"vim": {"pkg": ["installed"]}}))
mock = MagicMock(return_value={"test": True})
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
self.assertTrue(state.high({"vim": {"pkg": ["installed"]}}))
def test_template(self):
@ -539,7 +540,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(state.__opts__, {"test": "install"}):
mock = MagicMock(return_value={"test": ""})
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
mock = MagicMock(return_value=True)
with patch.object(salt.utils, 'test_mode', mock):
self.assertRaises(SaltInvocationError,
@ -643,7 +644,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
return_value={'test': True,
'saltenv': None}
)
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
mock = MagicMock(return_value=True)
with patch.object(salt.utils, 'test_mode', mock):
MockState.State.flag = True
@ -670,7 +671,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
return_value={'test': True,
'saltenv': None}
)
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
MockState.State.flag = True
MockState.HighState.flag = True
self.assertEqual(state.show_low_sls("foo"), 2)
@ -692,7 +693,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
return_value={'test': True,
'saltenv': None}
)
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
mock = MagicMock(return_value=True)
with patch.object(salt.utils, 'test_mode', mock):
self.assertRaises(SaltInvocationError,
@ -723,7 +724,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(state.__opts__, {"test": "A"}):
mock = MagicMock(return_value={'test': True})
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
mock = MagicMock(return_value=True)
with patch.object(salt.utils, 'test_mode', mock):
self.assertRaises(SaltInvocationError,
@ -764,7 +765,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(state.__opts__, {"test": "A"}):
mock = MagicMock(return_value={'test': True})
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
self.assertRaises(SaltInvocationError,
state.highstate,
"whitelist=sls1.sls",
@ -887,7 +888,7 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(state.__opts__, {"test": None}):
mock = MagicMock(return_value={"test": "",
"saltenv": None})
with patch.object(state, '_get_opts', mock):
with patch.object(salt.utils.state, 'get_sls_opts', mock):
mock = MagicMock(return_value=True)
with patch.object(salt.utils,
'test_mode',

View File

@ -18,6 +18,7 @@ import salt.serializers.yaml as yaml
import salt.serializers.yamlex as yamlex
import salt.serializers.msgpack as msgpack
import salt.serializers.python as python
import salt.serializers.toml as toml
from salt.serializers.yaml import EncryptedString
from salt.serializers import SerializationError
from salt.utils.odict import OrderedDict
@ -349,3 +350,14 @@ class TestSerializers(TestCase):
deserialized = configparser.deserialize(serialized)
assert deserialized == data, deserialized
@skipIf(not toml.available, SKIP_MESSAGE % 'toml')
def test_serialize_toml(self):
    '''
    Round-trip a simple mapping through the TOML serializer.
    '''
    payload = {
        "foo": "bar"
    }
    encoded = toml.serialize(payload)
    assert encoded == 'foo = "bar"\n', encoded
    decoded = toml.deserialize(encoded)
    assert decoded == payload, decoded

View File

@ -315,7 +315,7 @@ class TestGetTemplate(TestCase):
'file_roots': self.local_opts['file_roots'],
'pillar_roots': self.local_opts['pillar_roots']},
a='Hi', b='Sàlt', saltenv='test', salt=self.local_salt))
self.assertEqual(u'Assunção' + os.linesep, out)
self.assertEqual('Assunção' + os.linesep, out)
self.assertEqual(fc.requests[0]['path'], 'salt://macro')
@skipIf(HAS_TIMELIB is False, 'The `timelib` library is not installed.')
@ -424,7 +424,7 @@ class TestGetTemplate(TestCase):
@skipIf(NO_MOCK, NO_MOCK_REASON)
def test_render_with_unicode_syntax_error(self):
with patch.object(builtins, '__salt_system_encoding__', 'utf-8'):
template = u'hello\n\n{{ bad\n\nfoo\ud55c'
template = 'hello\n\n{{ bad\n\nfoo\ud55c'
expected = r'.*---\nhello\n\n{{ bad\n\nfoo\xed\x95\x9c <======================\n---'
self.assertRaisesRegex(
SaltRenderError,
@ -472,7 +472,7 @@ class TestGetTemplate(TestCase):
)
def test_render_with_undefined_variable_unicode(self):
template = u"hello\ud55c\n\n{{ foo }}\n\nfoo"
template = "hello\ud55c\n\n{{ foo }}\n\nfoo"
expected = r'Jinja variable \'foo\' is undefined'
self.assertRaisesRegex(
SaltRenderError,
@ -528,7 +528,7 @@ class TestJinjaDefaultOptions(TestCase):
"""
rendered = render_jinja_tmpl(template,
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'onetwothree')
self.assertEqual(rendered, 'onetwothree')
def test_statement_prefix(self):
@ -541,7 +541,7 @@ class TestJinjaDefaultOptions(TestCase):
"""
rendered = render_jinja_tmpl(template,
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'onetwothree')
self.assertEqual(rendered, 'onetwothree')
class TestCustomExtensions(TestCase):
@ -590,7 +590,7 @@ class TestCustomExtensions(TestCase):
self.assertEqual(sorted(rendered), sorted(list(unique)))
else:
rendered = env.from_string('{{ dataset|unique }}').render(dataset=dataset)
self.assertEqual(rendered, u"{0}".format(unique))
self.assertEqual(rendered, "{0}".format(unique))
def test_unique_tuple(self):
dataset = ('foo', 'foo', 'bar')
@ -602,7 +602,7 @@ class TestCustomExtensions(TestCase):
self.assertEqual(sorted(rendered), sorted(list(unique)))
else:
rendered = env.from_string('{{ dataset|unique }}').render(dataset=dataset)
self.assertEqual(rendered, u"{0}".format(unique))
self.assertEqual(rendered, "{0}".format(unique))
def test_unique_list(self):
dataset = ['foo', 'foo', 'bar']
@ -614,7 +614,7 @@ class TestCustomExtensions(TestCase):
self.assertEqual(rendered, unique)
else:
rendered = env.from_string('{{ dataset|unique }}').render(dataset=dataset)
self.assertEqual(rendered, u"{0}".format(unique))
self.assertEqual(rendered, "{0}".format(unique))
def test_serialize_json(self):
dataset = {
@ -652,7 +652,7 @@ class TestCustomExtensions(TestCase):
self.assertEqual(dataset, rendered)
def test_serialize_yaml_unicode(self):
dataset = u"str value"
dataset = "str value"
env = Environment(extensions=[SerializerExtension])
rendered = env.from_string('{{ dataset|yaml }}').render(dataset=dataset)
if six.PY3:
@ -681,11 +681,11 @@ class TestCustomExtensions(TestCase):
def test_load_yaml(self):
env = Environment(extensions=[SerializerExtension])
rendered = env.from_string('{% set document = "{foo: it works}"|load_yaml %}{{ document.foo }}').render()
self.assertEqual(rendered, u"it works")
self.assertEqual(rendered, "it works")
rendered = env.from_string('{% set document = document|load_yaml %}'
'{{ document.foo }}').render(document="{foo: it works}")
self.assertEqual(rendered, u"it works")
self.assertEqual(rendered, "it works")
with self.assertRaises(exceptions.TemplateRuntimeError):
env.from_string('{% set document = document|load_yaml %}'
@ -699,13 +699,13 @@ class TestCustomExtensions(TestCase):
'{{ docu.foo }}'
rendered = env.from_string(source).render(bar="barred")
self.assertEqual(rendered, u"barred, it works")
self.assertEqual(rendered, "barred, it works")
source = '{{ bar }}, {% load_json as docu %}{"foo": "it works", "{{ bar }}": "baz"}{% endload %}' + \
'{{ docu.foo }}'
rendered = env.from_string(source).render(bar="barred")
self.assertEqual(rendered, u"barred, it works")
self.assertEqual(rendered, "barred, it works")
with self.assertRaises(exceptions.TemplateSyntaxError):
env.from_string('{% load_yamle as document %}{foo, bar: it works}{% endload %}').render()
@ -717,11 +717,11 @@ class TestCustomExtensions(TestCase):
env = Environment(extensions=[SerializerExtension])
rendered = env.from_string('{% set document = \'{"foo": "it works"}\'|load_json %}'
'{{ document.foo }}').render()
self.assertEqual(rendered, u"it works")
self.assertEqual(rendered, "it works")
rendered = env.from_string('{% set document = document|load_json %}'
'{{ document.foo }}').render(document='{"foo": "it works"}')
self.assertEqual(rendered, u"it works")
self.assertEqual(rendered, "it works")
# bad quotes
with self.assertRaises(exceptions.TemplateRuntimeError):
@ -735,7 +735,7 @@ class TestCustomExtensions(TestCase):
loader = DictLoader({'foo': '{bar: "my god is blue", foo: [1, 2, 3]}'})
env = Environment(extensions=[SerializerExtension], loader=loader)
rendered = env.from_string('{% import_yaml "foo" as doc %}{{ doc.bar }}').render()
self.assertEqual(rendered, u"my god is blue")
self.assertEqual(rendered, "my god is blue")
with self.assertRaises(exceptions.TemplateNotFound):
env.from_string('{% import_yaml "does not exists" as doc %}').render()
@ -744,7 +744,7 @@ class TestCustomExtensions(TestCase):
loader = DictLoader({'foo': '{"bar": "my god is blue", "foo": [1, 2, 3]}'})
env = Environment(extensions=[SerializerExtension], loader=loader)
rendered = env.from_string('{% import_json "foo" as doc %}{{ doc.bar }}').render()
self.assertEqual(rendered, u"my god is blue")
self.assertEqual(rendered, "my god is blue")
with self.assertRaises(exceptions.TemplateNotFound):
env.from_string('{% import_json "does not exists" as doc %}').render()
@ -754,7 +754,7 @@ class TestCustomExtensions(TestCase):
env = Environment(extensions=[SerializerExtension], loader=loader)
rendered = env.from_string('{% import_text "foo" as doc %}{{ doc }}').render()
self.assertEqual(rendered, u"Foo!")
self.assertEqual(rendered, "Foo!")
with self.assertRaises(exceptions.TemplateNotFound):
env.from_string('{% import_text "does not exists" as doc %}').render()
@ -791,22 +791,22 @@ class TestCustomExtensions(TestCase):
env = Environment(extensions=[SerializerExtension], loader=loader)
rendered = env.get_template('main1').render()
self.assertEqual(rendered, u"my god is blue")
self.assertEqual(rendered, "my god is blue")
rendered = env.get_template('main2').render()
self.assertEqual(rendered, u"it works")
self.assertEqual(rendered, "it works")
rendered = env.get_template('main3').render().strip()
self.assertEqual(rendered, u"my god is blue")
self.assertEqual(rendered, "my god is blue")
rendered = env.get_template('main4').render().strip()
self.assertEqual(rendered, u"it works")
self.assertEqual(rendered, "it works")
rendered = env.get_template('main5').render().strip()
self.assertEqual(rendered, u"my god is blue")
self.assertEqual(rendered, "my god is blue")
rendered = env.get_template('main6').render().strip()
self.assertEqual(rendered, u"it works")
self.assertEqual(rendered, "it works")
def test_nested_structures(self):
env = Environment(extensions=[SerializerExtension])
@ -822,7 +822,11 @@ class TestCustomExtensions(TestCase):
])
rendered = env.from_string('{{ data }}').render(data=data)
self.assertEqual(rendered, u"{u'foo': {u'bar': u'baz', u'qux': 42}}")
self.assertEqual(
rendered,
"{u'foo': {u'bar': u'baz', u'qux': 42}}" if six.PY2
else "{'foo': {'bar': 'baz', 'qux': 42}}"
)
rendered = env.from_string('{{ data }}').render(data=[
OrderedDict(
@ -832,7 +836,11 @@ class TestCustomExtensions(TestCase):
baz=42,
)
])
self.assertEqual(rendered, u"[{'foo': u'bar'}, {'baz': 42}]")
self.assertEqual(
rendered,
"[{'foo': u'bar'}, {'baz': 42}]" if six.PY2
else "[{'foo': 'bar'}, {'baz': 42}]"
)
def test_sequence(self):
env = Environment()
@ -862,135 +870,135 @@ class TestCustomExtensions(TestCase):
'''Test the `is_ip` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1' | is_ip }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
rendered = render_jinja_tmpl("{{ 'FE80::' | is_ip }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
rendered = render_jinja_tmpl("{{ 'random' | is_ip }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'False')
self.assertEqual(rendered, 'False')
def test_is_ipv4(self):
'''Test the `is_ipv4` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1' | is_ipv4 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
rendered = render_jinja_tmpl("{{ 'FE80::' | is_ipv4 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'False')
self.assertEqual(rendered, 'False')
rendered = render_jinja_tmpl("{{ 'random' | is_ipv4 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'False')
self.assertEqual(rendered, 'False')
def test_is_ipv6(self):
'''Test the `is_ipv6` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1' | is_ipv6 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'False')
self.assertEqual(rendered, 'False')
rendered = render_jinja_tmpl("{{ 'FE80::' | is_ipv6 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
rendered = render_jinja_tmpl("{{ 'random' | is_ipv6 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'False')
self.assertEqual(rendered, 'False')
def test_ipaddr(self):
'''Test the `ipaddr` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1' | ipaddr }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'192.168.0.1')
self.assertEqual(rendered, '192.168.0.1')
# provides a list with valid IP addresses only
rendered = render_jinja_tmpl("{{ ['192.168.0.1', '172.17.17.1', 'foo', 'bar', '::'] | ipaddr | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'192.168.0.1, 172.17.17.1, ::')
self.assertEqual(rendered, '192.168.0.1, 172.17.17.1, ::')
# return only multicast addresses
rendered = render_jinja_tmpl("{{ ['224.0.0.1', 'FF01::1', '::'] | ipaddr(options='multicast') | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'224.0.0.1, ff01::1')
self.assertEqual(rendered, '224.0.0.1, ff01::1')
def test_ipv4(self):
'''Test the `ipv4` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1' | ipv4 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'192.168.0.1')
self.assertEqual(rendered, '192.168.0.1')
rendered = render_jinja_tmpl("{{ ['192.168.0.1', '172.17.17.1'] | ipv4 | join(', ')}}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'192.168.0.1, 172.17.17.1')
self.assertEqual(rendered, '192.168.0.1, 172.17.17.1')
rendered = render_jinja_tmpl("{{ 'fe80::' | ipv4 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'None')
self.assertEqual(rendered, 'None')
rendered = render_jinja_tmpl("{{ 'random' | ipv4 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'None')
self.assertEqual(rendered, 'None')
rendered = render_jinja_tmpl("{{ '192.168.0.1' | ipv4(options='lo') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'None')
self.assertEqual(rendered, 'None')
rendered = render_jinja_tmpl("{{ '127.0.0.1' | ipv4(options='lo') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'127.0.0.1')
self.assertEqual(rendered, '127.0.0.1')
def test_ipv6(self):
'''Test the `ipv6` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1' | ipv6 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'None')
self.assertEqual(rendered, 'None')
rendered = render_jinja_tmpl("{{ 'random' | ipv6 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'None')
self.assertEqual(rendered, 'None')
# returns the standard format value
rendered = render_jinja_tmpl("{{ 'FE80:0:0::0' | ipv6 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'fe80::')
self.assertEqual(rendered, 'fe80::')
# fe80:: is link local therefore will be returned
rendered = render_jinja_tmpl("{{ 'fe80::' | ipv6(options='ll') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'fe80::')
self.assertEqual(rendered, 'fe80::')
# fe80:: is not loopback
rendered = render_jinja_tmpl("{{ 'fe80::' | ipv6(options='lo') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'None')
self.assertEqual(rendered, 'None')
# returns only IPv6 addresses in the list
rendered = render_jinja_tmpl("{{ ['fe80::', '192.168.0.1'] | ipv6 | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'fe80::')
self.assertEqual(rendered, 'fe80::')
rendered = render_jinja_tmpl("{{ ['fe80::', '::'] | ipv6 | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'fe80::, ::')
self.assertEqual(rendered, 'fe80::, ::')
def test_network_hosts(self):
'''Test the `network_hosts` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1/30' | network_hosts | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'192.168.0.1, 192.168.0.2')
self.assertEqual(rendered, '192.168.0.1, 192.168.0.2')
def test_network_size(self):
'''Test the `network_size` Jinja filter.'''
rendered = render_jinja_tmpl("{{ '192.168.0.1' | network_size }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'1')
self.assertEqual(rendered, '1')
rendered = render_jinja_tmpl("{{ '192.168.0.1/8' | network_size }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'16777216')
self.assertEqual(rendered, '16777216')
def test_http_query(self):
'''Test the `http_query` Jinja filter.'''
@ -1006,103 +1014,103 @@ class TestCustomExtensions(TestCase):
'''Test the `to_bool` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 1 | to_bool }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
rendered = render_jinja_tmpl("{{ 'True' | to_bool }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
rendered = render_jinja_tmpl("{{ 0 | to_bool }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'False')
self.assertEqual(rendered, 'False')
rendered = render_jinja_tmpl("{{ 'Yes' | to_bool }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
def test_quote(self):
'''Test the `quote` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'random' | quote }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'random')
self.assertEqual(rendered, 'random')
def test_regex_search(self):
'''Test the `regex_search` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'abcdefabcdef' | regex_search('BC(.*)', ignorecase=True) }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u"('defabcdef',)") # because search looks only at the beginning
self.assertEqual(rendered, "('defabcdef',)") # because search looks only at the beginning
def test_regex_match(self):
'''Test the `regex_match` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'abcdefabcdef' | regex_match('BC(.*)', ignorecase=True)}}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u"None")
self.assertEqual(rendered, "None")
def test_regex_replace(self):
'''Test the `regex_replace` Jinja filter.'''
rendered = render_jinja_tmpl(r"{{ 'lets replace spaces' | regex_replace('\s+', '__') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'lets__replace__spaces')
self.assertEqual(rendered, 'lets__replace__spaces')
def test_uuid(self):
'''Test the `uuid` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'random' | uuid }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'3652b285-26ad-588e-a5dc-c2ee65edc804')
self.assertEqual(rendered, '3652b285-26ad-588e-a5dc-c2ee65edc804')
def test_min(self):
'''Test the `min` Jinja filter.'''
rendered = render_jinja_tmpl("{{ [1, 2, 3] | min }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'1')
self.assertEqual(rendered, '1')
def test_max(self):
'''Test the `max` Jinja filter.'''
rendered = render_jinja_tmpl("{{ [1, 2, 3] | max }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'3')
self.assertEqual(rendered, '3')
def test_avg(self):
'''Test the `avg` Jinja filter.'''
rendered = render_jinja_tmpl("{{ [1, 2, 3] | avg }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'2.0')
self.assertEqual(rendered, '2.0')
def test_union(self):
'''Test the `union` Jinja filter.'''
rendered = render_jinja_tmpl("{{ [1, 2, 3] | union([2, 3, 4]) | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'1, 2, 3, 4')
self.assertEqual(rendered, '1, 2, 3, 4')
def test_intersect(self):
'''Test the `intersect` Jinja filter.'''
rendered = render_jinja_tmpl("{{ [1, 2, 3] | intersect([2, 3, 4]) | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'2, 3')
self.assertEqual(rendered, '2, 3')
def test_difference(self):
'''Test the `difference` Jinja filter.'''
rendered = render_jinja_tmpl("{{ [1, 2, 3] | difference([2, 3, 4]) | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'1')
self.assertEqual(rendered, '1')
def test_symmetric_difference(self):
'''Test the `symmetric_difference` Jinja filter.'''
rendered = render_jinja_tmpl("{{ [1, 2, 3] | symmetric_difference([2, 3, 4]) | join(', ') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'1, 4')
self.assertEqual(rendered, '1, 4')
def test_md5(self):
'''Test the `md5` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'random' | md5 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'7ddf32e17a6ac5ce04a8ecbf782ca509')
self.assertEqual(rendered, '7ddf32e17a6ac5ce04a8ecbf782ca509')
def test_sha256(self):
'''Test the `sha256` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'random' | sha256 }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'a441b15fe9a3cf56661190a0b93b9dec7d04127288cc87250967cf3b52894d11')
self.assertEqual(rendered, 'a441b15fe9a3cf56661190a0b93b9dec7d04127288cc87250967cf3b52894d11')
def test_sha512(self):
'''Test the `sha512` Jinja filter.'''
@ -1115,24 +1123,24 @@ class TestCustomExtensions(TestCase):
'''Test the `hmac` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'random' | hmac('secret', 'blah') }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'False')
self.assertEqual(rendered, 'False')
rendered = render_jinja_tmpl(("{{ 'get salted' | "
"hmac('shared secret', 'eBWf9bstXg+NiP5AOwppB5HMvZiYMPzEM9W5YMm/AmQ=') }}"),
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'True')
self.assertEqual(rendered, 'True')
def test_base64_encode(self):
'''Test the `base64_encode` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'random' | base64_encode }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'cmFuZG9t')
self.assertEqual(rendered, 'cmFuZG9t')
def test_base64_decode(self):
'''Test the `base64_decode` Jinja filter.'''
rendered = render_jinja_tmpl("{{ 'cmFuZG9t' | base64_decode }}",
dict(opts=self.local_opts, saltenv='test', salt=self.local_salt))
self.assertEqual(rendered, u'random')
self.assertEqual(rendered, 'random')
# def test_print(self):
# env = Environment(extensions=[SerializerExtension])

View File

@ -489,7 +489,7 @@ class LogSettingsParserTests(TestCase):
# Check log file logger
self.assertEqual(self.log_setup.log_level_logfile, log_level_logfile)
@skipIf(salt.utils.is_windows(), 'Windows uses a logging listener')
@skipIf(salt.utils.platform.is_windows(), 'Windows uses a logging listener')
def test_log_created(self):
'''
Tests that log file is created

View File

@ -68,13 +68,13 @@ class CreateVirtualMachineTestCase(TestCase):
def test_create_vm_pool_task_call(self):
vmware.create_vm(self.vm_name, self.mock_config_spec,
self.mock_folder_object, self.mock_resourcepool_object)
self.mock_vm_create_task.assert_called_once()
self.assert_called_once(self.mock_vm_create_task)
def test_create_vm_host_task_call(self):
vmware.create_vm(self.vm_name, self.mock_config_spec,
self.mock_folder_object, self.mock_resourcepool_object,
host_object=self.mock_host_object)
self.mock_vm_create_task.assert_called_once()
self.assert_called_once(self.mock_vm_create_task)
def test_create_vm_raise_no_permission(self):
exception = vim.fault.NoPermission()
@ -133,13 +133,13 @@ class RegisterVirtualMachineTestCase(TestCase):
def test_register_vm_pool_task_call(self):
vmware.register_vm(self.datacenter, self.vm_name, self.mock_vmx_path,
self.mock_resourcepool_object)
self.mock_vm_register_task.assert_called_once()
self.assert_called_once(self.mock_vm_register_task)
def test_register_vm_host_task_call(self):
vmware.register_vm(self.datacenter, self.vm_name, self.mock_vmx_path,
self.mock_resourcepool_object,
host_object=self.mock_host_object)
self.mock_vm_register_task.assert_called_once()
self.assert_called_once(self.mock_vm_register_task)
def test_register_vm_raise_no_permission(self):
exception = vim.fault.NoPermission()
@ -192,7 +192,7 @@ class UpdateVirtualMachineTestCase(TestCase):
def test_update_vm_task_call(self):
vmware.update_vm(self.mock_vm_ref, self.mock_config_spec)
self.mock_vm_update_task.assert_called_once()
self.assert_called_once(self.mock_vm_update_task)
def test_update_vm_raise_vim_fault(self):
exception = vim.fault.VimFault()
@ -234,7 +234,7 @@ class DeleteVirtualMachineTestCase(TestCase):
def test_destroy_vm_task_call(self):
vmware.delete_vm(self.mock_vm_ref)
self.mock_vm_destroy_task.assert_called_once()
self.assert_called_once(self.mock_vm_destroy_task)
def test_destroy_vm_raise_vim_fault(self):
exception = vim.fault.VimFault()
@ -274,7 +274,7 @@ class UnregisterVirtualMachineTestCase(TestCase):
def test_unregister_vm_task_call(self):
vmware.unregister_vm(self.mock_vm_ref)
self.mock_vm_unregister.assert_called_once()
self.assert_called_once(self.mock_vm_unregister)
def test_unregister_vm_raise_vim_fault(self):
exception = vim.fault.VimFault()