Merge branch 'oxygen' into scheduler_fixes_loop_interval

This commit is contained in:
Gareth J. Greenaway 2017-12-18 08:10:39 -08:00 committed by GitHub
commit 047e76cb31
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
199 changed files with 1213 additions and 1203 deletions

View File

@ -1646,13 +1646,13 @@ Example:
.. code-block:: jinja
regex_escape = {{ 'https://example.com?foo=bar%20baz' | regex_escape }}
regex_escape = {{ 'https://example.com?foo=bar%20baz' | regex_escape }}
will be rendered as:
.. code-block:: text
regex_escape = https\:\/\/example\.com\?foo\=bar\%20baz
regex_escape = https\:\/\/example\.com\?foo\=bar\%20baz
Set Theory Filters
------------------
@ -1670,13 +1670,13 @@ Example:
.. code-block:: jinja
unique = {{ ['foo', 'foo', 'bar'] | unique }}
unique = {{ ['foo', 'foo', 'bar'] | unique }}
will be rendered as:
.. code-block:: text
unique = ['foo', 'bar']
unique = ['foo', 'bar']
Jinja in Files
==============

View File

@ -211,6 +211,28 @@ localtime.
This will schedule the command: ``state.sls httpd test=True`` at 5:00 PM on
Monday, Wednesday and Friday, and 3:00 PM on Tuesday and Thursday.
.. code-block:: yaml
schedule:
job1:
function: state.sls
args:
- httpd
kwargs:
test: True
when:
- 'tea time'
.. code-block:: yaml
whens:
tea time: 1:40pm
deployment time: Friday 5:00pm
The Salt scheduler also allows custom phrases to be used for the `when`
parameter. These `whens` can be stored as either pillar values or
grain values.
.. code-block:: yaml
schedule:

View File

@ -145,8 +145,10 @@ class BaseCaller(object):
print_ret = ret.get('return', {})
salt.output.display_output(
{'local': print_ret},
out,
self.opts)
out=out,
opts=self.opts,
_retcode=ret.get('retcode', 0))
# _retcode will be available in the kwargs of the outputter function
if self.opts.get('retcode_passthrough', False):
sys.exit(ret['retcode'])
except SaltInvocationError as err:
@ -372,8 +374,10 @@ class RAETCaller(BaseCaller):
self.process.terminate()
salt.output.display_output(
{'local': print_ret},
ret.get('out', 'nested'),
self.opts)
out=ret.get('out', 'nested'),
opts=self.opts,
_retcode=ret.get('retcode', 0))
# _retcode will be available in the kwargs of the outputter function
if self.opts.get('retcode_passthrough', False):
sys.exit(ret['retcode'])

View File

@ -181,7 +181,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
for full_ret in self.local_client.cmd_cli(**kwargs):
ret_, out, retcode = self._format_ret(full_ret)
ret.update(ret_)
self._output_ret(ret, out)
self._output_ret(ret, out, retcode=retcode)
else:
if self.options.verbose:
kwargs['verbose'] = True
@ -190,7 +190,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
try:
ret_, out, retcode = self._format_ret(full_ret)
retcodes.append(retcode)
self._output_ret(ret_, out)
self._output_ret(ret_, out, retcode=retcode)
ret.update(full_ret)
except KeyError:
errors.append(full_ret)
@ -212,7 +212,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
except (SaltInvocationError, EauthAuthenticationError, SaltClientError) as exc:
ret = str(exc)
self._output_ret(ret, '')
self._output_ret(ret, '', retcode=1)
def _preview_target(self):
'''
@ -352,7 +352,7 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
'Requested job was still run but output cannot be displayed.\n')
salt.output.update_progress(self.config, progress, self.progress_bar, out)
def _output_ret(self, ret, out):
def _output_ret(self, ret, out, retcode=0):
'''
Print the output from a single return to the terminal
'''
@ -362,7 +362,10 @@ class SaltCMD(salt.utils.parsers.SaltCMDOptionParser):
self._print_docs(ret)
else:
# Determine the proper output method and run it
salt.output.display_output(ret, out, self.config)
salt.output.display_output(ret,
out=out,
opts=self.config,
_retcode=retcode)
if not ret:
sys.stderr.write('ERROR: No return received\n')
sys.exit(2)

View File

@ -36,6 +36,7 @@ import salt.utils.crypt
import salt.utils.data
import salt.utils.dictupdate
import salt.utils.files
import salt.utils.verify
import salt.syspaths
from salt.template import compile_template
@ -185,6 +186,10 @@ class CloudClient(object):
else:
self.opts = salt.config.cloud_config(path)
# Check the cache-dir exists. If not, create it.
v_dirs = [self.opts['cachedir']]
salt.utils.verify.verify_env(v_dirs, salt.utils.get_user())
if pillars:
for name, provider in six.iteritems(pillars.pop('providers', {})):
driver = provider['driver']

View File

@ -49,6 +49,7 @@ Example Provider Configuration
# Import python libs
from __future__ import absolute_import
import os
import sys
import re
import pprint
import logging
@ -58,6 +59,7 @@ from salt.utils.versions import LooseVersion as _LooseVersion
# Import 3rd-party libs
# pylint: disable=import-error
LIBCLOUD_IMPORT_ERROR = None
try:
import libcloud
from libcloud.compute.types import Provider
@ -78,6 +80,7 @@ try:
libcloud.security.CA_CERTS_PATH.append('/etc/ssl/certs/YaST-CA.pem')
HAS_LIBCLOUD = True
except ImportError:
LIBCLOUD_IMPORT_ERROR = sys.exc_info()
HAS_LIBCLOUD = False
# pylint: enable=import-error
@ -155,6 +158,9 @@ def get_dependencies():
'''
Warn if dependencies aren't met.
'''
if LIBCLOUD_IMPORT_ERROR:
log.error("Failure when importing LibCloud: ", exc_info=LIBCLOUD_IMPORT_ERROR)
log.error("Note: The libcloud dependency is called 'apache-libcloud' on PyPi/pip.")
return config.check_driver_dependencies(
__virtualname__,
{'libcloud': HAS_LIBCLOUD}

View File

@ -10,6 +10,7 @@ import socket
import ctypes
import os
import ipaddress
import salt.ext.six as six
class sockaddr(ctypes.Structure):
@ -36,7 +37,7 @@ def inet_pton(address_family, ip_string):
# This will catch IP Addresses such as 10.1.2
if address_family == socket.AF_INET:
try:
ipaddress.ip_address(ip_string.decode())
ipaddress.ip_address(six.u(ip_string))
except ValueError:
raise socket.error('illegal IP address string passed to inet_pton')
return socket.inet_aton(ip_string)

View File

@ -425,14 +425,14 @@ def _osx_memdata():
sysctl = salt.utils.path.which('sysctl')
if sysctl:
mem = __salt__['cmd.run']('{0} -n hw.memsize'.format(sysctl))
swap_total = __salt__['cmd.run']('{0} -n vm.swapusage').split()[2]
swap_total = __salt__['cmd.run']('{0} -n vm.swapusage'.format(sysctl)).split()[2]
if swap_total.endswith('K'):
_power = 2**10
elif swap_total.endswith('M'):
_power = 2**20
elif swap_total.endswith('G'):
_power = 2**30
swap_total = swap_total[:-1] * _power
swap_total = float(swap_total[:-1]) * _power
grains['mem_total'] = int(mem) // 1024 // 1024
grains['swap_total'] = int(swap_total) // 1024 // 1024

View File

@ -398,7 +398,7 @@ def create_hosted_zone(Name, VPCId=None, VPCName=None, VPCRegion=None, CallerRef
if tries and e.response.get('Error', {}).get('Code') == 'Throttling':
log.debug('Throttled by AWS API.')
time.sleep(3)
retries -= 1
tries -= 1
continue
log.error('Failed to create hosted zone {0}: {1}'.format(Name, str(e)))
return []
@ -448,7 +448,7 @@ def update_hosted_zone_comment(Id=None, Name=None, Comment=None, PrivateZone=Non
if tries and e.response.get('Error', {}).get('Code') == 'Throttling':
log.debug('Throttled by AWS API.')
time.sleep(3)
retries -= 1
tries -= 1
continue
log.error('Failed to update comment on hosted zone {0}: {1}'.format(
Name or Id, str(e)))
@ -547,7 +547,7 @@ def associate_vpc_with_hosted_zone(HostedZoneId=None, Name=None, VPCId=None,
if tries and e.response.get('Error', {}).get('Code') == 'Throttling':
log.debug('Throttled by AWS API.')
time.sleep(3)
retries -= 1
tries -= 1
continue
log.error('Failed to associate VPC {0} with hosted zone {1}: {2}'.format(
VPCName or VPCId, Name or HostedZoneId, str(e)))
@ -639,7 +639,7 @@ def diassociate_vpc_from_hosted_zone(HostedZoneId=None, Name=None, VPCId=None,
if tries and e.response.get('Error', {}).get('Code') == 'Throttling':
log.debug('Throttled by AWS API.')
time.sleep(3)
retries -= 1
tries -= 1
continue
log.error('Failed to associate VPC {0} with hosted zone {1}: {2}'.format(
VPCName or VPCId, Name or HostedZoneId, str(e)))
@ -877,7 +877,7 @@ def change_resource_record_sets(HostedZoneId=None, Name=None,
if tries and e.response.get('Error', {}).get('Code') == 'Throttling':
log.debug('Throttled by AWS API.')
time.sleep(3)
retries -= 1
tries -= 1
continue
log.error('Failed to apply requested changes to the hosted zone {0}: {1}'.format(
Name or HostedZoneId, str(e)))

View File

@ -754,44 +754,71 @@ def delete_record(name, zone, record_type, identifier=None, all_records=False,
raise e
def _wait_for_sync(status, conn, wait_for_sync):
if not wait_for_sync:
def _try_func(conn, func, **args):
tries = 30
while True:
try:
return getattr(conn, func)(**args)
except AttributeError as e:
# Don't include **args in log messages - security concern.
log.error('Function `{0}()` not found for AWS connection object '
'{1}'.format(func, conn))
return None
except DNSServerError as e:
if tries and e.code == 'Throttling':
log.debug('Throttled by AWS API. Will retry in 5 seconds')
time.sleep(5)
tries -= 1
continue
log.error('Failed calling {0}(): {1}'.format(func, str(e)))
return None
def _wait_for_sync(status, conn, wait=True):
### Wait should be a bool or an integer
if wait is True:
wait = 600
if not wait:
return True
retry = 10
i = 0
while i < retry:
log.info('Getting route53 status (attempt {0})'.format(i + 1))
orig_wait = wait
log.info('Waiting up to {0} seconds for Route53 changes to synchronize'.format(orig_wait))
while wait > 0:
change = conn.get_change(status)
log.debug(change.GetChangeResponse.ChangeInfo.Status)
if change.GetChangeResponse.ChangeInfo.Status == 'INSYNC':
current = change.GetChangeResponse.ChangeInfo.Status
if current == 'INSYNC':
return True
i = i + 1
time.sleep(20)
log.error('Timed out waiting for Route53 status update.')
sleep = wait if wait % 60 == wait else 60
log.info('Sleeping {0} seconds waiting for changes to synch (current status {1})'.format(
sleep, current))
time.sleep(sleep)
wait -= sleep
continue
log.error('Route53 changes not synced after {0} seconds.'.format(orig_wait))
return False
def create_hosted_zone(domain_name, caller_ref=None, comment='',
private_zone=False, vpc_id=None, vpc_name=None,
vpc_region=None, region=None, key=None, keyid=None,
def create_hosted_zone(domain_name, caller_ref=None, comment='', private_zone=False, vpc_id=None,
vpc_name=None, vpc_region=None, region=None, key=None, keyid=None,
profile=None):
'''
Create a new Route53 Hosted Zone. Returns a Python data structure with
information about the newly created Hosted Zone.
Create a new Route53 Hosted Zone. Returns a Python data structure with information about the
newly created Hosted Zone.
domain_name
The name of the domain. This should be a fully-specified domain, and
should terminate with a period. This is the name you have registered
with your DNS registrar. It is also the name you will delegate from your
registrar to the Amazon Route 53 delegation servers returned in response
The name of the domain. This must be fully-qualified, terminating with a period. This is
the name you have registered with your domain registrar. It is also the name you will
delegate from your registrar to the Amazon Route 53 delegation servers returned in response
to this request.
caller_ref
A unique string that identifies the request and that allows
create_hosted_zone() calls to be retried without the risk of executing
the operation twice. You want to provide this where possible, since
additional calls while the first is in PENDING status will be accepted
and can lead to multiple copies of the zone being created in Route53.
A unique string that identifies the request and that allows create_hosted_zone() calls to
be retried without the risk of executing the operation twice. It can take several minutes
for the change to replicate globally, and change from PENDING to INSYNC status. Thus it's
best to provide some value for this where possible, since duplicate calls while the first
is in PENDING status will be accepted and can lead to multiple copies of the zone being
created. On the other hand, if a zone is created with a given caller_ref, then deleted,
a second attempt to create a zone with the same caller_ref will fail until that caller_ref
is flushed from the Route53 system, which can take upwards of 24 hours.
comment
Any comments you want to include about the hosted zone.
@ -800,33 +827,30 @@ def create_hosted_zone(domain_name, caller_ref=None, comment='',
Set True if creating a private hosted zone.
vpc_id
When creating a private hosted zone, either the VPC ID or VPC Name to
associate with is required. Exclusive with vpe_name. Ignored if passed
for a non-private zone.
When creating a private hosted zone, either the VPC ID or VPC Name to associate with is
required. Exclusive with vpc_name. Ignored when creating a non-private zone.
vpc_name
When creating a private hosted zone, either the VPC ID or VPC Name to
associate with is required. Exclusive with vpe_id. Ignored if passed
for a non-private zone.
When creating a private hosted zone, either the VPC ID or VPC Name to associate with is
required. Exclusive with vpc_id. Ignored when creating a non-private zone.
vpc_region
When creating a private hosted zone, the region of the associated VPC is
required. If not provided, an effort will be made to determine it from
vpc_id or vpc_name, if possible. If this fails, you'll need to provide
an explicit value for this option. Ignored if passed for a non-private
zone.
When creating a private hosted zone, the region of the associated VPC is required. If not
provided, an effort will be made to determine it from vpc_id or vpc_name, where possible.
If this fails, you'll need to provide an explicit value for this option. Ignored when
creating a non-private zone.
region
Region endpoint to connect to
Region endpoint to connect to.
key
AWS key to bind with
AWS key to bind with.
keyid
AWS keyid to bind with
AWS keyid to bind with.
profile
Dict, or pillar key pointing to a dict, containing AWS region/key/keyid
Dict, or pillar key pointing to a dict, containing AWS region/key/keyid.
CLI Example::
@ -879,24 +903,15 @@ def create_hosted_zone(domain_name, caller_ref=None, comment='',
log.info('Options vpc_id, vpc_name, and vpc_region are ignored '
'when creating non-private zones.')
retries = 10
while retries:
try:
# Crazy layers of dereference...
r = conn.create_hosted_zone(**args)
r = r.CreateHostedZoneResponse.__dict__ if hasattr(r,
'CreateHostedZoneResponse') else {}
return r.get('parent', {}).get('CreateHostedZoneResponse')
except DNSServerError as e:
if retries:
if 'Throttling' == e.code:
log.debug('Throttled by AWS API.')
elif 'PriorRequestNotComplete' == e.code:
log.debug('The request was rejected by AWS API.\
Route 53 was still processing a prior request')
time.sleep(3)
retries -= 1
continue
log.error('Failed to create hosted zone {0}: {1}'.format(
domain_name, e.message))
return None
r = _try_func(conn, 'create_hosted_zone', **args)
if r is None:
log.error('Failed to create hosted zone {0}'.format(domain_name))
return None
r = r.get('CreateHostedZoneResponse', {})
# Pop it since it'll be irrelevant by the time we return
status = r.pop('ChangeInfo', {}).get('Id', '').replace('/change/', '')
synced = _wait_for_sync(status, conn, wait=600)
if not synced:
log.error('Hosted zone {0} not synced after 600 seconds.'.format(domain_name))
return None
return r

View File

@ -2,12 +2,13 @@
'''
Package support for the dummy proxy used by the test suite
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
import salt.utils.data
import salt.utils.platform
from salt.ext import six
log = logging.getLogger(__name__)
@ -96,9 +97,9 @@ def installed(name,
p = __proxy__['dummy.package_status'](name)
if version is None:
if 'ret' in p:
return str(p['ret'])
return six.text_type(p['ret'])
else:
return True
else:
if p is not None:
return version == str(p)
return version == six.text_type(p)

View File

@ -4,7 +4,7 @@
Provide the service module for the dummy proxy used in integration tests
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import Salt libs

View File

@ -144,17 +144,17 @@ def _parse_acl(acl, user, group):
# Set the permissions fields
octal = 0
vals['permissions'] = {}
if 'r' in comps[2]:
if 'r' in comps[-1]:
octal += 4
vals['permissions']['read'] = True
else:
vals['permissions']['read'] = False
if 'w' in comps[2]:
if 'w' in comps[-1]:
octal += 2
vals['permissions']['write'] = True
else:
vals['permissions']['write'] = False
if 'x' in comps[2]:
if 'x' in comps[-1]:
octal += 1
vals['permissions']['execute'] = True
else:

View File

@ -247,7 +247,7 @@ def install_ruby(ruby, runas=None):
ret = {}
ret = _rbenv_exec(['install', ruby], env=env, runas=runas, ret=ret)
if ret['retcode'] == 0:
if ret is not False and ret['retcode'] == 0:
rehash(runas=runas)
return ret['stderr']
else:

View File

@ -7,7 +7,7 @@ Wrapper for rsync
This data can also be passed into :ref:`pillar <pillar-walk-through>`.
Options passed into opts will overwrite options passed into pillar.
'''
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
# Import python libs
import errno

View File

@ -447,6 +447,9 @@ def meminfo():
.. versionchanged:: 2016.11.4
Added support for AIX
.. versionchanged:: Oxygen
Added support for OpenBSD
CLI Example:
.. code-block:: bash
@ -574,10 +577,25 @@ def meminfo():
return ret
def openbsd_meminfo():
    '''
    openbsd specific implementation of meminfo
    '''
    # vmstat prints its numeric values on the third line; columns 2 through 9
    # hold the memory and paging counters we report, in the order below.
    labels = ['active virtual pages', 'free list size', 'page faults',
              'pages reclaimed', 'pages paged in', 'pages paged out',
              'pages freed', 'pages scanned']
    lines = __salt__['cmd.run']('vmstat').splitlines()
    values = lines[2].split()[2:10]
    return {label: value for label, value in zip(labels, values)}
# dict that return a function that does the right thing per platform
get_version = {
'Linux': linux_meminfo,
'FreeBSD': freebsd_meminfo,
'OpenBSD': openbsd_meminfo,
'AIX': aix_meminfo,
}

View File

@ -317,7 +317,7 @@ def get_site_packages(venv):
ret = __salt__['cmd.exec_code_all'](
bin_path,
'from distutils import sysconfig; '
'print sysconfig.get_python_lib()'
'print(sysconfig.get_python_lib())'
)
if ret['retcode'] != 0:

View File

@ -278,10 +278,9 @@ def list_available(*names, **kwargs):
saltenv = kwargs.get('saltenv', 'base')
refresh = salt.utils.data.is_true(kwargs.get('refresh', False))
return_dict_always = \
salt.utils.data.is_true(kwargs.get('return_dict_always', False))
_refresh_db_conditional(saltenv, force=refresh)
return_dict_always = \
salt.utils.is_true(kwargs.get('return_dict_always', False))
if len(names) == 1 and not return_dict_always:
pkginfo = _get_package_info(names[0], saltenv=saltenv)
if not pkginfo:
@ -358,6 +357,7 @@ def list_pkgs(versions_as_list=False, **kwargs):
List the packages currently installed
Args:
version_as_list (bool): Returns the versions as a list
Kwargs:
saltenv (str): The salt environment to use. Default ``base``.
@ -1255,6 +1255,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
log.debug('Source hash matches package hash.')
# Get install flags
install_flags = pkginfo[version_num].get('install_flags', '')
if options and options.get('extra_install_flags'):
install_flags = '{0} {1}'.format(
@ -1329,14 +1330,12 @@ def install(name=None, refresh=False, pkgs=None, **kwargs):
log.error('Scheduled Task failed to run')
ret[pkg_name] = {'install status': 'failed'}
else:
# Launch the command
result = __salt__['cmd.run_all'](
'"{0}" /s /c "{1}"'.format(cmd_shell, arguments),
cache_path,
output_loglevel='trace',
python_shell=False,
redirect_stderr=True)
result = __salt__['cmd.run_all']('"{0}" /s /c "{1}"'.format(cmd_shell, arguments),
cache_path,
output_loglevel='trace',
python_shell=False,
redirect_stderr=True)
if not result['retcode']:
ret[pkg_name] = {'install status': 'success'}
changed.append(pkg_name)
@ -1513,7 +1512,6 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
removal_targets.append(version_num)
for target in removal_targets:
# Get the uninstaller
uninstaller = pkginfo[target].get('uninstaller', '')
cache_dir = pkginfo[target].get('cache_dir', False)
@ -1538,6 +1536,7 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
# If true, the entire directory will be cached instead of the
# individual file. This is useful for installations that are not
# single files
if cache_dir and uninstaller.startswith('salt:'):
path, _ = os.path.split(uninstaller)
__salt__['cp.cache_dir'](path,
@ -1578,15 +1577,13 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
else:
# Run the uninstaller directly
# (not hosted on salt:, https:, etc.)
cached_pkg = uninstaller
cached_pkg = os.path.expandvars(uninstaller)
# Fix non-windows slashes
cached_pkg = cached_pkg.replace('/', '\\')
cache_path, _ = os.path.split(cached_pkg)
# Get parameters for cmd
expanded_cached_pkg = str(os.path.expandvars(cached_pkg))
expanded_cache_path = str(os.path.expandvars(cache_path))
# os.path.expandvars is not required as we run everything through cmd.exe /s /c
# Get uninstall flags
uninstall_flags = pkginfo[target].get('uninstall_flags', '')
@ -1602,9 +1599,11 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
# Build cmd and arguments
# cmd and arguments must be separated for use with the task scheduler
if use_msiexec:
arguments = '"{0}" /X "{1}"'.format(msiexec, uninstaller if uninstaller else expanded_cached_pkg)
# Check if uninstaller is set to {guid}; if not, we assume it's a remote
# msi file which has already been downloaded.
arguments = '"{0}" /X "{1}"'.format(msiexec, cached_pkg)
else:
arguments = '"{0}"'.format(expanded_cached_pkg)
arguments = '"{0}"'.format(cached_pkg)
if uninstall_flags:
arguments = '{0} {1}'.format(arguments, uninstall_flags)
@ -1619,7 +1618,7 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
action_type='Execute',
cmd=cmd_shell,
arguments='/s /c "{0}"'.format(arguments),
start_in=expanded_cache_path,
start_in=cache_path,
trigger_type='Once',
start_date='1975-01-01',
start_time='01:00',
@ -1634,7 +1633,6 @@ def remove(name=None, pkgs=None, version=None, **kwargs):
# Launch the command
result = __salt__['cmd.run_all'](
'"{0}" /s /c "{1}"'.format(cmd_shell, arguments),
expanded_cache_path,
output_loglevel='trace',
python_shell=False,
redirect_stderr=True)

View File

@ -1175,10 +1175,6 @@ class LowDataAdapter(object):
for chunk in lowstate:
if token:
chunk['token'] = token
if cherrypy.session.get('user'):
chunk['__current_eauth_user'] = cherrypy.session.get('user')
if cherrypy.session.get('groups'):
chunk['__current_eauth_groups'] = cherrypy.session.get('groups')
if client:
chunk['client'] = client
@ -1878,9 +1874,6 @@ class Login(LowDataAdapter):
cherrypy.response.headers['X-Auth-Token'] = cherrypy.session.id
cherrypy.session['token'] = token['token']
cherrypy.session['timeout'] = (token['expire'] - token['start']) / 60
cherrypy.session['user'] = token['name']
if 'groups' in token:
cherrypy.session['groups'] = token['groups']
# Grab eauth config for the current backend for the current user
try:

View File

@ -39,7 +39,7 @@ class NestDisplay(object):
'''
Manage the nested display contents
'''
def __init__(self):
def __init__(self, retcode=0):
self.__dict__.update(
salt.utils.color.get_colors(
__opts__.get('color'),
@ -47,6 +47,7 @@ class NestDisplay(object):
)
)
self.strip_colors = __opts__.get('strip_colors', True)
self.retcode = retcode
def ustring(self,
indent,
@ -109,12 +110,15 @@ class NestDisplay(object):
)
first_line = False
elif isinstance(ret, (list, tuple)):
color = self.GREEN
if self.retcode != 0:
color = self.RED
for ind in ret:
if isinstance(ind, (list, tuple, dict)):
out.append(
self.ustring(
indent,
self.GREEN,
color,
'|_'
)
)
@ -124,10 +128,13 @@ class NestDisplay(object):
self.display(ind, indent, '- ', out)
elif isinstance(ret, dict):
if indent:
color = self.CYAN
if self.retcode != 0:
color = self.RED
out.append(
self.ustring(
indent,
self.CYAN,
color,
'----------'
)
)
@ -137,13 +144,15 @@ class NestDisplay(object):
keys = ret.keys()
else:
keys = sorted(ret)
color = self.CYAN
if self.retcode != 0:
color = self.RED
for key in keys:
val = ret[key]
out.append(
self.ustring(
indent,
self.CYAN,
color,
key,
suffix=':',
prefix=prefix
@ -158,7 +167,8 @@ def output(ret, **kwargs):
Display ret data
'''
# Prefer kwargs before opts
retcode = kwargs.get('_retcode', 0)
base_indent = kwargs.get('nested_indent', 0) \
or __opts__.get('nested_indent', 0)
nest = NestDisplay()
nest = NestDisplay(retcode=retcode)
return '\n'.join(nest.display(ret, base_indent, '', []))

View File

@ -24,7 +24,7 @@ the chronos endpoint:
.. versionadded:: 2015.8.2
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import salt.utils.http

View File

@ -60,7 +60,7 @@ The password used to login to the cimc host. Required.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python Libs
import logging

View File

@ -173,7 +173,7 @@ responding:
'''
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import Salt Libs

View File

@ -2,7 +2,7 @@
'''
This is a dummy proxy-minion designed for testing the proxy minion subsystem.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import os
@ -194,7 +194,7 @@ def uptodate():
for p in DETAILS['packages']:
version_float = float(DETAILS['packages'][p])
version_float = version_float + 1.0
DETAILS['packages'][p] = str(version_float)
DETAILS['packages'][p] = six.text_type(version_float)
return DETAILS['packages']

View File

@ -153,7 +153,7 @@ Look there to find an example structure for Pillar as well as an example
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
@ -200,13 +200,12 @@ def init(opts):
login
the protocol and port are cached.
'''
log.debug('Initting esxcluster proxy module in process '
'{}'.format(os.getpid()))
log.debug('Initting esxcluster proxy module in process %s', os.getpid())
log.debug('Validating esxcluster proxy input')
schema = EsxclusterProxySchema.serialize()
log.trace('schema = {}'.format(schema))
log.trace('schema = %s', schema)
proxy_conf = merge(opts.get('proxy', {}), __pillar__.get('proxy', {}))
log.trace('proxy_conf = {0}'.format(proxy_conf))
log.trace('proxy_conf = %s', proxy_conf)
try:
jsonschema.validate(proxy_conf, schema)
except jsonschema.exceptions.ValidationError as exc:
@ -253,7 +252,7 @@ def init(opts):
username, password = find_credentials()
DETAILS['password'] = password
except salt.exceptions.SaltSystemExit as err:
log.critical('Error: {0}'.format(err))
log.critical('Error: %s', err)
return False
return True

View File

@ -146,7 +146,7 @@ Look there to find an example structure for Pillar as well as an example
'''
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
@ -191,13 +191,12 @@ def init(opts):
This function gets called when the proxy starts up.
All login details are cached.
'''
log.debug('Initting esxdatacenter proxy module in process '
'{}'.format(os.getpid()))
log.debug('Initting esxdatacenter proxy module in process %s', os.getpid())
log.trace('Validating esxdatacenter proxy input')
schema = EsxdatacenterProxySchema.serialize()
log.trace('schema = {}'.format(schema))
log.trace('schema = %s', schema)
proxy_conf = merge(opts.get('proxy', {}), __pillar__.get('proxy', {}))
log.trace('proxy_conf = {0}'.format(proxy_conf))
log.trace('proxy_conf = %s', proxy_conf)
try:
jsonschema.validate(proxy_conf, schema)
except jsonschema.exceptions.ValidationError as exc:
@ -244,7 +243,7 @@ def init(opts):
username, password = find_credentials()
DETAILS['password'] = password
except salt.exceptions.SaltSystemExit as err:
log.critical('Error: {0}'.format(err))
log.critical('Error: %s', err)
return False
return True

View File

@ -271,7 +271,7 @@ for standing up an ESXi host from scratch.
'''
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
@ -317,13 +317,12 @@ def init(opts):
ESXi devices, the host, login credentials, and, if configured,
the protocol and port are cached.
'''
log.debug('Initting esxi proxy module in process \'{}\''
''.format(os.getpid()))
log.debug('Initting esxi proxy module in process %s', os.getpid())
log.debug('Validating esxi proxy input')
schema = EsxiProxySchema.serialize()
log.trace('esxi_proxy_schema = {}'.format(schema))
log.trace('esxi_proxy_schema = %s', schema)
proxy_conf = merge(opts.get('proxy', {}), __pillar__.get('proxy', {}))
log.trace('proxy_conf = {0}'.format(proxy_conf))
log.trace('proxy_conf = %s', proxy_conf)
try:
jsonschema.validate(proxy_conf, schema)
except jsonschema.exceptions.ValidationError as exc:
@ -348,7 +347,7 @@ def init(opts):
try:
username, password = find_credentials(host)
except SaltSystemExit as err:
log.critical('Error: {0}'.format(err))
log.critical('Error: %s', err)
return False
# Set configuration details
@ -408,7 +407,7 @@ def init(opts):
username, password = find_credentials(DETAILS['vcenter'])
DETAILS['password'] = password
except SaltSystemExit as err:
log.critical('Error: {0}'.format(err))
log.critical('Error: %s', err)
return False
# Save optional

View File

@ -146,7 +146,7 @@ Look there to find an example structure for Pillar as well as an example
'''
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
@ -182,11 +182,10 @@ def init(opts):
This function gets called when the proxy starts up. For
login the protocol and port are cached.
'''
log.debug('Initting esxvm proxy module in process '
'{}'.format(os.getpid()))
log.debug('Initting esxvm proxy module in process %s', os.getpid())
log.debug('Validating esxvm proxy input')
proxy_conf = merge(opts.get('proxy', {}), __pillar__.get('proxy', {}))
log.trace('proxy_conf = {0}'.format(proxy_conf))
log.trace('proxy_conf = %s', proxy_conf)
# TODO json schema validation
# Save mandatory fields in cache
@ -230,7 +229,7 @@ def init(opts):
username, password = find_credentials()
DETAILS['password'] = password
except excs.SaltSystemExit as err:
log.critical('Error: {0}'.format(err))
log.critical('Error: %s', err)
return False
return True

View File

@ -171,7 +171,7 @@ Look there to find an example structure for pillar as well as an example
``.sls`` file for standing up a Dell Chassis from scratch.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging

View File

@ -35,7 +35,7 @@ Run the salt proxy via the following command:
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
@ -191,7 +191,7 @@ def shutdown(opts):
This is called when the proxy-minion is exiting to make sure the
connection to the device is closed cleanly.
'''
log.debug('Proxy module {0} shutting down!!'.format(opts['id']))
log.debug('Proxy module %s shutting down!!', opts['id'])
try:
thisproxy['conn'].close()

View File

@ -24,7 +24,7 @@ the marathon endpoint:
.. versionadded:: 2015.8.2
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import salt.utils.http

View File

@ -135,7 +135,7 @@ Example using a user-specific library, extending NAPALM's capabilities, e.g. ``c
.. versionadded:: 2016.11.0
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python lib
import logging
@ -196,10 +196,10 @@ def alive(opts):
# or regular minion
is_alive_ret = call('is_alive', **{})
if not is_alive_ret.get('result', False):
log.debug('[{proxyid}] Unable to execute `is_alive`: {comment}'.format(
proxyid=opts.get('id'),
comment=is_alive_ret.get('comment')
))
log.debug(
'[%s] Unable to execute `is_alive`: %s',
opts.get('id'), is_alive_ret.get('comment')
)
# if `is_alive` is not implemented by the underneath driver,
# will consider the connection to be still alive
# we don't want overly request connection reestablishment
@ -207,10 +207,7 @@ def alive(opts):
# and return False to force reconnection
return True
flag = is_alive_ret.get('out', {}).get('is_alive', False)
log.debug('Is {proxyid} still alive? {answ}'.format(
proxyid=opts.get('id'),
answ='Yes.' if flag else 'No.'
))
log.debug('Is %s still alive? %s', opts.get('id'), 'Yes.' if flag else 'No.')
return flag
@ -277,13 +274,12 @@ def shutdown(opts):
raise Exception('not connected!')
NETWORK_DEVICE.get('DRIVER').close()
except Exception as error:
port = NETWORK_DEVICE.get('OPTIONAL_ARGS', {}).get('port')
log.error(
'Cannot close connection with {hostname}{port}! Please check error: {error}'.format(
hostname=NETWORK_DEVICE.get('HOSTNAME', '[unknown hostname]'),
port=(':{port}'.format(port=NETWORK_DEVICE.get('OPTIONAL_ARGS', {}).get('port'))
if NETWORK_DEVICE.get('OPTIONAL_ARGS', {}).get('port') else ''),
error=error
)
'Cannot close connection with %s%s! Please check error: %s',
NETWORK_DEVICE.get('HOSTNAME', '[unknown hostname]'),
':{0}'.format(port) if port else '',
error
)
return True

View File

@ -55,7 +55,7 @@ the :mod:`salt.modules.nxos<salt.modules.nxos>` execution module.
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import multiprocessing
import re

View File

@ -185,7 +185,7 @@ The generated XML API key for the Panorama server. Required.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python Libs
import logging

View File

@ -21,7 +21,7 @@ Philips HUE lamps module for proxy.
'''
# pylint: disable=import-error,no-name-in-module,redefined-builtin
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import salt.ext.six.moves.http_client as http_client
# Import python libs
@ -29,6 +29,7 @@ import logging
import time
import json
from salt.exceptions import (CommandExecutionError, MinionError)
from salt.ext import six
__proxyenabled__ = ['philips_hue']
@ -188,8 +189,8 @@ def call_lights(*args, **kwargs):
res = dict()
lights = _get_lights()
for dev_id in 'id' in kwargs and _get_devices(kwargs) or sorted(lights.keys()):
if lights.get(str(dev_id)):
res[dev_id] = lights[str(dev_id)]
if lights.get(six.text_type(dev_id)):
res[dev_id] = lights[six.text_type(dev_id)]
return res or False
@ -221,7 +222,7 @@ def call_switch(*args, **kwargs):
state = kwargs['on'] and Const.LAMP_ON or Const.LAMP_OFF
else:
# Invert the current state
state = devices[str(dev_id)]['state']['on'] and Const.LAMP_OFF or Const.LAMP_ON
state = devices[six.text_type(dev_id)]['state']['on'] and Const.LAMP_OFF or Const.LAMP_ON
out[dev_id] = _set(dev_id, state)
return out
@ -247,7 +248,7 @@ def call_blink(*args, **kwargs):
pause = kwargs.get('pause', 0)
res = dict()
for dev_id in 'id' not in kwargs and sorted(devices.keys()) or _get_devices(kwargs):
state = devices[str(dev_id)]['state']['on']
state = devices[six.text_type(dev_id)]['state']['on']
_set(dev_id, state and Const.LAMP_OFF or Const.LAMP_ON)
if pause:
time.sleep(pause)

View File

@ -3,7 +3,7 @@
This is a simple proxy-minion designed to connect to and communicate with
the bottle-based web service contained in https://github.com/saltstack/salt-contrib/tree/master/proxyminion_rest_example
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging

View File

@ -5,7 +5,7 @@
This can be used as an option when the device does not provide
an api over HTTP and doesn't have the python stack to run a minion.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import json

View File

@ -183,7 +183,7 @@ and communicate with the ESXi host.
'''
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
@ -228,13 +228,12 @@ def init(opts):
This function gets called when the proxy starts up.
For login the protocol and port are cached.
'''
log.info('Initting vcenter proxy module in process {0}'
''.format(os.getpid()))
log.info('Initting vcenter proxy module in process %s', os.getpid())
log.trace('VCenter Proxy Validating vcenter proxy input')
schema = VCenterProxySchema.serialize()
log.trace('schema = {}'.format(schema))
log.trace('schema = %s', schema)
proxy_conf = merge(opts.get('proxy', {}), __pillar__.get('proxy', {}))
log.trace('proxy_conf = {0}'.format(proxy_conf))
log.trace('proxy_conf = %s', proxy_conf)
try:
jsonschema.validate(proxy_conf, schema)
except jsonschema.exceptions.ValidationError as exc:
@ -281,7 +280,7 @@ def init(opts):
username, password = find_credentials()
DETAILS['password'] = password
except salt.exceptions.SaltSystemExit as err:
log.critical('Error: {0}'.format(err))
log.critical('Error: %s', err)
return False
return True

View File

@ -3,7 +3,7 @@
Cheetah Renderer for Salt
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import 3rd party libs
try:
@ -13,7 +13,7 @@ except ImportError:
HAS_LIBS = False
# Import salt libs
from salt.ext.six import string_types
from salt.ext import six
def render(cheetah_data, saltenv='base', sls='', method='xml', **kws):
@ -25,7 +25,7 @@ def render(cheetah_data, saltenv='base', sls='', method='xml', **kws):
if not HAS_LIBS:
return {}
if not isinstance(cheetah_data, string_types):
if not isinstance(cheetah_data, six.string_types):
cheetah_data = cheetah_data.read()
if cheetah_data.startswith('#!'):
@ -33,4 +33,4 @@ def render(cheetah_data, saltenv='base', sls='', method='xml', **kws):
if not cheetah_data.strip():
return {}
return str(Template(cheetah_data, searchList=[kws]))
return six.text_type(Template(cheetah_data, searchList=[kws]))

View File

@ -12,7 +12,7 @@ This renderer requires `Dogeon`__ (installable via pip)
.. __: https://github.com/soasme/dogeon
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging

View File

@ -3,7 +3,7 @@
Genshi Renderer for Salt
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import 3rd party libs
try:
@ -15,7 +15,7 @@ except ImportError:
HAS_LIBS = False
# Import salt libs
from salt.ext.six import string_types
from salt.ext import six
def render(genshi_data, saltenv='base', sls='', method='xml', **kws):
@ -40,7 +40,7 @@ def render(genshi_data, saltenv='base', sls='', method='xml', **kws):
if not HAS_LIBS:
return {}
if not isinstance(genshi_data, string_types):
if not isinstance(genshi_data, six.string_types):
genshi_data = genshi_data.read()
if genshi_data.startswith('#!'):

View File

@ -209,7 +209,7 @@ pillar data like so:
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import os
import re
import logging
@ -288,7 +288,7 @@ def _decrypt_ciphertext(cipher, translate_newlines=False):
else:
if six.PY3 and isinstance(decrypted_data, bytes):
decrypted_data = decrypted_data.decode(__salt_system_encoding__)
return str(decrypted_data)
return six.text_type(decrypted_data)
def _decrypt_object(obj, translate_newlines=False):

View File

@ -4,7 +4,7 @@ Hjson Renderer for Salt
http://laktak.github.io/hjson/
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import 3rd party libs
try:
@ -14,7 +14,7 @@ except ImportError:
HAS_LIBS = False
# Import salt libs
from salt.ext.six import string_types
from salt.ext import six
def render(hjson_data, saltenv='base', sls='', **kws):
@ -24,7 +24,7 @@ def render(hjson_data, saltenv='base', sls='', **kws):
:rtype: A Python data structure
'''
if not isinstance(hjson_data, string_types):
if not isinstance(hjson_data, six.string_types):
hjson_data = hjson_data.read()
if hjson_data.startswith('#!'):

View File

@ -6,7 +6,7 @@ For Jinja usage information see :ref:`Understanding Jinja <understanding-jinja>`
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import salt libs
@ -53,7 +53,7 @@ def render(template_file, saltenv='base', sls='', argline='',
from_str = argline == '-s'
if not from_str and argline:
raise SaltRenderError(
'Unknown renderer option: {opt}'.format(opt=argline)
'Unknown renderer option: {opt}'.format(opt=argline)
)
tmp_data = salt.utils.templates.JINJA(template_file,

View File

@ -3,14 +3,14 @@
JSON Renderer for Salt
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import salt.utils.json
json = salt.utils.json.import_json()
# Import salt libs
from salt.ext.six import string_types
from salt.ext import six
def render(json_data, saltenv='base', sls='', **kws):
@ -20,7 +20,7 @@ def render(json_data, saltenv='base', sls='', **kws):
:rtype: A Python data structure
'''
if not isinstance(json_data, string_types):
if not isinstance(json_data, six.string_types):
json_data = json_data.read()
if json_data.startswith('#!'):

View File

@ -12,7 +12,7 @@ This renderer requires the `json5 python bindings`__, installable via pip.
.. __: https://pypi.python.org/pypi/json5
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
@ -23,7 +23,7 @@ except ImportError:
HAS_JSON5 = False
# Import salt libs
from salt.ext.six import string_types
from salt.ext import six
log = logging.getLogger(__name__)
@ -44,7 +44,7 @@ def render(json_data, saltenv='base', sls='', **kws):
:rtype: A Python data structure
'''
if not isinstance(json_data, string_types):
if not isinstance(json_data, six.string_types):
json_data = json_data.read()
if json_data.startswith('#!'):

View File

@ -4,7 +4,7 @@ Mako Renderer for Salt
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import salt libs
from salt.ext import six

View File

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import third party libs
import msgpack
# Import salt libs
from salt.ext.six import string_types
from salt.ext import six
def render(msgpack_data, saltenv='base', sls='', **kws):
@ -21,7 +21,7 @@ def render(msgpack_data, saltenv='base', sls='', **kws):
:rtype: A Python data structure
'''
if not isinstance(msgpack_data, string_types):
if not isinstance(msgpack_data, six.string_types):
msgpack_data = msgpack_data.read()
if msgpack_data.startswith('#!'):

View File

@ -53,7 +53,7 @@ data like so:
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import re
import logging

View File

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
"""
'''
Pass Renderer for Salt
[pass](https://www.passwordstore.org/)
@ -38,10 +38,10 @@ entries that are of interest for pillar data
pass:
pkg.installed
```
"""
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
from os.path import expanduser
@ -58,9 +58,9 @@ log = logging.getLogger(__name__)
def _get_pass_exec():
"""
'''
Return the pass executable or raise an error
"""
'''
pass_exec = salt.utils.path.which('pass')
if pass_exec:
return pass_exec
@ -69,12 +69,12 @@ def _get_pass_exec():
def _fetch_secret(pass_path):
"""
'''
Fetch secret from pass based on pass_path. If there is
any error, return back the original pass_path value
"""
'''
cmd = "pass show {0}".format(pass_path.strip())
log.debug('Fetching secret: {0}'.format(cmd))
log.debug('Fetching secret: %s', cmd)
proc = Popen(cmd.split(' '), stdout=PIPE, stderr=PIPE)
pass_data, pass_error = proc.communicate()
@ -88,9 +88,9 @@ def _fetch_secret(pass_path):
def _decrypt_object(obj):
"""
'''
Recursively try to find a pass path (string) that can be handed off to pass
"""
'''
if isinstance(obj, six.string_types):
return _fetch_secret(obj)
elif isinstance(obj, dict):
@ -103,9 +103,9 @@ def _decrypt_object(obj):
def render(pass_info, saltenv='base', sls='', argline='', **kwargs):
"""
'''
Fetch secret from pass based on pass_path
"""
'''
try:
_get_pass_exec()
except SaltRenderError:

View File

@ -97,7 +97,7 @@ Translate to::
return config
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import os

View File

@ -230,7 +230,7 @@ is enabled by setting the ``ordered`` option on ``__pydsl__``.
__pydsl__.set(ordered=True)
for i in range(10):
i = str(i)
i = six.text_type(i)
state(i).cmd.run('echo '+i, cwd='/')
state('1').cmd.run('echo one')
state('2').cmd.run(name='echo two')
@ -334,7 +334,7 @@ For example:
my_mod = sys.modules['salt.loaded.ext.module.my_mod']
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import types
import salt.utils.pydsl as pydsl

View File

@ -295,13 +295,13 @@ TODO
'''
# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
import re
# Import Salt Libs
from salt.ext.six import exec_
from salt.ext import six
import salt.utils.files
import salt.loader
from salt.fileclient import get_file_client
@ -384,7 +384,7 @@ def render(template, saltenv='base', sls='', salt_data=True, **kwargs):
mod,
valid_funcs
)
exec_(mod_cmd, mod_globals, mod_locals)
six.exec_(mod_cmd, mod_globals, mod_locals)
_globals[mod_camel] = mod_locals[mod_camel]
@ -459,7 +459,7 @@ def render(template, saltenv='base', sls='', salt_data=True, **kwargs):
with salt.utils.files.fopen(state_file) as state_fh:
state_contents, state_globals = process_template(state_fh)
exec_(state_contents, state_globals)
six.exec_(state_contents, state_globals)
# if no imports have been specified then we are being imported as: import salt://foo.sls
# so we want to stick all of the locals from our state file into the template globals
@ -501,6 +501,6 @@ def render(template, saltenv='base', sls='', salt_data=True, **kwargs):
Registry.enabled = True
# now exec our template using our created scopes
exec_(final_template, _globals)
six.exec_(final_template, _globals)
return Registry.salt_data()

View File

@ -27,15 +27,16 @@ A flexible renderer that takes a templating engine and a data format
#
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
import re
import getopt
import copy
from os import path as ospath
# Import salt libs
import salt.utils.files
import salt.utils.stringutils
from salt.exceptions import SaltRenderError
# Import 3rd-party libs
@ -75,7 +76,7 @@ def __init__(opts):
STATE_NAME = STATE_FUNC.split('.')[0]
MOD_BASENAME = ospath.basename(__file__)
MOD_BASENAME = os.path.basename(__file__)
INVALID_USAGE_ERROR = SaltRenderError(
'Invalid use of {0} renderer!\n'
'''Usage: #!{1} [-GoSp] [<data_renderer> [options] . <template_renderer> [options]]
@ -108,7 +109,7 @@ def render(input, saltenv='base', sls='', argline='', **kws):
implicit_require = False
def process_sls_data(data, context=None, extract=False):
sls_dir = ospath.dirname(sls.replace('.', ospath.sep)) if '.' in sls else sls
sls_dir = os.path.dirname(sls.replace('.', os.path.sep)) if '.' in sls else sls
ctx = dict(sls_dir=sls_dir if sls_dir else '.')
if context:
@ -156,8 +157,8 @@ def render(input, saltenv='base', sls='', argline='', **kws):
raise
except Exception as err:
log.exception(
'Error found while pre-processing the salt file '
'{0}:\n{1}'.format(sls, err)
'Error found while pre-processing the salt file %s:\n%s',
sls, err
)
from salt.state import State
state = State(__opts__)
@ -207,9 +208,9 @@ def render(input, saltenv='base', sls='', argline='', **kws):
if isinstance(input, six.string_types):
with salt.utils.files.fopen(input, 'r') as ifile:
sls_templ = ifile.read()
sls_templ = salt.utils.stringutils.to_unicode(ifile.read())
else: # assume file-like
sls_templ = input.read()
sls_templ = salt.utils.stringutils.to_unicode(input.read())
# first pass to extract the state configuration
match = re.search(__opts__['stateconf_end_marker'], sls_templ)
@ -235,7 +236,7 @@ def render(input, saltenv='base', sls='', argline='', **kws):
if log.isEnabledFor(logging.DEBUG):
import pprint # FIXME: pprint OrderedDict
log.debug('Rendered sls: {0}'.format(pprint.pformat(data)))
log.debug('Rendered sls: %s', pprint.pformat(data))
return data

View File

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import salt libs
from salt.ext import six

View File

@ -5,7 +5,7 @@ YAML Renderer for Salt
For YAML usage information see :ref:`Understanding YAML <yaml>`.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
@ -60,13 +60,12 @@ def render(yaml_data, saltenv='base', sls='', argline='', **kws):
if len(warn_list) > 0:
for item in warn_list:
log.warning(
'{warn} found in {sls} saltenv={env}'.format(
warn=item.message, sls=salt.utils.url.create(sls), env=saltenv
)
'%s found in %s saltenv=%s',
item.message, salt.utils.url.create(sls), saltenv
)
if not data:
data = {}
log.debug('Results of YAML rendering: \n{0}'.format(data))
log.debug('Results of YAML rendering: \n%s', data)
def _validate_data(data):
'''

View File

@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
@ -24,11 +24,10 @@ def render(sls_data, saltenv='base', sls='', **kws):
for item in warn_list:
log.warning(
'{warn} found in {sls} saltenv={env}'.format(
warn=item.message, sls=salt.utils.url.create(sls), env=saltenv
)
'%s found in %s saltenv=%s',
item.message, salt.utils.url.create(sls), saltenv
)
log.debug('Results of SLS rendering: \n{0}'.format(data))
log.debug('Results of SLS rendering: \n%s', data)
return data

View File

@ -5,9 +5,10 @@ Returners Directory
:func:`get_returner_options` is a general purpose function that returners may
use to fetch their configuration options.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
from salt.ext import six
log = logging.getLogger(__name__)
@ -116,7 +117,7 @@ def _fetch_ret_config(ret):
return None
if 'ret_config' not in ret:
return ''
return str(ret['ret_config'])
return six.text_type(ret['ret_config'])
def _fetch_option(cfg, ret_config, virtualname, attr_name):

View File

@ -82,7 +82,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import collections
import logging
import socket
@ -143,7 +143,7 @@ def _carbon(host, port):
carbon_sock.connect((host, port))
except socket.error as err:
log.error('Error connecting to {0}:{1}, {2}'.format(host, port, err))
log.error('Error connecting to %s:%s, %s', host, port, err)
raise
else:
log.debug('Connected to carbon')
@ -176,7 +176,7 @@ def _send_textmetrics(metrics):
Format metrics for the carbon plaintext protocol
'''
data = [' '.join(map(str, metric)) for metric in metrics] + ['']
data = [' '.join(map(six.text_type, metric)) for metric in metrics] + ['']
return '\n'.join(data)
@ -199,7 +199,10 @@ def _walk(path, value, metrics, timestamp, skip):
Whether or not to skip metrics when there's an error casting the value
to a float. Defaults to `False`.
'''
log.trace('Carbon return walking path: {0}, value: {1}, metrics: {2}, timestamp: {3}'.format(path, value, metrics, timestamp))
log.trace(
'Carbon return walking path: %s, value: %s, metrics: %s, ',
'timestamp: %s', path, value, metrics, timestamp
)
if isinstance(value, collections.Mapping):
for key, val in six.iteritems(value):
_walk('{0}.{1}'.format(path, key), val, metrics, timestamp, skip)
@ -232,8 +235,8 @@ def _send(saltdata, metric_base, opts):
metric_base_pattern = opts.get('carbon.metric_base_pattern')
mode = opts.get('mode').lower() if 'mode' in opts else 'text'
log.debug('Carbon minion configured with host: {0}:{1}'.format(host, port))
log.debug('Using carbon protocol: {0}'.format(mode))
log.debug('Carbon minion configured with host: %s:%s', host, port)
log.debug('Using carbon protocol: %s', mode)
if not (host and port):
log.error('Host or port not defined')
@ -246,10 +249,10 @@ def _send(saltdata, metric_base, opts):
handler = _send_picklemetrics if mode == 'pickle' else _send_textmetrics
metrics = []
log.trace('Carbon returning walking data: {0}'.format(saltdata))
log.trace('Carbon returning walking data: %s', saltdata)
_walk(metric_base, saltdata, metrics, timestamp, skip)
data = handler(metrics)
log.trace('Carbon inserting data: {0}'.format(data))
log.trace('Carbon inserting data: %s', data)
with _carbon(host, port) as sock:
total_sent_bytes = 0
@ -259,7 +262,7 @@ def _send(saltdata, metric_base, opts):
log.error('Bytes sent 0, Connection reset?')
return
log.debug('Sent {0} bytes to carbon'.format(sent_bytes))
log.debug('Sent %s bytes to carbon', sent_bytes)
total_sent_bytes += sent_bytes
@ -272,7 +275,7 @@ def event_return(events):
opts = _get_options({}) # Pass in empty ret, since this is a list of events
opts['skip'] = True
for event in events:
log.trace('Carbon returner received event: {0}'.format(event))
log.trace('Carbon returner received event: %s', event)
metric_base = event['tag']
saltdata = event['data'].get('data')
_send(saltdata, metric_base, opts)

View File

@ -117,9 +117,7 @@ needs. SaltStack has seen situations where these timeouts can resolve
some stacktraces that appear to come from the Datastax Python driver.
'''
from __future__ import absolute_import
# Let's not allow PyLint complain about string substitution
# pylint: disable=W1321,E1321
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import json
@ -132,6 +130,7 @@ import salt.returners
import salt.utils.jid
import salt.exceptions
from salt.exceptions import CommandExecutionError
from salt.ext import six
# Import third party libs
try:
@ -210,7 +209,7 @@ def returner(ret):
log.critical('Could not insert into salt_returns with Cassandra returner.')
raise
except Exception as e:
log.critical('Unexpected error while inserting into salt_returns: {0}'.format(str(e)))
log.critical('Unexpected error while inserting into salt_returns: %s', e)
raise
# Store the last function called by the minion
@ -234,7 +233,10 @@ def returner(ret):
log.critical('Could not store minion ID with Cassandra returner.')
raise
except Exception as e:
log.critical('Unexpected error while inserting minion ID into the minions table: {0}'.format(str(e)))
log.critical(
'Unexpected error while inserting minion ID into the minions '
'table: %s', e
)
raise
@ -258,7 +260,7 @@ def event_return(events):
) VALUES (
?, ?, ?, ?, ?)
'''
statement_arguments = [str(uuid.uuid1()),
statement_arguments = [six.text_type(uuid.uuid1()),
int(time.time() * 1000),
json.dumps(data).replace("'", "''"),
__opts__['id'],
@ -273,7 +275,8 @@ def event_return(events):
log.critical('Could not store events with Cassandra returner.')
raise
except Exception as e:
log.critical('Unexpected error while inserting into salt_events: {0}'.format(str(e)))
log.critical(
'Unexpected error while inserting into salt_events: %s', e)
raise
@ -302,7 +305,7 @@ def save_load(jid, load, minions=None):
log.critical('Could not save load in jids table.')
raise
except Exception as e:
log.critical('Unexpected error while inserting into jids: {0}'.format(str(e)))
log.critical('Unexpected error while inserting into jids: %s', e)
raise
@ -333,7 +336,7 @@ def get_load(jid):
log.critical('Could not get load from jids table.')
raise
except Exception as e:
log.critical('Unexpected error while getting load from jids: {0}'.format(str(e)))
log.critical('Unexpected error while getting load from jids: %s', e)
raise
return ret
@ -361,7 +364,8 @@ def get_jid(jid):
log.critical('Could not select job specific information.')
raise
except Exception as e:
log.critical('Unexpected error while getting job specific information: {0}'.format(str(e)))
log.critical(
'Unexpected error while getting job specific information: %s', e)
raise
return ret
@ -389,7 +393,8 @@ def get_fun(fun):
log.critical('Could not get the list of minions.')
raise
except Exception as e:
log.critical('Unexpected error while getting list of minions: {0}'.format(str(e)))
log.critical(
'Unexpected error while getting list of minions: %s', e)
raise
return ret
@ -417,7 +422,8 @@ def get_jids():
log.critical('Could not get a list of all job ids.')
raise
except Exception as e:
log.critical('Unexpected error while getting list of all job ids: {0}'.format(str(e)))
log.critical(
'Unexpected error while getting list of all job ids: %s', e)
raise
return ret
@ -444,7 +450,8 @@ def get_minions():
log.critical('Could not get the list of minions.')
raise
except Exception as e:
log.critical('Unexpected error while getting list of minions: {0}'.format(str(e)))
log.critical(
'Unexpected error while getting list of minions: %s', e)
raise
return ret

View File

@ -20,7 +20,7 @@ Required python modules: pycassa
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import salt libs
@ -68,9 +68,9 @@ def returner(ret):
'id': ret['id']}
if isinstance(ret['return'], dict):
for key, value in six.iteritems(ret['return']):
columns['return.{0}'.format(key)] = str(value)
columns['return.{0}'.format(key)] = six.text_type(value)
else:
columns['return'] = str(ret['return'])
columns['return'] = six.text_type(ret['return'])
log.debug(columns)
ccf.insert(ret['jid'], columns)

View File

@ -48,7 +48,7 @@ JID/MINION_ID
return: return_data
full_ret: full load of job return
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import json
import logging
@ -59,11 +59,14 @@ try:
except ImportError:
HAS_DEPS = False
# Import salt libs
# Import Salt libs
import salt.utils.jid
import salt.utils.json
import salt.utils.minions
# Import 3rd-party libs
from salt.ext import six
log = logging.getLogger(__name__)
@ -168,7 +171,7 @@ def prep_jid(nocache=False, passed_jid=None):
cb_ = _get_connection()
try:
cb_.add(str(jid),
cb_.add(six.text_type(jid),
{'nocache': nocache},
ttl=_get_ttl(),
)
@ -197,10 +200,8 @@ def returner(load):
)
except couchbase.exceptions.KeyExistsError:
log.error(
'An extra return was detected from minion {0}, please verify '
'the minion, this could be a replay attack'.format(
load['id']
)
'An extra return was detected from minion %s, please verify '
'the minion, this could be a replay attack', load['id']
)
return False
@ -212,13 +213,13 @@ def save_load(jid, clear_load, minion=None):
cb_ = _get_connection()
try:
jid_doc = cb_.get(str(jid))
jid_doc = cb_.get(six.text_type(jid))
except couchbase.exceptions.NotFoundError:
cb_.add(str(jid), {}, ttl=_get_ttl())
jid_doc = cb_.get(str(jid))
cb_.add(six.text_type(jid), {}, ttl=_get_ttl())
jid_doc = cb_.get(six.text_type(jid))
jid_doc.value['load'] = clear_load
cb_.replace(str(jid), jid_doc.value, cas=jid_doc.cas, ttl=_get_ttl())
cb_.replace(six.text_type(jid), jid_doc.value, cas=jid_doc.cas, ttl=_get_ttl())
# if you have a tgt, save that for the UI etc
if 'tgt' in clear_load and clear_load['tgt'] != '':
@ -240,9 +241,9 @@ def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argume
cb_ = _get_connection()
try:
jid_doc = cb_.get(str(jid))
jid_doc = cb_.get(six.text_type(jid))
except couchbase.exceptions.NotFoundError:
log.warning('Could not write job cache file for jid: {0}'.format(jid))
log.warning('Could not write job cache file for jid: %s', jid)
return False
# save the minions to a cache so we can see in the UI
@ -252,7 +253,7 @@ def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argume
)
else:
jid_doc.value['minions'] = minions
cb_.replace(str(jid), jid_doc.value, cas=jid_doc.cas, ttl=_get_ttl())
cb_.replace(six.text_type(jid), jid_doc.value, cas=jid_doc.cas, ttl=_get_ttl())
def get_load(jid):
@ -262,7 +263,7 @@ def get_load(jid):
cb_ = _get_connection()
try:
jid_doc = cb_.get(str(jid))
jid_doc = cb_.get(six.text_type(jid))
except couchbase.exceptions.NotFoundError:
return {}
@ -285,7 +286,7 @@ def get_jid(jid):
ret = {}
for result in cb_.query(DESIGN_NAME, 'jid_returns', key=str(jid), include_docs=True):
for result in cb_.query(DESIGN_NAME, 'jid_returns', key=six.text_type(jid), include_docs=True):
ret[result.value] = result.doc.value
return ret

View File

@ -53,7 +53,7 @@ otherwise multi-minion targeting can lead to losing output:
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import time
import json
@ -155,11 +155,10 @@ def returner(ret):
# Confirm that the response back was simple 'ok': true.
if 'ok' not in _response or _response['ok'] is not True:
log.error('Unable to create database "{0}"'
.format(options['db']))
log.error('Unable to create database \'%s\'', options['db'])
log.error('Nothing logged! Lost data.')
return
log.info('Created database "{0}"'.format(options['db']))
log.info('Created database \'%s\'', options['db'])
# Call _generate_doc to get a dict object of the document we're going to
# shove into the database.
@ -173,7 +172,7 @@ def returner(ret):
# Sanity check regarding the response..
if 'ok' not in _response or _response['ok'] is not True:
log.error('Unable to create document: "{0}"'.format(_response))
log.error('Unable to create document: \'%s\'', _response)
log.error('Nothing logged! Lost data.')
@ -184,7 +183,7 @@ def get_jid(jid):
options = _get_options(ret=None)
_response = _request("GET", options['url'] + options['db'] + '/' + jid)
if 'error' in _response:
log.error('Unable to get JID "{0}" : "{1}"'.format(jid, _response))
log.error('Unable to get JID \'%s\' : \'%s\'', jid, _response)
return {}
return {_response['id']: _response}
@ -198,8 +197,10 @@ def get_jids():
# Make sure the 'total_rows' is returned.. if not error out.
if 'total_rows' not in _response:
log.error('Didn\'t get valid response from requesting all docs: {0}'
.format(_response))
log.error(
'Didn\'t get valid response from requesting all docs: %s',
_response
)
return {}
# Return the rows.
@ -246,8 +247,10 @@ def get_fun(fun):
fun))
# Skip the minion if we got an error..
if 'error' in _response:
log.warning('Got an error when querying for last command by a '
'minion: {0}'.format(_response['error']))
log.warning(
'Got an error when querying for last command by a minion: %s',
_response['error']
)
continue
# Skip the minion if we didn't get any rows back. ( IE function that
@ -279,7 +282,7 @@ def get_minions():
# Verify that we got a response back.
if 'rows' not in _response:
log.error('Unable to get available minions: {0}'.format(_response))
log.error('Unable to get available minions: %s', _response)
return []
# Iterate over the rows to build up a list return it.
@ -354,8 +357,10 @@ def set_salt_view():
options['url'] + options['db'] + "/_design/salt",
"application/json", json.dumps(new_doc))
if 'error' in _response:
log.warning("Unable to set the salt design document: {0}"
.format(_response['error']))
log.warning(
'Unable to set the salt design document: %s',
_response['error']
)
return False
return True

View File

@ -27,7 +27,7 @@ An example Django module that registers a function called
'''
# Import Python libraries
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import Salt libraries
@ -61,8 +61,10 @@ def returner(ret):
signaled = dispatch.Signal(providing_args=['ret']).send(sender='returner', ret=ret)
for signal in signaled:
log.debug('Django returner function \'returner\' signaled {0} '
'which responded with {1}'.format(signal[0], signal[1]))
log.debug(
'Django returner function \'returner\' signaled %s '
'which responded with %s', signal[0], signal[1]
)
def save_load(jid, load, minions=None):
@ -74,8 +76,10 @@ def save_load(jid, load, minions=None):
sender='save_load', jid=jid, load=load)
for signal in signaled:
log.debug('Django returner function \'save_load\' signaled {0} '
'which responded with {1}'.format(signal[0], signal[1]))
log.debug(
'Django returner function \'save_load\' signaled %s '
'which responded with %s', signal[0], signal[1]
)
def prep_jid(nocache=False, passed_jid=None):

View File

@ -95,7 +95,7 @@ Minion configuration:
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import datetime
from datetime import tzinfo, timedelta
import uuid
@ -220,14 +220,17 @@ def returner(ret):
if job_fun in options['functions_blacklist']:
log.info(
'Won\'t push new data to Elasticsearch, job with jid={0} and '
'function={1} which is in the user-defined list of ignored '
'functions'.format(job_id, job_fun))
'Won\'t push new data to Elasticsearch, job with jid=%s and '
'function=%s which is in the user-defined list of ignored '
'functions', job_id, job_fun
)
return
if ret.get('return', None) is None:
log.info('Won\'t push new data to Elasticsearch, job with jid={0} was '
'not succesful'.format(job_id))
log.info(
'Won\'t push new data to Elasticsearch, job with jid=%s was '
'not succesful', job_id
)
return
# Build the index name
@ -258,7 +261,7 @@ def returner(ret):
# index data format
if options['states_order_output'] and isinstance(ret['return'], dict):
index = '{0}-ordered'.format(index)
max_chars = len(str(len(ret['return'])))
max_chars = len(six.text_type(len(ret['return'])))
for uid, data in six.iteritems(ret['return']):
# Skip keys we've already prefixed
@ -274,7 +277,7 @@ def returner(ret):
# Prefix the key with the run order so it can be sorted
new_uid = '{0}_|-{1}'.format(
str(data['__run_num__']).zfill(max_chars),
six.text_type(data['__run_num__']).zfill(max_chars),
uid,
)
@ -321,7 +324,7 @@ def returner(ret):
}
if options['debug_returner_payload']:
log.debug('Payload: {0}'.format(data))
log.debug('elasicsearch payload: %s', data)
# Post the payload
ret = __salt__['elasticsearch.document_create'](index=index,

View File

@ -64,20 +64,22 @@ create the profiles as specified above. Then add:
etcd.returner_read_profile: my_etcd_read
etcd.returner_write_profile: my_etcd_write
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import json
import logging
# Import salt libs
import salt.utils.jid
try:
import salt.utils.etcd_util
HAS_LIBS = True
except ImportError:
HAS_LIBS = False
import salt.utils.jid
# Import 3rd-party libs
from salt.ext import six
log = logging.getLogger(__name__)
@ -185,7 +187,7 @@ def get_fun():
client, path = _get_conn(__opts__)
items = client.get('/'.join((path, 'minions')))
for item in items.children:
comps = str(item.key).split('/')
comps = six.text_type(item.key).split('/')
ret[comps[-1]] = item.value
return ret
@ -199,7 +201,7 @@ def get_jids():
items = client.get('/'.join((path, 'jobs')))
for item in items.children:
if item.dir is True:
jid = str(item.key).split('/')[-1]
jid = six.text_type(item.key).split('/')[-1]
load = client.get('/'.join((item.key, '.load.p'))).value
ret[jid] = salt.utils.jid.format_jid_instance(jid, json.loads(load))
return ret
@ -213,7 +215,7 @@ def get_minions():
client, path = _get_conn(__opts__)
items = client.get('/'.join((path, 'minions')))
for item in items.children:
comps = str(item.key).split('/')
comps = six.text_type(item.key).split('/')
ret.append(comps[-1])
return ret

View File

@ -77,7 +77,7 @@ values at the time of pillar generation, these will contain minion values at
the time of execution.
'''
from __future__ import absolute_import, print_function
from __future__ import absolute_import, print_function, unicode_literals
import logging
import json
@ -88,8 +88,10 @@ from email.mime.text import MIMEText
import yaml
from salt.ext.six.moves import range
from salt.ext.six.moves import StringIO
from salt.ext import six
import salt.utils.files
import salt.utils.stringutils
import salt.returners
log = logging.getLogger(__name__)
@ -167,12 +169,12 @@ def _generate_html_table(data, out, level=0, extra_style=''):
Generate a single table of data
'''
print('<table style="{0}">'.format(
_lookup_style('table', ['table' + str(level)])), file=out)
_lookup_style('table', ['table' + six.text_type(level)])), file=out)
firstone = True
row_style = 'row' + str(level)
cell_style = 'cell' + str(level)
row_style = 'row' + six.text_type(level)
cell_style = 'cell' + six.text_type(level)
for subdata in data:
first_style = 'first_first' if firstone else 'notfirst_first'
@ -227,7 +229,7 @@ def _generate_html_table(data, out, level=0, extra_style=''):
new_extra_style
]
),
cgi.escape(str(value))
cgi.escape(six.text_type(value))
), file=out)
print('</tr>', file=out)
elif isinstance(subdata, list):
@ -252,7 +254,7 @@ def _generate_html_table(data, out, level=0, extra_style=''):
'td',
[cell_style, first_style, 'value', extra_style]
),
cgi.escape(str(subdata))
cgi.escape(six.text_type(subdata))
), file=out)
print('</tr>', file=out)
firstone = False
@ -356,10 +358,10 @@ def _generate_report(ret, setup):
unchanged = total - failed - changed
log.debug('highstate total: {0}'.format(total))
log.debug('highstate failed: {0}'.format(failed))
log.debug('highstate unchanged: {0}'.format(unchanged))
log.debug('highstate changed: {0}'.format(changed))
log.debug('highstate total: %s', total)
log.debug('highstate failed: %s', failed)
log.debug('highstate unchanged: %s', unchanged)
log.debug('highstate changed: %s', changed)
# generate report if required
if setup.get('report_everything', False) or \
@ -409,7 +411,7 @@ def _sprinkle(config_str):
'''
parts = [x for sub in config_str.split('{') for x in sub.split('}')]
for i in range(1, len(parts), 2):
parts[i] = str(__grains__.get(parts[i], ''))
parts[i] = six.text_type(__grains__.get(parts[i], ''))
return ''.join(parts)
@ -419,7 +421,7 @@ def _produce_output(report, failed, setup):
'''
report_format = setup.get('report_format', 'yaml')
log.debug('highstate output format: {0}'.format(report_format))
log.debug('highstate output format: %s', report_format)
if report_format == 'json':
report_text = json.dumps(report)
@ -436,12 +438,12 @@ def _produce_output(report, failed, setup):
report_delivery = setup.get('report_delivery', 'file')
log.debug('highstate report_delivery: {0}'.format(report_delivery))
log.debug('highstate report_delivery: %s', report_delivery)
if report_delivery == 'file':
output_file = _sprinkle(setup.get('file_output', '/tmp/test.rpt'))
with salt.utils.files.fopen(output_file, 'w') as out:
out.write(report_text)
out.write(salt.utils.stringutils.to_str(report_text))
else:
msg = MIMEText(report_text, report_format)
@ -473,7 +475,7 @@ def returner(ret):
'''
setup = _get_options(ret)
log.debug('highstate setup {0}'.format(setup))
log.debug('highstate setup %s', setup)
report, failed = _generate_report(ret, setup)
if report:
@ -491,7 +493,7 @@ def __test_html():
file_output: '/srv/salt/_returners/test.rpt'
'''
with salt.utils.files.fopen('test.rpt', 'r') as input_file:
data_text = input_file.read()
data_text = salt.utils.stringutils.to_unicode(input_file.read())
data = yaml.safe_load(data_text)
string_file = StringIO()
@ -500,7 +502,7 @@ def __test_html():
result = string_file.read()
with salt.utils.files.fopen('test.html', 'w') as output:
output.write(result)
output.write(salt.utils.stringutils.to_str(result))
if __name__ == '__main__':

View File

@ -93,7 +93,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return hipchat --return_kwargs '{"room_id": "another-room"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import json
@ -101,6 +101,7 @@ import pprint
import logging
# pylint: disable=import-error,no-name-in-module
from salt.ext import six
from salt.ext.six.moves.urllib.parse import urljoin as _urljoin
from salt.ext.six.moves.urllib.parse import urlencode as _urlencode
import salt.ext.six.moves.http_client
@ -183,7 +184,7 @@ def _query(function,
query_params = {}
if room_id:
room_id = 'room/{0}/notification'.format(str(room_id))
room_id = 'room/{0}/notification'.format(six.text_type(room_id))
else:
room_id = 'room/0/notification'
@ -388,7 +389,7 @@ def event_return(events):
# TODO:
# Pre-process messages to apply individualized colors for various
# event types.
log.trace('Hipchat returner received event: {0}'.format(event))
log.trace('Hipchat returner received event: %s', event)
_send_message(_options.get('room_id'), # room_id
event['data'], # message
_options.get('from_name'), # from_name

View File

@ -50,7 +50,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return influxdb --return_kwargs '{"db": "another-salt"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import json
@ -113,7 +113,10 @@ def _get_version(host, port, user, password):
if influxDBVersionHeader in result.headers:
version = result.headers[influxDBVersionHeader]
except Exception as ex:
log.critical('Failed to query InfluxDB version from HTTP API within InfluxDB returner: {0}'.format(ex))
log.critical(
'Failed to query InfluxDB version from HTTP API within InfluxDB '
'returner: %s', ex
)
return version
@ -187,7 +190,7 @@ def returner(ret):
try:
serv.write_points(req)
except Exception as ex:
log.critical('Failed to store return with InfluxDB returner: {0}'.format(ex))
log.critical('Failed to store return with InfluxDB returner: %s', ex)
def save_load(jid, load, minions=None):
@ -224,7 +227,7 @@ def save_load(jid, load, minions=None):
try:
serv.write_points(req)
except Exception as ex:
log.critical('Failed to store load with InfluxDB returner: {0}'.format(ex))
log.critical('Failed to store load with InfluxDB returner: %s', ex)
def save_minions(jid, minions, syndic_id=None): # pylint: disable=unused-argument
@ -241,9 +244,9 @@ def get_load(jid):
serv = _get_serv(ret=None)
sql = "select load from jids where jid = '{0}'".format(jid)
log.debug(">> Now in get_load {0}".format(jid))
log.debug(">> Now in get_load %s", jid)
data = serv.query(sql)
log.debug(">> Now Data: {0}".format(data))
log.debug(">> Now Data: %s", data)
if data:
return data
return {}

View File

@ -25,7 +25,7 @@ To use the kafka returner, append '--return kafka' to the Salt command, eg;
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import json
import logging

View File

@ -31,7 +31,7 @@ by adding more tags to the submission.
'''
# Import python libs
from __future__ import absolute_import, print_function
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import Salt libs
@ -75,7 +75,7 @@ def _get_options(ret=None):
_options['api_url'] = _options.get('api_url', 'metrics-api.librato.com')
log.debug("Retrieved Librato options: {0}".format(_options))
log.debug('Retrieved Librato options: %s', _options)
return _options
@ -112,7 +112,7 @@ def _calculate_runtimes(states):
# Count durations
results['runtime'] += resultset['duration']
log.debug("Parsed state metrics: {0}".format(results))
log.debug('Parsed state metrics: %s', results)
return results
@ -125,31 +125,37 @@ def returner(ret):
q = librato_conn.new_queue()
if ret['fun'] == 'state.highstate':
log.debug("Found returned Highstate data.")
log.debug('Found returned Highstate data.')
# Calculate the runtimes and number of failed states.
stats = _calculate_runtimes(ret['return'])
log.debug("Batching Metric retcode with {0}".format(ret['retcode']))
q.add("saltstack.highstate.retcode", ret[
'retcode'], tags={'Name': ret['id']})
log.debug('Batching Metric retcode with %s', ret['retcode'])
q.add('saltstack.highstate.retcode',
ret['retcode'], tags={'Name': ret['id']})
log.debug("Batching Metric num_failed_jobs with {0}".format(
stats['num_failed_states']))
q.add("saltstack.highstate.failed_states",
log.debug(
'Batching Metric num_failed_jobs with %s',
stats['num_failed_states']
)
q.add('saltstack.highstate.failed_states',
stats['num_failed_states'], tags={'Name': ret['id']})
log.debug("Batching Metric num_passed_states with {0}".format(
stats['num_passed_states']))
q.add("saltstack.highstate.passed_states",
log.debug(
'Batching Metric num_passed_states with %s',
stats['num_passed_states']
)
q.add('saltstack.highstate.passed_states',
stats['num_passed_states'], tags={'Name': ret['id']})
log.debug("Batching Metric runtime with {0}".format(stats['runtime']))
q.add("saltstack.highstate.runtime",
log.debug('Batching Metric runtime with %s', stats['runtime'])
q.add('saltstack.highstate.runtime',
stats['runtime'], tags={'Name': ret['id']})
log.debug("Batching Metric runtime with {0}".format(
stats['num_failed_states'] + stats['num_passed_states']))
q.add("saltstack.highstate.total_states", stats[
log.debug(
'Batching Metric runtime with %s',
stats['num_failed_states'] + stats['num_passed_states']
)
q.add('saltstack.highstate.total_states', stats[
'num_failed_states'] + stats['num_passed_states'], tags={'Name': ret['id']})
log.info("Sending metrics to Librato.")
log.info('Sending metrics to Librato.')
q.submit()

View File

@ -11,7 +11,7 @@ To use the local returner, append '--return local' to the salt command. ex:
'''
# Import python libs
from __future__ import absolute_import, print_function
from __future__ import absolute_import, print_function, unicode_literals
def returner(ret):

View File

@ -3,7 +3,7 @@
Return data to local job cache
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import errno
@ -20,6 +20,7 @@ import salt.utils.atomicfile
import salt.utils.files
import salt.utils.jid
import salt.utils.minions
import salt.utils.stringutils
import salt.exceptions
# Import 3rd-party libs
@ -108,15 +109,13 @@ def prep_jid(nocache=False, passed_jid=None, recurse_count=0):
try:
with salt.utils.files.fopen(os.path.join(jid_dir, 'jid'), 'wb+') as fn_:
if six.PY2:
fn_.write(jid)
else:
fn_.write(bytes(jid, 'utf-8'))
fn_.write(salt.utils.stringutils.to_bytes(jid))
if nocache:
with salt.utils.files.fopen(os.path.join(jid_dir, 'nocache'), 'wb+') as fn_:
fn_.write(b'')
with salt.utils.files.fopen(os.path.join(jid_dir, 'nocache'), 'wb+'):
pass
except IOError:
log.warning('Could not write out jid file for job {0}. Retrying.'.format(jid))
log.warning(
'Could not write out jid file for job %s. Retrying.', jid)
time.sleep(0.1)
return prep_jid(passed_jid=jid, nocache=nocache,
recurse_count=recurse_count+1)
@ -146,16 +145,14 @@ def returner(load):
if err.errno == errno.EEXIST:
# Minion has already returned this jid and it should be dropped
log.error(
'An extra return was detected from minion {0}, please verify '
'the minion, this could be a replay attack'.format(
load['id']
)
'An extra return was detected from minion %s, please verify '
'the minion, this could be a replay attack', load['id']
)
return False
elif err.errno == errno.ENOENT:
log.error(
'An inconsistency occurred, a job was received with a job id '
'that is not present in the local cache: {jid}'.format(**load)
'(%s) that is not present in the local cache', load['jid']
)
return False
raise
@ -280,8 +277,8 @@ def save_minions(jid, minions, syndic_id=None):
serial.dump(minions, wfh)
except IOError as exc:
log.error(
'Failed to write minion list {0} to job cache file {1}: {2}'
.format(minions, minions_path, exc)
'Failed to write minion list %s to job cache file %s: %s',
minions, minions_path, exc
)
@ -351,7 +348,7 @@ def get_jid(jid):
with salt.utils.files.fopen(outp, 'rb') as rfh:
ret[fn_]['out'] = serial.load(rfh)
except Exception as exc:
if 'Permission denied:' in str(exc):
if 'Permission denied:' in six.text_type(exc):
raise
return ret
@ -459,9 +456,9 @@ def update_endtime(jid, time):
if not os.path.exists(jid_dir):
os.makedirs(jid_dir)
with salt.utils.files.fopen(os.path.join(jid_dir, ENDTIME), 'w') as etfile:
etfile.write(time)
etfile.write(salt.utils.stringutils.to_str(time))
except IOError as exc:
log.warning('Could not write job invocation cache file: {0}'.format(exc))
log.warning('Could not write job invocation cache file: %s', exc)
def get_endtime(jid):
@ -475,7 +472,7 @@ def get_endtime(jid):
if not os.path.exists(etpath):
return False
with salt.utils.files.fopen(etpath, 'r') as etfile:
endtime = etfile.read().strip('\n')
endtime = salt.utils.stringutils.to_unicode(etfile.read()).strip('\n')
return endtime
@ -504,7 +501,7 @@ def save_reg(data):
with salt.utils.files.fopen(regfile, 'a') as fh_:
msgpack.dump(data, fh_)
except:
log.error('Could not write to msgpack file {0}'.format(__opts__['outdir']))
log.error('Could not write to msgpack file %s', __opts__['outdir'])
raise
@ -518,5 +515,5 @@ def load_reg():
with salt.utils.files.fopen(regfile, 'r') as fh_:
return msgpack.load(fh_)
except:
log.error('Could not write to msgpack file {0}'.format(__opts__['outdir']))
log.error('Could not write to msgpack file %s', __opts__['outdir'])
raise

View File

@ -43,7 +43,7 @@ To override individual configuration items, append --return_kwargs '{'key:': 'va
salt '*' test.ping --return mattermost --return_kwargs '{'channel': '#random'}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import logging
@ -89,7 +89,7 @@ def _get_options(ret=None):
attrs,
__salt__=__salt__,
__opts__=__opts__)
log.debug('Options: {0}'.format(_options))
log.debug('Options: %s', _options)
return _options
@ -146,8 +146,8 @@ def event_return(events):
is_ok = True
for event in events:
log.debug('Event: {0}'.format(str(event)))
log.debug('Event data: {0}'.format(str(event['data'])))
log.debug('Event: %s', event)
log.debug('Event data: %s', event['data'])
message = 'tag: {0}\r\n'.format(event['tag'])
for key, value in six.iteritems(event['data']):
message += '{0}: {1}\r\n'.format(key, value)
@ -183,10 +183,10 @@ def post_message(channel,
if username:
parameters['username'] = username
parameters['text'] = '```' + message + '```' # pre-formatted, fixed-width text
log.debug('Parameters: {0}'.format(parameters))
log.debug('Parameters: %s', parameters)
result = salt.utils.mattermost.query(api_url=api_url,
hook=hook,
data='payload={0}'.format(json.dumps(parameters)))
log.debug('result {0}'.format(result))
log.debug('result %s', result)
return bool(result)

View File

@ -45,7 +45,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return memcache --return_kwargs '{"host": "hostname.domain.com"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import json
@ -99,7 +99,7 @@ def _get_serv(ret):
host = _options.get('host')
port = _options.get('port')
log.debug('memcache server: {0}:{1}'.format(host, port))
log.debug('memcache server: %s:%s', host, port)
if not host or not port:
log.error('Host or port not defined in salt config')
return

View File

@ -63,7 +63,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return mongo --return_kwargs '{"db": "another-salt"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
@ -226,7 +226,7 @@ def _safe_copy(dat):
for k in dat:
r = k.replace('%', '%25').replace('\\', '%5c').replace('$', '%24').replace('.', '%2e')
if r != k:
log.debug('converting dict key from {0} to {1} for mongodb'.format(k, r))
log.debug('converting dict key from %s to %s for mongodb', k, r)
ret[r] = _safe_copy(dat[k])
return ret

View File

@ -60,7 +60,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return mongo --return_kwargs '{"db": "another-salt"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging

View File

@ -5,7 +5,7 @@ Read/Write multiple returners
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import salt libs

View File

@ -138,7 +138,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return mysql --return_kwargs '{"db": "another-salt"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Let's not allow PyLint complain about string substitution
# pylint: disable=W1321,E1321
@ -229,7 +229,7 @@ def _get_serv(ret=None, commit=False):
conn.ping()
connect = False
except MySQLdb.connections.OperationalError as exc:
log.debug('OperationalError on ping: {0}'.format(exc))
log.debug('OperationalError on ping: %s', exc)
if connect:
log.debug('Generating new MySQL connection pool')
@ -263,7 +263,7 @@ def _get_serv(ret=None, commit=False):
yield cursor
except MySQLdb.DatabaseError as err:
error = err.args
sys.stderr.write(str(error))
sys.stderr.write(six.text_type(error))
cursor.execute("ROLLBACK")
raise err
else:
@ -476,8 +476,8 @@ def _purge_jobs(timestamp):
cur.execute('COMMIT')
except MySQLdb.Error as e:
log.error('mysql returner archiver was unable to delete contents of table \'jids\'')
log.error(str(e))
raise salt.exceptions.Salt(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))
try:
sql = 'delete from `salt_returns` where alter_time < %s'
@ -485,8 +485,8 @@ def _purge_jobs(timestamp):
cur.execute('COMMIT')
except MySQLdb.Error as e:
log.error('mysql returner archiver was unable to delete contents of table \'salt_returns\'')
log.error(str(e))
raise salt.exceptions.Salt(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))
try:
sql = 'delete from `salt_events` where alter_time < %s'
@ -494,8 +494,8 @@ def _purge_jobs(timestamp):
cur.execute('COMMIT')
except MySQLdb.Error as e:
log.error('mysql returner archiver was unable to delete contents of table \'salt_events\'')
log.error(str(e))
raise salt.exceptions.Salt(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))
return True
@ -521,8 +521,8 @@ def _archive_jobs(timestamp):
target_tables[table_name] = tmp_table_name
except MySQLdb.Error as e:
log.error('mysql returner archiver was unable to create the archive tables.')
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))
try:
sql = 'insert into `{0}` select * from `{1}` where jid in (select distinct jid from salt_returns where alter_time < %s)'.format(target_tables['jids'], 'jids')
@ -530,8 +530,8 @@ def _archive_jobs(timestamp):
cur.execute('COMMIT')
except MySQLdb.Error as e:
log.error('mysql returner archiver was unable to copy contents of table \'jids\'')
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))
except Exception as e:
log.error(e)
raise
@ -542,8 +542,8 @@ def _archive_jobs(timestamp):
cur.execute('COMMIT')
except MySQLdb.Error as e:
log.error('mysql returner archiver was unable to copy contents of table \'salt_returns\'')
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))
try:
sql = 'insert into `{0}` select * from `{1}` where alter_time < %s'.format(target_tables['salt_events'], 'salt_events')
@ -551,8 +551,8 @@ def _archive_jobs(timestamp):
cur.execute('COMMIT')
except MySQLdb.Error as e:
log.error('mysql returner archiver was unable to copy contents of table \'salt_events\'')
log.error(str(e))
raise salt.exceptions.SaltRunnerError(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))
return _purge_jobs(timestamp)
@ -577,5 +577,5 @@ def clean_old_jobs():
_purge_jobs(stamp)
except MySQLdb.Error as e:
log.error('Mysql returner was unable to get timestamp for purge/archive of jobs')
log.error(str(e))
raise salt.exceptions.Salt(str(e))
log.error(six.text_type(e))
raise salt.exceptions.SaltRunnerError(six.text_type(e))

View File

@ -48,7 +48,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return nagios --return_kwargs '{"service": "service-name"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import cgi
@ -56,6 +56,7 @@ import logging
import salt.returners
# pylint: disable=import-error,no-name-in-module,redefined-builtin
from salt.ext import six
import salt.ext.six.moves.http_client
# pylint: enable=import-error,no-name-in-module,redefined-builtin
@ -64,6 +65,13 @@ log = logging.getLogger(__name__)
__virtualname__ = 'nagios_nrdp'
def __virtual__():
'''
Return virtualname
'''
return 'nagios.list_plugins' in __salt__
def _get_options(ret=None):
'''
Get the requests options from salt.
@ -80,7 +88,7 @@ def _get_options(ret=None):
__salt__=__salt__,
__opts__=__opts__)
log.debug('attrs {0}'.format(attrs))
log.debug('attrs %s', attrs)
if 'checktype' not in _options or _options['checktype'] == '':
# default to passive check type
_options['checktype'] = '1'
@ -92,7 +100,7 @@ def _get_options(ret=None):
_options['checktype'] = '1'
# checktype should be a string
_options['checktype'] = str(_options['checktype'])
_options['checktype'] = six.text_type(_options['checktype'])
return _options
@ -111,11 +119,11 @@ def _prepare_xml(options=None, state=None):
# No service defined then we set the status of the hostname
if 'service' in options and options['service'] != '':
xml += "<checkresult type='service' checktype='"+str(options['checktype'])+"'>"
xml += "<checkresult type='service' checktype='" + six.text_type(options['checktype'])+"'>"
xml += "<hostname>"+cgi.escape(options['hostname'], True)+"</hostname>"
xml += "<servicename>"+cgi.escape(options['service'], True)+"</servicename>"
else:
xml += "<checkresult type='host' checktype='"+str(options['checktype'])+"'>"
xml += "<checkresult type='host' checktype='" + six.text_type(options['checktype'])+"'>"
xml += "<hostname>"+cgi.escape(options['hostname'], True)+"</hostname>"
xml += "<state>"+_state+"</state>"
@ -169,24 +177,20 @@ def _post_data(options=None, xml=None):
log.error('No content returned from Nagios NRDP.')
return False
else:
log.error('Error returned from Nagios NRDP. Status code: {0}.'.format(res.status_code))
log.error(
'Error returned from Nagios NRDP. Status code: %s.',
res.status_code
)
return False
def __virtual__():
'''
Return virtualname
'''
return 'nagios.list_plugins' in __salt__
def returner(ret):
'''
Send a message to Nagios with the data
'''
_options = _get_options(ret)
log.debug('_options {0}'.format(_options))
log.debug('_options %s', _options)
_options['hostname'] = ret.get('id')
if 'url' not in _options or _options['url'] == '':

View File

@ -123,7 +123,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return odbc --return_kwargs '{"dsn": "dsn-name"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Let's not allow PyLint complain about string substitution
# pylint: disable=W1321,E1321

View File

@ -144,7 +144,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return pgjsonb --return_kwargs '{"db": "another-salt"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Let's not allow PyLint complain about string substitution
# pylint: disable=W1321,E1321
@ -247,7 +247,7 @@ def _get_serv(ret=None, commit=False):
yield cursor
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(str(error))
sys.stderr.write(six.text_type(error))
cursor.execute("ROLLBACK")
raise err
else:

View File

@ -126,7 +126,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return postgres --return_kwargs '{"db": "another-salt"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Let's not allow PyLint complain about string substitution
# pylint: disable=W1321,E1321
@ -142,6 +142,7 @@ import salt.returners
import salt.exceptions
# Import third party libs
from salt.ext import six
try:
import psycopg2
HAS_POSTGRES = True
@ -209,7 +210,7 @@ def _get_serv(ret=None, commit=False):
yield cursor
except psycopg2.DatabaseError as err:
error = err.args
sys.stderr.write(str(error))
sys.stderr.write(six.text_type(error))
cursor.execute("ROLLBACK")
raise err
else:

View File

@ -108,7 +108,7 @@ Required python modules: psycopg2
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import json
import logging
import re
@ -148,7 +148,7 @@ def _get_conn():
database=__opts__['master_job_cache.postgres.db'],
port=__opts__['master_job_cache.postgres.port'])
except psycopg2.OperationalError:
log.error("Could not connect to SQL server: " + str(sys.exc_info()[0]))
log.error('Could not connect to SQL server: %s', sys.exc_info()[0])
return None
return conn
@ -232,7 +232,7 @@ def returner(load):
sql = '''INSERT INTO salt_returns
(fun, jid, return, id, success)
VALUES (%s, %s, %s, %s, %s)'''
job_ret = {'return': six.text_type(str(load['return']), 'utf-8', 'replace')}
job_ret = {'return': six.text_type(six.text_type(load['return']), 'utf-8', 'replace')}
if 'retcode' in load:
job_ret['retcode'] = load['retcode']
if 'success' in load:
@ -288,14 +288,14 @@ def save_load(jid, clear_load, minions=None):
sql, (
jid,
salt.utils.jid.jid_to_time(jid),
str(clear_load.get("tgt_type")),
str(clear_load.get("cmd")),
str(clear_load.get("tgt")),
str(clear_load.get("kwargs")),
str(clear_load.get("ret")),
str(clear_load.get("user")),
str(json.dumps(clear_load.get("arg"))),
str(clear_load.get("fun")),
six.text_type(clear_load.get("tgt_type")),
six.text_type(clear_load.get("cmd")),
six.text_type(clear_load.get("tgt")),
six.text_type(clear_load.get("kwargs")),
six.text_type(clear_load.get("ret")),
six.text_type(clear_load.get("user")),
six.text_type(json.dumps(clear_load.get("arg"))),
six.text_type(clear_load.get("fun")),
)
)
# TODO: Add Metadata support when it is merged from develop
@ -313,7 +313,7 @@ def _escape_jid(jid):
'''
Do proper formatting of the jid
'''
jid = str(jid)
jid = six.text_type(jid)
jid = re.sub(r"'*", "", jid)
return jid
@ -393,7 +393,7 @@ def get_jids():
'''FROM jids'''
if __opts__['keep_jobs'] != 0:
sql = sql + " WHERE started > NOW() - INTERVAL '" \
+ str(__opts__['keep_jobs']) + "' HOUR"
+ six.text_type(__opts__['keep_jobs']) + "' HOUR"
cur.execute(sql)
ret = {}

View File

@ -75,7 +75,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return pushover --return_kwargs '{"title": "Salt is awesome!"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import pprint
@ -245,7 +245,7 @@ def returner(ret):
sound=sound,
token=token)
log.debug('result {0}'.format(result))
log.debug('pushover result %s', result)
if not result['res']:
log.info('Error: {0}'.format(result['message']))
log.info('Error: %s', result['message'])
return

View File

@ -18,7 +18,7 @@ to restrict the events that are written.
'''
# Import python libs
from __future__ import absolute_import, print_function, with_statement
from __future__ import absolute_import, print_function, with_statement, unicode_literals
import logging
import json
@ -60,7 +60,7 @@ def returner(ret):
with salt.utils.files.flopen(opts['filename'], 'a') as logfile:
logfile.write(json.dumps(ret)+'\n')
except:
log.error('Could not write to rawdata_json file {0}'.format(opts['filename']))
log.error('Could not write to rawdata_json file %s', opts['filename'])
raise
@ -79,5 +79,5 @@ def event_return(events):
json.dump(event, logfile)
logfile.write('\n')
except:
log.error('Could not write to rawdata_json file {0}'.format(opts['filename']))
log.error('Could not write to rawdata_json file %s', opts['filename'])
raise

View File

@ -86,7 +86,7 @@ cluster.skip_full_coverage_check: ``False``
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import json
import logging
@ -305,7 +305,7 @@ def clean_old_jobs():
to_remove.append(ret_key)
if len(to_remove) != 0:
serv.delete(*to_remove)
log.debug('clean old jobs: {0}'.format(to_remove))
log.debug('clean old jobs: %s', to_remove)
def prep_jid(nocache=False, passed_jid=None): # pylint: disable=unused-argument

View File

@ -41,7 +41,7 @@ tags, allowing tagging of events in the sentry ui.
To report only errors to sentry, set report_errors_only: true.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import logging
@ -58,7 +58,7 @@ try:
except ImportError:
has_raven = False
logger = logging.getLogger(__name__)
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'sentry'
@ -81,10 +81,7 @@ def returner(ret):
try:
_connect_sentry(_get_message(ret), ret)
except Exception as err:
logger.error(
'Can\'t run connect_sentry: {0}'.format(err),
exc_info=True
)
log.error('Can\'t run connect_sentry: %s', err, exc_info=True)
def _ret_is_not_error(result):
@ -164,7 +161,7 @@ def _connect_sentry(message, result):
transport=HTTPTransport
)
except KeyError as missing_key:
logger.error(
log.error(
'Sentry returner needs key \'%s\' in pillar',
missing_key
)
@ -178,12 +175,9 @@ def _connect_sentry(message, result):
extra=sentry_data,
tags=tags
)
logger.info('Message id {} written to sentry'.format(msgid))
log.info('Message id %s written to sentry', msgid)
except Exception as exc:
logger.error(
'Can\'t send message to sentry: {0}'.format(exc),
exc_info=True
)
log.error('Can\'t send message to sentry: %s', exc, exc_info=True)
def prep_jid(nocache=False, passed_jid=None): # pylint: disable=unused-argument

View File

@ -76,7 +76,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return slack --return_kwargs '{"channel": "#random"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import yaml
@ -170,7 +170,7 @@ def _post_message(channel,
header_dict={'Content-Type': 'application/x-www-form-urlencoded'},
data=_urlencode(parameters))
log.debug('result {0}'.format(result))
log.debug('Slack message post result: %s', result)
if result:
return True
else:

View File

@ -28,7 +28,7 @@ To use the sms returner, append '--return sms' to the salt command.
salt '*' test.ping --return sms
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import salt.returners
@ -96,8 +96,10 @@ def returner(ret):
ret['id'], ret['fun'], ret['success'], ret['jid']
), to=receiver, from_=sender)
except TwilioRestException as e:
log.error('Twilio [https://www.twilio.com/docs/errors/{0}]'.format(
e.code))
log.error(
'Twilio [https://www.twilio.com/docs/errors/%s]',
e.code
)
return False
return True

View File

@ -106,7 +106,7 @@ This configuration enables Salt Master to send an email when accepting or reject
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import os
import logging
import smtplib
@ -184,7 +184,7 @@ def returner(ret):
if not port:
port = 25
log.debug('SMTP port has been set to {0}'.format(port))
log.debug('SMTP port has been set to %s', port)
for field in fields:
if field in ret:
@ -198,7 +198,7 @@ def returner(ret):
**ret)
if isinstance(subject, six.moves.StringIO):
subject = subject.read()
log.debug("smtp_return: Subject is '{0}'".format(subject))
log.debug("smtp_return: Subject is '%s'", subject)
template = _options.get('template')
if template:
@ -224,7 +224,7 @@ def returner(ret):
encrypted_data = gpg.encrypt(content, to_addrs)
if encrypted_data.ok:
log.debug('smtp_return: Encryption successful')
content = str(encrypted_data)
content = six.text_type(encrypted_data)
else:
log.error('smtp_return: Encryption failed, only an error message will be sent')
content = 'Encryption failed, the return data was not sent.\r\n\r\n{0}\r\n{1}'.format(

View File

@ -17,7 +17,7 @@ Run a test by using ``salt-call test.ping --return splunk``
Written by Scott Pack (github.com/scottjpack)
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import socket
@ -28,6 +28,8 @@ import json
import time
import logging
from salt.ext import six
_max_content_bytes = 100000
http_event_collector_SSL_verify = False
http_event_collector_debug = False
@ -74,7 +76,7 @@ def _send_splunk(event, index_override=None, sourcetype_override=None):
'''
#Get Splunk Options
opts = _get_options()
logging.info('Options: {0}'.format(json.dumps(opts)))
logging.info('Options: %s', json.dumps(opts))
http_event_collector_key = opts['token']
http_event_collector_host = opts['indexer']
#Set up the collector
@ -94,7 +96,7 @@ def _send_splunk(event, index_override=None, sourcetype_override=None):
#Add the event
payload.update({"event": event})
logging.info('Payload: {0}'.format(json.dumps(payload)))
logging.info('Payload: %s', json.dumps(payload))
#fire it off
splunk_event.sendEvent(payload)
return True
@ -145,7 +147,7 @@ class http_event_collector(object):
# If eventtime in epoch not passed as optional argument use current system time in epoch
if not eventtime:
eventtime = str(int(time.time()))
eventtime = six.text_type(int(time.time()))
# Fill in local hostname if not manually populated
if 'host' not in payload:
@ -182,7 +184,7 @@ class http_event_collector(object):
# If eventtime in epoch not passed as optional argument use current system time in epoch
if not eventtime:
eventtime = str(int(time.time()))
eventtime = six.text_type(int(time.time()))
# Update time value on payload if need to use system time
data = {"time": eventtime}

View File

@ -80,7 +80,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return sqlite3 --return_kwargs '{"db": "/var/lib/salt/another-salt.db"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging
@ -91,6 +91,9 @@ import datetime
import salt.utils.jid
import salt.returners
# Import 3rd-party libs
from salt.ext import six
# Better safe than sorry here. Even though sqlite3 is included in python
try:
import sqlite3
@ -141,9 +144,7 @@ def _get_conn(ret=None):
if not timeout:
raise Exception(
'sqlite3 config option "sqlite3.timeout" is missing')
log.debug('Connecting the sqlite3 database: {0} timeout: {1}'.format(
database,
timeout))
log.debug('Connecting the sqlite3 database: %s timeout: %s', database, timeout)
conn = sqlite3.connect(database, timeout=float(timeout))
return conn
@ -161,7 +162,7 @@ def returner(ret):
'''
Insert minion return data into the sqlite3 database
'''
log.debug('sqlite3 returner <returner> called with data: {0}'.format(ret))
log.debug('sqlite3 returner <returner> called with data: %s', ret)
conn = _get_conn(ret)
cur = conn.cursor()
sql = '''INSERT INTO salt_returns
@ -171,8 +172,8 @@ def returner(ret):
{'fun': ret['fun'],
'jid': ret['jid'],
'id': ret['id'],
'fun_args': str(ret['fun_args']) if ret.get('fun_args') else None,
'date': str(datetime.datetime.now()),
'fun_args': six.text_type(ret['fun_args']) if ret.get('fun_args') else None,
'date': six.text_type(datetime.datetime.now()),
'full_ret': json.dumps(ret['return']),
'success': ret.get('success', '')})
_close_conn(conn)
@ -182,8 +183,7 @@ def save_load(jid, load, minions=None):
'''
Save the load to the specified jid
'''
log.debug('sqlite3 returner <save_load> called jid:{0} load:{1}'
.format(jid, load))
log.debug('sqlite3 returner <save_load> called jid: %s load: %s', jid, load)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = '''INSERT INTO jids (jid, load) VALUES (:jid, :load)'''
@ -204,7 +204,7 @@ def get_load(jid):
'''
Return the load from a specified jid
'''
log.debug('sqlite3 returner <get_load> called jid: {0}'.format(jid))
log.debug('sqlite3 returner <get_load> called jid: %s', jid)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = '''SELECT load FROM jids WHERE jid = :jid'''
@ -221,18 +221,18 @@ def get_jid(jid):
'''
Return the information returned from a specified jid
'''
log.debug('sqlite3 returner <get_jid> called jid: {0}'.format(jid))
log.debug('sqlite3 returner <get_jid> called jid: %s', jid)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = '''SELECT id, full_ret FROM salt_returns WHERE jid = :jid'''
cur.execute(sql,
{'jid': jid})
data = cur.fetchone()
log.debug('query result: {0}'.format(data))
log.debug('query result: %s', data)
ret = {}
if data and len(data) > 1:
ret = {str(data[0]): {u'return': json.loads(data[1])}}
log.debug("ret: {0}".format(ret))
ret = {six.text_type(data[0]): {u'return': json.loads(data[1])}}
log.debug('ret: %s', ret)
_close_conn(conn)
return ret
@ -241,7 +241,7 @@ def get_fun(fun):
'''
Return a dict of the last function called for all minions
'''
log.debug('sqlite3 returner <get_fun> called fun: {0}'.format(fun))
log.debug('sqlite3 returner <get_fun> called fun: %s', fun)
conn = _get_conn(ret=None)
cur = conn.cursor()
sql = '''SELECT s.id, s.full_ret, s.jid

View File

@ -86,7 +86,7 @@ To override individual configuration items, append
implmentation's documentation to determine how to adjust this limit.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
# Import python libs
@ -148,10 +148,10 @@ def _verify_options(options):
for opt_name, opt in bitwise_args:
if not hasattr(syslog, opt):
log.error('syslog has no attribute {0}'.format(opt))
log.error('syslog has no attribute %s', opt)
return False
if not isinstance(getattr(syslog, opt), int):
log.error('{0} is not a valid syslog {1}'.format(opt, opt_name))
log.error('%s is not a valid syslog %s', opt, opt_name)
return False
# Sanity check tag

View File

@ -22,7 +22,7 @@ To use the Telegram return, append '--return telegram' to the salt command.
salt '*' test.ping --return telegram
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import logging
@ -60,7 +60,7 @@ def _get_options(ret=None):
attrs,
__salt__=__salt__,
__opts__=__opts__)
log.debug('Options: {0}'.format(_options))
log.debug('Options: %s', _options)
return _options

View File

@ -67,7 +67,7 @@ To override individual configuration items, append --return_kwargs '{"key:": "va
salt '*' test.ping --return xmpp --return_kwargs '{"recipient": "someone-else@xmpp.example.com"}'
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
import logging

View File

@ -20,7 +20,7 @@ To use the Zabbix returner, append '--return zabbix' to the salt command. ex:
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os

View File

@ -28,16 +28,17 @@ master configuration at ``/etc/salt/master`` or ``/etc/salt/master.d/asam.conf``
is not using the defaults. Default is ``protocol: https`` and ``port: 3451``.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
# Import python libs
# Import Python libs
import logging
# Import third party libs
# Import 3rd-party libs
import salt.ext.six as six
HAS_LIBS = False
try:
import requests
import salt.ext.six as six
from salt.ext.six.moves.html_parser import HTMLParser # pylint: disable=E0611
HAS_LIBS = True
@ -90,8 +91,8 @@ def _get_asam_configuration(driver_url=''):
if not username or not password:
log.error(
"Username or Password has not been specified in the master "
"configuration for {0}".format(asam_server)
'Username or Password has not been specified in the '
'master configuration for %s', asam_server
)
return False
@ -107,15 +108,13 @@ def _get_asam_configuration(driver_url=''):
if (not driver_url) or (driver_url == asam_server):
return ret
except Exception as exc:
log.error(
"Exception encountered: {0}".format(exc)
)
log.error('Exception encountered: %s', exc)
return False
if driver_url:
log.error(
"Configuration for {0} has not been specified in the master "
"configuration".format(driver_url)
'Configuration for %s has not been specified in the master '
'configuration', driver_url
)
return False
@ -205,7 +204,7 @@ def remove_platform(name, server_url):
html_content = _make_post_request(url, data, auth, verify=False)
except Exception as exc:
err_msg = "Failed to look up existing platforms on {0}".format(server_url)
log.error("{0}:\n{1}".format(err_msg, exc))
log.error('%s:\n%s', err_msg, exc)
return {name: err_msg}
parser = _parse_html_content(html_content)
@ -214,14 +213,14 @@ def remove_platform(name, server_url):
if platformset_name:
log.debug(platformset_name)
data['platformName'] = name
data['platformSetName'] = str(platformset_name)
data['platformSetName'] = six.text_type(platformset_name)
data['postType'] = 'platformRemove'
data['Submit'] = 'Yes'
try:
html_content = _make_post_request(url, data, auth, verify=False)
except Exception as exc:
err_msg = "Failed to delete platform from {1}".format(server_url)
log.error("{0}:\n{1}".format(err_msg, exc))
log.error('%s:\n%s', err_msg, exc)
return {name: err_msg}
parser = _parse_html_content(html_content)
@ -263,7 +262,7 @@ def list_platforms(server_url):
html_content = _make_post_request(url, data, auth, verify=False)
except Exception as exc:
err_msg = "Failed to look up existing platforms"
log.error("{0}:\n{1}".format(err_msg, exc))
log.error('%s:\n%s', err_msg, exc)
return {server_url: err_msg}
parser = _parse_html_content(html_content)
@ -304,7 +303,7 @@ def list_platform_sets(server_url):
html_content = _make_post_request(url, data, auth, verify=False)
except Exception as exc:
err_msg = "Failed to look up existing platform sets"
log.error("{0}:\n{1}".format(err_msg, exc))
log.error('%s:\n%s', err_msg, exc)
return {server_url: err_msg}
parser = _parse_html_content(html_content)
@ -359,7 +358,7 @@ def add_platform(name, platform_set, server_url):
html_content = _make_post_request(url, data, auth, verify=False)
except Exception as exc:
err_msg = "Failed to add platform on {0}".format(server_url)
log.error("{0}:\n{1}".format(err_msg, exc))
log.error('%s:\n%s', err_msg, exc)
return {name: err_msg}
platforms = list_platforms(server_url)

View File

@ -7,7 +7,7 @@ Authentication runner for creating, deleting, and managing eauth tokens.
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import os
# Import Salt libs

View File

@ -97,9 +97,7 @@ Configuration
- flap_count
outputter: yaml
'''
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import absolute_import, print_function, unicode_literals
# Import third party libs
try:
@ -335,7 +333,7 @@ def neighbors(*asns, **kwargs):
title_parts = []
if asns:
title_parts.append('BGP Neighbors for {asns}'.format(
asns=', '.join([str(asn) for asn in asns])
asns=', '.join([six.text_type(asn) for asn in asns])
))
if neighbor_ip:
title_parts.append('Selecting neighbors having the remote IP address: {ipaddr}'.format(ipaddr=neighbor_ip))

Some files were not shown because too many files have changed in this diff Show More