Merge pull request #45429 from terminalmage/py3-salt.utils-r-s
[PY3] Add unicode_literals to salt.utils modules (R-S)
Commit e8e82f2f7f
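In broad terms, the diff below applies the same Python 3 compatibility pattern to each module: unicode_literals (and print_function) is added to the __future__ import, str() calls on text are replaced with six.text_type(), and log calls stop building messages eagerly with str.format() in favour of passing arguments to the logging framework, which defers interpolation until the record is actually emitted. A minimal sketch of the pattern follows; the function and variable names are hypothetical and not taken from the diff itself.

# Illustrative sketch only; names are hypothetical, not from this diff.
from __future__ import absolute_import, print_function, unicode_literals

import logging

from salt.ext import six

log = logging.getLogger(__name__)


def tag_summary(tag, count):
    # Old style (removed): the message is formatted eagerly, even when
    # DEBUG logging is disabled:
    #   log.debug('Processing tag {0} ({1} items)'.format(tag, count))
    # New style (added): arguments are passed through and only interpolated
    # if the record is actually emitted.
    log.debug('Processing tag %s (%s items)', tag, count)

    # str() is swapped for six.text_type() so the result is unicode text on
    # both Python 2 and Python 3.
    return six.text_type(tag)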
@ -7,7 +7,7 @@ This module is used to manage events via RAET
# pylint: disable=3rd-party-module-not-gated

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import os
import logging
import time
@ -68,7 +68,7 @@ class RAETEvent(object):
self.stack = transport.jobber_stack
else:
self.stack = transport.jobber_stack = self._setup_stack(ryn=self.ryn)
log.debug("RAETEvent Using Jobber Stack at = {0}\n".format(self.stack.ha))
log.debug("RAETEvent Using Jobber Stack at = %s\n", self.stack.ha)
if listen:
self.subscribe()

@ -220,7 +220,7 @@ class RAETEvent(object):
identifier "tag"
'''
# Timeout is retained for compat with zeromq events
if not str(tag): # no empty tags allowed
if not six.text_type(tag): # no empty tags allowed
raise ValueError('Empty tag.')

if not isinstance(data, MutableMapping): # data must be dict

@ -49,7 +49,7 @@ if not msg:
raise ValueError("Timed out out waiting for response")
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

try:
from raet import raeting, nacling
@ -124,15 +124,19 @@ if HAS_RAET:
sockdirpath=opts['sock_dir'])

lane_stack.Pk = raeting.PackKind.pack.value
log.debug("Created new LaneStack and local Yard named {0} at {1}\n"
"".format(lane_stack.name, lane_stack.ha))
log.debug(
'Created new LaneStack and local Yard named %s at %s\n',
lane_stack.name, lane_stack.ha
)
remote_yard = RemoteYard(stack=lane_stack,
name=ryn,
lanename=lanename,
dirpath=opts['sock_dir'])
lane_stack.addRemote(remote_yard)
log.debug("Added to LaneStack {0} remote Yard named {1} at {2}\n"
"".format(lane_stack.name, remote_yard.name, remote_yard.ha))
log.debug(
'Added to LaneStack %s remote Yard named %s at %s\n',
lane_stack.name, remote_yard.name, remote_yard.ha
)

def transmit(msg):
'''

@ -5,7 +5,7 @@ Functions which implement running reactor jobs
|
||||
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import fnmatch
|
||||
import glob
|
||||
import logging
|
||||
@ -83,7 +83,7 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat
|
||||
glob_ref = self.minion.functions['cp.cache_file'](glob_ref) or ''
|
||||
globbed_ref = glob.glob(glob_ref)
|
||||
if not globbed_ref:
|
||||
log.error('Can not render SLS {0} for tag {1}. File missing or not found.'.format(glob_ref, tag))
|
||||
log.error('Can not render SLS %s for tag %s. File missing or not found.', glob_ref, tag)
|
||||
for fn_ in globbed_ref:
|
||||
try:
|
||||
res = self.render_template(
|
||||
@ -98,7 +98,7 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat
|
||||
|
||||
react.update(res)
|
||||
except Exception:
|
||||
log.error('Failed to render "{0}": '.format(fn_), exc_info=True)
|
||||
log.exception('Failed to render "%s": ', fn_)
|
||||
return react
|
||||
|
||||
def list_reactors(self, tag):
|
||||
@ -106,24 +106,16 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat
|
||||
Take in the tag from an event and return a list of the reactors to
|
||||
process
|
||||
'''
|
||||
log.debug('Gathering reactors for tag {0}'.format(tag))
|
||||
log.debug('Gathering reactors for tag %s', tag)
|
||||
reactors = []
|
||||
if isinstance(self.opts['reactor'], six.string_types):
|
||||
try:
|
||||
with salt.utils.files.fopen(self.opts['reactor']) as fp_:
|
||||
react_map = salt.utils.yaml.safe_load(fp_)
|
||||
except (OSError, IOError):
|
||||
log.error(
|
||||
'Failed to read reactor map: "{0}"'.format(
|
||||
self.opts['reactor']
|
||||
)
|
||||
)
|
||||
log.error('Failed to read reactor map: "%s"', self.opts['reactor'])
|
||||
except Exception:
|
||||
log.error(
|
||||
'Failed to parse YAML in reactor map: "{0}"'.format(
|
||||
self.opts['reactor']
|
||||
)
|
||||
)
|
||||
log.error('Failed to parse YAML in reactor map: "%s"', self.opts['reactor'])
|
||||
else:
|
||||
react_map = self.opts['reactor']
|
||||
for ropt in react_map:
|
||||
@ -145,22 +137,17 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat
|
||||
Return a list of the reactors
|
||||
'''
|
||||
if isinstance(self.minion.opts['reactor'], six.string_types):
|
||||
log.debug('Reading reactors from yaml {0}'.format(self.opts['reactor']))
|
||||
log.debug('Reading reactors from yaml %s', self.opts['reactor'])
|
||||
try:
|
||||
with salt.utils.files.fopen(self.opts['reactor']) as fp_:
|
||||
react_map = salt.utils.yaml.safe_load(fp_)
|
||||
except (OSError, IOError):
|
||||
log.error(
|
||||
'Failed to read reactor map: "{0}"'.format(
|
||||
self.opts['reactor']
|
||||
)
|
||||
)
|
||||
log.error('Failed to read reactor map: "%s"', self.opts['reactor'])
|
||||
except Exception:
|
||||
log.error(
|
||||
'Failed to parse YAML in reactor map: "{0}"'.format(
|
||||
self.opts['reactor']
|
||||
)
|
||||
)
|
||||
'Failed to parse YAML in reactor map: "%s"',
|
||||
self.opts['reactor']
|
||||
)
|
||||
else:
|
||||
log.debug('Not reading reactors from yaml')
|
||||
react_map = self.minion.opts['reactor']
|
||||
@ -206,7 +193,7 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat
|
||||
'''
|
||||
Render a list of reactor files and returns a reaction struct
|
||||
'''
|
||||
log.debug('Compiling reactions for tag {0}'.format(tag))
|
||||
log.debug('Compiling reactions for tag %s', tag)
|
||||
high = {}
|
||||
chunks = []
|
||||
try:
|
||||
@ -215,12 +202,15 @@ class Reactor(salt.utils.process.SignalHandlingMultiprocessingProcess, salt.stat
|
||||
if high:
|
||||
errors = self.verify_high(high)
|
||||
if errors:
|
||||
log.error(('Unable to render reactions for event {0} due to '
|
||||
'errors ({1}) in one or more of the sls files ({2})').format(tag, errors, reactors))
|
||||
log.error(
|
||||
'Unable to render reactions for event %s due to '
|
||||
'errors (%s) in one or more of the sls files (%s)',
|
||||
tag, errors, reactors
|
||||
)
|
||||
return [] # We'll return nothing since there was an error
|
||||
chunks = self.order_chunks(self.compile_high_data(high))
|
||||
except Exception as exc:
|
||||
log.error('Exception trying to compile reactions: {0}'.format(exc), exc_info=True)
|
||||
log.exception('Exception encountered while compiling reactions')
|
||||
|
||||
self.resolve_aliases(chunks)
|
||||
return chunks
|
||||
|
@ -3,7 +3,7 @@
Common utility functions for the reclass adapters
http://reclass.pantsfullofunix.net
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import sys

@ -4,7 +4,7 @@ Create and verify ANSI X9.31 RSA signatures using OpenSSL libcrypto
'''

# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import glob
import sys
import os

@ -4,7 +4,7 @@ Connection library for Amazon S3
|
||||
|
||||
:depends: requests
|
||||
'''
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Python libs
|
||||
import logging
|
||||
@ -23,6 +23,7 @@ import salt.utils.hashutils
|
||||
import salt.utils.xmlutil as xml
|
||||
from salt._compat import ElementTree as ET
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from salt.ext import six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@ -141,9 +142,9 @@ def query(key, keyid, method='GET', params=None, headers=None,
|
||||
payload_hash=payload_hash,
|
||||
)
|
||||
|
||||
log.debug('S3 Request: {0}'.format(requesturl))
|
||||
log.debug('S3 Request: %s', requesturl)
|
||||
log.debug('S3 Headers::')
|
||||
log.debug(' Authorization: {0}'.format(headers['Authorization']))
|
||||
log.debug(' Authorization: %s', headers['Authorization'])
|
||||
|
||||
if not data:
|
||||
data = None
|
||||
@ -180,7 +181,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
|
||||
if result.status_code >= 400:
|
||||
# On error the S3 API response should contain error message
|
||||
err_text = result.content or 'Unknown error'
|
||||
log.debug(' Response content: {0}'.format(err_text))
|
||||
log.debug(' Response content: %s', err_text)
|
||||
|
||||
# Try to get err info from response xml
|
||||
try:
|
||||
@ -188,12 +189,14 @@ def query(key, keyid, method='GET', params=None, headers=None,
|
||||
err_code = err_data['Code']
|
||||
err_msg = err_data['Message']
|
||||
except (KeyError, ET.ParseError) as err:
|
||||
log.debug('Failed to parse s3 err response. {0}: {1}'.format(
|
||||
type(err).__name__, err))
|
||||
log.debug(
|
||||
'Failed to parse s3 err response. %s: %s',
|
||||
type(err).__name__, err
|
||||
)
|
||||
err_code = 'http-{0}'.format(result.status_code)
|
||||
err_msg = err_text
|
||||
|
||||
log.debug('S3 Response Status Code: {0}'.format(result.status_code))
|
||||
log.debug('S3 Response Status Code: %s', result.status_code)
|
||||
|
||||
if method == 'PUT':
|
||||
if result.status_code != 200:
|
||||
@ -206,13 +209,13 @@ def query(key, keyid, method='GET', params=None, headers=None,
|
||||
bucket, err_code, err_msg))
|
||||
|
||||
if local_file:
|
||||
log.debug('Uploaded from {0} to {1}'.format(local_file, path))
|
||||
log.debug('Uploaded from %s to %s', local_file, path)
|
||||
else:
|
||||
log.debug('Created bucket {0}'.format(bucket))
|
||||
log.debug('Created bucket %s', bucket)
|
||||
return
|
||||
|
||||
if method == 'DELETE':
|
||||
if not str(result.status_code).startswith('2'):
|
||||
if not six.text_type(result.status_code).startswith('2'):
|
||||
if path:
|
||||
raise CommandExecutionError(
|
||||
'Failed to delete {0} from bucket {1}. {2}: {3}'.format(
|
||||
@ -222,9 +225,9 @@ def query(key, keyid, method='GET', params=None, headers=None,
|
||||
bucket, err_code, err_msg))
|
||||
|
||||
if path:
|
||||
log.debug('Deleted {0} from bucket {1}'.format(path, bucket))
|
||||
log.debug('Deleted %s from bucket %s', path, bucket)
|
||||
else:
|
||||
log.debug('Deleted bucket {0}'.format(bucket))
|
||||
log.debug('Deleted bucket %s', bucket)
|
||||
return
|
||||
|
||||
# This can be used to save a binary object to disk
|
||||
@ -233,7 +236,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
|
||||
raise CommandExecutionError(
|
||||
'Failed to get file. {0}: {1}'.format(err_code, err_msg))
|
||||
|
||||
log.debug('Saving to local file: {0}'.format(local_file))
|
||||
log.debug('Saving to local file: %s', local_file)
|
||||
with salt.utils.files.fopen(local_file, 'wb') as out:
|
||||
for chunk in result.iter_content(chunk_size=chunk_size):
|
||||
out.write(chunk)
|
||||
|
@ -1,6 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
@ -11,7 +11,7 @@ import salt.utils.path
|
||||
import salt.utils.yaml
|
||||
|
||||
# Import 3rd-party libs
|
||||
from salt.ext.six import iteritems
|
||||
from salt.ext import six
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@ -59,7 +59,7 @@ def get_class(_class, salt_data):
|
||||
if sub_init in l_files:
|
||||
return render_yaml(sub_init, salt_data)
|
||||
|
||||
log.warning('{0}: Class definition not found'.format(_class))
|
||||
log.warning('%s: Class definition not found', _class)
|
||||
return {}
|
||||
|
||||
|
||||
@ -86,7 +86,7 @@ def dict_merge(a, b, path=None):
|
||||
else:
|
||||
a[key].extend(b[key])
|
||||
elif isinstance(a[key], dict) and isinstance(b[key], dict):
|
||||
dict_merge(a[key], b[key], path + [str(key)])
|
||||
dict_merge(a[key], b[key], path + [six.text_type(key)])
|
||||
elif a[key] == b[key]:
|
||||
pass
|
||||
else:
|
||||
@ -98,7 +98,7 @@ def dict_merge(a, b, path=None):
|
||||
|
||||
# Recursive search and replace in a dict
|
||||
def dict_search_and_replace(d, old, new, expanded):
|
||||
for (k, v) in iteritems(d):
|
||||
for (k, v) in six.iteritems(d):
|
||||
if isinstance(v, dict):
|
||||
dict_search_and_replace(d[k], old, new, expanded)
|
||||
if v == old:
|
||||
@ -125,9 +125,9 @@ def expand_variables(a, b, expanded, path=None):
|
||||
b = a.copy()
|
||||
path = []
|
||||
|
||||
for (k, v) in iteritems(a):
|
||||
for (k, v) in six.iteritems(a):
|
||||
if isinstance(v, dict):
|
||||
expand_variables(v, b, expanded, path + [str(k)])
|
||||
expand_variables(v, b, expanded, path + [six.text_type(k)])
|
||||
else:
|
||||
if isinstance(v, str):
|
||||
vre = re.search(r'(^|.)\$\{.*?\}', v)
|
||||
@ -230,7 +230,7 @@ def expanded_dict_from_minion(minion_id, salt_data):
|
||||
if _file:
|
||||
node_dict[minion_id] = render_yaml(_file, salt_data)
|
||||
else:
|
||||
log.warning('{0}: Node definition not found'.format(minion_id))
|
||||
log.warning('%s: Node definition not found', minion_id)
|
||||
node_dict[minion_id] = {}
|
||||
|
||||
# Get 2 ordered lists:
|
||||
|
@ -15,12 +15,12 @@
# limitations under the License.

# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import re
import os.path

# Import Salt libs
from salt.ext.six import text_type as text
from salt.ext import six
from salt.exceptions import CommandExecutionError

@ -36,7 +36,7 @@ class InputSanitizer(object):
if not value:
raise CommandExecutionError("Empty value during sanitation")

return text(value)
return six.text_type(value)

@staticmethod
def filename(value):

@ -11,7 +11,7 @@ Detailed tutorial about scheduling jobs can be found :ref:`here
|
||||
'''
|
||||
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, with_statement
|
||||
from __future__ import absolute_import, with_statement, print_function, unicode_literals
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
@ -192,8 +192,7 @@ class Schedule(object):
|
||||
if name in self.opts['schedule']:
|
||||
del self.opts['schedule'][name]
|
||||
elif name in self._get_schedule(include_opts=False):
|
||||
log.warning('Cannot delete job {0}, '
|
||||
'it`s in the pillar!'.format(name))
|
||||
log.warning("Cannot delete job %s, it's in the pillar!", name)
|
||||
|
||||
# Fire the complete event back along with updated list of schedule
|
||||
evt = salt.utils.event.get_event('minion', opts=self.opts, listen=False)
|
||||
@ -217,8 +216,7 @@ class Schedule(object):
|
||||
del self.opts['schedule'][job]
|
||||
for job in self._get_schedule(include_opts=False):
|
||||
if job.startswith(name):
|
||||
log.warning('Cannot delete job {0}, '
|
||||
'it`s in the pillar!'.format(job))
|
||||
log.warning("Cannot delete job %s, it's in the pillar!", job)
|
||||
|
||||
# Fire the complete event back along with updated list of schedule
|
||||
evt = salt.utils.event.get_event('minion', opts=self.opts, listen=False)
|
||||
@ -256,16 +254,14 @@ class Schedule(object):
|
||||
new_job = next(six.iterkeys(data))
|
||||
|
||||
if new_job in self._get_schedule(include_opts=False):
|
||||
log.warning('Cannot update job {0}, '
|
||||
'it`s in the pillar!'.format(new_job))
|
||||
log.warning("Cannot update job %s, it's in the pillar!", new_job)
|
||||
|
||||
elif new_job in self.opts['schedule']:
|
||||
log.info('Updating job settings for scheduled '
|
||||
'job: {0}'.format(new_job))
|
||||
log.info('Updating job settings for scheduled job: %s', new_job)
|
||||
self.opts['schedule'].update(data)
|
||||
|
||||
else:
|
||||
log.info('Added new job {0} to scheduler'.format(new_job))
|
||||
log.info('Added new job %s to scheduler', new_job)
|
||||
self.opts['schedule'].update(data)
|
||||
|
||||
# Fire the complete event back along with updated list of schedule
|
||||
@ -283,10 +279,9 @@ class Schedule(object):
|
||||
# ensure job exists, then enable it
|
||||
if name in self.opts['schedule']:
|
||||
self.opts['schedule'][name]['enabled'] = True
|
||||
log.info('Enabling job {0} in scheduler'.format(name))
|
||||
log.info('Enabling job %s in scheduler', name)
|
||||
elif name in self._get_schedule(include_opts=False):
|
||||
log.warning('Cannot modify job {0}, '
|
||||
'it`s in the pillar!'.format(name))
|
||||
log.warning("Cannot modify job %s, it's in the pillar!", name)
|
||||
|
||||
# Fire the complete event back along with updated list of schedule
|
||||
evt = salt.utils.event.get_event('minion', opts=self.opts, listen=False)
|
||||
@ -303,10 +298,9 @@ class Schedule(object):
|
||||
# ensure job exists, then disable it
|
||||
if name in self.opts['schedule']:
|
||||
self.opts['schedule'][name]['enabled'] = False
|
||||
log.info('Disabling job {0} in scheduler'.format(name))
|
||||
log.info('Disabling job %s in scheduler', name)
|
||||
elif name in self._get_schedule(include_opts=False):
|
||||
log.warning('Cannot modify job {0}, '
|
||||
'it`s in the pillar!'.format(name))
|
||||
log.warning("Cannot modify job %s, it's in the pillar!", name)
|
||||
|
||||
# Fire the complete event back along with updated list of schedule
|
||||
evt = salt.utils.event.get_event('minion', opts=self.opts, listen=False)
|
||||
@ -324,8 +318,7 @@ class Schedule(object):
|
||||
if name in self.opts['schedule']:
|
||||
self.delete_job(name, persist)
|
||||
elif name in self._get_schedule(include_opts=False):
|
||||
log.warning('Cannot modify job {0}, '
|
||||
'it`s in the pillar!'.format(name))
|
||||
log.warning("Cannot modify job %s, it's in the pillar!", name)
|
||||
return
|
||||
|
||||
self.opts['schedule'][name] = schedule
|
||||
@ -349,16 +342,13 @@ class Schedule(object):
|
||||
func = None
|
||||
if func not in self.functions:
|
||||
log.info(
|
||||
'Invalid function: {0} in scheduled job {1}.'.format(
|
||||
func, name
|
||||
)
|
||||
'Invalid function: %s in scheduled job %s.',
|
||||
func, name
|
||||
)
|
||||
|
||||
if 'name' not in data:
|
||||
data['name'] = name
|
||||
log.info(
|
||||
'Running Job: {0}.'.format(name)
|
||||
)
|
||||
log.info('Running Job: %s', name)
|
||||
|
||||
multiprocessing_enabled = self.opts.get('multiprocessing', True)
|
||||
if multiprocessing_enabled:
|
||||
@ -456,8 +446,7 @@ class Schedule(object):
|
||||
self.opts['schedule'][name]['run_explicit'].append(new_time)
|
||||
|
||||
elif name in self._get_schedule(include_opts=False):
|
||||
log.warning('Cannot modify job {0}, '
|
||||
'it`s in the pillar!'.format(name))
|
||||
log.warning("Cannot modify job %s, it's in the pillar!", name)
|
||||
|
||||
# Fire the complete event back along with updated list of schedule
|
||||
evt = salt.utils.event.get_event('minion', opts=self.opts, listen=False)
|
||||
@ -478,8 +467,7 @@ class Schedule(object):
|
||||
self.opts['schedule'][name]['skip_explicit'].append(time)
|
||||
|
||||
elif name in self._get_schedule(include_opts=False):
|
||||
log.warning('Cannot modify job {0}, '
|
||||
'it`s in the pillar!'.format(name))
|
||||
log.warning("Cannot modify job %s, it's in the pillar!", name)
|
||||
|
||||
# Fire the complete event back along with updated list of schedule
|
||||
evt = salt.utils.event.get_event('minion', opts=self.opts, listen=False)
|
||||
@ -560,20 +548,24 @@ class Schedule(object):
|
||||
jobcount = 0
|
||||
for job in salt.utils.minion.running(self.opts):
|
||||
if 'schedule' in job:
|
||||
log.debug('schedule.handle_func: Checking job against '
|
||||
'fun {0}: {1}'.format(ret['fun'], job))
|
||||
log.debug(
|
||||
'schedule.handle_func: Checking job against fun '
|
||||
'%s: %s', ret['fun'], job
|
||||
)
|
||||
if ret['schedule'] == job['schedule'] \
|
||||
and salt.utils.process.os_is_running(job['pid']):
|
||||
jobcount += 1
|
||||
log.debug(
|
||||
'schedule.handle_func: Incrementing jobcount, now '
|
||||
'{0}, maxrunning is {1}'.format(
|
||||
jobcount, data['maxrunning']))
|
||||
'schedule.handle_func: Incrementing jobcount, '
|
||||
'now %s, maxrunning is %s',
|
||||
jobcount, data['maxrunning']
|
||||
)
|
||||
if jobcount >= data['maxrunning']:
|
||||
log.debug(
|
||||
'schedule.handle_func: The scheduled job {0} '
|
||||
'was not started, {1} already running'.format(
|
||||
ret['schedule'], data['maxrunning']))
|
||||
'schedule.handle_func: The scheduled job '
|
||||
'%s was not started, %s already running',
|
||||
ret['schedule'], data['maxrunning']
|
||||
)
|
||||
return False
|
||||
|
||||
if multiprocessing_enabled and not salt.utils.platform.is_windows():
|
||||
@ -589,8 +581,10 @@ class Schedule(object):
|
||||
|
||||
if not self.standalone:
|
||||
if 'jid_include' not in data or data['jid_include']:
|
||||
log.debug('schedule.handle_func: adding this job to the jobcache '
|
||||
'with data {0}'.format(ret))
|
||||
log.debug(
|
||||
'schedule.handle_func: adding this job to the '
|
||||
'jobcache with data %s', ret
|
||||
)
|
||||
# write this to /var/cache/salt/minion/proc
|
||||
with salt.utils.files.fopen(proc_fn, 'w+b') as fp_:
|
||||
fp_.write(salt.payload.Serial(self.opts).dumps(ret))
|
||||
@ -688,13 +682,12 @@ class Schedule(object):
|
||||
self.returners[ret_str](ret)
|
||||
else:
|
||||
log.info(
|
||||
'Job {0} using invalid returner: {1}. Ignoring.'.format(
|
||||
func, returner
|
||||
)
|
||||
'Job %s using invalid returner: %s. Ignoring.',
|
||||
func, returner
|
||||
)
|
||||
|
||||
except Exception:
|
||||
log.exception("Unhandled exception running {0}".format(ret['fun']))
|
||||
log.exception('Unhandled exception running %s', ret['fun'])
|
||||
# Although catch-all exception handlers are bad, the exception here
|
||||
# is to let the exception bubble up to the top of the thread context,
|
||||
# where the thread will die silently, which is worse.
|
||||
@ -731,10 +724,10 @@ class Schedule(object):
|
||||
try:
|
||||
event.fire_event(load, '__schedule_return')
|
||||
except Exception as exc:
|
||||
log.exception("Unhandled exception firing event: {0}".format(exc))
|
||||
log.exception('Unhandled exception firing __schedule_return event')
|
||||
|
||||
if not self.standalone:
|
||||
log.debug('schedule.handle_func: Removing {0}'.format(proc_fn))
|
||||
log.debug('schedule.handle_func: Removing %s', proc_fn)
|
||||
|
||||
try:
|
||||
os.unlink(proc_fn)
|
||||
@ -744,7 +737,7 @@ class Schedule(object):
|
||||
# we wanted
|
||||
pass
|
||||
else:
|
||||
log.error("Failed to delete '{0}': {1}".format(proc_fn, exc.errno))
|
||||
log.error("Failed to delete '%s': %s", proc_fn, exc.errno)
|
||||
# Otherwise, failing to delete this file is not something
|
||||
# we can cleanly handle.
|
||||
raise
|
||||
@ -761,7 +754,7 @@ class Schedule(object):
|
||||
|
||||
'''
|
||||
|
||||
log.trace('==== evaluating schedule now {} ====='.format(now))
|
||||
log.trace('==== evaluating schedule now %s =====', now)
|
||||
|
||||
def _splay(splaytime):
|
||||
'''
|
||||
@ -799,7 +792,10 @@ class Schedule(object):
|
||||
continue
|
||||
|
||||
if not isinstance(data, dict):
|
||||
log.error('Scheduled job "{0}" should have a dict value, not {1}'.format(job, type(data)))
|
||||
log.error(
|
||||
'Scheduled job "%s" should have a dict value, not %s',
|
||||
job, type(data)
|
||||
)
|
||||
continue
|
||||
# Job is disabled, continue
|
||||
if 'enabled' in data and not data['enabled']:
|
||||
@ -814,9 +810,8 @@ class Schedule(object):
|
||||
func = None
|
||||
if func not in self.functions:
|
||||
log.info(
|
||||
'Invalid function: {0} in scheduled job {1}.'.format(
|
||||
func, job
|
||||
)
|
||||
'Invalid function: %s in scheduled job %s.',
|
||||
func, job
|
||||
)
|
||||
if 'name' not in data:
|
||||
data['name'] = job
|
||||
@ -844,8 +839,10 @@ class Schedule(object):
|
||||
until = int(time.mktime(until__.timetuple()))
|
||||
|
||||
if until <= now:
|
||||
log.debug('Until time has passed '
|
||||
'skipping job: {0}.'.format(data['name']))
|
||||
log.debug(
|
||||
'Until time has passed skipping job: %s.',
|
||||
data['name']
|
||||
)
|
||||
continue
|
||||
|
||||
if 'after' in data:
|
||||
@ -857,8 +854,10 @@ class Schedule(object):
|
||||
after = int(time.mktime(after__.timetuple()))
|
||||
|
||||
if after >= now:
|
||||
log.debug('After time has not passed '
|
||||
'skipping job: {0}.'.format(data['name']))
|
||||
log.debug(
|
||||
'After time has not passed skipping job: %s.',
|
||||
data['name']
|
||||
)
|
||||
continue
|
||||
|
||||
# Used for quick lookups when detecting invalid option combinations.
|
||||
@ -871,8 +870,10 @@ class Schedule(object):
|
||||
for i in itertools.combinations(scheduling_elements, 2)]
|
||||
|
||||
if any(i <= schedule_keys for i in invalid_sched_combos):
|
||||
log.error('Unable to use "{0}" options together. Ignoring.'
|
||||
.format('", "'.join(scheduling_elements)))
|
||||
log.error(
|
||||
'Unable to use "%s" options together. Ignoring.',
|
||||
'", "'.join(scheduling_elements)
|
||||
)
|
||||
continue
|
||||
|
||||
invalid_time_combos = []
|
||||
@ -882,9 +883,11 @@ class Schedule(object):
|
||||
set(itertools.combinations(all_items, 2)))
|
||||
|
||||
if any(set(x) <= schedule_keys for x in invalid_time_combos):
|
||||
log.error('Unable to use "{0}" with "{1}" options. Ignoring'
|
||||
.format('", "'.join(time_elements),
|
||||
'", "'.join(scheduling_elements)))
|
||||
log.error(
|
||||
'Unable to use "%s" with "%s" options. Ignoring',
|
||||
'", "'.join(time_elements),
|
||||
'", "'.join(scheduling_elements)
|
||||
)
|
||||
continue
|
||||
|
||||
if 'run_explicit' in data:
|
||||
@ -946,8 +949,7 @@ class Schedule(object):
|
||||
|
||||
elif 'when' in data:
|
||||
if not _WHEN_SUPPORTED:
|
||||
log.error('Missing python-dateutil. '
|
||||
'Ignoring job {0}.'.format(job))
|
||||
log.error('Missing python-dateutil. Ignoring job %s.', job)
|
||||
continue
|
||||
|
||||
if isinstance(data['when'], list):
|
||||
@ -983,8 +985,10 @@ class Schedule(object):
|
||||
try:
|
||||
when__ = dateutil_parser.parse(i)
|
||||
except ValueError:
|
||||
log.error('Invalid date string {0}. '
|
||||
'Ignoring job {1}.'.format(i, job))
|
||||
log.error(
|
||||
'Invalid date string %s. Ignoring job %s.',
|
||||
i, job
|
||||
)
|
||||
continue
|
||||
_when.append(int(time.mktime(when__.timetuple())))
|
||||
|
||||
@ -1076,7 +1080,7 @@ class Schedule(object):
|
||||
|
||||
elif 'cron' in data:
|
||||
if not _CRON_SUPPORTED:
|
||||
log.error('Missing python-croniter. Ignoring job {0}'.format(job))
|
||||
log.error('Missing python-croniter. Ignoring job %s', job)
|
||||
continue
|
||||
|
||||
if data['_next_fire_time'] is None:
|
||||
@ -1110,8 +1114,10 @@ class Schedule(object):
|
||||
# immediately otherwise.
|
||||
splay = _splay(data['splay'])
|
||||
if now < data['_next_fire_time'] + splay:
|
||||
log.debug('schedule.handle_func: Adding splay of '
|
||||
'{0} seconds to next run.'.format(splay))
|
||||
log.debug(
|
||||
'schedule.handle_func: Adding splay of %s seconds '
|
||||
'to next run.', splay
|
||||
)
|
||||
data['_splay'] = data['_next_fire_time'] + splay
|
||||
if 'when' in data:
|
||||
data['_run'] = True
|
||||
@ -1147,19 +1153,19 @@ class Schedule(object):
|
||||
elif run:
|
||||
if 'range' in data:
|
||||
if not _RANGE_SUPPORTED:
|
||||
log.error('Missing python-dateutil. Ignoring job {0}'.format(job))
|
||||
log.error('Missing python-dateutil. Ignoring job %s', job)
|
||||
continue
|
||||
else:
|
||||
if isinstance(data['range'], dict):
|
||||
try:
|
||||
start = int(time.mktime(dateutil_parser.parse(data['range']['start']).timetuple()))
|
||||
except ValueError:
|
||||
log.error('Invalid date string for start. Ignoring job {0}.'.format(job))
|
||||
log.error('Invalid date string for start. Ignoring job %s.', job)
|
||||
continue
|
||||
try:
|
||||
end = int(time.mktime(dateutil_parser.parse(data['range']['end']).timetuple()))
|
||||
except ValueError:
|
||||
log.error('Invalid date string for end. Ignoring job {0}.'.format(job))
|
||||
log.error('Invalid date string for end. Ignoring job %s.', job)
|
||||
continue
|
||||
if end > start:
|
||||
if 'invert' in data['range'] and data['range']['invert']:
|
||||
@ -1177,12 +1183,18 @@ class Schedule(object):
|
||||
else:
|
||||
run = False
|
||||
else:
|
||||
log.error('schedule.handle_func: Invalid range, end must be larger than start. \
|
||||
Ignoring job {0}.'.format(job))
|
||||
log.error(
|
||||
'schedule.handle_func: Invalid range, end '
|
||||
'must be larger than start. Ignoring job %s.',
|
||||
job
|
||||
)
|
||||
continue
|
||||
else:
|
||||
log.error('schedule.handle_func: Invalid, range must be specified as a dictionary. \
|
||||
Ignoring job {0}.'.format(job))
|
||||
log.error(
|
||||
'schedule.handle_func: Invalid, range must be '
|
||||
'specified as a dictionary. Ignoring job %s.',
|
||||
job
|
||||
)
|
||||
continue
|
||||
|
||||
# If there is no job specific skip_during_range available,
|
||||
@ -1192,19 +1204,27 @@ class Schedule(object):
|
||||
|
||||
if 'skip_during_range' in data and data['skip_during_range']:
|
||||
if not _RANGE_SUPPORTED:
|
||||
log.error('Missing python-dateutil. Ignoring job {0}'.format(job))
|
||||
log.error('Missing python-dateutil. Ignoring job %s', job)
|
||||
continue
|
||||
else:
|
||||
if isinstance(data['skip_during_range'], dict):
|
||||
try:
|
||||
start = int(time.mktime(dateutil_parser.parse(data['skip_during_range']['start']).timetuple()))
|
||||
except ValueError:
|
||||
log.error('Invalid date string for start in skip_during_range. Ignoring job {0}.'.format(job))
|
||||
log.error(
|
||||
'Invalid date string for start in '
|
||||
'skip_during_range. Ignoring job %s.',
|
||||
job
|
||||
)
|
||||
continue
|
||||
try:
|
||||
end = int(time.mktime(dateutil_parser.parse(data['skip_during_range']['end']).timetuple()))
|
||||
except ValueError:
|
||||
log.error('Invalid date string for end in skip_during_range. Ignoring job {0}.'.format(job))
|
||||
log.error(
|
||||
'Invalid date string for end in '
|
||||
'skip_during_range. Ignoring job %s.',
|
||||
job
|
||||
)
|
||||
log.error(data)
|
||||
continue
|
||||
|
||||
@ -1230,12 +1250,18 @@ class Schedule(object):
|
||||
else:
|
||||
run = True
|
||||
else:
|
||||
log.error('schedule.handle_func: Invalid range, end must be larger than start. \
|
||||
Ignoring job {0}.'.format(job))
|
||||
log.error(
|
||||
'schedule.handle_func: Invalid range, end '
|
||||
'must be larger than start. Ignoring job %s.',
|
||||
job
|
||||
)
|
||||
continue
|
||||
else:
|
||||
log.error('schedule.handle_func: Invalid, range must be specified as a dictionary. \
|
||||
Ignoring job {0}.'.format(job))
|
||||
log.error(
|
||||
'schedule.handle_func: Invalid, range must be '
|
||||
'specified as a dictionary. Ignoring job %s.',
|
||||
job
|
||||
)
|
||||
continue
|
||||
|
||||
if 'skip_explicit' in data:
|
||||
@ -1267,18 +1293,22 @@ class Schedule(object):
|
||||
miss_msg = ' (runtime missed ' \
|
||||
'by {0} seconds)'.format(abs(seconds))
|
||||
|
||||
log.info('Running scheduled job: {0}{1}'.format(job, miss_msg))
|
||||
log.info('Running scheduled job: %s%s', job, miss_msg)
|
||||
|
||||
if 'jid_include' not in data or data['jid_include']:
|
||||
data['jid_include'] = True
|
||||
log.debug('schedule: This job was scheduled with jid_include, '
|
||||
'adding to cache (jid_include defaults to True)')
|
||||
if 'maxrunning' in data:
|
||||
log.debug('schedule: This job was scheduled with a max '
|
||||
'number of {0}'.format(data['maxrunning']))
|
||||
log.debug(
|
||||
'schedule: This job was scheduled with a max number '
|
||||
'of %s', data['maxrunning']
|
||||
)
|
||||
else:
|
||||
log.info('schedule: maxrunning parameter was not specified for '
|
||||
'job {0}, defaulting to 1.'.format(job))
|
||||
log.info(
|
||||
'schedule: maxrunning parameter was not specified '
|
||||
'for job %s, defaulting to 1.', job
|
||||
)
|
||||
data['maxrunning'] = 1
|
||||
|
||||
multiprocessing_enabled = self.opts.get('multiprocessing', True)
|
||||
@ -1341,12 +1371,16 @@ def clean_proc_dir(opts):
|
||||
continue
|
||||
except OSError:
|
||||
continue
|
||||
log.debug('schedule.clean_proc_dir: checking job {0} for process '
|
||||
'existence'.format(job))
|
||||
log.debug(
|
||||
'schedule.clean_proc_dir: checking job %s for process '
|
||||
'existence', job
|
||||
)
|
||||
if job is not None and 'pid' in job:
|
||||
if salt.utils.process.os_is_running(job['pid']):
|
||||
log.debug('schedule.clean_proc_dir: Cleaning proc dir, '
|
||||
'pid {0} still exists.'.format(job['pid']))
|
||||
log.debug(
|
||||
'schedule.clean_proc_dir: Cleaning proc dir, pid %s '
|
||||
'still exists.', job['pid']
|
||||
)
|
||||
else:
|
||||
# Windows cannot delete an open file
|
||||
if salt.utils.platform.is_windows():
|
||||
|
@ -320,7 +320,7 @@
}
'''
# Import python libs
from __future__ import absolute_import, print_function
from __future__ import absolute_import, print_function, unicode_literals
import sys
import inspect
import textwrap

@ -5,7 +5,7 @@ Basic functions for accessing the SDB interface
For configuration options, see the docs for specific sdb
modules.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import random

@ -15,7 +15,7 @@ Library for interacting with Slack API
slack:
api_key: peWcBiMOS9HrZG15peWcBiMOS9HrZG15
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

import logging
# Import 3rd-party libs

@ -5,7 +5,7 @@ Utility functions for SMB connections
:depends: impacket
'''

from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import salt.utils.files
@ -78,8 +78,8 @@ def mkdirs(path, share='C$', conn=None, host=None, username=None, password=None)
cwd = '\\'.join(comps[0:pos])
try:
conn.listPath(share, cwd)
except (smbSessionError, smb3SessionError) as exc:
log.debug('Exception: {0}'.format(exc))
except (smbSessionError, smb3SessionError):
log.exception('Encountered error running conn.listPath')
conn.createDirectory(share, cwd)
pos += 1

@ -27,7 +27,7 @@ There are a few things to keep in mind:
gpg public key matching the address the mail is sent to. If left unset, no
encryption will be used.
'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import os
@ -35,6 +35,8 @@ import logging
import smtplib
from email.utils import formatdate

from salt.ext import six

try:
import gnupg
HAS_GNUPG = True
@ -71,8 +73,8 @@ def send(kwargs, opts):
if not config['smtp.port']:
config['smtp.port'] = 25

log.debug('SMTP port has been set to {0}'.format(config['smtp.port']))
log.debug("smtp_return: Subject is '{0}'".format(config['smtp.subject']))
log.debug('SMTP port has been set to %s', config['smtp.port'])
log.debug("smtp_return: Subject is '%s'", config['smtp.subject'])

if HAS_GNUPG and config['smtp.gpgowner']:
gpg = gnupg.GPG(
@ -84,7 +86,7 @@ def send(kwargs, opts):
encrypted_data = gpg.encrypt(config['smtp.content'], config['smtp.to'])
if encrypted_data.ok:
log.debug('smtp_return: Encryption successful')
config['smtp.content'] = str(encrypted_data)
config['smtp.content'] = six.text_type(encrypted_data)
else:
log.error(
'SMTP: Encryption failed, only an error message will be sent'

@ -19,7 +19,7 @@
|
||||
Salt Service Discovery Protocol.
|
||||
JSON-based service discovery protocol, used by minions to find running Master.
|
||||
'''
|
||||
from __future__ import absolute_import, print_function
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
import datetime
|
||||
import time
|
||||
import logging
|
||||
@ -157,7 +157,7 @@ class SSDPFactory(SSDPBase):
|
||||
self.log.debug('Sent successfully')
|
||||
return
|
||||
except AttributeError as ex:
|
||||
self.log.debug('Permission error: {0}'.format(ex))
|
||||
self.log.debug('Permission error: %s', ex)
|
||||
time.sleep(slp)
|
||||
tries += 1
|
||||
slp += slp_time()
|
||||
@ -175,7 +175,10 @@ class SSDPFactory(SSDPBase):
|
||||
try:
|
||||
timestamp = float(message[len(self.signature):])
|
||||
except TypeError:
|
||||
self.log.debug('Received invalid timestamp in package from {}'.format("{}:{}".format(*addr)))
|
||||
self.log.debug(
|
||||
'Received invalid timestamp in package from %s:%s',
|
||||
*addr
|
||||
)
|
||||
if self.disable_hidden:
|
||||
self._sendto('{0}:E:{1}'.format(self.signature, 'Invalid timestamp'), addr)
|
||||
return
|
||||
@ -183,10 +186,10 @@ class SSDPFactory(SSDPBase):
|
||||
if datetime.datetime.fromtimestamp(timestamp) < (datetime.datetime.now() - datetime.timedelta(seconds=20)):
|
||||
if self.disable_hidden:
|
||||
self._sendto('{0}:E:{1}'.format(self.signature, 'Timestamp is too old'), addr)
|
||||
self.log.debug('Received outdated package from {}'.format("{}:{}".format(*addr)))
|
||||
self.log.debug('Received outdated package from %s:%s', *addr)
|
||||
return
|
||||
|
||||
self.log.debug('Received "{}" from {}'.format(message, "{}:{}".format(*addr)))
|
||||
self.log.debug('Received "%s" from %s:%s', message, *addr)
|
||||
self._sendto(
|
||||
str('{0}:@:{1}').format( # future lint: disable=blacklisted-function
|
||||
self.signature,
|
||||
@ -197,7 +200,7 @@ class SSDPFactory(SSDPBase):
|
||||
else:
|
||||
if self.disable_hidden:
|
||||
self._sendto('{0}:E:{1}'.format(self.signature, 'Invalid packet signature').encode(), addr)
|
||||
self.log.debug('Received bad signature from {}:{}'.format(*addr))
|
||||
self.log.debug('Received bad signature from %s:%s', *addr)
|
||||
|
||||
|
||||
class SSDPDiscoveryServer(SSDPBase):
|
||||
@ -301,7 +304,7 @@ class SSDPDiscoveryServer(SSDPBase):
|
||||
'''
|
||||
listen_ip = self._config.get(self.LISTEN_IP, self.DEFAULTS[self.LISTEN_IP])
|
||||
port = self._config.get(self.PORT, self.DEFAULTS[self.PORT])
|
||||
self.log.info('Starting service discovery listener on udp://{0}:{1}'.format(listen_ip, port))
|
||||
self.log.info('Starting service discovery listener on udp://%s:%s', listen_ip, port)
|
||||
loop = asyncio.get_event_loop()
|
||||
protocol = SSDPFactory(answer=self._config[self.ANSWER])
|
||||
if asyncio.ported:
|
||||
@ -369,7 +372,10 @@ class SSDPDiscoveryClient(SSDPBase):
|
||||
except socket.timeout:
|
||||
break
|
||||
except socket.error as err:
|
||||
self.log.error('Error ocurred while discovering masters from the network: {0}'.format(err))
|
||||
self.log.error(
|
||||
'Error ocurred while discovering masters from the network: %s',
|
||||
err
|
||||
)
|
||||
|
||||
def discover(self):
|
||||
'''
|
||||
@ -391,12 +397,18 @@ class SSDPDiscoveryClient(SSDPBase):
|
||||
msg = data.decode()
|
||||
if msg.startswith(self.signature):
|
||||
msg = msg.split(self.signature)[-1]
|
||||
self.log.debug("Service announcement at '{0}'. Response: '{1}'".format("{}:{}".format(*addr), msg))
|
||||
self.log.debug(
|
||||
"Service announcement at '%s:%s'. Response: '%s'",
|
||||
addr[0], addr[1], msg
|
||||
)
|
||||
if ':E:' in msg:
|
||||
err = msg.split(':E:')[-1]
|
||||
self.log.error('Error response from the service publisher at {0}: {1}'.format(addr, err))
|
||||
self.log.error(
|
||||
'Error response from the service publisher at %s: %s',
|
||||
addr, err
|
||||
)
|
||||
if "timestamp" in err:
|
||||
self.log.error('Publisher sent shifted timestamp from {0}'.format(addr))
|
||||
self.log.error('Publisher sent shifted timestamp from %s', addr)
|
||||
else:
|
||||
if addr not in masters:
|
||||
masters[addr] = []
|
||||
|
@ -6,7 +6,7 @@ Utility functions for state functions
'''

# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import copy

# Import Salt libs
@ -242,7 +242,7 @@ def get_sls_opts(opts, **kwargs):
if 'pillarenv' in kwargs or opts.get('pillarenv_from_saltenv', False):
pillarenv = kwargs.get('pillarenv') or kwargs.get('saltenv')
if pillarenv is not None and not isinstance(pillarenv, six.string_types):
opts['pillarenv'] = str(pillarenv)
opts['pillarenv'] = six.text_type(pillarenv)
else:
opts['pillarenv'] = pillarenv

@ -3,7 +3,7 @@
Functions for StringIO objects
'''

from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# Import 3rd-party libs
from salt.ext import six

@ -71,7 +71,7 @@ def to_str(s, encoding=None):
raise TypeError('expected str, bytes, or bytearray not {}'.format(type(s)))
else:
if isinstance(s, bytearray):
return str(s)
return str(s) # future lint: disable=blacklisted-function
if isinstance(s, unicode): # pylint: disable=incompatible-py3-code,undefined-variable
if encoding:
return s.encode(encoding)
@ -131,7 +131,7 @@ def to_none(text):
'''
Convert a string to None if the string is empty or contains only spaces.
'''
if str(text).strip():
if six.text_type(text).strip():
return text
return None

@ -3,7 +3,7 @@
Contains systemd related help files
'''
# import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
import subprocess
@ -69,7 +69,7 @@ def version(context=None):
except (IndexError, ValueError):
log.error(
'Unable to determine systemd version from systemctl '
'--version, output follows:\n{0}'.format(outstr)
'--version, output follows:\n%s', outstr
)
return None
else:

@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import codecs
import glob
import logging

@ -4,7 +4,7 @@ Test the RSA ANSI X9.31 signer and verifier
'''

# python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# salt testing libs
from tests.support.unit import TestCase

@ -8,7 +8,7 @@
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import re

# Import Salt Testing libs

@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
from salt.ext.six import text_type as text

# Import Salt Libs

@ -4,7 +4,7 @@
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import copy
import os
import time

@ -4,7 +4,7 @@
# pylint: disable=abstract-method

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import copy

# Import Salt Testing Libs
@ -14,6 +14,7 @@ from tests.support.unit import TestCase, skipIf
|
||||
import salt.utils.json
|
||||
import salt.utils.yaml
|
||||
import salt.utils.schema as schema
|
||||
from salt.ext import six
|
||||
from salt.utils.versions import LooseVersion as _LooseVersion
|
||||
|
||||
# Import 3rd-party libs
|
||||
@ -2262,8 +2263,9 @@ class ComplexSchemaTestCase(TestCase):
|
||||
as excinfo:
|
||||
jsonschema.validate({'complex_item': {'thirsty': 'Foo'}},
|
||||
serialized)
|
||||
self.assertIn('\'Foo\' is not of type \'boolean\'',
|
||||
excinfo.exception.message)
|
||||
expected = "u'Foo' is not of type u'boolean'" if six.PY2 \
|
||||
else "'Foo' is not of type 'boolean'"
|
||||
self.assertIn(expected, excinfo.exception.message)
|
||||
|
||||
@skipIf(HAS_JSONSCHEMA is False, 'The \'jsonschema\' library is missing')
|
||||
def test_complex_complex_schema_item_hungry_valid(self):
|
||||
@ -2293,8 +2295,9 @@ class ComplexSchemaTestCase(TestCase):
|
||||
as excinfo:
|
||||
jsonschema.validate({'complex_complex_item': {'hungry': 'Foo'}},
|
||||
serialized)
|
||||
self.assertIn('\'Foo\' is not of type \'boolean\'',
|
||||
excinfo.exception.message)
|
||||
expected = "u'Foo' is not of type u'boolean'" if six.PY2 \
|
||||
else "'Foo' is not of type 'boolean'"
|
||||
self.assertIn(expected, excinfo.exception.message)
|
||||
|
||||
@skipIf(HAS_JSONSCHEMA is False, 'The \'jsonschema\' library is missing')
|
||||
def test_complex_complex_schema_item_inner_thirsty_invalid(self):
|
||||
@ -2306,8 +2309,9 @@ class ComplexSchemaTestCase(TestCase):
|
||||
{'complex_complex_item': {'hungry': True,
|
||||
'complex_item': {'thirsty': 'Bar'}}},
|
||||
serialized)
|
||||
self.assertIn('\'Bar\' is not of type \'boolean\'',
|
||||
excinfo.exception.message)
|
||||
expected = "u'Bar' is not of type u'boolean'" if six.PY2 \
|
||||
else "'Bar' is not of type 'boolean'"
|
||||
self.assertIn(expected, excinfo.exception.message)
|
||||
|
||||
@skipIf(HAS_JSONSCHEMA is False, 'The \'jsonschema\' library is missing')
|
||||
def test_complex_complex_schema_item_missing_required_hungry(self):
|
||||
|
@ -4,7 +4,7 @@
'''

# Import Python Libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import os

# Import Salt Testing Libs

@ -4,7 +4,7 @@ Unit Tests for functions located in salt.utils.state.py.
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import copy
import textwrap

@ -1,6 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import absolute_import
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
# Import Salt libs
|
||||
from tests.support.mock import patch
|
||||
@ -11,6 +11,9 @@ import salt.utils.stringutils
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import builtins, range # pylint: disable=redefined-builtin
|
||||
|
||||
UNICODE = '中国語 (繁体)'
|
||||
STR = BYTES = UNICODE.encode('utf-8')
|
||||
|
||||
|
||||
class StringutilsTestCase(TestCase):
|
||||
def test_contains_whitespace(self):
|
||||
@ -26,7 +29,7 @@ class StringutilsTestCase(TestCase):
|
||||
self.assertEqual(7, salt.utils.stringutils.to_num('7.0'))
|
||||
self.assertIsInstance(salt.utils.stringutils.to_num('7.0'), float)
|
||||
self.assertEqual(salt.utils.stringutils.to_num('Seven'), 'Seven')
|
||||
self.assertIsInstance(salt.utils.stringutils.to_num('Seven'), str)
|
||||
self.assertIsInstance(salt.utils.stringutils.to_num('Seven'), six.text_type)
|
||||
|
||||
def test_is_binary(self):
|
||||
self.assertFalse(salt.utils.stringutils.is_binary(LOREM_IPSUM))
|
||||
@ -50,61 +53,49 @@ class StringutilsTestCase(TestCase):
|
||||
if six.PY3:
|
||||
self.assertEqual(salt.utils.stringutils.to_str('plugh'), 'plugh')
|
||||
self.assertEqual(salt.utils.stringutils.to_str('áéíóúý', 'utf-8'), 'áéíóúý')
|
||||
un = '\u4e2d\u56fd\u8a9e (\u7e41\u4f53)' # pylint: disable=anomalous-unicode-escape-in-string
|
||||
ut = bytes((0xe4, 0xb8, 0xad, 0xe5, 0x9b, 0xbd, 0xe8, 0xaa, 0x9e, 0x20, 0x28, 0xe7, 0xb9, 0x81, 0xe4, 0xbd, 0x93, 0x29))
|
||||
self.assertEqual(salt.utils.stringutils.to_str(ut, 'utf-8'), un)
|
||||
self.assertEqual(salt.utils.stringutils.to_str(bytearray(ut), 'utf-8'), un)
|
||||
self.assertEqual(salt.utils.stringutils.to_str(BYTES, 'utf-8'), UNICODE)
|
||||
self.assertEqual(salt.utils.stringutils.to_str(bytearray(BYTES), 'utf-8'), UNICODE)
|
||||
# Test situation when a minion returns incorrect utf-8 string because of... million reasons
|
||||
ut2 = b'\x9c'
|
||||
self.assertEqual(salt.utils.stringutils.to_str(ut2, 'utf-8'), u'\ufffd')
|
||||
self.assertEqual(salt.utils.stringutils.to_str(bytearray(ut2), 'utf-8'), u'\ufffd')
|
||||
else:
|
||||
self.assertEqual(salt.utils.stringutils.to_str('plugh'), 'plugh')
|
||||
self.assertEqual(salt.utils.stringutils.to_str(u'áéíóúý', 'utf-8'), 'áéíóúý')
|
||||
un = u'\u4e2d\u56fd\u8a9e (\u7e41\u4f53)'
|
||||
ut = '\xe4\xb8\xad\xe5\x9b\xbd\xe8\xaa\x9e (\xe7\xb9\x81\xe4\xbd\x93)'
|
||||
self.assertEqual(salt.utils.stringutils.to_str(un, 'utf-8'), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_str(bytearray(ut), 'utf-8'), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_str('plugh'), str('plugh')) # future lint: disable=blacklisted-function
|
||||
self.assertEqual(salt.utils.stringutils.to_str('áéíóúý', 'utf-8'), 'áéíóúý'.encode('utf-8'))
|
||||
self.assertEqual(salt.utils.stringutils.to_str(UNICODE, 'utf-8'), STR)
|
||||
self.assertEqual(salt.utils.stringutils.to_str(bytearray(STR), 'utf-8'), STR)
|
||||
|
||||
# Test utf-8 fallback with Windows default codepage
|
||||
with patch.object(builtins, '__salt_system_encoding__', 'CP1252'):
|
||||
self.assertEqual(salt.utils.stringutils.to_str(u'Ψ'), u'Ψ'.encode('utf-8'))
|
||||
self.assertEqual(salt.utils.stringutils.to_str('Ψ'), 'Ψ'.encode('utf-8'))
|
||||
|
||||
def test_to_bytes(self):
|
||||
for x in (123, (1, 2, 3), [1, 2, 3], {1: 23}, None):
|
||||
self.assertRaises(TypeError, salt.utils.stringutils.to_bytes, x)
|
||||
if six.PY3:
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes('xyzzy'), b'xyzzy')
|
||||
ut = bytes((0xe4, 0xb8, 0xad, 0xe5, 0x9b, 0xbd, 0xe8, 0xaa, 0x9e, 0x20, 0x28, 0xe7, 0xb9, 0x81, 0xe4, 0xbd, 0x93, 0x29))
|
||||
un = '\u4e2d\u56fd\u8a9e (\u7e41\u4f53)' # pylint: disable=anomalous-unicode-escape-in-string
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(ut), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(bytearray(ut)), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(un, 'utf-8'), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(BYTES), BYTES)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(bytearray(BYTES)), BYTES)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(UNICODE, 'utf-8'), BYTES)
|
||||
|
||||
# Test utf-8 fallback with ascii default encoding
|
||||
with patch.object(builtins, '__salt_system_encoding__', 'ascii'):
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes('Ψ'), b'\xce\xa8')
|
||||
else:
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes('xyzzy'), 'xyzzy')
|
||||
ut = ''.join([chr(x) for x in (0xe4, 0xb8, 0xad, 0xe5, 0x9b, 0xbd, 0xe8, 0xaa, 0x9e, 0x20, 0x28, 0xe7, 0xb9, 0x81, 0xe4, 0xbd, 0x93, 0x29)])
|
||||
un = u'\u4e2d\u56fd\u8a9e (\u7e41\u4f53)' # pylint: disable=anomalous-unicode-escape-in-string
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(ut), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(bytearray(ut)), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(un, 'utf-8'), ut)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(BYTES), BYTES)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(bytearray(BYTES)), BYTES)
|
||||
self.assertEqual(salt.utils.stringutils.to_bytes(UNICODE, 'utf-8'), BYTES)
|
||||
|
||||
def test_to_unicode(self):
|
||||
if six.PY3:
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode('plugh'), 'plugh')
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode('áéíóúý'), 'áéíóúý')
|
||||
un = '\u4e2d\u56fd\u8a9e (\u7e41\u4f53)' # pylint: disable=anomalous-unicode-escape-in-string
|
||||
ut = bytes((0xe4, 0xb8, 0xad, 0xe5, 0x9b, 0xbd, 0xe8, 0xaa, 0x9e, 0x20, 0x28, 0xe7, 0xb9, 0x81, 0xe4, 0xbd, 0x93, 0x29))
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode(ut, 'utf-8'), un)
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode(bytearray(ut), 'utf-8'), un)
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode(BYTES, 'utf-8'), UNICODE)
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode(bytearray(BYTES), 'utf-8'), UNICODE)
|
||||
else:
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode('xyzzy', 'utf-8'), u'xyzzy')
|
||||
ut = '\xe4\xb8\xad\xe5\x9b\xbd\xe8\xaa\x9e (\xe7\xb9\x81\xe4\xbd\x93)'
|
||||
un = u'\u4e2d\u56fd\u8a9e (\u7e41\u4f53)'
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode(ut, 'utf-8'), un)
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode(str('xyzzy'), 'utf-8'), 'xyzzy') # future lint: disable=blacklisted-function
|
||||
self.assertEqual(salt.utils.stringutils.to_unicode(BYTES, 'utf-8'), UNICODE)
|
||||
|
||||
# Test utf-8 fallback with ascii default encoding
|
||||
with patch.object(builtins, '__salt_system_encoding__', 'ascii'):
|
||||
|
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import errno
import os
