Merge pull request #45406 from garethgreenaway/oxygen_updating_utils_for_unicode_round_two

[oxygen] Oxygen updating utils and related tests for Unicode round two

Commit: 0c58cb77ac
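The hunks below repeat two mechanical patterns: every module adds unicode_literals to its __future__ import and swaps str() for six.text_type(), and logging calls stop building messages eagerly with str.format() in favour of printf-style arguments. A minimal sketch of both patterns, assuming only the standalone six package (which Salt vendors as salt.ext.six); it is illustrative and not code from this commit:

# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals

import logging

import six  # Salt vendors this as salt.ext.six

log = logging.getLogger(__name__)


def retry_warning_old(value, attempt):
    # Old pattern: str() is byte-oriented on Python 2 and the message is
    # rendered even when WARNING is filtered out.
    log.warning('Retry no. {0} for {1}'.format(attempt, str(value)))


def retry_warning_new(value, attempt):
    # New pattern: six.text_type is unicode on PY2 and str on PY3, and the
    # arguments are only interpolated if the record is actually emitted.
    log.warning('Retry no. %s for %s', attempt, six.text_type(value))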
@@ -4,7 +4,7 @@ Connection library for Amazon IAM

 :depends: requests
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Python libs
 import logging
@@ -42,12 +42,12 @@ def _retry_get_url(url, num_retries=10, timeout=5):
             pass

         log.warning(
-            'Caught exception reading from URL. Retry no. {0}'.format(i)
+            'Caught exception reading from URL. Retry no. %s', i
         )
         log.warning(pprint.pformat(exc))
         time.sleep(2 ** i)
     log.error(
-        'Failed to read from URL for {0} times. Giving up.'.format(num_retries)
+        'Failed to read from URL for %s times. Giving up.', num_retries
     )
     return ''

@@ -59,5 +59,5 @@ def _convert_key_to_str(key):
     if isinstance(key, six.text_type):
         # the secret key must be bytes and not unicode to work
         # properly with hmac.new (see http://bugs.python.org/issue5285)
-        return str(key)
+        return six.text_type(key)
     return key
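The _convert_key_to_str() hunk above now returns text rather than a byte string while keeping the comment about hmac.new(). A hedged sketch of the constraint that comment refers to, with illustrative names only (this is not Salt's signing code): hmac.new() needs a bytes key on Python 3, and the Python 2 unicode-key pitfall is the linked bugs.python.org/issue5285, so a text key is encoded at the point of use.

import hashlib
import hmac

import six


def sign(key, msg):
    # Encode text inputs before handing them to hmac.new(), which requires
    # bytes on Python 3 and a plain (byte) str on Python 2.
    if isinstance(key, six.text_type):
        key = key.encode('utf-8')
    if isinstance(msg, six.text_type):
        msg = msg.encode('utf-8')
    return hmac.new(key, msg, hashlib.sha256).hexdigest()


print(sign(u'secret', u'GET/2010-05-08/'))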
@@ -8,7 +8,7 @@

 Immutable types
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import python libs
 import collections

@@ -4,7 +4,7 @@ Helpful generators and other tools
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import fnmatch
 import re

@@ -3,9 +3,7 @@
 Functions for creating and working with job IDs
 '''

-from __future__ import absolute_import
-from __future__ import print_function
-
+from __future__ import absolute_import, print_function, unicode_literals
 from calendar import month_abbr as months
 import datetime
 import hashlib
@@ -50,7 +48,7 @@ def jid_to_time(jid):
    '''
    Convert a salt job id into the time when the job was invoked
    '''
-    jid = str(jid)
+    jid = six.text_type(jid)
     if len(jid) != 20 and (len(jid) <= 21 or jid[20] != '_'):
         return ''
     year = jid[:4]
@@ -116,7 +114,7 @@ def jid_dir(jid, job_dir=None, hash_type='sha256'):
     Return the jid_dir for the given job id
     '''
     if not isinstance(jid, six.string_types):
-        jid = str(jid)
+        jid = six.text_type(jid)
     if six.PY3:
         jid = jid.encode('utf-8')
     jhash = getattr(hashlib, hash_type)(jid).hexdigest()
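The jid_dir() hunk above encodes the job id before hashing because hashlib on Python 3 rejects unicode input. A minimal, self-contained sketch of that step (the sha256 default mirrors the hunk; the rest is illustrative):

import hashlib

import six


def jid_hash(jid, hash_type='sha256'):
    # Normalise to text first, then to bytes on Python 3, the same order
    # used in jid_dir() above.
    jid = six.text_type(jid)
    if six.PY3:
        jid = jid.encode('utf-8')
    return getattr(hashlib, hash_type)(jid).hexdigest()


print(jid_hash(20171201123456789012))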
@@ -4,7 +4,7 @@ Jinja loading utils to enable a more powerful backend for jinja templates
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import collections
 import logging
 import os.path
@@ -98,8 +98,8 @@ class SaltCacheLoader(BaseLoader):
         # checks for relative '..' paths
         if '..' in template:
             log.warning(
-                'Discarded template path \'{0}\', relative paths are '
-                'prohibited'.format(template)
+                'Discarded template path \'%s\', relative paths are '
+                'prohibited', template
             )
             raise TemplateNotFound(template)

@@ -393,7 +393,7 @@ def uuid_(val):

        f4efeff8-c219-578a-bad7-3dc280612ec8
    '''
-    return str(uuid.uuid5(GLOBAL_UUID, str(val)))
+    return six.text_type(uuid.uuid5(GLOBAL_UUID, six.text_type(val)))


 ### List-related filters
@@ -822,10 +822,10 @@ class SerializerExtension(Extension, object):
             else:
                 sub = Element(tag)
             if isinstance(attrs, (str, int, bool, float)):
-                sub.text = str(attrs)
+                sub.text = six.text_type(attrs)
                 continue
             if isinstance(attrs, dict):
-                sub.attrib = {attr: str(val) for attr, val in attrs.items()
+                sub.attrib = {attr: six.text_type(val) for attr, val in attrs.items()
                               if not isinstance(val, (dict, list))}
             for tag, val in [item for item in normalize_iter(attrs) if
                              isinstance(item[1], (dict, list))]:
@@ -841,16 +841,16 @@ class SerializerExtension(Extension, object):

     def load_yaml(self, value):
         if isinstance(value, TemplateModule):
-            value = str(value)
+            value = six.text_type(value)
         try:
-            return salt.utils.yaml.safe_load(value)
+            return salt.utils.data.decode(salt.utils.yaml.safe_load(value))
         except AttributeError:
             raise TemplateRuntimeError(
                 'Unable to load yaml from {0}'.format(value))

     def load_json(self, value):
         if isinstance(value, TemplateModule):
-            value = str(value)
+            value = six.text_type(value)
         try:
             return salt.utils.json.loads(value)
         except (ValueError, TypeError, AttributeError):
@@ -859,7 +859,7 @@ class SerializerExtension(Extension, object):

     def load_text(self, value):
         if isinstance(value, TemplateModule):
-            value = str(value)
+            value = six.text_type(value)

         return value

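load_yaml() above now wraps salt.utils.yaml.safe_load() in salt.utils.data.decode() so templates always see unicode. The following is an illustrative stand-in for that decode step, not Salt's implementation; it assumes PyYAML is installed:

import yaml  # PyYAML


def decode(data, encoding='utf-8'):
    # Recursively turn byte strings in the parsed document into text.
    if isinstance(data, bytes):
        return data.decode(encoding)
    if isinstance(data, dict):
        return {decode(k, encoding): decode(v, encoding) for k, v in data.items()}
    if isinstance(data, (list, tuple)):
        return [decode(item, encoding) for item in data]
    return data


print(decode(yaml.safe_load(u'greeting: hello')))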
@@ -4,7 +4,7 @@ Functions for interacting with the job cache
 '''

 # Import Python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import logging

 # Import Salt libs
@@ -65,7 +65,7 @@ def store_job(opts, load, event=None, mminion=None):

    if event:
        # If the return data is invalid, just ignore it
-        log.info('Got return from {id} for job {jid}'.format(**load))
+        log.info('Got return from %s for job %s', load['id'], load['jid'])
        event.fire_event(load,
                         salt.utils.event.tagify([load['jid'], 'ret', load['id']], 'job'))
        event.fire_ret_load(load)
@@ -77,7 +77,8 @@ def store_job(opts, load, event=None, mminion=None):

    # do not cache job results if explicitly requested
    if load.get('jid') == 'nocache':
-        log.debug('Ignoring job return with jid for caching {jid} from {id}'.format(**load))
+        log.debug('Ignoring job return with jid for caching %s from %s',
+                  load['jid'], load['id'])
        return

    # otherwise, write to the master cache
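The store_job() hunks above drop '...'.format(**load) in favour of passing load['id'] and load['jid'] as logging arguments. A small standalone demonstration (not Salt code) of why that matters: with lazy arguments the interpolation is skipped entirely when the level is filtered out.

import logging

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger('demo')

load = {'id': 'minion01', 'jid': '20171201123456789012'}

# Eager: the message string is built even though INFO is disabled here.
log.info('Got return from {id} for job {jid}'.format(**load))

# Lazy: load['id'] and load['jid'] are only interpolated if INFO is enabled.
log.info('Got return from %s for job %s', load['id'], load['jid'])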
@@ -3,7 +3,7 @@
 Functions to work with JSON
 '''

-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Python libs
 import json  # future lint: blacklisted-module

@@ -4,7 +4,7 @@ Utilities for managing kickstart

 .. versionadded:: Beryllium
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import shlex
 import argparse  # pylint: disable=minimum-python-version
 import salt.utils.files

@@ -3,7 +3,7 @@
 Application Kinds of Salt apps.
 These are used to indicate what kind of Application is using RAET
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 from collections import namedtuple
 from salt.utils.odict import OrderedDict


@@ -4,7 +4,7 @@ Lazily-evaluated data structures, primarily used by Salt's loader
 '''

 # Import Python Libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import logging
 import collections
 import salt.exceptions
@@ -16,9 +16,11 @@ The following can be retrieved:

 Note: All dictionaries keys are expected to be strings
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 from salt.utils.dictdiffer import recursive_diff

+from salt.ext import six
+

 def list_diff(list_a, list_b, key):
     return ListDictDiffer(list_a, list_b, key)
@@ -226,9 +228,9 @@ class ListDictDiffer(object):
         for recursive_item in self._get_recursive_difference(type='all'):
             # We want the unset values as well
             recursive_item.ignore_unset_values = False
-            key_val = str(recursive_item.past_dict[self._key]) \
+            key_val = six.text_type(recursive_item.past_dict[self._key]) \
                 if self._key in recursive_item.past_dict \
-                else str(recursive_item.current_dict[self._key])
+                else six.text_type(recursive_item.current_dict[self._key])

             for change in recursive_item.changed():
                 if change != self._key:
@@ -238,9 +240,9 @@ class ListDictDiffer(object):
         # We want the unset values as well
         for recursive_item in self._get_recursive_difference(type='intersect'):
             recursive_item.ignore_unset_values = False
-            key_val = str(recursive_item.past_dict[self._key]) \
+            key_val = six.text_type(recursive_item.past_dict[self._key]) \
                 if self._key in recursive_item.past_dict \
-                else str(recursive_item.current_dict[self._key])
+                else six.text_type(recursive_item.current_dict[self._key])

             for change in recursive_item.changed():
                 if change != self._key:

@@ -4,7 +4,7 @@ the locale utils used by salt
 '''

 # Import Python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import sys

 # Import Salt libs
@@ -3,7 +3,7 @@
 Helper functions for use by mac modules
 .. versionadded:: 2016.3.0
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Python Libraries
 import logging
@@ -23,6 +23,7 @@ from salt.exceptions import CommandExecutionError, SaltInvocationError,\

 # Import Third Party Libs
 from salt.ext.six.moves import range
+from salt.ext import six

 DEFAULT_SHELL = salt.grains.extra.shell()['shell']

@@ -57,7 +58,7 @@ def _run_all(cmd):

    for idx, item in enumerate(cmd):
        if not isinstance(cmd[idx], six.string_types):
-            cmd[idx] = str(cmd[idx])
+            cmd[idx] = six.text_type(cmd[idx])

    cmd = ' '.join(cmd)

@@ -88,7 +89,7 @@ def _run_all(cmd):
    try:
        proc.run()
    except TimedProcTimeoutError as exc:
-        ret['stdout'] = str(exc)
+        ret['stdout'] = six.text_type(exc)
        ret['stderr'] = ''
        ret['retcode'] = 1
        ret['pid'] = proc.process.pid
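The _run_all() hunk above coerces every non-string element of cmd to text before joining the list into a command line. A short sketch of that normalisation with made-up command data (illustrative, not Salt code):

import six

cmd = ['launchctl', 'asuser', 501, 'list']
for idx, item in enumerate(cmd):
    # Ints, booleans and similar become text so ' '.join() cannot fail.
    if not isinstance(cmd[idx], six.string_types):
        cmd[idx] = six.text_type(cmd[idx])
print(' '.join(cmd))  # launchctl asuser 501 list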
@@ -2,7 +2,7 @@
 '''
 Functions for working with Mako templates
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 try:
     from mako.lookup import TemplateCollection, TemplateLookup  # pylint: disable=import-error,3rd-party-module-not-gated
@@ -8,7 +8,7 @@
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import os
 import logging
 import signal
@@ -90,11 +90,11 @@ class MasterPillarUtil(object):
            )
            tgt_type = expr_form

-        log.debug('New instance of {0} created.'.format(
-            self.__class__.__name__))
+        log.debug('New instance of %s created.',
+                  self.__class__.__name__)
        if opts is None:
-            log.error('{0}: Missing master opts init arg.'.format(
-                self.__class__.__name__))
+            log.error('%s: Missing master opts init arg.',
+                      self.__class__.__name__)
            raise SaltException('{0}: Missing master opts init arg.'.format(
                self.__class__.__name__))
        else:
@@ -109,12 +109,11 @@ class MasterPillarUtil(object):
        self.pillar_fallback = pillar_fallback
        self.cache = salt.cache.factory(opts)
        log.debug(
-            'Init settings: tgt: \'{0}\', tgt_type: \'{1}\', saltenv: \'{2}\', '
-            'use_cached_grains: {3}, use_cached_pillar: {4}, '
-            'grains_fallback: {5}, pillar_fallback: {6}'.format(
-                tgt, tgt_type, saltenv, use_cached_grains, use_cached_pillar,
-                grains_fallback, pillar_fallback
-            )
+            'Init settings: tgt: \'%s\', tgt_type: \'%s\', saltenv: \'%s\', '
+            'use_cached_grains: %s, use_cached_pillar: %s, '
+            'grains_fallback: %s, pillar_fallback: %s',
+            tgt, tgt_type, saltenv, use_cached_grains, use_cached_pillar,
+            grains_fallback, pillar_fallback
        )

    def _get_cached_mine_data(self, *minion_ids):
@@ -152,10 +151,9 @@ class MasterPillarUtil(object):
            mdata = self.cache.fetch('minions/{0}'.format(minion_id), 'data')
            if not isinstance(mdata, dict):
                log.warning(
-                    'cache.fetch should always return a dict. ReturnedType: {0}, MinionId: {1}'.format(
-                        type(mdata).__name__,
-                        minion_id
-                    )
+                    'cache.fetch should always return a dict. ReturnedType: %s, MinionId: %s',
+                    type(mdata).__name__,
+                    minion_id
                )
                continue
            if 'grains' in mdata:
@@ -166,7 +164,7 @@ class MasterPillarUtil(object):

    def _get_live_minion_grains(self, minion_ids):
        # Returns a dict of grains fetched directly from the minions
-        log.debug('Getting live grains for minions: "{0}"'.format(minion_ids))
+        log.debug('Getting live grains for minions: "%s"', minion_ids)
        client = salt.client.get_local_client(self.opts['conf_file'])
        ret = client.cmd(
            ','.join(minion_ids),
@@ -181,19 +179,18 @@ class MasterPillarUtil(object):
            return {}
        if not minion_grains:
            log.warning(
-                'Cannot get pillar data for {0}: no grains supplied.'.format(
-                    minion_id
-                )
+                'Cannot get pillar data for %s: no grains supplied.',
+                minion_id
            )
            return {}
-        log.debug('Getting live pillar for {0}'.format(minion_id))
+        log.debug('Getting live pillar for %s', minion_id)
        pillar = salt.pillar.Pillar(
            self.opts,
            minion_grains,
            minion_id,
            self.saltenv,
            self.opts['ext_pillar'])
-        log.debug('Compiling pillar for {0}'.format(minion_id))
+        log.debug('Compiling pillar for %s', minion_id)
        ret = pillar.compile_pillar()
        return ret

@@ -208,14 +205,14 @@ class MasterPillarUtil(object):
        if self.use_cached_grains:
            cret = dict([(minion_id, mcache) for (minion_id, mcache) in six.iteritems(cached_grains) if mcache])
            missed_minions = [minion_id for minion_id in minion_ids if minion_id not in cret]
-            log.debug('Missed cached minion grains for: {0}'.format(missed_minions))
+            log.debug('Missed cached minion grains for: %s', missed_minions)
            if self.grains_fallback:
                lret = self._get_live_minion_grains(missed_minions)
            ret = dict(list(six.iteritems(dict([(minion_id, {}) for minion_id in minion_ids]))) + list(lret.items()) + list(cret.items()))
        else:
            lret = self._get_live_minion_grains(minion_ids)
            missed_minions = [minion_id for minion_id in minion_ids if minion_id not in lret]
-            log.debug('Missed live minion grains for: {0}'.format(missed_minions))
+            log.debug('Missed live minion grains for: %s', missed_minions)
            if self.grains_fallback:
                cret = dict([(minion_id, mcache) for (minion_id, mcache) in six.iteritems(cached_grains) if mcache])
            ret = dict(list(six.iteritems(dict([(minion_id, {}) for minion_id in minion_ids]))) + list(lret.items()) + list(cret.items()))
@@ -233,14 +230,14 @@ class MasterPillarUtil(object):
        if self.use_cached_pillar:
            cret = dict([(minion_id, mcache) for (minion_id, mcache) in six.iteritems(cached_pillar) if mcache])
            missed_minions = [minion_id for minion_id in minion_ids if minion_id not in cret]
-            log.debug('Missed cached minion pillars for: {0}'.format(missed_minions))
+            log.debug('Missed cached minion pillars for: %s', missed_minions)
            if self.pillar_fallback:
                lret = dict([(minion_id, self._get_live_minion_pillar(minion_id, grains.get(minion_id, {}))) for minion_id in missed_minions])
            ret = dict(list(six.iteritems(dict([(minion_id, {}) for minion_id in minion_ids]))) + list(lret.items()) + list(cret.items()))
        else:
            lret = dict([(minion_id, self._get_live_minion_pillar(minion_id, grains.get(minion_id, {}))) for minion_id in minion_ids])
            missed_minions = [minion_id for minion_id in minion_ids if minion_id not in lret]
-            log.debug('Missed live minion pillars for: {0}'.format(missed_minions))
+            log.debug('Missed live minion pillars for: %s', missed_minions)
            if self.pillar_fallback:
                cret = dict([(minion_id, mcache) for (minion_id, mcache) in six.iteritems(cached_pillar) if mcache])
            ret = dict(list(six.iteritems(dict([(minion_id, {}) for minion_id in minion_ids]))) + list(lret.items()) + list(cret.items()))
@@ -253,9 +250,9 @@ class MasterPillarUtil(object):
        _res = ckminions.check_minions(self.tgt, self.tgt_type)
        minion_ids = _res['minions']
        if len(minion_ids) == 0:
-            log.debug('No minions matched for tgt="{0}" and tgt_type="{1}"'.format(self.tgt, self.tgt_type))
+            log.debug('No minions matched for tgt="%s" and tgt_type="%s"', self.tgt, self.tgt_type)
            return {}
-        log.debug('Matching minions for tgt="{0}" and tgt_type="{1}": {2}'.format(self.tgt, self.tgt_type, minion_ids))
+        log.debug('Matching minions for tgt="%s" and tgt_type="%s": %s', self.tgt, self.tgt_type, minion_ids)
        return minion_ids

    def get_minion_pillar(self):
@@ -286,11 +283,11 @@ class MasterPillarUtil(object):
        else:
            cached_minion_grains = {}
            cached_minion_pillars = {}
-        log.debug('Getting minion grain data for: {0}'.format(minion_ids))
+        log.debug('Getting minion grain data for: %s', minion_ids)
        minion_grains = self._get_minion_grains(
            *minion_ids,
            cached_grains=cached_minion_grains)
-        log.debug('Getting minion pillar data for: {0}'.format(minion_ids))
+        log.debug('Getting minion pillar data for: %s', minion_ids)
        minion_pillars = self._get_minion_pillar(
            *minion_ids,
            grains=minion_grains,
@@ -317,7 +314,7 @@ class MasterPillarUtil(object):
            cached_minion_grains, cached_minion_pillars = self._get_cached_minion_data(*minion_ids)
        else:
            cached_minion_grains = {}
-        log.debug('Getting minion grain data for: {0}'.format(minion_ids))
+        log.debug('Getting minion grain data for: %s', minion_ids)
        minion_grains = self._get_minion_grains(
            *minion_ids,
            cached_grains=cached_minion_grains)
@@ -329,7 +326,7 @@ class MasterPillarUtil(object):
        '''
        mine_data = {}
        minion_ids = self._tgt_to_list()
-        log.debug('Getting cached mine data for: {0}'.format(minion_ids))
+        log.debug('Getting cached mine data for: %s', minion_ids)
        mine_data = self._get_cached_mine_data(*minion_ids)
        return mine_data

@@ -355,9 +352,9 @@ class MasterPillarUtil(object):
            return False

        minion_ids = self._tgt_to_list()
-        log.debug('Clearing cached {0} data for: {1}'.format(
-            ', '.join(clear_what),
-            minion_ids))
+        log.debug('Clearing cached %s data for: %s',
+                  ', '.join(clear_what),
+                  minion_ids)
        if clear_pillar == clear_grains:
            # clear_pillar and clear_grains are both True or both False.
            # This means we don't deal with pillar/grains caches at all.
@@ -612,7 +609,7 @@ class ConnectedCache(MultiprocessingProcess):
                # check for next cache-request
                if socks.get(creq_in) == zmq.POLLIN:
                    msg = serial.loads(creq_in.recv())
-                    log.debug('ConCache Received request: {0}'.format(msg))
+                    log.debug('ConCache Received request: %s', msg)

                    # requests to the minion list are send as str's
                    if isinstance(msg, six.string_types):
@@ -649,7 +646,8 @@ class ConnectedCache(MultiprocessingProcess):

                        if isinstance(data, six.string_types):
                            if data not in self.minions:
-                                log.debug('ConCache Adding minion {0} to cache'.format(new_c_data[0]))
+                                log.debug('ConCache Adding minion %s to cache',
+                                          new_c_data[0])
                                self.minions.append(data)

                        elif isinstance(data, list):
@@ -660,7 +658,7 @@ class ConnectedCache(MultiprocessingProcess):
                            log.debug('ConCache Got malformed result dict from worker')
                            del new_c_data

-                    log.info('ConCache {0} entries in cache'.format(len(self.minions)))
+                    log.info('ConCache %s entries in cache', len(self.minions))

                # check for next timer-event to start new jobs
                if socks.get(timer_in) == zmq.POLLIN:
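The MasterPillarUtil hunks above also show that printf-style arguments work when the format string is split across several implicitly concatenated literals, as in the Init-settings debug call. A compressed, standalone version of that call shape (not Salt code):

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('demo')

tgt, tgt_type, saltenv = '*', 'glob', 'base'
log.debug(
    # Adjacent string literals are concatenated into one format string.
    'Init settings: tgt: \'%s\', tgt_type: \'%s\', '
    'saltenv: \'%s\'',
    tgt, tgt_type, saltenv
)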
@@ -10,7 +10,7 @@ Library for interacting with Mattermost Incoming Webhooks
          hook: 3tdgo8restnxiykdx88wqtxryr
          api_url: https://example.com
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 import logging
 # Import 3rd-party libs
@@ -21,6 +21,7 @@ from salt.version import __version__
 # pylint: enable=import-error,no-name-in-module
 import salt.utils.http

+from salt.ext import six

 log = logging.getLogger(__name__)

@@ -41,7 +42,7 @@ def query(hook=None,
           'res': True}

    base_url = _urljoin(api_url, '/hooks/')
-    url = _urljoin(base_url, str(hook))
+    url = _urljoin(base_url, six.text_type(hook))

    result = salt.utils.http.query(url,
                                   method,
@@ -36,12 +36,13 @@ better to always use a named configuration profile, as shown above.
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import logging

 # Import salt libs
 from salt.exceptions import CommandExecutionError, SaltInvocationError
 from salt.ext.six import integer_types
+from salt.ext import six

 # Import third party libs
 try:
@@ -94,7 +95,7 @@ def get_conn(opts, profile=None, host=None, port=None):
        host = conf.get('memcached.host', DEFAULT_HOST)
        port = conf.get('memcached.port', DEFAULT_PORT)

-    if not str(port).isdigit():
+    if not six.text_type(port).isdigit():
        raise SaltInvocationError('port must be an integer')

    if HAS_LIBS:
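The get_conn() hunk above validates the port through six.text_type(port).isdigit(), which accepts either an int or a text value on both Python versions. A sketch with illustrative names only (ValueError stands in for Salt's SaltInvocationError):

import six

DEFAULT_PORT = 11211


def validate_port(port=DEFAULT_PORT):
    # Mirrors the check above: coerce to text, then test for digits.
    if not six.text_type(port).isdigit():
        raise ValueError('port must be an integer')
    return int(port)


print(validate_port(11211))
print(validate_port('11212'))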
@@ -2,7 +2,7 @@
 '''
 Migration tools
 '''
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import python libs
 import os.path

@@ -4,7 +4,7 @@ Utility functions for minions
 '''

 # Import Python Libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import os
 import logging
 import threading
@@ -112,7 +112,7 @@ def _read_proc_file(path, opts):
        pid = data.get('pid')
        if pid:
            log.warning(
-                'PID {0} exists but does not appear to be a salt process.'.format(pid)
+                'PID %s exists but does not appear to be a salt process.', pid
            )
            try:
                os.remove(path)
@@ -5,7 +5,7 @@ expected to return
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import os
 import fnmatch
 import re
@@ -55,7 +55,7 @@ def parse_target(target_expression):

    match = TARGET_REX.match(target_expression)
    if not match:
-        log.warning('Unable to parse target "{0}"'.format(target_expression))
+        log.warning('Unable to parse target "%s"', target_expression)
    ret = {
        'engine': None,
        'delimiter': None,
@@ -102,11 +102,11 @@ def nodegroup_comp(nodegroup, nodegroups, skip=None, first_call=True):
    if skip is None:
        skip = set()
    elif nodegroup in skip:
-        log.error('Failed nodegroup expansion: illegal nested nodegroup "{0}"'.format(nodegroup))
+        log.error('Failed nodegroup expansion: illegal nested nodegroup "%s"', nodegroup)
        return ''

    if nodegroup not in nodegroups:
-        log.error('Failed nodegroup expansion: unknown nodegroup "{0}"'.format(nodegroup))
+        log.error('Failed nodegroup expansion: unknown nodegroup "%s"', nodegroup)
        return ''

    nglookup = nodegroups[nodegroup]
@@ -124,7 +124,7 @@ def nodegroup_comp(nodegroup, nodegroups, skip=None, first_call=True):
    opers = ['and', 'or', 'not', '(', ')']
    for word in words:
        if not isinstance(word, six.string_types):
-            word = str(word)
+            word = six.text_type(word)
        if word in opers:
            ret.append(word)
        elif len(word) >= 3 and word.startswith('N@'):
@@ -139,7 +139,7 @@ def nodegroup_comp(nodegroup, nodegroups, skip=None, first_call=True):

    skip.remove(nodegroup)

-    log.debug('nodegroup_comp({0}) => {1}'.format(nodegroup, ret))
+    log.debug('nodegroup_comp(%s) => %s', nodegroup, ret)
    # Only return list form if a nodegroup was expanded. Otherwise return
    # the original string to conserve backwards compat
    if expanded_nodegroup or not first_call:
@@ -391,7 +391,7 @@ class CkMinions(object):
                # Target is a network?
                tgt = ipaddress.ip_network(tgt)
            except:  # pylint: disable=bare-except
-                log.error('Invalid IP/CIDR target: {0}'.format(tgt))
+                log.error('Invalid IP/CIDR target: %s', tgt)
                return {'minions': [],
                        'missing': []}
        proto = 'ipv{0}'.format(tgt.version)
@@ -407,7 +407,7 @@ class CkMinions(object):
            if grains is None or proto not in grains:
                match = False
            elif isinstance(tgt, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
-                match = str(tgt) in grains[proto]
+                match = six.text_type(tgt) in grains[proto]
            else:
                match = salt.utils.network.in_subnet(tgt, grains[proto])

@@ -432,7 +432,7 @@ class CkMinions(object):
                return self._range.expand(expr)
            except seco.range.RangeException as exc:
                log.error(
-                    'Range exception in compound match: {0}'.format(exc)
+                    'Range exception in compound match: %s', exc
                )
        cache_enabled = self.opts.get('minion_data_cache', False)
        if greedy:
@@ -472,7 +472,7 @@ class CkMinions(object):
            log.error('Compound target that is neither string, list nor tuple')
            return {'minions': [], 'missing': []}
        minions = set(self._pki_minions())
-        log.debug('minions: {0}'.format(minions))
+        log.debug('minions: %s', minions)

        if self.opts.get('minion_data_cache', False):
            ref = {'G': self._check_grain_minions,
@@ -505,13 +505,13 @@ class CkMinions(object):
            if word in opers:
                if results:
                    if results[-1] == '(' and word in ('and', 'or'):
-                        log.error('Invalid beginning operator after "(": {0}'.format(word))
+                        log.error('Invalid beginning operator after "(": %s', word)
                        return {'minions': [], 'missing': []}
                    if word == 'not':
                        if not results[-1] in ('&', '|', '('):
                            results.append('&')
                            results.append('(')
-                            results.append(str(set(minions)))
+                            results.append(six.text_type(set(minions)))
                            results.append('-')
                            unmatched.append('-')
                    elif word == 'and':
@@ -524,8 +524,8 @@ class CkMinions(object):
                elif word == ')':
                    if not unmatched or unmatched[-1] != '(':
                        log.error('Invalid compound expr (unexpected '
-                                  'right parenthesis): {0}'
-                                  .format(expr))
+                                  'right parenthesis): %s',
+                                  expr)
                        return {'minions': [], 'missing': []}
                    results.append(word)
                    unmatched.pop()
@@ -533,14 +533,14 @@ class CkMinions(object):
                        results.append(')')
                        unmatched.pop()
                else:  # Won't get here, unless oper is added
-                    log.error('Unhandled oper in compound expr: {0}'
-                              .format(expr))
+                    log.error('Unhandled oper in compound expr: %s',
+                              expr)
                    return {'minions': [], 'missing': []}
            else:
                # seq start with oper, fail
                if word == 'not':
                    results.append('(')
-                    results.append(str(set(minions)))
+                    results.append(six.text_type(set(minions)))
                    results.append('-')
                    unmatched.append('-')
                elif word == '(':
@@ -549,24 +549,23 @@ class CkMinions(object):
                else:
                    log.error(
                        'Expression may begin with'
-                        ' binary operator: {0}'.format(word)
+                        ' binary operator: %s', word
                    )
                    return {'minions': [], 'missing': []}

            elif target_info and target_info['engine']:
                if 'N' == target_info['engine']:
                    # Nodegroups should already be expanded/resolved to other engines
-                    log.error('Detected nodegroup expansion failure of "{0}"'.format(word))
+                    log.error('Detected nodegroup expansion failure of "%s"', word)
                    return {'minions': [], 'missing': []}
                engine = ref.get(target_info['engine'])
                if not engine:
                    # If an unknown engine is called at any time, fail out
                    log.error(
-                        'Unrecognized target engine "{0}" for'
-                        ' target expression "{1}"'.format(
-                            target_info['engine'],
-                            word,
-                        )
+                        'Unrecognized target engine "%s" for'
+                        ' target expression "%s"',
+                        target_info['engine'],
+                        word,
                    )
                    return {'minions': [], 'missing': []}

@@ -576,7 +575,7 @@ class CkMinions(object):
                    engine_args.append(greedy)

                _results = engine(*engine_args)
-                results.append(str(set(_results['minions'])))
+                results.append(six.text_type(set(_results['minions'])))
                missing.extend(_results['missing'])
                if unmatched and unmatched[-1] == '-':
                    results.append(')')
@@ -585,7 +584,7 @@ class CkMinions(object):
            else:
                # The match is not explicitly defined, evaluate as a glob
                _results = self._check_glob_minions(word, True)
-                results.append(str(set(_results['minions'])))
+                results.append(six.text_type(set(_results['minions'])))
                if unmatched and unmatched[-1] == '-':
                    results.append(')')
                    unmatched.pop()
@@ -594,13 +593,13 @@ class CkMinions(object):
        results.extend([')' for item in unmatched])

        results = ' '.join(results)
-        log.debug('Evaluating final compound matching expr: {0}'
-                  .format(results))
+        log.debug('Evaluating final compound matching expr: %s',
+                  results)
        try:
            minions = list(eval(results))  # pylint: disable=W0123
            return {'minions': minions, 'missing': missing}
        except Exception:
-            log.error('Invalid compound target: {0}'.format(expr))
+            log.error('Invalid compound target: %s', expr)
            return {'minions': [], 'missing': []}

        return {'minions': list(minions),
@@ -685,8 +684,8 @@ class CkMinions(object):
            _res = check_func(expr, greedy)
        except Exception:
            log.exception(
-                'Failed matching available minions with {0} pattern: {1}'
-                .format(tgt_type, expr))
+                'Failed matching available minions with %s pattern: %s',
+                tgt_type, expr)
            _res = {'minions': [], 'missing': []}
        return _res

@@ -703,7 +702,7 @@ class CkMinions(object):

            target_info = parse_target(auth_entry)
            if not target_info:
-                log.error('Failed to parse valid target "{0}"'.format(auth_entry))
+                log.error('Failed to parse valid target "%s"', auth_entry)

            v_matcher = ref.get(target_info['engine'])
            v_expr = target_info['pattern']
@@ -754,7 +753,7 @@ class CkMinions(object):
                else:
                    vals.append(False)
            except Exception:
-                log.error('Invalid regular expression: {0}'.format(regex))
+                log.error('Invalid regular expression: %s', regex)
        return vals and all(vals)

    def any_auth(self, form, auth_list, fun, arg, tgt=None, tgt_type='glob'):
@@ -863,7 +862,7 @@ class CkMinions(object):
                continue
            if isinstance(auth_list_entry, dict):
                if len(auth_list_entry) != 1:
-                    log.info('Malformed ACL: {0}'.format(auth_list_entry))
+                    log.info('Malformed ACL: %s', auth_list_entry)
                    continue
                allowed_minions.update(set(auth_list_entry.keys()))
            for key in auth_list_entry:
@@ -1097,7 +1096,7 @@ class CkMinions(object):
                        break
                    if cond_arg is None:  # None == '.*' i.e. allow any
                        continue
-                    if not self.match_check(cond_arg, str(args[i])):
+                    if not self.match_check(cond_arg, six.text_type(args[i])):
                        good = False
                        break
                if not good:
@@ -1110,7 +1109,7 @@ class CkMinions(object):
                        break
                    if v is None:  # None == '.*' i.e. allow any
                        continue
-                    if not self.match_check(v, str(kwargs[k])):
+                    if not self.match_check(v, six.text_type(kwargs[k])):
                        good = False
                        break
                if good:
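The compound-matcher hunks above assemble a text expression out of six.text_type(set(...)) fragments and operators and then eval() it. A heavily simplified toy illustration of that mechanism with assumed example data (not Salt's matcher):

import six

matched_by_glob = {'web01', 'web02', 'db01'}
matched_by_grain = {'web01', 'web02'}

# Each engine result is rendered as the text form of a set; 'and' becomes '&'.
results = [six.text_type(matched_by_glob), '&', six.text_type(matched_by_grain)]
expression = ' '.join(results)
print(expression)
minions = list(eval(expression))  # pylint: disable=W0123
print(minions)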
@@ -4,7 +4,7 @@ Common functions for managing mounts
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import logging
 import os


@@ -6,7 +6,7 @@ Utilities for accessing storage container blobs on Azure
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import logging
 import inspect

@@ -191,7 +191,7 @@ def object_to_dict(obj):
            continue
        # This is ugly, but inspect.isclass() doesn't seem to work
        try:
-            if inspect.isclass(obj) or 'class' in str(type(obj.__dict__.get(item))):
+            if inspect.isclass(obj) or 'class' in six.text_type(type(obj.__dict__.get(item))):
                ret[item] = object_to_dict(obj.__dict__[item])
            elif isinstance(obj.__dict__[item], six.text_type):
                ret[item] = obj.__dict__[item].encode('ascii', 'replace')
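The object_to_dict() hunk above keeps the string test 'class' in six.text_type(type(...)) as a fallback for inspect.isclass(). A tiny illustration of what that repr looks like for an instance of a user-defined class (the class name is made up):

import six


class Blob(object):
    pass


# The repr of the type object mentions 'class' on both Python 2 and 3 for
# new-style classes, which is what the membership test above relies on.
print(six.text_type(type(Blob())))             # <class '__main__.Blob'>
print('class' in six.text_type(type(Blob())))  # True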
@@ -10,7 +10,7 @@
 '''

 # Import Python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Salt Testing libs
 from tests.support.unit import TestCase

@@ -4,7 +4,7 @@ Tests for salt.utils.jid
 '''

 # Import Python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals
 import datetime
 import os


@@ -4,7 +4,7 @@ Tests for salt.utils.json
 '''

 # Import Python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Salt libs
 import salt.utils.json

@@ -8,7 +8,7 @@
 '''

 # Import Pytohn libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Salt Testing libs
 from tests.support.unit import TestCase

@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Salt Testing libs
 from tests.support.unit import TestCase

@@ -1,7 +1,7 @@
 # coding: utf-8

 # Import Python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals


 # Import Salt libs

@@ -4,7 +4,7 @@ mac_utils tests
 '''

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Salt Testing Libs
 from tests.support.unit import TestCase, skipIf

@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-

 # Import python libs
-from __future__ import absolute_import
+from __future__ import absolute_import, unicode_literals

 # Import Salt Libs
 import salt.utils.minions as minions