Merge pull request #44946 from terminalmage/unicode-file
[PY3] Update file state/execution modules and associated files with unicode_literals
Commit: 653ad2a224
@@ -869,26 +869,33 @@ Example:

.. note::

This option may have adverse effects when using the default renderer, ``yaml_jinja``.
This is due to the fact that YAML requires proper handling in regard to special
characters. Please see the section on :ref:`YAML ASCII support <yaml_plain_ascii>`
in the :ref:`YAML Idiosyncracies <yaml-idiosyncrasies>` documentation for more
information.
This option may have adverse effects when using the default renderer,
``yaml_jinja``. This is due to the fact that YAML requires proper handling
in regard to special characters. Please see the section on :ref:`YAML ASCII
support <yaml_plain_ascii>` in the :ref:`YAML Idiosyncracies
<yaml-idiosyncrasies>` documentation for more information.

.. jinja_ref:: json_decode_list
.. jinja_ref:: json_encode_list

``json_decode_list``
``json_encode_list``
--------------------

.. versionadded:: 2017.7.0
.. versionadded:: Oxygen
Renamed from ``json_decode_list`` to ``json_encode_list``. When you encode
something you get bytes, and when you decode, you get your locale's
encoding (usually a ``unicode`` type). This filter was incorrectly-named
when it was added. ``json_decode_list`` will be supported until the Neon
release.

JSON decodes as unicode, Jinja needs bytes.
Recursively encodes all string elements of the list to bytes.

Example:

.. code-block:: jinja

{{ [1, 2, 3] | json_decode_list }}
{{ [1, 2, 3] | json_encode_list }}

Returns:
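A minimal Python sketch (not part of this changeset) of the encode/decode distinction the rename rests on: decoding bytes yields text, encoding text yields bytes, so a filter that produces bytes is an *encode* operation.

.. code-block:: python

    # Illustrative only: why the filter is an "encode" operation.
    text = b'\xd0\x94'.decode('utf-8')   # decoding bytes -> unicode text (u'\u0414')
    data = text.encode('utf-8')          # encoding text -> bytes (b'\xd0\x94')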
@@ -898,25 +905,35 @@ Returns:

.. jinja_ref:: json_decode_dict
.. jinja_ref:: json_encode_dict

``json_decode_dict``
``json_encode_dict``
--------------------

.. versionadded:: 2017.7.0
.. versionadded:: Oxygen
Renamed from ``json_decode_dict`` to ``json_encode_dict``. When you encode
something you get bytes, and when you decode, you get your locale's
encoding (usually a ``unicode`` type). This filter was incorrectly-named
when it was added. ``json_decode_dict`` will be supported until the Neon
release.

JSON decodes as unicode, Jinja needs bytes.
Recursively encodes all string items in the dictionary to bytes.

Example:

Assuming that ``pillar['foo']`` contains ``{u'a': u'\u0414'}``, and your locale
is ``en_US.UTF-8``:

.. code-block:: jinja

{{ {'a': 'b'} | json_decode_dict }}
{{ pillar['foo'] | json_encode_dict }}

Returns:

.. code-block:: python

{'a': 'b'}
{'a': '\xd0\x94'}

.. jinja_ref:: random_hash
@@ -927,7 +944,8 @@ Returns:

.. versionadded:: 2017.7.0
.. versionadded:: Oxygen
Renamed from ``rand_str`` to ``random_hash`` to more accurately describe
what the filter does.
what the filter does. ``rand_str`` will be supported until the Neon
release.

Generates a random number between 1 and the number passed to the filter, and
then hashes it. The default hash type is the one specified by the minion's
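As a rough sketch of the behaviour described above (a hypothetical standalone helper, not the actual Salt implementation; the default hash type shown here is an assumption):

.. code-block:: python

    import hashlib
    import random

    def random_hash_sketch(maximum, hash_type='md5'):
        # Pick a random number between 1 and ``maximum`` and hash it.
        value = str(random.randint(1, maximum))
        return hashlib.new(hash_type, value.encode('utf-8')).hexdigest()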
@@ -128,6 +128,17 @@ by any master tops matches that are not matched via a top file.

To make master tops matches execute first, followed by top file matches, set
the new :conf_minion:`master_tops_first` minion config option to ``True``.

Several Jinja Filters Renamed
-----------------------------

The following Jinja filters (originally added in 2017.7.0) have been renamed
due to the fact that they were inaccurately named when initially added. The
original names will be supported until the Neon release of Salt.

- :jinja_ref:`rand_str` renamed to :jinja_ref:`random_hash`
- :jinja_ref:`jinja_decode_dict` renamed to :jinja_ref:`jinja_encode_dict`
- :jinja_ref:`jinja_decode_list` renamed to :jinja_ref:`jinja_encode_list`
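The old names keep working until Neon via a compatibility shim; a hedged sketch of that pattern (placeholder implementation, not the code added in this PR):

.. code-block:: python

    import warnings

    def encode_dict(data):
        # new, correctly named implementation (see salt.utils.data in this PR)
        return data  # placeholder for illustration

    def decode_dict(data):
        # deprecated alias, kept until Neon: warn and forward to the new name
        warnings.warn('decode_dict has been renamed to encode_dict',
                      DeprecationWarning, stacklevel=2)
        return encode_dict(data)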
Return Codes for Runner/Wheel Functions
---------------------------------------
@@ -824,7 +824,7 @@ def query(params=None):

content = request.text

result = json.loads(content, object_hook=salt.utils.data.decode_dict)
result = json.loads(content, object_hook=salt.utils.data.encode_dict)
if 'Code' in result:
raise SaltCloudSystemExit(
pprint.pformat(result.get('Message', {}))

@@ -188,7 +188,7 @@ def query(params=None):

log.debug(request.url)

content = request.text
result = json.loads(content, object_hook=salt.utils.data.decode_dict)
result = json.loads(content, object_hook=salt.utils.data.encode_dict)

# print('response:')
# pprint.pprint(result)
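For context, a simplified sketch (not code from this changeset): ``object_hook`` is called by ``json.loads`` for every decoded JSON object, so passing the renamed ``salt.utils.data.encode_dict`` converts the unicode strings produced by the JSON decoder into byte strings on Python 2.

.. code-block:: python

    import json

    def encode_dict(data):
        # simplified stand-in for salt.utils.data.encode_dict (PY2 semantics)
        return {k.encode('utf-8') if isinstance(k, type(u'')) else k:
                v.encode('utf-8') if isinstance(v, type(u'')) else v
                for k, v in data.items()}

    result = json.loads('{"Code": "Success"}', object_hook=encode_dict)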
@@ -62,7 +62,7 @@ def jobber_check(self):
rms.append(jid)
data = self.shells.value[jid]
stdout, stderr = data['proc'].communicate()
ret = json.loads(salt.utils.stringutils.to_str(stdout), object_hook=salt.utils.data.decode_dict)['local']
ret = json.loads(salt.utils.stringutils.to_str(stdout), object_hook=salt.utils.data.encode_dict)['local']
route = {'src': (self.stack.value.local.name, 'manor', 'jid_ret'),
'dst': (data['msg']['route']['src'][0], None, 'remote_cmd')}
ret['cmd'] = '_return'

@@ -5,7 +5,7 @@ involves preparing the three listeners and the workers needed by the master.
'''
# Import python libs
from __future__ import absolute_import, with_statement
from __future__ import absolute_import, with_statement, unicode_literals
import copy
import ctypes
import os

@@ -1543,7 +1543,7 @@ class AESFuncs(object):
'publish_auth')
if not os.path.isdir(auth_cache):
os.makedirs(auth_cache)
jid_fn = os.path.join(auth_cache, str(load['jid']))
jid_fn = os.path.join(auth_cache, six.text_type(load['jid']))
with salt.utils.files.fopen(jid_fn, 'r') as fp_:
if not load['id'] == fp_.read():
return {}

@@ -1772,8 +1772,8 @@ class ClearFuncs(object):
except Exception as exc:
log.error('Exception occurred while introspecting %s: %s', fun, exc)
return {'error': {'name': exc.__class__.__name__,
'args': exc.args,
'message': str(exc)}}
'args': exc.args,
'message': six.text_type(exc)}}

def wheel(self, clear_load):
'''

@@ -3,7 +3,7 @@
Routines to set up a minion
'''
# Import python libs
from __future__ import absolute_import, print_function, with_statement
from __future__ import absolute_import, print_function, with_statement, unicode_literals
import os
import re
import sys

@@ -1638,7 +1638,7 @@ class Minion(MinionBase):
if not iret:
iret = []
iret.append(single)
tag = tagify([data['jid'], 'prog', opts['id'], str(ind)], 'job')
tag = tagify([data['jid'], 'prog', opts['id'], six.text_type(ind)], 'job')
event_data = {'return': single}
minion_instance._fire_master(event_data, tag)
ind += 1

@@ -1879,6 +1879,7 @@ class Minion(MinionBase):
# The file is gone already
pass
log.info('Returning information for job: %s', jid)
log.trace('Return data: %s', ret)
if ret_cmd == '_syndic_return':
load = {'cmd': ret_cmd,
'id': self.opts['uid'],

@@ -3282,7 +3283,7 @@ class Matcher(object):
if isinstance(val, list):
# We are matching a single component to a single list member
for member in val:
if fnmatch.fnmatch(str(member).lower(), comps[1].lower()):
if fnmatch.fnmatch(six.text_type(member).lower(), comps[1].lower()):
return True
return False
if isinstance(val, dict):

@@ -3355,7 +3356,7 @@ class Matcher(object):
if proto not in grains:
match = False
elif isinstance(tgt, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
match = str(tgt) in grains[proto]
match = six.text_type(tgt) in grains[proto]
else:
match = salt.utils.network.in_subnet(tgt, grains[proto])

@@ -3442,12 +3443,12 @@ class Matcher(object):
engine_kwargs['delimiter'] = target_info['delimiter']
results.append(
str(getattr(self, '{0}_match'.format(engine))(*engine_args, **engine_kwargs))
six.text_type(getattr(self, '{0}_match'.format(engine))(*engine_args, **engine_kwargs))
)
else:
# The match is not explicitly defined, evaluate it as a glob
results.append(str(self.glob_match(word)))
results.append(six.text_type(self.glob_match(word)))
results = ' '.join(results)
log.debug('compound_match %s ? "%s" => "%s"', self.opts['id'], tgt, results)

@@ -5,7 +5,7 @@ A module for shelling out.
Keep in mind that this module is insecure, in that it can give whomever has
access to the master root execution access to all salt minions.
'''
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals

# Import python libs
import functools

@@ -149,7 +149,7 @@ def _render_cmd(cmd, cwd, template, saltenv='base', pillarenv=None, pillar_overr
# write out path to temp file
tmp_path_fn = salt.utils.files.mkstemp()
with salt.utils.files.fopen(tmp_path_fn, 'w+') as fp_:
fp_.write(contents)
fp_.write(salt.utils.stringutils.to_str(contents))
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
tmp_path_fn,
to_str=True,

@@ -223,7 +223,7 @@ def _check_avail(cmd):
Check to see if the given command can be run
'''
if isinstance(cmd, list):
cmd = ' '.join([str(x) if not isinstance(x, six.string_types) else x
cmd = ' '.join([six.text_type(x) if not isinstance(x, six.string_types) else x
for x in cmd])
bret = True
wret = False

@@ -372,7 +372,7 @@ def _run(cmd,
# requested. The command output is what will be controlled by the
# 'loglevel' parameter.
msg = (
u'Executing command {0}{1}{0} {2}in directory \'{3}\'{4}'.format(
'Executing command {0}{1}{0} {2}in directory \'{3}\'{4}'.format(
'\'' if not isinstance(cmd, list) else '',
_get_stripped(cmd),
'as user \'{0}\' '.format(runas) if runas else '',

@@ -433,7 +433,7 @@ def _run(cmd,
import itertools
env_runas = dict(itertools.izip(*[iter(env_encoded.split(b'\0'))]*2))
elif six.PY3:
if isinstance(env_encoded, str):
if isinstance(env_encoded, str):  # future lint: disable=blacklisted-function
env_encoded = env_encoded.encode(__salt_system_encoding__)
env_runas = dict(list(zip(*[iter(env_encoded.split(b'\0'))]*2)))
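The ``zip(*[iter(parts)] * 2)`` idiom above pairs consecutive items of the split environment blob into key/value pairs; a small standalone illustration (sample data is made up):

.. code-block:: python

    env_blob = b'PATH\x00/usr/bin\x00HOME\x00/root'
    parts = env_blob.split(b'\x00')
    env = dict(zip(*[iter(parts)] * 2))
    # {b'PATH': b'/usr/bin', b'HOME': b'/root'}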
@@ -491,7 +491,7 @@ def _run(cmd,
kwargs = {'cwd': cwd,
'shell': python_shell,
'env': run_env,
'stdin': str(stdin) if stdin is not None else stdin,
'stdin': six.text_type(stdin) if stdin is not None else stdin,
'stdout': stdout,
'stderr': stderr,
'with_communicate': with_communicate,

@@ -500,7 +500,7 @@ def _run(cmd,
}
if umask is not None:
_umask = str(umask).lstrip('0')
_umask = six.text_type(umask).lstrip('0')
if _umask == '':
msg = 'Zero umask is not allowed.'

@@ -566,7 +566,7 @@ def _run(cmd,
try:
proc.run()
except TimedProcTimeoutError as exc:
ret['stdout'] = str(exc)
ret['stdout'] = six.text_type(exc)
ret['stderr'] = ''
ret['retcode'] = None
ret['pid'] = proc.process.pid

@@ -577,7 +577,7 @@ def _run(cmd,
try:
out = proc.stdout.decode(__salt_system_encoding__)
except AttributeError:
out = u''
out = ''
except UnicodeDecodeError:
log.error('UnicodeDecodeError while decoding output of cmd {0}'.format(cmd))
out = proc.stdout.decode(__salt_system_encoding__, 'replace')

@@ -585,7 +585,7 @@ def _run(cmd,
try:
err = proc.stderr.decode(__salt_system_encoding__)
except AttributeError:
err = u''
err = ''
except UnicodeDecodeError:
log.error('UnicodeDecodeError while decoding error of cmd {0}'.format(cmd))
err = proc.stderr.decode(__salt_system_encoding__, 'replace')

@@ -998,10 +998,10 @@ def run(cmd,
log.error(log_callback(msg))
if raise_err:
raise CommandExecutionError(
log_callback(ret[u'stdout'] if not hide_output else u'')
log_callback(ret['stdout'] if not hide_output else '')
)
log.log(lvl, u'output: %s', log_callback(ret[u'stdout']))
return ret[u'stdout'] if not hide_output else u''
log.log(lvl, 'output: %s', log_callback(ret['stdout']))
return ret['stdout'] if not hide_output else ''

def shell(cmd,

@@ -1837,9 +1837,9 @@ def run_all(cmd,
)
log.error(log_callback(msg))
if ret['stdout']:
log.log(lvl, u'stdout: {0}'.format(log_callback(ret['stdout'])))
log.log(lvl, 'stdout: {0}'.format(log_callback(ret['stdout'])))
if ret['stderr']:
log.log(lvl, u'stderr: {0}'.format(log_callback(ret['stderr'])))
log.log(lvl, 'stderr: {0}'.format(log_callback(ret['stderr'])))
if ret['retcode']:
log.log(lvl, 'retcode: {0}'.format(ret['retcode']))

@@ -2275,7 +2275,7 @@ def script(source,
if not salt.utils.platform.is_windows():
os.chmod(path, 320)
os.chown(path, __salt__['file.user_to_uid'](runas), -1)
ret = _run(path + ' ' + str(args) if args else path,
ret = _run(path + ' ' + six.text_type(args) if args else path,
cwd=cwd,
stdin=stdin,
output_loglevel=output_loglevel,

@@ -2550,7 +2550,7 @@ def exec_code_all(lang, code, cwd=None, args=None, **kwargs):
codefile = salt.utils.files.mkstemp()
with salt.utils.files.fopen(codefile, 'w+t', binary=False) as fp_:
fp_.write(code)
fp_.write(salt.utils.stringutils.to_str(code))
if powershell:
cmd = [lang, "-File", codefile]

@@ -2751,7 +2751,7 @@ def run_chroot(root,
sh_ = '/bin/bash'
if isinstance(cmd, (list, tuple)):
cmd = ' '.join([str(i) for i in cmd])
cmd = ' '.join([six.text_type(i) for i in cmd])
cmd = 'chroot {0} {1} -c {2}'.format(root, sh_, _cmd_quote(cmd))
run_func = __context__.pop('cmd.run_chroot.func', run_all)

@@ -2795,7 +2795,7 @@ def run_chroot(root,
__salt__['mount.umount'](os.path.join(root, 'proc'))
__salt__['mount.umount'](os.path.join(root, 'dev'))
if hide_output:
ret[u'stdout'] = ret[u'stderr'] = u''
ret['stdout'] = ret['stderr'] = ''
return ret

@@ -2811,7 +2811,8 @@ def _is_valid_shell(shell):
if os.path.exists(shells):
try:
with salt.utils.files.fopen(shells, 'r') as shell_fp:
lines = shell_fp.read().splitlines()
lines = [salt.utils.stringutils.to_unicode(x)
for x in shell_fp.read().splitlines()]
for line in lines:
if line.startswith('#'):
continue

@@ -2843,7 +2844,8 @@ def shells():
if os.path.exists(shells_fn):
try:
with salt.utils.files.fopen(shells_fn, 'r') as shell_fp:
lines = shell_fp.read().splitlines()
lines = [salt.utils.stringutils.to_unicode(x)
for x in shell_fp.read().splitlines()]
for line in lines:
line = line.strip()
if line.startswith('#'):
@@ -8,7 +8,7 @@ group, mode, and data
# TODO: We should add the capability to do u+r type operations here
# some time in the future
from __future__ import absolute_import, print_function
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import datetime

@@ -123,12 +123,12 @@ def _binary_replace(old, new):
new_isbin = not __utils__['files.is_text'](new)
if any((old_isbin, new_isbin)):
if all((old_isbin, new_isbin)):
return u'Replace binary file'
return 'Replace binary file'
elif old_isbin:
return u'Replace binary file with text file'
return 'Replace binary file with text file'
elif new_isbin:
return u'Replace text file with binary file'
return u''
return 'Replace text file with binary file'
return ''

def _get_bkroot():

@@ -384,7 +384,7 @@ def set_mode(path, mode):
'''
path = os.path.expanduser(path)
mode = str(mode).lstrip('0Oo')
mode = six.text_type(mode).lstrip('0Oo')
if not mode:
mode = '0'
if not os.path.exists(path):

@@ -1090,8 +1090,8 @@ def sed(path,
return False
# Mandate that before and after are strings
before = str(before)
after = str(after)
before = six.text_type(before)
after = six.text_type(after)
before = _sed_esc(before, escape_all)
after = _sed_esc(after, escape_all)
limit = _sed_esc(limit, escape_all)

@@ -1140,8 +1140,8 @@ def sed_contains(path,
if not os.path.exists(path):
return False
before = _sed_esc(str(text), False)
limit = _sed_esc(str(limit), False)
before = _sed_esc(six.text_type(text), False)
limit = _sed_esc(six.text_type(limit), False)
options = '-n -r -e'
if sys.platform == 'darwin':
options = options.replace('-r', '-E')

@@ -1225,8 +1225,8 @@ def psed(path,
multi = bool(multi)
before = str(before)
after = str(after)
before = six.text_type(before)
after = six.text_type(after)
before = _sed_esc(before, escape_all)
# The pattern to replace with does not need to be escaped!!!
#after = _sed_esc(after, escape_all)

@@ -1238,9 +1238,29 @@ def psed(path,
with salt.utils.files.fopen('{0}{1}'.format(path, backup), 'r') as ifile:
if multi is True:
for line in ifile.readline():
ofile.write(_psed(line, before, after, limit, flags))
ofile.write(
salt.utils.stringutils.to_str(
_psed(
salt.utils.stringutils.to_unicode(line),
before,
after,
limit,
flags
)
)
)
else:
ofile.write(_psed(ifile.read(), before, after, limit, flags))
ofile.write(
salt.utils.stringutils.to_str(
_psed(
salt.utils.stringutils.to_unicode(ifile.read()),
before,
after,
limit,
flags
)
)
)

RE_FLAG_TABLE = {'I': re.I,
@@ -1449,8 +1469,7 @@ def comment_line(path,
# Loop through each line of the file and look for a match
for line in r_file:
# Is it in this line
if six.PY3:
line = line.decode(__salt_system_encoding__)
line = salt.utils.stringutils.to_unicode(line)
if re.match(regex, line):
# Load lines into dictionaries, set found to True
orig_file.append(line)

@@ -1492,8 +1511,7 @@ def comment_line(path,
buffering=bufsize) as r_file:
# Loop through each line of the file and look for a match
for line in r_file:
if six.PY3:
line = line.decode(__salt_system_encoding__)
line = salt.utils.stringutils.to_unicode(line)
try:
# Is it in this line
if re.match(regex, line):

@@ -1505,9 +1523,7 @@ def comment_line(path,
else:
# Write the existing line (no change)
wline = line
if six.PY3:
wline = wline.encode(__salt_system_encoding__)
w_file.write(wline)
w_file.write(salt.utils.stringutils.to_str(wline))
except (OSError, IOError) as exc:
raise CommandExecutionError(
"Unable to write file '{0}'. Contents may "
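The ``salt.utils.stringutils.to_unicode``/``to_str`` calls replace the explicit ``six.PY3`` checks above. Roughly (a simplified sketch of the idea, not Salt's actual implementation), they normalise values to text on the way in and back to the platform's native ``str`` on the way out:

.. code-block:: python

    def to_unicode_sketch(value, encoding='utf-8'):
        # bytes -> text; text passes through unchanged
        return value.decode(encoding) if isinstance(value, bytes) else value

    def to_str_sketch(value, encoding='utf-8'):
        # text -> native str; this sketch only shows the Python 3 side,
        # where the native str type is already text
        return value if isinstance(value, str) else value.decode(encoding)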
@@ -1561,7 +1577,7 @@ def _get_flags(flags):
if isinstance(flags, Iterable) and not isinstance(flags, Mapping):
_flags_acc = []
for flag in flags:
_flag = getattr(re, str(flag).upper())
_flag = getattr(re, six.text_type(flag).upper())
if not isinstance(_flag, six.integer_types):
raise SaltInvocationError(

@@ -1861,7 +1877,7 @@ def line(path, content=None, match=None, mode=None, location=None,
match = content
with salt.utils.files.fopen(path, mode='r') as fp_:
body = fp_.read()
body = salt.utils.stringutils.to_unicode(fp_.read())
body_before = hashlib.sha256(salt.utils.stringutils.to_bytes(body)).hexdigest()
after = _regex_to_static(body, after)
before = _regex_to_static(body, before)

@@ -2000,8 +2016,13 @@ def line(path, content=None, match=None, mode=None, location=None,
if changed:
if show_changes:
with salt.utils.files.fopen(path, 'r') as fp_:
path_content = fp_.read().splitlines(True)
changes_diff = ''.join(difflib.unified_diff(path_content, body.splitlines(True)))
path_content = [salt.utils.stringutils.to_unicode(x)
for x in fp_.read().splitlines(True)]
changes_diff = ''.join(difflib.unified_diff(
path_content,
[salt.utils.stringutils.to_unicode(x)
for x in body.splitlines(True)]
))
if __opts__['test'] is False:
fh_ = None
try:
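``difflib.unified_diff`` expects both sequences to hold the same string type, which is why both sides are decoded to unicode before diffing. A minimal standalone illustration:

.. code-block:: python

    import difflib

    old = ['a\n', 'b\n']
    new = ['a\n', 'c\n']
    # Both inputs are text; mixing bytes and text here would fail.
    print(''.join(difflib.unified_diff(old, new)))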
@@ -2204,16 +2225,15 @@ def replace(path,
# Avoid TypeErrors by forcing repl to be bytearray related to mmap
# Replacement text may contains integer: 123 for example
repl = salt.utils.stringutils.to_bytes(str(repl))
repl = salt.utils.stringutils.to_bytes(six.text_type(repl))
if not_found_content:
not_found_content = salt.utils.stringutils.to_bytes(not_found_content)
found = False
temp_file = None
content = salt.utils.stringutils.to_str(not_found_content) if not_found_content and \
(prepend_if_not_found or
append_if_not_found) \
else salt.utils.stringutils.to_str(repl)
content = salt.utils.stringutils.to_unicode(not_found_content) \
if not_found_content and (prepend_if_not_found or append_if_not_found) \
else salt.utils.stringutils.to_unicode(repl)
try:
# First check the whole file, determine whether to make the replacement

@@ -2384,8 +2404,8 @@ def replace(path,
check_perms(path, None, pre_user, pre_group, pre_mode)
def get_changes():
orig_file_as_str = [salt.utils.stringutils.to_str(x) for x in orig_file]
new_file_as_str = [salt.utils.stringutils.to_str(x) for x in new_file]
orig_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in orig_file]
new_file_as_str = [salt.utils.stringutils.to_unicode(x) for x in new_file]
return ''.join(difflib.unified_diff(orig_file_as_str, new_file_as_str))
if show_changes:

@@ -2511,7 +2531,7 @@ def blockreplace(path,
bufsize=1, mode='rb')
for line in fi_file:
line = salt.utils.stringutils.to_str(line)
line = salt.utils.stringutils.to_unicode(line)
result = line
if marker_start in line:

@@ -2782,7 +2802,7 @@ def contains(path, text):
if not os.path.exists(path):
return False
stripped_text = str(text).strip()
stripped_text = six.text_type(text).strip()
try:
with salt.utils.filebuffer.BufferedReader(path) as breader:
for chunk in breader:

@@ -2818,6 +2838,7 @@ def contains_regex(path, regex, lchar=''):
try:
with salt.utils.files.fopen(path, 'r') as target:
for line in target:
line = salt.utils.stringutils.to_unicode(line)
if lchar:
line = line.lstrip(lchar)
if re.search(regex, line):

@@ -2918,7 +2939,11 @@ def append(path, *args, **kwargs):
# Append lines in text mode
with salt.utils.files.fopen(path, 'a') as ofile:
for new_line in args:
ofile.write('{0}{1}'.format(new_line, os.linesep))
ofile.write(
salt.utils.stringutils.to_str(
'{0}{1}'.format(new_line, os.linesep)
)
)
return 'Wrote {0} lines to "{1}"'.format(len(args), path)

@@ -2966,7 +2991,8 @@ def prepend(path, *args, **kwargs):
try:
with salt.utils.files.fopen(path) as fhr:
contents = fhr.readlines()
contents = [salt.utils.stringutils.to_unicode(line)
for line in fhr.readlines()]
except IOError:
contents = []

@@ -2974,9 +3000,9 @@ def prepend(path, *args, **kwargs):
for line in args:
preface.append('{0}\n'.format(line))
with salt.utils.files.fopen(path, "w") as ofile:
with salt.utils.files.fopen(path, 'w') as ofile:
contents = preface + contents
ofile.write(''.join(contents))
ofile.write(salt.utils.stringutils.to_str(''.join(contents)))
return 'Prepended {0} lines to "{1}"'.format(len(args), path)

@@ -3024,7 +3050,7 @@ def write(path, *args, **kwargs):
for line in args:
contents.append('{0}\n'.format(line))
with salt.utils.files.fopen(path, "w") as ofile:
ofile.write(''.join(contents))
ofile.write(salt.utils.stringutils.to_str(''.join(contents)))
return 'Wrote {0} lines to "{1}"'.format(len(contents), path)

@@ -3054,8 +3080,8 @@ def touch(name, atime=None, mtime=None):
mtime = int(mtime)
try:
if not os.path.exists(name):
with salt.utils.files.fopen(name, 'a') as fhw:
fhw.write('')
with salt.utils.files.fopen(name, 'a'):
pass
if not atime and not mtime:
times = None

@@ -3405,7 +3431,7 @@ def read(path, binary=False):
if binary is True:
access_mode += 'b'
with salt.utils.files.fopen(path, access_mode) as file_obj:
return file_obj.read()
return salt.utils.stringutils.to_unicode(file_obj.read())

def readlink(path, canonicalize=False):

@@ -3526,7 +3552,7 @@ def stats(path, hash_type=None, follow_symlinks=True):
ret['mtime'] = pstat.st_mtime
ret['ctime'] = pstat.st_ctime
ret['size'] = pstat.st_size
ret['mode'] = str(oct(stat.S_IMODE(pstat.st_mode)))
ret['mode'] = six.text_type(oct(stat.S_IMODE(pstat.st_mode)))
if hash_type:
ret['sum'] = get_hash(path, hash_type)
ret['type'] = 'file'

@@ -4159,13 +4185,13 @@ def extract_hash(hash_fn,
hash_type = ''
hash_len_expr = '{0},{1}'.format(min(HASHES_REVMAP), max(HASHES_REVMAP))
else:
hash_len_expr = str(hash_len)
hash_len_expr = six.text_type(hash_len)
filename_separators = string.whitespace + r'\/'
if source_hash_name:
if not isinstance(source_hash_name, six.string_types):
source_hash_name = str(source_hash_name)
source_hash_name = six.text_type(source_hash_name)
source_hash_name_idx = (len(source_hash_name) + 1) * -1
log.debug(
'file.extract_hash: Extracting %s hash for file matching '

@@ -4175,12 +4201,12 @@ def extract_hash(hash_fn,
)
if file_name:
if not isinstance(file_name, six.string_types):
file_name = str(file_name)
file_name = six.text_type(file_name)
file_name_basename = os.path.basename(file_name)
file_name_idx = (len(file_name_basename) + 1) * -1
if source:
if not isinstance(source, six.string_types):
source = str(source)
source = six.text_type(source)
urlparsed_source = _urlparse(source)
source_basename = os.path.basename(
urlparsed_source.path or urlparsed_source.netloc

@@ -4204,7 +4230,7 @@ def extract_hash(hash_fn,
with salt.utils.files.fopen(hash_fn, 'r') as fp_:
for line in fp_:
line = line.strip()
line = salt.utils.stringutils.to_unicode(line.strip())
hash_re = r'(?i)(?<![a-z0-9])([a-f0-9]{' + hash_len_expr + '})(?![a-z0-9])'
hash_match = re.search(hash_re, line)
matched = None

@@ -4867,7 +4893,7 @@ def get_diff(file1,
source_hash=source_hash)
if cached_path is False:
errors.append(
u'File {0} not found'.format(
'File {0} not found'.format(
salt.utils.stringutils.to_unicode(filename)
)
)

@@ -4887,20 +4913,21 @@ def get_diff(file1,
for idx, filename in enumerate(files):
try:
with salt.utils.files.fopen(filename, 'r') as fp_:
args.append(fp_.readlines())
args.append([salt.utils.stringutils.to_unicode(x)
for x in fp_.readlines()])
except (IOError, OSError) as exc:
raise CommandExecutionError(
'Failed to read {0}: {1}'.format(
salt.utils.stringutils.to_str(filename),
salt.utils.stringutils.to_unicode(filename),
exc.strerror
)
)
if args[0] != args[1]:
if template and __salt__['config.option']('obfuscate_templates'):
ret = u'<Obfuscated Template>'
ret = '<Obfuscated Template>'
elif not show_changes:
ret = u'<show_changes=False>'
ret = '<show_changes=False>'
else:
bdiff = _binary_replace(*files)
if bdiff:

@@ -4908,14 +4935,14 @@ def get_diff(file1,
else:
if show_filenames:
args.extend(
[salt.utils.stringutils.to_str(x) for x in files]
[salt.utils.stringutils.to_unicode(x) for x in files]
)
ret = salt.utils.locales.sdecode(
''.join(difflib.unified_diff(*args))  # pylint: disable=no-value-for-parameter
)
return ret
return u''
return ''

def manage_file(name,
@@ -5017,11 +5044,11 @@ def manage_file(name,
unable to stat the file as it exists on the fileserver and thus
cannot mirror the mode on the salt-ssh minion
encoding : None
If None, str() will be applied to contents.
If not None, specified encoding will be used.
See https://docs.python.org/3/library/codecs.html#standard-encodings
for the list of available encodings.
encoding
If specified, then the specified encoding will be used. Otherwise, the
file will be encoded using the system locale (usually UTF-8). See
https://docs.python.org/3/library/codecs.html#standard-encodings for
the list of available encodings.

.. versionadded:: 2017.7.0
@@ -5233,12 +5260,12 @@ def manage_file(name,
ret, _ = check_perms(name, ret, user, group, mode, attrs, follow_symlinks)
if ret['changes']:
ret['comment'] = u'File {0} updated'.format(
ret['comment'] = 'File {0} updated'.format(
salt.utils.locales.sdecode(name)
)
elif not ret['changes'] and ret['result']:
ret['comment'] = u'File {0} is in the correct state'.format(
ret['comment'] = 'File {0} is in the correct state'.format(
salt.utils.locales.sdecode(name)
)
if sfn:

@@ -5260,10 +5287,10 @@ def manage_file(name,
# dir_mode was not specified. Otherwise, any
# directories created with makedirs_() below can't be
# listed via a shell.
mode_list = [x for x in str(mode)][-3:]
mode_list = [x for x in six.text_type(mode)][-3:]
for idx in range(len(mode_list)):
if mode_list[idx] != '0':
mode_list[idx] = str(int(mode_list[idx]) | 1)
mode_list[idx] = six.text_type(int(mode_list[idx]) | 1)
dir_mode = ''.join(mode_list)
if salt.utils.platform.is_windows():

@@ -5623,7 +5650,7 @@ def mknod_chrdev(name,
ret['result'] = None
else:
if os.mknod(name,
int(str(mode).lstrip('0Oo'), 8) | stat.S_IFCHR,
int(six.text_type(mode).lstrip('0Oo'), 8) | stat.S_IFCHR,
os.makedev(major, minor)) is None:
ret['changes'] = {'new': 'Character device {0} created.'.format(name)}
ret['result'] = True

@@ -5698,7 +5725,7 @@ def mknod_blkdev(name,
ret['result'] = None
else:
if os.mknod(name,
int(str(mode).lstrip('0Oo'), 8) | stat.S_IFBLK,
int(six.text_type(mode).lstrip('0Oo'), 8) | stat.S_IFBLK,
os.makedev(major, minor)) is None:
ret['changes'] = {'new': 'Block device {0} created.'.format(name)}
ret['result'] = True

@@ -5769,7 +5796,7 @@ def mknod_fifo(name,
ret['changes'] = {'new': 'Fifo pipe {0} created.'.format(name)}
ret['result'] = None
else:
if os.mkfifo(name, int(str(mode).lstrip('0Oo'), 8)) is None:
if os.mkfifo(name, int(six.text_type(mode).lstrip('0Oo'), 8)) is None:
ret['changes'] = {'new': 'Fifo pipe {0} created.'.format(name)}
ret['result'] = True
except OSError as exc:

@@ -5984,7 +6011,7 @@ def restore_backup(path, backup_id):
ret = {'result': False,
'comment': 'Invalid backup_id \'{0}\''.format(backup_id)}
try:
if len(str(backup_id)) == len(str(int(backup_id))):
if len(six.text_type(backup_id)) == len(six.text_type(int(backup_id))):
backup = list_backups(path)[int(backup_id)]
else:
return ret

@@ -6044,7 +6071,7 @@ def delete_backup(path, backup_id):
ret = {'result': False,
'comment': 'Invalid backup_id \'{0}\''.format(backup_id)}
try:
if len(str(backup_id)) == len(str(int(backup_id))):
if len(six.text_type(backup_id)) == len(six.text_type(int(backup_id))):
backup = list_backups(path)[int(backup_id)]
else:
return ret

@@ -6115,7 +6142,7 @@ def grep(path,
try:
split = salt.utils.args.shlex_split(opt)
except AttributeError:
split = salt.utils.args.shlex_split(str(opt))
split = salt.utils.args.shlex_split(six.text_type(opt))
if len(split) > 1:
raise SaltInvocationError(
'Passing multiple command line arguments in a single string '
@@ -43,7 +43,7 @@ def create(name, profile):
cmd = 'salt-cloud --out json -p {0} {1}'.format(profile, name)
out = __salt__['cmd.run_stdout'](cmd, python_shell=False)
try:
ret = json.loads(out, object_hook=salt.utils.data.decode_dict)
ret = json.loads(out, object_hook=salt.utils.data.encode_dict)
except ValueError:
ret = {}
return ret

@@ -169,7 +169,7 @@ def _get_pause(jid, state_id=None):
'''
Return the pause information for a given jid
'''
pause_dir = os.path.join(__opts__[u'cachedir'], 'state_pause')
pause_dir = os.path.join(__opts__['cachedir'], 'state_pause')
pause_path = os.path.join(pause_dir, jid)
if not os.path.exists(pause_dir):
try:

@@ -196,7 +196,7 @@ def get_pauses(jid=None):
'''
ret = {}
active = __salt__['saltutil.is_running']('state.*')
pause_dir = os.path.join(__opts__[u'cachedir'], 'state_pause')
pause_dir = os.path.join(__opts__['cachedir'], 'state_pause')
if not os.path.exists(pause_dir):
return ret
if jid is None:

@@ -1956,7 +1956,7 @@ def single(fun, name, test=None, queue=False, **kwargs):
st_._mod_init(kwargs)
snapper_pre = _snapper_pre(opts, kwargs.get('__pub_jid', 'called localy'))
ret = {u'{0[state]}_|-{0[__id__]}_|-{0[name]}_|-{0[fun]}'.format(kwargs):
ret = {'{0[state]}_|-{0[__id__]}_|-{0[name]}_|-{0[fun]}'.format(kwargs):
st_.call(kwargs)}
_set_retcode(ret)
# Work around Windows multiprocessing bug, set __opts__['test'] back to

@@ -2026,7 +2026,7 @@ def pkg(pkg_path,
s_pkg.close()
lowstate_json = os.path.join(root, 'lowstate.json')
with salt.utils.files.fopen(lowstate_json, 'r') as fp_:
lowstate = json.load(fp_, object_hook=salt.utils.data.decode_dict)
lowstate = json.load(fp_, object_hook=salt.utils.data.encode_dict)
# Check for errors in the lowstate
for chunk in lowstate:
if not isinstance(chunk, dict):

@@ -2041,7 +2041,7 @@ def pkg(pkg_path,
roster_grains_json = os.path.join(root, 'roster_grains.json')
if os.path.isfile(roster_grains_json):
with salt.utils.files.fopen(roster_grains_json, 'r') as fp_:
roster_grains = json.load(fp_, object_hook=salt.utils.data.decode_dict)
roster_grains = json.load(fp_, object_hook=salt.utils.data.encode_dict)
if os.path.isfile(roster_grains_json):
popts['grains'] = roster_grains
@@ -8,7 +8,7 @@
It's just a wrapper around json (or simplejson if available).
'''
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals

try:
import simplejson as json

@@ -8,7 +8,7 @@
Implements a Python serializer (via pprint.format)
'''
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals

try:
import simplejson as json

@@ -16,7 +16,6 @@ except ImportError:
import json
import pprint
import salt.utils.data

__all__ = ['serialize', 'available']

@@ -35,10 +34,4 @@ def serialize(obj, **options):
# there's probably a more performant way to do this...
# TODO remove json round-trip when all dataset will use
# serializers
return pprint.pformat(
json.loads(
json.dumps(obj),
object_hook=salt.utils.data.decode_dict
),
**options
)
return pprint.pformat(json.loads(json.dumps(obj)), **options)
@@ -9,7 +9,7 @@
It also use C bindings if they are available.
'''
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import datetime

import yaml

@@ -285,14 +285,14 @@ def format_log(ret):
# non-ascii characters like "Français" or "Español". See Issue #33605.
msg += '\'{0}\' changed from \'{1}\' to \'{2}\'\n'.format(pkg, old, new)
if not msg:
msg = str(ret['changes'])
msg = six.text_type(ret['changes'])
if ret['result'] is True or ret['result'] is None:
log.info(msg)
else:
log.error(msg)
else:
# catch unhandled data
log.info(str(ret))
log.info(six.text_type(ret))

def master_compile(master_opts, minion_opts, grains, id_, saltenv):

@@ -506,7 +506,7 @@ class Compiler(object):
'Illegal requisite "{0}", '
'is SLS {1}\n'
).format(
str(req_val),
six.text_type(req_val),
body['__sls__']))
continue

@@ -701,7 +701,7 @@ class State(object):
try:
pillar_enc = pillar_enc.lower()
except AttributeError:
pillar_enc = str(pillar_enc).lower()
pillar_enc = six.text_type(pillar_enc).lower()
self._pillar_enc = pillar_enc
if initial_pillar:
self.opts['pillar'] = initial_pillar

@@ -721,7 +721,7 @@ class State(object):
self.pre = {}
self.__run_num = 0
self.jid = jid
self.instance_id = str(id(self))
self.instance_id = six.text_type(id(self))
self.inject_globals = {}
self.mocked = mocked

@@ -2406,15 +2406,15 @@ class State(object):
ret = {'ret': chunk_ret}
if fire_event is True:
tag = salt.utils.event.tagify(
[self.jid, self.opts['id'], str(chunk_ret['name'])], 'state_result'
[self.jid, self.opts['id'], six.text_type(chunk_ret['name'])], 'state_result'
)
elif isinstance(fire_event, six.string_types):
tag = salt.utils.event.tagify(
[self.jid, self.opts['id'], str(fire_event)], 'state_result'
[self.jid, self.opts['id'], six.text_type(fire_event)], 'state_result'
)
else:
tag = salt.utils.event.tagify(
[self.jid, 'prog', self.opts['id'], str(chunk_ret['__run_num__'])], 'job'
[self.jid, 'prog', self.opts['id'], six.text_type(chunk_ret['__run_num__'])], 'job'
)
ret['len'] = length
preload = {'jid': self.jid}

@@ -2580,7 +2580,7 @@ class State(object):
failed_requisites.add(key)
_cmt = 'One or more requisite failed: {0}'.format(
', '.join(str(i) for i in failed_requisites)
', '.join(six.text_type(i) for i in failed_requisites)
)
running[tag] = {
'changes': {},

@@ -2996,7 +2996,7 @@ class BaseHighState(object):
state_top_saltenv = self.opts.get('state_top_saltenv', False)
if state_top_saltenv \
and not isinstance(state_top_saltenv, six.string_types):
state_top_saltenv = str(state_top_saltenv)
state_top_saltenv = six.text_type(state_top_saltenv)
for saltenv in [state_top_saltenv] if state_top_saltenv \
else self._get_envs():

@@ -3306,7 +3306,7 @@ class BaseHighState(object):
try:
tops = self.get_tops()
except SaltRenderError as err:
log.error('Unable to render top file: ' + str(err.error))
log.error('Unable to render top file: %s', err.error)
return {}
return self.merge_tops(tops)

@@ -3850,7 +3850,7 @@ class BaseHighState(object):
top = self.get_top()
except SaltRenderError as err:
ret[tag_name]['comment'] = 'Unable to render top file: '
ret[tag_name]['comment'] += str(err.error)
ret[tag_name]['comment'] += six.text_type(err.error)
return ret
except Exception:
trb = traceback.format_exc()
@@ -232,7 +232,7 @@ To use it, one may pass it like this. Example:
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals

import os
import copy

@@ -243,7 +243,7 @@ import logging
import salt.utils.args
import salt.utils.functools
from salt.exceptions import CommandExecutionError, SaltRenderError
from salt.ext.six import string_types
from salt.ext import six

log = logging.getLogger(__name__)

@@ -317,9 +317,9 @@ def _failout(state, msg):

def _is_true(val):
if val and str(val).lower() in ('true', 'yes', '1'):
if val and six.text_type(val).lower() in ('true', 'yes', '1'):
return True
elif str(val).lower() in ('false', 'no', '0'):
elif six.text_type(val).lower() in ('false', 'no', '0'):
return False
raise ValueError('Failed parsing boolean value: {0}'.format(val))

@@ -339,7 +339,7 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
cmd_kwargs['bg'] = False
if onlyif is not None:
if isinstance(onlyif, string_types):
if isinstance(onlyif, six.string_types):
cmd = __salt__['cmd.retcode'](onlyif, ignore_retcode=True, python_shell=True, **cmd_kwargs)
log.debug('Last command return code: {0}'.format(cmd))
if cmd != 0:

@@ -354,7 +354,7 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
return {'comment': 'onlyif condition is false: {0}'.format(entry),
'skip_watch': True,
'result': True}
elif not isinstance(onlyif, string_types):
elif not isinstance(onlyif, six.string_types):
if not onlyif:
log.debug('Command not run: onlyif did not evaluate to string_type')
return {'comment': 'onlyif condition is false',

@@ -362,7 +362,7 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
'result': True}
if unless is not None:
if isinstance(unless, string_types):
if isinstance(unless, six.string_types):
cmd = __salt__['cmd.retcode'](unless, ignore_retcode=True, python_shell=True, **cmd_kwargs)
log.debug('Last command return code: {0}'.format(cmd))
if cmd == 0:

@@ -378,14 +378,14 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
return {'comment': 'unless condition is true',
'skip_watch': True,
'result': True}
elif not isinstance(unless, string_types):
elif not isinstance(unless, six.string_types):
if unless:
log.debug('Command not run: unless did not evaluate to string_type')
return {'comment': 'unless condition is true',
'skip_watch': True,
'result': True}
if isinstance(creates, string_types) and os.path.exists(creates):
if isinstance(creates, six.string_types) and os.path.exists(creates):
return {'comment': '{0} exists'.format(creates),
'result': True}
elif isinstance(creates, list) and all([

@@ -831,15 +831,15 @@ def run(name,
'result': False,
'comment': ''}
if u'quiet' in kwargs:
quiet = kwargs.pop(u'quiet')
if 'quiet' in kwargs:
quiet = kwargs.pop('quiet')
msg = (
u'The \'quiet\' argument for cmd.run has been deprecated since '
u'2014.1.0 and will be removed as of the Neon release. Please set '
u'\'output_loglevel\' to \'quiet\' instead.'
'The \'quiet\' argument for cmd.run has been deprecated since '
'2014.1.0 and will be removed as of the Neon release. Please set '
'\'output_loglevel\' to \'quiet\' instead.'
)
salt.utils.versions.warn_until(u'Neon', msg)
ret.setdefault(u'warnings', []).append(msg)
salt.utils.versions.warn_until('Neon', msg)
ret.setdefault('warnings', []).append(msg)
else:
quiet = False

@@ -893,7 +893,7 @@ def run(name,
name, timeout=timeout, python_shell=True, **cmd_kwargs
)
except CommandExecutionError as err:
ret['comment'] = str(err)
ret['comment'] = six.text_type(err)
return ret
ret['changes'] = cmd_all

@@ -1152,7 +1152,7 @@ def script(name,
try:
cmd_all = __salt__['cmd.script'](source, python_shell=True, **cmd_kwargs)
except (CommandExecutionError, SaltRenderError, IOError) as err:
ret['comment'] = str(err)
ret['comment'] = six.text_type(err)
return ret
ret['changes'] = cmd_all

@@ -1212,7 +1212,7 @@ def call(name,
'name': name
'changes': {'retval': result},
'result': True if result is None else bool(result),
'comment': result if isinstance(result, string_types) else ''
'comment': result if isinstance(result, six.string_types) else ''
}
'''
ret = {'name': name,

@@ -1244,7 +1244,7 @@ def call(name,
# result must be JSON serializable else we get an error
ret['changes'] = {'retval': result}
ret['result'] = True if result is None else bool(result)
if isinstance(result, string_types):
if isinstance(result, six.string_types):
ret['comment'] = result
return ret
@@ -265,7 +265,7 @@ For example:
'''
# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import difflib
import itertools
import logging

@@ -942,10 +942,10 @@ def _check_touch(name, atime, mtime):
return None, 'File {0} is set to be created'.format(name)
stats = __salt__['file.stats'](name, follow_symlinks=False)
if atime is not None:
if str(atime) != str(stats['atime']):
if six.text_type(atime) != six.text_type(stats['atime']):
return None, 'Times set to be updated on file {0}'.format(name)
if mtime is not None:
if str(mtime) != str(stats['mtime']):
if six.text_type(mtime) != six.text_type(stats['mtime']):
return None, 'Times set to be updated on file {0}'.format(name)
return True, 'File {0} exists and has the correct times'.format(name)

@@ -1130,9 +1130,9 @@ def _validate_str_list(arg):
if isinstance(item, six.string_types):
ret.append(item)
else:
ret.append(str(item))
ret.append(six.text_type(item))
else:
ret = [str(arg)]
ret = [six.text_type(arg)]
return ret

@@ -1985,11 +1985,10 @@ def managed(name,
<salt.modules.grains.get>` when retrieving the contents.
encoding
Encoding used for the file, e.g. ```UTF-8```, ```base64```.
Default is None, which means str() will be applied to contents to
ensure an ascii encoded file and backwards compatibility.
See https://docs.python.org/3/library/codecs.html#standard-encodings
for available encodings.
If specified, then the specified encoding will be used. Otherwise, the
file will be encoded using the system locale (usually UTF-8). See
https://docs.python.org/3/library/codecs.html#standard-encodings for
the list of available encodings.

.. versionadded:: 2017.7.0

@@ -2611,7 +2610,7 @@ def _depth_limited_walk(top, max_depth=None):
rel_depth = root.count(os.path.sep) - top.count(os.path.sep)
if rel_depth >= max_depth:
del dirs[:]
yield (str(root), list(dirs), list(files))
yield (six.text_type(root), list(dirs), list(files))

def directory(name,

@@ -4361,7 +4360,7 @@ def blockreplace(
text = tmpret['data']
for index, item in enumerate(text):
content += str(item)
content += six.text_type(item)
changes = __salt__['file.blockreplace'](
name,

@@ -5719,11 +5718,10 @@ def serialize(name,
modules <salt.serializers>` for supported output formats.
encoding
Encoding used for the file, e.g. ```UTF-8```, ```base64```.
Default is None, which means str() will be applied to contents to
ensure an ascii encoded file.
See https://docs.python.org/3/library/codecs.html#standard-encodings
for available encodings.
If specified, then the specified encoding will be used. Otherwise, the
file will be encoded using the system locale (usually UTF-8). See
https://docs.python.org/3/library/codecs.html#standard-encodings for
the list of available encodings.

.. versionadded:: 2017.7.0
@@ -9,7 +9,7 @@ consult the dev team if you are unsure where a new function should go.
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals

# Import Salt libs
from salt.defaults import DEFAULT_TARGET_DELIM

@@ -1530,11 +1530,13 @@ def decode_dict(data):
salt.utils.versions.warn_until(
'Neon',
'Use of \'salt.utils.decode_dict\' detected. This function '
'has been moved to \'salt.utils.data.decode_dict\' as of '
'Salt Oxygen. This warning will be removed in Salt Neon.',
'has been moved to \'salt.utils.data.encode_dict\' as of '
'Salt Oxygen. Note that the new name is "encode_dict", as this '
'function was erroneously named when initially added. This '
'warning will be removed in Salt Neon.',
stacklevel=3
)
return salt.utils.data.decode_dict(data)
return salt.utils.data.encode_dict(data)

def decode_list(data):

@@ -1544,11 +1546,13 @@ def decode_list(data):
salt.utils.versions.warn_until(
'Neon',
'Use of \'salt.utils.decode_list\' detected. This function '
'has been moved to \'salt.utils.data.decode_list\' as of '
'Salt Oxygen. This warning will be removed in Salt Neon.',
'has been moved to \'salt.utils.data.encode_list\' as of '
'Salt Oxygen. Note that the new name is "encode_list", as this '
'function was erroneously named when initially added. This '
'warning will be removed in Salt Neon.',
stacklevel=3
)
return salt.utils.data.decode_list(data)
return salt.utils.data.encode_list(data)

def exactly_n(l, n=1):
@@ -4,7 +4,7 @@ Functions for manipulating, inspecting, or otherwise working with data types
and data structures.
'''
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals

# Import Python libs
import collections

@@ -67,17 +67,16 @@ def compare_lists(old=None, new=None):
return ret

@jinja_filter('json_decode_dict')
def decode_dict(data):
'''
JSON decodes as unicode, Jinja needs bytes...
Decode all values to Unicode
'''
rv = {}
for key, value in six.iteritems(data):
if isinstance(key, six.text_type) and six.PY2:
key = key.encode('utf-8')
if isinstance(value, six.text_type) and six.PY2:
value = value.encode('utf-8')
if six.PY2 and isinstance(key, str):
key = key.decode(__salt_system_encoding__)
if six.PY2 and isinstance(value, str):
value = value.decode(__salt_system_encoding__)
elif isinstance(value, list):
value = decode_list(value)
elif isinstance(value, dict):

@@ -86,15 +85,14 @@ def decode_dict(data):
return rv

@jinja_filter('json_decode_list')
def decode_list(data):
'''
JSON decodes as unicode, Jinja needs bytes...
Decode all values to Unicode
'''
rv = []
for item in data:
if isinstance(item, six.text_type) and six.PY2:
item = item.encode('utf-8')
if six.PY2 and isinstance(item, six.text_type):
item = item.decode(__salt_system_encoding__)
elif isinstance(item, list):
item = decode_list(item)
elif isinstance(item, dict):

@@ -103,6 +101,44 @@ def decode_list(data):
return rv

@jinja_filter('json_decode_dict')  # Remove this for Neon
@jinja_filter('json_encode_dict')
def encode_dict(data):
'''
Encode all values to bytes
'''
rv = {}
for key, value in six.iteritems(data):
if six.PY2 and isinstance(key, six.text_type):
key = key.encode(__salt_system_encoding__)
if six.PY2 and isinstance(value, six.text_type):
value = value.encode(__salt_system_encoding__)
elif isinstance(value, list):
value = encode_list(value)
elif isinstance(value, dict):
value = encode_dict(value)
rv[key] = value
return rv

@jinja_filter('json_decode_list')  # Remove this for Neon
@jinja_filter('json_encode_list')
def encode_list(data):
'''
Encode all values to bytes
'''
rv = []
for item in data:
if isinstance(item, six.text_type) and six.PY2:
item = item.encode(__salt_system_encoding__)
elif isinstance(item, list):
item = encode_list(item)
elif isinstance(item, dict):
item = encode_dict(item)
rv.append(item)
return rv

@jinja_filter('exactly_n_true')
def exactly_n(l, n=1):
'''
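For illustration, a hedged usage example of the new helper under Python 2 (on Python 3 the ``six.PY2`` branches are no-ops, so text values pass through unchanged); the input mirrors the pillar example from the documentation above and the shown output assumes a UTF-8 system encoding:

.. code-block:: python

    from salt.utils.data import encode_dict

    # On Python 2, unicode keys/values are encoded to byte strings, recursing
    # into nested lists and dicts:
    encode_dict({u'a': u'\u0414', u'nested': [u'b']})
    # -> {'a': '\xd0\x94', 'nested': ['b']}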
@ -138,8 +174,10 @@ def filter_by(lookup_dict,
|
||||
# lookup_dict keys
|
||||
for each in val if isinstance(val, list) else [val]:
|
||||
for key in lookup_dict:
|
||||
test_key = key if isinstance(key, six.string_types) else str(key)
|
||||
test_each = each if isinstance(each, six.string_types) else str(each)
|
||||
test_key = key if isinstance(key, six.string_types) \
|
||||
else six.text_type(key)
|
||||
test_each = each if isinstance(each, six.string_types) \
|
||||
else six.text_type(each)
|
||||
if fnmatch.fnmatchcase(test_each, test_key):
|
||||
ret = lookup_dict[key]
|
||||
break
|
||||
@ -246,14 +284,14 @@ def subdict_match(data,
|
||||
def _match(target, pattern, regex_match=False, exact_match=False):
|
||||
if regex_match:
|
||||
try:
|
||||
return re.match(pattern.lower(), str(target).lower())
|
||||
return re.match(pattern.lower(), six.text_type(target).lower())
|
||||
except Exception:
|
||||
log.error('Invalid regex \'{0}\' in match'.format(pattern))
|
||||
return False
|
||||
elif exact_match:
|
||||
return str(target).lower() == pattern.lower()
|
||||
return six.text_type(target).lower() == pattern.lower()
|
||||
else:
|
||||
return fnmatch.fnmatch(str(target).lower(), pattern.lower())
|
||||
return fnmatch.fnmatch(six.text_type(target).lower(), pattern.lower())
|
||||
|
||||
def _dict_match(target, pattern, regex_match=False, exact_match=False):
|
||||
wildcard = pattern.startswith('*:')
|
||||
@ -497,7 +535,7 @@ def is_true(value=None):
|
||||
if isinstance(value, (six.integer_types, float)):
|
||||
return value > 0
|
||||
elif isinstance(value, six.string_types):
|
||||
return str(value).lower() == 'true'
|
||||
return six.text_type(value).lower() == 'true'
|
||||
else:
|
||||
return bool(value)
|
||||
|
||||
|
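For readers following the filter rename above, here is a minimal, self-contained sketch (not Salt's actual code) of the behaviour the new json_encode_list/json_encode_dict filters implement: on Python 2 every text element is recursively encoded to the system encoding, while on Python 3 values pass through unchanged. 'utf-8' stands in for __salt_system_encoding__ here.

import sys

PY2 = sys.version_info[0] == 2
TEXT_TYPE = unicode if PY2 else str  # same role as six.text_type  # noqa: F821


def encode_list_sketch(data, encoding='utf-8'):
    # Recursively encode text items to bytes on Python 2 only.
    out = []
    for item in data:
        if PY2 and isinstance(item, TEXT_TYPE):
            item = item.encode(encoding)
        elif isinstance(item, list):
            item = encode_list_sketch(item, encoding)
        elif isinstance(item, dict):
            item = encode_dict_sketch(item, encoding)
        out.append(item)
    return out


def encode_dict_sketch(data, encoding='utf-8'):
    # Recursively encode text keys and values to bytes on Python 2 only.
    out = {}
    for key, value in data.items():
        if PY2 and isinstance(key, TEXT_TYPE):
            key = key.encode(encoding)
        if PY2 and isinstance(value, TEXT_TYPE):
            value = value.encode(encoding)
        elif isinstance(value, list):
            value = encode_list_sketch(value, encoding)
        elif isinstance(value, dict):
            value = encode_dict_sketch(value, encoding)
        out[key] = value
    return out


# On Python 2 this prints {'a': '\xd0\x94'}; on Python 3 the dict is unchanged.
print(encode_dict_sketch({u'a': u'\u0414'}))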
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import os
import sys
import textwrap
@ -267,52 +267,52 @@ class CMDModuleTest(ModuleCase):
        '''
        Test the hide_output argument
        '''
        ls_command = [u'ls', u'/'] \
        ls_command = ['ls', '/'] \
            if not salt.utils.platform.is_windows() \
            else [u'dir', u'c:\\']
            else ['dir', 'c:\\']

        error_command = ['thiscommanddoesnotexist']

        # cmd.run
        out = self.run_function(
            u'cmd.run',
            'cmd.run',
            ls_command,
            hide_output=True)
        self.assertEqual(out, u'')
        self.assertEqual(out, '')

        # cmd.shell
        out = self.run_function(
            u'cmd.shell',
            'cmd.shell',
            ls_command,
            hide_output=True)
        self.assertEqual(out, u'')
        self.assertEqual(out, '')

        # cmd.run_stdout
        out = self.run_function(
            u'cmd.run_stdout',
            'cmd.run_stdout',
            ls_command,
            hide_output=True)
        self.assertEqual(out, u'')
        self.assertEqual(out, '')

        # cmd.run_stderr
        out = self.run_function(
            u'cmd.shell',
            'cmd.shell',
            error_command,
            hide_output=True)
        self.assertEqual(out, u'')
        self.assertEqual(out, '')

        # cmd.run_all (command should have produced stdout)
        out = self.run_function(
            u'cmd.run_all',
            'cmd.run_all',
            ls_command,
            hide_output=True)
        self.assertEqual(out['stdout'], u'')
        self.assertEqual(out['stderr'], u'')
        self.assertEqual(out['stdout'], '')
        self.assertEqual(out['stderr'], '')

        # cmd.run_all (command should have produced stderr)
        out = self.run_function(
            u'cmd.run_all',
            'cmd.run_all',
            error_command,
            hide_output=True)
        self.assertEqual(out['stdout'], u'')
        self.assertEqual(out['stderr'], u'')
        self.assertEqual(out['stdout'], '')
        self.assertEqual(out['stderr'], '')
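The u'' prefixes dropped throughout the test above are redundant once unicode_literals is in effect. A quick standalone check (plain Python, no Salt required):

from __future__ import unicode_literals

# With unicode_literals every bare literal is already text, so the u'' prefix
# adds nothing; both spellings compare equal and share the same type.
assert 'cmd.run' == u'cmd.run'
assert type('cmd.run') is type(u'cmd.run')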
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import getpass
import os
import shutil
@ -47,7 +47,7 @@ class FileModuleTest(ModuleCase):
    def setUp(self):
        self.myfile = os.path.join(TMP, 'myfile')
        with salt.utils.files.fopen(self.myfile, 'w+') as fp:
            fp.write('Hello' + os.linesep)
            fp.write(salt.utils.stringutils.to_str('Hello' + os.linesep))
        self.mydir = os.path.join(TMP, 'mydir/isawesome')
        if not os.path.isdir(self.mydir):
            # left behind... Don't fail because of this!
@ -146,18 +146,20 @@ class FileModuleTest(ModuleCase):
            FILES, 'file', 'base', 'hello.patch')
        src_file = os.path.join(TMP, 'src.txt')
        with salt.utils.files.fopen(src_file, 'w+') as fp:
            fp.write('Hello\n')
            fp.write(salt.utils.stringutils.to_str('Hello\n'))

        # dry-run should not modify src_file
        ret = self.minion_run('file.patch', src_file, src_patch, dry_run=True)
        assert ret['retcode'] == 0, repr(ret)
        with salt.utils.files.fopen(src_file) as fp:
            self.assertEqual(fp.read(), 'Hello\n')
            self.assertEqual(
                salt.utils.stringutils.to_unicode(fp.read()), 'Hello\n')

        ret = self.minion_run('file.patch', src_file, src_patch)
        assert ret['retcode'] == 0, repr(ret)
        with salt.utils.files.fopen(src_file) as fp:
            self.assertEqual(fp.read(), 'Hello world\n')
            self.assertEqual(
                salt.utils.stringutils.to_unicode(fp.read()), 'Hello world\n')

    def test_remove_file(self):
        ret = self.run_function('file.remove', arg=[self.myfile])
@ -3,7 +3,7 @@
Tests for the file state
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import errno
import os
import textwrap
@ -99,11 +99,11 @@ class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin):
        '''
        state_key = 'cmd_|-{0}_|-{0}_|-run'.format(self.test_tmp_path)
        with salt.utils.files.fopen(self.state_file, 'w') as fb_:
            fb_.write(textwrap.dedent('''
            fb_.write(salt.utils.stringutils.to_str(textwrap.dedent('''
                {0}:
                  cmd.run:
                    - unless: echo cheese > {1}
                '''.format(self.test_tmp_path, self.test_file)))
                '''.format(self.test_tmp_path, self.test_file))))

        ret = self.run_function('state.sls', [self.state_name])
        self.assertTrue(ret[state_key]['result'])
@ -130,11 +130,11 @@ class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin):
        '''
        state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file)
        with salt.utils.files.fopen(self.state_file, 'w') as fb_:
            fb_.write(textwrap.dedent('''
            fb_.write(salt.utils.stringutils.to_str(textwrap.dedent('''
                echo >> {0}:
                  cmd.run:
                    - creates: {0}
                '''.format(self.test_file)))
                '''.format(self.test_file))))

        ret = self.run_function('state.sls', [self.state_name])
        self.assertTrue(ret[state_key]['result'])
@ -147,11 +147,11 @@ class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin):
        os.remove(self.test_file)
        state_key = 'cmd_|-echo >> {0}_|-echo >> {0}_|-run'.format(self.test_file)
        with salt.utils.files.fopen(self.state_file, 'w') as fb_:
            fb_.write(textwrap.dedent('''
            fb_.write(salt.utils.stringutils.to_str(textwrap.dedent('''
                echo >> {0}:
                  cmd.run:
                    - creates: {0}
                '''.format(self.test_file)))
                '''.format(self.test_file))))

        ret = self.run_function('state.sls', [self.state_name])
        self.assertTrue(ret[state_key]['result'])
@ -163,10 +163,10 @@ class CMDRunRedirectTest(ModuleCase, SaltReturnAssertsMixin):
        '''
        state_key = 'cmd_|-echo test > {0}_|-echo test > {0}_|-run'.format(self.test_file)
        with salt.utils.files.fopen(self.state_file, 'w') as fb_:
            fb_.write(textwrap.dedent('''
            fb_.write(salt.utils.stringutils.to_str(textwrap.dedent('''
                echo test > {0}:
                  cmd.run
                '''.format(self.test_file)))
                '''.format(self.test_file))))

        ret = self.run_function('state.sls', [self.state_name])
        self.assertTrue(ret[state_key]['result'])
@ -194,7 +194,7 @@ class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin):
        biscuits_key = 'cmd_|-biscuits_|-echo biscuits_|-wait'

        with salt.utils.files.fopen(self.state_file, 'w') as fb_:
            fb_.write(textwrap.dedent('''
            fb_.write(salt.utils.stringutils.to_str(textwrap.dedent('''
                saltines:
                  cmd.run:
                    - name: echo changed=true
@ -207,7 +207,7 @@ class CMDRunWatchTest(ModuleCase, SaltReturnAssertsMixin):
                    - cwd: /
                    - watch:
                      - cmd: saltines
                '''))
                ''')))

        ret = self.run_function('state.sls', [self.state_name])
        self.assertTrue(ret[saltines_key]['result'])
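The salt.utils.stringutils.to_str() wrapping added above exists so the SLS text handed to the file writes is always the interpreter's native str type. A simplified sketch of that conversion, given here only for illustration and not as Salt's implementation:

from __future__ import unicode_literals
import sys


def to_str_sketch(value, encoding='utf-8'):
    # Return the native 'str' type of the running interpreter.
    if isinstance(value, str):
        return value
    if sys.version_info[0] == 2:
        return value.encode(encoding)   # unicode -> bytes (Python 2 str)
    return value.decode(encoding)       # bytes -> text (Python 3 str)


# Under unicode_literals this literal is text, but the result is always str.
assert isinstance(to_str_sketch('saltines:\n  cmd.run:\n'), str)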
@ -5,7 +5,7 @@ Tests for the file state
'''

# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import errno
import glob
import logging
@ -2134,65 +2134,61 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
        for filename in glob.glob('{0}.bak*'.format(testcase_filedest)):
            os.unlink(filename)

    @skipIf(six.PY3, 'This test will have a LOT of rewriting to support both Py2 and Py3')
    # And I'm more comfortable with the author doing it - s0undt3ch
    @skipIf(IS_WINDOWS, 'Don\'t know how to fix for Windows')
    @skipIf(True, 'Skipped until unicode codebase conversion is completed')
    def test_issue_8947_utf8_sls(self):
        '''
        Test some file operation with utf-8 characters on the sls

        This is more generic than just a file test. Feel free to move
        '''
        korean_1 = u'한국어 시험'
        korean_2 = u'첫 번째 행'
        korean_3 = u'마지막 행'
        korean_1 = '한국어 시험'
        korean_2 = '첫 번째 행'
        korean_3 = '마지막 행'
        test_file = os.path.join(
            TMP,
            u'salt_utf8_tests',
            u'{0}.txt'.format(korean_1)
            'salt_utf8_tests',
            '{0}.txt'.format(korean_1)
        )
        test_file_encoded = salt.utils.stringutils.to_str(test_file)
        template_path = os.path.join(TMP_STATE_TREE, 'issue-8947.sls')
        # create the sls template
        template_lines = [
            u'# -*- coding: utf-8 -*-',
            u'some-utf8-file-create:',
            u' file.managed:',
            u" - name: '{0}'".format(test_file),
            u" - contents: {0}".format(korean_1),
            u' - makedirs: True',
            u' - replace: True',
            u' - show_diff: True',
            u'some-utf8-file-create2:',
            u' file.managed:',
            u" - name: '{0}'".format(test_file),
            u' - contents: |',
            u' {0}'.format(korean_2),
            u' {0}'.format(korean_1),
            u' {0}'.format(korean_3),
            u' - replace: True',
            u' - show_diff: True',
            u'some-utf8-file-exists:',
            u' file.exists:',
            u" - name: '{0}'".format(test_file),
            u' - require:',
            u' - file: some-utf8-file-create2',
            u'some-utf8-file-content-test:',
            u' cmd.run:',
            u' - name: \'cat "{0}"\''.format(test_file),
            u' - require:',
            u' - file: some-utf8-file-exists',
            u'some-utf8-file-content-remove:',
            u' cmd.run:',
            u' - name: \'rm -f "{0}"\''.format(test_file),
            u' - require:',
            u' - cmd: some-utf8-file-content-test',
            u'some-utf8-file-removed:',
            u' file.missing:',
            u" - name: '{0}'".format(test_file),
            u' - require:',
            u' - cmd: some-utf8-file-content-remove',
            '# -*- coding: utf-8 -*-',
            'some-utf8-file-create:',
            ' file.managed:',
            " - name: '{0}'".format(test_file),
            " - contents: {0}".format(korean_1),
            ' - makedirs: True',
            ' - replace: True',
            ' - show_diff: True',
            'some-utf8-file-create2:',
            ' file.managed:',
            " - name: '{0}'".format(test_file),
            ' - contents: |',
            ' {0}'.format(korean_2),
            ' {0}'.format(korean_1),
            ' {0}'.format(korean_3),
            ' - replace: True',
            ' - show_diff: True',
            'some-utf8-file-exists:',
            ' file.exists:',
            " - name: '{0}'".format(test_file),
            ' - require:',
            ' - file: some-utf8-file-create2',
            'some-utf8-file-content-test:',
            ' cmd.run:',
            ' - name: \'cat "{0}"\''.format(test_file),
            ' - require:',
            ' - file: some-utf8-file-exists',
            'some-utf8-file-content-remove:',
            ' cmd.run:',
            ' - name: \'rm -f "{0}"\''.format(test_file),
            ' - require:',
            ' - cmd: some-utf8-file-content-test',
            'some-utf8-file-removed:',
            ' file.missing:',
            " - name: '{0}'".format(test_file),
            ' - require:',
            ' - cmd: some-utf8-file-content-remove',
        ]
        with salt.utils.files.fopen(template_path, 'wb') as fp_:
            fp_.write(
@ -2209,57 +2205,59 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
            )
        # difflib produces different output on python 2.6 than on >=2.7
        if sys.version_info < (2, 7):
            utf_diff = '--- \n+++ \n@@ -1,1 +1,3 @@\n'
            diff = '--- \n+++ \n@@ -1,1 +1,3 @@\n'
        else:
            utf_diff = '--- \n+++ \n@@ -1 +1,3 @@\n'
        #utf_diff += '+\xec\xb2\xab \xeb\xb2\x88\xec\xa7\xb8 \xed\x96\x89\n \xed\x95\x9c\xea\xb5\xad\xec\x96\xb4 \xec\x8b\x9c\xed\x97\x98\n+\xeb\xa7\x88\xec\xa7\x80\xeb\xa7\x89 \xed\x96\x89\n'
        utf_diff += salt.utils.stringutils.to_str(
            u'+첫 번째 행\n'
            u' 한국어 시험\n'
            u'+마지막 행\n'
            diff = '--- \n+++ \n@@ -1 +1,3 @@\n'
        diff += (
            '+첫 번째 행\n'
            ' 한국어 시험\n'
            '+마지막 행\n'
        )
        diff = salt.utils.stringutils.to_str(diff)
        # using unicode.encode('utf-8') we should get the same as
        # an utf-8 string
        # future_lint: disable=blacklisted-function
        expected = {
            'file_|-some-utf8-file-create_|-{0}_|-managed'.format(test_file_encoded): {
                'name': '{0}'.format(test_file_encoded),
            str('file_|-some-utf8-file-create_|-{0}_|-managed').format(test_file_encoded): {
                'name': test_file_encoded,
                '__run_num__': 0,
                'comment': 'File {0} updated'.format(test_file_encoded),
                'comment': str('File {0} updated').format(test_file_encoded),
                'diff': 'New file'
            },
            'file_|-some-utf8-file-create2_|-{0}_|-managed'.format(test_file_encoded): {
                'name': '{0}'.format(test_file_encoded),
            str('file_|-some-utf8-file-create2_|-{0}_|-managed').format(test_file_encoded): {
                'name': test_file_encoded,
                '__run_num__': 1,
                'comment': 'File {0} updated'.format(test_file_encoded),
                'diff': utf_diff
                'comment': str('File {0} updated').format(test_file_encoded),
                'diff': diff
            },
            'file_|-some-utf8-file-exists_|-{0}_|-exists'.format(test_file_encoded): {
                'name': '{0}'.format(test_file_encoded),
            str('file_|-some-utf8-file-exists_|-{0}_|-exists').format(test_file_encoded): {
                'name': test_file_encoded,
                '__run_num__': 2,
                'comment': 'Path {0} exists'.format(test_file_encoded)
                'comment': str('Path {0} exists').format(test_file_encoded)
            },
            'cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run'.format(test_file_encoded): {
                'name': 'cat "{0}"'.format(test_file_encoded),
            str('cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run').format(test_file_encoded): {
                'name': str('cat "{0}"').format(test_file_encoded),
                '__run_num__': 3,
                'comment': 'Command "cat "{0}"" run'.format(test_file_encoded),
                'stdout': '{0}\n{1}\n{2}'.format(
                'comment': str('Command "cat "{0}"" run').format(test_file_encoded),
                'stdout': str('{0}\n{1}\n{2}').format(
                    salt.utils.stringutils.to_str(korean_2),
                    salt.utils.stringutils.to_str(korean_1),
                    salt.utils.stringutils.to_str(korean_3),
                )
            },
            'cmd_|-some-utf8-file-content-remove_|-rm -f "{0}"_|-run'.format(test_file_encoded): {
                'name': 'rm -f "{0}"'.format(test_file_encoded),
            str('cmd_|-some-utf8-file-content-remove_|-rm -f "{0}"_|-run').format(test_file_encoded): {
                'name': str('rm -f "{0}"').format(test_file_encoded),
                '__run_num__': 4,
                'comment': 'Command "rm -f "{0}"" run'.format(test_file_encoded),
                'comment': str('Command "rm -f "{0}"" run').format(test_file_encoded),
                'stdout': ''
            },
            'file_|-some-utf8-file-removed_|-{0}_|-missing'.format(test_file_encoded): {
                'name': '{0}'.format(test_file_encoded),
            str('file_|-some-utf8-file-removed_|-{0}_|-missing').format(test_file_encoded): {
                'name': test_file_encoded,
                '__run_num__': 5,
                'comment': 'Path {0} is missing'.format(test_file_encoded),
                'comment': str('Path {0} is missing').format(test_file_encoded),
            }
        }
        # future_lint: enable=blacklisted-function
        result = {}
        for name, step in six.iteritems(ret):
            self.assertSaltTrueReturn({name: step})
@ -2277,12 +2275,12 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
        self.maxDiff = None

        self.assertEqual(expected, result)
        cat_id = 'cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run'.format(test_file_encoded)
        # future_lint: disable=blacklisted-function
        cat_id = str('cmd_|-some-utf8-file-content-test_|-cat "{0}"_|-run').format(test_file_encoded)
        # future_lint: enable=blacklisted-function
        self.assertEqual(
            result[cat_id]['stdout'],
            salt.utils.stringutils.to_str(
                korean_2 + '\n' + korean_1 + '\n' + korean_3
            )
            salt.utils.stringutils.to_unicode(result[cat_id]['stdout']),
            korean_2 + '\n' + korean_1 + '\n' + korean_3
        )
        finally:
            if os.path.isdir(test_file):
@ -4,7 +4,7 @@
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import os
import sys
import tempfile
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import os
import shutil
import tempfile
@ -28,6 +28,7 @@ import salt.modules.file as filemod
import salt.modules.config as configmod
import salt.modules.cmdmod as cmdmod
from salt.exceptions import CommandExecutionError
from salt.ext import six

SED_CONTENT = '''test
some
@ -88,7 +89,10 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
        filemod.replace(self.tfile.name, r'Etiam', 'Salticus', backup=False)

        with salt.utils.files.fopen(self.tfile.name, 'r') as fp:
            self.assertIn('Salticus', fp.read())
            self.assertIn(
                'Salticus',
                salt.utils.stringutils.to_unicode(fp.read())
            )

    def test_replace_append_if_not_found(self):
        '''
@ -108,7 +112,8 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
        filemod.replace(tfile.name, **args)
        expected = os.linesep.join([base, 'baz=\\g<value>']) + os.linesep
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)
        os.remove(tfile.name)

        # File not ending with a newline, no match
@ -117,7 +122,8 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
            tfile.flush()
        filemod.replace(tfile.name, **args)
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)
        os.remove(tfile.name)

        # A newline should not be added in empty files
@ -126,7 +132,8 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
        filemod.replace(tfile.name, **args)
        expected = args['repl'] + os.linesep
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)
        os.remove(tfile.name)

        # Using not_found_content, rather than repl
@ -137,7 +144,8 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
        expected = os.linesep.join([base, 'baz=3']) + os.linesep
        filemod.replace(tfile.name, **args)
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)
        os.remove(tfile.name)

        # not appending if matches
@ -148,7 +156,8 @@ class FileReplaceTestCase(TestCase, LoaderModuleMockMixin):
        expected = base
        filemod.replace(tfile.name, **args)
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)

    def test_backup(self):
        fext = '.bak'
@ -303,9 +312,10 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin):
            backup=False
        )
        with salt.utils.files.fopen(self.tfile.name, 'r') as fp:
            self.assertNotIn('#-- START BLOCK 2'
                             + "\n" + new_content
                             + '#-- END BLOCK 2', fp.read())
            self.assertNotIn(
                '#-- START BLOCK 2' + "\n" + new_content + '#-- END BLOCK 2',
                salt.utils.stringutils.to_unicode(fp.read())
            )

        filemod.blockreplace(self.tfile.name,
                             '#-- START BLOCK 2',
@ -341,7 +351,8 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin):
        filemod.blockreplace(tfile.name, **args)
        expected = os.linesep.join([base, block])
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)
        os.remove(tfile.name)

        # File not ending with a newline
@ -350,7 +361,8 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin):
            tfile.flush()
        filemod.blockreplace(tfile.name, **args)
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)
        os.remove(tfile.name)

        # A newline should not be added in empty files
@ -358,7 +370,8 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin):
            pass
        filemod.blockreplace(tfile.name, **args)
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), block)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), block)
        os.remove(tfile.name)

    def test_replace_prepend(self):
@ -402,7 +415,7 @@ class FileBlockReplaceTestCase(TestCase, LoaderModuleMockMixin):
                             backup=False)

        with salt.utils.files.fopen(self.tfile.name, 'r') as fp:
            filecontent = fp.read()
            filecontent = salt.utils.stringutils.to_unicode(fp.read())
        self.assertIn('new content 1', filecontent)
        self.assertNotIn('to be removed', filecontent)
        self.assertIn('first part of start line', filecontent)
@ -545,7 +558,7 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
        with salt.utils.files.fopen(path, 'r') as newfile:
            self.assertEqual(
                SED_CONTENT.replace(before, ''),
                newfile.read()
                salt.utils.stringutils.to_unicode(newfile.read())
            )

    def test_append_newline_at_eof(self):
@ -560,7 +573,8 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
        filemod.append(tfile.name, 'bar')
        expected = os.linesep.join(['foo', 'bar']) + os.linesep
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)

        # File not ending with a newline
        with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile:
@ -568,13 +582,17 @@ class FileModuleTestCase(TestCase, LoaderModuleMockMixin):
            tfile.flush()
        filemod.append(tfile.name, 'bar')
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), expected)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()), expected)

        # A newline should be added in empty files
        with tempfile.NamedTemporaryFile(mode='w+b', delete=False) as tfile:
            filemod.append(tfile.name, 'bar')
            filemod.append(tfile.name, salt.utils.stringutils.to_bytes('bar'))
        with salt.utils.files.fopen(tfile.name) as tfile2:
            self.assertEqual(tfile2.read(), 'bar' + os.linesep)
            self.assertEqual(
                salt.utils.stringutils.to_unicode(tfile2.read()),
                'bar' + os.linesep
            )

    def test_extract_hash(self):
        '''
@ -809,7 +827,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
        for mode, err_msg in [(None, 'How to process the file'), ('nonsense', 'Unknown mode')]:
            with pytest.raises(CommandExecutionError) as cmd_err:
                filemod.line('foo', mode=mode)
            assert err_msg in str(cmd_err)
            assert err_msg in six.text_type(cmd_err)

    @patch('os.path.realpath', MagicMock())
    @patch('os.path.isfile', MagicMock(return_value=True))
@ -821,7 +839,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
        for mode in ['insert', 'ensure', 'replace']:
            with pytest.raises(CommandExecutionError) as cmd_err:
                filemod.line('foo', mode=mode)
            assert 'Content can only be empty if mode is "delete"' in str(cmd_err)
            assert 'Content can only be empty if mode is "delete"' in six.text_type(cmd_err)

    @patch('os.path.realpath', MagicMock())
    @patch('os.path.isfile', MagicMock(return_value=True))
@ -835,7 +853,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
        with patch('salt.utils.files.fopen', files_fopen):
            with pytest.raises(CommandExecutionError) as cmd_err:
                filemod.line('foo', content='test content', mode='insert')
            assert '"location" or "before/after"' in str(cmd_err)
            assert '"location" or "before/after"' in six.text_type(cmd_err)

    def test_util_starts_till(self):
        '''
@ -1082,7 +1100,7 @@ class FilemodLineTests(TestCase, LoaderModuleMockMixin):
        with patch('salt.utils.atomicfile.atomic_open', atomic_opener):
            with pytest.raises(CommandExecutionError) as cmd_err:
                filemod.line('foo', content=cfg_content, after=_after, before=_before, mode='ensure')
            assert 'Found more than one line between boundaries "before" and "after"' in str(cmd_err)
            assert 'Found more than one line between boundaries "before" and "after"' in six.text_type(cmd_err)

    @patch('os.path.realpath', MagicMock())
    @patch('os.path.isfile', MagicMock(return_value=True))
@ -1154,7 +1172,7 @@ class FileBasicsTestCase(TestCase, LoaderModuleMockMixin):
        self.addCleanup(delattr, self, 'tfile')
        self.myfile = os.path.join(TMP, 'myfile')
        with salt.utils.files.fopen(self.myfile, 'w+') as fp:
            fp.write('Hello\n')
            fp.write(salt.utils.stringutils.to_str('Hello\n'))
        self.addCleanup(os.remove, self.myfile)
        self.addCleanup(delattr, self, 'myfile')
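A small note on the str() to six.text_type() swaps in the assertions above: under unicode_literals the expected substrings are text, so the exception info is coerced to text as well, and six.text_type is unicode on Python 2 and str on Python 3. A dependency-free sketch of the same idea:

from __future__ import unicode_literals
import sys

text_type = unicode if sys.version_info[0] == 2 else str  # what six.text_type aliases  # noqa: F821

err = ValueError('Unknown mode')
# Coercing with text_type keeps the comparison text-vs-text on both majors.
assert 'Unknown mode' in text_type(err)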
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
from textwrap import dedent

# Import Salt Testing libs
@ -337,7 +337,8 @@ class TestSerializers(TestCase):
    def test_serialize_python(self):
        data = {'foo': 'bar'}
        serialized = python.serialize(data)
        assert serialized == '{\'foo\': \'bar\'}', serialized
        expected = "{u'foo': u'bar'}" if six.PY2 else "{'foo': 'bar'}"
        assert serialized == expected, serialized

    @skipIf(not configparser.available, SKIP_MESSAGE % 'configparser')
    def test_configparser(self):
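The updated assertion above accounts for the repr difference between the two Python majors once literals are unicode. A standalone check using plain repr(), which stands in here for the Python serializer under test:

from __future__ import unicode_literals
import sys

data = {'foo': 'bar'}
# Python 2 shows the u'' prefix because the literals above are unicode.
expected = "{u'foo': u'bar'}" if sys.version_info[0] == 2 else "{'foo': 'bar'}"
assert repr(data) == expected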
@ -3,7 +3,7 @@
    :codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import os.path

# Import Salt Testing Libs
@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
from datetime import datetime
import os
import json
@ -4,7 +4,7 @@
'''

# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, unicode_literals
import copy
import os
import tempfile
@ -37,7 +37,7 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
        # function doesn't return anything. However, we do want
        # to make sure that the function doesn't stacktrace when
        # called.
        ret = {'changes': {u'Français': {'old': 'something old',
        ret = {'changes': {'Français': {'old': 'something old',
                                         'new': 'something new'}},
               'result': True}
        salt.state.format_log(ret)
@ -56,7 +56,7 @@ class StateCompilerTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
                OrderedDict([
                    ('file', OrderedDict(
                        [('test1', 'test')]))])])]),
            'installed', {'order': 10000}]), ('__sls__', u'issue_35226'), ('__env__', 'base')])}
            'installed', {'order': 10000}]), ('__sls__', 'issue_35226'), ('__env__', 'base')])}
        minion_opts = self.get_temp_config('minion')
        minion_opts['pillar'] = {'git': OrderedDict([('test1', 'test')])}
        state_obj = salt.state.State(minion_opts)
@ -496,5 +496,5 @@ class StateReturnsTestCase(TestCase):
        # Not suitable for export as is
        salt.state.State.verify_ret_for_export(ret)
        salt.state.State.munge_ret_for_export(ret)
        self.assertIsInstance(ret[u'comment'], six.string_types)
        self.assertIsInstance(ret['comment'], six.string_types)
        salt.state.State.verify_ret_for_export(ret)