Merge pull request #45361 from terminalmage/py3-fileserver

[PY3] Update fileserver for Unicode/PY3 compatibility

Commit ccefc34a79
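Nearly every hunk below replaces an eager str.format() call inside a logging statement with lazy %s-style arguments, so the message is only interpolated when the record is actually emitted and Unicode arguments are left to the logging machinery. A minimal before/after sketch of the pattern, using the standard logging module (the lock-file path is illustrative only; the real code also uses Salt's extra trace level)::

    # -*- coding: utf-8 -*-
    from __future__ import absolute_import, print_function, unicode_literals

    import logging

    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger(__name__)

    w_lock = '/var/cache/salt/master/file_lists/.base.w'  # illustrative path

    # Old style: the string is built even if the DEBUG level is disabled.
    log.debug('Lockfile {0} created'.format(w_lock))

    # New style used throughout this PR: interpolation is deferred to the
    # logging framework and only happens if the record is actually emitted.
    log.debug('Lockfile %s created', w_lock)

Both calls produce the same output; the second avoids the formatting work for suppressed levels and sidesteps implicit bytes/text coercion on Python 2.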
@@ -4,7 +4,7 @@ File server pluggable modules and generic backend functions
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import collections
import errno
import fnmatch
@@ -15,6 +15,7 @@ import time

# Import salt libs
import salt.loader
import salt.utils.data
import salt.utils.files
import salt.utils.locales
import salt.utils.path
@@ -41,7 +42,7 @@ def _unlock_cache(w_lock):
elif os.path.isfile(w_lock):
os.unlink(w_lock)
except (OSError, IOError) as exc:
log.trace('Error removing lockfile {0}: {1}'.format(w_lock, exc))
log.trace('Error removing lockfile %s: %s', w_lock, exc)


def _lock_cache(w_lock):
@@ -52,7 +53,7 @@ def _lock_cache(w_lock):
raise
return False
else:
log.trace('Lockfile {0} created'.format(w_lock))
log.trace('Lockfile %s created', w_lock)
return True


@@ -95,9 +96,10 @@ def wait_lock(lk_fn, dest, wait_timeout=0):
if timeout:
if time.time() > timeout:
raise ValueError(
'Timeout({0}s) for {1} '
'(lock: {2}) elapsed'.format(
wait_timeout, dest, lk_fn))
'Timeout({0}s) for {1} (lock: {2}) elapsed'.format(
wait_timeout, dest, lk_fn
)
)
return False


@@ -130,9 +132,11 @@ def check_file_list_cache(opts, form, list_cache, w_lock):
if age < opts.get('fileserver_list_cache_time', 20):
# Young enough! Load this sucker up!
with salt.utils.files.fopen(list_cache, 'rb') as fp_:
log.trace('Returning file_lists cache data from '
'{0}'.format(list_cache))
return serial.load(fp_).get(form, []), False, False
log.trace(
'Returning file_lists cache data from %s',
list_cache
)
return salt.utils.data.decode(serial.load(fp_).get(form, [])), False, False
elif _lock_cache(w_lock):
# Set the w_lock and go
refresh_cache = True
@@ -157,7 +161,7 @@ def write_file_list_cache(opts, data, list_cache, w_lock):
with salt.utils.files.fopen(list_cache, 'w+b') as fp_:
fp_.write(serial.dumps(data))
_unlock_cache(w_lock)
log.trace('Lockfile {0} removed'.format(w_lock))
log.trace('Lockfile %s removed', w_lock)


def check_env_cache(opts, env_cache):
@@ -168,9 +172,9 @@ def check_env_cache(opts, env_cache):
return None
try:
with salt.utils.files.fopen(env_cache, 'rb') as fp_:
log.trace('Returning env cache data from {0}'.format(env_cache))
log.trace('Returning env cache data from %s', env_cache)
serial = salt.payload.Serial(opts)
return serial.load(fp_)
return salt.utils.data.decode(serial.load(fp_))
except (IOError, OSError):
pass
return None
@@ -193,8 +197,9 @@ def generate_mtime_map(opts, path_map):
except (OSError, IOError):
# skip dangling symlinks
log.info(
'Failed to get mtime on {0}, '
'dangling symlink ?'.format(file_path))
'Failed to get mtime on %s, dangling symlink?',
file_path
)
continue
return file_map

@@ -242,10 +247,10 @@ def reap_fileserver_cache_dir(cache_base, find_func):
try:
filename, _, hash_type = file_rel_path.rsplit('.', 2)
except ValueError:
log.warning((
'Found invalid hash file [{0}] when attempting to reap'
' cache directory.'
).format(file_))
log.warning(
'Found invalid hash file [%s] when attempting to reap '
'cache directory', file_
)
continue
# do we have the file?
ret = find_func(filename, saltenv=saltenv)
@@ -265,9 +270,8 @@ def is_file_ignored(opts, fname):
for regex in opts['file_ignore_regex']:
if re.search(regex, fname):
log.debug(
'File matching file_ignore_regex. Skipping: {0}'.format(
fname
)
'File matching file_ignore_regex. Skipping: %s',
fname
)
return True

@@ -275,9 +279,8 @@ def is_file_ignored(opts, fname):
for glob in opts['file_ignore_glob']:
if fnmatch.fnmatch(fname, glob):
log.debug(
'File matching file_ignore_glob. Skipping: {0}'.format(
fname
)
'File matching file_ignore_glob. Skipping: %s',
fname
)
return True
return False
@@ -395,7 +398,7 @@ class Fileserver(object):
for fsb in back:
fstr = '{0}.clear_cache'.format(fsb)
if fstr in self.servers:
log.debug('Clearing {0} fileserver cache'.format(fsb))
log.debug('Clearing %s fileserver cache', fsb)
failed = self.servers[fstr]()
if failed:
errors.extend(failed)
@@ -468,7 +471,7 @@ class Fileserver(object):
for fsb in back:
fstr = '{0}.update'.format(fsb)
if fstr in self.servers:
log.debug('Updating {0} fileserver cache'.format(fsb))
log.debug('Updating %s fileserver cache', fsb)
self.servers[fstr]()

def update_intervals(self, back=None):
@@ -543,6 +546,8 @@ class Fileserver(object):
Find the path and return the fnd structure, this structure is passed
to other backend interfaces.
'''
path = salt.utils.locales.sdecode(path)
saltenv = salt.utils.locales.sdecode(saltenv)
back = self.backends(back)
kwargs = {}
fnd = {'path': '',
@@ -663,7 +668,7 @@ class Fileserver(object):
try:
saltenv = [x.strip() for x in saltenv.split(',')]
except AttributeError:
saltenv = [x.strip() for x in str(saltenv).split(',')]
saltenv = [x.strip() for x in six.text_type(saltenv).split(',')]

for idx, val in enumerate(saltenv):
if not isinstance(val, six.string_types):
@@ -858,7 +863,7 @@ class FSChan(object):
Emulate the channel send method, the tries and timeout are not used
'''
if 'cmd' not in load:
log.error('Malformed request, no cmd: {0}'.format(load))
log.error('Malformed request, no cmd: %s', load)
return {}
cmd = load['cmd'].lstrip('_')
if cmd in self.cmd_stub:
@@ -866,6 +871,6 @@ class FSChan(object):
if cmd == 'file_envs':
return self.fs.envs()
if not hasattr(self.fs, cmd):
log.error('Malformed request, invalid cmd: {0}'.format(load))
log.error('Malformed request, invalid cmd: %s', load)
return {}
return getattr(self.fs, cmd)(load)
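With unicode_literals in effect, str() still yields bytes on Python 2, which is why the saltenv normalization above switches from str(saltenv) to six.text_type(saltenv). A small illustration of the fallback, with a hypothetical non-string saltenv value (not taken from the diff)::

    from __future__ import absolute_import, print_function, unicode_literals

    import six

    saltenv = 2018  # hypothetical non-string value passed in by a caller

    try:
        # Works when saltenv is already a comma-separated string.
        envs = [x.strip() for x in saltenv.split(',')]
    except AttributeError:
        # six.text_type is unicode on PY2 and str on PY3, so this fallback
        # always produces text rather than bytes.
        envs = [x.strip() for x in six.text_type(saltenv).split(',')]

    print(envs)  # ['2018']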
@@ -47,7 +47,7 @@ permissions.
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import base64
import logging
import os
@@ -60,6 +60,7 @@ import salt.utils.gzip_util
import salt.utils.hashutils
import salt.utils.json
import salt.utils.path
import salt.utils.stringutils
from salt.utils.versions import LooseVersion

try:
@@ -153,10 +154,8 @@ def serve_file(load, fnd):
required_load_keys = set(['path', 'loc', 'saltenv'])
if not all(x in load for x in required_load_keys):
log.debug(
'Not all of the required keys present in payload. '
'Missing: {0}'.format(
', '.join(required_load_keys.difference(load))
)
'Not all of the required keys present in payload. Missing: %s',
', '.join(required_load_keys.difference(load))
)
return ret
if not fnd['path']:
@@ -239,8 +238,8 @@ def update():
# Lock writes
lk_fn = fname + '.lk'
salt.fileserver.wait_lock(lk_fn, fname)
with salt.utils.files.fopen(lk_fn, 'w+') as fp_:
fp_.write('')
with salt.utils.files.fopen(lk_fn, 'w'):
pass

try:
blob_service.get_blob_to_path(name, blob.name, fname)
@@ -258,8 +257,8 @@ def update():
container_list = path + '.list'
lk_fn = container_list + '.lk'
salt.fileserver.wait_lock(lk_fn, container_list)
with salt.utils.files.fopen(lk_fn, 'w+') as fp_:
fp_.write('')
with salt.utils.files.fopen(lk_fn, 'w'):
pass
with salt.utils.files.fopen(container_list, 'w') as fp_:
salt.utils.json.dump(blob_names, fp_)
try:
@@ -292,11 +291,11 @@ def file_hash(load, fnd):
os.makedirs(os.path.dirname(hashdest))
ret['hsum'] = salt.utils.hashutils.get_hash(path, __opts__['hash_type'])
with salt.utils.files.fopen(hashdest, 'w+') as fp_:
fp_.write(ret['hsum'])
fp_.write(salt.utils.stringutils.to_str(ret['hsum']))
return ret
else:
with salt.utils.files.fopen(hashdest, 'rb') as fp_:
ret['hsum'] = fp_.read()
ret['hsum'] = salt.utils.stringutils.to_unicode(fp_.read())
return ret


@@ -378,13 +377,15 @@ def _validate_config():
return False
for container in __opts__['azurefs']:
if not isinstance(container, dict):
log.error('One or more entries in the azurefs configuration list '
'are not formed as a dict. Skipping azurefs: {0}'
.format(container))
log.error(
'One or more entries in the azurefs configuration list are '
'not formed as a dict. Skipping azurefs: %s', container
)
return False
if 'account_name' not in container or 'container_name' not in container:
log.error('An azurefs container configuration is missing either '
'an account_name or a container_name: {0}'
.format(container))
log.error(
'An azurefs container configuration is missing either an '
'account_name or a container_name: %s', container
)
return False
return True
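The hash-cache handling above gains explicit conversions because, with unicode_literals, the computed digest is a text string while files opened in binary mode deal in bytes. salt.utils.stringutils.to_str() and to_unicode() wrap that encode/decode step; a rough standalone equivalent of the round trip (the digest value and the use of plain open() are illustrative, not Salt's helpers themselves)::

    from __future__ import absolute_import, print_function, unicode_literals

    import tempfile

    hsum = 'deadbeefcafe'  # made-up digest, stands in for ret['hsum']

    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        hashdest = tmp.name

    # Writing: encode the text digest so the same call behaves on PY2 and PY3
    # (roughly what wrapping the value in to_str() guards against).
    with open(hashdest, 'wb') as fp_:
        fp_.write(hsum.encode('utf-8'))

    # Reading: decode the raw bytes back into text before comparing digests
    # (roughly what to_unicode() does).
    with open(hashdest, 'rb') as fp_:
        cached = fp_.read().decode('utf-8')

    assert cached == hsum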
@@ -49,7 +49,7 @@ Walkthrough <tutorial-gitfs>`.
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import logging

PER_REMOTE_OVERRIDES = (
@@ -37,7 +37,7 @@ will set the desired branch method. Possible values are: ``branches``,
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import copy
import errno
import fnmatch
@@ -91,8 +91,8 @@ def __virtual__():
return False
if __opts__['hgfs_branch_method'] not in VALID_BRANCH_METHODS:
log.error(
'Invalid hgfs_branch_method \'{0}\'. Valid methods are: {1}'
.format(__opts__['hgfs_branch_method'], VALID_BRANCH_METHODS)
'Invalid hgfs_branch_method \'%s\'. Valid methods are: %s',
__opts__['hgfs_branch_method'], VALID_BRANCH_METHODS
)
return False
return __virtualname__
@@ -213,11 +213,10 @@ def init():
)
if not per_remote_conf:
log.error(
'Invalid per-remote configuration for hgfs remote {0}. If '
'Invalid per-remote configuration for hgfs remote %s. If '
'no per-remote parameters are being specified, there may '
'be a trailing colon after the URL, which should be '
'removed. Check the master configuration file.'
.format(repo_url)
'removed. Check the master configuration file.', repo_url
)
_failhard()

@@ -226,10 +225,9 @@ def init():
per_remote_defaults['branch_method'])
if branch_method not in VALID_BRANCH_METHODS:
log.error(
'Invalid branch_method \'{0}\' for remote {1}. Valid '
'branch methods are: {2}. This remote will be ignored.'
.format(branch_method, repo_url,
', '.join(VALID_BRANCH_METHODS))
'Invalid branch_method \'%s\' for remote %s. Valid '
'branch methods are: %s. This remote will be ignored.',
branch_method, repo_url, ', '.join(VALID_BRANCH_METHODS)
)
_failhard()

@@ -237,11 +235,10 @@ def init():
for param in (x for x in per_remote_conf
if x not in PER_REMOTE_OVERRIDES):
log.error(
'Invalid configuration parameter \'{0}\' for remote {1}. '
'Valid parameters are: {2}. See the documentation for '
'further information.'.format(
param, repo_url, ', '.join(PER_REMOTE_OVERRIDES)
)
'Invalid configuration parameter \'%s\' for remote %s. '
'Valid parameters are: %s. See the documentation for '
'further information.',
param, repo_url, ', '.join(PER_REMOTE_OVERRIDES)
)
per_remote_errors = True
if per_remote_errors:
@@ -253,8 +250,8 @@ def init():

if not isinstance(repo_url, six.string_types):
log.error(
'Invalid hgfs remote {0}. Remotes must be strings, you may '
'need to enclose the URL in quotes'.format(repo_url)
'Invalid hgfs remote %s. Remotes must be strings, you may '
'need to enclose the URL in quotes', repo_url
)
_failhard()

@@ -280,16 +277,16 @@ def init():
repo = hglib.open(rp_)
except hglib.error.ServerError:
log.error(
'Cache path {0} (corresponding remote: {1}) exists but is not '
'Cache path %s (corresponding remote: %s) exists but is not '
'a valid mercurial repository. You will need to manually '
'delete this directory on the master to continue to use this '
'hgfs remote.'.format(rp_, repo_url)
'hgfs remote.', rp_, repo_url
)
_failhard()
except Exception as exc:
log.error(
'Exception \'{0}\' encountered while initializing hgfs remote '
'{1}'.format(exc, repo_url)
'Exception \'%s\' encountered while initializing hgfs '
'remote %s', exc, repo_url
)
_failhard()

@@ -306,7 +303,11 @@ def init():
hgconfpath = os.path.join(rp_, '.hg', 'hgrc')
with salt.utils.files.fopen(hgconfpath, 'w+') as hgconfig:
hgconfig.write('[paths]\n')
hgconfig.write('default = {0}\n'.format(repo_url))
hgconfig.write(
salt.utils.stringutils.to_str(
'default = {0}\n'.format(repo_url)
)
)

repo_conf.update({
'repo': repo,
@@ -327,11 +328,15 @@ def init():
timestamp = datetime.now().strftime('%d %b %Y %H:%M:%S.%f')
fp_.write('# hgfs_remote map as of {0}\n'.format(timestamp))
for repo in repos:
fp_.write('{0} = {1}\n'.format(repo['hash'], repo['url']))
fp_.write(
salt.utils.stringutils.to_str(
'{0} = {1}\n'.format(repo['hash'], repo['url'])
)
)
except OSError:
pass
else:
log.info('Wrote new hgfs_remote map to {0}'.format(remote_map))
log.info('Wrote new hgfs_remote map to %s', remote_map)

return repos

@@ -366,12 +371,12 @@ def _clear_old_remotes():
shutil.rmtree(rdir)
except OSError as exc:
log.error(
'Unable to remove old hgfs remote cachedir {0}: {1}'
.format(rdir, exc)
'Unable to remove old hgfs remote cachedir %s: %s',
rdir, exc
)
failed.append(rdir)
else:
log.debug('hgfs removed old cachedir {0}'.format(rdir))
log.debug('hgfs removed old cachedir %s', rdir)
for fdir in failed:
to_remove.remove(fdir)
return bool(to_remove), repos
@@ -462,8 +467,8 @@ def lock(remote=None):
failed = []
if not os.path.exists(repo['lockfile']):
try:
with salt.utils.files.fopen(repo['lockfile'], 'w+') as fp_:
fp_.write('')
with salt.utils.files.fopen(repo['lockfile'], 'w'):
pass
except (IOError, OSError) as exc:
msg = ('Unable to set update lock for {0} ({1}): {2} '
.format(repo['url'], repo['lockfile'], exc))
@@ -509,29 +514,29 @@ def update():
for repo in repos:
if os.path.exists(repo['lockfile']):
log.warning(
'Update lockfile is present for hgfs remote {0}, skipping. '
'Update lockfile is present for hgfs remote %s, skipping. '
'If this warning persists, it is possible that the update '
'process was interrupted. Removing {1} or running '
'process was interrupted. Removing %s or running '
'\'salt-run fileserver.clear_lock hgfs\' will allow updates '
'to continue for this remote.'
.format(repo['url'], repo['lockfile'])
'to continue for this remote.', repo['url'], repo['lockfile']
)
continue
_, errors = lock(repo)
if errors:
log.error('Unable to set update lock for hgfs remote {0}, '
'skipping.'.format(repo['url']))
log.error(
'Unable to set update lock for hgfs remote %s, skipping.',
repo['url']
)
continue
log.debug('hgfs is fetching from {0}'.format(repo['url']))
log.debug('hgfs is fetching from %s', repo['url'])
repo['repo'].open()
curtip = repo['repo'].tip()
try:
repo['repo'].pull()
except Exception as exc:
log.error(
'Exception {0} caught while updating hgfs remote {1}'
.format(exc, repo['url']),
exc_info_on_loglevel=logging.DEBUG
'Exception %s caught while updating hgfs remote %s',
exc, repo['url'], exc_info_on_loglevel=logging.DEBUG
)
else:
newtip = repo['repo'].tip()
@@ -549,7 +554,7 @@ def update():
serial = salt.payload.Serial(__opts__)
with salt.utils.files.fopen(env_cache, 'wb+') as fp_:
fp_.write(serial.dumps(new_envs))
log.trace('Wrote env cache data to {0}'.format(env_cache))
log.trace('Wrote env cache data to %s', env_cache)

# if there is a change, fire an event
if __opts__.get('fileserver_events', False):
@@ -701,8 +706,8 @@ def find_file(path, tgt_env='base', **kwargs):  # pylint: disable=W0613
except hglib.error.CommandError:
repo['repo'].close()
continue
with salt.utils.files.fopen(lk_fn, 'w+') as fp_:
fp_.write('')
with salt.utils.files.fopen(lk_fn, 'w'):
pass
for filename in glob.glob(hashes_glob):
try:
os.remove(filename)
@@ -791,7 +796,7 @@ def file_hash(load, fnd):
return ret
else:
with salt.utils.files.fopen(hashdest, 'rb') as fp_:
ret['hsum'] = fp_.read()
ret['hsum'] = salt.utils.stringutils.to_unicode(fp_.read())
return ret


@@ -808,7 +813,7 @@ def _file_lists(load, form):
try:
os.makedirs(list_cachedir)
except os.error:
log.critical('Unable to make cachedir {0}'.format(list_cachedir))
log.critical('Unable to make cachedir %s', list_cachedir)
return []
list_cache = os.path.join(list_cachedir, '{0}.p'.format(load['saltenv']))
w_lock = os.path.join(list_cachedir, '.{0}.w'.format(load['saltenv']))
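The backends above also stop writing an empty string into their lock files; opening the path in 'w' mode and immediately closing it creates the same zero-byte lock without pushing a (now unicode) literal through the file handle. A minimal sketch with the standard library (the lock path is illustrative)::

    from __future__ import absolute_import, print_function, unicode_literals

    import os
    import tempfile

    lk_fn = os.path.join(tempfile.gettempdir(), 'example.lk')  # illustrative path

    # Old pattern: open read/write and write an empty string.
    with open(lk_fn, 'w+') as fp_:
        fp_.write('')

    # New pattern from this PR: just create (or truncate) the file and close it.
    with open(lk_fn, 'w'):
        pass

    assert os.path.isfile(lk_fn)
    os.remove(lk_fn)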
@@ -27,7 +27,7 @@ Other minionfs settings include: :conf_master:`minionfs_whitelist`,
.. seealso:: :ref:`tutorial-minionfs`

'''
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# Import python libs
import os
@@ -83,8 +83,10 @@ def find_file(path, tgt_env='base', **kwargs):  # pylint: disable=W0613
if tgt_env not in envs():
return fnd
if os.path.basename(path) == 'top.sls':
log.debug('minionfs will NOT serve top.sls '
'for security reasons (path requested: {0})'.format(path))
log.debug(
'minionfs will NOT serve top.sls '
'for security reasons (path requested: %s)', path
)
return fnd

mountpoint = salt.utils.url.strip_proto(__opts__['minionfs_mountpoint'])
@@ -188,7 +190,8 @@ def file_hash(load, fnd):
# cache file's contents should be "hash:mtime"
cache_path = os.path.join(
__opts__['cachedir'],
'minionfs/hash',
'minionfs',
'hash',
load['saltenv'],
'{0}.hash.{1}'.format(fnd['rel'], __opts__['hash_type'])
)
@@ -197,7 +200,7 @@ def file_hash(load, fnd):
try:
with salt.utils.files.fopen(cache_path, 'rb') as fp_:
try:
hsum, mtime = fp_.read().split(':')
hsum, mtime = salt.utils.stringutils.to_unicode(fp_.read()).split(':')
except ValueError:
log.debug(
'Fileserver attempted to read incomplete cache file. '
@@ -261,8 +264,8 @@ def file_list(load):
# pushed files
if tgt_minion not in minion_dirs:
log.warning(
'No files found in minionfs cache for minion ID \'{0}\''
.format(tgt_minion)
'No files found in minionfs cache for minion ID \'%s\'',
tgt_minion
)
return []
minion_dirs = [tgt_minion]
@@ -274,8 +277,8 @@ def file_list(load):
minion_files_dir = os.path.join(minions_cache_dir, minion, 'files')
if not os.path.isdir(minion_files_dir):
log.debug(
'minionfs: could not find files directory under {0}!'
.format(os.path.join(minions_cache_dir, minion))
'minionfs: could not find files directory under %s!',
os.path.join(minions_cache_dir, minion)
)
continue
walk_dir = os.path.join(minion_files_dir, prefix)
@@ -340,8 +343,8 @@ def dir_list(load):
# pushed files
if tgt_minion not in minion_dirs:
log.warning(
'No files found in minionfs cache for minion ID \'{0}\''
.format(tgt_minion)
'No files found in minionfs cache for minion ID \'%s\'',
tgt_minion
)
return []
minion_dirs = [tgt_minion]
@@ -353,8 +356,8 @@ def dir_list(load):
minion_files_dir = os.path.join(minions_cache_dir, minion, 'files')
if not os.path.isdir(minion_files_dir):
log.warning(
'minionfs: could not find files directory under {0}!'
.format(os.path.join(minions_cache_dir, minion))
'minionfs: could not find files directory under %s!',
os.path.join(minions_cache_dir, minion)
)
continue
walk_dir = os.path.join(minion_files_dir, prefix)
@@ -106,7 +106,7 @@ def envs():
'''
Return the file server environments
'''
return list(__opts__['file_roots'].keys())
return sorted(__opts__['file_roots'])


def serve_file(load, fnd):
@@ -144,14 +144,14 @@ def update():
'''
try:
salt.fileserver.reap_fileserver_cache_dir(
os.path.join(__opts__['cachedir'], 'roots/hash'),
os.path.join(__opts__['cachedir'], 'roots', 'hash'),
find_file
)
except (IOError, OSError):
# Hash file won't exist if no files have yet been served up
pass

mtime_map_path = os.path.join(__opts__['cachedir'], 'roots/mtime_map')
mtime_map_path = os.path.join(__opts__['cachedir'], 'roots', 'mtime_map')
# data to send on event
data = {'changed': False,
'files': {'changed': []},
@@ -165,15 +165,18 @@ def update():
if os.path.exists(mtime_map_path):
with salt.utils.files.fopen(mtime_map_path, 'r') as fp_:
for line in fp_:
line = salt.utils.stringutils.to_unicode(line)
try:
file_path, mtime = line.replace('\n', '').split(':', 1)
old_mtime_map[file_path] = mtime
if mtime != str(new_mtime_map.get(file_path, mtime)):
if mtime != new_mtime_map.get(file_path, mtime):
data['files']['changed'].append(file_path)
except ValueError:
# Document the invalid entry in the log
log.warning('Skipped invalid cache mtime entry in {0}: {1}'
.format(mtime_map_path, line))
log.warning(
'Skipped invalid cache mtime entry in %s: %s',
mtime_map_path, line
)

# compare the maps, set changed to the return value
data['changed'] = salt.fileserver.diff_mtime_map(old_mtime_map, new_mtime_map)
@@ -190,8 +193,11 @@ def update():
os.makedirs(mtime_map_path_dir)
with salt.utils.files.fopen(mtime_map_path, 'w') as fp_:
for file_path, mtime in six.iteritems(new_mtime_map):
fp_.write('{file_path}:{mtime}\n'.format(file_path=file_path,
mtime=mtime))
fp_.write(
salt.utils.stringutils.to_str(
'{0}:{1}\n'.format(file_path, mtime)
)
)

if __opts__.get('fileserver_events', False):
# if there is a change, fire an event
@@ -228,7 +234,8 @@ def file_hash(load, fnd):
# check if the hash is cached
# cache file's contents should be "hash:mtime"
cache_path = os.path.join(__opts__['cachedir'],
'roots/hash',
'roots',
'hash',
load['saltenv'],
'{0}.hash.{1}'.format(fnd['rel'],
__opts__['hash_type']))
@@ -237,7 +244,7 @@ def file_hash(load, fnd):
try:
with salt.utils.files.fopen(cache_path, 'r') as fp_:
try:
hsum, mtime = fp_.read().split(':')
hsum, mtime = salt.utils.stringutils.to_unicode(fp_.read()).split(':')
except ValueError:
log.debug('Fileserver attempted to read incomplete cache file. Retrying.')
# Delete the file since its incomplete (either corrupted or incomplete)
@@ -296,7 +303,7 @@ def _file_lists(load, form):
try:
os.makedirs(list_cachedir)
except os.error:
log.critical('Unable to make cachedir {0}'.format(list_cachedir))
log.critical('Unable to make cachedir %s', list_cachedir)
return []
list_cache = os.path.join(list_cachedir, '{0}.p'.format(load['saltenv']))
w_lock = os.path.join(list_cachedir, '.{0}.w'.format(load['saltenv']))
@@ -60,7 +60,7 @@ structure::
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import datetime
import os
import time
@@ -113,7 +113,7 @@ def update():
for bucket, files in six.iteritems(_find_files(env_meta)):
for file_path in files:
cached_file_path = _get_cached_file_name(bucket, saltenv, file_path)
log.info('{0} - {1} : {2}'.format(bucket, saltenv, file_path))
log.info('%s - %s : %s', bucket, saltenv, file_path)

# load the file from S3 if it's not in the cache or it's old
_get_file_from_s3(metadata, saltenv, bucket, file_path, cached_file_path)
@@ -438,15 +438,15 @@ def _refresh_buckets_cache_file(cache_file):
meta_response.update(k)
# attempt use of human readable output first.
try:
log.warning("'{0}' response for bucket '{1}'".format(meta_response['Message'], bucket_name))
log.warning("'%s' response for bucket '%s'", meta_response['Message'], bucket_name)
continue
except KeyError:
# no human readable error message provided
if 'Code' in meta_response:
log.warning(
("'{0}' response for "
"bucket '{1}'").format(meta_response['Code'],
bucket_name))
"'%s' response for bucket '%s'",
meta_response['Code'], bucket_name
)
continue
else:
log.warning(
@@ -477,15 +477,15 @@ def _refresh_buckets_cache_file(cache_file):
meta_response.update(k)
# attempt use of human readable output first.
try:
log.warning("'{0}' response for bucket '{1}'".format(meta_response['Message'], bucket_name))
log.warning("'%s' response for bucket '%s'", meta_response['Message'], bucket_name)
continue
except KeyError:
# no human readable error message provided
if 'Code' in meta_response:
log.warning(
("'{0}' response for "
"bucket '{1}'").format(meta_response['Code'],
bucket_name))
"'%s' response for bucket '%s'",
meta_response['Code'], bucket_name
)
continue
else:
log.warning(
@@ -658,17 +658,18 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
for header_name, header_value in ret['headers'].items():
name = header_name.strip()
value = header_value.strip()
if str(name).lower() == 'last-modified':
if six.text_type(name).lower() == 'last-modified':
s3_file_mtime = datetime.datetime.strptime(
value, '%a, %d %b %Y %H:%M:%S %Z')
elif str(name).lower() == 'content-length':
elif six.text_type(name).lower() == 'content-length':
s3_file_size = int(value)
if (cached_file_size == s3_file_size and
cached_file_mtime > s3_file_mtime):
log.info(
'{0} - {1} : {2} skipped download since cached file size '
'equal to and mtime after s3 values'.format(
bucket_name, saltenv, path))
'%s - %s : %s skipped download since cached file size '
'equal to and mtime after s3 values',
bucket_name, saltenv, path
)
return

# ... or get the file from S3
@@ -32,7 +32,7 @@ This backend assumes a standard svn layout with directories for ``branches``,
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import copy
import errno
import fnmatch
@@ -109,9 +109,9 @@ def _rev(repo):
except (pysvn._pysvn.ClientError, TypeError,
KeyError, AttributeError) as exc:
log.error(
'Error retrieving revision ID for svnfs remote {0} '
'(cachedir: {1}): {2}'
.format(repo['url'], repo['repo'], exc)
'Error retrieving revision ID for svnfs remote %s '
'(cachedir: %s): %s',
repo['url'], repo['repo'], exc
)
else:
return repo_info['revision'].number
@@ -150,10 +150,10 @@ def init():
)
if not per_remote_conf:
log.error(
'Invalid per-remote configuration for remote {0}. If no '
'Invalid per-remote configuration for remote %s. If no '
'per-remote parameters are being specified, there may be '
'a trailing colon after the URL, which should be removed. '
'Check the master configuration file.'.format(repo_url)
'Check the master configuration file.', repo_url
)
_failhard()

@@ -161,11 +161,10 @@ def init():
for param in (x for x in per_remote_conf
if x not in PER_REMOTE_OVERRIDES):
log.error(
'Invalid configuration parameter \'{0}\' for remote {1}. '
'Valid parameters are: {2}. See the documentation for '
'further information.'.format(
param, repo_url, ', '.join(PER_REMOTE_OVERRIDES)
)
'Invalid configuration parameter \'%s\' for remote %s. '
'Valid parameters are: %s. See the documentation for '
'further information.',
param, repo_url, ', '.join(PER_REMOTE_OVERRIDES)
)
per_remote_errors = True
if per_remote_errors:
@@ -177,8 +176,8 @@ def init():

if not isinstance(repo_url, six.string_types):
log.error(
'Invalid svnfs remote {0}. Remotes must be strings, you may '
'need to enclose the URL in quotes'.format(repo_url)
'Invalid svnfs remote %s. Remotes must be strings, you may '
'need to enclose the URL in quotes', repo_url
)
_failhard()

@@ -204,8 +203,8 @@ def init():
new_remote = True
except pysvn._pysvn.ClientError as exc:
log.error(
'Failed to initialize svnfs remote \'{0}\': {1}'
.format(repo_url, exc)
'Failed to initialize svnfs remote \'%s\': %s',
repo_url, exc
)
_failhard()
else:
@@ -215,10 +214,10 @@ def init():
CLIENT.status(rp_)
except pysvn._pysvn.ClientError as exc:
log.error(
'Cache path {0} (corresponding remote: {1}) exists but is '
'Cache path %s (corresponding remote: %s) exists but is '
'not a valid subversion checkout. You will need to '
'manually delete this directory on the master to continue '
'to use this svnfs remote.'.format(rp_, repo_url)
'to use this svnfs remote.', rp_, repo_url
)
_failhard()

@@ -239,14 +238,16 @@ def init():
fp_.write('# svnfs_remote map as of {0}\n'.format(timestamp))
for repo_conf in repos:
fp_.write(
'{0} = {1}\n'.format(
repo_conf['hash'], repo_conf['url']
salt.utils.stringutils.to_str(
'{0} = {1}\n'.format(
repo_conf['hash'], repo_conf['url']
)
)
)
except OSError:
pass
else:
log.info('Wrote new svnfs_remote map to {0}'.format(remote_map))
log.info('Wrote new svnfs_remote map to %s', remote_map)

return repos

@@ -281,12 +282,12 @@ def _clear_old_remotes():
shutil.rmtree(rdir)
except OSError as exc:
log.error(
'Unable to remove old svnfs remote cachedir {0}: {1}'
.format(rdir, exc)
'Unable to remove old svnfs remote cachedir %s: %s',
rdir, exc
)
failed.append(rdir)
else:
log.debug('svnfs removed old cachedir {0}'.format(rdir))
log.debug('svnfs removed old cachedir %s', rdir)
for fdir in failed:
to_remove.remove(fdir)
return bool(to_remove), repos
@@ -424,27 +425,28 @@ def update():
for repo in repos:
if os.path.exists(repo['lockfile']):
log.warning(
'Update lockfile is present for svnfs remote {0}, skipping. '
'Update lockfile is present for svnfs remote %s, skipping. '
'If this warning persists, it is possible that the update '
'process was interrupted. Removing {1} or running '
'process was interrupted. Removing %s or running '
'\'salt-run fileserver.clear_lock svnfs\' will allow updates '
'to continue for this remote.'
.format(repo['url'], repo['lockfile'])
'to continue for this remote.', repo['url'], repo['lockfile']
)
continue
_, errors = lock(repo)
if errors:
log.error('Unable to set update lock for svnfs remote {0}, '
'skipping.'.format(repo['url']))
log.error(
'Unable to set update lock for svnfs remote %s, skipping.',
repo['url']
)
continue
log.debug('svnfs is fetching from {0}'.format(repo['url']))
log.debug('svnfs is fetching from %s', repo['url'])
old_rev = _rev(repo)
try:
CLIENT.update(repo['repo'])
except pysvn._pysvn.ClientError as exc:
log.error(
'Error updating svnfs remote {0} (cachedir: {1}): {2}'
.format(repo['url'], repo['cachedir'], exc)
'Error updating svnfs remote %s (cachedir: %s): %s',
repo['url'], repo['cachedir'], exc
)

new_rev = _rev(repo)
@@ -465,7 +467,7 @@ def update():
serial = salt.payload.Serial(__opts__)
with salt.utils.files.fopen(env_cache, 'wb+') as fp_:
fp_.write(serial.dumps(new_envs))
log.trace('Wrote env cache data to {0}'.format(env_cache))
log.trace('Wrote env cache data to %s', env_cache)

# if there is a change, fire an event
if __opts__.get('fileserver_events', False):
@@ -535,9 +537,9 @@ def envs(ignore_cache=False):
ret.add('base')
else:
log.error(
'svnfs trunk path \'{0}\' does not exist in repo {1}, no base '
'environment will be provided by this remote'
.format(repo['trunk'], repo['url'])
'svnfs trunk path \'%s\' does not exist in repo %s, no base '
'environment will be provided by this remote',
repo['trunk'], repo['url']
)

branches = os.path.join(repo['repo'], repo['branches'])
@@ -545,8 +547,8 @@ def envs(ignore_cache=False):
ret.update(os.listdir(branches))
else:
log.error(
'svnfs branches path \'{0}\' does not exist in repo {1}'
.format(repo['branches'], repo['url'])
'svnfs branches path \'%s\' does not exist in repo %s',
repo['branches'], repo['url']
)

tags = os.path.join(repo['repo'], repo['tags'])
@@ -554,8 +556,8 @@ def envs(ignore_cache=False):
ret.update(os.listdir(tags))
else:
log.error(
'svnfs tags path \'{0}\' does not exist in repo {1}'
.format(repo['tags'], repo['url'])
'svnfs tags path \'%s\' does not exist in repo %s',
repo['tags'], repo['url']
)
return [x for x in sorted(ret) if _env_is_exposed(x)]

@@ -689,7 +691,8 @@ def file_hash(load, fnd):
# Check if the hash is cached
# Cache file's contents should be "hash:mtime"
cache_path = os.path.join(__opts__['cachedir'],
'svnfs/hash',
'svnfs',
'hash',
saltenv,
'{0}.hash.{1}'.format(relpath,
__opts__['hash_type']))
@@ -731,7 +734,7 @@ def _file_lists(load, form):
try:
os.makedirs(list_cachedir)
except os.error:
log.critical('Unable to make cachedir {0}'.format(list_cachedir))
log.critical('Unable to make cachedir %s', list_cachedir)
return []
list_cache = os.path.join(list_cachedir, '{0}.p'.format(load['saltenv']))
w_lock = os.path.join(list_cachedir, '.{0}.w'.format(load['saltenv']))
@@ -4,7 +4,7 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo
'''

# Import python libs
from __future__ import absolute_import, unicode_literals, print_function
from __future__ import absolute_import, print_function, unicode_literals
import copy
import contextlib
import distutils
@@ -14,7 +14,6 @@ import glob
import hashlib
import logging
import os
import re
import shlex
import shutil
import stat
@@ -70,19 +69,19 @@ AUTH_PARAMS = ('user', 'password', 'pubkey', 'privkey', 'passphrase',
PER_SALTENV_PARAMS = ('mountpoint', 'root', 'ref')

_RECOMMEND_GITPYTHON = (
'GitPython is installed, you may wish to set {0}_provider to '
'\'gitpython\' to use GitPython for {0} support.'
'GitPython is installed, you may wish to set %s_provider to '
'\'gitpython\' to use GitPython for %s support.'
)

_RECOMMEND_PYGIT2 = (
'pygit2 is installed, you may wish to set {0}_provider to '
'\'pygit2\' to use pygit2 for for {0} support.'
'pygit2 is installed, you may wish to set %s_provider to '
'\'pygit2\' to use pygit2 for for %s support.'
)

_INVALID_REPO = (
'Cache path {0} (corresponding remote: {1}) exists but is not a valid '
'Cache path %s (corresponding remote: %s) exists but is not a valid '
'git repository. You will need to manually delete this directory on the '
'master to continue to use this {2} remote.'
'master to continue to use this %s remote.'
)

log = logging.getLogger(__name__)
@@ -221,7 +220,7 @@ class GitProvider(object):
'The following parameter names are restricted to per-remote '
'use only: %s. This is a bug, please report it.',
', '.join(per_remote_collisions)
)
)

try:
valid_per_remote_params = override_params + per_remote_only
@@ -448,7 +447,8 @@ class GitProvider(object):

ret = set()
for ref in refs:
ref = re.sub('^refs/', '', ref)
if ref.startswith('refs/'):
ref = ref[5:]
rtype, rname = ref.split('/', 1)
if rtype == 'remotes' and use_branches:
parted = rname.partition('/')
@@ -744,7 +744,7 @@ class GitProvider(object):
try:
fh_ = os.open(self._get_lock_file(lock_type),
os.O_CREAT | os.O_EXCL | os.O_WRONLY)
with os.fdopen(fh_, 'w'):
with os.fdopen(fh_, 'wb'):
# Write the lock file and close the filehandle
os.write(fh_, six.b(six.text_type(os.getpid())))
except (OSError, IOError) as exc:
@@ -1153,7 +1153,7 @@ class GitPython(GitProvider):
try:
self.repo = git.Repo(self.cachedir)
except git.exc.InvalidGitRepositoryError:
log.error(_INVALID_REPO.format(self.cachedir, self.url, self.role))
log.error(_INVALID_REPO, self.cachedir, self.url, self.role)
return new

self.gitdir = salt.utils.path.join(self.repo.working_dir, '.git')
@@ -1583,7 +1583,7 @@ class Pygit2(GitProvider):
pygit2.settings.search_path[pygit2.GIT_CONFIG_LEVEL_GLOBAL] = home
self.repo = pygit2.Repository(self.cachedir)
except KeyError:
log.error(_INVALID_REPO.format(self.cachedir, self.url, self.role))
log.error(_INVALID_REPO, self.cachedir, self.url, self.role)
return new

self.gitdir = salt.utils.path.join(self.repo.workdir, '.git')
@@ -2181,8 +2181,7 @@ class GitBase(object):
log.critical(
'The following %s remotes have conflicting cachedirs: '
'%s. Resolve this using a per-remote parameter called '
'\'name\'.',
self.role, ', '.join(cachedir_map[dirname])
'\'name\'.', self.role, ', '.join(cachedir_map[dirname])
)
failhard(self.role)

@@ -2222,9 +2221,7 @@ class GitBase(object):
)
failed.append(rdir)
else:
log.debug(
'%s removed old cachedir %s', self.role, rdir
)
log.debug('%s removed old cachedir %s', self.role, rdir)
for fdir in failed:
to_remove.remove(fdir)
ret = bool(to_remove)
@@ -2429,8 +2426,7 @@ class GitBase(object):
self.provider = 'gitpython'
if not hasattr(self, 'provider'):
log.critical(
'No suitable %s provider module is installed.',
self.role
'No suitable %s provider module is installed.', self.role
)
failhard(self.role)

@@ -2440,7 +2436,7 @@ class GitBase(object):
'''
def _recommend():
if HAS_PYGIT2 and 'pygit2' in self.git_providers:
log.error(_RECOMMEND_PYGIT2.format(self.role))
log.error(_RECOMMEND_PYGIT2, self.role, self.role)

if not HAS_GITPYTHON:
if not quiet:
@@ -2491,7 +2487,7 @@ class GitBase(object):
'''
def _recommend():
if HAS_GITPYTHON and 'gitpython' in self.git_providers:
log.error(_RECOMMEND_GITPYTHON.format(self.role))
log.error(_RECOMMEND_GITPYTHON, self.role, self.role)

if not HAS_PYGIT2:
if not quiet:
@@ -2565,19 +2561,17 @@ class GitBase(object):
)
for repo in self.remotes:
fp_.write(
'{0} = {1}\n'.format(
repo.cachedir_basename,
repo.id
salt.utils.stringutils.to_str(
'{0} = {1}\n'.format(
repo.cachedir_basename,
repo.id
)
)
)
except OSError:
pass
else:
log.info(
'Wrote new %s remote map to %s',
self.role,
remote_map
)
log.info('Wrote new %s remote map to %s', self.role, remote_map)

def do_checkout(self, repo):
'''
@@ -2757,13 +2751,13 @@ class GitFS(GitBase):
salt.fileserver.wait_lock(lk_fn, dest)
if os.path.isfile(blobshadest) and os.path.isfile(dest):
with salt.utils.files.fopen(blobshadest, 'r') as fp_:
sha = fp_.read()
sha = salt.utils.stringutils.to_unicode(fp_.read())
if sha == blob_hexsha:
fnd['rel'] = path
fnd['path'] = dest
return _add_file_stat(fnd, blob_mode)
with salt.utils.files.fopen(lk_fn, 'w+') as fp_:
fp_.write('')
with salt.utils.files.fopen(lk_fn, 'w'):
pass
for filename in glob.glob(hashes_glob):
try:
os.remove(filename)
@@ -2798,10 +2792,8 @@ class GitFS(GitBase):
required_load_keys = set(['path', 'loc', 'saltenv'])
if not all(x in load for x in required_load_keys):
log.debug(
'Not all of the required keys present in payload. '
'Missing: {0}'.format(
', '.join(required_load_keys.difference(load))
)
'Not all of the required keys present in payload. Missing: %s',
', '.join(required_load_keys.difference(load))
)
return ret
if not fnd['path']:
@@ -2861,10 +2853,7 @@ class GitFS(GitBase):
try:
os.makedirs(self.file_list_cachedir)
except os.error:
log.error(
'Unable to make cachedir %s',
self.file_list_cachedir
)
log.error('Unable to make cachedir %s', self.file_list_cachedir)
return []
list_cache = salt.utils.path.join(
self.file_list_cachedir,
@@ -4,7 +4,7 @@ URL utils
'''

# Import python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import re
import sys

tests/integration/files/file/base/питон.txt (new normal file, 3 lines)
@@ -0,0 +1,3 @@
{{ grains['id'] }}

спам, спам, спам, яйца и спам

tests/integration/files/file/base/соль/foo.txt (new normal file, 1 line)
@@ -0,0 +1 @@
bar
@@ -4,7 +4,7 @@
'''

# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import errno
import os
import shutil
@@ -35,6 +35,7 @@ from tests.support.paths import TMP, FILES

# Import salt libs
import salt.fileserver.gitfs as gitfs
import salt.utils.files
import salt.utils.gitfs
import salt.utils.platform
import salt.utils.win_functions
@@ -45,6 +46,8 @@ log = logging.getLogger(__name__)
TMP_SOCK_DIR = tempfile.mkdtemp(dir=TMP)
TMP_REPO_DIR = os.path.join(TMP, 'gitfs_root')
INTEGRATION_BASE_FILES = os.path.join(FILES, 'file', 'base')
UNICODE_FILENAME = 'питон.txt'
UNICODE_DIRNAME = UNICODE_ENVNAME = 'соль'


def _rmtree_error(func, path, excinfo):
@@ -261,6 +264,9 @@ class GitFSTest(TestCase, LoaderModuleMockMixin):
if x != '.git'])
repo.index.commit('Test')

# Add another branch with unicode characters in the name
repo.create_head(UNICODE_ENVNAME, 'HEAD')

def setUp(self):
'''
We don't want to check in another .git dir into GH because that just
@@ -286,11 +292,17 @@ class GitFSTest(TestCase, LoaderModuleMockMixin):
def test_file_list(self):
ret = gitfs.file_list(LOAD)
self.assertIn('testfile', ret)
self.assertIn(UNICODE_FILENAME, ret)
# This function does not use os.sep, the Salt fileserver uses the
# forward slash, hence it being explicitly used to join here.
self.assertIn('/'.join((UNICODE_DIRNAME, 'foo.txt')), ret)

def test_dir_list(self):
ret = gitfs.dir_list(LOAD)
self.assertIn('grail', ret)
self.assertIn(UNICODE_DIRNAME, ret)

def test_envs(self):
ret = gitfs.envs()
ret = gitfs.envs(ignore_cache=True)
self.assertIn('base', ret)
self.assertIn(UNICODE_ENVNAME, ret)
@@ -4,7 +4,7 @@
'''

# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals

# Import Salt Testing libs
from tests.support.unit import TestCase
@@ -4,7 +4,8 @@
'''

# Import Python libs
from __future__ import absolute_import
from __future__ import absolute_import, print_function, unicode_literals
import copy
import os
import tempfile

@@ -16,7 +17,7 @@ from tests.support.unit import TestCase, skipIf
from tests.support.mock import patch, NO_MOCK, NO_MOCK_REASON

# Import Salt libs
import salt.fileserver.roots
import salt.fileserver.roots as roots
import salt.fileclient
import salt.utils.files
import salt.utils.platform
@@ -26,6 +27,9 @@ try:
except ImportError:
pass

UNICODE_FILENAME = 'питон.txt'
UNICODE_DIRNAME = UNICODE_ENVNAME = 'соль'


@skipIf(NO_MOCK, NO_MOCK_REASON)
class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMixin):
@@ -37,7 +41,7 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
empty_dir = os.path.join(TMP_STATE_TREE, 'empty_dir')
if not os.path.isdir(empty_dir):
os.makedirs(empty_dir)
return {salt.fileserver.roots: {'__opts__': self.opts}}
return {roots: {'__opts__': self.opts}}

@classmethod
def setUpClass(cls):
@@ -74,25 +78,26 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
del self.opts

def test_file_list(self):
ret = salt.fileserver.roots.file_list({'saltenv': 'base'})
ret = roots.file_list({'saltenv': 'base'})
self.assertIn('testfile', ret)
self.assertIn(UNICODE_FILENAME, ret)

def test_find_file(self):
ret = salt.fileserver.roots.find_file('testfile')
ret = roots.find_file('testfile')
self.assertEqual('testfile', ret['rel'])

full_path_to_file = os.path.join(FILES, 'file', 'base', 'testfile')
self.assertEqual(full_path_to_file, ret['path'])

def test_serve_file(self):
with patch.dict(salt.fileserver.roots.__opts__, {'file_buffer_size': 262144}):
with patch.dict(roots.__opts__, {'file_buffer_size': 262144}):
load = {'saltenv': 'base',
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
'loc': 0
}
fnd = {'path': os.path.join(FILES, 'file', 'base', 'testfile'),
'rel': 'testfile'}
ret = salt.fileserver.roots.serve_file(load, fnd)
ret = roots.serve_file(load, fnd)

data = 'Scene 24\n\n \n OLD MAN: Ah, hee he he ha!\n ' \
'ARTHUR: And this enchanter of whom you speak, he ' \
@@ -128,9 +133,13 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
{'data': data,
'dest': 'testfile'})

@skipIf(True, "Update test not yet implemented")
def test_update(self):
pass
def test_envs(self):
opts = {'file_roots': copy.copy(self.opts['file_roots'])}
opts['file_roots'][UNICODE_ENVNAME] = opts['file_roots']['base']
with patch.dict(roots.__opts__, opts):
ret = roots.envs()
self.assertIn('base', ret)
self.assertIn(UNICODE_ENVNAME, ret)

def test_file_hash(self):
load = {
@@ -141,7 +150,7 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
'path': os.path.join(FILES, 'file', 'base', 'testfile'),
'rel': 'testfile'
}
ret = salt.fileserver.roots.file_hash(load, fnd)
ret = roots.file_hash(load, fnd)

# Hashes are different in Windows. May be how git translates line
# endings
@@ -158,17 +167,18 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
)

def test_file_list_emptydirs(self):
ret = salt.fileserver.roots.file_list_emptydirs({'saltenv': 'base'})
ret = roots.file_list_emptydirs({'saltenv': 'base'})
self.assertIn('empty_dir', ret)

def test_dir_list(self):
ret = salt.fileserver.roots.dir_list({'saltenv': 'base'})
ret = roots.dir_list({'saltenv': 'base'})
self.assertIn('empty_dir', ret)
self.assertIn(UNICODE_DIRNAME, ret)

def test_symlink_list(self):
if self.test_symlink_list_file_roots:
self.opts['file_roots'] = self.test_symlink_list_file_roots
ret = salt.fileserver.roots.symlink_list({'saltenv': 'base'})
ret = roots.symlink_list({'saltenv': 'base'})
self.assertDictEqual(ret, {'dest_sym': 'source_sym'})