diff --git a/doc/topics/releases/2014.7.2.rst b/doc/topics/releases/2014.7.2.rst index 8218748ce4..1806523ff5 100644 --- a/doc/topics/releases/2014.7.2.rst +++ b/doc/topics/releases/2014.7.2.rst @@ -15,7 +15,8 @@ Version 2014.7.2 is a bugfix release for :doc:`2014.7.0 to lowercase their npm package names for them. The :py:mod:`npm module ` was never affected by mandatory lowercasing. (:issue:`20329`) -- Deprecate the `activate` parameter for pip.install for both the +- Deprecate the ``activate`` parameter for pip.install for both the :py:mod:`module ` and the :py:mod:`state `. - If `bin_env` is given and points to a virtualenv, there is no need to + If ``bin_env`` is given and points to a virtualenv, there is no need to activate that virtualenv in a shell for pip to install to the virtualenv. +- Fix a file-locking bug in gitfs (:issue:`18839`) diff --git a/salt/fileserver/gitfs.py b/salt/fileserver/gitfs.py index 3a3c62e060..07b7d79d11 100644 --- a/salt/fileserver/gitfs.py +++ b/salt/fileserver/gitfs.py @@ -44,7 +44,9 @@ from __future__ import absolute_import # Import python libs import copy +import contextlib import distutils.version # pylint: disable=E0611 +import fcntl import glob import hashlib import logging @@ -890,7 +892,8 @@ def purge_cache(): remove_dirs = [] for repo in init(): try: - remove_dirs.remove(repo['hash']) + with _aquire_update_lock_for_repo(repo): + remove_dirs.remove(repo['hash']) except ValueError: pass remove_dirs = [os.path.join(bp_, rdir) for rdir in remove_dirs @@ -902,6 +905,37 @@ def purge_cache(): return False +@contextlib.contextmanager +def _aquire_update_lock_for_repo(repo): + provider = _get_provider() + + if provider == 'gitpython': + working_dir = repo['repo'].working_dir + elif provider == 'pygit2': + working_dir = repo['repo'].workdir + elif provider == 'dulwich': + working_dir = repo['repo'].path + + with wait_for_write_lock(os.path.join(working_dir, 'update.lk')): + yield + + +@contextlib.contextmanager +def wait_for_write_lock(filename): + fhandle = open(filename, 'w') + + if salt.utils.is_fcntl_available(check_sunos=True): + fcntl.flock(fhandle.fileno(), fcntl.LOCK_EX) + try: + yield + finally: + if salt.utils.is_fcntl_available(check_sunos=True): + fcntl.flock(fhandle.fileno(), fcntl.LOCK_UN) + + fhandle.close() + os.remove(filename) + + def update(): ''' Execute a git fetch on all of the repos @@ -923,98 +957,93 @@ def update(): # origin is just a url here, there is no origin object origin = repo['url'] working_dir = repo['repo'].path - lk_fn = os.path.join(working_dir, 'update.lk') - with salt.utils.fopen(lk_fn, 'w+') as fp_: - fp_.write(str(pid)) - try: - log.debug('Fetching from {0}'.format(repo['url'])) - if provider == 'gitpython': - try: - fetch_results = origin.fetch() - except AssertionError: - fetch_results = origin.fetch() - for fetch in fetch_results: - if fetch.old_commit is not None: - data['changed'] = True - elif provider == 'pygit2': - try: - origin.credentials = repo['credentials'] - except KeyError: - # No credentials configured for this repo - pass - fetch = origin.fetch() - try: - # pygit2.Remote.fetch() returns a dict in pygit2 < 0.21.0 - received_objects = fetch['received_objects'] - except (AttributeError, TypeError): - # pygit2.Remote.fetch() returns a class instance in - # pygit2 >= 0.21.0 - received_objects = fetch.received_objects - log.debug( - 'Gitfs received {0} objects for remote {1}' - .format(received_objects, repo['url']) - ) - if received_objects: - data['changed'] = True - elif provider == 'dulwich': - client, 
path = \ - dulwich.client.get_transport_and_path_from_url( - origin, thin_packs=True - ) - refs_pre = repo['repo'].get_refs() - try: - refs_post = client.fetch(path, repo['repo']) - except dulwich.errors.NotGitRepository: - log.critical( - 'Dulwich does not recognize remote {0} as a valid ' - 'remote URL. Perhaps it is missing \'.git\' at the ' - 'end.'.format(repo['url']) - ) - continue - except KeyError: - log.critical( - 'Local repository cachedir {0!r} (corresponding ' - 'remote: {1}) has been corrupted. Salt will now ' - 'attempt to remove the local checkout to allow it to ' - 'be re-initialized in the next fileserver cache ' - 'update.' - .format(repo['cachedir'], repo['url']) - ) + + with _aquire_update_lock_for_repo(repo): + try: + log.debug('Fetching from {0}'.format(repo['url'])) + if provider == 'gitpython': try: - salt.utils.rm_rf(repo['cachedir']) - except OSError as exc: - log.critical( - 'Unable to remove {0!r}: {1}' - .format(repo['cachedir'], exc) - ) - continue - if refs_post is None: - # Empty repository - log.warning( - 'Gitfs remote {0!r} is an empty repository and will ' - 'be skipped.'.format(origin) + fetch_results = origin.fetch() + except AssertionError: + fetch_results = origin.fetch() + for fetch in fetch_results: + if fetch.old_commit is not None: + data['changed'] = True + elif provider == 'pygit2': + try: + origin.credentials = repo['credentials'] + except KeyError: + # No credentials configured for this repo + pass + fetch = origin.fetch() + try: + # pygit2.Remote.fetch() returns a dict in pygit2 < 0.21.0 + received_objects = fetch['received_objects'] + except (AttributeError, TypeError): + # pygit2.Remote.fetch() returns a class instance in + # pygit2 >= 0.21.0 + received_objects = fetch.received_objects + log.debug( + 'Gitfs received {0} objects for remote {1}' + .format(received_objects, repo['url']) ) - continue - if refs_pre != refs_post: - data['changed'] = True - # Update local refs - for ref in _dulwich_env_refs(refs_post): - repo['repo'][ref] = refs_post[ref] - # Prune stale refs - for ref in repo['repo'].get_refs(): - if ref not in refs_post: - del repo['repo'][ref] - except Exception as exc: - # Do not use {0!r} in the error message, as exc is not a string - log.error( - 'Exception \'{0}\' caught while fetching gitfs remote {1}' - .format(exc, repo['url']), - exc_info_on_loglevel=logging.DEBUG - ) - try: - os.remove(lk_fn) - except (IOError, OSError): - pass + if received_objects: + data['changed'] = True + elif provider == 'dulwich': + client, path = \ + dulwich.client.get_transport_and_path_from_url( + origin, thin_packs=True + ) + refs_pre = repo['repo'].get_refs() + try: + refs_post = client.fetch(path, repo['repo']) + except dulwich.errors.NotGitRepository: + log.critical( + 'Dulwich does not recognize remote {0} as a valid ' + 'remote URL. Perhaps it is missing \'.git\' at the ' + 'end.'.format(repo['url']) + ) + continue + except KeyError: + log.critical( + 'Local repository cachedir {0!r} (corresponding ' + 'remote: {1}) has been corrupted. Salt will now ' + 'attempt to remove the local checkout to allow it to ' + 'be re-initialized in the next fileserver cache ' + 'update.' 
+ .format(repo['cachedir'], repo['url']) + ) + try: + salt.utils.rm_rf(repo['cachedir']) + except OSError as exc: + log.critical( + 'Unable to remove {0!r}: {1}' + .format(repo['cachedir'], exc) + ) + continue + if refs_post is None: + # Empty repository + log.warning( + 'Gitfs remote {0!r} is an empty repository and will ' + 'be skipped.'.format(origin) + ) + continue + if refs_pre != refs_post: + data['changed'] = True + # Update local refs + for ref in _dulwich_env_refs(refs_post): + repo['repo'][ref] = refs_post[ref] + # Prune stale refs + for ref in repo['repo'].get_refs(): + if ref not in refs_post: + del repo['repo'][ref] + except Exception as exc: + # Do not use {0!r} in the error message, as exc is not a string + log.error( + 'Exception \'{0}\' caught while fetching gitfs remote {1}' + .format(exc, repo['url']), + exc_info_on_loglevel=logging.DEBUG + ) env_cache = os.path.join(__opts__['cachedir'], 'gitfs/envs.p') if data.get('changed', False) is True or not os.path.isfile(env_cache): @@ -1175,155 +1204,157 @@ def find_file(path, tgt_env='base', **kwargs): # pylint: disable=W0613 '{0}.lk'.format(path)) destdir = os.path.dirname(dest) hashdir = os.path.dirname(blobshadest) - if not os.path.isdir(destdir): - try: - os.makedirs(destdir) - except OSError: - # Path exists and is a file, remove it and retry - os.remove(destdir) - os.makedirs(destdir) - if not os.path.isdir(hashdir): - try: - os.makedirs(hashdir) - except OSError: - # Path exists and is a file, remove it and retry - os.remove(hashdir) - os.makedirs(hashdir) for repo in init(): - if repo['mountpoint'] \ - and not path.startswith(repo['mountpoint'] + os.path.sep): - continue - repo_path = path[len(repo['mountpoint']):].lstrip(os.path.sep) - if repo['root']: - repo_path = os.path.join(repo['root'], repo_path) - - blob = None - depth = 0 - if provider == 'gitpython': - tree = _get_tree_gitpython(repo, tgt_env) - if not tree: - # Branch/tag/SHA not found in repo, try the next - continue - while True: - depth += 1 - if depth > SYMLINK_RECURSE_DEPTH: - break + with _aquire_update_lock_for_repo(repo): + if not os.path.isdir(destdir): try: - file_blob = tree / repo_path - if stat.S_ISLNK(file_blob.mode): - # Path is a symlink. The blob data corresponding to - # this path's object ID will be the target of the - # symlink. Follow the symlink and set repo_path to the - # location indicated in the blob data. - stream = StringIO() - file_blob.stream_data(stream) - stream.seek(0) - link_tgt = stream.read() - stream.close() - repo_path = os.path.normpath( - os.path.join(os.path.dirname(repo_path), link_tgt) - ) - else: - blob = file_blob - break - except KeyError: - # File not found or repo_path points to a directory - break - if blob is None: - continue - blob_hexsha = blob.hexsha - - elif provider == 'pygit2': - tree = _get_tree_pygit2(repo, tgt_env) - if not tree: - # Branch/tag/SHA not found in repo, try the next - continue - while True: - depth += 1 - if depth > SYMLINK_RECURSE_DEPTH: - break + os.makedirs(destdir) + except OSError: + # Path exists and is a file, remove it and retry + os.remove(destdir) + os.makedirs(destdir) + if not os.path.isdir(hashdir): try: - if stat.S_ISLNK(tree[repo_path].filemode): - # Path is a symlink. The blob data corresponding to this - # path's object ID will be the target of the symlink. Follow - # the symlink and set repo_path to the location indicated - # in the blob data. 
- link_tgt = repo['repo'][tree[repo_path].oid].data - repo_path = os.path.normpath( - os.path.join(os.path.dirname(repo_path), link_tgt) - ) - else: - oid = tree[repo_path].oid - blob = repo['repo'][oid] - except KeyError: - break - if blob is None: - continue - blob_hexsha = blob.hex + os.makedirs(hashdir) + except OSError: + # Path exists and is a file, remove it and retry + os.remove(hashdir) + os.makedirs(hashdir) - elif provider == 'dulwich': - while True: - depth += 1 - if depth > SYMLINK_RECURSE_DEPTH: - break - prefix_dirs, _, filename = repo_path.rpartition(os.path.sep) - tree = _get_tree_dulwich(repo, tgt_env) - tree = _dulwich_walk_tree(repo['repo'], tree, prefix_dirs) - if not isinstance(tree, dulwich.objects.Tree): - # Branch/tag/SHA not found in repo - break - try: - mode, oid = tree[filename] - if stat.S_ISLNK(mode): - # Path is a symlink. The blob data corresponding to - # this path's object ID will be the target of the - # symlink. Follow the symlink and set repo_path to the - # location indicated in the blob data. - link_tgt = repo['repo'].get_object(oid).as_raw_string() - repo_path = os.path.normpath( - os.path.join(os.path.dirname(repo_path), link_tgt) - ) - else: - blob = repo['repo'].get_object(oid) - break - except KeyError: - break - if blob is None: + if repo['mountpoint'] \ + and not path.startswith(repo['mountpoint'] + os.path.sep): continue - blob_hexsha = blob.sha().hexdigest() + repo_path = path[len(repo['mountpoint']):].lstrip(os.path.sep) + if repo['root']: + repo_path = os.path.join(repo['root'], repo_path) - salt.fileserver.wait_lock(lk_fn, dest) - if os.path.isfile(blobshadest) and os.path.isfile(dest): - with salt.utils.fopen(blobshadest, 'r') as fp_: - sha = fp_.read() - if sha == blob_hexsha: - fnd['rel'] = path - fnd['path'] = dest - return fnd - with salt.utils.fopen(lk_fn, 'w+') as fp_: - fp_.write('') - for filename in glob.glob(hashes_glob): - try: - os.remove(filename) - except Exception: - pass - with salt.utils.fopen(dest, 'w+') as fp_: + blob = None + depth = 0 if provider == 'gitpython': - blob.stream_data(fp_) + tree = _get_tree_gitpython(repo, tgt_env) + if not tree: + # Branch/tag/SHA not found in repo, try the next + continue + while True: + depth += 1 + if depth > SYMLINK_RECURSE_DEPTH: + break + try: + file_blob = tree / repo_path + if stat.S_ISLNK(file_blob.mode): + # Path is a symlink. The blob data corresponding to + # this path's object ID will be the target of the + # symlink. Follow the symlink and set repo_path to the + # location indicated in the blob data. + stream = StringIO() + file_blob.stream_data(stream) + stream.seek(0) + link_tgt = stream.read() + stream.close() + repo_path = os.path.normpath( + os.path.join(os.path.dirname(repo_path), link_tgt) + ) + else: + blob = file_blob + break + except KeyError: + # File not found or repo_path points to a directory + break + if blob is None: + continue + blob_hexsha = blob.hexsha + elif provider == 'pygit2': - fp_.write(blob.data) + tree = _get_tree_pygit2(repo, tgt_env) + if not tree: + # Branch/tag/SHA not found in repo, try the next + continue + while True: + depth += 1 + if depth > SYMLINK_RECURSE_DEPTH: + break + try: + if stat.S_ISLNK(tree[repo_path].filemode): + # Path is a symlink. The blob data corresponding to this + # path's object ID will be the target of the symlink. Follow + # the symlink and set repo_path to the location indicated + # in the blob data. 
+ link_tgt = repo['repo'][tree[repo_path].oid].data + repo_path = os.path.normpath( + os.path.join(os.path.dirname(repo_path), link_tgt) + ) + else: + oid = tree[repo_path].oid + blob = repo['repo'][oid] + except KeyError: + break + if blob is None: + continue + blob_hexsha = blob.hex + elif provider == 'dulwich': - fp_.write(blob.as_raw_string()) - with salt.utils.fopen(blobshadest, 'w+') as fp_: - fp_.write(blob_hexsha) - try: - os.remove(lk_fn) - except (OSError, IOError): - pass - fnd['rel'] = path - fnd['path'] = dest - return fnd + while True: + depth += 1 + if depth > SYMLINK_RECURSE_DEPTH: + break + prefix_dirs, _, filename = repo_path.rpartition(os.path.sep) + tree = _get_tree_dulwich(repo, tgt_env) + tree = _dulwich_walk_tree(repo['repo'], tree, prefix_dirs) + if not isinstance(tree, dulwich.objects.Tree): + # Branch/tag/SHA not found in repo + break + try: + mode, oid = tree[filename] + if stat.S_ISLNK(mode): + # Path is a symlink. The blob data corresponding to + # this path's object ID will be the target of the + # symlink. Follow the symlink and set repo_path to the + # location indicated in the blob data. + link_tgt = repo['repo'].get_object(oid).as_raw_string() + repo_path = os.path.normpath( + os.path.join(os.path.dirname(repo_path), link_tgt) + ) + else: + blob = repo['repo'].get_object(oid) + break + except KeyError: + break + if blob is None: + continue + blob_hexsha = blob.sha().hexdigest() + + salt.fileserver.wait_lock(lk_fn, dest) + if os.path.isfile(blobshadest) and os.path.isfile(dest): + with salt.utils.fopen(blobshadest, 'r') as fp_: + sha = fp_.read() + if sha == blob_hexsha: + fnd['rel'] = path + fnd['path'] = dest + return fnd + with salt.utils.fopen(lk_fn, 'w+') as fp_: + fp_.write('') + for filename in glob.glob(hashes_glob): + try: + os.remove(filename) + except Exception: + pass + with salt.utils.fopen(dest, 'w+') as fp_: + if provider == 'gitpython': + blob.stream_data(fp_) + elif provider == 'pygit2': + fp_.write(blob.data) + elif provider == 'dulwich': + fp_.write(blob.as_raw_string()) + with salt.utils.fopen(blobshadest, 'w+') as fp_: + fp_.write(blob_hexsha) + try: + os.remove(lk_fn) + except (OSError, IOError): + pass + fnd['rel'] = path + fnd['path'] = dest + return fnd return fnd diff --git a/salt/modules/ps.py b/salt/modules/ps.py index fe1e25f950..ccaef052aa 100644 --- a/salt/modules/ps.py +++ b/salt/modules/ps.py @@ -65,7 +65,12 @@ def _get_proc_name(proc): It's backward compatible with < 2.0 versions of psutil. ''' - return proc.name() if PSUTIL2 else proc.name + ret = [] + try: + ret = proc.name() if PSUTIL2 else proc.name + except (psutil.NoSuchProcess, psutil.AccessDenied): + pass + return ret def _get_proc_status(proc): diff --git a/salt/utils/args.py b/salt/utils/args.py index 9aba8bbb66..75d02c1edb 100644 --- a/salt/utils/args.py +++ b/salt/utils/args.py @@ -44,7 +44,7 @@ def parse_input(args, condition=True): _args = [] _kwargs = {} for arg in args: - if isinstance(arg, string_types) and r'\n' not in arg and '\n' not in arg: + if isinstance(arg, string_types): arg_name, arg_value = parse_kwarg(arg) if arg_name: _kwargs[arg_name] = yamlify_arg(arg_value)
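The gitfs hunks above serialize cache work by taking an exclusive fcntl lock on an update.lk file inside each repo's working directory, via a context manager. Below is a minimal, standalone sketch of that locking pattern for illustration only; unlike the patched wait_for_write_lock(), it does not consult salt.utils.is_fcntl_available(), so it assumes a POSIX platform where the fcntl module exists, and the function name is hypothetical.

    import contextlib
    import fcntl
    import os


    @contextlib.contextmanager
    def file_write_lock(path):
        # Hold an exclusive advisory lock on ``path`` for the duration of the
        # ``with`` block; concurrent callers block in flock() until it is freed.
        fhandle = open(path, 'w')
        fcntl.flock(fhandle.fileno(), fcntl.LOCK_EX)
        try:
            yield
        finally:
            fcntl.flock(fhandle.fileno(), fcntl.LOCK_UN)
            fhandle.close()
            try:
                # Best-effort cleanup, mirroring the os.remove() in the patch.
                os.remove(path)
            except OSError:
                pass


    # Hypothetical usage: two processes updating the same gitfs cachedir now
    # run their fetches one at a time instead of racing each other.
    # with file_write_lock('/var/cache/salt/master/gitfs/<hash>/update.lk'):
    #     fetch_and_update_cache()

Note that flock() locks are advisory, so the serialization only holds because every cooperating code path in the patch (update(), purge_cache(), find_file()) acquires the same per-repo lock through the same helper.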
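The salt/modules/ps.py hunk addresses a race in which a process enumerated by psutil exits, or becomes unreadable, before its name is fetched, which previously let the exception escape _get_proc_name(). A standalone sketch of the same guard, assuming psutil 2.x or later (where name is a method) is installed:

    import psutil


    def safe_proc_name(proc, default=''):
        # Return the process name, or ``default`` if the process has already
        # exited or cannot be inspected by the current user.
        try:
            return proc.name()
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            return default


    # for proc in psutil.process_iter():
    #     print(safe_proc_name(proc))

The patched helper falls back to an empty list rather than a string; the default value used here is illustrative only.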
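The salt/utils/args.py hunk drops the guard that skipped kwarg parsing for any string argument containing a newline (literal or the two-character sequence \n). The snippet below is an independent illustration of why the split should still happen for multi-line values; it is not Salt's actual parse_kwarg(), and the function name is hypothetical.

    def split_kwarg(arg):
        # Split ``key=value`` on the first ``=``; anything else is positional.
        if '=' in arg and not arg.startswith('='):
            key, _, value = arg.partition('=')
            return key, value
        return None, arg


    # A value spanning multiple lines is still a keyword argument:
    assert split_kwarg('contents=line one\nline two') == ('contents', 'line one\nline two')
    assert split_kwarg('positional-arg') == (None, 'positional-arg')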