Merge pull request #24930 from jacksontj/2015.5

Don't refetch file templates 100% of the time -- a performance optimization for templated files.

Commit f52f7e1d20
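The change below comes down to one pattern: before re-fetching a templated source file, check whether a copy already sitting in the minion cache matches the expected checksum, and only download again when it does not. A minimal standalone sketch of that idea follows; it is not Salt's code, and fetch_if_stale() and its arguments are invented for illustration.

import hashlib
import os


def get_file_hash(path, form='sha256'):
    # Hash a local file with the given algorithm.
    h = hashlib.new(form)
    with open(path, 'rb') as fp:
        for chunk in iter(lambda: fp.read(65536), b''):
            h.update(chunk)
    return h.hexdigest()


def fetch_if_stale(cached_path, source_sum, fetch):
    # Reuse the cached file when its hash matches the upstream hash described
    # by source_sum ({'hash_type': ..., 'hsum': ...}); otherwise call fetch()
    # to download it again.
    if cached_path and os.path.exists(cached_path):
        if get_file_hash(cached_path, form=source_sum['hash_type']) == source_sum['hsum']:
            return cached_path
    return fetch()

The get_managed() hunk further down applies the same idea with cp.is_cached, cp.hash_file and get_hash.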
@@ -356,11 +356,14 @@ class Client(object):
             self.opts['cachedir'], 'localfiles', path.lstrip('/'))
         filesdest = os.path.join(
             self.opts['cachedir'], 'files', saltenv, path.lstrip('salt://'))
+        extrndest = self._extrn_path(path, saltenv)
 
         if os.path.exists(filesdest):
             return filesdest
         elif os.path.exists(localsfilesdest):
             return localsfilesdest
+        elif os.path.exists(extrndest):
+            return extrndest
 
         return ''
 
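For context: the hunk above teaches the cache lookup to also consider the extrn_files destination returned by the new _extrn_path() helper, alongside the existing localfiles and files locations. A standalone sketch of that ordered lookup; the cachedir value and file names are made up for illustration and are not taken from the diff.

import os


def first_cached(candidates):
    # Return the first candidate path that already exists on disk,
    # or '' when nothing has been cached yet.
    for dest in candidates:
        if os.path.exists(dest):
            return dest
    return ''


cachedir = '/var/cache/salt/minion'  # assumed location, for illustration only
cached = first_cached([
    os.path.join(cachedir, 'files', 'base', 'top.sls'),
    os.path.join(cachedir, 'localfiles', 'top.sls'),
    os.path.join(cachedir, 'extrn_files', 'base', 'example.com', 'top.sls'),
])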
@@ -537,13 +540,7 @@ class Client(object):
                 netloc = salt.utils.sanitize_win_path_string(url_data.netloc)
             else:
                 netloc = url_data.netloc
-            dest = salt.utils.path_join(
-                self.opts['cachedir'],
-                'extrn_files',
-                saltenv,
-                netloc,
-                url_data.path
-            )
+            dest = self._extrn_path(url, saltenv)
             destdir = os.path.dirname(dest)
             if not os.path.isdir(destdir):
                 os.makedirs(destdir)
@@ -670,13 +667,7 @@ class Client(object):
             return ''
         if not dest:
             # No destination passed, set the dest as an extrn_files cache
-            dest = salt.utils.path_join(
-                self.opts['cachedir'],
-                'extrn_files',
-                saltenv,
-                url_data.netloc,
-                url_data.path
-            )
+            dest = self._extrn_path(url, saltenv)
             # If Salt generated the dest name, create any required dirs
             makedirs = True
 
@@ -690,6 +681,20 @@ class Client(object):
         shutil.move(data['data'], dest)
         return dest
 
+    def _extrn_path(self, url, saltenv):
+        '''
+        Return the extn_filepath for a given url
+        '''
+        url_data = urlparse(url)
+
+        return salt.utils.path_join(
+            self.opts['cachedir'],
+            'extrn_files',
+            saltenv,
+            url_data.netloc,
+            url_data.path
+        )
+
 
 class LocalClient(Client):
     '''
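The new _extrn_path() helper centralizes the extrn_files cache-path construction that the two hunks above previously inlined: it maps a URL onto <cachedir>/extrn_files/<saltenv>/<netloc>/<path>. A self-contained approximation is shown below; it uses os.path.join and urllib.parse instead of Salt's salt.utils.path_join, and the cachedir and URL are invented for the example.

import os
from urllib.parse import urlparse


def extrn_path(cachedir, url, saltenv):
    # Map a URL onto a stable location under the minion cache so that the
    # cache check and the download path agree on where a previously fetched
    # copy lives.
    url_data = urlparse(url)
    return os.path.join(cachedir,
                        'extrn_files',
                        saltenv,
                        url_data.netloc,
                        url_data.path.lstrip('/'))


print(extrn_path('/var/cache/salt/minion',
                 'https://example.com/files/app.conf', 'base'))
# /var/cache/salt/minion/extrn_files/base/example.com/files/app.conf

Because every call site now shares one path scheme, a file fetched by the download path can later be found again by the cache check added in the first hunk. (The lstrip('/') is only needed here because os.path.join discards earlier components when given an absolute path; Salt's path_join is meant to handle that case itself.)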
@@ -2758,8 +2758,60 @@ def get_managed(
     # Copy the file to the minion and templatize it
     sfn = ''
     source_sum = {}
-    if template and source:
-        sfn = __salt__['cp.cache_file'](source, saltenv)
+    # if we have a source defined, lets figure out what the hash is
+    if source:
+        urlparsed_source = _urlparse(source)
+        if urlparsed_source.scheme == 'salt':
+            source_sum = __salt__['cp.hash_file'](source, saltenv)
+            if not source_sum:
+                return '', {}, 'Source file {0} not found'.format(source)
+        # if its a local file
+        elif urlparsed_source.scheme == 'file':
+            source_sum = get_hash(urlparsed_source.path)
+        elif source.startswith('/'):
+            source_sum = get_hash(source)
+        elif source_hash:
+            protos = ('salt', 'http', 'https', 'ftp', 'swift')
+            if _urlparse(source_hash).scheme in protos:
+                # The source_hash is a file on a server
+                hash_fn = __salt__['cp.cache_file'](source_hash, saltenv)
+                if not hash_fn:
+                    return '', {}, 'Source hash file {0} not found'.format(
+                        source_hash)
+                source_sum = extract_hash(hash_fn, '', name)
+                if source_sum is None:
+                    return '', {}, ('Source hash file {0} contains an invalid '
+                        'hash format, it must be in the format <hash type>=<hash>.'
+                        ).format(source_hash)
+
+            else:
+                # The source_hash is a hash string
+                comps = source_hash.split('=')
+                if len(comps) < 2:
+                    return '', {}, ('Source hash file {0} contains an '
+                                    'invalid hash format, it must be in '
+                                    'the format <hash type>=<hash>'
+                                    ).format(source_hash)
+                source_sum['hsum'] = comps[1].strip()
+                source_sum['hash_type'] = comps[0].strip()
+        else:
+            return '', {}, ('Unable to determine upstream hash of'
+                            ' source file {0}').format(source)
+
+    # if the file is a template we need to actually template the file to get
+    # a checksum, but we can cache the template itselt
+    if template:
+        # check if we have the template cached
+        template_dest = __salt__['cp.is_cached'](source, saltenv)
+        if template_dest:
+            comps = source_hash.split('=')
+            cached_template_sum = get_hash(template_dest, form=source_sum['hash_type'])
+            if cached_template_sum == source_sum['hsum']:
+                sfn = template_dest
+        # if we didn't have the template file, lets get it
+        if not sfn:
+            sfn = __salt__['cp.cache_file'](source, saltenv)
+
     # exists doesn't play nice with sfn as bool
     # but if cache failed, sfn == False
     if not sfn or not os.path.exists(sfn):
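The <hash type>=<hash> parsing of source_hash in the hunk above is what builds the source_sum dict ({'hash_type': ..., 'hsum': ...}) that the template-cache check then compares against get_hash() of the cached copy. A standalone sketch of just that split (not a Salt helper; the md5 value in the example is the hash of an empty string):

def parse_source_hash(source_hash):
    # Split a '<hash type>=<hash>' string into the dict shape used above.
    comps = source_hash.split('=')
    if len(comps) < 2:
        raise ValueError(
            'invalid hash format, it must be in the format <hash type>=<hash>')
    return {'hash_type': comps[0].strip(), 'hsum': comps[1].strip()}


assert parse_source_hash('md5=d41d8cd98f00b204e9800998ecf8427e') == {
    'hash_type': 'md5',
    'hsum': 'd41d8cd98f00b204e9800998ecf8427e',
}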
@@ -2798,40 +2850,7 @@ def get_managed(
         else:
             __clean_tmp(sfn)
             return sfn, {}, data['data']
-    else:
-        # Copy the file down if there is a source
-        if source:
-            if _urlparse(source).scheme == 'salt':
-                source_sum = __salt__['cp.hash_file'](source, saltenv)
-                if not source_sum:
-                    return '', {}, 'Source file {0} not found'.format(source)
-            elif source_hash:
-                protos = ['salt', 'http', 'https', 'ftp', 'swift']
-                if _urlparse(source_hash).scheme in protos:
-                    # The source_hash is a file on a server
-                    hash_fn = __salt__['cp.cache_file'](source_hash, saltenv)
-                    if not hash_fn:
-                        return '', {}, 'Source hash file {0} not found'.format(
-                            source_hash)
-                    source_sum = extract_hash(hash_fn, '', name)
-                    if source_sum is None:
-                        return '', {}, ('Source hash file {0} contains an invalid '
-                            'hash format, it must be in the format <hash type>=<hash>.'
-                            ).format(source_hash)
-
-                else:
-                    # The source_hash is a hash string
-                    comps = source_hash.split('=')
-                    if len(comps) < 2:
-                        return '', {}, ('Source hash file {0} contains an '
-                                        'invalid hash format, it must be in '
-                                        'the format <hash type>=<hash>'
-                                        ).format(source_hash)
-                    source_sum['hsum'] = comps[1].strip()
-                    source_sum['hash_type'] = comps[0].strip()
-            else:
-                return '', {}, ('Unable to determine upstream hash of'
-                                ' source file {0}').format(source)
     return sfn, source_sum, ''