PEP8 (two lines between functions)

commit 4adb52f0c1
parent efef6f7dd4
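The diff below applies PEP 8's convention that top-level function definitions are separated by two blank lines; each hunk simply adds one blank line before a `def`. A minimal sketch of the convention for reference (function names are placeholders, not taken from this module):

    # One blank line between top-level defs (what the file had; flagged by pycodestyle E302):
    def first_helper():
        pass

    def second_helper():
        pass

    # Two blank lines, as required by PEP 8 and applied throughout this commit:
    def first_helper():
        pass


    def second_helper():
        pass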
@@ -62,6 +62,7 @@ log = logging.getLogger(__name__)
 _s3_cache_expire = 30 # cache for 30 seconds
 _s3_sync_on_update = True # sync cache on update rather than jit
 
+
 def envs():
     '''
     Return a list of directories within the bucket that can be
@@ -72,6 +73,7 @@ def envs():
     metadata = _init()
     return metadata.keys()
 
+
 def update():
     '''
     Update the cache file for the bucket.
@@ -95,6 +97,7 @@ def update():
 
         log.info('Sync local cache from S3 completed.')
 
+
 def find_file(path, env='base', **kwargs):
     '''
     Look through the buckets cache file for a match.
@@ -130,6 +133,7 @@ def find_file(path, env='base', **kwargs):
 
     return fnd
 
+
 def file_hash(load, fnd):
     '''
     Return an MD5 file hash
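The hunk above ends at `file_hash()`, which promises an MD5 digest of a cached file. For reference, a minimal sketch of computing such a digest with the standard library; the helper name and chunk size are illustrative and not part of this module:

    import hashlib

    def md5_of_cached_file(path, chunk_size=8192):
        # Hash the file in chunks so large cached objects do not have to be
        # read into memory at once.
        digest = hashlib.md5()
        with open(path, 'rb') as fp_:
            for chunk in iter(lambda: fp_.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()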
@@ -154,6 +158,7 @@ def file_hash(load, fnd):
 
     return ret
 
+
 def serve_file(load, fnd):
     '''
     Return a chunk from a file based on the data received
@@ -187,6 +192,7 @@ def serve_file(load, fnd):
         ret['data'] = data
     return ret
 
+
 def file_list(load):
     '''
     Return a list of all files on the file server in a specified environment
@@ -209,6 +215,7 @@ def file_list(load):
 
     return ret
 
+
 def file_list_emptydirs(load):
     '''
     Return a list of all empty directories on the master
@@ -218,6 +225,7 @@ def file_list_emptydirs(load):
 
     return []
 
+
 def dir_list(load):
     '''
     Return a list of all directories on the master
@@ -243,6 +251,7 @@ def dir_list(load):
 
     return ret
 
+
 def _get_s3_key():
     '''
     Get AWS keys from pillar or config
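The hunk above ends at `_get_s3_key()`, whose docstring says the AWS keys come from pillar or config. A hedged sketch of the option shape that function would read; the `s3.key` / `s3.keyid` names follow the usual s3 option naming but are shown here as an assumption, with placeholder values:

    # Assumed master config / pillar options read by _get_s3_key():
    __opts__ = {
        's3.keyid': 'AKIAEXAMPLEKEYID',   # AWS access key id (placeholder)
        's3.key': 'example-secret-key',   # AWS secret access key (placeholder)
    }

    key = __opts__.get('s3.key')
    keyid = __opts__.get('s3.keyid')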
@@ -253,6 +262,7 @@ def _get_s3_key():
 
     return key, keyid
 
+
 def _init():
     '''
     Connect to S3 and download the metadata for each file in all buckets
@@ -269,6 +279,7 @@ def _init():
         # bucket files cache expired
         return _refresh_buckets_cache_file(cache_file)
 
+
 def _get_cache_dir():
     '''
     Return the path to the s3cache dir
@@ -277,6 +288,7 @@ def _get_cache_dir():
     # Or is that making too many assumptions?
     return os.path.join(__opts__['cachedir'], 's3cache')
 
+
 def _get_cached_file_name(bucket_name, env, path):
     '''
     Return the cached file name for a bucket path file
@@ -290,6 +302,7 @@ def _get_cached_file_name(bucket_name, env, path):
 
     return file_path
 
+
 def _get_buckets_cache_filename():
     '''
     Return the filename of the cache for bucket contents.
@@ -302,6 +315,7 @@ def _get_buckets_cache_filename():
 
     return os.path.join(cache_dir, 'buckets_files.cache')
 
+
 def _refresh_buckets_cache_file(cache_file):
     '''
     Retrieve the content of all buckets and cache the metadata to the buckets
@@ -375,6 +389,7 @@ def _refresh_buckets_cache_file(cache_file):
 
     return metadata
 
+
 def _read_buckets_cache_file(cache_file):
     '''
     Return the contents of the buckets cache file
@@ -387,6 +402,7 @@ def _read_buckets_cache_file(cache_file):
 
     return data
 
+
 def _find_files(metadata, dirs_only=False):
     '''
     Looks for all the files in the S3 bucket cache metadata
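The hunks around `_refresh_buckets_cache_file()` and `_read_buckets_cache_file()` persist the per-bucket metadata to `buckets_files.cache` so `_init()` can skip S3 until the cache is older than `_s3_cache_expire`. A rough sketch of that round trip, assuming the metadata dict is pickled to disk; the serialization format and helper names are assumptions, not the module's actual implementation:

    import pickle

    def write_cache_sketch(metadata, cache_file):
        # Hypothetical stand-in for the refresh path: dump the metadata dict
        # so later runs can reuse it without contacting S3.
        with open(cache_file, 'wb') as fp_:
            pickle.dump(metadata, fp_)

    def read_cache_sketch(cache_file):
        # Hypothetical stand-in for the read path: load the cached dict back.
        with open(cache_file, 'rb') as fp_:
            return pickle.load(fp_)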
@@ -405,6 +421,7 @@ def _find_files(metadata, dirs_only=False):
 
     return ret
 
+
 def _find_file_meta(metadata, bucket_name, env, path):
     '''
     Looks for a file's metadata in the S3 bucket cache file
@@ -418,6 +435,7 @@ def _find_file_meta(metadata, bucket_name, env, path):
         if 'Key' in item_meta and item_meta['Key'] == path:
             return item_meta
 
+
 def _get_buckets():
     '''
     Return the configuration buckets
@@ -425,6 +443,7 @@ def _get_buckets():
 
     return __opts__['s3.buckets'] if 's3.buckets' in __opts__ else {}
 
+
 def _get_file_from_s3(metadata, env, bucket_name, path, cached_file_path):
     '''
     Checks the local cache for the file, if it's old or missing go grab the
@@ -453,6 +472,7 @@ def _get_file_from_s3(metadata, env, bucket_name, path, cached_file_path):
         path=urllib.quote(path),
         local_file=cached_file_path)
 
+
 def _trim_env_off_path(paths, env, trim_slash=False):
     '''
     Return a list of file paths with the env directory removed
@@ -462,6 +482,7 @@ def _trim_env_off_path(paths, env, trim_slash=False):
 
     return map(lambda d: d[env_len:slash_len], paths)
 
+
 def _is_env_per_bucket():
     '''
     Return the configuration mode, either buckets per environment or a list of
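The final hunks touch `_get_buckets()`, which falls back to `{}` when `s3.buckets` is absent, and `_is_env_per_bucket()`, whose docstring distinguishes buckets per environment from a plain list. Based on those two functions, the option apparently takes one of two shapes; both sketches below use placeholder bucket names and are assumptions, not configuration from this repository:

    # Shape 1: a flat list of buckets shared by every environment (assumed)
    __opts__['s3.buckets'] = ['example-salt-bucket']

    # Shape 2: one list of buckets per environment, the mode that
    # _is_env_per_bucket() would report (assumed)
    __opts__['s3.buckets'] = {
        'base': ['example-base-bucket'],
        'dev': ['example-dev-bucket'],
    }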