PEP8 (two lines between functions)

Erik Johnson 2013-08-30 17:24:26 -05:00
parent efef6f7dd4
commit 4adb52f0c1


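The change is mechanical throughout: PEP8 (check E302) asks for two blank lines between top-level definitions, and each hunk below adds the missing second blank line before a def. A minimal illustration with hypothetical functions:

    # Before: one blank line between top-level functions (flagged as E302).
    def first():
        pass

    def second():
        pass

    # After: two blank lines, as this commit applies across s3fs.py.
    def first():
        pass


    def second():
        pass
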
@@ -62,6 +62,7 @@ log = logging.getLogger(__name__)
 _s3_cache_expire = 30  # cache for 30 seconds
 _s3_sync_on_update = True  # sync cache on update rather than jit
 
+
 def envs():
     '''
     Return a list of directories within the bucket that can be
@@ -72,6 +73,7 @@ def envs():
     metadata = _init()
     return metadata.keys()
 
+
 def update():
     '''
     Update the cache file for the bucket.
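
As the envs() body above shows, the environment list falls out of the cache for free: _init() returns a mapping keyed by environment name, so its keys are the environments. The shape below is illustrative only, not the module's exact structure:

    # Hypothetical metadata shape; the real one is built by
    # _refresh_buckets_cache_file() further down this diff.
    metadata = {
        'base': {'my-bucket': [{'Key': 'base/top.sls'}]},
        'dev': {'my-bucket': [{'Key': 'dev/webserver/init.sls'}]},
    }
    print(list(metadata.keys()))  # ['base', 'dev'] (order not guaranteed)
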
@@ -95,6 +97,7 @@ def update():
         log.info('Sync local cache from S3 completed.')
 
+
 def find_file(path, env='base', **kwargs):
     '''
     Look through the buckets cache file for a match.
@@ -130,6 +133,7 @@ def find_file(path, env='base', **kwargs):
     return fnd
 
+
 def file_hash(load, fnd):
     '''
     Return an MD5 file hash
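
file_hash() advertises an MD5 digest of the cached copy. A standalone sketch of computing one; the module's actual helper calls are outside this hunk:

    import hashlib

    def md5_of_file(path):
        # Stream in chunks so large cached objects never load into memory at once.
        md5 = hashlib.md5()
        with open(path, 'rb') as fh:
            for chunk in iter(lambda: fh.read(8192), b''):
                md5.update(chunk)
        return md5.hexdigest()
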
@@ -154,6 +158,7 @@ def file_hash(load, fnd):
     return ret
 
+
 def serve_file(load, fnd):
     '''
     Return a chunk from a file based on the data received
@@ -187,6 +192,7 @@ def serve_file(load, fnd):
         ret['data'] = data
     return ret
 
+
 def file_list(load):
     '''
     Return a list of all files on the file server in a specified environment
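
serve_file() fills ret['data'] with one chunk of the cached file, read at the offset the minion requests. The names below (loc, buffer_size) are assumptions for illustration; the hunk only shows the tail of the function:

    def read_chunk(cached_file_path, loc, buffer_size):
        # Seek to the minion-requested offset and return at most one buffer.
        with open(cached_file_path, 'rb') as fh:
            fh.seek(loc)
            return fh.read(buffer_size)
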
@@ -209,6 +215,7 @@ def file_list(load):
     return ret
 
+
 def file_list_emptydirs(load):
     '''
     Return a list of all empty directories on the master
@@ -218,6 +225,7 @@ def file_list_emptydirs(load):
     return []
 
+
 def dir_list(load):
     '''
     Return a list of all directories on the master
@@ -243,6 +251,7 @@ def dir_list(load):
     return ret
 
+
 def _get_s3_key():
     '''
     Get AWS keys from pillar or config
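
By analogy with the s3.buckets lookup later in this diff, the key/keyid pair plausibly comes from same-named master config options; the option names below are assumed, not shown in the hunk:

    # Assumed option names, mirroring the 's3.buckets' pattern used below;
    # __opts__ is stubbed here to stand in for the injected master config.
    __opts__ = {'s3.keyid': 'AKIA...', 's3.key': '...'}
    key = __opts__['s3.key'] if 's3.key' in __opts__ else None
    keyid = __opts__['s3.keyid'] if 's3.keyid' in __opts__ else None
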
@@ -253,6 +262,7 @@ def _get_s3_key():
     return key, keyid
 
+
 def _init():
     '''
     Connect to S3 and download the metadata for each file in all buckets
@@ -269,6 +279,7 @@ def _init():
     # bucket files cache expired
     return _refresh_buckets_cache_file(cache_file)
 
+
 def _get_cache_dir():
     '''
     Return the path to the s3cache dir
@@ -277,6 +288,7 @@ def _get_cache_dir():
     # Or is that making too many assumptions?
     return os.path.join(__opts__['cachedir'], 's3cache')
 
+
 def _get_cached_file_name(bucket_name, env, path):
     '''
     Return the cached file name for a bucket path file
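
_get_cached_file_name() maps a (bucket, env, path) triple onto the s3cache directory from the previous hunk. One plausible layout, assumed for illustration since the body is elided here:

    import os

    def cached_file_name(cachedir, bucket_name, env, path):
        # e.g. /var/cache/salt/master/s3cache/my-bucket/base/top.sls
        return os.path.join(cachedir, 's3cache', bucket_name, env, path)

    print(cached_file_name('/var/cache/salt/master', 'my-bucket', 'base', 'top.sls'))
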
@@ -290,6 +302,7 @@ def _get_cached_file_name(bucket_name, env, path):
     return file_path
 
+
 def _get_buckets_cache_filename():
     '''
     Return the filename of the cache for bucket contents.
@@ -302,6 +315,7 @@ def _get_buckets_cache_filename():
     return os.path.join(cache_dir, 'buckets_files.cache')
 
+
 def _refresh_buckets_cache_file(cache_file):
     '''
     Retrieve the content of all buckets and cache the metadata to the buckets
@@ -375,6 +389,7 @@ def _refresh_buckets_cache_file(cache_file):
     return metadata
 
+
 def _read_buckets_cache_file(cache_file):
     '''
     Return the contents of the buckets cache file
@@ -387,6 +402,7 @@ def _read_buckets_cache_file(cache_file):
     return data
 
+
 def _find_files(metadata, dirs_only=False):
     '''
     Looks for all the files in the S3 bucket cache metadata
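
_find_files() walks the cached bucket metadata. S3 has no real directories, so a common convention is to treat keys ending in '/' as directory markers; a sketch under that assumption (the module's exact filtering is elided by the hunk):

    def find_files(bucket_files, dirs_only=False):
        # Keys ending in '/' act as directory markers in S3 listings.
        ret = []
        for item in bucket_files:
            key = item.get('Key', '')
            if key and key.endswith('/') == dirs_only:
                ret.append(key)
        return ret

    entries = [{'Key': 'base/top.sls'}, {'Key': 'base/webserver/'}]
    print(find_files(entries))                  # ['base/top.sls']
    print(find_files(entries, dirs_only=True))  # ['base/webserver/']
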
@@ -405,6 +421,7 @@ def _find_files(metadata, dirs_only=False):
     return ret
 
+
 def _find_file_meta(metadata, bucket_name, env, path):
     '''
     Looks for a file's metadata in the S3 bucket cache file
@@ -418,6 +435,7 @@ def _find_file_meta(metadata, bucket_name, env, path):
         if 'Key' in item_meta and item_meta['Key'] == path:
             return item_meta
 
+
 def _get_buckets():
     '''
     Return the configuration buckets
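
The two lines of _find_file_meta() visible above are a plain linear scan over the cached listing. The same scan standalone, with illustrative entries:

    def match_key(bucket_files, path):
        # Same scan as in _find_file_meta(), isolated for illustration.
        for item_meta in bucket_files:
            if 'Key' in item_meta and item_meta['Key'] == path:
                return item_meta

    files = [{'Key': 'base/top.sls', 'ETag': '"abc123"'}]
    print(match_key(files, 'base/top.sls'))  # {'Key': 'base/top.sls', 'ETag': '"abc123"'}
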
@@ -425,6 +443,7 @@ def _get_buckets():
     return __opts__['s3.buckets'] if 's3.buckets' in __opts__ else {}
 
+
 def _get_file_from_s3(metadata, env, bucket_name, path, cached_file_path):
     '''
     Checks the local cache for the file, if it's old or missing go grab the
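
_get_buckets() returns whatever s3.buckets holds in master config, and _is_env_per_bucket() at the end of the diff distinguishes the two supported shapes. Illustrative values only, with __opts__ stubbed in:

    # Flat mode: one list of buckets; environments live as top-level
    # prefixes inside each bucket.
    __opts__ = {'s3.buckets': ['bucket1', 'bucket2']}

    # Per-environment mode: a mapping of env name to its buckets.
    __opts__ = {'s3.buckets': {'base': ['bucket1'], 'dev': ['bucket2']}}

    buckets = __opts__['s3.buckets'] if 's3.buckets' in __opts__ else {}
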
@@ -453,6 +472,7 @@ def _get_file_from_s3(metadata, env, bucket_name, path, cached_file_path):
         path=urllib.quote(path),
         local_file=cached_file_path)
 
+
 def _trim_env_off_path(paths, env, trim_slash=False):
     '''
     Return a list of file paths with the env directory removed
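
Per its docstring, _get_file_from_s3() serves the local copy when fresh and re-downloads otherwise; the 30-second TTL comes from _s3_cache_expire at the top of the diff. A sketch of the freshness test only (the download call, partially visible above, is the module's own code and is not reproduced here):

    import os
    import time

    def cached_copy_is_stale(cached_file_path, expire=30):
        # Missing, or older than the TTL from _s3_cache_expire: fetch again.
        if not os.path.isfile(cached_file_path):
            return True
        return time.time() - os.path.getmtime(cached_file_path) > expire
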
@@ -462,6 +482,7 @@ def _trim_env_off_path(paths, env, trim_slash=False):
     return map(lambda d: d[env_len:slash_len], paths)
 
+
 def _is_env_per_bucket():
     '''
     Return the configuration mode, either buckets per environment or a list of
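
Finally, the one-liner in _trim_env_off_path() relies on two bindings the hunk does not show; plausibly env_len skips the '<env>/' prefix and slash_len drops a trailing slash when trim_slash is set. A standalone version under those assumptions, with a list comprehension in place of the module's map/lambda:

    def trim_env_off_path(paths, env, trim_slash=False):
        env_len = len(env) + 1                  # skip '<env>/'
        slash_len = -1 if trim_slash else None  # None keeps the whole tail
        return [d[env_len:slash_len] for d in paths]

    print(trim_env_off_path(['base/top.sls'], 'base'))
    # ['top.sls']
    print(trim_env_off_path(['base/dir1/', 'base/dir2/'], 'base', trim_slash=True))
    # ['dir1', 'dir2'] -- trim_slash only makes sense when every path ends in '/'
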