Merge pull request #25695 from stanislavb/expose-aws-region-config-and-fetch-region-from-metadata

Configurable AWS region & region from IAM metadata
Nicole Thomas 2015-07-24 13:36:40 -06:00
commit d330ef0d81
7 changed files with 91 additions and 34 deletions
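In practice, the new option sits alongside the existing s3.* settings in the master or minion configuration. A minimal sketch, reusing the placeholder credentials that already appear in the module docstring further down (the values are illustrative only):

    s3.keyid: GKTADJGHEIQSXMKKRBJ08H
    s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
    s3.location: eu-central-1

If s3.location is omitted, the code added in this pull request tries the EC2 instance metadata service and finally falls back to us-east-1.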

View File

@@ -563,7 +563,9 @@ class Client(object):
                     service_url=self.opts.get('s3.service_url',
                                               None),
                     verify_ssl=self.opts.get('s3.verify_ssl',
-                                             True))
+                                             True),
+                    location=self.opts.get('s3.location',
+                                           None))
                 return dest
             except Exception:
                 raise MinionError('Could not fetch from {0}'.format(url))
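With the file client change above, anything that fetches an s3:// URL through the minion's opts now picks up s3.location as well. A hedged usage sketch, assuming the cp execution module (which is not part of this diff) and a hypothetical bucket and path:

    salt myminion cp.get_url s3://mybucket/path/to/file /tmp/file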

View File

@@ -320,8 +320,11 @@ def _get_s3_key():
     verify_ssl = __opts__['s3.verify_ssl'] \
         if 's3.verify_ssl' in __opts__ \
         else None
+    location = __opts__['s3.location'] \
+        if 's3.location' in __opts__ \
+        else None
 
-    return key, keyid, service_url, verify_ssl
+    return key, keyid, service_url, verify_ssl, location
 
 
 def _init():
@@ -391,7 +394,7 @@ def _refresh_buckets_cache_file(cache_file):
     log.debug('Refreshing buckets cache file')
 
-    key, keyid, service_url, verify_ssl = _get_s3_key()
+    key, keyid, service_url, verify_ssl, location = _get_s3_key()
     metadata = {}
 
     # helper s3 query function
@@ -402,6 +405,7 @@ def _refresh_buckets_cache_file(cache_file):
                         bucket=bucket,
                         service_url=service_url,
                         verify_ssl=verify_ssl,
+                        location=location,
                         return_bin=False)
 
     if _is_env_per_bucket():
@@ -582,7 +586,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
     Checks the local cache for the file, if it's old or missing go grab the
     file from S3 and update the cache
     '''
-    key, keyid, service_url, verify_ssl = _get_s3_key()
+    key, keyid, service_url, verify_ssl, location = _get_s3_key()
 
     # check the local cache...
     if os.path.isfile(cached_file_path):
@@ -617,6 +621,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
                     bucket=bucket_name,
                     service_url=service_url,
                     verify_ssl=verify_ssl,
+                    location=location,
                     path=_quote(path),
                     local_file=cached_file_path,
                     full_headers=True
@@ -645,6 +650,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
             bucket=bucket_name,
             service_url=service_url,
             verify_ssl=verify_ssl,
+            location=location,
             path=_quote(path),
             local_file=cached_file_path
         )

View File

@@ -30,12 +30,18 @@ Connection module for Amazon S3
     SSL verification may also be turned off in the configuration:
 
         s3.verify_ssl: False
 
     This is required if using S3 bucket names that contain a period, as
     these will not match Amazon's S3 wildcard certificates. Certificate
     verification is enabled by default.
 
+    AWS region may be specified in the configuration:
+
+        s3.location: eu-central-1
+
+    Default is us-east-1.
+
     This module should be usable to query other S3-like services, such as
     Eucalyptus.
@@ -61,7 +67,7 @@ def __virtual__():
 
 
 def delete(bucket, path=None, action=None, key=None, keyid=None,
-           service_url=None, verify_ssl=None):
+           service_url=None, verify_ssl=None, location=None):
     '''
     Delete a bucket, or delete an object from a bucket.
 
@@ -73,8 +79,8 @@ def delete(bucket, path=None, action=None, key=None, keyid=None,
 
         salt myminion s3.delete mybucket remoteobject
     '''
-    key, keyid, service_url, verify_ssl = _get_key(key, keyid, service_url,
-                                                   verify_ssl)
+    key, keyid, service_url, verify_ssl, location = _get_key(
+        key, keyid, service_url, verify_ssl, location)
 
     return salt.utils.s3.query(method='DELETE',
                                bucket=bucket,
@@ -83,12 +89,13 @@ def delete(bucket, path=None, action=None, key=None, keyid=None,
                               key=key,
                               keyid=keyid,
                               service_url=service_url,
-                              verify_ssl=verify_ssl)
+                              verify_ssl=verify_ssl,
+                              location=location)
 
 
 def get(bucket=None, path=None, return_bin=False, action=None,
         local_file=None, key=None, keyid=None, service_url=None,
-        verify_ssl=None):
+        verify_ssl=None, location=None):
     '''
     List the contents of a bucket, or return an object from a bucket. Set
     return_bin to True in order to retrieve an object wholesale. Otherwise,
@@ -140,8 +147,8 @@ def get(bucket=None, path=None, return_bin=False, action=None,
 
         salt myminion s3.get mybucket myfile.png action=acl
     '''
-    key, keyid, service_url, verify_ssl = _get_key(key, keyid, service_url,
-                                                   verify_ssl)
+    key, keyid, service_url, verify_ssl, location = _get_key(
+        key, keyid, service_url, verify_ssl, location)
 
     return salt.utils.s3.query(method='GET',
                                bucket=bucket,
@@ -152,11 +159,12 @@ def get(bucket=None, path=None, return_bin=False, action=None,
                               key=key,
                               keyid=keyid,
                               service_url=service_url,
-                              verify_ssl=verify_ssl)
+                              verify_ssl=verify_ssl,
+                              location=location)
 
 
 def head(bucket, path=None, key=None, keyid=None, service_url=None,
-         verify_ssl=None):
+         verify_ssl=None, location=None):
     '''
     Return the metadata for a bucket, or an object in a bucket.
 
@@ -167,8 +175,8 @@ def head(bucket, path=None, key=None, keyid=None, service_url=None,
         salt myminion s3.head mybucket
         salt myminion s3.head mybucket myfile.png
     '''
-    key, keyid, service_url, verify_ssl = _get_key(key, keyid, service_url,
-                                                   verify_ssl)
+    key, keyid, service_url, verify_ssl, location = _get_key(
+        key, keyid, service_url, verify_ssl, location)
 
     return salt.utils.s3.query(method='HEAD',
                                bucket=bucket,
@@ -177,11 +185,12 @@ def head(bucket, path=None, key=None, keyid=None, service_url=None,
                               keyid=keyid,
                               service_url=service_url,
                               verify_ssl=verify_ssl,
+                              location=location,
                               full_headers=True)
 
 
 def put(bucket, path=None, return_bin=False, action=None, local_file=None,
-        key=None, keyid=None, service_url=None, verify_ssl=None):
+        key=None, keyid=None, service_url=None, verify_ssl=None, location=None):
     '''
     Create a new bucket, or upload an object to a bucket.
 
@@ -197,8 +206,8 @@ def put(bucket, path=None, return_bin=False, action=None, local_file=None,
 
         salt myminion s3.put mybucket remotepath local_file=/path/to/file
     '''
-    key, keyid, service_url, verify_ssl = _get_key(key, keyid, service_url,
-                                                   verify_ssl)
+    key, keyid, service_url, verify_ssl, location = _get_key(
+        key, keyid, service_url, verify_ssl, location)
 
     return salt.utils.s3.query(method='PUT',
                                bucket=bucket,
@@ -209,10 +218,11 @@ def put(bucket, path=None, return_bin=False, action=None, local_file=None,
                               key=key,
                               keyid=keyid,
                               service_url=service_url,
-                              verify_ssl=verify_ssl)
+                              verify_ssl=verify_ssl,
+                              location=location)
 
 
-def _get_key(key, keyid, service_url, verify_ssl):
+def _get_key(key, keyid, service_url, verify_ssl, location):
     '''
     Examine the keys, and populate as necessary
     '''
@@ -234,4 +244,7 @@ def _get_key(key, keyid, service_url, verify_ssl):
     if verify_ssl is None:
         verify_ssl = True
 
-    return key, keyid, service_url, verify_ssl
+    if location is None and __salt__['config.option']('s3.location') is not None:
+        location = __salt__['config.option']('s3.location')
+
+    return key, keyid, service_url, verify_ssl, location
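Because every wrapper in the execution module now accepts a location argument and _get_key falls back to the s3.location option, the region can also be passed per call. A sketch in the style of the docstring examples above (bucket and object names are placeholders):

    salt myminion s3.get mybucket myfile.png location=eu-central-1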

View File

@@ -96,12 +96,14 @@ _s3_sync_on_update = True  # sync cache on update rather than jit
 
 
 class S3Credentials(object):
-    def __init__(self, key, keyid, bucket, service_url, verify_ssl=True):
+    def __init__(self, key, keyid, bucket, service_url, verify_ssl,
+                 location):
         self.key = key
         self.keyid = keyid
         self.bucket = bucket
         self.service_url = service_url
         self.verify_ssl = verify_ssl
+        self.location = location
 
 
 def ext_pillar(minion_id,
@@ -110,6 +112,7 @@ def ext_pillar(minion_id,
               key,
               keyid,
               verify_ssl=True,
+              location=None,
               multiple_env=False,
               environment='base',
               prefix='',
@@ -118,7 +121,8 @@ def ext_pillar(minion_id,
     Execute a command and read the output as YAML
     '''
 
-    s3_creds = S3Credentials(key, keyid, bucket, service_url, verify_ssl)
+    s3_creds = S3Credentials(key, keyid, bucket, service_url, verify_ssl,
+                             location)
 
     # normpath is needed to remove appended '/' if root is empty string.
     pillar_dir = os.path.normpath(os.path.join(_get_cache_dir(), environment,
@@ -230,6 +234,7 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
                         bucket=creds.bucket,
                         service_url=creds.service_url,
                         verify_ssl=creds.verify_ssl,
+                        location=creds.location,
                         return_bin=False,
                         params={'prefix': prefix})
@@ -368,5 +373,6 @@ def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
         service_url=creds.service_url,
         path=_quote(path),
         local_file=cached_file_path,
-        verify_ssl=creds.verify_ssl
+        verify_ssl=creds.verify_ssl,
+        location=creds.location
     )
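The ext_pillar signature above maps onto the master configuration. A sketch of what an S3 ext_pillar entry with the new location key might look like; the key names are inferred from the function signature rather than taken from this diff, and the bucket and credentials are placeholders:

    ext_pillar:
      - s3:
          bucket: my-pillar-bucket
          keyid: GKTADJGHEIQSXMKKRBJ08H
          key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
          verify_ssl: True
          location: eu-central-1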

View File

@@ -63,8 +63,25 @@ def _convert_key_to_str(key):
     return key
 
 
+def get_iam_region(version='latest', url='http://169.254.169.254',
+                   timeout=None, num_retries=5):
+    '''
+    Gets the instance identity document and returns the region
+    '''
+    instance_identity_url = '{0}/{1}/dynamic/instance-identity/document'.format(url, version)
+
+    region = None
+    try:
+        document = _retry_get_url(instance_identity_url, num_retries, timeout)
+        region = json.loads(document)['region']
+    except (ValueError, TypeError, KeyError):
+        # JSON failed to decode, or the 'region' field was missing
+        log.error('Failed to read region from instance metadata. Giving up.')
+    return region
+
+
 def get_iam_metadata(version='latest', url='http://169.254.169.254',
                      timeout=None, num_retries=5):
     '''
     Grabs the first IAM role from this instance's metadata if it exists.
     '''
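get_iam_region() relies on the dynamic instance-identity document returned by the metadata service being JSON with a region field. A minimal, self-contained Python sketch of that parsing step; the document shown is an abbreviated, assumed example, and a real document carries more fields:

    import json

    # Abbreviated stand-in for the body returned by
    # http://169.254.169.254/latest/dynamic/instance-identity/document
    document = '{"region": "eu-central-1", "instanceId": "i-0123456789abcdef0"}'

    print(json.loads(document)['region'])  # -> eu-central-1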

View File

@@ -30,7 +30,7 @@ DEFAULT_LOCATION = 'us-east-1'
 def query(key, keyid, method='GET', params=None, headers=None,
           requesturl=None, return_url=False, bucket=None, service_url=None,
           path='', return_bin=False, action=None, local_file=None,
-          verify_ssl=True, location=DEFAULT_LOCATION, full_headers=False):
+          verify_ssl=True, location=None, full_headers=False):
     '''
     Perform a query against an S3-like API. This function requires that a
     secret key and the id for that key are passed in. For instance:
@@ -38,7 +38,10 @@ def query(key, keyid, method='GET', params=None, headers=None,
         s3.keyid: GKTADJGHEIQSXMKKRBJ08H
         s3.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
 
-    A service_url may also be specified in the configuration::
+    If keyid or key is not specified, an attempt to fetch them from the EC2
+    IAM metadata service will be made.
+
+    A service_url may also be specified in the configuration:
 
         s3.service_url: s3.amazonaws.com
@@ -58,6 +61,13 @@ def query(key, keyid, method='GET', params=None, headers=None,
     This is required if using S3 bucket names that contain a period, as
     these will not match Amazon's S3 wildcard certificates. Certificate
     verification is enabled by default.
+
+    A region may be specified:
+
+        s3.location: eu-central-1
+
+    If the region is not specified, an attempt to fetch it from the EC2 IAM
+    metadata service will be made. Failing that, the default is us-east-1.
     '''
     if not HAS_REQUESTS:
         log.error('There was an error: requests is required for s3 access')
@@ -77,12 +87,15 @@ def query(key, keyid, method='GET', params=None, headers=None,
         endpoint = service_url
 
     # Try grabbing the credentials from the EC2 instance IAM metadata if available
     token = None
     if not key or not keyid:
         iam_creds = iam.get_iam_metadata()
         key = iam_creds['secret_key']
         keyid = iam_creds['access_key']
         token = iam_creds['security_token']
 
+    if not location:
+        location = iam.get_iam_region()
+    if not location:
+        location = DEFAULT_LOCATION
+
     data = ''
     if method == 'PUT':
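The fallback chain in query() is: an explicit location argument (or s3.location from configuration), then the region reported by the instance-identity document, then DEFAULT_LOCATION. A small Python sketch of that ordering; resolve_location is a hypothetical helper written only to illustrate the precedence, not a function from this pull request:

    DEFAULT_LOCATION = 'us-east-1'

    def resolve_location(location, iam_region):
        # 1) explicit argument / s3.location config
        # 2) region from the EC2 instance-identity document
        # 3) hard-coded default
        return location or iam_region or DEFAULT_LOCATION

    assert resolve_location('eu-central-1', None) == 'eu-central-1'
    assert resolve_location(None, 'eu-west-1') == 'eu-west-1'
    assert resolve_location(None, None) == 'us-east-1'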

View File

@@ -33,7 +33,7 @@ class S3TestCase(TestCase):
         '''
         with patch.object(s3, '_get_key',
                           return_value=('key', 'keyid', 'service_url',
-                                        'verify_ssl')):
+                                        'verify_ssl', 'location')):
             with patch.object(salt.utils.s3, 'query', return_value='A'):
                 self.assertEqual(s3.delete('bucket'), 'A')
@@ -44,7 +44,7 @@ class S3TestCase(TestCase):
         '''
         with patch.object(s3, '_get_key',
                           return_value=('key', 'keyid', 'service_url',
-                                        'verify_ssl')):
+                                        'verify_ssl', 'location')):
             with patch.object(salt.utils.s3, 'query', return_value='A'):
                 self.assertEqual(s3.get(), 'A')
@@ -54,7 +54,7 @@ class S3TestCase(TestCase):
         '''
         with patch.object(s3, '_get_key',
                           return_value=('key', 'keyid', 'service_url',
-                                        'verify_ssl')):
+                                        'verify_ssl', 'location')):
             with patch.object(salt.utils.s3, 'query', return_value='A'):
                 self.assertEqual(s3.head('bucket'), 'A')
@@ -64,7 +64,7 @@ class S3TestCase(TestCase):
         '''
         with patch.object(s3, '_get_key',
                           return_value=('key', 'keyid', 'service_url',
-                                        'verify_ssl')):
+                                        'verify_ssl', 'location')):
             with patch.object(salt.utils.s3, 'query', return_value='A'):
                 self.assertEqual(s3.put('bucket'), 'A')