Merge pull request #42118 from fake-name/develop

Patches for https://github.com/saltstack/salt/issues/42106 and https://github.com/saltstack/salt/issues/42117
This commit is contained in:
Nicole Thomas 2017-12-15 15:51:33 -05:00 committed by GitHub
commit d933af39a6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 55 additions and 0 deletions

View File

@@ -36,6 +36,7 @@ import salt.utils.crypt
import salt.utils.data
import salt.utils.dictupdate
import salt.utils.files
import salt.utils.verify
import salt.syspaths
from salt.template import compile_template
@@ -185,6 +186,10 @@ class CloudClient(object):
else:
self.opts = salt.config.cloud_config(path)
# Check the cache-dir exists. If not, create it.
v_dirs = [self.opts['cachedir']]
salt.utils.verify.verify_env(v_dirs, salt.utils.get_user())
if pillars:
for name, provider in six.iteritems(pillars.pop('providers', {})):
driver = provider['driver']

View File

@@ -49,6 +49,7 @@ Example Provider Configuration
# Import python libs
from __future__ import absolute_import
import os
import sys
import re
import pprint
import logging
@@ -58,6 +59,7 @@ from salt.utils.versions import LooseVersion as _LooseVersion
# Import 3rd-party libs
# pylint: disable=import-error
LIBCLOUD_IMPORT_ERROR = None
try:
import libcloud
from libcloud.compute.types import Provider
@@ -78,6 +80,7 @@ try:
libcloud.security.CA_CERTS_PATH.append('/etc/ssl/certs/YaST-CA.pem')
HAS_LIBCLOUD = True
except ImportError:
LIBCLOUD_IMPORT_ERROR = sys.exc_info()
HAS_LIBCLOUD = False
# pylint: enable=import-error
@@ -155,6 +158,9 @@ def get_dependencies():
'''
Warn if dependencies aren't met.
'''
if LIBCLOUD_IMPORT_ERROR:
log.error("Failure when importing LibCloud: ", exc_info=LIBCLOUD_IMPORT_ERROR)
log.error("Note: The libcloud dependency is called 'apache-libcloud' on PyPi/pip.")
return config.check_driver_dependencies(
__virtualname__,
{'libcloud': HAS_LIBCLOUD}

View File

@@ -15,6 +15,9 @@ import os.path
import pprint
import socket
import yaml
import io
import zlib
import gzip
import re
import ssl
@@ -94,6 +97,37 @@ log = logging.getLogger(__name__)
USERAGENT = 'Salt/{0}'.format(salt.version.__version__)
def __decompressContent(coding, pgctnt):
    '''
    Decompress returned HTTP content depending on the specified encoding.
    Currently supports identity/none, deflate, and gzip, which should
    cover 99%+ of the content on the internet.

    coding
        Value of the response's ``Content-Encoding`` header (e.g.
        ``gzip``, ``deflate``, ``identity``).
    pgctnt
        The raw (possibly compressed) response body.

    Raises ``ValueError`` for recognized-but-unsupported schemes
    (``sdch``, ``br``, ``compress``). Unrecognized codings are passed
    through unchanged, as is ``identity``.
    '''
    log.trace("Decompressing %s byte content with compression type: %s", len(pgctnt), coding)

    if coding == 'deflate':
        # Negative wbits tells zlib to expect a raw deflate stream with no
        # zlib header, which is what most servers actually send for "deflate".
        pgctnt = zlib.decompress(pgctnt, -zlib.MAX_WBITS)
    elif coding == 'gzip':
        # Use a context manager so the GzipFile wrapper is always closed;
        # the original left it (and its underlying buffer) open.
        buf = io.BytesIO(pgctnt)
        with gzip.GzipFile(fileobj=buf) as decompressor:
            pgctnt = decompressor.read()
    elif coding == "sdch":
        raise ValueError("SDCH compression is not currently supported")
    elif coding == "br":
        raise ValueError("Brotli compression is not currently supported")
    elif coding == "compress":
        raise ValueError("LZW compression is not currently supported")
    elif coding == 'identity':
        # No transformation was applied by the server.
        pass

    log.trace("Content size after decompression: %s", len(pgctnt))
    return pgctnt
@jinja_filter('http_query')
def query(url,
method='GET',
@@ -197,6 +231,8 @@ def query(url,
log_url = sanitize_url(url_full, hide_fields)
log.debug('Requesting URL {0} using {1} method'.format(log_url, method))
log.debug("Using backend: %s", backend)
if method == 'POST' and log.isEnabledFor(logging.TRACE):
# Make sure no secret fields show up in logs
if isinstance(data, dict):
@@ -545,6 +581,14 @@ def query(url,
log.debug('Response Status Code: {0}'.format(result_status_code))
log.trace('Response Headers: {0}'.format(result_headers))
log.trace('Response Cookies: {0}'.format(sess_cookies))
# log.trace("Content: %s", result_text)
coding = result_headers.get('Content-Encoding', "identity")
# Requests will always decompress the content, and working around that is annoying.
if backend != 'requests':
result_text = __decompressContent(coding, result_text)
try:
log.trace('Response Text: {0}'.format(result_text))
except UnicodeEncodeError as exc: