Merge pull request #18263 from s0undt3ch/hotfix/rockin-salt

More Py3 and Lint Fixes

Commit d8e8d6007a

salt/_compat.py — 124 changed lines
@@ -2,11 +2,16 @@
 '''
 Salt compatibility code
 '''
-# pylint: disable=W0611
+# pylint: disable=import-error,unused-import

 # Import python libs
 import sys
 import types

+# Import 3rd-party libs
+from salt.ext.six import binary_type, string_types, text_type
+from salt.ext.six.moves import cStringIO, StringIO
+
 try:
     import cPickle as pickle
 except ImportError:
@@ -33,31 +38,6 @@ except ImportError:
 # True if we are running on Python 3.
 PY3 = sys.version_info[0] == 3

-if PY3:
-    MAX_SIZE = sys.maxsize
-else:
-    MAX_SIZE = sys.maxint
-
-if PY3:
-    xrange = range
-else:
-    xrange = xrange
-
-# pylint: disable=C0103
-if PY3:
-    string_types = str,
-    integer_types = int,
-    class_types = type,
-    text_type = str
-    binary_type = bytes
-    long = int
-else:
-    string_types = basestring,
-    integer_types = (int, long)
-    class_types = (type, types.ClassType)
-    text_type = unicode
-    binary_type = str
-    long = long
-
 if PY3:
     import builtins
@@ -65,12 +45,6 @@ if PY3:
 else:
     import exceptions  # pylint: disable=W0403

-if PY3:
-    def callable(obj):
-        return any('__call__' in klass.__dict__ for klass in type(obj).__mro__)
-else:
-    callable = callable
-

 def text_(s, encoding='latin-1', errors='strict'):
     '''
@@ -103,31 +77,6 @@ else:
         s = s.encode('ascii')
     return str(s)

-if PY3:
-    exec_ = getattr(builtins, 'exec')  # pylint: disable=E0602
-
-    def reraise(tp, value, tb=None):
-        if value is None:
-            value = tp()
-        if value.__traceback__ is not tb:
-            raise value.with_traceback(tb)
-        raise value
-else:
-    def exec_(code_, globals_=None, locals_=None):
-        '''
-        Execute code directly in a passed namespace
-        '''
-        if globals_ is None:
-            frame = sys._getframe(1)
-            globals_ = frame.f_globals
-            if locals_ is None:
-                locals_ = frame.f_locals
-            del frame
-        elif locals_ is None:
-            locals_ = globals_
-        exec('''exec code_ in globals_, locals_''')
-

 ascii_native_.__doc__ = '''
 Python 3: If ``s`` is an instance of ``text_type``, return
 ``s.encode('ascii')``, otherwise return ``str(s, 'ascii', 'strict')``
@@ -164,60 +113,6 @@ Python 2: If ``s`` is an instance of ``text_type``, return
 ``s.encode(encoding, errors)``, otherwise return ``str(s)``
 '''

-if PY3:
-    # pylint: disable=E0611
-    from urllib.parse import urlparse
-    from urllib.parse import urlunparse
-    from urllib.error import URLError
-    import http.server as BaseHTTPServer
-    from urllib.error import HTTPError
-    from urllib.parse import quote as url_quote
-    from urllib.parse import quote_plus as url_quote_plus
-    from urllib.parse import unquote as url_unquote
-    from urllib.parse import urlencode as url_encode
-    from urllib.request import urlopen as url_open
-    from urllib.request import HTTPPasswordMgrWithDefaultRealm as url_passwd_mgr
-    from urllib.request import HTTPBasicAuthHandler as url_auth_handler
-    from urllib.request import build_opener as url_build_opener
-    from urllib.request import install_opener as url_install_opener
-    url_unquote_text = url_unquote
-    url_unquote_native = url_unquote
-    import configparser
-else:
-    from urlparse import urlparse
-    from urlparse import urlunparse
-    import BaseHTTPServer
-    from urllib2 import HTTPError, URLError
-    from urllib import quote as url_quote
-    from urllib import quote_plus as url_quote_plus
-    from urllib import unquote as url_unquote
-    from urllib import urlencode as url_encode
-    from urllib2 import urlopen as url_open
-    from urllib2 import HTTPPasswordMgrWithDefaultRealm as url_passwd_mgr
-    from urllib2 import HTTPBasicAuthHandler as url_auth_handler
-    from urllib2 import build_opener as url_build_opener
-    from urllib2 import install_opener as url_install_opener
-    import ConfigParser as configparser
-
-    def url_unquote_text(v, encoding='utf-8', errors='replace'):
-        v = url_unquote(v)
-        return v.decode(encoding, errors)
-
-    def url_unquote_native(v, encoding='utf-8', errors='replace'):
-        return native_(url_unquote_text(v, encoding, errors))
-
-if PY3:
-    zip = zip
-else:
-    from future_builtins import zip
-
-if PY3:
-    from io import StringIO
-    from io import BytesIO as cStringIO
-else:
-    from StringIO import StringIO
-    from cStringIO import StringIO as cStringIO
-

 def string_io(data=None):  # cStringIO can't handle unicode
     '''
@@ -227,10 +122,3 @@ def string_io(data=None):  # cStringIO can't handle unicode
         return cStringIO(bytes(data))
     except (UnicodeEncodeError, TypeError):
         return StringIO(data)
-
-if PY3:
-    import queue as Queue
-else:
-    import Queue
-# pylint: enable=C0103
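The deletions above strip salt/_compat.py's hand-rolled Python 2/3 shims in favour of the vendored six module, and the remaining hunks in this commit update callers to import the same names from `salt.ext.six`. A minimal sketch of the replacement pattern, assuming the vendored `salt.ext.six` is importable; the `ensure_text` helper is illustrative and not part of the diff:

    # Illustrative only: write version-agnostic code against six rather than
    # branching on PY3 by hand. All imported names come from upstream six.
    from salt.ext.six import PY3, binary_type, string_types, text_type
    from salt.ext.six.moves import StringIO, cStringIO, range, zip
    from salt.ext.six.moves.urllib.parse import quote, urlencode, urlparse
    from salt.ext.six.moves.urllib.request import urlopen


    def ensure_text(value, encoding='utf-8'):
        # Decode bytes to text identically on Python 2 and 3.
        if isinstance(value, binary_type):
            return value.decode(encoding)
        return value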
@@ -30,15 +30,17 @@ from __future__ import absolute_import
 import time
 import json
 import pprint
-import requests
 import logging
 import hmac
-import urllib
 import uuid
 import sys
 import base64
 from hashlib import sha1

+# Import 3rd-party libs
+import requests
+from salt.ext.six.moves.urllib.parse import quote as _quote  # pylint: disable=import-error,no-name-in-module
+
 # Import salt cloud libs
 import salt.utils.cloud
 import salt.config as config
@@ -664,7 +666,7 @@ def _compute_signature(parameters, access_key_secret):
             s = line.decode().encode('utf8')
         else:
             s = line.decode(sys.stdin.encoding).encode('utf8')
-        res = urllib.quote(s, '')
+        res = _quote(s, '')
         res = res.replace('+', '%20')
         res = res.replace('*', '%2A')
         res = res.replace('%7E', '~')
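The `_quote` call above is the drop-in for `urllib.quote`, and the follow-up replacements appear to tighten the output to the canonical form the signature step expects (spaces as %20, `*` escaped, `~` left alone). A standalone sketch of that encoding step; the `_canonicalize` name is an assumption, not from the diff:

    from salt.ext.six.moves.urllib.parse import quote as _quote


    def _canonicalize(value):
        # Percent-encode, then adjust the few characters whose default
        # treatment differs from the form used when signing the request.
        res = _quote(value, '')
        res = res.replace('+', '%20')
        res = res.replace('*', '%2A')
        res = res.replace('%7E', '~')
        return res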
@@ -74,19 +74,21 @@ import uuid
 import pprint
 import logging
 import yaml

+# Import 3rd-party libs
+# pylint: disable=import-error,no-name-in-module,redefined-builtin
+import requests
 import salt.ext.six as six
-from salt.ext.six.moves import map
-from salt.ext.six.moves import zip
-from salt.ext.six.moves import range
+from salt.ext.six.moves import map, range, zip
+from salt.ext.six.moves.urllib.parse import urlparse as _urlparse, urlencode as _urlencode
+# pylint: enable=import-error,no-name-in-module

 # Import libs for talking to the EC2 API
 import hmac
 import hashlib
 import binascii
 import datetime
-import urllib
-import urlparse
-import requests
 import base64

 # Import salt libs
@@ -312,11 +314,12 @@ def query(params=None, setname=None, requesturl=None, location=None,

         requesturl = 'https://{0}/'.format(endpoint)
     else:
-        endpoint = urlparse.urlparse(requesturl).netloc
+        endpoint = _urlparse(requesturl).netloc
         if endpoint == '':
-            endpoint_err = 'Could not find a valid endpoint in the requesturl: {0}. Looking for something like https://some.ec2.endpoint/?args'.format(
-                requesturl
-            )
+            endpoint_err = (
+                'Could not find a valid endpoint in the '
+                'requesturl: {0}. Looking for something '
+                'like https://some.ec2.endpoint/?args').format(requesturl)
             log.error(endpoint_err)
             if return_url is True:
                 return {'error': endpoint_err}, requesturl
@@ -337,7 +340,7 @@ def query(params=None, setname=None, requesturl=None, location=None,
     params_with_headers['Version'] = ec2_api_version
     keys = sorted(params_with_headers)
     values = list(map(params_with_headers.get, keys))
-    querystring = urllib.urlencode(list(zip(keys, values)))
+    querystring = _urlencode(list(zip(keys, values)))

     # AWS signature version 2 requires that spaces be encoded as
     # %20, however urlencode uses '+'. So replace pluses with %20.
@@ -1038,10 +1041,10 @@ def _request_eip(interface):
     '''
     params = {'Action': 'AllocateAddress'}
     params['Domain'] = interface.setdefault('domain', 'vpc')
-    eip = query(params, return_root=True)
-    for e in eip:
-        if 'allocationId' in e:
-            return e['allocationId']
+    eips = query(params, return_root=True)
+    for eip in eips:
+        if 'allocationId' in eip:
+            return eip['allocationId']
     return None
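The `_urlencode` change above keeps the signature-version-2 query canonicalization working on both Python versions: keys are sorted, pairs are urlencoded, and '+' is then rewritten to '%20' as the in-line comment notes. A self-contained sketch of that canonicalization; the function name and sample parameters are assumptions:

    from salt.ext.six.moves.urllib.parse import urlencode as _urlencode


    def canonical_querystring(params):
        # Sort keys, urlencode the pairs, then apply the %20 fix-up that
        # AWS signature version 2 expects instead of '+' for spaces.
        keys = sorted(params)
        values = list(map(params.get, keys))
        querystring = _urlencode(list(zip(keys, values)))
        return querystring.replace('+', '%20')


    # Example (hypothetical parameters):
    # canonical_querystring({'Action': 'DescribeInstances', 'Version': '2014-06-15'})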
@@ -19,16 +19,27 @@ Set up the cloud configuration at ``/etc/salt/cloud.providers`` or
       provider: parallels

 '''
-from __future__ import absolute_import

 # Import python libs
+from __future__ import absolute_import
 import copy
 import time
 import pprint
-import urllib
-import urllib2
 import logging

+# Import 3rd-party libs
+# pylint: disable=import-error,no-name-in-module
+from salt.ext.six.moves.urllib.error import URLError
+from salt.ext.six.moves.urllib.parse import urlencode as _urlencode
+from salt.ext.six.moves.urllib.request import (
+    HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
+    Request as _Request,
+    urlopen as _urlopen,
+    build_opener as _build_opener,
+    install_opener as _install_opener
+)
+# pylint: enable=import-error,no-name-in-module
+
 # Import salt libs
 import salt.utils
 from salt._compat import ElementTree as ET
@@ -467,7 +478,7 @@ def query(action=None, command=None, args=None, method='GET', data=None):
     path = config.get_cloud_config_value(
         'url', get_configured_provider(), __opts__, search_global=False
     )
-    auth_handler = urllib2.HTTPBasicAuthHandler()
+    auth_handler = _HTTPBasicAuthHandler()
     auth_handler.add_password(
         realm='Parallels Instance Manager',
         uri=path,
@@ -479,8 +490,8 @@ def query(action=None, command=None, args=None, method='GET', data=None):
             search_global=False
         )
     )
-    opener = urllib2.build_opener(auth_handler)
-    urllib2.install_opener(opener)
+    opener = _build_opener(auth_handler)
+    _install_opener(opener)

     if action:
         path += action
@@ -498,10 +509,10 @@ def query(action=None, command=None, args=None, method='GET', data=None):
     }

     if args:
-        params = urllib.urlencode(args)
-        req = urllib2.Request(url='{0}?{1}'.format(path, params), **kwargs)
+        params = _urlencode(args)
+        req = _Request(url='{0}?{1}'.format(path, params), **kwargs)
     else:
-        req = urllib2.Request(url=path, **kwargs)
+        req = _Request(url=path, **kwargs)

     req.get_method = lambda: method

@@ -510,7 +521,7 @@ def query(action=None, command=None, args=None, method='GET', data=None):
     log.debug(data)

     try:
-        result = urllib2.urlopen(req)
+        result = _urlopen(req)
         log.debug(
             'PARALLELS Response Status Code: {0}'.format(
                 result.getcode()
@@ -524,7 +535,7 @@ def query(action=None, command=None, args=None, method='GET', data=None):
             return items

         return {}
-    except urllib2.URLError as exc:
+    except URLError as exc:
         log.error(
             'PARALLELS Response Status Code: {0} {1}'.format(
                 exc.code,
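The hunks above swap every `urllib2` call for its `six.moves.urllib` equivalent; the opener-based HTTP pattern itself is unchanged. A minimal, self-contained sketch of that pattern — the `fetch` helper, URL and credentials are placeholders, not part of the diff:

    from salt.ext.six.moves.urllib.request import (
        HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
        Request as _Request,
        build_opener as _build_opener,
        install_opener as _install_opener,
        urlopen as _urlopen,
    )


    def fetch(url, user, password, realm='Parallels Instance Manager'):
        # Register a basic-auth handler globally, then issue the request.
        handler = _HTTPBasicAuthHandler()
        handler.add_password(realm=realm, uri=url, user=user, passwd=password)
        _install_opener(_build_opener(handler))
        req = _Request(url=url)
        return _urlopen(req).read()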
@@ -14,9 +14,9 @@ import logging
 from copy import deepcopy
 import time
 import codecs

 # import third party libs
 import yaml
-import salt.ext.six as six
 try:
     yaml.Loader = yaml.CLoader
     yaml.Dumper = yaml.CDumper
@@ -31,8 +31,9 @@ import salt.syspaths
 import salt.utils.validate.path
 import salt.utils.xdg
 import salt.exceptions
-from salt._compat import urlparse
-from salt.ext.six import string_types
+from salt.ext.six import string_types, text_type
+from salt.ext.six.moves.urllib.parse import urlparse  # pylint: disable=import-error,no-name-in-module

 import sys

@@ -747,7 +748,7 @@ def _read_conf_file(path):
         if 'id' in conf_opts:
             conf_opts['id'] = str(conf_opts['id'])
         for key, value in conf_opts.copy().items():
-            if isinstance(value, six.text_type):
+            if isinstance(value, text_type):
                 # We do not want unicode settings
                 conf_opts[key] = value.encode('utf-8')
         return conf_opts
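With `text_type` imported directly from `salt.ext.six`, the unicode-to-bytes normalisation in `_read_conf_file` no longer needs the `six` module object. A standalone sketch of that normalisation — the helper name and the sample dict are made up:

    from salt.ext.six import text_type


    def encode_unicode_values(opts):
        # Re-encode any unicode option values as UTF-8 byte strings,
        # mirroring the loop shown in the hunk above.
        for key, value in opts.copy().items():
            if isinstance(value, text_type):
                opts[key] = value.encode('utf-8')
        return opts


    # encode_unicode_values({'id': u'minion-1'})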
@@ -26,11 +26,20 @@ import salt.fileserver
 import salt.utils
 import salt.utils.templates
 import salt.utils.gzip_util
-from salt._compat import (
-    URLError, HTTPError, BaseHTTPServer, urlparse, urlunparse,
-    url_passwd_mgr, url_auth_handler, url_build_opener, url_install_opener)
 from salt.utils.openstack.swift import SaltSwift

+# pylint: disable=no-name-in-module,import-error
+import salt.ext.six.moves.BaseHTTPServer as BaseHTTPServer
+from salt.ext.six.moves.urllib.error import HTTPError, URLError
+from salt.ext.six.moves.urllib.parse import urlparse, urlunparse
+from salt.ext.six.moves.urllib.request import (
+    HTTPPasswordMgrWithDefaultRealm as url_passwd_mgr,
+    HTTPBasicAuthHandler as url_auth_handler,
+    build_opener as url_build_opener,
+    install_opener as url_install_opener
+)
+# pylint: enable=no-name-in-module,import-error
+
 log = logging.getLogger(__name__)

@@ -69,7 +69,6 @@ import datetime
 import os
 import time
 import pickle
-import urllib
 import logging

 # Import salt libs
@@ -77,8 +76,13 @@ import salt.fileserver as fs
 import salt.modules
 import salt.utils
 import salt.utils.s3 as s3

+# Import 3rd-party libs
+# pylint: disable=import-error,no-name-in-module,redefined-builtin
 import salt.ext.six as six
 from salt.ext.six.moves import filter
+from salt.ext.six.moves.urllib.parse import quote as _quote
+# pylint: disable=import-error,no-name-in-module,redefined-builtin

 log = logging.getLogger(__name__)

@@ -572,7 +576,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
                 bucket=bucket_name,
                 service_url=service_url,
                 verify_ssl=verify_ssl,
-                path=urllib.quote(path),
+                path=_quote(path),
                 local_file=cached_file_path
             )
             if ret is not None:
@@ -600,7 +604,7 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path):
         bucket=bucket_name,
         service_url=service_url,
         verify_ssl=verify_ssl,
-        path=urllib.quote(path),
+        path=_quote(path),
         local_file=cached_file_path
     )

@@ -17,7 +17,6 @@ import threading
 import logging.handlers

 # Import salt libs
-from salt._compat import Queue
 from salt.log.mixins import NewStyleClassMixIn, ExcInfoOnLogLevelFormatMixIn

 log = logging.getLogger(__name__)
@@ -24,7 +24,9 @@ import socket
 import logging
 import logging.handlers
 import traceback
+from salt.ext.six import PY3
 from salt.ext.six import string_types, text_type, with_metaclass
+from salt.ext.six.moves.urllib.parse import urlparse  # pylint: disable=import-error,no-name-in-module

 # Let's define these custom logging levels before importing the salt.log.mixins
 # since they will be used there
@@ -35,7 +37,6 @@ QUIET = logging.QUIET = 1000
 # Import salt libs
 from salt.log.handlers import TemporaryLoggingHandler, StreamHandler, SysLogHandler, WatchedFileHandler
 from salt.log.mixins import LoggingMixInMeta, NewStyleClassMixIn
-from salt._compat import PY3, urlparse

 LOG_LEVELS = {
     'all': logging.NOTSET,
@@ -16,8 +16,19 @@ from __future__ import absolute_import
 # Import python libs
 import re
 import logging
-import urllib2
-import cStringIO
+
+# Import 3rd-party libs
+# pylint: disable=import-error,no-name-in-module
+from salt.ext.six.moves import cStringIO
+from salt.ext.six.moves.urllib.error import URLError
+from salt.ext.six.moves.urllib.request import (
+    HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
+    HTTPDigestAuthHandler as _HTTPDigestAuthHandler,
+    urlopen as _urlopen,
+    build_opener as _build_opener,
+    install_opener as _install_opener
+)
+# pylint: enable=import-error,no-name-in-module

 # Import salt libs
 import salt.utils
@@ -354,17 +365,17 @@ def server_status(profile='default'):

     # create authentication handler if configuration exists
     if user and passwd:
-        basic = urllib2.HTTPBasicAuthHandler()
+        basic = _HTTPBasicAuthHandler()
         basic.add_password(realm=realm, uri=url, user=user, passwd=passwd)
-        digest = urllib2.HTTPDigestAuthHandler()
+        digest = _HTTPDigestAuthHandler()
         digest.add_password(realm=realm, uri=url, user=user, passwd=passwd)
-        urllib2.install_opener(urllib2.build_opener(basic, digest))
+        _install_opener(_build_opener(basic, digest))

     # get http data
     url += '?auto'
     try:
-        response = urllib2.urlopen(url, timeout=timeout).read().splitlines()
-    except urllib2.URLError:
+        response = _urlopen(url, timeout=timeout).read().splitlines()
+    except URLError:
         return 'error'

     # parse the data
@@ -387,7 +398,7 @@ def server_status(profile='default'):


 def _parse_config(conf, slot=None):
-    ret = cStringIO.StringIO()
+    ret = cStringIO()
     if isinstance(conf, str):
         if slot:
             print('{0} {1}'.format(slot, conf), file=ret, end='')
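`six.moves.cStringIO` is a factory (io.StringIO on Python 3, cStringIO.StringIO on Python 2), so the call site above drops the extra `.StringIO` attribute access. A small sketch of the in-memory buffer pattern used by `_parse_config`; the config line written here is invented:

    from __future__ import print_function

    from salt.ext.six.moves import cStringIO

    # Build config text in an in-memory buffer, as _parse_config does above.
    buf = cStringIO()
    print('{0} {1}'.format('Listen', '8080'), file=buf, end='\n')
    print(buf.getvalue())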
@@ -16,29 +16,30 @@ import copy
 import os
 import re
 import logging
-import urllib2
 import json

 # Import third party libs
 import yaml
+# pylint: disable=no-name-in-module,import-error,redefined-builtin
+import salt.ext.six as six
+from salt.ext.six.moves import range
+from salt.ext.six.moves.urllib.error import HTTPError
+from salt.ext.six.moves.urllib.request import Request as _Request, urlopen as _urlopen
+# pylint: enable=no-name-in-module,import-error,redefined-builtin

 # Import salt libs
 from salt.modules.cmdmod import _parse_env
 import salt.utils
-from salt.ext.six import string_types
 from salt.exceptions import (
     CommandExecutionError, MinionError, SaltInvocationError
 )
-import salt.ext.six as six
-from salt.ext.six.moves import range


 log = logging.getLogger(__name__)

+# pylint: disable=import-error
 try:
-    import apt.cache  # pylint: disable=E0611
-    import apt.debfile  # pylint: disable=E0611
+    import apt.cache
+    import apt.debfile
     from aptsources import sourceslist
     HAS_APT = True
 except ImportError:
@@ -49,6 +50,7 @@ try:
     HAS_SOFTWAREPROPERTIES = True
 except ImportError:
     HAS_SOFTWAREPROPERTIES = False
+# pylint: disable=import-error

 # Source format for urllib fallback on PPA handling
 LP_SRC_FORMAT = 'deb http://ppa.launchpad.net/{0}/{1}/ubuntu {2} main'
@@ -102,8 +104,8 @@ def _get_ppa_info_from_launchpad(owner_name, ppa_name):

     lp_url = 'https://launchpad.net/api/1.0/~{0}/+archive/{1}'.format(
         owner_name, ppa_name)
-    request = urllib2.Request(lp_url, headers={'Accept': 'application/json'})
-    lp_page = urllib2.urlopen(request)
+    request = _Request(lp_url, headers={'Accept': 'application/json'})
+    lp_page = _urlopen(request)
     return json.load(lp_page)


@@ -1398,7 +1400,7 @@ def mod_repo(repo, saltenv='base', **kwargs):
                 raise CommandExecutionError(
                     error_str.format(owner_name, ppa_name)
                 )
-            except urllib2.HTTPError as exc:
+            except HTTPError as exc:
                 raise CommandExecutionError(
                     'Launchpad does not know about {0}/{1}: {2}'.format(
                         owner_name, ppa_name, exc)
@@ -1671,7 +1673,7 @@ def _parse_selections(dpkgselection):
     pkg.get_selections and pkg.set_selections work with.
     '''
     ret = {}
-    if isinstance(dpkgselection, string_types):
+    if isinstance(dpkgselection, six.string_types):
         dpkgselection = dpkgselection.split('\n')
     for line in dpkgselection:
         if line:
@@ -1776,7 +1778,7 @@ def set_selections(path=None, selection=None, clear=False, saltenv='base'):
                'specified together')
         raise SaltInvocationError(err)

-    if isinstance(selection, string_types):
+    if isinstance(selection, six.string_types):
         try:
             selection = yaml.safe_load(selection)
         except (yaml.parser.ParserError, yaml.scanner.ScannerError) as exc:
|
|||||||
import time
|
import time
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
try:
|
# Import 3rd-party libs
|
||||||
from shlex import quote as _cmd_quote # pylint: disable=E0611
|
# pylint: disable=import-error,redefined-builtin
|
||||||
except ImportError:
|
from salt.ext.six.moves import map
|
||||||
from pipes import quote as _cmd_quote
|
from salt.ext.six.moves import shlex_quote as _cmd_quote
|
||||||
|
# pylint: enable=import-error,redefined-builtin
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
from salt.ext.six.moves import map
|
|
||||||
|
|
||||||
# OS Families that should work (Ubuntu and Debian are the default)
|
# OS Families that should work (Ubuntu and Debian are the default)
|
||||||
# TODO: Refactor some of this module to remove the checks for binaries
|
# TODO: Refactor some of this module to remove the checks for binaries
|
||||||
|
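Several modules in this commit replace the shlex/pipes try/except dance with `six.moves.shlex_quote`, which resolves to `shlex.quote` on Python 3 and `pipes.quote` on Python 2. A quick sketch of quoting untrusted input before it is interpolated into a shell command; the command and argument shown are illustrative only:

    from salt.ext.six.moves import shlex_quote as _cmd_quote

    # Quote untrusted input so the shell treats it as a single argument.
    user_arg = 'touch /tmp/one two; echo done'
    print('echo {0}'.format(_cmd_quote(user_arg)))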
@@ -14,20 +14,22 @@ from __future__ import absolute_import
 # Import python libs
 import logging

+# Import 3rd-party libs
+# pylint: disable=import-error
+from salt.ext.six.moves import shlex_quote as _cmd_quote
+# pylint: enable=import-error
+
 # Import salt libs
 import salt.utils.validate.net
 from salt.exceptions import CommandExecutionError
-try:
-    from shlex import quote as _cmd_quote  # pylint: disable=E0611
-except ImportError:
-    from pipes import quote as _cmd_quote

 log = logging.getLogger(__name__)
 HAS_PYBLUEZ = False
 try:
-    import bluetooth
+    import bluetooth  # pylint: disable=import-error
     HAS_PYBLUEZ = True
-except Exception as exc:
+except ImportError:
     pass

 __func_alias__ = {
@@ -32,12 +32,14 @@ from __future__ import absolute_import

 # Import Python libs
 import logging
-import urllib
 import json

 log = logging.getLogger(__name__)

 # Import third party libs
+# pylint: disable=import-error
+from salt.ext.six import string_types
+from salt.ext.six.moves.urllib.parse import unquote as _unquote  # pylint: disable=no-name-in-module
 try:
     import boto
     import boto.iam
@@ -45,9 +47,9 @@ try:
     HAS_BOTO = True
 except ImportError:
     HAS_BOTO = False
+# pylint: enable=import-error

 # Import salt libs
-from salt.ext.six import string_types
 import salt.utils.odict as odict


@@ -342,7 +344,7 @@ def get_role_policy(role_name, policy_name, region=None, key=None,
         # I _hate_ you for not giving me an object boto.
         _policy = _policy.get_role_policy_response.policy_document
         # Policy is url encoded
-        _policy = urllib.unquote(_policy)
+        _policy = _unquote(_policy)
         _policy = json.loads(_policy, object_pairs_hook=odict.OrderedDict)
         return _policy
     except boto.exception.BotoServerError:
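IAM returns role policy documents URL-encoded, so the call above decodes with `six.moves`' `unquote` before parsing, preserving key order via the `object_pairs_hook`. A standalone sketch of that decode step — the encoded document is a made-up example and the stdlib OrderedDict stands in for Salt's odict:

    import json
    from collections import OrderedDict

    from salt.ext.six.moves.urllib.parse import unquote as _unquote

    encoded = '%7B%22Version%22%3A%20%222012-10-17%22%2C%20%22Statement%22%3A%20%5B%5D%7D'
    policy = json.loads(_unquote(encoded), object_pairs_hook=OrderedDict)
    print(policy)  # OrderedDict([('Version', '2012-10-17'), ('Statement', [])])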
@@ -9,11 +9,6 @@ try:
 except ImportError:
     pass

-try:
-    from shlex import quote as _cmd_quote  # pylint: disable=E0611
-except ImportError:
-    from pipes import quote as _cmd_quote
-
 # Define the module's virtual name
 __virtualname__ = 'shadow'

@@ -11,10 +11,15 @@ import logging
 import re
 import os
 import bz2
-from urllib import urlopen
+
+# Import 3rd-party libs
+from salt.ext.six.moves.urllib.request import urlopen as _urlopen  # pylint: disable=no-name-in-module,import-error
+
+# Import Salt libs
 import salt.utils
 from salt.exceptions import SaltInvocationError


 LOG = logging.getLogger(__name__)

 DEFAULT_MIRROR = "ftp://mirrors.kernel.org/sourceware/cygwin/"
@@ -73,7 +78,7 @@ def _get_all_packages(mirror=DEFAULT_MIRROR,
     if not len(__context__['cyg.all_packages'][mirror]):
         pkg_source = '/'.join([mirror, cyg_arch, 'setup.bz2'])

-        file_data = urlopen(pkg_source).read()
+        file_data = _urlopen(pkg_source).read()
         file_lines = bz2.decompress(file_data).decode('utf_8',
                                                       errors='replace'
                                                       ).splitlines()
@@ -123,7 +128,7 @@ def _run_silent_cygwin(cyg_arch='x86_64',
     elif os.path.exists(cyg_setup_path):
         os.remove(cyg_setup_path)

-    file_data = urlopen(cyg_setup_source)
+    file_data = _urlopen(cyg_setup_source)
     open(cyg_setup_path, "wb").write(file_data.read())

     setup_command = cyg_setup_path
@@ -5,12 +5,14 @@ Service support for Debian systems (uses update-rc.d and /sbin/service)
 from __future__ import absolute_import

 # Import python libs
+import logging
 import glob
 import re
-try:
-    from shlex import quote as _cmd_quote  # pylint: disable=E0611
-except ImportError:
-    from pipes import quote as _cmd_quote
+
+# Import 3rd-party libs
+# pylint: disable=import-error
+from salt.ext.six.moves import shlex_quote as _cmd_quote
+# pylint: enable=import-error

 # Import salt libs
 from .systemd import _sd_booted
@@ -22,7 +24,6 @@ __func_alias__ = {
 # Define the module's virtual name
 __virtualname__ = 'service'

-import logging
 log = logging.getLogger(__name__)

@@ -30,11 +30,12 @@ import sys
 import tempfile
 import time
 import glob
-from functools import reduce
+# pylint: disable=import-error,no-name-in-module,redefined-builtin
 from salt.ext.six import string_types
-from salt.ext.six.moves import range
-from salt.ext.six.moves import zip
-from salt.ext.six.moves.urllib.parse import urlparse as _urlparse  # pylint: disable=E0611
+from salt.ext.six.moves import range, reduce, zip
+from salt.ext.six.moves.urllib.parse import urlparse as _urlparse
+# pylint: enable=import-error,no-name-in-module,redefined-builtin

 try:
     import grp
@@ -8,14 +8,11 @@ from __future__ import absolute_import
 import logging

 # Import 3rd-party libs
-# pylint: disable=import-error
-from salt.ext.six.moves import shlex_quote as _cmd_quote
-# pylint: enable=import-error
+from salt.ext.six.moves import range, shlex_quote as _cmd_quote  # pylint: disable=import-error,redefined-builtin

 # Import salt libs
 import salt.utils
 import salt.utils.cloud as suc
-from salt.ext.six.moves import range

 log = logging.getLogger(__name__)

@@ -14,15 +14,21 @@ Module for sending messages to hipchat
           api_key: peWcBiMOS9HrZG15peWcBiMOS9HrZG15
           api_version: v1
 '''
+# Import Python Libs
 from __future__ import absolute_import
 import json
-import requests
 import logging
-from urlparse import urljoin as _urljoin
+
+# Import 3rd-party Libs
+import requests
 from requests.exceptions import ConnectionError
+# pylint: disable=import-error,no-name-in-module
+from salt.ext.six.moves.urllib.parse import urljoin as _urljoin
 from salt.ext.six.moves import range
+# pylint: enable=import-error,no-name-in-module

 log = logging.getLogger(__name__)

 __virtualname__ = 'hipchat'

|
|||||||
# Import python libs
|
# Import python libs
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
#from salt.ext.six.moves.urllib.parse import urlparse # pylint: disable=import-error,no-name-in-module
|
||||||
|
|
||||||
# Set up logging
|
# Set up logging
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
@ -33,7 +35,7 @@ def mount_image(location):
|
|||||||
return first
|
return first
|
||||||
return ''
|
return ''
|
||||||
|
|
||||||
#compatibility for api change
|
# compatibility for api change
|
||||||
mnt_image = mount_image
|
mnt_image = mount_image
|
||||||
|
|
||||||
|
|
||||||
@ -60,7 +62,7 @@ def umount_image(mnt):
|
|||||||
# system
|
# system
|
||||||
# '''
|
# '''
|
||||||
# cache_dir = os.path.join(__salt__['config.option']('img.cache'), 'src')
|
# cache_dir = os.path.join(__salt__['config.option']('img.cache'), 'src')
|
||||||
# parse = urlparse.urlparse(name)
|
# parse = urlparse(name)
|
||||||
# if __salt__['config.valid_file_proto'](parse.scheme):
|
# if __salt__['config.valid_file_proto'](parse.scheme):
|
||||||
# # Valid scheme to download
|
# # Valid scheme to download
|
||||||
# dest = os.path.join(cache_dir, parse.netloc)
|
# dest = os.path.join(cache_dir, parse.netloc)
|
||||||
|
@@ -32,9 +32,17 @@ this module.
 '''
 from __future__ import absolute_import

-# Python libs
-import urllib
-import urllib2
+# Import 3rd-party libs
+# pylint: disable=import-error,no-name-in-module
+from salt.ext.six.moves.urllib.parse import urlencode as _urlencode
+from salt.ext.six.moves.urllib.request import (
+    HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
+    HTTPDigestAuthHandler as _HTTPDigestAuthHandler,
+    urlopen as _urlopen,
+    build_opener as _build_opener,
+    install_opener as _install_opener
+)
+# pylint: enable=import-error,no-name-in-module


 def __virtual__():
@@ -49,11 +57,11 @@ def _auth(url, user, passwd, realm):
     returns a authentication handler.
     '''

-    basic = urllib2.HTTPBasicAuthHandler()
+    basic = _HTTPBasicAuthHandler()
     basic.add_password(realm=realm, uri=url, user=user, passwd=passwd)
-    digest = urllib2.HTTPDigestAuthHandler()
+    digest = _HTTPDigestAuthHandler()
     digest.add_password(realm=realm, uri=url, user=user, passwd=passwd)
-    return urllib2.build_opener(basic, digest)
+    return _build_opener(basic, digest)


 def _do_http(opts, profile='default'):
@@ -74,11 +82,11 @@ def _do_http(opts, profile='default'):

     if user and passwd:
         auth = _auth(url, realm, user, passwd)
-        urllib2.install_opener(auth)
+        _install_opener(auth)

-    url += '?{0}'.format(urllib.urlencode(opts))
+    url += '?{0}'.format(_urlencode(opts))

-    for line in urllib2.urlopen(url, timeout=timeout).read().splitlines():
+    for line in _urlopen(url, timeout=timeout).read().splitlines():
         splt = line.split('=', 1)
         if splt[0] in ret:
             ret[splt[0]] += ',{0}'.format(splt[1])
@@ -3,7 +3,10 @@
 Support for nginx
 '''
 from __future__ import absolute_import
-import urllib2
+
+# Import 3rd-party libs
+from salt.ext.six.moves.urllib.request import urlopen as _urlopen  # pylint: disable=no-name-in-module,import-error

 # Import salt libs
 import salt.utils
 import salt.utils.decorators as decorators
@@ -110,7 +113,7 @@ def status(url="http://127.0.0.1/status"):

         salt '*' nginx.status
     """
-    resp = urllib2.urlopen(url)
+    resp = _urlopen(url)
     status_data = resp.read()
     resp.close()

@@ -17,7 +17,19 @@ import fnmatch
 import time
 import sys
 import copy
-from urllib2 import URLError
+
+# Import 3rd-party libs
+# pylint: disable=import-error
+try:
+    import esky
+    from esky import EskyVersionError
+    HAS_ESKY = True
+except ImportError:
+    HAS_ESKY = False
+# pylint: disable=no-name-in-module
+from salt.ext.six import string_types
+from salt.ext.six.moves.urllib.error import URLError
+# pylint: enable=import-error,no-name-in-module

 # Import salt libs
 import salt
@@ -33,18 +45,9 @@ import salt.wheel
 from salt.exceptions import (
     SaltReqTimeoutError, SaltRenderError, CommandExecutionError
 )
-from salt.ext.six import string_types

 __proxyenabled__ = ['*']

-# Import third party libs
-try:
-    import esky
-    from esky import EskyVersionError
-    HAS_ESKY = True
-except ImportError:
-    HAS_ESKY = False
-
 log = logging.getLogger(__name__)

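The esky import above moves into the third-party block but keeps the usual optional-dependency guard: try the import, record availability in a flag, and let callers branch on it. A generic sketch of that guard; the `update_available` helper is an assumption used only for illustration:

    # Optional-dependency guard: the module may be absent, so record a flag
    # instead of letting the ImportError propagate at import time.
    try:
        import esky  # noqa: F401
        HAS_ESKY = True
    except ImportError:
        HAS_ESKY = False


    def update_available():
        if not HAS_ESKY:
            # Caller decides how to report the missing feature.
            return False
        return True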
@@ -15,12 +15,18 @@ Module for sending messages to Slack
         slack:
           api_key: peWcBiMOS9HrZG15peWcBiMOS9HrZG15
 '''

+# Import Python libs
 from __future__ import absolute_import
-import requests
 import logging
-from urlparse import urljoin as _urljoin
+
+# Import 3rd-party libs
+import requests
 from requests.exceptions import ConnectionError
+# pylint: disable=import-error,no-name-in-module,redefined-builtin
+from salt.ext.six.moves.urllib.parse import urljoin as _urljoin
 from salt.ext.six.moves import range
+# pylint: enable=import-error,no-name-in-module

 log = logging.getLogger(__name__)
 __virtualname__ = 'slack'
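Both the Slack and HipChat modules now take `urljoin` from `six.moves.urllib.parse` and keep `requests` for the HTTP call itself. A small sketch of composing an API URL that way — the base URL, path, token and channel below are placeholders, not values from the diff:

    import requests
    from salt.ext.six.moves.urllib.parse import urljoin as _urljoin

    base_url = 'https://slack.com/api/'           # placeholder endpoint
    url = _urljoin(base_url, 'chat.postMessage')  # -> https://slack.com/api/chat.postMessage

    # A GET with query parameters; the token value is obviously fake.
    result = requests.get(url, params={'token': 'xoxp-example', 'channel': '#general'})
    print(result.status_code)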
@ -63,16 +63,25 @@ verbose : True
|
|||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import urllib2
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
# pylint: disable=no-name-in-module,import-error
|
||||||
|
from salt.ext.six import string_types
|
||||||
|
from salt.ext.six.moves.urllib.request import (
|
||||||
|
urlopen as _urlopen,
|
||||||
|
HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
|
||||||
|
HTTPDigestAuthHandler as _HTTPDigestAuthHandler,
|
||||||
|
build_opener as _build_opener,
|
||||||
|
install_opener as _install_opener
|
||||||
|
)
|
||||||
|
# pylint: enable=no-name-in-module,import-error
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
from salt._compat import url_open
|
|
||||||
from salt.ext.six import string_types
|
|
||||||
|
|
||||||
|
|
||||||
########################## PRIVATE METHODS ##############################
|
########################## PRIVATE METHODS ##############################
|
||||||
|
|
||||||
|
|
||||||
def __virtual__():
|
def __virtual__():
|
||||||
'''
|
'''
|
||||||
PRIVATE METHOD
|
PRIVATE METHOD
|
||||||
@ -233,16 +242,16 @@ def _auth(url):
|
|||||||
realm = __salt__['config.get']('solr.auth_realm', 'Solr')
|
realm = __salt__['config.get']('solr.auth_realm', 'Solr')
|
||||||
|
|
||||||
if user and password:
|
if user and password:
|
||||||
basic = urllib2.HTTPBasicAuthHandler()
|
basic = _HTTPBasicAuthHandler()
|
||||||
basic.add_password(
|
basic.add_password(
|
||||||
realm=realm, uri=url, user=user, passwd=password
|
realm=realm, uri=url, user=user, passwd=password
|
||||||
)
|
)
|
||||||
digest = urllib2.HTTPDigestAuthHandler()
|
digest = _HTTPDigestAuthHandler()
|
||||||
digest.add_password(
|
digest.add_password(
|
||||||
realm=realm, uri=url, user=user, passwd=password
|
realm=realm, uri=url, user=user, passwd=password
|
||||||
)
|
)
|
||||||
urllib2.install_opener(
|
_install_opener(
|
||||||
urllib2.build_opener(basic, digest)
|
_build_opener(basic, digest)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@ -266,9 +275,9 @@ def _http_request(url, request_timeout=None):
|
|||||||
|
|
||||||
request_timeout = __salt__['config.option']('solr.request_timeout')
|
request_timeout = __salt__['config.option']('solr.request_timeout')
|
||||||
if request_timeout is None:
|
if request_timeout is None:
|
||||||
data = json.load(url_open(url))
|
data = json.load(_urlopen(url))
|
||||||
else:
|
else:
|
||||||
data = json.load(url_open(url, timeout=request_timeout))
|
data = json.load(_urlopen(url, timeout=request_timeout))
|
||||||
return _get_return_dict(True, data, [])
|
return _get_return_dict(True, data, [])
|
||||||
except Exception as err:
|
except Exception as err:
|
||||||
return _get_return_dict(False, {}, ["{0} : {1}".format(url, err)])
|
return _get_return_dict(False, {}, ["{0} : {1}".format(url, err)])
|
||||||
@ -384,18 +393,18 @@ def _pre_index_check(handler, host=None, core_name=None):
|
|||||||
|
|
||||||
{'success':boolean, 'data':dict, 'errors':list, 'warnings':list}
|
{'success':boolean, 'data':dict, 'errors':list, 'warnings':list}
|
||||||
'''
|
'''
|
||||||
#make sure that it's a master minion
|
# make sure that it's a master minion
|
||||||
if _get_none_or_value(host) is None and not _is_master():
|
if _get_none_or_value(host) is None and not _is_master():
|
||||||
err = [
|
err = [
|
||||||
'solr.pre_indexing_check can only be called by "master" minions']
|
'solr.pre_indexing_check can only be called by "master" minions']
|
||||||
return _get_return_dict(False, err)
|
return _get_return_dict(False, err)
|
||||||
#solr can run out of memory quickly if the dih is processing multiple
|
# solr can run out of memory quickly if the dih is processing multiple
|
||||||
#handlers at the same time, so if it's a multicore setup require a
|
# handlers at the same time, so if it's a multicore setup require a
|
||||||
#core_name param.
|
# core_name param.
|
||||||
if _get_none_or_value(core_name) is None and _check_for_cores():
|
if _get_none_or_value(core_name) is None and _check_for_cores():
|
||||||
errors = ['solr.full_import is not safe to multiple handlers at once']
|
errors = ['solr.full_import is not safe to multiple handlers at once']
|
||||||
return _get_return_dict(False, errors=errors)
|
return _get_return_dict(False, errors=errors)
|
||||||
#check to make sure that we're not already indexing
|
# check to make sure that we're not already indexing
|
||||||
resp = import_status(handler, host, core_name)
|
resp = import_status(handler, host, core_name)
|
||||||
if resp['success']:
|
if resp['success']:
|
||||||
status = resp['data']['status']
|
status = resp['data']['status']
|
||||||
@ -472,7 +481,7 @@ def lucene_version(core_name=None):
|
|||||||
salt '*' solr.lucene_version
|
salt '*' solr.lucene_version
|
||||||
'''
|
'''
|
||||||
ret = _get_return_dict()
|
ret = _get_return_dict()
|
||||||
#do we want to check for all the cores?
|
# do we want to check for all the cores?
|
||||||
if _get_none_or_value(core_name) is None and _check_for_cores():
|
if _get_none_or_value(core_name) is None and _check_for_cores():
|
||||||
success = True
|
success = True
|
||||||
for name in __salt__['config.option']('solr.cores'):
|
for name in __salt__['config.option']('solr.cores'):
|
||||||
@ -515,7 +524,7 @@ def version(core_name=None):
|
|||||||
salt '*' solr.version
|
salt '*' solr.version
|
||||||
'''
|
'''
|
||||||
ret = _get_return_dict()
|
ret = _get_return_dict()
|
||||||
#do we want to check for all the cores?
|
# do we want to check for all the cores?
|
||||||
if _get_none_or_value(core_name) is None and _check_for_cores():
|
if _get_none_or_value(core_name) is None and _check_for_cores():
|
||||||
success = True
|
success = True
|
||||||
for name in __opts__['solr.cores']:
|
for name in __opts__['solr.cores']:
|
||||||
@ -655,7 +664,7 @@ def is_replication_enabled(host=None, core_name=None):
|
|||||||
errors = ['Only "slave" minions can run "is_replication_enabled"']
|
errors = ['Only "slave" minions can run "is_replication_enabled"']
|
||||||
return ret.update({'success': False, 'errors': errors})
|
return ret.update({'success': False, 'errors': errors})
|
||||||
|
|
||||||
#define a convenience method so we don't duplicate code
|
# define a convenience method so we don't duplicate code
|
||||||
def _checks(ret, success, resp, core):
|
def _checks(ret, success, resp, core):
|
||||||
if response['success']:
|
if response['success']:
|
||||||
slave = resp['data']['details']['slave']
|
slave = resp['data']['details']['slave']
|
||||||
@ -663,27 +672,27 @@ def is_replication_enabled(host=None, core_name=None):
|
|||||||
# on the master and we can't get this info.
|
# on the master and we can't get this info.
|
||||||
enabled = 'false'
|
enabled = 'false'
|
||||||
master_url = slave['masterUrl']
|
master_url = slave['masterUrl']
|
||||||
#check for errors on the slave
|
# check for errors on the slave
|
||||||
if 'ERROR' in slave:
|
if 'ERROR' in slave:
|
||||||
success = False
|
success = False
|
||||||
err = "{0}: {1} - {2}".format(core, slave['ERROR'], master_url)
|
err = "{0}: {1} - {2}".format(core, slave['ERROR'], master_url)
|
||||||
resp['errors'].append(err)
|
resp['errors'].append(err)
|
||||||
#if there is an error return everything
|
# if there is an error return everything
|
||||||
data = slave if core is None else {core: {'data': slave}}
|
data = slave if core is None else {core: {'data': slave}}
|
||||||
else:
|
else:
|
||||||
enabled = slave['masterDetails']['master'][
|
enabled = slave['masterDetails']['master'][
|
||||||
'replicationEnabled']
|
'replicationEnabled']
|
||||||
#if replication is turned off on the master, or polling is
|
# if replication is turned off on the master, or polling is
|
||||||
#disabled we need to return false. These may not be errors,
|
# disabled we need to return false. These may not be errors,
|
||||||
#but the purpose of this call is to check to see if the slaves
|
# but the purpose of this call is to check to see if the slaves
|
||||||
#can replicate.
|
# can replicate.
|
||||||
if enabled == 'false':
|
if enabled == 'false':
|
||||||
resp['warnings'].append("Replication is disabled on master.")
|
resp['warnings'].append("Replication is disabled on master.")
|
||||||
success = False
|
success = False
|
||||||
if slave['isPollingDisabled'] == 'true':
|
if slave['isPollingDisabled'] == 'true':
|
||||||
success = False
|
success = False
|
||||||
resp['warning'].append("Polling is disabled")
|
resp['warning'].append("Polling is disabled")
|
||||||
#update the return
|
# update the return
|
||||||
ret = _update_return_dict(ret, success, data,
|
ret = _update_return_dict(ret, success, data,
|
||||||
resp['errors'], resp['warnings'])
|
resp['errors'], resp['warnings'])
|
||||||
return (ret, success)
|
return (ret, success)
|
||||||
@ -748,8 +757,8 @@ def match_index_versions(host=None, core_name=None):
|
|||||||
success = False
|
success = False
|
||||||
err = "{0}: {1} - {2}".format(core, error, master_url)
|
err = "{0}: {1} - {2}".format(core, error, master_url)
|
||||||
resp['errors'].append(err)
|
resp['errors'].append(err)
|
||||||
#if there was an error return the entire response so the
|
# if there was an error return the entire response so the
|
||||||
#alterer can get what it wants
|
# alterer can get what it wants
|
||||||
data = slave if core is None else {core: {'data': slave}}
|
data = slave if core is None else {core: {'data': slave}}
|
||||||
else:
|
else:
|
||||||
versions = {
|
versions = {
|
||||||
@ -762,7 +771,7 @@ def match_index_versions(host=None, core_name=None):
|
|||||||
if 'replicationFailedAtList' in slave:
|
if 'replicationFailedAtList' in slave:
|
||||||
versions.update({'failed_list': slave[
|
versions.update({'failed_list': slave[
|
||||||
'replicationFailedAtList']})
|
'replicationFailedAtList']})
|
||||||
#check the index versions
|
# check the index versions
|
||||||
if versions['master'] != versions['slave']:
|
if versions['master'] != versions['slave']:
|
||||||
success = False
|
success = False
|
||||||
resp['errors'].append(
|
resp['errors'].append(
|
||||||
@ -778,7 +787,7 @@ def match_index_versions(host=None, core_name=None):
|
|||||||
ret = _update_return_dict(ret, success, data, errors=err)
|
ret = _update_return_dict(ret, success, data, errors=err)
|
||||||
return (ret, success)
|
return (ret, success)
|
||||||
|
|
||||||
#check all cores?
|
# check all cores?
|
||||||
if _get_none_or_value(core_name) is None and _check_for_cores():
|
if _get_none_or_value(core_name) is None and _check_for_cores():
|
||||||
success = True
|
success = True
|
||||||
for name in __opts__['solr.cores']:
|
for name in __opts__['solr.cores']:
|
||||||
@ -1130,7 +1139,7 @@ def reload_import_config(handler, host=None, core_name=None, verbose=False):
|
|||||||
salt '*' solr.reload_import_config dataimport None music {'clean':True}
|
salt '*' solr.reload_import_config dataimport None music {'clean':True}
|
||||||
'''
|
'''
|
||||||
|
|
||||||
#make sure that it's a master minion
|
# make sure that it's a master minion
|
||||||
if not _is_master() and _get_none_or_value(host) is None:
|
if not _is_master() and _get_none_or_value(host) is None:
|
||||||
err = [
|
err = [
|
||||||
'solr.reload_import_config can only be called by "master" minions']
|
'solr.reload_import_config can only be called by "master" minions']
|
||||||
@ -1285,7 +1294,7 @@ def delta_import(handler, host=None, core_name=None, options=None, extra=None):
|
|||||||
if not resp['success']:
|
if not resp['success']:
|
||||||
return resp
|
return resp
|
||||||
options = _merge_options(options)
|
options = _merge_options(options)
|
||||||
#if we're nuking data, and we're multi-core disable replication for safety
|
# if we're nuking data, and we're multi-core disable replication for safety
|
||||||
if options['clean'] and _check_for_cores():
|
if options['clean'] and _check_for_cores():
|
||||||
resp = set_replication_enabled(False, host=host, core_name=core_name)
|
resp = set_replication_enabled(False, host=host, core_name=core_name)
|
||||||
if not resp['success']:
|
if not resp['success']:
|
||||||
|
@ -3,15 +3,19 @@
|
|||||||
Provide the service module for system supervisord or supervisord in a
|
Provide the service module for system supervisord or supervisord in a
|
||||||
virtualenv
|
virtualenv
|
||||||
'''
|
'''
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
# Import python libs
|
# Import python libs
|
||||||
|
from __future__ import absolute_import
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
from salt.ext.six import string_types
|
||||||
|
from salt.ext.six.moves import configparser # pylint: disable=import-error
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
from salt.exceptions import CommandExecutionError, CommandNotFoundError
|
from salt.exceptions import CommandExecutionError, CommandNotFoundError
|
||||||
from salt._compat import configparser, string_types
|
|
||||||
|
|
||||||
|
|
||||||
def __virtual__():
|
def __virtual__():
|
||||||
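The supervisord module now pulls configparser and string_types from the vendored six rather than salt._compat. As a rough illustration of how the configparser move behaves, here is a minimal sketch that uses the standalone six package (Salt ships the same API as salt.ext.six); the supervisord.conf path is a placeholder.

# Sketch only: six.moves.configparser is ConfigParser on Python 2 and
# configparser on Python 3, so one import line serves both interpreters.
from six.moves import configparser

parser = configparser.RawConfigParser()
parser.read(['supervisord.conf'])  # placeholder path; read() ignores missing files
for section in parser.sections():
    # items() yields (option, value) pairs for each section that was found
    print(section, dict(parser.items(section)))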
@ -375,10 +379,12 @@ def options(name, conf_file=None):
|
|||||||
ret = {}
|
ret = {}
|
||||||
for key, val in config.items(section_name):
|
for key, val in config.items(section_name):
|
||||||
val = salt.utils.str_to_num(val.split(';')[0].strip())
|
val = salt.utils.str_to_num(val.split(';')[0].strip())
|
||||||
|
# pylint: disable=maybe-no-member
|
||||||
if isinstance(val, string_types):
|
if isinstance(val, string_types):
|
||||||
if val.lower() == 'true':
|
if val.lower() == 'true':
|
||||||
val = True
|
val = True
|
||||||
elif val.lower() == 'false':
|
elif val.lower() == 'false':
|
||||||
val = False
|
val = False
|
||||||
|
# pylint: enable=maybe-no-member
|
||||||
ret[key] = val
|
ret[key] = val
|
||||||
return ret
|
return ret
|
||||||
|
@ -45,12 +45,22 @@ from __future__ import absolute_import
|
|||||||
# Import python libs
|
# Import python libs
|
||||||
import glob
|
import glob
|
||||||
import hashlib
|
import hashlib
|
||||||
import urllib
|
|
||||||
import urllib2
|
|
||||||
import tempfile
|
import tempfile
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
# pylint: disable=no-name-in-module,import-error
|
||||||
|
from salt.ext.six.moves.urllib.parse import urlencode as _urlencode
|
||||||
|
from salt.ext.six.moves.urllib.request import (
|
||||||
|
urlopen as _urlopen,
|
||||||
|
HTTPBasicAuthHandler as _HTTPBasicAuthHandler,
|
||||||
|
HTTPDigestAuthHandler as _HTTPDigestAuthHandler,
|
||||||
|
build_opener as _build_opener,
|
||||||
|
install_opener as _install_opener
|
||||||
|
)
|
||||||
|
# pylint: enable=no-name-in-module,import-error
|
||||||
|
|
||||||
# Import Salt libs
|
# Import Salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
|
|
||||||
@ -116,13 +126,13 @@ def _auth(uri):
|
|||||||
if user is False or password is False:
|
if user is False or password is False:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
basic = urllib2.HTTPBasicAuthHandler()
|
basic = _HTTPBasicAuthHandler()
|
||||||
basic.add_password(realm='Tomcat Manager Application', uri=uri,
|
basic.add_password(realm='Tomcat Manager Application', uri=uri,
|
||||||
user=user, passwd=password)
|
user=user, passwd=password)
|
||||||
digest = urllib2.HTTPDigestAuthHandler()
|
digest = _HTTPDigestAuthHandler()
|
||||||
digest.add_password(realm='Tomcat Manager Application', uri=uri,
|
digest.add_password(realm='Tomcat Manager Application', uri=uri,
|
||||||
user=user, passwd=password)
|
user=user, passwd=password)
|
||||||
return urllib2.build_opener(basic, digest)
|
return _build_opener(basic, digest)
|
||||||
|
|
||||||
|
|
||||||
def _wget(cmd, opts=None, url='http://localhost:8080/manager', timeout=180):
|
def _wget(cmd, opts=None, url='http://localhost:8080/manager', timeout=180):
|
||||||
@ -167,19 +177,19 @@ def _wget(cmd, opts=None, url='http://localhost:8080/manager', timeout=180):
|
|||||||
url += 'text/{0}'.format(cmd)
|
url += 'text/{0}'.format(cmd)
|
||||||
url6 += '{0}'.format(cmd)
|
url6 += '{0}'.format(cmd)
|
||||||
if opts:
|
if opts:
|
||||||
url += '?{0}'.format(urllib.urlencode(opts))
|
url += '?{0}'.format(_urlencode(opts))
|
||||||
url6 += '?{0}'.format(urllib.urlencode(opts))
|
url6 += '?{0}'.format(_urlencode(opts))
|
||||||
|
|
||||||
# Make the HTTP request
|
# Make the HTTP request
|
||||||
urllib2.install_opener(auth)
|
_install_opener(auth)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Trying tomcat >= 7 url
|
# Trying tomcat >= 7 url
|
||||||
ret['msg'] = urllib2.urlopen(url, timeout=timeout).read().splitlines()
|
ret['msg'] = _urlopen(url, timeout=timeout).read().splitlines()
|
||||||
except Exception:
|
except Exception:
|
||||||
try:
|
try:
|
||||||
# Trying tomcat6 url
|
# Trying tomcat6 url
|
||||||
ret['msg'] = urllib2.urlopen(url6, timeout=timeout).read().splitlines()
|
ret['msg'] = _urlopen(url6, timeout=timeout).read().splitlines()
|
||||||
except Exception:
|
except Exception:
|
||||||
ret['msg'] = 'Failed to create HTTP request'
|
ret['msg'] = 'Failed to create HTTP request'
|
||||||
|
|
||||||
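The tomcat helpers above replace urllib/urllib2 with the equivalent names from six.moves.urllib. A hedged sketch of the same opener pattern, written against the standalone six package; the manager URL and the admin/secret credentials are placeholders, not values from the module.

# Sketch: register Basic and Digest credentials, install the opener
# globally, then fetch a manager URL -- the flow used by _auth()/_wget().
from six.moves.urllib.parse import urlencode
from six.moves.urllib.request import (
    HTTPBasicAuthHandler,
    HTTPDigestAuthHandler,
    build_opener,
    install_opener,
    urlopen,
)

url = 'http://localhost:8080/manager/text/list'  # placeholder manager URL
basic = HTTPBasicAuthHandler()
basic.add_password(realm='Tomcat Manager Application', uri=url,
                   user='admin', passwd='secret')
digest = HTTPDigestAuthHandler()
digest.add_password(realm='Tomcat Manager Application', uri=url,
                    user='admin', passwd='secret')
install_opener(build_opener(basic, digest))

# Query string built the portable way; urlopen fails loudly if Tomcat is down.
print(urlopen('{0}?{1}'.format(url, urlencode({'path': '/'})), timeout=30).read())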
|
@ -16,14 +16,15 @@ import shutil
|
|||||||
import subprocess
|
import subprocess
|
||||||
import string # pylint: disable=deprecated-module
|
import string # pylint: disable=deprecated-module
|
||||||
import logging
|
import logging
|
||||||
import salt.ext.six as six
|
|
||||||
|
|
||||||
# Import third party libs
|
# Import third party libs
|
||||||
import yaml
|
import yaml
|
||||||
import jinja2
|
import jinja2
|
||||||
import jinja2.exceptions
|
import jinja2.exceptions
|
||||||
|
import salt.ext.six as six
|
||||||
|
from salt.ext.six.moves import StringIO as _StringIO # pylint: disable=import-error
|
||||||
try:
|
try:
|
||||||
import libvirt
|
import libvirt # pylint: disable=import-error
|
||||||
from xml.dom import minidom
|
from xml.dom import minidom
|
||||||
HAS_ALL_IMPORTS = True
|
HAS_ALL_IMPORTS = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
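virt.py now takes StringIO from salt.ext.six.moves instead of salt._compat. A tiny sketch with the standalone six package; the XML fragment is just an example value.

# Sketch: six.moves.StringIO is StringIO.StringIO on Python 2 and io.StringIO
# on Python 3, so in-memory text buffers are built the same way everywhere.
from six.moves import StringIO

buf = StringIO()
buf.write(u'<domain type="kvm"/>\n')  # example XML fragment
print(buf.getvalue())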
@ -34,7 +35,6 @@ import salt.utils
|
|||||||
import salt.utils.files
|
import salt.utils.files
|
||||||
import salt.utils.templates
|
import salt.utils.templates
|
||||||
import salt.utils.validate.net
|
import salt.utils.validate.net
|
||||||
from salt._compat import StringIO as _StringIO
|
|
||||||
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
@ -1589,7 +1589,7 @@ def is_hyper():
|
|||||||
salt '*' virt.is_hyper
|
salt '*' virt.is_hyper
|
||||||
'''
|
'''
|
||||||
try:
|
try:
|
||||||
import libvirt
|
import libvirt # pylint: disable=import-error
|
||||||
except ImportError:
|
except ImportError:
|
||||||
# not a usable hypervisor without libvirt module
|
# not a usable hypervisor without libvirt module
|
||||||
return False
|
return False
|
||||||
|
@ -30,7 +30,7 @@ import fileinput # do not remove, used in imported file.py functions
|
|||||||
import fnmatch # do not remove, used in imported file.py functions
|
import fnmatch # do not remove, used in imported file.py functions
|
||||||
from salt.ext.six import string_types # do not remove, used in imported file.py functions
|
from salt.ext.six import string_types # do not remove, used in imported file.py functions
|
||||||
# do not remove, used in imported file.py functions
|
# do not remove, used in imported file.py functions
|
||||||
from salt.ext.six.moves.urllib.parse import urlparse # pylint: disable=E0611
|
from salt.ext.six.moves.urllib.parse import urlparse as _urlparse # pylint: disable=import-error,no-name-in-module
|
||||||
import salt.utils.atomicfile # do not remove, used in imported file.py functions
|
import salt.utils.atomicfile # do not remove, used in imported file.py functions
|
||||||
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
from salt.exceptions import CommandExecutionError, SaltInvocationError
|
||||||
# pylint: enable=W0611
|
# pylint: enable=W0611
|
||||||
|
@ -19,20 +19,17 @@ import copy
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from distutils.version import LooseVersion as _LooseVersion
|
from distutils.version import LooseVersion as _LooseVersion # pylint: disable=no-name-in-module,import-error
|
||||||
try:
|
|
||||||
from shlex import quote as _cmd_quote # pylint: disable=E0611
|
|
||||||
except ImportError:
|
|
||||||
from pipes import quote as _cmd_quote # pylint: disable=E0611
|
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
# pylint: disable=import-error,redefined-builtin
|
||||||
import salt.ext.six as six
|
import salt.ext.six as six
|
||||||
from distutils.version import LooseVersion as _LooseVersion
|
from salt.ext.six import string_types
|
||||||
from salt.ext.six.moves import range
|
from salt.ext.six.moves import shlex_quote as _cmd_quote, range
|
||||||
|
# pylint: enable=import-error
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
import salt.ext.six as six
|
|
||||||
from salt.ext.six import string_types
|
|
||||||
from salt.exceptions import (
|
from salt.exceptions import (
|
||||||
CommandExecutionError, MinionError, SaltInvocationError
|
CommandExecutionError, MinionError, SaltInvocationError
|
||||||
)
|
)
|
||||||
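The try/except dance between shlex.quote and pipes.quote disappears in favour of the shlex_quote move. A small sketch against the standalone six package; the package spec is made up.

# Sketch: six.moves.shlex_quote is pipes.quote on Python 2 and shlex.quote
# on Python 3, so shell quoting is spelled once for both interpreters.
from six.moves import shlex_quote as _cmd_quote

pkg = "vim-enhanced-7.4*"  # hypothetical package spec with shell metacharacters
print('yum -y install {0}'.format(_cmd_quote(pkg)))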
@ -722,6 +719,7 @@ def group_install(name,
|
|||||||
elif not isinstance(groups, list):
|
elif not isinstance(groups, list):
|
||||||
raise SaltInvocationError('\'groups\' must be a list')
|
raise SaltInvocationError('\'groups\' must be a list')
|
||||||
|
|
||||||
|
# pylint: disable=maybe-no-member
|
||||||
if isinstance(skip, string_types):
|
if isinstance(skip, string_types):
|
||||||
skip = skip.split(',')
|
skip = skip.split(',')
|
||||||
if not isinstance(skip, (list, tuple)):
|
if not isinstance(skip, (list, tuple)):
|
||||||
@ -731,6 +729,7 @@ def group_install(name,
|
|||||||
include = include.split(',')
|
include = include.split(',')
|
||||||
if not isinstance(include, (list, tuple)):
|
if not isinstance(include, (list, tuple)):
|
||||||
raise SaltInvocationError('\'include\' must be a list')
|
raise SaltInvocationError('\'include\' must be a list')
|
||||||
|
# pylint: enable=maybe-no-member
|
||||||
|
|
||||||
targets = []
|
targets = []
|
||||||
for group in groups:
|
for group in groups:
|
||||||
|
@ -22,34 +22,26 @@ You have those following methods:
|
|||||||
* run_buildout
|
* run_buildout
|
||||||
* buildout
|
* buildout
|
||||||
'''
|
'''
|
||||||
from __future__ import absolute_import
|
|
||||||
import salt.ext.six as six
|
|
||||||
from salt.ext.six.moves import range
|
|
||||||
|
|
||||||
# Define the module's virtual name
|
|
||||||
__virtualname__ = 'buildout'
|
|
||||||
|
|
||||||
|
|
||||||
def __virtual__():
|
|
||||||
'''
|
|
||||||
Only load if buildout libs are present
|
|
||||||
'''
|
|
||||||
if True:
|
|
||||||
return __virtualname__
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Import python libs
|
# Import python libs
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
import copy
|
import copy
|
||||||
import urllib2
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
||||||
|
from salt.ext.six import string_types, text_type
|
||||||
|
from salt.ext.six.moves import range
|
||||||
|
from salt.ext.six.moves.urllib.request import urlopen as _urlopen
|
||||||
|
# pylint: enable=import-error,no-name-in-module,redefined-builtin
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
from salt.exceptions import CommandExecutionError
|
from salt.exceptions import CommandExecutionError
|
||||||
from salt.ext.six import string_types
|
|
||||||
|
|
||||||
|
|
||||||
INVALID_RESPONSE = 'We did not get any expected answer from buildout'
|
INVALID_RESPONSE = 'We did not get any expected answer from buildout'
|
||||||
@ -73,6 +65,18 @@ _URL_VERSIONS = {
|
|||||||
DEFAULT_VER = 2
|
DEFAULT_VER = 2
|
||||||
_logger = logging.getLogger(__name__)
|
_logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Define the module's virtual name
|
||||||
|
__virtualname__ = 'buildout'
|
||||||
|
|
||||||
|
|
||||||
|
def __virtual__():
|
||||||
|
'''
|
||||||
|
Only load if buildout libs are present
|
||||||
|
'''
|
||||||
|
if True:
|
||||||
|
return __virtualname__
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
def _salt_callback(func, **kwargs):
|
def _salt_callback(func, **kwargs):
|
||||||
LOG.clear()
|
LOG.clear()
|
||||||
@ -139,7 +143,7 @@ class _Logger(object):
|
|||||||
self._by_level = {}
|
self._by_level = {}
|
||||||
|
|
||||||
def _log(self, level, msg):
|
def _log(self, level, msg):
|
||||||
if not isinstance(msg, six.text_type):
|
if not isinstance(msg, text_type):
|
||||||
msg = msg.decode('utf-8')
|
msg = msg.decode('utf-8')
|
||||||
if level not in self._by_level:
|
if level not in self._by_level:
|
||||||
self._by_level[level] = []
|
self._by_level[level] = []
|
||||||
@ -182,7 +186,7 @@ LOG = _Logger()
|
|||||||
|
|
||||||
|
|
||||||
def _encode_string(string):
|
def _encode_string(string):
|
||||||
if isinstance(string, six.text_type):
|
if isinstance(string, text_type):
|
||||||
string = string.encode('utf-8')
|
string = string.encode('utf-8')
|
||||||
return string
|
return string
|
||||||
|
|
||||||
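With text_type now imported straight from salt.ext.six, the unicode checks in _log() and _encode_string() read the same on both interpreters. A minimal sketch using the standalone six package:

# Sketch: text_type is unicode on Python 2 and str on Python 3, so the same
# isinstance() test decides whether a value still needs UTF-8 encoding.
from six import text_type

def encode_string(value):
    # Encode real text objects; leave byte strings untouched.
    if isinstance(value, text_type):
        value = value.encode('utf-8')
    return value

print(repr(encode_string(u'caf\xe9')))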
@ -522,7 +526,7 @@ def upgrade_bootstrap(directory='.',
|
|||||||
'{0}.updated_bootstrap'.format(buildout_ver)))
|
'{0}.updated_bootstrap'.format(buildout_ver)))
|
||||||
except (OSError, IOError):
|
except (OSError, IOError):
|
||||||
LOG.info('Bootstrap updated from repository')
|
LOG.info('Bootstrap updated from repository')
|
||||||
data = urllib2.urlopen(booturl).read()
|
data = _urlopen(booturl).read()
|
||||||
updated = True
|
updated = True
|
||||||
dled = True
|
dled = True
|
||||||
if 'socket.setdefaulttimeout' not in data:
|
if 'socket.setdefaulttimeout' not in data:
|
||||||
|
@ -11,14 +11,15 @@ import copy
|
|||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
# pylint: disable=import-error,redefined-builtin,no-name-in-module
|
||||||
import salt.ext.six as six
|
import salt.ext.six as six
|
||||||
import salt.ext.six.moves.configparser # pylint: disable=E0611
|
from salt.ext.six.moves import shlex_quote as _cmd_quote, configparser
|
||||||
import urlparse
|
from salt.ext.six.moves.urllib.parse import urlparse, ParseResult
|
||||||
|
# pylint: enable=import-error,redefined-builtin,no-name-in-module
|
||||||
|
|
||||||
from xml.dom import minidom as dom
|
from xml.dom import minidom as dom
|
||||||
try:
|
|
||||||
from shlex import quote as _cmd_quote # pylint: disable=E0611
|
|
||||||
except ImportError:
|
|
||||||
from pipes import quote as _cmd_quote
|
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
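In the zypper module the bare urlparse module import gives way to the urlparse function from six.moves.urllib.parse, so call sites change from urlparse.urlparse(url) to urlparse(url). A sketch with the standalone six package and a placeholder repository URL:

# Sketch: the function import resolves to urlparse.urlparse on Python 2 and
# urllib.parse.urlparse on Python 3.
from six.moves.urllib.parse import urlparse

url = 'http://download.example.com/repo/oss/'  # placeholder repository URL
parts = urlparse(url)
if not parts.scheme:
    raise ValueError('URL looks wrong: {0}'.format(url))
print(parts.scheme, parts.netloc, parts.path)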
@ -28,9 +29,9 @@ from salt.exceptions import (
|
|||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
HAS_ZYPP = False
|
HAS_ZYPP = False
|
||||||
ZYPP_HOME = "/etc/zypp"
|
ZYPP_HOME = '/etc/zypp'
|
||||||
LOCKS = "{0}/locks".format(ZYPP_HOME)
|
LOCKS = '{0}/locks'.format(ZYPP_HOME)
|
||||||
REPOS = "{0}/repos.d".format(ZYPP_HOME)
|
REPOS = '{0}/repos.d'.format(ZYPP_HOME)
|
||||||
|
|
||||||
# Define the module's virtual name
|
# Define the module's virtual name
|
||||||
__virtualname__ = 'pkg'
|
__virtualname__ = 'pkg'
|
||||||
@ -245,8 +246,8 @@ def _get_configured_repos():
|
|||||||
Get all the info about repositories from the configurations.
|
Get all the info about repositories from the configurations.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
repos_cfg = salt.ext.six.moves.configparser.ConfigParser()
|
repos_cfg = configparser.ConfigParser()
|
||||||
repos_cfg.read([REPOS + "/" + fname for fname in os.listdir(REPOS)])
|
repos_cfg.read([REPOS + '/' + fname for fname in os.listdir(REPOS)])
|
||||||
|
|
||||||
return repos_cfg
|
return repos_cfg
|
||||||
|
|
||||||
@ -314,14 +315,14 @@ def del_repo(repo):
|
|||||||
if alias == repo:
|
if alias == repo:
|
||||||
cmd = ('zypper -x --non-interactive rr --loose-auth --loose-query {0}'.format(alias))
|
cmd = ('zypper -x --non-interactive rr --loose-auth --loose-query {0}'.format(alias))
|
||||||
doc = dom.parseString(__salt__['cmd.run'](cmd, output_loglevel='trace'))
|
doc = dom.parseString(__salt__['cmd.run'](cmd, output_loglevel='trace'))
|
||||||
msg = doc.getElementsByTagName("message")
|
msg = doc.getElementsByTagName('message')
|
||||||
if doc.getElementsByTagName("progress") and msg:
|
if doc.getElementsByTagName('progress') and msg:
|
||||||
return {
|
return {
|
||||||
repo: True,
|
repo: True,
|
||||||
'message': msg[0].childNodes[0].nodeValue,
|
'message': msg[0].childNodes[0].nodeValue,
|
||||||
}
|
}
|
||||||
|
|
||||||
raise CommandExecutionError('Repository "{0}" not found.'.format(repo))
|
raise CommandExecutionError('Repository \'{0}\' not found.'.format(repo))
|
||||||
|
|
||||||
|
|
||||||
def mod_repo(repo, **kwargs):
|
def mod_repo(repo, **kwargs):
|
||||||
@ -365,21 +366,21 @@ def mod_repo(repo, **kwargs):
|
|||||||
|
|
||||||
# An attempt to add new one?
|
# An attempt to add new one?
|
||||||
if repo not in repos_cfg.sections():
|
if repo not in repos_cfg.sections():
|
||||||
url = kwargs.get("url", kwargs.get("mirrorlist"))
|
url = kwargs.get('url', kwargs.get('mirrorlist'))
|
||||||
if not url:
|
if not url:
|
||||||
raise CommandExecutionError(
|
raise CommandExecutionError(
|
||||||
'Repository "{0}" not found and no URL passed to create one.'.format(repo))
|
'Repository \'{0}\' not found and no URL passed to create one.'.format(repo))
|
||||||
|
|
||||||
if not urlparse.urlparse(url).scheme:
|
if not urlparse(url).scheme:
|
||||||
raise CommandExecutionError(
|
raise CommandExecutionError(
|
||||||
'Repository "{0}" not found and passed URL looks wrong.'.format(repo))
|
'Repository \'{0}\' not found and passed URL looks wrong.'.format(repo))
|
||||||
|
|
||||||
# Is there already such repo under different alias?
|
# Is there already such repo under different alias?
|
||||||
for alias in repos_cfg.sections():
|
for alias in repos_cfg.sections():
|
||||||
repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg)
|
repo_meta = _get_repo_info(alias, repos_cfg=repos_cfg)
|
||||||
|
|
||||||
# Complete user URL, in case it is not
|
# Complete user URL, in case it is not
|
||||||
new_url = urlparse.urlparse(url)
|
new_url = urlparse(url)
|
||||||
if not new_url.path:
|
if not new_url.path:
|
||||||
new_url = urlparse.ParseResult(scheme=new_url.scheme, # pylint: disable=E1123
|
new_url = ParseResult(scheme=new_url.scheme, # pylint: disable=E1123
|
||||||
netloc=new_url.netloc,
|
netloc=new_url.netloc,
|
||||||
@ -387,59 +388,61 @@ def mod_repo(repo, **kwargs):
|
|||||||
params=new_url.params,
|
params=new_url.params,
|
||||||
query=new_url.query,
|
query=new_url.query,
|
||||||
fragment=new_url.fragment)
|
fragment=new_url.fragment)
|
||||||
base_url = urlparse.urlparse(repo_meta["baseurl"])
|
base_url = urlparse(repo_meta['baseurl'])
|
||||||
|
|
||||||
if new_url == base_url:
|
if new_url == base_url:
|
||||||
raise CommandExecutionError(
|
raise CommandExecutionError(
|
||||||
'Repository "{0}" already exists as "{1}".'.format(repo, alias))
|
'Repository \'{0}\' already exists as \'{1}\'.'.format(repo, alias))
|
||||||
|
|
||||||
# Add new repo
|
# Add new repo
|
||||||
doc = None
|
doc = None
|
||||||
try:
|
try:
|
||||||
# Try to parse the output and find the error,
|
# Try to parse the output and find the error,
|
||||||
# but this does not always work (depends on the Zypper version)
|
# but this does not always work (depends on the Zypper version)
|
||||||
doc = dom.parseString(__salt__['cmd.run'](("zypper -x ar {0} '{1}'".format(url, repo)),
|
doc = dom.parseString(__salt__['cmd.run'](('zypper -x ar {0} \'{1}\''.format(url, repo)),
|
||||||
output_loglevel='trace'))
|
output_loglevel='trace'))
|
||||||
except Exception:
|
except Exception:
|
||||||
# No XML output available, but the state of the result is still unknown.
|
# No XML output available, but the state of the result is still unknown.
|
||||||
pass
|
pass
|
||||||
|
|
||||||
if doc:
|
if doc:
|
||||||
msg_nodes = doc.getElementsByTagName("message")
|
msg_nodes = doc.getElementsByTagName('message')
|
||||||
if msg_nodes:
|
if msg_nodes:
|
||||||
msg_node = msg_nodes[0]
|
msg_node = msg_nodes[0]
|
||||||
if msg_node.getAttribute("type") == "error":
|
if msg_node.getAttribute('type') == 'error':
|
||||||
raise CommandExecutionError(msg_node.childNodes[0].nodeValue)
|
raise CommandExecutionError(msg_node.childNodes[0].nodeValue)
|
||||||
|
|
||||||
# Verify the repository has been added
|
# Verify the repository has been added
|
||||||
repos_cfg = _get_configured_repos()
|
repos_cfg = _get_configured_repos()
|
||||||
if repo not in repos_cfg.sections():
|
if repo not in repos_cfg.sections():
|
||||||
raise CommandExecutionError(
|
raise CommandExecutionError(
|
||||||
'Failed add new repository "{0}" for unknown reason. Please look into Zypper logs.'.format(repo))
|
'Failed to add new repository \'{0}\' for unknown reason. '
|
||||||
|
'Please look into Zypper logs.'.format(repo))
|
||||||
added = True
|
added = True
|
||||||
|
|
||||||
# Modify added or existing repo according to the options
|
# Modify added or existing repo according to the options
|
||||||
cmd_opt = []
|
cmd_opt = []
|
||||||
|
|
||||||
if "enabled" in kwargs:
|
if 'enabled' in kwargs:
|
||||||
cmd_opt.append(kwargs["enabled"] and "--enable" or "--disable")
|
cmd_opt.append(kwargs['enabled'] and '--enable' or '--disable')
|
||||||
|
|
||||||
if "refresh" in kwargs:
|
if 'refresh' in kwargs:
|
||||||
cmd_opt.append(kwargs["refresh"] and "--refresh" or "--no-refresh")
|
cmd_opt.append(kwargs['refresh'] and '--refresh' or '--no-refresh')
|
||||||
|
|
||||||
if "cache" in kwargs:
|
if 'cache' in kwargs:
|
||||||
cmd_opt.append(kwargs["cache"] and "--keep-packages" or "--no-keep-packages")
|
cmd_opt.append(kwargs['cache'] and '--keep-packages' or '--no-keep-packages')
|
||||||
|
|
||||||
if "gpgcheck" in kwargs:
|
if 'gpgcheck' in kwargs:
|
||||||
cmd_opt.append(kwargs["gpgcheck"] and "--gpgcheck" or "--no-gpgcheck")
|
cmd_opt.append(kwargs['gpgcheck'] and '--gpgcheck' or '--no-gpgcheck')
|
||||||
|
|
||||||
if cmd_opt:
|
if cmd_opt:
|
||||||
__salt__['cmd.run'](("zypper -x mr {0} '{1}'".format(' '.join(cmd_opt), repo)),
|
__salt__['cmd.run'](('zypper -x mr {0} \'{1}\''.format(' '.join(cmd_opt), repo)),
|
||||||
output_loglevel='trace')
|
output_loglevel='trace')
|
||||||
|
|
||||||
# If the repo was neither added nor modified, an error should be thrown
|
# If the repo was neither added nor modified, an error should be thrown
|
||||||
if not added and not cmd_opt:
|
if not added and not cmd_opt:
|
||||||
raise CommandExecutionError('Modification of the repository "{0}" was not specified.'.format(repo))
|
raise CommandExecutionError(
|
||||||
|
'Modification of the repository \'{0}\' was not specified.'.format(repo))
|
||||||
|
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
@ -474,11 +477,11 @@ def refresh_db():
|
|||||||
if not line:
|
if not line:
|
||||||
continue
|
continue
|
||||||
if line.strip().startswith('Repository'):
|
if line.strip().startswith('Repository'):
|
||||||
key = line.split("'")[1].strip()
|
key = line.split('\'')[1].strip()
|
||||||
if 'is up to date' in line:
|
if 'is up to date' in line:
|
||||||
ret[key] = False
|
ret[key] = False
|
||||||
elif line.strip().startswith('Building'):
|
elif line.strip().startswith('Building'):
|
||||||
key = line.split("'")[1].strip()
|
key = line.split('\'')[1].strip()
|
||||||
if 'done' in line:
|
if 'done' in line:
|
||||||
ret[key] = True
|
ret[key] = True
|
||||||
return ret
|
return ret
|
||||||
@ -572,7 +575,7 @@ def install(name=None,
|
|||||||
# Allow "version" to work for single package target
|
# Allow "version" to work for single package target
|
||||||
pkg_params = {name: version_num}
|
pkg_params = {name: version_num}
|
||||||
else:
|
else:
|
||||||
log.warning('"version" parameter will be ignored for multiple '
|
log.warning('\'version\' parameter will be ignored for multiple '
|
||||||
'package targets')
|
'package targets')
|
||||||
|
|
||||||
if pkg_type == 'repository':
|
if pkg_type == 'repository':
|
||||||
@ -605,10 +608,10 @@ def install(name=None,
|
|||||||
old = list_pkgs()
|
old = list_pkgs()
|
||||||
downgrades = []
|
downgrades = []
|
||||||
if fromrepo:
|
if fromrepo:
|
||||||
fromrepoopt = "--force --force-resolution --from {0} ".format(fromrepo)
|
fromrepoopt = '--force --force-resolution --from {0} '.format(fromrepo)
|
||||||
log.info('Targeting repo {0!r}'.format(fromrepo))
|
log.info('Targeting repo {0!r}'.format(fromrepo))
|
||||||
else:
|
else:
|
||||||
fromrepoopt = ""
|
fromrepoopt = ''
|
||||||
# Split the targets into batches of 500 packages each, so that
|
# Split the targets into batches of 500 packages each, so that
|
||||||
# the maximal length of the command line is not broken
|
# the maximal length of the command line is not broken
|
||||||
while targets:
|
while targets:
|
||||||
@ -793,11 +796,11 @@ def list_locks():
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
locks = {}
|
locks = {}
|
||||||
for meta in [item.split("\n") for item in open(LOCKS).read().split("\n\n")]:
|
for meta in [item.split('\n') for item in open(LOCKS).read().split('\n\n')]:
|
||||||
lock = {}
|
lock = {}
|
||||||
for element in [el for el in meta if el]:
|
for element in [el for el in meta if el]:
|
||||||
if ":" in element:
|
if ':' in element:
|
||||||
lock.update(dict([tuple([i.strip() for i in element.split(":", 1)]), ]))
|
lock.update(dict([tuple([i.strip() for i in element.split(':', 1)]), ]))
|
||||||
if lock.get('solvable_name'):
|
if lock.get('solvable_name'):
|
||||||
locks[lock.pop('solvable_name')] = lock
|
locks[lock.pop('solvable_name')] = lock
|
||||||
|
|
||||||
@ -969,12 +972,12 @@ def _get_patterns(installed_only=None):
|
|||||||
patterns = {}
|
patterns = {}
|
||||||
doc = dom.parseString(__salt__['cmd.run'](('zypper --xmlout se -t pattern'),
|
doc = dom.parseString(__salt__['cmd.run'](('zypper --xmlout se -t pattern'),
|
||||||
output_loglevel='trace'))
|
output_loglevel='trace'))
|
||||||
for element in doc.getElementsByTagName("solvable"):
|
for element in doc.getElementsByTagName('solvable'):
|
||||||
installed = element.getAttribute("status") == "installed"
|
installed = element.getAttribute('status') == 'installed'
|
||||||
if (installed_only and installed) or not installed_only:
|
if (installed_only and installed) or not installed_only:
|
||||||
patterns[element.getAttribute("name")] = {
|
patterns[element.getAttribute('name')] = {
|
||||||
'installed': installed,
|
'installed': installed,
|
||||||
'summary': element.getAttribute("summary"),
|
'summary': element.getAttribute('summary'),
|
||||||
}
|
}
|
||||||
|
|
||||||
return patterns
|
return patterns
|
||||||
@ -1018,16 +1021,16 @@ def search(criteria):
|
|||||||
'''
|
'''
|
||||||
doc = dom.parseString(__salt__['cmd.run'](('zypper --xmlout se {0}'.format(criteria)),
|
doc = dom.parseString(__salt__['cmd.run'](('zypper --xmlout se {0}'.format(criteria)),
|
||||||
output_loglevel='trace'))
|
output_loglevel='trace'))
|
||||||
solvables = doc.getElementsByTagName("solvable")
|
solvables = doc.getElementsByTagName('solvable')
|
||||||
if not solvables:
|
if not solvables:
|
||||||
raise CommandExecutionError("No packages found by criteria \"{0}\".".format(criteria))
|
raise CommandExecutionError('No packages found by criteria "{0}".'.format(criteria))
|
||||||
|
|
||||||
out = {}
|
out = {}
|
||||||
for solvable in [s for s in solvables
|
for solvable in [s for s in solvables
|
||||||
if s.getAttribute("status") == "not-installed" and
|
if s.getAttribute('status') == 'not-installed' and
|
||||||
s.getAttribute("kind") == "package"]:
|
s.getAttribute('kind') == 'package']:
|
||||||
out[solvable.getAttribute("name")] = {
|
out[solvable.getAttribute('name')] = {
|
||||||
'summary': solvable.getAttribute("summary")
|
'summary': solvable.getAttribute('summary')
|
||||||
}
|
}
|
||||||
return out
|
return out
|
||||||
|
|
||||||
@ -1037,13 +1040,13 @@ def _get_first_aggregate_text(node_list):
|
|||||||
Extract text from the first occurred DOM aggregate.
|
Extract text from the first occurred DOM aggregate.
|
||||||
'''
|
'''
|
||||||
if not node_list:
|
if not node_list:
|
||||||
return ""
|
return ''
|
||||||
|
|
||||||
out = []
|
out = []
|
||||||
for node in node_list[0].childNodes:
|
for node in node_list[0].childNodes:
|
||||||
if node.nodeType == dom.Document.TEXT_NODE:
|
if node.nodeType == dom.Document.TEXT_NODE:
|
||||||
out.append(node.nodeValue)
|
out.append(node.nodeValue)
|
||||||
return "\n".join(out)
|
return '\n'.join(out)
|
||||||
|
|
||||||
|
|
||||||
def _parse_suse_product(path, *info):
|
def _parse_suse_product(path, *info):
|
||||||
@ -1072,12 +1075,12 @@ def list_products():
|
|||||||
|
|
||||||
salt '*' pkg.list_products
|
salt '*' pkg.list_products
|
||||||
'''
|
'''
|
||||||
PRODUCTS = "/etc/products.d"
|
PRODUCTS = '/etc/products.d'
|
||||||
if not os.path.exists(PRODUCTS):
|
if not os.path.exists(PRODUCTS):
|
||||||
raise CommandExecutionError("Directory {0} does not exists.".format(PRODUCTS))
|
raise CommandExecutionError('Directory {0} does not exist.'.format(PRODUCTS))
|
||||||
|
|
||||||
products = {}
|
products = {}
|
||||||
for fname in os.listdir("/etc/products.d"):
|
for fname in os.listdir('/etc/products.d'):
|
||||||
pth_name = os.path.join(PRODUCTS, fname)
|
pth_name = os.path.join(PRODUCTS, fname)
|
||||||
r_pth_name = os.path.realpath(pth_name)
|
r_pth_name = os.path.realpath(pth_name)
|
||||||
products[r_pth_name] = r_pth_name != pth_name and 'baseproduct' or None
|
products[r_pth_name] = r_pth_name != pth_name and 'baseproduct' or None
|
||||||
|
@ -63,15 +63,19 @@ import logging
|
|||||||
import os
|
import os
|
||||||
import time
|
import time
|
||||||
import pickle
|
import pickle
|
||||||
import urllib
|
|
||||||
from copy import deepcopy
|
from copy import deepcopy
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
||||||
|
import salt.ext.six as six
|
||||||
|
from salt.ext.six.moves import filter
|
||||||
|
from salt.ext.six.moves.urllib.parse import quote as _quote
|
||||||
|
# pylint: enable=import-error,no-name-in-module,redefined-builtin
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
from salt.pillar import Pillar
|
from salt.pillar import Pillar
|
||||||
import salt.utils
|
import salt.utils
|
||||||
import salt.utils.s3 as s3
|
import salt.utils.s3 as s3
|
||||||
import salt.ext.six as six
|
|
||||||
from salt.ext.six.moves import filter
|
|
||||||
|
|
||||||
# Set up logging
|
# Set up logging
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
@ -347,7 +351,7 @@ def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
|
|||||||
keyid=creds.keyid,
|
keyid=creds.keyid,
|
||||||
bucket=bucket,
|
bucket=bucket,
|
||||||
service_url=creds.service_url,
|
service_url=creds.service_url,
|
||||||
path=urllib.quote(path),
|
path=_quote(path),
|
||||||
local_file=cached_file_path,
|
local_file=cached_file_path,
|
||||||
verify_ssl=creds.verify_ssl
|
verify_ssl=creds.verify_ssl
|
||||||
)
|
)
|
||||||
|
@ -41,13 +41,23 @@ otherwise multi-minion targetting can lead to losing output:
|
|||||||
* the first returning minion is able to create a document in the database
|
* the first returning minion is able to create a document in the database
|
||||||
* other minions fail with ``{'error': 'HTTP Error 409: Conflict'}``
|
* other minions fail with ``{'error': 'HTTP Error 409: Conflict'}``
|
||||||
'''
|
'''
|
||||||
from __future__ import absolute_import
|
|
||||||
# Import Python libs
|
# Import Python libs
|
||||||
|
from __future__ import absolute_import
|
||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
import urllib2
|
|
||||||
import json
|
import json
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
# pylint: disable=no-name-in-module,import-error
|
||||||
|
from salt.ext.six.moves.urllib.error import HTTPError
|
||||||
|
from salt.ext.six.moves.urllib.request import (
|
||||||
|
Request as _Request,
|
||||||
|
HTTPHandler as _HTTPHandler,
|
||||||
|
build_opener as _build_opener,
|
||||||
|
)
|
||||||
|
# pylint: enable=no-name-in-module,import-error
|
||||||
|
|
||||||
# Import Salt libs
|
# Import Salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
import salt.returners
|
import salt.returners
|
||||||
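The couchdb returner's HTTP plumbing moves from urllib2 to six.moves.urllib. A hedged sketch of the same request helper pattern, written against the standalone six package; the CouchDB URL and database name are placeholders.

# Sketch: Request/HTTPHandler/build_opener and HTTPError all live under
# six.moves.urllib; the lambda overrides the HTTP verb, as in _request().
import json
from six.moves.urllib.error import HTTPError
from six.moves.urllib.request import HTTPHandler, Request, build_opener

def do_request(method, url, content_type=None, data=None):
    opener = build_opener(HTTPHandler)
    request = Request(url, data=data)
    if content_type:
        request.add_header('Content-Type', content_type)
    request.get_method = lambda: method
    try:
        handler = opener.open(request)
    except HTTPError as exc:
        return {'error': '{0}'.format(exc)}
    return json.loads(handler.read())

print(do_request('GET', 'http://localhost:5984/salt/_all_docs'))  # placeholder DB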
@ -88,37 +98,37 @@ def _get_options(ret=None):
|
|||||||
return {"url": server_url, "db": db_name}
|
return {"url": server_url, "db": db_name}
|
||||||
|
|
||||||
|
|
||||||
def _generate_doc(ret, options):
|
def _generate_doc(ret):
|
||||||
'''
|
'''
|
||||||
Create a object that will be saved into the database based on
|
Create a object that will be saved into the database based on
|
||||||
options.
|
options.
|
||||||
'''
|
'''
|
||||||
|
|
||||||
# Create a copy of the object that we will return.
|
# Create a copy of the object that we will return.
|
||||||
r = ret.copy()
|
retc = ret.copy()
|
||||||
|
|
||||||
# Set the ID of the document to be the JID.
|
# Set the ID of the document to be the JID.
|
||||||
r["_id"] = ret["jid"]
|
retc["_id"] = ret["jid"]
|
||||||
|
|
||||||
# Add a timestamp field to the document
|
# Add a timestamp field to the document
|
||||||
r["timestamp"] = time.time()
|
retc["timestamp"] = time.time()
|
||||||
|
|
||||||
return r
|
return retc
|
||||||
|
|
||||||
|
|
||||||
def _request(method, url, content_type=None, _data=None):
|
def _request(method, url, content_type=None, _data=None):
|
||||||
'''
|
'''
|
||||||
Makes an HTTP request. Returns the parsed JSON, or an object describing the error.
|
Makes an HTTP request. Returns the parsed JSON, or an object describing the error.
|
||||||
'''
|
'''
|
||||||
opener = urllib2.build_opener(urllib2.HTTPHandler)
|
opener = _build_opener(_HTTPHandler)
|
||||||
request = urllib2.Request(url, data=_data)
|
request = _Request(url, data=_data)
|
||||||
if content_type:
|
if content_type:
|
||||||
request.add_header('Content-Type', content_type)
|
request.add_header('Content-Type', content_type)
|
||||||
request.get_method = lambda: method
|
request.get_method = lambda: method
|
||||||
try:
|
try:
|
||||||
handler = opener.open(request)
|
handler = opener.open(request)
|
||||||
except urllib2.HTTPError as e:
|
except HTTPError as exc:
|
||||||
return {'error': '{0}'.format(e)}
|
return {'error': '{0}'.format(exc)}
|
||||||
return json.loads(handler.read())
|
return json.loads(handler.read())
|
||||||
|
|
||||||
|
|
||||||
@ -146,7 +156,7 @@ def returner(ret):
|
|||||||
|
|
||||||
# Call _generate_doc to get a dict object of the document we're going to
|
# Call _generate_doc to get a dict object of the document we're going to
|
||||||
# shove into the database.
|
# shove into the database.
|
||||||
doc = _generate_doc(ret, options)
|
doc = _generate_doc(ret)
|
||||||
|
|
||||||
# Make the actual HTTP PUT request to create the doc.
|
# Make the actual HTTP PUT request to create the doc.
|
||||||
_response = _request("PUT",
|
_response = _request("PUT",
|
||||||
@ -195,7 +205,7 @@ def get_jids():
|
|||||||
# See if the identifier is an int..
|
# See if the identifier is an int..
|
||||||
try:
|
try:
|
||||||
int(row['id'])
|
int(row['id'])
|
||||||
except Exception:
|
except ValueError:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Check the correct number of digits by simply casting to str and
|
# Check the correct number of digits by simply casting to str and
|
||||||
|
@ -60,14 +60,21 @@ Hipchat settings may also be configured as::
|
|||||||
'''
|
'''
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import salt.returners
|
# Import Python libs
|
||||||
|
|
||||||
import json
|
import json
|
||||||
import pprint
|
import pprint
|
||||||
import requests
|
|
||||||
import logging
|
import logging
|
||||||
from urlparse import urljoin as _urljoin
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
import requests
|
||||||
from requests.exceptions import ConnectionError
|
from requests.exceptions import ConnectionError
|
||||||
|
# pylint: disable=import-error
|
||||||
|
from salt.ext.six.moves.urllib.parse import urljoin as _urljoin # pylint: disable=import-error,no-name-in-module
|
||||||
|
# pylint: enable=import-error
|
||||||
|
|
||||||
|
# Import Salt Libs
|
||||||
|
import salt.returners
|
||||||
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
__virtualname__ = 'hipchat'
|
__virtualname__ = 'hipchat'
|
||||||
|
@ -55,15 +55,22 @@ Hipchat settings may also be configured as::
|
|||||||
'''
|
'''
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
# Import Python libs
|
||||||
|
import pprint
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
import requests
|
||||||
|
from requests.exceptions import ConnectionError
|
||||||
|
# pylint: disable=import-error
|
||||||
|
from salt.ext.six.moves.urllib.parse import urljoin as _urljoin # pylint: disable=import-error,no-name-in-module
|
||||||
|
# pylint: enable=import-error
|
||||||
|
|
||||||
|
# Import Salt Libs
|
||||||
import salt.returners
|
import salt.returners
|
||||||
|
|
||||||
import pprint
|
|
||||||
import requests
|
|
||||||
import logging
|
|
||||||
from urlparse import urljoin as _urljoin
|
|
||||||
from requests.exceptions import ConnectionError
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
__virtualname__ = 'slack'
|
__virtualname__ = 'slack'
|
||||||
|
|
||||||
|
|
||||||
|
@ -5,15 +5,16 @@ and what hosts are down
|
|||||||
'''
|
'''
|
||||||
|
|
||||||
# Import python libs
|
# Import python libs
|
||||||
from __future__ import print_function
|
from __future__ import absolute_import, print_function
|
||||||
from __future__ import absolute_import
|
|
||||||
import os
|
import os
|
||||||
import operator
|
import operator
|
||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
import tempfile
|
import tempfile
|
||||||
import time
|
import time
|
||||||
import urllib
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
from salt.ext.six.moves.urllib.request import urlopen as _urlopen # pylint: disable=no-name-in-module,import-error
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.key
|
import salt.key
|
||||||
@ -370,7 +371,7 @@ def bootstrap_psexec(hosts='', master=None, version=None, arch='win32',
|
|||||||
|
|
||||||
if not installer_url:
|
if not installer_url:
|
||||||
base_url = 'http://docs.saltstack.com/downloads/'
|
base_url = 'http://docs.saltstack.com/downloads/'
|
||||||
source = urllib.urlopen(base_url).read()
|
source = _urlopen(base_url).read()
|
||||||
salty_rx = re.compile('>(Salt-Minion-(.+?)-(.+)-Setup.exe)</a></td><td align="right">(.*?)\\s*<')
|
salty_rx = re.compile('>(Salt-Minion-(.+?)-(.+)-Setup.exe)</a></td><td align="right">(.*?)\\s*<')
|
||||||
source_list = sorted([[path, ver, plat, time.strptime(date, "%d-%b-%Y %H:%M")]
|
source_list = sorted([[path, ver, plat, time.strptime(date, "%d-%b-%Y %H:%M")]
|
||||||
for path, ver, plat, date in salty_rx.findall(source)],
|
for path, ver, plat, date in salty_rx.findall(source)],
|
||||||
|
@ -18,16 +18,17 @@ import datetime
|
|||||||
import hashlib
|
import hashlib
|
||||||
import hmac
|
import hmac
|
||||||
import logging
|
import logging
|
||||||
import urllib
|
|
||||||
import urlparse
|
|
||||||
import requests
|
|
||||||
|
|
||||||
# Import Salt libs
|
# Import Salt libs
|
||||||
import salt.utils.xmlutil as xml
|
import salt.utils.xmlutil as xml
|
||||||
from salt._compat import ElementTree as ET
|
from salt._compat import ElementTree as ET
|
||||||
from salt.ext.six.moves import map
|
|
||||||
from salt.ext.six.moves import zip
|
# Import 3rd-party libs
|
||||||
from salt.ext.six.moves import range
|
import requests
|
||||||
|
# pylint: disable=import-error,redefined-builtin,no-name-in-module
|
||||||
|
from salt.ext.six.moves import map, range, zip
|
||||||
|
from salt.ext.six.moves.urllib.parse import urlencode, urlparse
|
||||||
|
# pylint: enable=import-error,redefined-builtin,no-name-in-module
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
LOG = logging.getLogger(__name__)
|
||||||
DEFAULT_LOCATION = 'us-east-1'
|
DEFAULT_LOCATION = 'us-east-1'
|
||||||
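The AWS signing helpers in this module switch urllib.urlencode and urlparse.urlparse to their six.moves counterparts, as the import block above and the sig2/sig4 hunks that follow show. A small sketch with the standalone six package; the query parameters and endpoint are example values only.

# Sketch: urlencode and urlparse behave identically on Python 2 and 3 when
# taken from six.moves.urllib.parse, so the canonical query string is stable.
from six.moves.urllib.parse import urlencode, urlparse

params = {'Action': 'DescribeInstances', 'Version': '2014-10-01'}  # example values
keys = sorted(params.keys())
values = list(map(params.get, keys))
print(urlencode(list(zip(keys, values))))  # Action=DescribeInstances&Version=2014-10-01
print(urlparse('https://ec2.us-east-1.amazonaws.com/').netloc)  # example endpoint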
@ -60,7 +61,7 @@ def sig2(method, endpoint, params, provider, aws_api_version):
|
|||||||
params_with_headers['Version'] = aws_api_version
|
params_with_headers['Version'] = aws_api_version
|
||||||
keys = sorted(params_with_headers.keys())
|
keys = sorted(params_with_headers.keys())
|
||||||
values = list(list(map(params_with_headers.get, keys)))
|
values = list(list(map(params_with_headers.get, keys)))
|
||||||
querystring = urllib.urlencode(list(zip(keys, values)))
|
querystring = urlencode(list(zip(keys, values)))
|
||||||
|
|
||||||
canonical = '{0}\n{1}\n/\n{2}'.format(
|
canonical = '{0}\n{1}\n/\n{2}'.format(
|
||||||
method.encode('utf-8'),
|
method.encode('utf-8'),
|
||||||
@ -91,7 +92,7 @@ def sig4(method, endpoint, params, prov_dict, aws_api_version, location,
|
|||||||
params_with_headers['Version'] = aws_api_version
|
params_with_headers['Version'] = aws_api_version
|
||||||
keys = sorted(params_with_headers.keys())
|
keys = sorted(params_with_headers.keys())
|
||||||
values = list(map(params_with_headers.get, keys))
|
values = list(map(params_with_headers.get, keys))
|
||||||
querystring = urllib.urlencode(list(zip(keys, values)))
|
querystring = urlencode(list(zip(keys, values)))
|
||||||
|
|
||||||
amzdate = timenow.strftime('%Y%m%dT%H%M%SZ')
|
amzdate = timenow.strftime('%Y%m%dT%H%M%SZ')
|
||||||
datestamp = timenow.strftime('%Y%m%d')
|
datestamp = timenow.strftime('%Y%m%d')
|
||||||
@ -254,7 +255,7 @@ def query(params=None, setname=None, requesturl=None, location=None,
|
|||||||
|
|
||||||
requesturl = 'https://{0}/'.format(endpoint)
|
requesturl = 'https://{0}/'.format(endpoint)
|
||||||
else:
|
else:
|
||||||
endpoint = urlparse.urlparse(requesturl).netloc
|
endpoint = urlparse(requesturl).netloc
|
||||||
if endpoint == '':
|
if endpoint == '':
|
||||||
endpoint_err = ('Could not find a valid endpoint in the '
|
endpoint_err = ('Could not find a valid endpoint in the '
|
||||||
'requesturl: {0}. Looking for something '
|
'requesturl: {0}. Looking for something '
|
||||||
|
@ -104,9 +104,11 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
import salt.ext.six as six
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
from salt._compat import MAX_SIZE
|
|
||||||
from salt.utils.filebuffer import BufferedReader
|
from salt.utils.filebuffer import BufferedReader
|
||||||
|
|
||||||
# Set up logger
|
# Set up logger
|
||||||
@ -209,7 +211,7 @@ def _parse_size(value):
|
|||||||
max_size = num
|
max_size = num
|
||||||
elif style == '+':
|
elif style == '+':
|
||||||
min_size = num
|
min_size = num
|
||||||
max_size = MAX_SIZE
|
max_size = six.MAXSIZE
|
||||||
else:
|
else:
|
||||||
min_size = num
|
min_size = num
|
||||||
max_size = num + multiplier - 1
|
max_size = num + multiplier - 1
|
||||||
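salt._compat.MAX_SIZE is retired in favour of six.MAXSIZE, used above as the open upper bound in _parse_size(). A minimal sketch with the standalone six package; the 1024 threshold is an arbitrary example.

# Sketch: six.MAXSIZE is the interpreter's largest container size
# (sys.maxsize), exposed under one name on both Python 2 and 3.
import six

min_size, max_size = 1024, six.MAXSIZE  # "at least 1 KiB", no practical upper bound
print(min_size, max_size)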
|
@ -9,7 +9,9 @@ from __future__ import absolute_import
|
|||||||
|
|
||||||
# Import python libs
|
# Import python libs
|
||||||
import gzip
|
import gzip
|
||||||
from salt._compat import StringIO
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
from salt.ext.six.moves import StringIO # pylint: disable=import-error
|
||||||
|
|
||||||
|
|
||||||
class GzipFile(gzip.GzipFile):
|
class GzipFile(gzip.GzipFile):
|
||||||
|
@ -6,7 +6,10 @@ import os
|
|||||||
import urlparse
|
import urlparse
|
||||||
|
|
||||||
# Import third party libs
|
# Import third party libs
|
||||||
from mako.lookup import TemplateCollection, TemplateLookup
|
# pylint: disable=import-error,no-name-in-module
|
||||||
|
from salt.ext.six.moves.urllib.parse import urlparse
|
||||||
|
# pylint: enable=import-error,no-name-in-module
|
||||||
|
from mako.lookup import TemplateCollection, TemplateLookup # pylint: disable=import-error
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.fileclient
|
import salt.fileclient
|
||||||
@ -57,7 +60,7 @@ class SaltMakoTemplateLookup(TemplateCollection):
|
|||||||
self.cache = {}
|
self.cache = {}
|
||||||
|
|
||||||
def adjust_uri(self, uri, filename):
|
def adjust_uri(self, uri, filename):
|
||||||
scheme = urlparse.urlparse(uri).scheme
|
scheme = urlparse(uri).scheme
|
||||||
if scheme in ('salt', 'file'):
|
if scheme in ('salt', 'file'):
|
||||||
return uri
|
return uri
|
||||||
elif scheme:
|
elif scheme:
|
||||||
|
@ -21,6 +21,9 @@ import optparse
|
|||||||
import traceback
|
import traceback
|
||||||
from functools import partial
|
from functools import partial
|
||||||
|
|
||||||
|
# Import 3rd-party libs
|
||||||
|
import salt.ext.six as six
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt.config as config
|
import salt.config as config
|
||||||
import salt.exitcodes
|
import salt.exitcodes
|
||||||
@ -34,7 +37,6 @@ import salt.utils.xdg
|
|||||||
from salt.utils import kinds
|
from salt.utils import kinds
|
||||||
from salt.defaults import DEFAULT_TARGET_DELIM
|
from salt.defaults import DEFAULT_TARGET_DELIM
|
||||||
from salt.utils.validate.path import is_writeable
|
from salt.utils.validate.path import is_writeable
|
||||||
from salt._compat import MAX_SIZE
|
|
||||||
|
|
||||||
|
|
||||||
def _sorted(mixins_or_funcs):
|
def _sorted(mixins_or_funcs):
|
||||||
@ -207,7 +209,7 @@ class MergeConfigMixIn(object):
|
|||||||
This mix-in should run last.
|
This mix-in should run last.
|
||||||
'''
|
'''
|
||||||
__metaclass__ = MixInMeta
|
__metaclass__ = MixInMeta
|
||||||
_mixin_prio_ = MAX_SIZE
|
_mixin_prio_ = six.MAXSIZE
|
||||||
|
|
||||||
def _mixin_setup(self):
|
def _mixin_setup(self):
|
||||||
if not hasattr(self, 'setup_config') and not hasattr(self, 'config'):
|
if not hasattr(self, 'setup_config') and not hasattr(self, 'config'):
|
||||||
|
@ -12,8 +12,10 @@ import datetime
|
|||||||
import hashlib
|
import hashlib
|
||||||
import hmac
|
import hmac
|
||||||
import logging
|
import logging
|
||||||
import urllib
|
|
||||||
|
# Import 3rd-party libs
|
||||||
import requests
|
import requests
|
||||||
|
from salt.ext.six.moves.urllib.parse import urlencode # pylint: disable=no-name-in-module,import-error
|
||||||
|
|
||||||
# Import Salt libs
|
# Import Salt libs
|
||||||
import salt.utils
|
import salt.utils
|
||||||
@ -128,7 +130,7 @@ def query(key, keyid, method='GET', params=None, headers=None,
|
|||||||
sig = binascii.b2a_base64(hashed.digest())
|
sig = binascii.b2a_base64(hashed.digest())
|
||||||
headers['Authorization'] = 'AWS {0}:{1}'.format(keyid, sig.strip())
|
headers['Authorization'] = 'AWS {0}:{1}'.format(keyid, sig.strip())
|
||||||
|
|
||||||
querystring = urllib.urlencode(params)
|
querystring = urlencode(params)
|
||||||
if action:
|
if action:
|
||||||
if querystring:
|
if querystring:
|
||||||
querystring = '{0}&{1}'.format(action, querystring)
|
querystring = '{0}&{1}'.format(action, querystring)
|
||||||
|
@ -2,9 +2,10 @@
|
|||||||
'''
|
'''
|
||||||
Generate the salt thin tarball from the installed python files
|
Generate the salt thin tarball from the installed python files
|
||||||
'''
|
'''
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
# Import python libs
|
# Import python libs
|
||||||
|
from __future__ import absolute_import
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import tarfile
|
import tarfile
|
||||||
@ -15,6 +16,9 @@ import tempfile
|
|||||||
import jinja2
|
import jinja2
|
||||||
import yaml
|
import yaml
|
||||||
import requests
|
import requests
|
||||||
|
import salt.ext.six as six
|
||||||
|
|
||||||
|
# pylint: disable=import-error,no-name-in-module
|
||||||
try:
|
try:
|
||||||
import msgpack
|
import msgpack
|
||||||
HAS_MSGPACK = True
|
HAS_MSGPACK = True
|
||||||
@ -26,27 +30,17 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
# Import the bundled package
|
# Import the bundled package
|
||||||
try:
|
try:
|
||||||
from requests.packages import urllib3 # pylint: disable=E0611
|
from requests.packages import urllib3
|
||||||
HAS_URLLIB3 = True
|
HAS_URLLIB3 = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
HAS_URLLIB3 = False
|
HAS_URLLIB3 = False
|
||||||
try:
|
|
||||||
import salt.ext.six as six
|
|
||||||
HAS_SIX = True
|
|
||||||
except ImportError:
|
|
||||||
# Import the bundled package
|
|
||||||
try:
|
|
||||||
from requests.packages.urllib3.packages import six # pylint: disable=E0611
|
|
||||||
HAS_SIX = True
|
|
||||||
except ImportError:
|
|
||||||
HAS_SIX = False
|
|
||||||
try:
|
try:
|
||||||
import chardet
|
import chardet
|
||||||
HAS_CHARDET = True
|
HAS_CHARDET = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
# Import the bundled package
|
# Import the bundled package
|
||||||
try:
|
try:
|
||||||
from requests.packages.urllib3.packages import chardet # pylint: disable=E0611
|
from requests.packages.urllib3.packages import chardet
|
||||||
HAS_CHARDET = True
|
HAS_CHARDET = True
|
||||||
except ImportError:
|
except ImportError:
|
||||||
HAS_CHARDET = False
|
HAS_CHARDET = False
|
||||||
@ -56,6 +50,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
# Older jinja does not need markupsafe
|
# Older jinja does not need markupsafe
|
||||||
HAS_MARKUPSAFE = False
|
HAS_MARKUPSAFE = False
|
||||||
|
# pylint: enable=import-error,no-name-in-module
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import salt
|
import salt
|
||||||
@ -108,7 +103,6 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
|
|||||||
if HAS_URLLIB3:
|
if HAS_URLLIB3:
|
||||||
tops.append(os.path.dirname(urllib3.__file__))
|
tops.append(os.path.dirname(urllib3.__file__))
|
||||||
|
|
||||||
if HAS_SIX:
|
|
||||||
tops.append(six.__file__.replace('.pyc', '.py'))
|
tops.append(six.__file__.replace('.pyc', '.py'))
|
||||||
|
|
||||||
if HAS_CHARDET:
|
if HAS_CHARDET:
|
||||||
|
@ -6,27 +6,15 @@ import subprocess
|
|||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
# Import Salt Testing libs
|
# Import Salt Testing libs
|
||||||
from salttesting.helpers import ensure_in_syspath
|
from salttesting.helpers import ensure_in_syspath, skip_if_binaries_missing
|
||||||
ensure_in_syspath('../../')
|
ensure_in_syspath('../..')
|
||||||
|
|
||||||
# Import salt libs
|
# Import salt libs
|
||||||
import integration
|
import integration
|
||||||
|
|
||||||
|
|
||||||
|
@skip_if_binaries_missing('git')
|
||||||
class GitModuleTest(integration.ModuleCase):
|
class GitModuleTest(integration.ModuleCase):
|
||||||
'''
|
|
||||||
Integration tests for the git module
|
|
||||||
'''
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def setUpClass(cls):
|
|
||||||
'''
|
|
||||||
Check if git is installed. If it isn't, skip everything in this class.
|
|
||||||
'''
|
|
||||||
from salt.utils import which
|
|
||||||
git = which('git')
|
|
||||||
if not git:
|
|
||||||
cls.skipTest('The git binary is not available')
|
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
self.repos = tempfile.mkdtemp(dir=integration.TMP)
|
self.repos = tempfile.mkdtemp(dir=integration.TMP)
|
||||||
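The git integration test drops its hand-rolled setUpClass() check in favour of salt-testing's skip_if_binaries_missing decorator, visible in the hunk above. A hedged sketch of the pattern; the test class and assertion are placeholders, not part of the suite.

# Sketch: the decorator skips the whole TestCase when the named binary is
# not on PATH, replacing the manual salt.utils.which('git') guard.
import subprocess

from salttesting.unit import TestCase
from salttesting.helpers import skip_if_binaries_missing

@skip_if_binaries_missing('git')
class GitSmokeTest(TestCase):  # hypothetical test class
    def test_version(self):
        out = subprocess.check_output(['git', '--version'])
        self.assertTrue(out.startswith(b'git version'))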
|
@@ -1,24 +1,23 @@
 # coding: utf-8

-# Import python libs
-import urllib
-
 # Import salttesting libs
 from salttesting import mock
 from salttesting.unit import skipIf
 from salttesting.helpers import ensure_in_syspath
 ensure_in_syspath('../../../')


 from salt.exceptions import EauthAuthenticationError
 from tests.utils import BaseRestCherryPyTest

 # Import 3rd-party libs
+# pylint: disable=import-error,unused-import
+from salt.ext.six.moves.urllib.parse import urlencode  # pylint: disable=no-name-in-module
 try:
-    import cherrypy  # pylint: disable=W0611
+    import cherrypy
     HAS_CHERRYPY = True
 except ImportError:
     HAS_CHERRYPY = False
+# pylint: enable=import-error,unused-import


 @skipIf(HAS_CHERRYPY is False, 'CherryPy not installed')

@@ -78,7 +77,7 @@ class TestLogin(BaseRestCherryPyTest):
             'eauth': 'auto',
         }

-        body = urllib.urlencode(self.auth_creds)
+        body = urlencode(self.auth_creds)
         request, response = self.request('/login', method='POST', body=body,
             headers={
                 'content-type': 'application/x-www-form-urlencoded'

@@ -93,7 +92,7 @@ class TestLogin(BaseRestCherryPyTest):
         # Mock mk_token for a negative return
         self.Resolver.return_value.mk_token.return_value = {}

-        body = urllib.urlencode({'totally': 'invalid_creds'})
+        body = urlencode({'totally': 'invalid_creds'})
         request, response = self.request('/login', method='POST', body=body,
             headers={
                 'content-type': 'application/x-www-form-urlencoded'

@@ -104,7 +103,7 @@ class TestLogin(BaseRestCherryPyTest):
         ret = self.test_good_login()
         token = ret.headers['X-Auth-Token']

-        body = urllib.urlencode({})
+        body = urlencode({})
         request, response = self.request('/logout', method='POST', body=body,
             headers={
                 'content-type': 'application/x-www-form-urlencoded',

@@ -130,7 +129,7 @@ class TestRun(BaseRestCherryPyTest):
         Test the run URL with good auth credentials
         '''
         cmd = dict(self.low, **dict(self.auth_creds))
-        body = urllib.urlencode(cmd)
+        body = urlencode(cmd)

         # Mock the interaction with Salt so we can focus on the API.
         with mock.patch.object(self.app.salt.netapi.NetapiClient, 'run',

@@ -147,7 +146,7 @@ class TestRun(BaseRestCherryPyTest):
         Test the run URL with bad auth credentials
         '''
         cmd = dict(self.low, **{'totally': 'invalid_creds'})
-        body = urllib.urlencode(cmd)
+        body = urlencode(cmd)

         # Mock the interaction with Salt so we can focus on the API.
         with mock.patch.object(self.app.salt.netapi.NetapiClient, 'run',

@@ -183,7 +182,7 @@ class TestWebhookDisableAuth(BaseRestCherryPyTest):
         # Mock fire_event() since we're only testing auth here.
         self.get_event.return_value.fire_event.return_value = True

-        body = urllib.urlencode({'foo': 'Foo!'})
+        body = urlencode({'foo': 'Foo!'})
         request, response = self.request('/hook', method='POST', body=body,
             headers={
                 'content-type': 'application/x-www-form-urlencoded'
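Every hunk in this file makes the same substitution: the Python-2-only urllib.urlencode becomes urlencode from salt.ext.six.moves.urllib.parse, which resolves to urllib.urlencode on Python 2 and urllib.parse.urlencode on Python 3. A minimal standalone sketch of the pattern, using the public six package in place of Salt's vendored copy:

# Runs unchanged on Python 2 and 3 (assumes the `six` package is installed;
# Salt vendors the same module as salt.ext.six).
from six.moves.urllib.parse import urlencode  # pylint: disable=import-error

# Example credential dict of the shape used by the tests above.
auth_creds = {'username': 'saltdev', 'password': 'saltdev', 'eauth': 'auto'}
body = urlencode(auth_creds)
# e.g. 'username=saltdev&password=saltdev&eauth=auto' (key order may vary)
print(body)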
@@ -1,18 +1,23 @@
 # coding: utf-8
-import json
-
-from salt.netapi.rest_tornado import saltnado
-
-import tornado.testing
-import tornado.concurrent
-import tornado.web
-import tornado.ioloop
-
-from unit.netapi.rest_tornado.test_handlers import SaltnadoTestCase

 import json
 import time

+from salttesting.helpers import ensure_in_syspath
+ensure_in_syspath('../../../')
+
+try:
+    from salt.netapi.rest_tornado import saltnado
+
+    import tornado.testing
+    import tornado.concurrent
+    import tornado.web
+    import tornado.ioloop
+except ImportError:
+    pass
+
+from unit.netapi.rest_tornado.test_handlers import SaltnadoTestCase


 class TestSaltAPIHandler(SaltnadoTestCase):
     def get_app(self):

@@ -492,3 +497,13 @@ class TestWebhookSaltAPIHandler(SaltnadoTestCase):
         )
         response_obj = json.loads(response.body)
         assert response_obj['success'] is True
+
+
+if __name__ == '__main__':
+    from integration import run_tests  # pylint: disable=import-error
+    run_tests(TestEventsSaltAPIHandler,
+              TestJobsSaltAPIHandler,
+              TestMinionSaltAPIHandler,
+              TestRunSaltAPIHandler,
+              TestSaltAPIHandler,
+              TestWebhookSaltAPIHandler, needs_daemon=True)
@@ -1,7 +1,12 @@
 # coding: utf-8
-import salt.loader

-from tests.integration import ModuleCase
+# Import salt testing libs
+from salttesting.case import ModuleCase
+from salttesting.helpers import ensure_in_syspath
+ensure_in_syspath('../../')
+
+# Import Salt libs
+import salt.loader


 class HashutilTestCase(ModuleCase):
@@ -1,15 +1,14 @@
 # -*- coding: utf-8 -*-

-import urllib2
 # Import Salt Testing libs
 from salttesting import skipIf, TestCase
 from salttesting.helpers import ensure_in_syspath
 from salttesting.mock import NO_MOCK, NO_MOCK_REASON, Mock, patch

-from salt.modules import nginx
-
 ensure_in_syspath('../../')

+# Import Salt Module
+from salt.modules import nginx

 MOCK_STATUS_OUTPUT = """Active connections: 7
 server accepts handled requests
  46756 46756 89318

@@ -29,10 +28,10 @@ class MockUrllibStatus(object):
 @patch('salt.utils.which', Mock(return_value='/usr/bin/nginx'))
 class NginxTestCase(TestCase):

-    @patch('urllib2.urlopen', Mock(return_value=MockUrllibStatus()))
+    @patch('salt.modules.nginx._urlopen', Mock(return_value=MockUrllibStatus()))
     def test_nginx_status(self):
         result = nginx.status()
-        urllib2.urlopen.assert_called_once_with('http://127.0.0.1/status')
+        nginx._urlopen.assert_called_once_with('http://127.0.0.1/status')
         self.assertEqual(result, {
             'active connections': 7,
             'accepted': 46756,

@@ -43,11 +42,11 @@ class NginxTestCase(TestCase):
             'waiting': 0,
         })

-    @patch('urllib2.urlopen', Mock(return_value=MockUrllibStatus()))
+    @patch('salt.modules.nginx._urlopen', Mock(return_value=MockUrllibStatus()))
     def test_nginx_status_with_arg(self):
         other_path = 'http://localhost/path'
         result = nginx.status(other_path)
-        urllib2.urlopen.assert_called_once_with(other_path)
+        nginx._urlopen.assert_called_once_with(other_path)


 if __name__ == '__main__':
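Instead of patching urllib2.urlopen globally (a name that only exists on Python 2), these tests now patch a module-level _urlopen alias inside salt.modules.nginx. A self-contained sketch of the same technique, with a hypothetical status() function standing in for the real execution module:

# Sketch only: give the module one stable, patchable name for its URL opener.
import sys

from six.moves.urllib.request import urlopen as _urlopen  # pylint: disable=import-error

try:
    from unittest.mock import Mock, patch  # Python 3
except ImportError:
    from mock import Mock, patch  # Python 2 (requires the `mock` package)


def status(url='http://127.0.0.1/status'):
    # The test below replaces _urlopen, so no real network call happens there.
    return _urlopen(url).read()


if __name__ == '__main__':
    fake = Mock(return_value=Mock(read=Mock(return_value=b'OK')))
    with patch.object(sys.modules[__name__], '_urlopen', fake):
        assert status() == b'OK'
        fake.assert_called_once_with('http://127.0.0.1/status')
    print('status() used the patched _urlopen, not the network')

Patching the alias keeps the tests working no matter which stdlib module actually provides urlopen on a given Python version.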
@@ -3,10 +3,15 @@
 # Import python libs
 import os
 import tempfile
-import urllib2
 import logging
 import shutil

+# Import 3rd-party libs
+# pylint: disable=import-error,no-name-in-module,redefined-builtin
+from salt.ext.six.moves.urllib.error import URLError
+from salt.ext.six.moves.urllib.request import urlopen
+# pylint: enable=import-error,no-name-in-module,redefined-builtin
+
 # Import Salt Testing libs
 from salttesting import TestCase, skipIf
 from salttesting.helpers import (

@@ -14,10 +19,10 @@ from salttesting.helpers import (
     requires_network,
     skip_if_binaries_missing
 )
-ensure_in_syspath('../../')
+ensure_in_syspath('../..')

 # Import Salt libs
-import integration
+import integration  # pylint: disable=import-error
 import salt.utils
 from salt.modules import zcbuildout as buildout
 from salt.modules import cmdmod as cmd

@@ -51,9 +56,7 @@ log = logging.getLogger(__name__)

 def download_to(url, dest):
     with salt.utils.fopen(dest, 'w') as fic:
-        fic.write(
-            urllib2.urlopen(url, timeout=10).read()
-        )
+        fic.write(urlopen(url, timeout=10).read())


 class Base(TestCase):

@@ -70,7 +73,7 @@ class Base(TestCase):
         )
         try:
             download_to(url, dest)
-        except urllib2.URLError:
+        except URLError:
             log.debug('Failed to download {0}'.format(url))
         # creating a new setuptools install
         cls.ppy_st = os.path.join(cls.rdir, 'psetuptools')

@@ -489,7 +492,7 @@ class BuildoutAPITestCase(TestCase):


 if __name__ == '__main__':
-    from integration import run_tests
+    from integration import run_tests  # pylint: disable=import-error
     run_tests(
         BuildoutAPITestCase,
         BuildoutTestCase,
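The buildout tests drop urllib2 in favour of urlopen and URLError from salt.ext.six.moves, which map to urllib2 on Python 2 and to urllib.request/urllib.error on Python 3. A small standalone sketch of the same download-and-log-on-failure pattern (public six package, placeholder URL and path):

# Sketch of a py2/py3-neutral download helper; `six` stands in for salt.ext.six.
import logging

from six.moves.urllib.error import URLError
from six.moves.urllib.request import urlopen

log = logging.getLogger(__name__)


def download_to(url, dest):
    # 'wb' here because urlopen().read() returns bytes on Python 3.
    with open(dest, 'wb') as fic:
        fic.write(urlopen(url, timeout=10).read())


try:
    download_to('http://example.com/bootstrap.py', '/tmp/bootstrap.py')
except URLError:
    log.debug('Download failed; carrying on without it')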
@@ -1,8 +1,11 @@
 # coding: utf-8
-import json
-import urllib

+# Import Python libs
+import json
+
+# Import 3rd-party libs
 import yaml
+from salt.ext.six.moves.urllib.parse import urlencode  # pylint: disable=no-name-in-module,import-error

 from tests.utils import BaseToolsTest

@@ -43,7 +46,7 @@ class TestInFormats(BaseToolsTest):
     def test_urlencoded_ctype(self):
         data = {'valid': 'stuff'}
         request, response = self.request('/', method='POST',
-            body=urllib.urlencode(data), headers=(
+            body=urlencode(data), headers=(
                 ('Content-type', 'application/x-www-form-urlencoded'),
         ))
         self.assertEqual(response.status, '200 OK')
@@ -1,18 +1,44 @@
 # coding: utf-8

+# Import Python libs
 import json
 import yaml
-import urllib

-from salt.netapi.rest_tornado import saltnado
+# Import Salt Testing Libs
+from salttesting.unit import skipIf
+from salttesting.helpers import ensure_in_syspath
+ensure_in_syspath('../../..')
+import integration  # pylint: disable=import-error
+
+# Import Salt libs
+try:
+    from salt.netapi.rest_tornado import saltnado
+    HAS_TORNADO = True
+except ImportError:
+    HAS_TORNADO = False
 import salt.auth
-import integration

-import tornado.testing
-import tornado.concurrent
+# Import 3rd-party libs
+# pylint: disable=import-error
+try:
+    import tornado.testing
+    import tornado.concurrent
+    from tornado.testing import AsyncHTTPTestCase
+    HAS_TORNADO = True
+except ImportError:
+    HAS_TORNADO = False
+
+    # Let's create a fake AsyncHTTPTestCase so we can properly skip the test case
+    class AsyncHTTPTestCase(object):
+        pass
+
+from salt.ext.six.moves.urllib.parse import urlencode  # pylint: disable=no-name-in-module
+# pylint: enable=import-error


-class SaltnadoTestCase(integration.ModuleCase, tornado.testing.AsyncHTTPTestCase):
+@skipIf(HAS_TORNADO is False, 'The tornado package needs to be installed')
+class SaltnadoTestCase(integration.ModuleCase, AsyncHTTPTestCase):
     '''
     Mixin to hold some shared things
     '''

@@ -174,7 +200,7 @@ class TestBaseSaltAPIHandler(SaltnadoTestCase):
             )
         response = self.fetch('/',
                               method='POST',
-                              body=urllib.urlencode(form_lowstate),
+                              body=urlencode(form_lowstate),
                               headers={'Content-Type': self.content_type_map['form']})
         returned_lowstate = json.loads(response.body)['lowstate']
         assert len(returned_lowstate) == 1

@@ -209,7 +235,7 @@ class TestSaltAuthHandler(SaltnadoTestCase):
         '''
         response = self.fetch('/login',
                               method='POST',
-                              body=urllib.urlencode(self.auth_creds),
+                              body=urlencode(self.auth_creds),
                               headers={'Content-Type': self.content_type_map['form']})

         response_obj = json.loads(response.body)['return'][0]

@@ -229,7 +255,7 @@ class TestSaltAuthHandler(SaltnadoTestCase):
             bad_creds.append((key, val))
         response = self.fetch('/login',
                               method='POST',
-                              body=urllib.urlencode(bad_creds),
+                              body=urlencode(bad_creds),
                               headers={'Content-Type': self.content_type_map['form']})

         assert response.code == 400

@@ -245,7 +271,12 @@ class TestSaltAuthHandler(SaltnadoTestCase):
             bad_creds.append((key, val))
         response = self.fetch('/login',
                               method='POST',
-                              body=urllib.urlencode(bad_creds),
+                              body=urlencode(bad_creds),
                               headers={'Content-Type': self.content_type_map['form']})

         assert response.code == 401
+
+
+if __name__ == '__main__':
+    from integration import run_tests  # pylint: disable=import-error
+    run_tests(TestBaseSaltAPIHandler, TestSaltAuthHandler, needs_daemon=False)
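The recurring pattern in these tornado test modules is a guarded import: try to import the optional dependency, record the outcome in a HAS_TORNADO flag, fall back to a do-nothing stand-in base class so the module still imports, and let skipIf decide at run time whether the tests execute. A generic sketch of the same idea using only the standard library's unittest.skipIf (class and test names here are placeholders, not Salt's):

# Sketch of the optional-dependency guard used above.
import unittest

# pylint: disable=import-error
try:
    from tornado.testing import AsyncTestCase
    HAS_TORNADO = True
except ImportError:
    HAS_TORNADO = False

    # Fake base class so this module imports cleanly; the skipIf decorator,
    # not an ImportError, then decides whether the tests run.
    class AsyncTestCase(object):
        pass
# pylint: enable=import-error


@unittest.skipIf(not HAS_TORNADO, 'The tornado package needs to be installed')
class OptionalDependencyTest(AsyncTestCase, unittest.TestCase):
    def test_tornado_is_importable(self):
        self.assertTrue(HAS_TORNADO)


if __name__ == '__main__':
    unittest.main()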
@@ -1,15 +1,39 @@
 # coding: utf-8

 import os

-from salt.netapi.rest_tornado import saltnado
-
-import tornado.testing
-import tornado.concurrent
-from salttesting import TestCase
-
-from unit.utils.event_test import eventpublisher_process, event, SOCK_DIR
+# Import Salt Testing Libs
+from salttesting.unit import skipIf
+from salttesting.case import TestCase
+from salttesting.helpers import ensure_in_syspath
+ensure_in_syspath('../../..')
+
+# Import 3rd-party libs
+# pylint: disable=import-error
+try:
+    import tornado.testing
+    import tornado.concurrent
+    from tornado.testing import AsyncTestCase
+    HAS_TORNADO = True
+except ImportError:
+    HAS_TORNADO = False
+
+    # Let's create a fake AsyncHTTPTestCase so we can properly skip the test case
+    class AsyncTestCase(object):
+        pass
+# pylint: enable=import-error
+
+try:
+    from salt.netapi.rest_tornado import saltnado
+    HAS_TORNADO = True
+except ImportError:
+    HAS_TORNADO = False
+
+# Import utility lib from tests
+from unit.utils.event_test import eventpublisher_process, event, SOCK_DIR  # pylint: disable=import-error


+@skipIf(HAS_TORNADO is False, 'The tornado package needs to be installed')
 class TestUtils(TestCase):
     def test_batching(self):
         assert 1 == saltnado.get_batch_size('1', 10)

@@ -20,7 +44,8 @@ class TestUtils(TestCase):
         assert 11 == saltnado.get_batch_size('110%', 10)


-class TestSaltnadoUtils(tornado.testing.AsyncTestCase):
+@skipIf(HAS_TORNADO is False, 'The tornado package needs to be installed')
+class TestSaltnadoUtils(AsyncTestCase):
     def test_any_future(self):
         '''
         Test that the Any Future does what we think it does

@@ -58,7 +83,8 @@ class TestSaltnadoUtils(tornado.testing.AsyncTestCase):
         assert futures[1].done() is False


-class TestEventListener(tornado.testing.AsyncTestCase):
+@skipIf(HAS_TORNADO is False, 'The tornado package needs to be installed')
+class TestEventListener(AsyncTestCase):
     def setUp(self):
         if not os.path.exists(SOCK_DIR):
             os.makedirs(SOCK_DIR)

@@ -85,5 +111,5 @@ class TestEventListener(tornado.testing.AsyncTestCase):


 if __name__ == '__main__':
-    from integration import run_tests
+    from integration import run_tests  # pylint: disable=import-error
     run_tests(TestUtils, needs_daemon=False)
@@ -8,8 +8,8 @@ import datetime
 import pprint

 # Import Salt Testing libs
-from tests.integration import ModuleCase
 from salttesting import skipIf, TestCase
+from salttesting.case import ModuleCase
 from salttesting.helpers import ensure_in_syspath
 ensure_in_syspath('../../')
