Mirror of https://github.com/valitydev/salt.git, synced 2024-11-07 08:58:59 +00:00

Merge pull request #39930 from s0undt3ch/features/py3: Moar Py3 and a fix for #38121

Commit bbf4eacbba
@@ -25,7 +25,7 @@ load-plugins=saltpylint.pep8,
             saltpylint.py3modernize,
             saltpylint.smartup,
             saltpylint.minpyver,
             saltpylint.salttesting,
             saltpylint.blacklist,
             saltpylint.thirdparty

# Use multiple processes to speed up Pylint.
@@ -43,7 +43,7 @@ extension-pkg-whitelist=

# Fileperms Lint Plugin Settings
fileperms-default=0644
fileperms-ignore-paths=tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py
fileperms-ignore-paths=setup.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py

# Minimum Python Version To Enforce
minimum-python-version = 2.7
@@ -22,7 +22,7 @@ load-plugins=saltpylint.pep8,
             saltpylint.py3modernize,
             saltpylint.smartup,
             saltpylint.minpyver,
             saltpylint.salttesting,
             saltpylint.blacklist,
             saltpylint.thirdparty

# Use multiple processes to speed up Pylint.
@@ -40,7 +40,7 @@ extension-pkg-whitelist=

# Fileperms Lint Plugin Settings
fileperms-default=0644
fileperms-ignore-paths=tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py
fileperms-ignore-paths=setup.py,tests/runtests.py,tests/jenkins*.py,tests/saltsh.py,tests/buildpackage.py

# Minimum Python Version To Enforce
minimum-python-version = 2.7
@@ -8,4 +8,5 @@ moto>=0.3.6
SaltPyLint>=v2017.3.6
GitPython>=0.3
pytest
git+https://github.com/eisensheng/pytest-catchlog.git@develop#egg=Pytest-catchlog
git+https://github.com/saltstack/pytest-salt.git@master#egg=pytest-salt
@@ -1,4 +1,3 @@
pytest
git+https://github.com/eisensheng/pytest-catchlog.git@develop#egg=Pytest-catchlog
pytest-helpers-namespace
pytest-tempdir
@@ -60,18 +60,19 @@ def beacon(config):
        if _name not in procs:
            procs.append(_name)

    for process in config:
        ret_dict = {}
        if config[process] == 'running':
            if process not in procs:
                ret_dict[process] = 'Stopped'
                ret.append(ret_dict)
        elif config[process] == 'stopped':
            if process in procs:
                ret_dict[process] = 'Running'
                ret.append(ret_dict)
        else:
            if process not in procs:
                ret_dict[process] = False
                ret.append(ret_dict)
    for entry in config:
        for process in entry:
            ret_dict = {}
            if entry[process] == 'running':
                if process not in procs:
                    ret_dict[process] = 'Stopped'
                    ret.append(ret_dict)
            elif entry[process] == 'stopped':
                if process in procs:
                    ret_dict[process] = 'Running'
                    ret.append(ret_dict)
            else:
                if process not in procs:
                    ret_dict[process] = False
                    ret.append(ret_dict)
    return ret
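The hunk above switches the ps beacon from a single dict keyed by process name to a list of single-key dicts. A minimal standalone sketch of that normalization, using a hypothetical helper name (normalize_ps_config is not part of Salt):

```python
def normalize_ps_config(config):
    '''
    Accept the deprecated dict form ({'apache2': 'stopped'}) or the new
    list form ([{'apache2': 'stopped'}]) and always return the list form
    that the updated beacon loops over with "for entry in config".
    '''
    if isinstance(config, dict):
        return [{name: state} for name, state in config.items()]
    return config


print(normalize_ps_config({'apache2': 'stopped'}))   # [{'apache2': 'stopped'}]
print(normalize_ps_config([{'nginx': 'running'}]))   # unchanged: [{'nginx': 'running'}]
```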
@@ -126,33 +126,42 @@ def beacon(config):
    }]

    if len(config) < 1:
        config = {
        config = [{
            'loadavg': ['all'],
            'cpustats': ['all'],
            'meminfo': ['all'],
            'vmstats': ['all'],
            'time': ['all'],
        }
        }]

    for func in config:
        try:
            data = __salt__['status.{0}'.format(func)]()
        except salt.exceptions.CommandExecutionError as exc:
            log.debug('Status beacon attempted to process function {0} \
                but encountered error: {1}'.format(func, exc))
            continue
        ret[func] = {}
        item = config[func]
        if item == 'all':
            ret[func] = data
        else:
    if not isinstance(config, list):
        # To support the old dictionary config format
        config = [config]

    for entry in config:
        for func in entry:
            ret[func] = {}
            try:
                try:
                    ret[func][item] = data[item]
                except TypeError:
                    ret[func][item] = data[int(item)]
            except KeyError as exc:
                ret[func] = 'Status beacon is incorrectly configured: {0}'.format(exc)
                data = __salt__['status.{0}'.format(func)]()
            except salt.exceptions.CommandExecutionError as exc:
                log.debug('Status beacon attempted to process function {0} '
                          'but encountered error: {1}'.format(func, exc))
                continue
            if not isinstance(entry[func], list):
                func_items = [entry[func]]
            else:
                func_items = entry[func]
            for item in func_items:
                if item == 'all':
                    ret[func] = data
                else:
                    try:
                        try:
                            ret[func][item] = data[item]
                        except TypeError:
                            ret[func][item] = data[int(item)]
                    except KeyError as exc:
                        ret[func] = 'Status beacon is incorrectly configured: {0}'.format(exc)

    return [{
        'tag': ctime,
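The rewritten loop above wraps a legacy dict config into a list and coerces each function's value to a list of items. A condensed sketch of just that normalization logic (iter_status_config is an illustrative name, not a Salt API):

```python
def iter_status_config(config):
    # The old dictionary format ({'time': ['all']}) is wrapped into the new
    # list-of-dicts format ([{'time': ['all']}]); scalar values are coerced
    # to single-item lists, mirroring the func_items handling above.
    if not isinstance(config, list):
        config = [config]
    for entry in config:
        for func, items in entry.items():
            if not isinstance(items, list):
                items = [items]
            yield func, items


for func, items in iter_status_config({'time': ['all'], 'loadavg': [1]}):
    print(func, items)
```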
@@ -2155,7 +2155,8 @@ def namespaced_function(function, global_dict, defaults=None, preserve_context=F
        function.__code__,
        global_dict,
        name=function.__name__,
        argdefs=defaults
        argdefs=defaults,
        closure=function.__closure__
    )
    new_namespaced_function.__dict__.update(function.__dict__)
    return new_namespaced_function
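The one-line fix above forwards the original function's __closure__ when salt.utils.namespaced_function rebuilds the function object. A standalone illustration of why that argument matters when the wrapped function has free variables (the counter example is invented for demonstration, not Salt code):

```python
import types


def make_counter():
    count = [0]

    def bump():
        count[0] += 1
        return count[0]
    return bump


bump = make_counter()

# Rebuild the function in a fresh globals dict. For code objects with free
# variables, types.FunctionType raises TypeError unless a closure tuple of
# matching length is supplied, which is exactly what the patch forwards.
rebuilt = types.FunctionType(
    bump.__code__,
    {},
    name=bump.__name__,
    argdefs=bump.__defaults__,
    closure=bump.__closure__,
)
print(rebuilt(), rebuilt())  # 1 2
```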
setup.py (30 lines changed)
@@ -4,12 +4,12 @@
The setup script for salt
'''

from __future__ import absolute_import
# pylint: disable=file-perms
# pylint: disable=file-perms,ungrouped-imports,wrong-import-order,wrong-import-position,repr-flag-used-in-string
# pylint: disable=3rd-party-local-module-not-gated
# pylint: disable=C0111,E1101,E1103,F0401,W0611,W0201,W0232,R0201,R0902,R0903

# For Python 2.5. A no-op on 2.6 and above.
from __future__ import print_function, with_statement
from __future__ import absolute_import, print_function, with_statement

import os
import sys
@@ -396,12 +396,13 @@ class InstallPyCryptoWindowsWheel(Command):
        with indent_log():
            call_subprocess(call_arguments)


def uri_to_resource(resource_file):
    ### Returns the URI for a resource
    # ## Returns the URI for a resource
    # The basic case is that the resource is on saltstack.com
    # It could be the case that the resource is cached.
    salt_uri = 'https://repo.saltstack.com/windows/dependencies/' + resource_file
    if os.getenv('SALTREPO_LOCAL_CACHE') == None:
    salt_uri = 'https://repo.saltstack.com/windows/dependencies/' + resource_file
    if os.getenv('SALTREPO_LOCAL_CACHE') is None:
        # if environment variable not set, return the basic case
        return salt_uri
    if not os.path.isdir(os.getenv('SALTREPO_LOCAL_CACHE')):
@@ -410,14 +411,14 @@ def uri_to_resource(resource_file):
    cached_resource = os.path.join(os.getenv('SALTREPO_LOCAL_CACHE'), resource_file)
    cached_resource = cached_resource.replace('/', '\\')
    if not os.path.isfile(cached_resource):
        # if file does not exist, return the basic case
        # if file does not exist, return the basic case
        return salt_uri
    if os.path.getsize(cached_resource) == 0:
        # if file has zero size, return the basic case
        return salt_uri
        # if file has zero size, return the basic case
        return salt_uri
    return cached_resource


class InstallCompiledPyYaml(Command):

    description = 'Install PyYAML on Windows'
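The two hunks above clean up uri_to_resource (== None becomes is None, and the duplicated assignment and comments are re-indented). A condensed, illustrative re-implementation of the resulting logic, not the literal setup.py code:

```python
import os


def uri_to_resource_sketch(resource_file):
    # Prefer a locally cached copy only when SALTREPO_LOCAL_CACHE points at an
    # existing directory and the cached file is present and non-empty;
    # otherwise fall back to the repo.saltstack.com URI.
    salt_uri = 'https://repo.saltstack.com/windows/dependencies/' + resource_file
    cache_dir = os.getenv('SALTREPO_LOCAL_CACHE')
    if cache_dir is None or not os.path.isdir(cache_dir):
        return salt_uri
    cached = os.path.join(cache_dir, resource_file).replace('/', '\\')
    if not os.path.isfile(cached) or os.path.getsize(cached) == 0:
        return salt_uri
    return cached
```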
@@ -427,7 +428,6 @@ class InstallCompiledPyYaml(Command):

    def finalize_options(self):
        pass


    def run(self):
        if getattr(self.distribution, 'salt_installing_pyyaml_windows', None) is None:
@@ -503,7 +503,7 @@ class DownloadWindowsDlls(Command):
                    while True:
                        chunk = req.read(4096)
                        if len(chunk) == 0:
                            break;
                            break
                        wfh.write(chunk)
                        wfh.flush()
                else:
@@ -544,7 +544,7 @@ class Sdist(sdist):
            os.unlink(PACKAGED_FOR_SALT_SSH_FILE)


class CloudSdist(Sdist):
class CloudSdist(Sdist):  # pylint: disable=too-many-ancestors
    user_options = Sdist.user_options + [
        ('download-bootstrap-script', None,
         'Download the latest stable bootstrap-salt.sh script. This '
@@ -564,7 +564,7 @@ class CloudSdist(Sdist):
    def finalize_options(self):
        Sdist.finalize_options(self)
        if 'SKIP_BOOTSTRAP_DOWNLOAD' in os.environ:
            log('Please stop using \'SKIP_BOOTSTRAP_DOWNLOAD\' and use '
            log('Please stop using \'SKIP_BOOTSTRAP_DOWNLOAD\' and use '  # pylint: disable=not-callable
                '\'DOWNLOAD_BOOTSTRAP_SCRIPT\' instead')

        if 'DOWNLOAD_BOOTSTRAP_SCRIPT' in os.environ:
@@ -978,7 +978,7 @@ class SaltDistribution(distutils.dist.Distribution):
            'virt/*.jinja',
            'git/*',
            'lxc/*',
            ]}
        ]}
        if not IS_WINDOWS_PLATFORM:
            package_data['salt.cloud'] = ['deploy/*.sh']

@@ -26,6 +26,7 @@ import salt.utils.event
from tornado import gen
from tornado import ioloop
from tornado import netutil
from tornado import iostream

log = logging.getLogger(__name__)

@@ -114,7 +115,10 @@ class PyTestEngine(object):
        timeout = 60
        while True:
            timeout -= 1
            event_bus.fire_event(load, master_start_event_tag, timeout=500)
            if timeout <= 0:
            try:
                event_bus.fire_event(load, master_start_event_tag, timeout=500)
                if timeout <= 0:
                    break
                yield gen.sleep(1)
            except iostream.StreamClosedError:
                break
            yield gen.sleep(1)
@@ -9,38 +9,33 @@ import os

# Salt Libs
from salt.exceptions import CommandExecutionError
import tests.integration as integration
import salt.utils

# Salttesting libs
import tests.integration as integration
from tests.support.unit import skipIf


BEACON_CONF_DIR = os.path.join(integration.TMP, 'minion.d')
if not os.path.exists(BEACON_CONF_DIR):
    os.makedirs(BEACON_CONF_DIR)

IS_ADMIN = False
if salt.utils.is_windows():
    import salt.utils.win_functions
    current_user = salt.utils.win_functions.get_current_user()
    if current_user == 'SYSTEM':
        IS_ADMIN = True
    else:
        IS_ADMIN = salt.utils.win_functions.is_admin(current_user)
else:
    IS_ADMIN = os.geteuid() == 0


class BeaconsAddDeleteTest(integration.ModuleCase):
    '''
    Tests the add and delete functions
    '''
    def setUp(self):
        self.minion_conf_d_dir = os.path.join(
            self.minion_opts['config_dir'],
            os.path.dirname(self.minion_opts['default_include']))
        if not os.path.isdir(self.minion_conf_d_dir):
            os.makedirs(self.minion_conf_d_dir)
        self.beacons_config_file_path = os.path.join(self.minion_conf_d_dir, 'beacons.conf')

    def tearDown(self):
        if os.path.isfile(self.beacons_config_file_path):
            os.unlink(self.beacons_config_file_path)

    def test_add_and_delete(self):
        '''
        Test adding and deleting a beacon
        '''
        _add = self.run_function('beacons.add', ['ps', {'apache2': 'stopped'}])
        _add = self.run_function('beacons.add', ['ps', [{'apache2': 'stopped'}]])
        self.assertTrue(_add['result'])

        # save added beacon
@@ -59,10 +54,24 @@ class BeaconsTest(integration.ModuleCase):
    '''
    Tests the beacons execution module
    '''
    beacons_config_file_path = minion_conf_d_dir = None

    @classmethod
    def tearDownClass(cls):
        if os.path.isfile(cls.beacons_config_file_path):
            os.unlink(cls.beacons_config_file_path)

    def setUp(self):
        if self.minion_conf_d_dir is None:
            self.minion_conf_d_dir = os.path.join(
                self.minion_opts['config_dir'],
                os.path.dirname(self.minion_opts['default_include']))
        if not os.path.isdir(self.minion_conf_d_dir):
            os.makedirs(self.minion_conf_d_dir)
        self.__class__.beacons_config_file_path = os.path.join(self.minion_conf_d_dir, 'beacons.conf')
        try:
            # Add beacon to disable
            self.run_function('beacons.add', ['ps', {'apache2': 'stopped'}])
            self.run_function('beacons.add', ['ps', [{'apache2': 'stopped'}]])
            self.run_function('beacons.save')
        except CommandExecutionError:
            self.skipTest('Unable to add beacon')
@@ -93,7 +102,10 @@ class BeaconsTest(integration.ModuleCase):

        # assert beacon ps is disabled
        _list = self.run_function('beacons.list', return_yaml=False)
        self.assertFalse(_list['ps']['enabled'])
        for bdict in _list['ps']:
            if 'enabled' in bdict:
                self.assertFalse(bdict['enabled'])
                break

    def test_enable(self):
        '''
@@ -131,6 +143,6 @@ class BeaconsTest(integration.ModuleCase):
        # list beacons
        ret = self.run_function('beacons.list', return_yaml=False)
        if 'enabled' in ret:
            self.assertEqual(ret, {'ps': {'apache2': 'stopped'}, 'enabled': True})
            self.assertEqual(ret, {'ps': [{'apache2': 'stopped'}], 'enabled': True})
        else:
            self.assertEqual(ret, {'ps': {'apache': 'stopped'}})
@@ -417,35 +417,59 @@ class _FixLoaderModuleMockMixinMroOrder(type):

class LoaderModuleMockMixin(six.with_metaclass(_FixLoaderModuleMockMixinMroOrder, object)):
    def setUp(self):
        loader_module = getattr(self, 'loader_module', None)
        if loader_module is not None:
            if NO_MOCK:
                self.skipTest(NO_MOCK_REASON)
        loader_modules = getattr(self, 'loader_module', None)
        if loader_modules is None:
            return

            loader_module_name = loader_module.__name__
            loader_module_globals = getattr(self, 'loader_module_globals', None)
            loader_module_blacklisted_dunders = getattr(self, 'loader_module_blacklisted_dunders', ())
            if loader_module_globals is None:
                loader_module_globals = {}
            elif callable(loader_module_globals):
                loader_module_globals = loader_module_globals()
            else:
                loader_module_globals = copy.deepcopy(loader_module_globals)
        if NO_MOCK:
            self.skipTest(NO_MOCK_REASON)

            salt_dunders = (
                '__opts__', '__salt__', '__runner__', '__context__', '__utils__',
                '__ext_pillar__', '__thorium__', '__states__', '__serializers__', '__ret__',
                '__grains__', '__pillar__', '__sdb__',
                # Proxy is commented out on purpose since some code in salt expects a NameError
                # and is most of the time not a required dunder
                # '__proxy__'
        if not isinstance(loader_modules, (list, tuple)):
            loader_modules = [loader_modules]

        loader_module_globals = getattr(self, 'loader_module_globals', None)
        loader_module_blacklisted_dunders = getattr(self, 'loader_module_blacklisted_dunders', ())
        if loader_module_globals is None:
            loader_module_globals = {}
        elif callable(loader_module_globals):
            loader_module_globals = loader_module_globals()
        else:
            loader_module_globals = copy.deepcopy(loader_module_globals)

        minion_funcs = None
        if '__salt__' in loader_module_globals and loader_module_globals['__salt__'] == 'autoload':
            if '__opts__' not in loader_module_globals:
                raise RuntimeError(
                    'You must provide __opts__ in the loader_module_globals to auto load the minion functions'
                )
            import salt.loader
            ctx = {}
            if '__utils__' not in loader_module_globals:
                utils = salt.loader.utils(loader_module_globals['__opts__'],
                                          context=loader_module_globals.get('__context__') or ctx)
                loader_module_globals['__utils__'] = utils
            minion_funcs = salt.loader.minion_mods(
                loader_module_globals['__opts__'],
                context=loader_module_globals.get('__context__') or ctx,
                utils=loader_module_globals.get('__utils__'),
            )
            for dunder_name in salt_dunders:
                if dunder_name not in loader_module_globals:
                    if dunder_name in loader_module_blacklisted_dunders:
                        continue
                    loader_module_globals[dunder_name] = {}
            loader_module_globals['__salt__'] = minion_funcs

        salt_dunders = (
            '__opts__', '__salt__', '__runner__', '__context__', '__utils__',
            '__ext_pillar__', '__thorium__', '__states__', '__serializers__', '__ret__',
            '__grains__', '__pillar__', '__sdb__',
            # Proxy is commented out on purpose since some code in salt expects a NameError
            # and is most of the time not a required dunder
            # '__proxy__'
        )
        for dunder_name in salt_dunders:
            if dunder_name not in loader_module_globals:
                if dunder_name in loader_module_blacklisted_dunders:
                    continue
                loader_module_globals[dunder_name] = {}

        for loader_module in loader_modules:
            for key in loader_module_globals:
                if not hasattr(loader_module, key):
                    if key in salt_dunders:
@@ -454,7 +478,7 @@ class LoaderModuleMockMixin(six.with_metaclass(_FixLoaderModuleMockMixinMroOrder
                        setattr(loader_module, key, None)

            if loader_module_globals:
                patcher = patch.multiple(loader_module_name, **loader_module_globals)
                patcher = patch.multiple(loader_module, **loader_module_globals)
                patcher.start()

                def cleanup(patcher, loader_module_globals):
@@ -462,4 +486,14 @@ class LoaderModuleMockMixin(six.with_metaclass(_FixLoaderModuleMockMixinMroOrder
                    del loader_module_globals

                self.addCleanup(cleanup, patcher, loader_module_globals)
        if minion_funcs is not None:
            # Since we autoloaded the minion_funcs, let's namespace the functions with the globals
            # used to patch above
            import salt.utils
            for func in minion_funcs:
                minion_funcs[func] = salt.utils.namespaced_function(
                    minion_funcs[func],
                    loader_module_globals,
                    preserve_context=True
                )
        super(LoaderModuleMockMixin, self).setUp()
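One notable change above is `patch.multiple(loader_module, ...)` replacing `patch.multiple(loader_module_name, ...)`: mock's patch.multiple accepts the module object itself, which is what lets the mixin loop over several loader_modules. A tiny standalone illustration of that call style, using the standard json module as a stand-in target (not the mixin's own code):

```python
import json
from unittest import mock

# patch.multiple can take the object to patch directly instead of a dotted
# string path; DEFAULT asks mock to create the replacement MagicMocks.
with mock.patch.multiple(json, dumps=mock.DEFAULT, loads=mock.DEFAULT):
    assert isinstance(json.dumps, mock.MagicMock)
    assert isinstance(json.loads, mock.MagicMock)

# Outside the context manager the real functions are restored.
assert json.dumps({'a': 1}) == '{"a": 1}'
```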
@@ -10,15 +10,16 @@

    XML Unit Tests
'''
# pylint: disable=wrong-import-order,wrong-import-position

# Import python libs
from __future__ import absolute_import
import io
import sys
import logging

# Import 3rd-party libs
import salt.ext.six as six
from salt.ext.six.moves import StringIO  # pylint: disable=import-error

log = logging.getLogger(__name__)

@@ -35,7 +36,7 @@ try:
        '''

        def __init__(self, delegate):
            self._captured = StringIO()
            self._captured = six.StringIO()
            self.delegate = delegate

        def write(self, text):
@@ -44,17 +45,18 @@ try:
            self._captured.write(text)
            self.delegate.write(text)

        def fileno(self):
            return self.delegate.fileno()

        def __getattr__(self, attr):
            try:
                return getattr(self._captured, attr)
            except AttributeError:
            except (AttributeError, io.UnsupportedOperation):
                return getattr(self.delegate, attr)

    class _XMLTestResult(xmlrunner.result._XMLTestResult):
        def startTest(self, test):
            logging.getLogger(__name__).debug(
                '>>>>> START >>>>> {0}'.format(test.id())
            )
            log.debug('>>>>> START >>>>> {0}'.format(test.id()))
            # xmlrunner classes are NOT new-style classes
            xmlrunner.result._XMLTestResult.startTest(self, test)
            if self.buffer:
@@ -66,9 +68,7 @@ try:
            sys.stdout = self._stdout_buffer

        def stopTest(self, test):
            logging.getLogger(__name__).debug(
                '<<<<< END <<<<<<< {0}'.format(test.id())
            )
            log.debug('<<<<< END <<<<<<< {0}'.format(test.id()))
            # xmlrunner classes are NOT new-style classes
            return xmlrunner.result._XMLTestResult.stopTest(self, test)
@@ -11,8 +11,6 @@ from salt.beacons import inotify

# Salt testing libs
from tests.support.unit import skipIf, TestCase
from tests.support.helpers import destructiveTest
from tests.support.mock import NO_MOCK, NO_MOCK_REASON

# Third-party libs
try:
@@ -23,20 +21,23 @@ except ImportError:


@skipIf(not HAS_PYINOTIFY, 'pyinotify is not available')
@skipIf(NO_MOCK, NO_MOCK_REASON)
class INotifyBeaconTestCase(TestCase):
    '''
    Test case for salt.beacons.inotify
    '''
    def setUp(self):
        inotify.__context__ = {}
        self.tmpdir = tempfile.mkdtemp()

    def test_empty_config(self, *args, **kwargs):
    def tearDown(self):
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def test_empty_config(self):
        config = {}
        ret = inotify.beacon(config)
        self.assertEqual(ret, [])

    def test_file_open(self, *args, **kwargs):
    def test_file_open(self):
        path = os.path.realpath(__file__)
        config = {path: {'mask': ['open']}}
        ret = inotify.beacon(config)
@@ -49,119 +50,87 @@ class INotifyBeaconTestCase(TestCase):
        self.assertEqual(ret[0]['path'], path)
        self.assertEqual(ret[0]['change'], 'IN_OPEN')

    @destructiveTest
    def test_dir_no_auto_add(self, *args, **kwargs):
        tmpdir = None
        try:
            tmpdir = tempfile.mkdtemp()
            config = {tmpdir: {'mask': ['create']}}
            ret = inotify.beacon(config)
            self.assertEqual(ret, [])
            fp = os.path.join(tmpdir, 'tmpfile')
            with open(fp, 'w') as f:
                pass
            ret = inotify.beacon(config)
            self.assertEqual(len(ret), 1)
            self.assertEqual(ret[0]['path'], fp)
            self.assertEqual(ret[0]['change'], 'IN_CREATE')
            with open(fp, 'r') as f:
                pass
            ret = inotify.beacon(config)
            self.assertEqual(ret, [])
    def test_dir_no_auto_add(self):
        config = {self.tmpdir: {'mask': ['create']}}
        ret = inotify.beacon(config)
        self.assertEqual(ret, [])
        fp = os.path.join(self.tmpdir, 'tmpfile')
        with open(fp, 'w') as f:
            pass
        ret = inotify.beacon(config)
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0]['path'], fp)
        self.assertEqual(ret[0]['change'], 'IN_CREATE')
        with open(fp, 'r') as f:
            pass
        ret = inotify.beacon(config)
        self.assertEqual(ret, [])

        finally:
            if tmpdir:
                shutil.rmtree(tmpdir)
    def test_dir_auto_add(self):
        config = {self.tmpdir: {'mask': ['create', 'open'], 'auto_add': True}}
        ret = inotify.beacon(config)
        self.assertEqual(ret, [])
        fp = os.path.join(self.tmpdir, 'tmpfile')
        with open(fp, 'w') as f:
            pass
        ret = inotify.beacon(config)
        self.assertEqual(len(ret), 2)
        self.assertEqual(ret[0]['path'], fp)
        self.assertEqual(ret[0]['change'], 'IN_CREATE')
        self.assertEqual(ret[1]['path'], fp)
        self.assertEqual(ret[1]['change'], 'IN_OPEN')
        with open(fp, 'r') as f:
            pass
        ret = inotify.beacon(config)
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0]['path'], fp)
        self.assertEqual(ret[0]['change'], 'IN_OPEN')

    @destructiveTest
    def test_dir_auto_add(self, *args, **kwargs):
        tmpdir = None
        try:
            tmpdir = tempfile.mkdtemp()
            config = {tmpdir: {'mask': ['create', 'open'], 'auto_add': True}}
            ret = inotify.beacon(config)
            self.assertEqual(ret, [])
            fp = os.path.join(tmpdir, 'tmpfile')
            with open(fp, 'w') as f:
                pass
            ret = inotify.beacon(config)
            self.assertEqual(len(ret), 2)
            self.assertEqual(ret[0]['path'], fp)
            self.assertEqual(ret[0]['change'], 'IN_CREATE')
            self.assertEqual(ret[1]['path'], fp)
            self.assertEqual(ret[1]['change'], 'IN_OPEN')
            with open(fp, 'r') as f:
                pass
            ret = inotify.beacon(config)
            self.assertEqual(len(ret), 1)
            self.assertEqual(ret[0]['path'], fp)
            self.assertEqual(ret[0]['change'], 'IN_OPEN')
    def test_dir_recurse(self):
        dp1 = os.path.join(self.tmpdir, 'subdir1')
        os.mkdir(dp1)
        dp2 = os.path.join(dp1, 'subdir2')
        os.mkdir(dp2)
        fp = os.path.join(dp2, 'tmpfile')
        with open(fp, 'w') as f:
            pass
        config = {self.tmpdir: {'mask': ['open'], 'recurse': True}}
        ret = inotify.beacon(config)
        self.assertEqual(ret, [])
        with open(fp) as f:
            pass
        ret = inotify.beacon(config)
        self.assertEqual(len(ret), 3)
        self.assertEqual(ret[0]['path'], dp1)
        self.assertEqual(ret[0]['change'], 'IN_OPEN|IN_ISDIR')
        self.assertEqual(ret[1]['path'], dp2)
        self.assertEqual(ret[1]['change'], 'IN_OPEN|IN_ISDIR')
        self.assertEqual(ret[2]['path'], fp)
        self.assertEqual(ret[2]['change'], 'IN_OPEN')

        finally:
            if tmpdir:
                shutil.rmtree(tmpdir)

    @destructiveTest
    def test_dir_recurse(self, *args, **kwargs):
        tmpdir = None
        try:
            tmpdir = tempfile.mkdtemp()
            dp1 = os.path.join(tmpdir, 'subdir1')
            os.mkdir(dp1)
            dp2 = os.path.join(dp1, 'subdir2')
            os.mkdir(dp2)
            fp = os.path.join(dp2, 'tmpfile')
            with open(fp, 'w') as f:
                pass
            config = {tmpdir: {'mask': ['open'], 'recurse': True}}
            ret = inotify.beacon(config)
            self.assertEqual(ret, [])
            with open(fp) as f:
                pass
            ret = inotify.beacon(config)
            self.assertEqual(len(ret), 3)
            self.assertEqual(ret[0]['path'], dp1)
            self.assertEqual(ret[0]['change'], 'IN_OPEN|IN_ISDIR')
            self.assertEqual(ret[1]['path'], dp2)
            self.assertEqual(ret[1]['change'], 'IN_OPEN|IN_ISDIR')
            self.assertEqual(ret[2]['path'], fp)
            self.assertEqual(ret[2]['change'], 'IN_OPEN')

        finally:
            if tmpdir:
                shutil.rmtree(tmpdir)

    @destructiveTest
    def test_dir_recurse_auto_add(self, *args, **kwargs):
        tmpdir = None
        try:
            tmpdir = tempfile.mkdtemp()
            dp1 = os.path.join(tmpdir, 'subdir1')
            os.mkdir(dp1)
            config = {tmpdir: {'mask': ['create', 'delete'],
                               'recurse': True,
                               'auto_add': True}}
            ret = inotify.beacon(config)
            self.assertEqual(ret, [])
            dp2 = os.path.join(dp1, 'subdir2')
            os.mkdir(dp2)
            ret = inotify.beacon(config)
            self.assertEqual(len(ret), 1)
            self.assertEqual(ret[0]['path'], dp2)
            self.assertEqual(ret[0]['change'], 'IN_CREATE|IN_ISDIR')
            fp = os.path.join(dp2, 'tmpfile')
            with open(fp, 'w') as f:
                pass
            ret = inotify.beacon(config)
            self.assertEqual(len(ret), 1)
            self.assertEqual(ret[0]['path'], fp)
            self.assertEqual(ret[0]['change'], 'IN_CREATE')
            os.remove(fp)
            ret = inotify.beacon(config)
            self.assertEqual(len(ret), 1)
            self.assertEqual(ret[0]['path'], fp)
            self.assertEqual(ret[0]['change'], 'IN_DELETE')

        finally:
            if tmpdir:
                shutil.rmtree(tmpdir)
    def test_dir_recurse_auto_add(self):
        dp1 = os.path.join(self.tmpdir, 'subdir1')
        os.mkdir(dp1)
        config = {self.tmpdir: {'mask': ['create', 'delete'],
                                'recurse': True,
                                'auto_add': True}}
        ret = inotify.beacon(config)
        self.assertEqual(ret, [])
        dp2 = os.path.join(dp1, 'subdir2')
        os.mkdir(dp2)
        ret = inotify.beacon(config)
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0]['path'], dp2)
        self.assertEqual(ret[0]['change'], 'IN_CREATE|IN_ISDIR')
        fp = os.path.join(dp2, 'tmpfile')
        with open(fp, 'w') as f:
            pass
        ret = inotify.beacon(config)
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0]['path'], fp)
        self.assertEqual(ret[0]['change'], 'IN_CREATE')
        os.remove(fp)
        ret = inotify.beacon(config)
        self.assertEqual(len(ret), 1)
        self.assertEqual(ret[0]['path'], fp)
        self.assertEqual(ret[0]['change'], 'IN_DELETE')
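The test refactor above replaces the per-test try/finally and @destructiveTest handling with a shared scratch directory created in setUp and removed in tearDown. The same pattern in a minimal self-contained test case (illustrative only, not Salt code):

```python
import os
import shutil
import tempfile
import unittest


class TmpDirTestCase(unittest.TestCase):
    def setUp(self):
        # One scratch directory per test...
        self.tmpdir = tempfile.mkdtemp()

    def tearDown(self):
        # ...always cleaned up, even if the test body raises.
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def test_create_file(self):
        path = os.path.join(self.tmpdir, 'tmpfile')
        with open(path, 'w'):
            pass
        self.assertTrue(os.path.isfile(path))


if __name__ == '__main__':
    unittest.main()
```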
tests/unit/beacons/test_status.py (new file, 55 lines)
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
    :copyright: © 2017 by the SaltStack Team, see AUTHORS for more details.


    tests.unit.beacons.test_status
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Status beacon test cases
'''

# Python libs
from __future__ import absolute_import

# Salt libs
import salt.config
import salt.loader
from salt.beacons import status
from salt.modules import status as status_module

# Salt testing libs
from tests.support.unit import TestCase
from tests.support.mixins import LoaderModuleMockMixin


class StatusBeaconTestCase(TestCase, LoaderModuleMockMixin):
    '''
    Test case for salt.beacons.status
    '''
    loader_module = (status, status_module)

    def loader_module_globals(self):
        opts = salt.config.DEFAULT_MINION_OPTS
        return {
            '__opts__': opts,
            '__salt__': 'autoload',
            '__context__': {},
            '__grains__': {'kernel': 'Linux'}
        }

    def test_empty_config(self, *args, **kwargs):
        config = {}
        ret = status.beacon(config)
        self.assertEqual(ret[0]['data'].keys(), ['loadavg', 'meminfo', 'cpustats', 'vmstats', 'time'])

    def test_deprecated_dict_config(self):
        config = {'time': ['all']}
        ret = status.beacon(config)
        self.assertEqual(ret[0]['data'].keys(), ['time'])

    def test_list_config(self):
        config = [{'time': ['all']}]
        ret = status.beacon(config)
        self.assertEqual(ret[0]['data'].keys(), ['time'])