Merge pull request #43439 from rallytime/merge-develop
[develop] Merge forward from 2017.7 to develop
Commit 2ac185b055
@@ -263,9 +263,17 @@ against that branch.
Release Branches
----------------

For each release a branch will be created when we are ready to tag. The branch will be the same name as the tag minus the v. For example, the v2017.7.1 release was created from the 2017.7.1 branch. This branching strategy will allow for more stability when there is a need for a re-tag during the testing phase of our releases.
For each release, a branch will be created when the SaltStack release team is
ready to tag. The release branch is created from the parent branch and will be
the same name as the tag minus the ``v``. For example, the ``2017.7.1`` release
branch was created from the ``2017.7`` parent branch and the ``v2017.7.1``
release was tagged at the ``HEAD`` of the ``2017.7.1`` branch. This branching
strategy will allow for more stability when there is a need for a re-tag during
the testing phase of the release process.

Once the branch is created, the fixes required for a given release, as determined by the SaltStack release team, will be added to this branch. All commits in this branch will be merged forward into the parent branch as well.
Once the release branch is created, the fixes required for a given release, as
determined by the SaltStack release team, will be added to this branch. All
commits in this branch will be merged forward into the parent branch as well.

Keeping Salt Forks in Sync
==========================
@@ -14,7 +14,7 @@ from __future__ import absolute_import
# Import Salt libs
import salt.spm
import salt.utils.parsers as parsers
from salt.utils.verify import verify_log
from salt.utils.verify import verify_log, verify_env


class SPM(parsers.SPMParser):
@@ -29,6 +29,10 @@ class SPM(parsers.SPMParser):
ui = salt.spm.SPMCmdlineInterface()
self.parse_args()
self.setup_logfile_logger()
v_dirs = [
self.config['cachedir'],
]
verify_env(v_dirs, self.config['user'],)
verify_log(self.config)
client = salt.spm.SPMClient(ui, self.config)
client.run(self.args)
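Editor's note: the hunk above adds a v_dirs list and a verify_env() call before verify_log() runs, so the SPM cache directory is checked before anything tries to use it. Below is a minimal standalone sketch of that ordering; the directory path, user name, and verify_dirs() helper are illustrative assumptions, not Salt's actual salt.utils.verify API.

# Hypothetical sketch: make sure runtime directories exist and are owned by
# the expected user before logging or client work happens.
import os
import pwd

def verify_dirs(dirs, user):
    uid = pwd.getpwnam(user).pw_uid
    for dirname in dirs:
        if not os.path.isdir(dirname):
            os.makedirs(dirname, mode=0o750)
        if os.stat(dirname).st_uid != uid:
            raise RuntimeError('{0} is not owned by {1}'.format(dirname, user))

v_dirs = ['/var/cache/salt/spm']   # assumed path, stands in for config['cachedir']
verify_dirs(v_dirs, 'root')        # assumed user, stands in for config['user']
# ... only then set up logging and run the SPM client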
salt/key.py (37 changed lines)
@@ -496,7 +496,7 @@ class Key(object):
minions = []
for key, val in six.iteritems(keys):
minions.extend(val)
if not self.opts.get(u'preserve_minion_cache', False) or not preserve_minions:
if not self.opts.get(u'preserve_minion_cache', False):
m_cache = os.path.join(self.opts[u'cachedir'], self.ACC)
if os.path.isdir(m_cache):
for minion in os.listdir(m_cache):
@@ -743,7 +743,7 @@ class Key(object):
def delete_key(self,
match=None,
match_dict=None,
preserve_minions=False,
preserve_minions=None,
revoke_auth=False):
'''
Delete public keys. If "match" is passed, it is evaluated as a glob.
@@ -781,11 +781,10 @@
salt.utils.event.tagify(prefix=u'key'))
except (OSError, IOError):
pass
if preserve_minions:
preserve_minions_list = matches.get(u'minions', [])
if self.opts.get(u'preserve_minions') is True:
self.check_minion_cache(preserve_minions=matches.get(u'minions', []))
else:
preserve_minions_list = []
self.check_minion_cache(preserve_minions=preserve_minions_list)
self.check_minion_cache()
if self.opts.get(u'rotate_aes_key'):
salt.crypt.dropfile(self.opts[u'cachedir'], self.opts[u'user'])
return (
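Editor's note: the delete_key hunk above switches between preserving and flushing the minion cache depending on whether opts[u'preserve_minions'] is True. A rough sketch of that control flow follows; handle_deleted_keys() and flush_cache() are made-up names used only to illustrate the branch, not the actual Key API.

# Illustrative only: shows when cache entries for deleted keys are kept
# versus flushed under the opts-driven switch.
def handle_deleted_keys(opts, matched_minions, flush_cache):
    if opts.get(u'preserve_minions') is True:
        # Keep cache entries for the minions whose keys were just deleted.
        preserved = list(matched_minions)
    else:
        # Default: drop the cache for every deleted minion.
        preserved = []
    for minion in matched_minions:
        if minion not in preserved:
            flush_cache(minion)

flushed = []
handle_deleted_keys({u'preserve_minions': False}, [u'minion1'], flushed.append)
assert flushed == [u'minion1']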
@@ -976,16 +975,17 @@ class RaetKey(Key):
minions.extend(val)

m_cache = os.path.join(self.opts[u'cachedir'], u'minions')
if os.path.isdir(m_cache):
for minion in os.listdir(m_cache):
if minion not in minions:
shutil.rmtree(os.path.join(m_cache, minion))
cache = salt.cache.factory(self.opts)
clist = cache.list(self.ACC)
if clist:
for minion in clist:
if not self.opts.get('preserve_minion_cache', False):
if os.path.isdir(m_cache):
for minion in os.listdir(m_cache):
if minion not in minions and minion not in preserve_minions:
cache.flush(u'{0}/{1}'.format(self.ACC, minion))
shutil.rmtree(os.path.join(m_cache, minion))
cache = salt.cache.factory(self.opts)
clist = cache.list(self.ACC)
if clist:
for minion in clist:
if minion not in minions and minion not in preserve_minions:
cache.flush(u'{0}/{1}'.format(self.ACC, minion))

kind = self.opts.get(u'__role', u'') # application kind
if kind not in kinds.APPL_KINDS:
@@ -1227,7 +1227,7 @@ class RaetKey(Key):
def delete_key(self,
match=None,
match_dict=None,
preserve_minions=False,
preserve_minions=None,
revoke_auth=False):
'''
Delete public keys. If "match" is passed, it is evaluated as a glob.
@@ -1258,7 +1258,10 @@ class RaetKey(Key):
os.remove(os.path.join(self.opts[u'pki_dir'], status, key))
except (OSError, IOError):
pass
self.check_minion_cache(preserve_minions=matches.get(u'minions', []))
if self.opts.get('preserve_minions') is True:
self.check_minion_cache(preserve_minions=matches.get(u'minions', []))
else:
self.check_minion_cache()
return (
self.name_match(match) if match is not None
else self.dict_match(matches)
@@ -181,7 +181,10 @@ def has_settable_hwclock():
salt '*' system.has_settable_hwclock
'''
if salt.utils.path.which_bin(['hwclock']) is not None:
res = __salt__['cmd.run_all'](['hwclock', '--test', '--systohc'], python_shell=False)
res = __salt__['cmd.run_all'](
['hwclock', '--test', '--systohc'], python_shell=False,
output_loglevel='quiet', ignore_retcode=True
)
return res['retcode'] == 0
return False
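Editor's note: the two variants of the cmd.run_all call above differ only in output_loglevel='quiet' and ignore_retcode=True, so the probing hwclock call neither spams the minion log nor reports a non-zero exit as an error. A hedged, standalone sketch of the same probe using subprocess instead of the __salt__ dunder (the hwclock flags come from the hunk; everything else is illustrative):

import subprocess

def has_settable_hwclock():
    # Probe only: --test keeps hwclock from actually writing the clock.
    # A non-zero exit simply means "not settable here", not a failure.
    try:
        proc = subprocess.Popen(
            ['hwclock', '--test', '--systohc'],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        proc.communicate()
        return proc.returncode == 0
    except OSError:
        # hwclock binary not present at all
        return False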
@@ -889,8 +889,8 @@ def list_repo_pkgs(*args, **kwargs):
_parse_output(out['stdout'], strict=True)
else:
for repo in repos:
cmd = [_yum(), '--quiet', 'repository-packages', repo,
'list', '--showduplicates']
cmd = [_yum(), '--quiet', '--showduplicates',
'repository-packages', repo, 'list']
if cacheonly:
cmd.append('-C')
# Can't concatenate because args is a tuple, using list.extend()
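Editor's note: the retained comment above ("Can't concatenate because args is a tuple, using list.extend()") is worth a two-line illustration; the command values and package names here are made up.

cmd = ['yum', '--quiet', '--showduplicates', 'repository-packages', 'base', 'list']
args = ('zsh', 'tmux')          # *args arrives as a tuple
# cmd + args would raise TypeError: can only concatenate list (not "tuple") to list
cmd.extend(args)                # extend() accepts any iterable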
@@ -2840,7 +2840,7 @@ def _parse_repo_file(filename):

for section in parsed._sections:
section_dict = dict(parsed._sections[section])
section_dict.pop('__name__')
section_dict.pop('__name__', None)
config[section] = section_dict

# Try to extract leading comments
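Editor's note: the two pop() calls above differ only in the default argument, which keeps a missing '__name__' key from raising. For reference, with illustrative values in a plain dict:

section_dict = {'baseurl': 'http://example.com/repo', 'enabled': '1'}
# section_dict.pop('__name__') would raise KeyError here
section_dict.pop('__name__', None)   # returns None and leaves the dict intact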
@@ -490,20 +490,19 @@ class Pillar(object):
self.opts['pillarenv'], ', '.join(self.opts['file_roots'])
)
else:
tops[self.opts['pillarenv']] = [
compile_template(
self.client.cache_file(
self.opts['state_top'],
self.opts['pillarenv']
),
self.rend,
self.opts['renderer'],
self.opts['renderer_blacklist'],
self.opts['renderer_whitelist'],
self.opts['pillarenv'],
_pillar_rend=True,
)
]
top = self.client.cache_file(self.opts['state_top'], self.opts['pillarenv'])
if top:
tops[self.opts['pillarenv']] = [
compile_template(
top,
self.rend,
self.opts['renderer'],
self.opts['renderer_blacklist'],
self.opts['renderer_whitelist'],
self.opts['pillarenv'],
_pillar_rend=True,
)
]
else:
for saltenv in self._get_envs():
if self.opts.get('pillar_source_merging_strategy', None) == "none":
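Editor's note: one variant in the pillar hunk above fetches the top file once and only calls compile_template() when cache_file() returned a usable value. A minimal sketch of that guard follows; cache_top() and compile_() are stand-ins for self.client.cache_file() and compile_template(), not the real fileclient or renderer API.

# Illustrative guard: skip template compilation when the cached top file
# could not be fetched (a falsey return value).
def collect_tops(pillarenv, cache_top, compile_):
    tops = {}
    top = cache_top('top.sls', pillarenv)
    if top:
        tops[pillarenv] = [compile_(top)]
    return tops

# When caching fails, no entry is created at all.
assert collect_tops('base', lambda *_: False, lambda p: p) == {}
assert collect_tops('base', lambda *_: '/tmp/top.sls', lambda p: p) == {'base': ['/tmp/top.sls']}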
@@ -13,9 +13,12 @@ import tarfile
import shutil
import hashlib
import logging
import pwd
import grp
import sys
try:
import pwd
import grp
except ImportError:
pass

# Import Salt libs
import salt.client
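Editor's note: the import hunk above guards pwd and grp with try/except ImportError because those modules do not exist on Windows. The general optional-import pattern, sketched standalone (the fallback helper and getpass usage are illustrative, not part of the diff):

import getpass
import os

# Optional POSIX-only imports: absent on Windows, so guard them instead of
# letting the module fail at import time.
try:
    import pwd
    import grp
except ImportError:
    pwd = None
    grp = None

def current_user_name():
    if pwd is not None:
        return pwd.getpwuid(os.getuid()).pw_name
    # Windows fallback; getpass works on every platform.
    return getpass.getuser()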
@@ -490,10 +493,18 @@ class SPMClient(object):

# No defaults for this in config.py; default to the current running
# user and group
uid = self.opts.get('spm_uid', os.getuid())
gid = self.opts.get('spm_gid', os.getgid())
uname = pwd.getpwuid(uid)[0]
gname = grp.getgrgid(gid)[0]
import salt.utils
if salt.utils.is_windows():
import salt.utils.win_functions
uname = gname = salt.utils.win_functions.get_current_user()
uname_sid = salt.utils.win_functions.get_sid_from_name(uname)
uid = self.opts.get('spm_uid', uname_sid)
gid = self.opts.get('spm_gid', uname_sid)
else:
uid = self.opts.get('spm_uid', os.getuid())
gid = self.opts.get('spm_gid', os.getgid())
uname = pwd.getpwuid(uid)[0]
gname = grp.getgrgid(gid)[0]

# Second pass: install the files
for member in pkg_files:
@@ -709,7 +720,7 @@ class SPMClient(object):
raise SPMInvocationError('A path to a directory must be specified')

if args[1] == '.':
repo_path = os.environ['PWD']
repo_path = os.getcwdu()
else:
repo_path = args[1]
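Editor's note: the last hunk above shows os.environ['PWD'] next to os.getcwdu(). The environment variable is only whatever the launching shell exported and can be missing or stale, while the os call asks the OS for the real working directory (os.getcwdu() is the Python 2 spelling; os.getcwd() on Python 3). A tiny comparison, illustrative only:

import os

os.chdir('/tmp')
print(os.getcwd())              # always '/tmp' (or its resolved real path)
print(os.environ.get('PWD'))    # whatever the shell exported, possibly stale or None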
@@ -3172,7 +3172,7 @@ class BaseHighState(object):
Returns:
{'saltenv': ['state1', 'state2', ...]}
'''
matches = {}
matches = DefaultOrderedDict(OrderedDict)
# pylint: disable=cell-var-from-loop
for saltenv, body in six.iteritems(top):
if self.opts[u'environment']:
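Editor's note: the BaseHighState hunk above shows matches = {} next to matches = DefaultOrderedDict(OrderedDict); the latter is Salt's ordered flavor of a default dict, so each saltenv key springs into existence with an ordered container on first access. The same idea with the standard library, for illustration only (insertion order of the outer keys is not guaranteed here on older Pythons):

from collections import defaultdict, OrderedDict

matches = defaultdict(OrderedDict)                    # stdlib analogue of DefaultOrderedDict
matches['base']['webserver'] = ['apache', 'nginx']    # no KeyError for the missing 'base' key
print(dict(matches))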
@@ -15,6 +15,8 @@ DEVICE="{{name}}"
{%endif%}{% if onparent %}ONPARENT={{onparent}}
{%endif%}{% if ipv4_failure_fatal %}IPV4_FAILURE_FATAL="{{ipv4_failure_fatal}}"
{%endif%}{% if ipaddr %}IPADDR="{{ipaddr}}"
{%endif%}{% if ipaddr_start %}IPADDR_START="{{ipaddr_start}}"
{%endif%}{% if ipaddr_end %}IPADDR_END="{{ipaddr_end}}"
{%endif%}{% if netmask %}NETMASK="{{netmask}}"
{%endif%}{% if prefix %}PREFIX="{{prefix}}"
{%endif%}{% if gateway %}GATEWAY="{{gateway}}"
@@ -2402,6 +2402,16 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
'Default: %default.')
)

self.add_option(
'--preserve-minions',
default=False,
help=('Setting this to True prevents the master from deleting '
'the minion cache when keys are deleted, this may have '
'security implications if compromised minions auth with '
'a previous deleted minion ID. '
'Default: %default.')
)

key_options_group = optparse.OptionGroup(
self, 'Key Generation Options'
)
@@ -2501,6 +2511,13 @@ class SaltKeyOptionParser(six.with_metaclass(OptionParserMeta,
elif self.options.rotate_aes_key.lower() == 'false':
self.options.rotate_aes_key = False

def process_preserve_minions(self):
if hasattr(self.options, 'preserve_minions') and isinstance(self.options.preserve_minions, str):
if self.options.preserve_minions.lower() == 'true':
self.options.preserve_minions = True
elif self.options.preserve_minions.lower() == 'false':
self.options.preserve_minions = False

def process_list(self):
# Filter accepted list arguments as soon as possible
if not self.options.list:
@@ -2,42 +2,39 @@

# Import Python libs
from __future__ import absolute_import
import os
import tempfile

# Import Salt Libs
from salt.cloud.clouds import ec2
from salt.exceptions import SaltCloudSystemExit

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, PropertyMock


@skipIf(NO_MOCK, NO_MOCK_REASON)
class EC2TestCase(TestCase, LoaderModuleMockMixin):
class EC2TestCase(TestCase):
'''
Unit TestCase for salt.cloud.clouds.ec2 module.
'''

def setup_loader_modules(self):
return {ec2: {}}

def test__validate_key_path_and_mode(self):
with tempfile.NamedTemporaryFile() as f:
key_file = f.name

os.chmod(key_file, 0o644)
self.assertRaises(SaltCloudSystemExit,
ec2._validate_key_path_and_mode,
key_file)
os.chmod(key_file, 0o600)
self.assertTrue(ec2._validate_key_path_and_mode(key_file))
os.chmod(key_file, 0o400)
self.assertTrue(ec2._validate_key_path_and_mode(key_file))
# Key file exists
with patch('os.path.exists', return_value=True):
with patch('os.stat') as patched_stat:

# tmp file removed
self.assertRaises(SaltCloudSystemExit,
ec2._validate_key_path_and_mode,
key_file)
type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o644)
self.assertRaises(
SaltCloudSystemExit, ec2._validate_key_path_and_mode, 'key_file')

type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o600)
self.assertTrue(ec2._validate_key_path_and_mode('key_file'))

type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o400)
self.assertTrue(ec2._validate_key_path_and_mode('key_file'))

# Key file does not exist
with patch('os.path.exists', return_value=False):
self.assertRaises(
SaltCloudSystemExit, ec2._validate_key_path_and_mode, 'key_file')
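Editor's note: the rewritten ec2 test above stops chmod-ing a real temporary file and instead patches os.path.exists and os.stat, swapping the reported st_mode through PropertyMock. A condensed, standalone illustration of that mocking technique, using unittest.mock rather than Salt's tests.support.mock wrapper and a toy key_mode_ok() stand-in instead of the real ec2 helper:

import os
import stat
from unittest.mock import patch, PropertyMock

def key_mode_ok(path):
    # Toy stand-in for ec2._validate_key_path_and_mode.
    return os.path.exists(path) and stat.S_IMODE(os.stat(path).st_mode) in (0o400, 0o600)

with patch('os.path.exists', return_value=True), \
        patch('os.stat') as patched_stat:
    # os.stat() now returns a MagicMock; control its st_mode attribute.
    type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o644)
    assert key_mode_ok('key_file') is False
    type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o600)
    assert key_mode_ok('key_file') is True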
@@ -65,7 +65,8 @@ class TestGemModule(TestCase, LoaderModuleMockMixin):
with patch.dict(gem.__salt__,
{'rvm.is_installed': MagicMock(return_value=False),
'rbenv.is_installed': MagicMock(return_value=True),
'rbenv.do': mock}):
'rbenv.do': mock}),\
patch('salt.utils.is_windows', return_value=False):
gem._gem(['install', 'rails'])
mock.assert_called_once_with(
['gem', 'install', 'rails'],
@@ -21,8 +21,8 @@ from tests.support.mock import (
# Import Salt Libs
import salt.utils.files
import salt.utils.path
from salt.exceptions import CommandExecutionError
import salt.modules.mount as mount
from salt.exceptions import CommandExecutionError

MOCK_SHELL_FILE = 'A B C D F G\n'
@@ -34,7 +34,8 @@ class PamTestCase(TestCase):
'''
Test if the parsing function works
'''
with patch('salt.utils.files.fopen', mock_open(read_data=MOCK_FILE)):
with patch('os.path.exists', return_value=True), \
patch('salt.utils.files.fopen', mock_open(read_data=MOCK_FILE)):
self.assertListEqual(pam.read_file('/etc/pam.d/login'),
[{'arguments': [], 'control_flag': 'ok',
'interface': 'ok', 'module': 'ignore'}])
@@ -49,21 +49,24 @@ class PartedTestCase(TestCase, LoaderModuleMockMixin):

def test_virtual_bails_without_parted(self):
'''If parted not in PATH, __virtual__ shouldn't register module'''
with patch('salt.utils.path.which', lambda exe: not exe == "parted"):
with patch('salt.utils.path.which', lambda exe: not exe == "parted"),\
patch('salt.utils.is_windows', return_value=False):
ret = parted.__virtual__()
err = (False, 'The parted execution module failed to load parted binary is not in the path.')
self.assertEqual(err, ret)

def test_virtual_bails_without_lsblk(self):
'''If lsblk not in PATH, __virtual__ shouldn't register module'''
with patch('salt.utils.path.which', lambda exe: not exe == "lsblk"):
with patch('salt.utils.path.which', lambda exe: not exe == "lsblk"),\
patch('salt.utils.is_windows', return_value=False):
ret = parted.__virtual__()
err = (False, 'The parted execution module failed to load lsblk binary is not in the path.')
self.assertEqual(err, ret)

def test_virtual_bails_without_partprobe(self):
'''If partprobe not in PATH, __virtual__ shouldn't register module'''
with patch('salt.utils.path.which', lambda exe: not exe == "partprobe"):
with patch('salt.utils.path.which', lambda exe: not exe == "partprobe"),\
patch('salt.utils.is_windows', return_value=False):
ret = parted.__virtual__()
err = (False, 'The parted execution module failed to load partprobe binary is not in the path.')
self.assertEqual(err, ret)
@@ -18,6 +18,7 @@ from tests.support.mock import (

# Import Salt Libs
import salt.modules.pw_group as pw_group
import salt.utils


@skipIf(NO_MOCK, NO_MOCK_REASON)
@@ -44,6 +45,7 @@ class PwGroupTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(pw_group.__salt__, {'cmd.run_all': mock}):
self.assertTrue(pw_group.delete('a'))

@skipIf(salt.utils.is_windows(), 'grp not available on Windows')
def test_info(self):
'''
Tests to return information about a group
@@ -57,6 +59,7 @@ class PwGroupTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(pw_group.grinfo, mock):
self.assertDictEqual(pw_group.info('name'), {})

@skipIf(salt.utils.is_windows(), 'grp not available on Windows')
def test_getent(self):
'''
Tests for return info on all groups
@@ -80,15 +80,14 @@ class QemuNbdTestCase(TestCase, LoaderModuleMockMixin):
with patch.dict(qemu_nbd.__salt__, {'cmd.run': mock}):
self.assertEqual(qemu_nbd.init('/srv/image.qcow2'), '')

with patch.object(os.path, 'isfile', mock):
with patch.object(glob, 'glob',
MagicMock(return_value=['/dev/nbd0'])):
with patch.dict(qemu_nbd.__salt__,
{'cmd.run': mock,
'mount.mount': mock,
'cmd.retcode': MagicMock(side_effect=[1, 0])}):
self.assertDictEqual(qemu_nbd.init('/srv/image.qcow2'),
{'{0}/nbd/nbd0/nbd0'.format(tempfile.gettempdir()): '/dev/nbd0'})
with patch.object(os.path, 'isfile', mock),\
patch.object(glob, 'glob', MagicMock(return_value=['/dev/nbd0'])),\
patch.dict(qemu_nbd.__salt__,
{'cmd.run': mock,
'mount.mount': mock,
'cmd.retcode': MagicMock(side_effect=[1, 0])}):
expected = {os.sep.join([tempfile.gettempdir(), 'nbd', 'nbd0', 'nbd0']): '/dev/nbd0'}
self.assertDictEqual(qemu_nbd.init('/srv/image.qcow2'), expected)

# 'clear' function tests: 1
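Editor's note: several test hunks in this merge (gem, parted, pam, qemu_nbd, seed) show nested with-blocks alongside a flattened single with statement whose patches are chained with backslash continuations. The mechanics, reduced to a standalone example with made-up patch targets:

from unittest.mock import MagicMock, patch

# Nested form: every patch adds one indentation level.
with patch('os.path.isfile', MagicMock(return_value=True)):
    with patch('os.listdir', MagicMock(return_value=['nbd0'])):
        pass

# Flattened form: the same context managers in one block, entered left to
# right and exited in reverse order.
with patch('os.path.isfile', MagicMock(return_value=True)), \
        patch('os.listdir', MagicMock(return_value=['nbd0'])):
    pass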
@@ -47,14 +47,19 @@ class SeedTestCase(TestCase, LoaderModuleMockMixin):
'''
Test to update and get the random script to a random place
'''
with patch.dict(seed.__salt__,
{'config.gather_bootstrap_script': MagicMock(return_value='BS_PATH/BS')}):
with patch.object(uuid, 'uuid4', return_value='UUID'):
with patch.object(os.path, 'exists', return_value=True):
with patch.object(os, 'chmod', return_value=None):
with patch.object(shutil, 'copy', return_value=None):
self.assertEqual(seed.prep_bootstrap('MPT'), ('MPT/tmp/UUID/BS', '/tmp/UUID'))
self.assertEqual(seed.prep_bootstrap('/MPT'), ('/MPT/tmp/UUID/BS', '/tmp/UUID'))
with patch.dict(seed.__salt__, {'config.gather_bootstrap_script': MagicMock(return_value=os.path.join('BS_PATH', 'BS'))}),\
patch.object(uuid, 'uuid4', return_value='UUID'),\
patch.object(os.path, 'exists', return_value=True),\
patch.object(os, 'chmod', return_value=None),\
patch.object(shutil, 'copy', return_value=None):

expect = (os.path.join('MPT', 'tmp', 'UUID', 'BS'),
os.sep + os.path.join('tmp', 'UUID'))
self.assertEqual(seed.prep_bootstrap('MPT'), expect)

expect = (os.sep + os.path.join('MPT', 'tmp', 'UUID', 'BS'),
os.sep + os.path.join('tmp', 'UUID'))
self.assertEqual(seed.prep_bootstrap(os.sep + 'MPT'), expect)

def test_apply_(self):
'''