Mirror of https://github.com/valitydev/salt.git (synced 2024-11-06 16:45:27 +00:00)
Merge branch '2018.3' into 'fluorine'
No conflicts.
Commit: f6943f0241

.github/CODEOWNERS (vendored): 5 lines changed
@@ -1,14 +1,11 @@
 # SALTSTACK CODE OWNERS

 # See https://help.github.com/articles/about-codeowners/
-# for more info about CODEOWNERS file
+# for more info about the CODEOWNERS file

 # Lines starting with '#' are comments.
 # Each line is a file pattern followed by one or more owners.

-# See https://help.github.com/articles/about-codeowners/
-# for more info about the CODEOWNERS file
-
 # This file uses an fnmatch-style matching pattern.

 # Team Boto
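As the retained comments note, each CODEOWNERS line is a file pattern followed by one or more owners, matched fnmatch-style. A minimal sketch of how that style of matching behaves, using Python's fnmatch module; the pattern and path below are hypothetical and not taken from the Salt file:

import fnmatch

# Hypothetical CODEOWNERS-style pattern and candidate path, for illustration only.
pattern = 'salt/*/*boto*'
path = 'salt/modules/boto_vpc.py'

# In fnmatch-style matching, '*' matches any run of characters, so this
# pattern would pair boto-related module paths with their owning team.
print(fnmatch.fnmatch(path, pattern))  # True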
@@ -25,6 +25,8 @@ class ServiceModuleTest(ModuleCase):
         os_family = self.run_function('grains.get', ['os_family'])
         os_release = self.run_function('grains.get', ['osrelease'])
         if os_family == 'RedHat':
+            if os_release[0] == '7':
+                self.skipTest('Disabled on CentOS 7 until we can fix SSH connection issues.')
             self.service_name = 'crond'
         elif os_family == 'Arch':
             self.service_name = 'sshd'
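The added guard reads the first character of the osrelease grain to detect the major version before the service name is set. A standalone sketch of the same check, with hard-coded grain values standing in for grains.get (the values are assumptions, not from a live minion):

import unittest


class ServiceSetupSketch(unittest.TestCase):
    def setUp(self):
        # Stand-in grain values for illustration; the real test queries grains.get.
        os_family = 'RedHat'
        os_release = '7.6'

        if os_family == 'RedHat':
            # os_release is a string, so os_release[0] looks only at the
            # leading digit of the major version, as in the change above.
            if os_release[0] == '7':
                self.skipTest('Disabled on CentOS 7 until we can fix SSH connection issues.')
            self.service_name = 'crond'
        elif os_family == 'Arch':
            self.service_name = 'sshd'

    def test_service_name_set(self):
        # Skipped on the stand-in CentOS 7 values; runs elsewhere.
        self.assertTrue(self.service_name)


if __name__ == '__main__':
    unittest.main()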
@@ -47,14 +47,14 @@ class FilesTestCase(TestCase):
         if os.stat(tmp).st_ino == 0:
             self.skipTest('inodes not supported in {0}'.format(tmp))
         os.mkdir(os.path.join(tmp, 'fax'))
-        os.makedirs(os.path.join(tmp, 'foo/bar'))
-        os.symlink('../..', os.path.join(tmp, 'foo/bar/baz'))
+        os.makedirs(os.path.join(tmp, 'foo', 'bar'))
+        os.symlink(os.path.join('..', '..'), os.path.join(tmp, 'foo', 'bar', 'baz'))
         os.symlink('foo', os.path.join(tmp, 'root'))
         expected = [
             (os.path.join(tmp, 'root'), ['bar'], []),
-            (os.path.join(tmp, 'root/bar'), ['baz'], []),
-            (os.path.join(tmp, 'root/bar/baz'), ['fax', 'foo', 'root'], []),
-            (os.path.join(tmp, 'root/bar/baz/fax'), [], []),
+            (os.path.join(tmp, 'root', 'bar'), ['baz'], []),
+            (os.path.join(tmp, 'root', 'bar', 'baz'), ['fax', 'foo', 'root'], []),
+            (os.path.join(tmp, 'root', 'bar', 'baz', 'fax'), [], []),
         ]
         paths = []
         for root, dirs, names in salt.utils.files.safe_walk(os.path.join(tmp, 'root')):
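This hunk replaces hard-coded '/' separators with os.path.join so the fixture builds the same tree on Windows and POSIX. A minimal sketch of the idea using only the standard library; the directory names are illustrative, and os.walk stands in for salt.utils.files.safe_walk to keep the example self-contained:

import os
import tempfile

tmp = tempfile.mkdtemp()

# os.path.join inserts the platform-specific separator, so the same code
# yields 'foo\\bar' on Windows and 'foo/bar' on POSIX.
nested = os.path.join(tmp, 'foo', 'bar')
os.makedirs(nested)

for root, dirs, names in os.walk(os.path.join(tmp, 'foo')):
    print(root, dirs, names)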
@@ -386,7 +386,7 @@ class TestGetTemplate(TestCase):
         self.assertEqual(fc.requests[0]['path'], 'salt://macro')

         filename = os.path.join(self.template_dir, 'non_ascii')
-        with salt.utils.files.fopen(filename) as fp_:
+        with salt.utils.files.fopen(filename, 'rb') as fp_:
             out = render_jinja_tmpl(
                 salt.utils.stringutils.to_unicode(fp_.read(), 'utf-8'),
                 dict(opts={'cachedir': self.tempdir, 'file_client': 'remote',
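Both template tests now open the file in binary mode and decode explicitly, so the bytes-to-text step no longer depends on the platform's default encoding. A minimal sketch of the same pattern with the standard library; the file name and contents are illustrative, while Salt itself uses salt.utils.files.fopen and salt.utils.stringutils.to_unicode:

import os
import tempfile

# Write a non-ASCII body for the round trip; the content is only an example.
path = os.path.join(tempfile.mkdtemp(), 'non_ascii')
with open(path, 'wb') as fp_:
    fp_.write('Assunção\n'.encode('utf-8'))

# Read raw bytes, then decode explicitly instead of relying on the locale encoding.
with open(path, 'rb') as fp_:
    text = fp_.read().decode('utf-8')

print(text)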
@@ -453,7 +453,7 @@ class TestGetTemplate(TestCase):
             saltenv='test',
             salt=self.local_salt
         )
-        with salt.utils.files.fopen(out['data']) as fp:
+        with salt.utils.files.fopen(out['data'], 'rb') as fp:
             result = salt.utils.stringutils.to_unicode(fp.read(), 'utf-8')
         self.assertEqual(salt.utils.stringutils.to_unicode('Assunção' + os.linesep), result)

@@ -137,9 +137,9 @@ class JSONTestCase(TestCase):
         '''
         Test dumping to and loading from a file handle
         '''
-        with salt.utils.files.fopen(json_out, 'w') as fp_:
-            salt.utils.json.dump(self.data, fp_)
-        with salt.utils.files.fopen(json_out, 'r') as fp_:
-            ret = salt.utils.json.load(fp_)
+        with salt.utils.files.fopen(json_out, 'wb') as fp_:
+            fp_.write(salt.utils.to_bytes(salt.utils.json.dumps(self.data)))
+        with salt.utils.files.fopen(json_out, 'rb') as fp_:
+            ret = salt.utils.json.loads(salt.utils.to_unicode(fp_.read()))
         # Loading should be equal to the original data
         self.assertEqual(ret, self.data)
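The JSON test switches to an explicit bytes round trip: serialize to text, encode before writing to the binary handle, then decode before parsing on the way back. A minimal sketch of that round trip using only the standard library json module; the file name and data are illustrative, and Salt wraps the equivalent steps in salt.utils.json, salt.utils.to_bytes, and salt.utils.to_unicode:

import json
import os
import tempfile

data = {'name': 'Assunção'}  # non-ASCII value exercises the encode/decode path
json_out = os.path.join(tempfile.mkdtemp(), 'test.json')

# Dump: serialize to a string, then encode explicitly before writing binary.
with open(json_out, 'wb') as fp_:
    fp_.write(json.dumps(data).encode('utf-8'))

# Load: read bytes, decode explicitly, then parse.
with open(json_out, 'rb') as fp_:
    ret = json.loads(fp_.read().decode('utf-8'))

assert ret == data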