salt/tests/integration/states/test_archive.py
Erik Johnson: Use explicit unicode strings + break up salt.utils
This PR is part of what will be an ongoing effort to use explicit
unicode strings in Salt. Because Python 3 does not support Python 2's raw
unicode string syntax (i.e. `ur'\d+'`), we must use
`salt.utils.locales.sdecode()` to ensure that the raw string is unicode.
However, because of how `salt/utils/__init__.py` has evolved into the
hulking monstrosity it is today, this means importing a large module in
places where it is not needed, which could negatively impact
performance. For this reason, this PR also breaks out some of the
functions from `salt/utils/__init__.py` into new/existing modules under
`salt/utils/`. The long term goal will be that the modules within this
directory do not depend on importing `salt.utils`.
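
For illustration, a minimal sketch of that substitution (the regex pattern
here is just a placeholder, not something taken from this PR):

```python
# Python 2 only -- ``ur'...'`` is a SyntaxError under Python 3:
#     PATTERN = ur'\d+'
#
# Portable replacement: keep a plain raw string and decode it explicitly.
import salt.utils.locales

PATTERN = salt.utils.locales.sdecode(r'\d+')
```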

A summary of the changes in this PR is as follows:

* Moves the following functions from `salt.utils` to new locations
  (including a deprecation warning if invoked from `salt.utils`; see the
  wrapper sketch after this list):
  `to_bytes`, `to_str`, `to_unicode`, `str_to_num`, `is_quoted`,
  `dequote`, `is_hex`, `is_bin_str`, `rand_string`,
  `contains_whitespace`, `clean_kwargs`, `invalid_kwargs`, `which`,
  `which_bin`, `path_join`, `shlex_split`, `rand_str`, `is_windows`,
  `is_proxy`, `is_linux`, `is_darwin`, `is_sunos`, `is_smartos`,
  `is_smartos_globalzone`, `is_smartos_zone`, `is_freebsd`, `is_netbsd`,
  `is_openbsd`, `is_aix`
* Moves the functions already deprecated by @rallytime to the bottom of
  `salt/utils/__init__.py` for better organization, so we can keep the
  deprecated ones separate from the ones yet to be deprecated as we
  continue to break up `salt.utils`
* Updates `salt/*.py` and all files under `salt/client/` to use explicit
  unicode string literals.
* Gets rid of implicit imports of `salt.utils` (e.g. `from salt.utils
  import foo` becomes `import salt.utils.foo as foo`).
* Renames the `test.rand_str` function to `test.random_hash` to more
  accurately reflect what it does
* Modifies `salt.utils.stringutils.random()` (née `salt.utils.rand_string()`)
  such that it returns a string matching the passed size. Previously, this
  function would get `size` bytes from `os.urandom()`, base64-encode them,
  and return the result, whose length would in most cases not equal the
  passed size (see the sizing sketch after this list).
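
As a rough illustration of the warn-and-delegate shims mentioned in the
first bullet (the use of the stdlib `warnings` module is an assumption here;
the actual shims may go through Salt's own deprecation helpers):

```python
# Hypothetical compatibility shim left in salt/utils/__init__.py after the
# real implementation has moved to salt.utils.path.
import warnings

import salt.utils.path


def which(exe):
    '''
    Deprecated: warn, then delegate to the relocated function.
    '''
    warnings.warn(
        "'salt.utils.which' is deprecated, use 'salt.utils.path.which' instead.",
        DeprecationWarning,
    )
    return salt.utils.path.which(exe)
```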
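
And a sketch of the sizing change described in the last bullet; truncating
the base64 output is an assumed approach, shown only to contrast the old and
new return lengths:

```python
import base64
import os


def rand_string_old(size=32):
    # Old behaviour: base64-encoding ``size`` random bytes yields a string
    # roughly 4/3 as long as ``size`` (plus padding), so the length rarely
    # matches the requested size.
    return base64.b64encode(os.urandom(size)).decode('ascii')


def random_new(size=32):
    # New behaviour (assumed here via simple truncation): the result is
    # exactly ``size`` characters long.
    return base64.b64encode(os.urandom(size)).decode('ascii')[:size]
```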
2017-08-08 13:33:43 -05:00


# -*- coding: utf-8 -*-
'''
Tests for the archive state
'''
# Import Python libs
from __future__ import absolute_import
import errno
import logging
import os

# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.helpers import skip_if_not_root, Webserver
from tests.support.mixins import SaltReturnAssertsMixin
from tests.support.paths import FILES

# Import Salt libs
import salt.utils.files
import salt.utils.platform

# Setup logging
log = logging.getLogger(__name__)

ARCHIVE_DIR = os.path.join('c:/', 'tmp') \
    if salt.utils.platform.is_windows() \
    else '/tmp/archive'

ARCHIVE_NAME = 'custom.tar.gz'
ARCHIVE_TAR_SOURCE = 'http://localhost:{0}/{1}'.format(9999, ARCHIVE_NAME)
ARCHIVE_LOCAL_TAR_SOURCE = 'file://{0}'.format(os.path.join(FILES, 'file', 'base', ARCHIVE_NAME))
UNTAR_FILE = os.path.join(ARCHIVE_DIR, 'custom/README')
ARCHIVE_TAR_HASH = 'md5=7643861ac07c30fe7d2310e9f25ca514'
ARCHIVE_TAR_BAD_HASH = 'md5=d41d8cd98f00b204e9800998ecf8427e'


class ArchiveTest(ModuleCase, SaltReturnAssertsMixin):
    '''
    Validate the archive state
    '''
    @classmethod
    def setUpClass(cls):
        cls.webserver = Webserver()
        cls.webserver.start()
        cls.archive_tar_source = cls.webserver.url('custom.tar.gz')

    @classmethod
    def tearDownClass(cls):
        cls.webserver.stop()

    def setUp(self):
        self._clear_archive_dir()

    def tearDown(self):
        self._clear_archive_dir()

    @staticmethod
    def _clear_archive_dir():
        try:
            salt.utils.files.rm_rf(ARCHIVE_DIR)
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise

    def _check_extracted(self, path):
        '''
        function to check if file was extracted
        '''
        log.debug('Checking for extracted file: %s', path)
        self.assertTrue(os.path.isfile(path))

    def test_archive_extracted_skip_verify(self):
        '''
        test archive.extracted with skip_verify
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source, archive_format='tar',
                             skip_verify=True)
        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    def test_archive_extracted_with_source_hash(self):
        '''
        test archive.extracted without skip_verify
        only external resources work to check to
        ensure source_hash is verified correctly
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source, archive_format='tar',
                             source_hash=ARCHIVE_TAR_HASH)
        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    @skip_if_not_root
    def test_archive_extracted_with_root_user_and_group(self):
        '''
        test archive.extracted with user and group set to "root"
        '''
        r_group = 'root'
        if salt.utils.platform.is_darwin():
            r_group = 'wheel'
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source, archive_format='tar',
                             source_hash=ARCHIVE_TAR_HASH,
                             user='root', group=r_group)
        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    def test_archive_extracted_with_strip_in_options(self):
        '''
        test archive.extracted with --strip in options
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source,
                             source_hash=ARCHIVE_TAR_HASH,
                             options='--strip=1',
                             enforce_toplevel=False)
        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(os.path.join(ARCHIVE_DIR, 'README'))

    def test_archive_extracted_with_strip_components_in_options(self):
        '''
        test archive.extracted with --strip-components in options
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source,
                             source_hash=ARCHIVE_TAR_HASH,
                             options='--strip-components=1',
                             enforce_toplevel=False)
        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(os.path.join(ARCHIVE_DIR, 'README'))

    def test_archive_extracted_without_archive_format(self):
        '''
        test archive.extracted with no archive_format option
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source,
                             source_hash=ARCHIVE_TAR_HASH)
        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    def test_archive_extracted_with_cmd_unzip_false(self):
        '''
        test archive.extracted using use_cmd_unzip argument as false
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=self.archive_tar_source,
                             source_hash=ARCHIVE_TAR_HASH,
                             use_cmd_unzip=False,
                             archive_format='tar')
        log.debug('ret = %s', ret)
        if 'Timeout' in ret:
            self.skipTest('Timeout talking to local tornado server.')
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    def test_local_archive_extracted(self):
        '''
        test archive.extracted with local file
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar')
        log.debug('ret = %s', ret)
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    def test_local_archive_extracted_skip_verify(self):
        '''
        test archive.extracted with local file, bad hash and skip_verify
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar',
                             source_hash=ARCHIVE_TAR_BAD_HASH, skip_verify=True)
        log.debug('ret = %s', ret)
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    def test_local_archive_extracted_with_source_hash(self):
        '''
        test archive.extracted with local file and valid hash
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar',
                             source_hash=ARCHIVE_TAR_HASH)
        log.debug('ret = %s', ret)
        self.assertSaltTrueReturn(ret)
        self._check_extracted(UNTAR_FILE)

    def test_local_archive_extracted_with_bad_source_hash(self):
        '''
        test archive.extracted with local file and bad hash
        '''
        ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
                             source=ARCHIVE_LOCAL_TAR_SOURCE, archive_format='tar',
                             source_hash=ARCHIVE_TAR_BAD_HASH)
        log.debug('ret = %s', ret)
        self.assertSaltFalseReturn(ret)