Merge pull request #49777 from terminalmage/issue49738

Fix parallel states with long ID dec or name
Nicole Thomas 2018-09-30 14:46:02 -04:00 committed by GitHub
commit 297031b042
4 changed files with 89 additions and 54 deletions


@@ -35,6 +35,7 @@ import salt.fileclient
 import salt.utils.crypt
 import salt.utils.dictupdate
 import salt.utils.event
+import salt.utils.hashutils
 import salt.utils.url
 import salt.utils.process
 import salt.utils.files
@@ -1732,7 +1733,9 @@ class State(object):
             ret['duration'] = duration

         troot = os.path.join(self.opts['cachedir'], self.jid)
-        tfile = os.path.join(troot, _clean_tag(tag))
+        tfile = os.path.join(
+            troot,
+            salt.utils.hashutils.sha1_digest(tag))
         if not os.path.isdir(troot):
             try:
                 os.makedirs(troot)
@@ -2091,7 +2094,10 @@ class State(object):
             proc = running[tag].get('proc')
             if proc:
                 if not proc.is_alive():
-                    ret_cache = os.path.join(self.opts['cachedir'], self.jid, _clean_tag(tag))
+                    ret_cache = os.path.join(
+                        self.opts['cachedir'],
+                        self.jid,
+                        salt.utils.hashutils.sha1_digest(tag))
                     if not os.path.isfile(ret_cache):
                         ret = {'result': False,
                                'comment': 'Parallel process failed to return',
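Why the digest matters in these two hunks: the parallel-state cache file was previously named after the chunk's tag, and that tag embeds the state's ID declaration and name, so a long `- name:` value could push the filename past the 255-byte limit most filesystems enforce on a single path component. A SHA1 digest is always 40 hex characters regardless of input length. A minimal illustrative sketch follows; the tag string and paths are made up for illustration, not the exact output of Salt's tag generation:

import hashlib
import os

# Illustrative tag for a cmd.run chunk with a very long name
# (real tags follow the 'state_|-id_|-name_|-function' pattern).
long_name = 'helloworld' * 25
tag = 'cmd_|-test_cmd_too_long_|-{0}_|-run'.format(long_name)

old_cache_name = tag                                      # old scheme: filename grows with the name
new_cache_name = hashlib.sha1(tag.encode()).hexdigest()   # new scheme: fixed-length digest

print(len(old_cache_name))   # 282 -- past the usual 255-byte filename limit
print(len(new_cache_name))   # always 40

# Hypothetical cachedir and jid, just to show where the name ends up.
print(os.path.join('/var/cache/salt/minion', '20180930123456789012', new_cache_name))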


@@ -82,6 +82,16 @@ def md5_digest(instr):
     return hashlib.md5(instr).hexdigest()


+def sha1_digest(instr):
+    '''
+    Generate an sha1 hash of a given string.
+    '''
+    if six.PY3:
+        b = salt.utils.to_bytes(instr)
+        return hashlib.sha1(b).hexdigest()
+    return hashlib.sha1(instr).hexdigest()
+
+
 def sha256_digest(instr):
     '''
     Generate an sha256 hash of a given string.
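One detail worth noting in the new helper: on Python 3, hashlib only accepts bytes, so the string is run through salt.utils.to_bytes first; on Python 2 a plain str is already a byte string. A stdlib-only sketch of the same behaviour, using sys.version_info in place of six and assuming UTF-8 (which is what to_bytes typically resolves to):

import hashlib
import sys

def sha1_digest(instr):
    '''
    Return the SHA1 hex digest of a string, encoding it first on Python 3.
    '''
    if sys.version_info[0] >= 3 and isinstance(instr, str):
        instr = instr.encode('utf-8')  # hashlib rejects text strings on Python 3
    return hashlib.sha1(instr).hexdigest()

# Always 40 hex characters, no matter how long the tag is.
print(sha1_digest('cmd_|-test_cmd_too_long_|-helloworld_|-run'))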


@@ -0,0 +1,9 @@
+test_cmd_too_long:
+  cmd.run:
+    - name: {{ pillar['long_command'] }}
+    - parallel: True
+
+test_cmd_not_found:
+  cmd.run:
+    - name: {{ pillar['short_command'] }}
+    - parallel: True
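The two `name` parameters in this new state file come from pillar, which the new integration test further down injects at call time. A rough sketch of how the Jinja lookups resolve with the values that test uses (plain Jinja2 here; Salt's renderer supplies pillar and more context, but the substitution is the same):

from jinja2 import Template

# Pillar values matching what the new test passes to state.sls.
pillar = {'short_command': 'helloworld',
          'long_command': 'helloworld' * 25}

line = Template("- name: {{ pillar['long_command'] }}").render(pillar=pillar)
print(line[:28] + '...')            # '- name: helloworldhelloworld...'
print(len(pillar['long_command']))  # 250 chars, which pushed the old tag-based cache filename past 255 bytes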


@@ -4,6 +4,7 @@
 from __future__ import absolute_import
 import os
 import shutil
+import sys
 import tempfile
 import textwrap
 import threading
@@ -12,48 +13,20 @@ import time

 # Import Salt Testing libs
 from tests.support.case import ModuleCase
 from tests.support.unit import skipIf
-from tests.support.paths import TMP, FILES
+from tests.support.paths import TMP, BASE_FILES
 from tests.support.mixins import SaltReturnAssertsMixin

 # Import salt libs
 import salt.utils
+import salt.utils.atomicfile
 from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES

 # Import 3rd-party libs
 import salt.ext.six as six

 DEFAULT_ENDING = salt.utils.to_bytes(os.linesep)

-
-def trim_line_end(line):
-    '''
-    Remove CRLF or LF from the end of line.
-    '''
-    if line[-2:] == salt.utils.to_bytes('\r\n'):
-        return line[:-2]
-    elif line[-1:] == salt.utils.to_bytes('\n'):
-        return line[:-1]
-    raise Exception("Invalid line ending")
-
-
-def reline(source, dest, force=False, ending=DEFAULT_ENDING):
-    '''
-    Normalize the line endings of a file.
-    '''
-    fp, tmp = tempfile.mkstemp()
-    os.close(fp)
-    with salt.utils.fopen(tmp, 'wb') as tmp_fd:
-        with salt.utils.fopen(source, 'rb') as fd:
-            lines = fd.readlines()
-            for line in lines:
-                line_noend = trim_line_end(line)
-                tmp_fd.write(line_noend + ending)
-    if os.path.exists(dest) and force:
-        os.remove(dest)
-    os.rename(tmp, dest)
-
-
 class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):
     '''
     Validate the state module
@@ -61,12 +34,22 @@ class StateModuleTest(ModuleCase, SaltReturnAssertsMixin):

     maxDiff = None

-    def setUp(self):
-        super(StateModuleTest, self).setUp()
-        destpath = os.path.join(FILES, 'file', 'base', 'testappend', 'firstif')
-        reline(destpath, destpath, force=True)
-        destpath = os.path.join(FILES, 'file', 'base', 'testappend', 'secondif')
-        reline(destpath, destpath, force=True)
+    @classmethod
+    def setUpClass(cls):
+        def _reline(path, ending=DEFAULT_ENDING):
+            '''
+            Normalize the line endings of a file.
+            '''
+            with salt.utils.fopen(path, 'rb') as fhr:
+                lines = fhr.read().splitlines()
+            with salt.utils.atomicfile.atomic_open(path, 'wb') as fhw:
+                for line in lines:
+                    fhw.write(line + ending)
+
+        destpath = os.path.join(BASE_FILES, 'testappend', 'firstif')
+        _reline(destpath)
+        destpath = os.path.join(BASE_FILES, 'testappend', 'secondif')
+        _reline(destpath)

     def test_show_highstate(self):
         '''
@@ -1423,20 +1406,47 @@
         test state.sls with saltenv using a nonbase environment
         with a salt source
         '''
-        file_name = os.path.join(TMP, 'nonbase_env')
-        state_run = self.run_function(
-            'state.sls',
-            mods='non-base-env',
-            saltenv='prod'
-        )
-        state_id = 'file_|-test_file_|-{0}_|-managed'.format(file_name)
-        self.assertEqual(state_run[state_id]['comment'],
-                         'File {0} updated'.format(file_name))
-        self.assertTrue(
-            state_run['file_|-test_file_|-{0}_|-managed'.format(file_name)]['result'])
-        self.assertTrue(os.path.isfile(file_name))
-
-    def tearDown(self):
-        nonbase_file = os.path.join(TMP, 'nonbase_env')
-        if os.path.isfile(nonbase_file):
-            os.remove(nonbase_file)
+        filename = os.path.join(TMP, 'nonbase_env')
+        try:
+            ret = self.run_function(
+                'state.sls',
+                mods='non-base-env',
+                saltenv='prod'
+            )
+            ret = ret[next(iter(ret))]
+            assert ret['result']
+            assert ret['comment'] == 'File {0} updated'.format(filename)
+            assert os.path.isfile(filename)
+        finally:
+            try:
+                os.remove(filename)
+            except OSError:
+                pass
+
+    @skipIf(sys.platform.startswith('win'), 'Skipped until parallel states can be fixed on Windows')
+    def test_parallel_state_with_long_tag(self):
+        '''
+        This tests the case where the state being executed has a long ID dec or
+        name and states are being run in parallel. The filenames used for the
+        parallel state cache were previously based on the tag for each chunk,
+        and longer ID decs or name params can cause the cache file to be longer
+        than the operating system's max file name length. To counter this we
+        instead generate a SHA1 hash of the chunk's tag to use as the cache
+        filename. This test will ensure that long tags don't cause caching
+        failures.
+
+        See https://github.com/saltstack/salt/issues/49738 for more info.
+        '''
+        short_command = 'helloworld'
+        long_command = short_command * 25
+
+        ret = self.run_function(
+            'state.sls',
+            mods='issue-49738',
+            pillar={'short_command': short_command,
+                    'long_command': long_command}
+        )
+        comments = sorted([x['comment'] for x in six.itervalues(ret)])
+        expected = sorted(['Command "{0}" run'.format(x)
+                           for x in (short_command, long_command)])
+        assert comments == expected, '{0} != {1}'.format(comments, expected)