Merge pull request #36443 from rallytime/merge-carbon

[carbon] Merge forward from 2016.3 to carbon
Nicole Thomas, 2016-09-20 17:18:44 -06:00 (committed by GitHub)
commit 065c83c2f2
18 changed files with 256 additions and 33 deletions


@@ -14,10 +14,10 @@ open and proprietary projects.
To expand on this a little:
There is much argument over the actual definition of "open core". From our standpoint, Salt is open source because
1. It is a standalone product that anyone is free to use.
2. It is developed in the open with contributions accepted from the community for the good of the project.
3. There are no features of Salt itself that are restricted to separate proprietary products distributed by SaltStack, Inc.
4. Because of our Apache 2.0 license, Salt can be used as the foundation for a project or even a proprietary tool.
5. Our APIs are open and documented (any lack of documentation is an oversight as opposed to an intentional decision by SaltStack the company) and available for use by anyone.


@@ -109,7 +109,7 @@ a change is detected.
``salt myminion pkg.install python-inotify``.
First, on the Salt minion, add the following beacon configuration to
-``/ect/salt/minion``:
+``/etc/salt/minion``:
.. code-block:: yaml

requirements/windows.txt (new file)

@@ -0,0 +1,10 @@
+backports-abc
+backports.ssl-match-hostname
+certifi
+psutil
+python-dateutil
+pypiwin32
+pyzmq
+six
+timelib
+WMI


@@ -134,7 +134,7 @@ class LocalClient(object):
    def __init__(self,
                 c_path=os.path.join(syspaths.CONFIG_DIR, 'master'),
                 mopts=None, skip_perm_errors=False,
-                 io_loop=None):
+                 io_loop=None, keep_loop=False):
        '''
        :param IOLoop io_loop: io_loop used for events.
            Pass in an io_loop if you want asynchronous
@@ -163,7 +163,8 @@ class LocalClient(object):
            self.opts['transport'],
            opts=self.opts,
            listen=False,
-            io_loop=io_loop)
+            io_loop=io_loop,
+            keep_loop=keep_loop)
        self.utils = salt.loader.utils(self.opts)
        self.functions = salt.loader.minion_mods(self.opts, utils=self.utils)
        self.returners = salt.loader.returners(self.opts, self.functions)
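The new keep_loop flag is passed straight through to the client's event interface, so a caller that hands LocalClient an existing Tornado IOLoop can keep that loop alive after the client is torn down. A minimal sketch of the intended usage, assuming the carbon-era signature shown above (constructing LocalClient still requires a readable master config):

    import tornado.ioloop
    import salt.client

    io_loop = tornado.ioloop.IOLoop.current()
    # Share an application-owned loop with Salt; keep_loop=True asks the
    # client's event interface not to close that loop when the client's
    # event handle is destroyed.
    client = salt.client.LocalClient(io_loop=io_loop, keep_loop=True)
    # ... drive the client from coroutines running on io_loop ...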


@@ -709,12 +709,17 @@ class Client(object):
        elif not os.path.isabs(cachedir):
            cachedir = os.path.join(self.opts['cachedir'], cachedir)
+        if url_data.query is not None:
+            file_name = '-'.join([url_data.path, url_data.query])
+        else:
+            file_name = url_data.path
        return salt.utils.path_join(
            cachedir,
            'extrn_files',
            saltenv,
            netloc,
-            url_data.path
+            file_name
        )
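The added lines fold the query string into the cached file name, so two URLs that differ only in their query no longer collide in the extrn_files cache. A standalone sketch of the naming rule (the helper name is hypothetical, not the actual Salt method):

    import os
    from urllib.parse import urlparse

    def extrn_cache_name(cachedir, saltenv, url):
        # Mirror the logic above: append the query string (if any) to the path.
        url_data = urlparse(url)
        file_name = url_data.path
        if url_data.query:
            file_name = '-'.join([url_data.path, url_data.query])
        return os.path.join(cachedir, 'extrn_files', saltenv,
                            url_data.netloc, file_name.lstrip('/'))

    # URLs differing only by query now cache to distinct paths:
    #   <cachedir>/extrn_files/base/example.com/file.txt-v=1
    #   <cachedir>/extrn_files/base/example.com/file.txt-v=2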


@@ -1005,6 +1005,7 @@ _OS_NAME_MAP = {
    'enterprise': 'OEL',
    'oracleserv': 'OEL',
    'cloudserve': 'CloudLinux',
+    'cloudlinux': 'CloudLinux',
    'pidora': 'Fedora',
    'scientific': 'ScientificLinux',
    'synology': 'Synology',
@@ -1289,7 +1290,12 @@ def os_data():
                    grains[
                        'lsb_{0}'.format(match.groups()[0].lower())
                    ] = match.groups()[1].rstrip()
-        if 'lsb_distrib_id' not in grains:
+        if grains.get('lsb_distrib_description', '').lower().startswith('antergos'):
+            # Antergos incorrectly configures their /etc/lsb-release,
+            # setting the DISTRIB_ID to "Arch". This causes the "os" grain
+            # to be incorrectly set to "Arch".
+            grains['osfullname'] = 'Antergos Linux'
+        elif 'lsb_distrib_id' not in grains:
            if os.path.isfile('/etc/os-release') or os.path.isfile('/usr/lib/os-release'):
                os_release = _parse_os_release()
                if 'NAME' in os_release:
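Antergos ships an /etc/lsb-release whose DISTRIB_ID reads "Arch", so deriving the os grain from lsb_distrib_id alone mislabels the distribution; the new branch keys off the description instead. A rough illustration with hypothetical grain values:

    # Hypothetical values as parsed from an Antergos /etc/lsb-release
    grains = {
        'lsb_distrib_id': 'Arch',
        'lsb_distrib_description': 'Antergos Linux',
    }
    if grains.get('lsb_distrib_description', '').lower().startswith('antergos'):
        # Override the misleading DISTRIB_ID before the os grain is derived.
        grains['osfullname'] = 'Antergos Linux'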


@@ -1048,14 +1048,20 @@ def upgrade(refresh=True, dist_upgrade=False, **kwargs):
        force_conf = '--force-confnew'
    else:
        force_conf = '--force-confold'
    cmd = []
    if salt.utils.systemd.has_scope(__context__) \
            and __salt__['config.get']('systemd.scope', True):
        cmd.extend(['systemd-run', '--scope'])
    cmd.extend(['apt-get', '-q', '-y',
                '-o', 'DPkg::Options::={0}'.format(force_conf),
                '-o', 'DPkg::Options::=--force-confdef'])
+    if kwargs.get('force_yes', False):
+        cmd.append('--force-yes')
+    if kwargs.get('skip_verify', False):
+        cmd.append('--allow-unauthenticated')
    cmd.append('dist-upgrade' if dist_upgrade else 'upgrade')
    call = __salt__['cmd.run_all'](cmd,
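With these additions, pkg.upgrade can pass force_yes and skip_verify through to apt-get. A sketch of the resulting command assembly (the helper name is illustrative and systemd scope handling is omitted):

    def build_upgrade_cmd(dist_upgrade=False, force_yes=False, skip_verify=False,
                          force_conf='--force-confold'):
        cmd = ['apt-get', '-q', '-y',
               '-o', 'DPkg::Options::={0}'.format(force_conf),
               '-o', 'DPkg::Options::=--force-confdef']
        if force_yes:
            cmd.append('--force-yes')
        if skip_verify:
            # Unsigned/unauthenticated repositories are allowed when requested.
            cmd.append('--allow-unauthenticated')
        cmd.append('dist-upgrade' if dist_upgrade else 'upgrade')
        return cmd

    # build_upgrade_cmd(dist_upgrade=True, skip_verify=True) ->
    # ['apt-get', '-q', '-y', '-o', 'DPkg::Options::=--force-confold',
    #  '-o', 'DPkg::Options::=--force-confdef', '--allow-unauthenticated',
    #  'dist-upgrade']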


@@ -958,7 +958,7 @@ def create_key(key_name, save_path, region=None, key=None, keyid=None,
    .. code-block:: bash
-        salt myminion boto_ec2.create mykey /root/
+        salt myminion boto_ec2.create_key mykey /root/
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)


@@ -1299,6 +1299,7 @@ def get_account_id(region=None, key=None, keyid=None, profile=None):
        # The get_user call returns an user ARN:
        # arn:aws:iam::027050522557:user/salt-test
        arn = ret['get_user_response']['get_user_result']['user']['arn']
+        account_id = arn.split(':')[4]
    except boto.exception.BotoServerError:
        # If call failed, then let's try to get the ARN from the metadata
        timeout = boto.config.getfloat(
@@ -1307,15 +1308,15 @@ def get_account_id(region=None, key=None, keyid=None, profile=None):
        attempts = boto.config.getint(
            'Boto', 'metadata_service_num_attempts', 1
        )
-        metadata = boto.utils.get_instance_metadata(
+        identity = boto.utils.get_instance_identity(
            timeout=timeout, num_retries=attempts
        )
        try:
-            arn = metadata['iam']['info']['InstanceProfileArn']
+            account_id = identity['document']['accountId']
        except KeyError:
-            log.error('Failed to get user or metadata ARN information in'
+            log.error('Failed to get account id from instance_identity in'
                      ' boto_iam.get_account_id.')
-    __context__[cache_key] = arn.split(':')[4]
+    __context__[cache_key] = account_id
    return __context__[cache_key]
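The fallback no longer parses the account id out of an instance profile ARN; it reads it from the EC2 instance identity document, which is present even when no instance profile is attached. The two extraction paths, sketched with boto 2 calls and an illustrative ARN:

    # Primary path: the account id is the fifth field of the IAM user ARN.
    arn = 'arn:aws:iam::027050522557:user/salt-test'
    account_id = arn.split(':')[4]          # '027050522557'

    # Fallback path on EC2 (requires the instance metadata service):
    # identity = boto.utils.get_instance_identity(timeout=1.0, num_retries=1)
    # account_id = identity['document']['accountId']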


@@ -227,6 +227,9 @@ def get_arn(name, region=None, key=None, keyid=None, profile=None):
def _get_region(region=None, profile=None):
    if profile and 'region' in profile:
        return profile['region']
+    if not region and __salt__['config.option'](profile):
+        _profile = __salt__['config.option'](profile)
+        region = _profile.get('region', None)
    if not region and __salt__['config.option']('sns.region'):
        region = __salt__['config.option']('sns.region')
    if not region:


@@ -36,7 +36,7 @@ def __virtual__():
    '''
    Set the virtual pkg module if the os is Arch
    '''
-    if __grains__['os'] in ('Arch', 'Arch ARM', 'Antergos', 'ManjaroLinux'):
+    if __grains__['os_family'] == 'Arch':
        return __virtualname__
    return (False, 'The pacman module could not be loaded: unsupported OS family.')
@@ -155,10 +155,17 @@ def list_upgrades(refresh=False, root=None, **kwargs): # pylint: disable=W0613
    out = call['stdout']
    for line in salt.utils.itertools.split(out, '\n'):
-        comps = line.split(' ')
-        if len(comps) != 2:
-            continue
-        upgrades[comps[0]] = comps[1]
+        try:
+            pkgname, pkgver = line.split()
+        except ValueError:
+            continue
+        if pkgname.lower() == 'downloading' and '.db' in pkgver.lower():
+            # Antergos (and possibly other Arch derivatives) add lines when pkg
+            # metadata is being downloaded. Because these lines, when split,
+            # contain two columns (i.e. 'downloading community.db...'), we will
+            # skip this line to keep it from being interpreted as an upgrade.
+            continue
+        upgrades[pkgname] = pkgver
    return upgrades
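The parsing change tolerates the progress lines pacman emits while refreshing its databases. A small sketch of the new loop against representative output (the sample lines are illustrative):

    sample = (
        'downloading community.db...\n'
        'linux 4.7.4-1\n'
        'pacman 5.0.1-4\n'
    )
    upgrades = {}
    for line in sample.splitlines():
        try:
            pkgname, pkgver = line.split()
        except ValueError:
            continue
        if pkgname.lower() == 'downloading' and '.db' in pkgver.lower():
            continue  # skip database download noise
        upgrades[pkgname] = pkgver
    # upgrades == {'linux': '4.7.4-1', 'pacman': '5.0.1-4'}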


@@ -406,7 +406,13 @@ def stop(name):
        salt '*' service.stop <service name>
    '''
-    if not status(name):
+    # net stop issues a stop command and waits briefly (~30s), but will give
+    # up if the service takes too long to stop with a misleading
+    # "service could not be stopped" message and RC 0.
+    cmd = ['net', 'stop', '/y', name]
+    res = __salt__['cmd.run'](cmd, python_shell=False)
+    if 'service was stopped' in res:
        return True
    try:


@@ -900,7 +900,7 @@ def lowdata_fmt():
    # if the data was sent as urlencoded, we need to make it a list.
    # this is a very forgiving implementation as different clients set different
    # headers for form encoded data (including charset or something similar)
-    if data and not isinstance(data, list):
+    if data and isinstance(data, collections.Mapping):
        # Make the 'arg' param a list if not already
        if 'arg' in data and not isinstance(data['arg'], list):
            data['arg'] = [data['arg']]
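Checking for a Mapping instead of "not a list" matters because form-encoded requests arrive as a single dict, while JSON clients may post a list of lowstate dicts; only the dict case should have its 'arg' wrapped. A hedged sketch of that normalization with hypothetical inputs (not the full lowdata_fmt handler):

    import collections  # collections.abc.Mapping on newer Pythons

    def normalize(data):
        # Form-encoded: a single mapping whose 'arg' may be a bare string.
        if data and isinstance(data, collections.Mapping):
            if 'arg' in data and not isinstance(data['arg'], list):
                data['arg'] = [data['arg']]
            return [data]
        # JSON body: already a list of lowstate chunks; leave it alone.
        return data

    normalize({'client': 'local', 'tgt': '*', 'fun': 'test.ping', 'arg': 'foo'})
    # -> [{'client': 'local', 'tgt': '*', 'fun': 'test.ping', 'arg': ['foo']}]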


@@ -1818,6 +1818,8 @@ class State(object):
        Check if the low data chunk should send a failhard signal
        '''
        tag = _gen_tag(low)
+        if self.opts['test']:
+            return False
        if (low.get('failhard', False) or self.opts['failhard']
                and tag in running):
            if running[tag]['result'] is None:


@@ -1142,6 +1142,20 @@ def latest(name,
        else:
            branch_opts = None
+        if branch_opts is not None and local_branch is None:
+            return _fail(
+                ret,
+                'Cannot set/unset upstream tracking branch, local '
+                'HEAD refers to nonexistent branch. This may have '
+                'been caused by cloning a remote repository for which '
+                'the default branch was renamed or deleted. If you '
+                'are unable to fix the remote repository, you can '
+                'work around this by setting the \'branch\' argument '
+                '(which will ensure that the named branch is created '
+                'if it does not already exist).',
+                comments
+            )
        if not has_remote_rev:
            try:
                fetch_changes = __salt__['git.fetch'](
@@ -1561,6 +1575,21 @@ def latest(name,
        local_rev, local_branch = \
            _get_local_rev_and_branch(target, user, password)
+        if local_branch is None \
+                and remote_rev is not None \
+                and 'HEAD' not in all_remote_refs:
+            return _fail(
+                ret,
+                'Remote HEAD refers to a ref that does not exist. '
+                'This can happen when the default branch on the '
+                'remote repository is renamed or deleted. If you '
+                'are unable to fix the remote repository, you can '
+                'work around this by setting the \'branch\' argument '
+                '(which will ensure that the named branch is created '
+                'if it does not already exist).',
+                comments
+            )
        if not _revs_equal(local_rev, remote_rev, remote_rev_type):
            __salt__['git.reset'](
                target,
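Both new error messages point to the same workaround: pass an explicit branch so git.latest creates it rather than relying on the remote's missing HEAD. A hedged example of invoking the state that way from Python (the minion id and paths are placeholders):

    import salt.client

    local = salt.client.LocalClient()
    result = local.cmd(
        'myminion',
        'state.single',
        ['git.latest', 'https://example.com/repo.git'],
        kwarg={'target': '/srv/checkout', 'rev': 'develop', 'branch': 'develop'},
    )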


@@ -111,7 +111,7 @@ TAGS = {
def get_event(
        node, sock_dir=None, transport='zeromq',
-        opts=None, listen=True, io_loop=None):
+        opts=None, listen=True, io_loop=None, keep_loop=False):
    '''
    Return an event object suitable for the named transport
@@ -124,8 +124,8 @@ def get_event(
    # TODO: AIO core is separate from transport
    if transport in ('zeromq', 'tcp'):
        if node == 'master':
-            return MasterEvent(sock_dir, opts, listen=listen, io_loop=io_loop)
-        return SaltEvent(node, sock_dir, opts, listen=listen, io_loop=io_loop)
+            return MasterEvent(sock_dir, opts, listen=listen, io_loop=io_loop, keep_loop=keep_loop)
+        return SaltEvent(node, sock_dir, opts, listen=listen, io_loop=io_loop, keep_loop=keep_loop)
    elif transport == 'raet':
        import salt.utils.raetevent
        return salt.utils.raetevent.RAETEvent(node,
@@ -197,14 +197,19 @@ class SaltEvent(object):
    '''
    def __init__(
            self, node, sock_dir=None,
-            opts=None, listen=True, io_loop=None):
+            opts=None, listen=True, io_loop=None, keep_loop=False):
        '''
        :param IOLoop io_loop: Pass in an io_loop if you want asynchronous
                               operation for obtaining events. Eg use of
                               set_event_handler() API. Otherwise, operation
                               will be synchronous.
+        :param Bool keep_loop: Pass a boolean to determine if we want to keep
+                               the io loop or destroy it when the event handle
+                               is destroyed. This is useful when using event
+                               loops from within third party async code
        '''
        self.serial = salt.payload.Serial({'serial': 'msgpack'})
+        self.keep_loop = keep_loop
        if io_loop is not None:
            self.io_loop = io_loop
            self._run_io_loop_sync = False
@@ -727,7 +732,7 @@ class SaltEvent(object):
            self.subscriber.close()
        if self.pusher is not None:
            self.pusher.close()
-        if self._run_io_loop_sync:
+        if self._run_io_loop_sync and not self.keep_loop:
            self.io_loop.close()
    def fire_ret_load(self, load):
@@ -790,9 +795,20 @@ class MasterEvent(SaltEvent):
    RAET compatible
    Create a master event management object
    '''
-    def __init__(self, sock_dir, opts=None, listen=True, io_loop=None):
+    def __init__(
+            self,
+            sock_dir,
+            opts=None,
+            listen=True,
+            io_loop=None,
+            keep_loop=False):
        super(MasterEvent, self).__init__(
-            'master', sock_dir, opts, listen=listen, io_loop=io_loop)
+            'master',
+            sock_dir,
+            opts,
+            listen=listen,
+            io_loop=io_loop,
+            keep_loop=keep_loop)
class LocalClientEvent(MasterEvent):
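The keep_loop docstring is the heart of this change: when the event object is handed a loop owned by third-party async code, destroying the event handle must not close that loop. A minimal sketch of the new call path, assuming the carbon signatures above (config path and handler wiring are placeholders):

    import tornado.ioloop
    import salt.config
    import salt.utils.event

    opts = salt.config.client_config('/etc/salt/master')
    io_loop = tornado.ioloop.IOLoop.current()
    event = salt.utils.event.get_event(
        'master',
        sock_dir=opts['sock_dir'],
        transport=opts['transport'],
        opts=opts,
        listen=True,
        io_loop=io_loop,
        keep_loop=True,      # leave io_loop open when this handle is destroyed
    )
    # ... register callbacks via event.set_event_handler(...) on io_loop ...
    event.destroy()          # io_loop survives for the rest of the application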


@@ -119,6 +119,7 @@ SALT_VERSION = os.path.join(os.path.abspath(SETUP_DIRNAME), 'salt', 'version.py'
SALT_VERSION_HARDCODED = os.path.join(os.path.abspath(SETUP_DIRNAME), 'salt', '_version.py')
SALT_SYSPATHS_HARDCODED = os.path.join(os.path.abspath(SETUP_DIRNAME), 'salt', '_syspaths.py')
SALT_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'requirements', 'base.txt')
+SALT_WINDOWS_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'requirements', 'windows.txt')
SALT_ZEROMQ_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'requirements', 'zeromq.txt')
SALT_RAET_REQS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'requirements', 'raet.txt')
@@ -384,11 +385,11 @@ class InstallPyCryptoWindowsWheel(Command):
        call_arguments = ['pip', 'install', 'wheel']
        if platform_bits == '64bit':
            call_arguments.append(
-                'http://repo.saltstack.com/windows/dependencies/64/pycrypto-2.6.1-cp27-none-win_amd64.whl'
+                'https://repo.saltstack.com/windows/dependencies/64/pycrypto-2.6.1-cp27-none-win_amd64.whl'
            )
        else:
            call_arguments.append(
-                'http://repo.saltstack.com/windows/dependencies/32/pycrypto-2.6.1-cp27-none-win32.whl'
+                'https://repo.saltstack.com/windows/dependencies/32/pycrypto-2.6.1-cp27-none-win32.whl'
            )
        with indent_log():
            call_subprocess(call_arguments)
@@ -415,11 +416,11 @@ class InstallCompiledPyYaml(Command):
        call_arguments = ['easy_install', '-Z']
        if platform_bits == '64bit':
            call_arguments.append(
-                'http://repo.saltstack.com/windows/dependencies/64/PyYAML-3.11.win-amd64-py2.7.exe'
+                'https://repo.saltstack.com/windows/dependencies/64/PyYAML-3.11.win-amd64-py2.7.exe'
            )
        else:
            call_arguments.append(
-                'http://repo.saltstack.com/windows/dependencies/32/PyYAML-3.11.win-amd64-py2.7.exe'
+                'https://repo.saltstack.com/windows/dependencies/32/PyYAML-3.11.win32-py2.7.exe'
            )
        with indent_log():
            call_subprocess(call_arguments)
@@ -442,7 +443,7 @@ class DownloadWindowsDlls(Command):
        import platform
        from pip.utils.logging import indent_log
        platform_bits, _ = platform.architecture()
-        url = 'http://repo.saltstack.com/windows/dependencies/{bits}/{fname}.dll'
+        url = 'https://repo.saltstack.com/windows/dependencies/{bits}/{fname}.dll'
        dest = os.path.join(os.path.dirname(sys.executable), '{fname}.dll')
        with indent_log():
            for fname in ('libeay32', 'ssleay32', 'libsodium', 'msvcr120'):
@@ -1023,8 +1024,7 @@ class SaltDistribution(distutils.dist.Distribution):
        install_requires = _parse_requirements_file(SALT_REQS)
        if IS_WINDOWS_PLATFORM:
-            install_requires.append('WMI')
-            install_requires.append('pypiwin32 >= 219')
+            install_requires += _parse_requirements_file(SALT_WINDOWS_REQS)
        if self.salt_transport == 'zeromq':
            install_requires += _parse_requirements_file(SALT_ZEROMQ_REQS)


@@ -8,6 +8,7 @@ from __future__ import absolute_import
import os
import shutil
import socket
+import string
import subprocess
import tempfile
@@ -328,6 +329,136 @@ class GitTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
        self.assertSaltTrueReturn(ret)
+@skip_if_binaries_missing('git')
+class LocalRepoGitTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
+    '''
+    Tests which do not require connectivity to github.com
+    '''
+    def test_renamed_default_branch(self):
+        '''
+        Test the case where the remote branch has been removed
+        https://github.com/saltstack/salt/issues/36242
+        '''
+        cwd = os.getcwd()
+        repo = tempfile.mkdtemp(dir=integration.TMP)
+        admin = tempfile.mkdtemp(dir=integration.TMP)
+        name = tempfile.mkdtemp(dir=integration.TMP)
+        for dirname in (repo, admin, name):
+            self.addCleanup(shutil.rmtree, dirname, ignore_errors=True)
+        self.addCleanup(os.chdir, cwd)
+        with salt.utils.fopen(os.devnull, 'w') as devnull:
+            # Create bare repo
+            subprocess.check_call(['git', 'init', '--bare', repo],
+                                  stdout=devnull, stderr=devnull)
+            # Clone bare repo
+            subprocess.check_call(['git', 'clone', repo, admin],
+                                  stdout=devnull, stderr=devnull)
+            # Create, add, commit, and push file
+            os.chdir(admin)
+            with salt.utils.fopen('foo', 'w'):
+                pass
+            subprocess.check_call(['git', 'add', '.'],
+                                  stdout=devnull, stderr=devnull)
+            subprocess.check_call(['git', 'commit', '-m', 'init'],
+                                  stdout=devnull, stderr=devnull)
+            subprocess.check_call(['git', 'push', 'origin', 'master'],
+                                  stdout=devnull, stderr=devnull)
+        # Change back to the original cwd
+        os.chdir(cwd)
+        # Rename remote 'master' branch to 'develop'
+        os.rename(
+            os.path.join(repo, 'refs', 'heads', 'master'),
+            os.path.join(repo, 'refs', 'heads', 'develop')
+        )
+        # Run git.latest state. This should successfully clone and fail with a
+        # specific error in the comment field.
+        ret = self.run_state(
+            'git.latest',
+            name=repo,
+            target=name,
+            rev='develop',
+        )
+        self.assertSaltFalseReturn(ret)
+        self.assertEqual(
+            ret[next(iter(ret))]['comment'],
+            'Remote HEAD refers to a ref that does not exist. '
+            'This can happen when the default branch on the '
+            'remote repository is renamed or deleted. If you '
+            'are unable to fix the remote repository, you can '
+            'work around this by setting the \'branch\' argument '
+            '(which will ensure that the named branch is created '
+            'if it does not already exist).\n\n'
+            'Changes already made: {0} cloned to {1}'
+            .format(repo, name)
+        )
+        self.assertEqual(
+            ret[next(iter(ret))]['changes'],
+            {'new': '{0} => {1}'.format(repo, name)}
+        )
+        # Run git.latest state again. This should fail again, with a different
+        # error in the comment field, and should not change anything.
+        ret = self.run_state(
+            'git.latest',
+            name=repo,
+            target=name,
+            rev='develop',
+        )
+        self.assertSaltFalseReturn(ret)
+        self.assertEqual(
+            ret[next(iter(ret))]['comment'],
+            'Cannot set/unset upstream tracking branch, local '
+            'HEAD refers to nonexistent branch. This may have '
+            'been caused by cloning a remote repository for which '
+            'the default branch was renamed or deleted. If you '
+            'are unable to fix the remote repository, you can '
+            'work around this by setting the \'branch\' argument '
+            '(which will ensure that the named branch is created '
+            'if it does not already exist).'
+        )
+        self.assertEqual(ret[next(iter(ret))]['changes'], {})
+        # Run git.latest state again with a branch manually set. This should
+        # check out a new branch and the state should pass.
+        ret = self.run_state(
+            'git.latest',
+            name=repo,
+            target=name,
+            rev='develop',
+            branch='develop',
+        )
+        # State should succeed
+        self.assertSaltTrueReturn(ret)
+        self.assertSaltCommentRegexpMatches(
+            ret,
+            'New branch \'develop\' was checked out, with origin/develop '
+            r'\([0-9a-f]{7}\) as a starting point'
+        )
+        # Only the revision should be in the changes dict.
+        self.assertEqual(
+            list(ret[next(iter(ret))]['changes'].keys()),
+            ['revision']
+        )
+        # Since the remote repo was incorrectly set up, the local head should
+        # not exist (therefore the old revision should be None).
+        self.assertEqual(
+            ret[next(iter(ret))]['changes']['revision']['old'],
+            None
+        )
+        # Make sure the new revision is a SHA (40 chars, all hex)
+        self.assertTrue(
+            len(ret[next(iter(ret))]['changes']['revision']['new']) == 40)
+        self.assertTrue(
+            all([x in string.hexdigits for x in
+                 ret[next(iter(ret))]['changes']['revision']['new']])
+        )
if __name__ == '__main__':
    from integration import run_tests
    run_tests(GitTest)