Mirror of https://github.com/valitydev/salt.git (synced 2024-11-07 17:09:03 +00:00)

Commit 3e96abaa8d: Merge branch 'develop' of https://github.com/saltstack/salt into defaults_merge_enhancements
@ -321,6 +321,117 @@ option on the Salt master.

    master_port: 4506

.. conf_minion:: source_interface_name

``source_interface_name``
-------------------------

.. versionadded:: Oxygen

The name of the interface to use when establishing the connection to the Master.

.. note::

    If multiple IP addresses are configured on the named interface,
    the first one will be selected. In that case, for a better selection,
    consider using the :conf_minion:`source_address` option.

.. note::

    To use an IPv6 address from the named interface, make sure the option
    :conf_minion:`ipv6` is enabled, i.e., ``ipv6: true``.

.. note::

    If the named interface is down, it will not be used; the Minion
    will instead bind to ``0.0.0.0`` (all interfaces).

.. warning::

    This option requires a modern version of the underlying libraries used by
    the selected transport:

    - ``zeromq`` requires ``pyzmq`` >= 16.0.1 and ``libzmq`` >= 4.1.6
    - ``tcp`` requires ``tornado`` >= 4.5

Configuration example:

.. code-block:: yaml

    source_interface_name: bond0.1234

.. conf_minion:: source_address

``source_address``
------------------

.. versionadded:: Oxygen

The source IP address or the domain name to be used when connecting the Minion
to the Master.
See :conf_minion:`ipv6` for IPv6 connections to the Master.

.. warning::

    This option requires a modern version of the underlying libraries used by
    the selected transport:

    - ``zeromq`` requires ``pyzmq`` >= 16.0.1 and ``libzmq`` >= 4.1.6
    - ``tcp`` requires ``tornado`` >= 4.5

Configuration example:

.. code-block:: yaml

    source_address: if-bond0-1234.sjc.us-west.internal

.. conf_minion:: source_ret_port

``source_ret_port``
-------------------

.. versionadded:: Oxygen

The source port to be used when connecting the Minion to the Master ret server.

.. warning::

    This option requires a modern version of the underlying libraries used by
    the selected transport:

    - ``zeromq`` requires ``pyzmq`` >= 16.0.1 and ``libzmq`` >= 4.1.6
    - ``tcp`` requires ``tornado`` >= 4.5

Configuration example:

.. code-block:: yaml

    source_ret_port: 49017

.. conf_minion:: source_publish_port

``source_publish_port``
-----------------------

.. versionadded:: Oxygen

The source port to be used when connecting the Minion to the Master publish
server.

.. warning::

    This option requires a modern version of the underlying libraries used by
    the selected transport:

    - ``zeromq`` requires ``pyzmq`` >= 16.0.1 and ``libzmq`` >= 4.1.6
    - ``tcp`` requires ``tornado`` >= 4.5

Configuration example:

.. code-block:: yaml

    source_publish_port: 49018

.. conf_minion:: user

``user``
@ -65,6 +65,18 @@ noon PST so the Stormpath external authentication module has been removed.

https://stormpath.com/oktaplusstormpath


New (Proxy) Minion Configuration Options
----------------------------------------

To be able to connect the Minion to the Master using a certain source IP address
or port, the following options have been added:

- :conf_minion:`source_interface_name`
- :conf_minion:`source_address`
- :conf_minion:`source_ret_port`
- :conf_minion:`source_publish_port`

:conf_minion:`environment` config option renamed to :conf_minion:`saltenv`
--------------------------------------------------------------------------

@ -161,7 +173,7 @@ environments (i.e. ``saltenvs``) have been added:
available as saltenvs.

Additional output modes
------------------
-----------------------

The ``state_output`` parameter now supports ``full_id``, ``changes_id`` and ``terse_id``.
Just like ``mixed_id``, these use the state ID as the name in the highstate output.
@ -3652,6 +3652,65 @@ def revert_to_snapshot(name, kwargs=None, call=None):
|
||||
return msg
|
||||
|
||||
|
||||
def remove_snapshot(name, kwargs=None, call=None):
|
||||
'''
|
||||
Remove a snapshot of the specified virtual machine in this VMware environment
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt-cloud -a remove_snapshot vmname snapshot_name="mySnapshot"
|
||||
salt-cloud -a remove_snapshot vmname snapshot_name="mySnapshot" [remove_children="True"]
|
||||
'''
|
||||
|
||||
if call != 'action':
|
||||
raise SaltCloudSystemExit(
|
||||
'The remove_snapshot action must be called with '
|
||||
'-a or --action.'
|
||||
)
|
||||
|
||||
if kwargs is None:
|
||||
kwargs = {}
|
||||
|
||||
snapshot_name = kwargs.get('snapshot_name') if kwargs and 'snapshot_name' in kwargs else None
|
||||
remove_children = _str_to_bool(kwargs.get('remove_children', False))
|
||||
|
||||
if not snapshot_name:
|
||||
raise SaltCloudSystemExit(
|
||||
'You must specify a snapshot name for the snapshot to be deleted.'
|
||||
)
|
||||
|
||||
vm_ref = salt.utils.vmware.get_mor_by_property(_get_si(), vim.VirtualMachine, name)
|
||||
|
||||
if not _get_snapshot_ref_by_name(vm_ref, snapshot_name):
|
||||
raise SaltCloudSystemExit(
|
||||
'Could not find the snapshot with the specified name.'
|
||||
)
|
||||
|
||||
try:
|
||||
snap_obj = _get_snapshot_ref_by_name(vm_ref, snapshot_name).snapshot
|
||||
task = snap_obj.RemoveSnapshot_Task(remove_children)
|
||||
salt.utils.vmware.wait_for_task(task, name, 'remove snapshot', 5, 'info')
|
||||
|
||||
except Exception as exc:
|
||||
log.error(
|
||||
'Error while removing snapshot of {0}: {1}'.format(
|
||||
name,
|
||||
exc
|
||||
),
|
||||
# Show the traceback if the debug logging level is enabled
|
||||
exc_info_on_loglevel=logging.DEBUG
|
||||
)
|
||||
return 'failed to remove snapshot'
|
||||
|
||||
if vm_ref.snapshot:
|
||||
return {'Snapshot removed successfully': _get_snapshots(vm_ref.snapshot.rootSnapshotList,
|
||||
vm_ref.snapshot.currentSnapshot)}
|
||||
else:
|
||||
return 'Snapshots removed successfully'
|
||||
|
||||
|
||||
def remove_all_snapshots(name, kwargs=None, call=None):
|
||||
'''
|
||||
Remove all the snapshots present for the specified virtual machine.
|
||||
|
@ -124,6 +124,15 @@ VALID_OPTS = {
|
||||
# master address will not be split into IP and PORT.
|
||||
'master_uri_format': str,
|
||||
|
||||
# The following options refer to the Minion only, and they specify
|
||||
# the details of the source address / port to be used when connecting to
|
||||
# the Master. This is useful when dealing with machines where due to firewall
|
||||
# rules you are restricted to use a certain IP/port combination only.
|
||||
'source_interface_name': str,
|
||||
'source_address': str,
|
||||
'source_ret_port': (six.string_types, int),
|
||||
'source_publish_port': (six.string_types, int),
|
||||
|
||||
# The fingerprint of the master key may be specified to increase security. Generate
|
||||
# a master fingerprint with `salt-key -F master`
|
||||
'master_finger': str,
|
||||
@ -1152,6 +1161,10 @@ DEFAULT_MINION_OPTS = {
|
||||
'master': 'salt',
|
||||
'master_type': 'str',
|
||||
'master_uri_format': 'default',
|
||||
'source_interface_name': '',
|
||||
'source_address': '',
|
||||
'source_ret_port': 0,
|
||||
'source_publish_port': 0,
|
||||
'master_port': 4506,
|
||||
'master_finger': '',
|
||||
'master_shuffle': False,
|
||||
|
@ -670,7 +670,7 @@ class SaltLoadModules(ioflo.base.deeding.Deed):
|
||||
)
|
||||
modules_max_memory = True
|
||||
old_mem_limit = resource.getrlimit(resource.RLIMIT_AS)
|
||||
rss, vms = psutil.Process(os.getpid()).memory_info()
|
||||
rss, vms = psutil.Process(os.getpid()).memory_info()[:2]
|
||||
mem_limit = rss + vms + self.opts.value['modules_max_memory']
|
||||
resource.setrlimit(resource.RLIMIT_AS, (mem_limit, mem_limit))
|
||||
elif self.opts.value.get('modules_max_memory', -1) > 0:
|
||||
|
@ -1152,6 +1152,9 @@ class AESFuncs(object):
|
||||
return {}
|
||||
return self.masterapi._master_tops(load, skip_verify=True)
|
||||
|
||||
# Needed so older minions can request master_tops
|
||||
_ext_nodes = _master_tops
|
||||
|
||||
def _master_opts(self, load):
|
||||
'''
|
||||
Return the master options to the minion
|
||||
|
@ -214,8 +214,38 @@ def resolve_dns(opts, fallback=True):
|
||||
u'Master ip address changed from %s to %s',
|
||||
opts[u'master_ip'], ret[u'master_ip']
|
||||
)
|
||||
if opts[u'source_interface_name']:
|
||||
log.trace('Custom source interface required: %s', opts[u'source_interface_name'])
|
||||
interfaces = salt.utils.network.interfaces()
|
||||
log.trace('The following interfaces are available on this Minion:')
|
||||
log.trace(interfaces)
|
||||
if opts[u'source_interface_name'] in interfaces:
|
||||
if interfaces[opts[u'source_interface_name']]['up']:
|
||||
addrs = interfaces[opts[u'source_interface_name']]['inet'] if not opts[u'ipv6'] else\
|
||||
interfaces[opts[u'source_interface_name']]['inet6']
|
||||
ret[u'source_ip'] = addrs[0]['address']
|
||||
log.debug('Using %s as source IP address', ret[u'source_ip'])
|
||||
else:
|
||||
log.warning('The interface %s is down so it cannot be used as source to connect to the Master',
|
||||
opts[u'source_interface_name'])
|
||||
else:
|
||||
log.warning('%s is not a valid interface. Ignoring.', opts[u'source_interface_name'])
|
||||
elif opts[u'source_address']:
|
||||
ret[u'source_ip'] = salt.utils.network.dns_check(
|
||||
opts[u'source_address'],
|
||||
int(opts[u'source_ret_port']),
|
||||
True,
|
||||
opts[u'ipv6'])
|
||||
log.debug('Using %s as source IP address', ret[u'source_ip'])
|
||||
if opts[u'source_ret_port']:
|
||||
ret[u'source_ret_port'] = int(opts[u'source_ret_port'])
|
||||
log.debug('Using %d as source port for the ret server', ret[u'source_ret_port'])
|
||||
if opts[u'source_publish_port']:
|
||||
ret[u'source_publish_port'] = int(opts[u'source_publish_port'])
|
||||
log.debug('Using %d as source port for the master pub', ret[u'source_publish_port'])
|
||||
ret[u'master_uri'] = u'tcp://{ip}:{port}'.format(
|
||||
ip=ret[u'master_ip'], port=opts[u'master_port'])
|
||||
log.debug('Master URI: %s', ret[u'master_uri'])
|
||||
|
||||
return ret
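# Illustrative note, not part of the patch: with a minion config such as
# ``source_interface_name: bond0`` (interface up, IPv4) and ``source_ret_port: 49017``,
# the dictionary returned above would contain, besides ``master_ip`` and
# ``master_uri``, entries along the lines of
#   {'source_ip': '<first inet address of bond0>', 'source_ret_port': 49017}
# which presumably end up in the minion opts, where the transport changes later in
# this commit read them via ``opts.get('source_ip')`` / ``opts.get('source_ret_port')``.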
|
||||
|
||||
|
@ -133,3 +133,17 @@ def merge(dest, src, merge_lists=False, in_place=True):
|
||||
else:
|
||||
merged = copy.deepcopy(dest)
|
||||
return dictupdate.update(merged, src, merge_lists=merge_lists)
|
||||
|
||||
|
||||
def deepcopy(source):
|
||||
'''
|
||||
defaults.deepcopy
|
||||
Allows deep copy of objects in formulas.
|
||||
|
||||
By default, Python does not copy objects; it creates bindings
between a target and an object.
|
||||
|
||||
It is more typical to use this in a templating language in formulas,
|
||||
instead of directly on the command-line.
|
||||
'''
|
||||
return copy.deepcopy(source)
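# Illustrative usage sketch (assumption, not part of the patch): in a formula
# template one might write
#   {% set conf = salt['defaults.deepcopy'](default_settings) %}
# so that later changes to ``conf`` do not mutate the shared ``default_settings``
# object, which a plain assignment (a binding, not a copy) would.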
|
@ -106,6 +106,7 @@ import inspect
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
||||
import salt.utils.files
|
||||
|
||||
from operator import attrgetter
|
||||
@ -130,7 +131,7 @@ log = logging.getLogger(__name__)
|
||||
debug = False
|
||||
|
||||
__virtualname__ = 'dockercompose'
|
||||
dc_filename = 'docker-compose.yml'
|
||||
DEFAULT_DC_FILENAMES = ('docker-compose.yml', 'docker-compose.yaml')
|
||||
|
||||
|
||||
def __virtual__():
|
||||
@ -168,27 +169,45 @@ def __standardize_result(status, message, data=None, debug_msg=None):
|
||||
return result
|
||||
|
||||
|
||||
def __read_docker_compose(path):
|
||||
def __get_docker_file_path(path):
|
||||
'''
|
||||
Read the docker-compose.yml file if it exists in the directory
|
||||
Determines the filepath to use
|
||||
|
||||
:param path:
|
||||
:return:
|
||||
'''
|
||||
if os.path.isfile(os.path.join(path, dc_filename)) is False:
|
||||
if os.path.isfile(path):
|
||||
return path
|
||||
for dc_filename in DEFAULT_DC_FILENAMES:
|
||||
file_path = os.path.join(path, dc_filename)
|
||||
if os.path.isfile(file_path):
|
||||
return file_path
|
||||
# implicitly return None
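# Behaviour sketch (illustrative paths, assuming they exist on disk):
#   __get_docker_file_path('/srv/app/docker-compose.yml') -> that path, if it is a file
#   __get_docker_file_path('/srv/app') -> '/srv/app/docker-compose.yml' or
#       '/srv/app/docker-compose.yaml', whichever exists first
#   __get_docker_file_path('/srv/missing') -> None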
|
||||
|
||||
|
||||
def __read_docker_compose_file(file_path):
|
||||
'''
|
||||
Read the compose file if it exists in the directory
|
||||
|
||||
:param file_path:
|
||||
:return:
|
||||
'''
|
||||
if not os.path.isfile(file_path):
|
||||
return __standardize_result(False,
|
||||
'Path does not exist or docker-compose.yml is not present',
|
||||
'Path {} is not present'.format(file_path),
|
||||
None, None)
|
||||
f = salt.utils.files.fopen(os.path.join(path, dc_filename), 'r') # pylint: disable=resource-leakage
|
||||
result = {'docker-compose.yml': ''}
|
||||
if f:
|
||||
for line in f:
|
||||
result['docker-compose.yml'] += line
|
||||
f.close()
|
||||
else:
|
||||
return __standardize_result(False, 'Could not read docker-compose.yml file.',
|
||||
try:
|
||||
with salt.utils.files.fopen(file_path, 'r') as fl:
|
||||
file_name = os.path.basename(file_path)
|
||||
result = {file_name: ''}
|
||||
for line in fl:
|
||||
result[file_name] += line
|
||||
except EnvironmentError:
|
||||
return __standardize_result(False,
|
||||
'Could not read {0}'.format(file_path),
|
||||
None, None)
|
||||
return __standardize_result(True, 'Reading content of docker-compose.yml file',
|
||||
return __standardize_result(True,
|
||||
'Reading content of {0}'.format(file_path),
|
||||
result, None)
|
||||
|
||||
|
||||
@ -204,33 +223,54 @@ def __write_docker_compose(path, docker_compose):
|
||||
|
||||
:return:
|
||||
'''
|
||||
|
||||
if os.path.isdir(path) is False:
|
||||
os.mkdir(path)
|
||||
f = salt.utils.files.fopen(os.path.join(path, dc_filename), 'w') # pylint: disable=resource-leakage
|
||||
if f:
|
||||
f.write(docker_compose)
|
||||
f.close()
|
||||
if path.lower().endswith(('.yml', '.yaml')):
|
||||
file_path = path
|
||||
dir_name = os.path.dirname(path)
|
||||
else:
|
||||
dir_name = path
|
||||
file_path = os.path.join(dir_name, DEFAULT_DC_FILENAMES[0])
|
||||
if os.path.isdir(dir_name) is False:
|
||||
os.mkdir(dir_name)
|
||||
try:
|
||||
with salt.utils.files.fopen(file_path, 'w') as fl:
|
||||
fl.write(docker_compose)
|
||||
except EnvironmentError:
|
||||
return __standardize_result(False,
|
||||
'Could not write docker-compose file in {0}'.format(path),
|
||||
'Could not write {0}'.format(file_path),
|
||||
None, None)
|
||||
project = __load_project(path)
|
||||
project = __load_project_from_file_path(file_path)
|
||||
if isinstance(project, dict):
|
||||
os.remove(os.path.join(path, dc_filename))
|
||||
os.remove(file_path)
|
||||
return project
|
||||
return path
|
||||
return file_path
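# Path-handling sketch (illustrative): a ``path`` ending in .yml/.yaml is written
# as-is, e.g. '/srv/app/stack.yaml'; any other ``path`` is treated as a directory
# and the content is written to os.path.join(path, 'docker-compose.yml').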
|
||||
|
||||
|
||||
def __load_project(path):
|
||||
'''
|
||||
Load a docker-compose project from path
|
||||
|
||||
:param path:
|
||||
:return:
|
||||
'''
|
||||
file_path = __get_docker_file_path(path)
|
||||
if file_path is None:
|
||||
msg = 'Could not find docker-compose file at {0}'.format(path)
|
||||
return __standardize_result(False,
|
||||
msg,
|
||||
None, None)
|
||||
return __load_project_from_file_path(file_path)
|
||||
|
||||
|
||||
def __load_project_from_file_path(file_path):
|
||||
'''
|
||||
Load a docker-compose project from file path
|
||||
|
||||
:param path:
|
||||
:return:
|
||||
'''
|
||||
try:
|
||||
project = get_project(path)
|
||||
project = get_project(project_dir=os.path.dirname(file_path),
|
||||
config_path=[os.path.basename(file_path)])
|
||||
except Exception as inst:
|
||||
return __handle_except(inst)
|
||||
return project
|
||||
@ -286,8 +326,12 @@ def get(path):
|
||||
|
||||
salt myminion dockercompose.get /path/where/docker-compose/stored
|
||||
'''
|
||||
|
||||
salt_result = __read_docker_compose(path)
|
||||
file_path = __get_docker_file_path(path)
|
||||
if file_path is None:
|
||||
return __standardize_result(False,
|
||||
'Path {} is not present'.format(path),
|
||||
None, None)
|
||||
salt_result = __read_docker_compose_file(file_path)
|
||||
if not salt_result['status']:
|
||||
return salt_result
|
||||
project = __load_project(path)
|
||||
@ -322,7 +366,10 @@ def create(path, docker_compose):
|
||||
return __standardize_result(False,
|
||||
'Creating a docker-compose project failed, you must send a valid docker-compose file',
|
||||
None, None)
|
||||
return __standardize_result(True, 'Successfully created the docker-compose file', {'compose.base_dir': path}, None)
|
||||
return __standardize_result(True,
|
||||
'Successfully created the docker-compose file',
|
||||
{'compose.base_dir': path},
|
||||
None)
|
||||
|
||||
|
||||
def pull(path, service_names=None):
|
||||
|
@ -176,8 +176,9 @@ def _query(function,
|
||||
if result.get('status', None) == salt.ext.six.moves.http_client.OK:
|
||||
response = hipchat_functions.get(api_version).get(function).get('response')
|
||||
return result.get('dict', {}).get(response, None)
|
||||
elif result.get('status', None) == salt.ext.six.moves.http_client.NO_CONTENT:
|
||||
return False
|
||||
elif result.get('status', None) == salt.ext.six.moves.http_client.NO_CONTENT and \
|
||||
api_version == 'v2':
|
||||
return True
|
||||
else:
|
||||
log.debug(url)
|
||||
log.debug(query_params)
|
||||
|
@ -236,7 +236,7 @@ def neighbors(neighbor=None, **kwargs):
|
||||
'local_address_configured' : True,
|
||||
'local_port' : 179,
|
||||
'remote_address' : u'192.247.78.0',
|
||||
'router_id': : u'192.168.0.1',
|
||||
'router_id' : u'192.168.0.1',
|
||||
'remote_port' : 58380,
|
||||
'multihop' : False,
|
||||
'import_policy' : u'4-NTT-TRANSIT-IN',
|
||||
|
@ -230,7 +230,7 @@ def _config_logic(napalm_device,
|
||||
|
||||
|
||||
@salt.utils.napalm.proxy_napalm_wrap
|
||||
def connected(**kwarvs): # pylint: disable=unused-argument
|
||||
def connected(**kwargs): # pylint: disable=unused-argument
|
||||
'''
|
||||
Specifies if the connection to the device succeeded.
|
||||
|
||||
@ -1178,6 +1178,7 @@ def load_config(filename=None,
|
||||
debug=False,
|
||||
replace=False,
|
||||
inherit_napalm_device=None,
|
||||
saltenv='base',
|
||||
**kwargs): # pylint: disable=unused-argument
|
||||
'''
|
||||
Applies configuration changes on the device. It can be loaded from a file or from inline string.
|
||||
@ -1193,10 +1194,21 @@ def load_config(filename=None,
|
||||
To replace the config, set ``replace`` to ``True``.
|
||||
|
||||
filename
|
||||
Path to the file containing the desired configuration. By default is None.
|
||||
Path to the file containing the desired configuration.
|
||||
This can be specified using the absolute path to the file,
|
||||
or using one of the following URL schemes:
|
||||
|
||||
- ``salt://``, to fetch the template from the Salt fileserver.
|
||||
- ``http://`` or ``https://``
|
||||
- ``ftp://``
|
||||
- ``s3://``
|
||||
- ``swift://``
|
||||
|
||||
.. versionchanged:: Oxygen
|
||||
|
||||
text
|
||||
String containing the desired configuration.
|
||||
This argument is ignored when ``filename`` is specified.
|
||||
|
||||
test: False
|
||||
Dry run? If set as ``True``, will apply the config, discard and return the changes. Default: ``False``
|
||||
@ -1216,6 +1228,11 @@ def load_config(filename=None,
|
||||
|
||||
.. versionadded:: 2016.11.2
|
||||
|
||||
saltenv: ``base``
|
||||
Specifies the Salt environment name.
|
||||
|
||||
.. versionadded:: Oxygen
|
||||
|
||||
:return: a dictionary having the following keys:
|
||||
|
||||
* result (bool): if the config was applied successfully. It is ``False`` only in case of failure. In case \
|
||||
@ -1246,7 +1263,6 @@ def load_config(filename=None,
|
||||
'diff': '[edit interfaces xe-0/0/5]+ description "Adding a description";'
|
||||
}
|
||||
'''
|
||||
|
||||
fun = 'load_merge_candidate'
|
||||
if replace:
|
||||
fun = 'load_replace_candidate'
|
||||
@ -1259,11 +1275,22 @@ def load_config(filename=None,
|
||||
# compare_config, discard / commit
|
||||
# which have to be over the same session
|
||||
napalm_device['CLOSE'] = False # pylint: disable=undefined-variable
|
||||
if filename:
|
||||
text = __salt__['cp.get_file_str'](filename, saltenv=saltenv)
|
||||
if text is False:
|
||||
# When using salt:// or https://, if the resource is not available,
|
||||
# it will either raise an exception, or return False.
|
||||
ret = {
|
||||
'result': False,
|
||||
'out': None
|
||||
}
|
||||
ret['comment'] = 'Unable to read from {}. Please specify a valid file or text.'.format(filename)
|
||||
log.error(ret['comment'])
|
||||
return ret
|
||||
_loaded = salt.utils.napalm.call(
|
||||
napalm_device, # pylint: disable=undefined-variable
|
||||
fun,
|
||||
**{
|
||||
'filename': filename,
|
||||
'config': text
|
||||
}
|
||||
)
|
||||
|
@ -113,6 +113,17 @@ def servers(**kwargs): # pylint: disable=unused-argument
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' ntp.servers
|
||||
|
||||
Example output:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
[
|
||||
'192.168.0.1',
|
||||
'172.17.17.1',
|
||||
'172.17.17.2',
|
||||
'2400:cb00:6:1024::c71b:840a'
|
||||
]
|
||||
'''
|
||||
|
||||
ntp_servers = salt.utils.napalm.call(
|
||||
|
@ -142,7 +142,7 @@ def results(**kwargs): # pylint: disable=unused-argument
|
||||
'last_test_loss' : 0,
|
||||
'round_trip_jitter' : -59.0,
|
||||
'target' : '192.168.0.1',
|
||||
'source' : '192.168.0.2'
|
||||
'source' : '192.168.0.2',
|
||||
'probe_count' : 15,
|
||||
'current_test_min_delay': 63.138
|
||||
},
|
||||
@ -160,7 +160,7 @@ def results(**kwargs): # pylint: disable=unused-argument
|
||||
'last_test_loss' : 0,
|
||||
'round_trip_jitter' : -34.0,
|
||||
'target' : '172.17.17.1',
|
||||
'source' : '172.17.17.2'
|
||||
'source' : '172.17.17.2',
|
||||
'probe_count' : 15,
|
||||
'current_test_min_delay': 176.402
|
||||
}
|
||||
|
@ -82,7 +82,7 @@ def show(destination, protocol=None, **kwargs): # pylint: disable=unused-argume
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt 'my_router' route.show
|
||||
salt 'my_router' route.show 172.16.0.0/25
|
||||
salt 'my_router' route.show 172.16.0.0/25 bgp
|
||||
|
||||
Output example:
|
||||
|
@ -69,7 +69,7 @@ def _get_root_object(models):
|
||||
# -----------------------------------------------------------------------------
|
||||
|
||||
|
||||
def diff(candidate, running, models):
|
||||
def diff(candidate, running, *models):
|
||||
'''
|
||||
Returns the difference between two configuration entities structured
|
||||
according to the YANG model.
|
||||
@ -119,6 +119,9 @@ def diff(candidate, running, models):
|
||||
}
|
||||
}
|
||||
'''
|
||||
if isinstance(models, tuple) and isinstance(models[0], list):
|
||||
models = models[0]
|
||||
|
||||
first = _get_root_object(models)
|
||||
first.load_dict(candidate)
|
||||
second = _get_root_object(models)
|
||||
@ -127,7 +130,7 @@ def diff(candidate, running, models):
|
||||
|
||||
|
||||
@proxy_napalm_wrap
|
||||
def parse(models, **kwargs):
|
||||
def parse(*models, **kwargs):
|
||||
'''
|
||||
Parse configuration from the device.
|
||||
|
||||
@ -340,6 +343,8 @@ def parse(models, **kwargs):
|
||||
}
|
||||
}
|
||||
'''
|
||||
if isinstance(models, tuple) and isinstance(models[0], list):
|
||||
models = models[0]
|
||||
config = kwargs.pop('config', False)
|
||||
state = kwargs.pop('state', False)
|
||||
profiles = kwargs.pop('profiles', [])
|
||||
@ -360,7 +365,7 @@ def parse(models, **kwargs):
|
||||
|
||||
|
||||
@proxy_napalm_wrap
|
||||
def get_config(data, models, **kwargs):
|
||||
def get_config(data, *models, **kwargs):
|
||||
'''
|
||||
Return the native config.
|
||||
|
||||
@ -393,6 +398,8 @@ def get_config(data, models, **kwargs):
|
||||
description Uplink2
|
||||
mtu 9000
|
||||
'''
|
||||
if isinstance(models, tuple) and isinstance(models[0], list):
|
||||
models = models[0]
|
||||
profiles = kwargs.pop('profiles', [])
|
||||
if not profiles and hasattr(napalm_device, 'profile'): # pylint: disable=undefined-variable
|
||||
profiles = napalm_device.profile # pylint: disable=undefined-variable
|
||||
@ -410,7 +417,7 @@ def get_config(data, models, **kwargs):
|
||||
|
||||
|
||||
@proxy_napalm_wrap
|
||||
def load_config(data, models, **kwargs):
|
||||
def load_config(data, *models, **kwargs):
|
||||
'''
|
||||
Generate and load the config on the device using the OpenConfig or IETF
|
||||
models and device profiles.
|
||||
@ -545,7 +552,9 @@ def load_config(data, models, **kwargs):
|
||||
result:
|
||||
True
|
||||
'''
|
||||
config = get_config(data, models, **kwargs)
|
||||
if isinstance(models, tuple) and isinstance(models[0], list):
|
||||
models = models[0]
|
||||
config = get_config(data, *models, **kwargs)
|
||||
test = kwargs.pop('test', False)
|
||||
debug = kwargs.pop('debug', False)
|
||||
commit = kwargs.pop('commit', True)
|
||||
@ -559,7 +568,7 @@ def load_config(data, models, **kwargs):
|
||||
|
||||
|
||||
@proxy_napalm_wrap
|
||||
def compliance_report(data, models, **kwargs):
|
||||
def compliance_report(data, *models, **kwargs):
|
||||
'''
|
||||
Return the compliance report using YANG objects.
|
||||
|
||||
@ -598,6 +607,8 @@ def compliance_report(data, models, **kwargs):
|
||||
}
|
||||
}
|
||||
'''
|
||||
if isinstance(models, tuple) and isinstance(models[0], list):
|
||||
models = models[0]
|
||||
filepath = kwargs.pop('filepath', '')
|
||||
root = _get_root_object(models)
|
||||
root.load_dict(data)
|
||||
|
@ -2,7 +2,7 @@
|
||||
'''
|
||||
Neutron module for interacting with OpenStack Neutron
|
||||
|
||||
.. versionadded:: Nitrogen
|
||||
.. versionadded:: Oxygen
|
||||
|
||||
:depends:shade
|
||||
|
||||
|
@ -166,6 +166,98 @@ def _snapper_post(opts, jid, pre_num):
|
||||
log.error('Failed to create snapper pre snapshot for jid: {0}'.format(jid))
|
||||
|
||||
|
||||
def _get_pause(jid, state_id=None):
|
||||
'''
|
||||
Return the pause information for a given jid
|
||||
'''
|
||||
pause_dir = os.path.join(__opts__[u'cachedir'], 'state_pause')
|
||||
pause_path = os.path.join(pause_dir, jid)
|
||||
if not os.path.exists(pause_dir):
|
||||
try:
|
||||
os.makedirs(pause_dir)
|
||||
except OSError:
|
||||
# File created in the gap
|
||||
pass
|
||||
data = {}
|
||||
if state_id is not None:
|
||||
if state_id not in data:
|
||||
data[state_id] = {}
|
||||
if os.path.exists(pause_path):
|
||||
with salt.utils.files.fopen(pause_path, 'rb') as fp_:
|
||||
data = msgpack.loads(fp_.read())
|
||||
return data, pause_path
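# Illustrative return value (assumption, not from the patch): after
# ``salt '*' state.soft_kill 20171130110407769519 vim`` this function would return
# something along the lines of
#   ({'vim': {'kill': True}}, '<cachedir>/state_pause/20171130110407769519')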
|
||||
|
||||
|
||||
def get_pauses(jid=None):
|
||||
'''
|
||||
Get a report on all of the currently paused state runs and pause
|
||||
run settings.
|
||||
Optionally send in a jid if you only desire to see a single pause
|
||||
data set.
|
||||
'''
|
||||
ret = {}
|
||||
active = __salt__['saltutil.is_running']('state.*')
|
||||
pause_dir = os.path.join(__opts__[u'cachedir'], 'state_pause')
|
||||
if not os.path.exists(pause_dir):
|
||||
return ret
|
||||
if jid is None:
|
||||
jids = os.listdir(pause_dir)
|
||||
elif isinstance(jid, list):
|
||||
jids = jid
|
||||
else:
|
||||
jids = [str(jid)]
|
||||
for scan_jid in jids:
|
||||
is_active = False
|
||||
for active_data in active:
|
||||
if active_data['jid'] == scan_jid:
|
||||
is_active = True
|
||||
if not is_active:
|
||||
try:
|
||||
pause_path = os.path.join(pause_dir, scan_jid)
|
||||
os.remove(pause_path)
|
||||
except OSError:
|
||||
# Already gone
|
||||
pass
|
||||
continue
|
||||
data, pause_path = _get_pause(scan_jid)
|
||||
ret[scan_jid] = data
|
||||
return ret
|
||||
|
||||
|
||||
def soft_kill(jid, state_id=None):
|
||||
'''
|
||||
Set up a state run to die before executing the given state id,
|
||||
this instructs a running state to safely exit at a given
|
||||
state id. This needs to pass in the jid of the running state.
|
||||
If a state_id is not passed then the jid referenced will be safely exited
|
||||
at the beginning of the next state run.
|
||||
|
||||
The given state id is the id for a given state execution, so given a state
|
||||
that looks like this:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
vim:
|
||||
pkg.installed: []
|
||||
|
||||
The state_id to pass to `soft_kill` is `vim`
|
||||
|
||||
CLI Examples:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
salt '*' state.soft_kill 20171130110407769519
|
||||
salt '*' state.soft_kill 20171130110407769519 vim
|
||||
'''
|
||||
jid = str(jid)
|
||||
if state_id is None:
|
||||
state_id = '__all__'
|
||||
data, pause_path = _get_pause(jid, state_id)
|
||||
data[state_id]['kill'] = True
|
||||
with salt.utils.files.fopen(pause_path, 'wb') as fp_:
|
||||
fp_.write(msgpack.dumps(data))
|
||||
|
||||
|
||||
def pause(jid, state_id=None, duration=None):
|
||||
'''
|
||||
Set up a state id pause, this instructs a running state to pause at a given
|
||||
@ -194,20 +286,7 @@ def pause(jid, state_id=None, duration=None):
|
||||
jid = str(jid)
|
||||
if state_id is None:
|
||||
state_id = '__all__'
|
||||
pause_dir = os.path.join(__opts__[u'cachedir'], 'state_pause')
|
||||
pause_path = os.path.join(pause_dir, jid)
|
||||
if not os.path.exists(pause_dir):
|
||||
try:
|
||||
os.makedirs(pause_dir)
|
||||
except OSError:
|
||||
# File created in the gap
|
||||
pass
|
||||
data = {}
|
||||
if os.path.exists(pause_path):
|
||||
with salt.utils.files.fopen(pause_path, 'rb') as fp_:
|
||||
data = msgpack.loads(fp_.read())
|
||||
if state_id not in data:
|
||||
data[state_id] = {}
|
||||
data, pause_path = _get_pause(jid, state_id)
|
||||
if duration:
|
||||
data[state_id]['duration'] = int(duration)
|
||||
with salt.utils.files.fopen(pause_path, 'wb') as fp_:
|
||||
@ -239,22 +318,11 @@ def resume(jid, state_id=None):
|
||||
jid = str(jid)
|
||||
if state_id is None:
|
||||
state_id = '__all__'
|
||||
pause_dir = os.path.join(__opts__[u'cachedir'], 'state_pause')
|
||||
pause_path = os.path.join(pause_dir, jid)
|
||||
if not os.path.exists(pause_dir):
|
||||
try:
|
||||
os.makedirs(pause_dir)
|
||||
except OSError:
|
||||
# File created in the gap
|
||||
pass
|
||||
data = {}
|
||||
if os.path.exists(pause_path):
|
||||
with salt.utils.files.fopen(pause_path, 'rb') as fp_:
|
||||
data = msgpack.loads(fp_.read())
|
||||
else:
|
||||
return True
|
||||
data, pause_path = _get_pause(jid, state_id)
|
||||
if state_id in data:
|
||||
data.pop(state_id)
|
||||
if state_id == '__all__':
|
||||
data = {}
|
||||
with salt.utils.files.fopen(pause_path, 'wb') as fp_:
|
||||
fp_.write(msgpack.dumps(data))
|
||||
|
||||
|
@ -29,6 +29,13 @@ will follow symbolic links to other directories.
|
||||
Be careful when using ``follow_dir_links``, as a recursive symlink chain
|
||||
will lead to unexpected results.
|
||||
|
||||
.. versionchanged:: Oxygen
|
||||
If ``root_dir`` is a relative path, it will be treated as relative to the
|
||||
:conf_master:`pillar_roots` of the environment specified by
|
||||
:conf_minion:`pillarenv`. If an environment specifies multiple
|
||||
roots, this module will search for files relative to all of them, in order,
|
||||
merging the results.
|
||||
|
||||
If ``keep_newline`` is set to ``True``, then the pillar values for files ending
|
||||
in newlines will keep that newline. The default behavior is to remove the
|
||||
end-of-file newline. ``keep_newline`` should be turned on if the pillar data is
|
||||
@ -259,14 +266,14 @@ def _construct_pillar(top_dir,
|
||||
log.error('file_tree: %s: not a regular file', file_path)
|
||||
continue
|
||||
|
||||
contents = ''
|
||||
contents = b''
|
||||
try:
|
||||
with salt.utils.files.fopen(file_path, 'rb') as fhr:
|
||||
buf = fhr.read(__opts__['file_buffer_size'])
|
||||
while buf:
|
||||
contents += buf
|
||||
buf = fhr.read(__opts__['file_buffer_size'])
|
||||
if contents.endswith('\n') \
|
||||
if contents.endswith(b'\n') \
|
||||
and _check_newline(prefix,
|
||||
file_name,
|
||||
keep_newline):
|
||||
@ -311,6 +318,60 @@ def ext_pillar(minion_id,
|
||||
log.error('file_tree: no root_dir specified')
|
||||
return {}
|
||||
|
||||
if not os.path.isabs(root_dir):
|
||||
pillarenv = __opts__['pillarenv']
|
||||
if pillarenv is None:
|
||||
log.error('file_tree: root_dir is relative but pillarenv is not set')
|
||||
return {}
|
||||
log.debug('file_tree: pillarenv = %s', pillarenv)
|
||||
|
||||
env_roots = __opts__['pillar_roots'].get(pillarenv, None)
|
||||
if env_roots is None:
|
||||
log.error('file_tree: root_dir is relative but no pillar_roots are specified '
|
||||
'for pillarenv %s', pillarenv)
|
||||
return {}
|
||||
|
||||
env_dirs = []
|
||||
for env_root in env_roots:
|
||||
env_dir = os.path.normpath(os.path.join(env_root, root_dir))
|
||||
# don't redundantly load consecutively, but preserve any expected precedence
|
||||
if env_dir not in env_dirs or env_dir != env_dirs[-1]:
|
||||
env_dirs.append(env_dir)
|
||||
dirs = env_dirs
|
||||
else:
|
||||
dirs = [root_dir]
|
||||
|
||||
result_pillar = {}
|
||||
for root in dirs:
|
||||
dir_pillar = _ext_pillar(minion_id,
|
||||
root,
|
||||
follow_dir_links,
|
||||
debug,
|
||||
keep_newline,
|
||||
render_default,
|
||||
renderer_blacklist,
|
||||
renderer_whitelist,
|
||||
template)
|
||||
result_pillar = salt.utils.dictupdate.merge(result_pillar,
|
||||
dir_pillar,
|
||||
strategy='recurse')
|
||||
return result_pillar
|
||||
|
||||
|
||||
def _ext_pillar(minion_id,
|
||||
root_dir,
|
||||
follow_dir_links,
|
||||
debug,
|
||||
keep_newline,
|
||||
render_default,
|
||||
renderer_blacklist,
|
||||
renderer_whitelist,
|
||||
template):
|
||||
'''
|
||||
Compile pillar data for a single root_dir for the specified minion ID
|
||||
'''
|
||||
log.debug('file_tree: reading %s', root_dir)
|
||||
|
||||
if not os.path.isdir(root_dir):
|
||||
log.error(
|
||||
'file_tree: root_dir %s does not exist or is not a directory',
|
||||
|
@ -1,6 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Sync custom types to the Master
|
||||
The Saltutil runner is used to sync custom types to the Master. See the
|
||||
:mod:`saltutil module <salt.modules.saltutil>` for documentation on
|
||||
managing updates to minions.
|
||||
|
||||
.. versionadded:: 2016.3.0
|
||||
'''
|
||||
|
@ -42,6 +42,20 @@ def generate_token(minion_id, signature, impersonated_by_master=False):
|
||||
|
||||
try:
|
||||
config = __opts__['vault']
|
||||
verify = config.get('verify', None)
|
||||
|
||||
if config['auth']['method'] == 'approle':
|
||||
if _selftoken_expired():
|
||||
log.debug('Vault token expired. Recreating one')
|
||||
# Requesting a short ttl token
|
||||
url = '{0}/v1/auth/approle/login'.format(config['url'])
|
||||
payload = {'role_id': config['auth']['role_id']}
|
||||
if 'secret_id' in config['auth']:
|
||||
payload['secret_id'] = config['auth']['secret_id']
|
||||
response = requests.post(url, json=payload, verify=verify)
|
||||
if response.status_code != 200:
|
||||
return {'error': response.reason}
|
||||
config['auth']['token'] = response.json()['auth']['client_token']
|
||||
|
||||
url = '{0}/v1/auth/token/create'.format(config['url'])
|
||||
headers = {'X-Vault-Token': config['auth']['token']}
|
||||
@ -56,8 +70,6 @@ def generate_token(minion_id, signature, impersonated_by_master=False):
|
||||
'metadata': audit_data
|
||||
}
|
||||
|
||||
verify = config.get('verify', None)
|
||||
|
||||
log.trace('Sending token creation request to Vault')
|
||||
response = requests.post(url, headers=headers, json=payload, verify=verify)
|
||||
|
||||
@ -185,3 +197,23 @@ def _expand_pattern_lists(pattern, **mappings):
|
||||
expanded_patterns += result
|
||||
return expanded_patterns
|
||||
return [pattern]
|
||||
|
||||
|
||||
def _selftoken_expired():
|
||||
'''
|
||||
Validate the current token exists and is still valid
|
||||
'''
|
||||
try:
|
||||
verify = __opts__['vault'].get('verify', None)
|
||||
url = '{0}/v1/auth/token/lookup-self'.format(__opts__['vault']['url'])
|
||||
if 'token' not in __opts__['vault']['auth']:
|
||||
return True
|
||||
headers = {'X-Vault-Token': __opts__['vault']['auth']['token']}
|
||||
response = requests.get(url, headers=headers, verify=verify)
|
||||
if response.status_code != 200:
|
||||
return True
|
||||
return False
|
||||
except Exception as e:
|
||||
raise salt.exceptions.CommandExecutionError(
|
||||
'Error while looking up self token : {0}'.format(str(e))
|
||||
)
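# Illustrative master config shape consumed by the approle flow above (values are
# placeholders, not from the patch); the code only relies on the keys shown here:
#   vault:
#     url: https://vault.example.com:8200
#     verify: /etc/ssl/certs/ca-bundle.crt
#     auth:
#       method: approle
#       role_id: <role_id>
#       secret_id: <secret_id>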
|
||||
|
@ -1925,8 +1925,6 @@ class State(object):
|
||||
if self.mocked:
|
||||
ret = mock_ret(cdata)
|
||||
else:
|
||||
# Check if this low chunk is paused
|
||||
self.check_pause(low)
|
||||
# Execute the state function
|
||||
if not low.get(u'__prereq__') and low.get(u'parallel'):
|
||||
# run the state call in parallel, but only if not in a prereq
|
||||
@ -2112,6 +2110,10 @@ class State(object):
|
||||
return running
|
||||
tag = _gen_tag(low)
|
||||
if tag not in running:
|
||||
# Check if this low chunk is paused
|
||||
action = self.check_pause(low)
|
||||
if action == u'kill':
|
||||
break
|
||||
running = self.call_chunk(low, running, chunks)
|
||||
if self.check_failhard(low, running):
|
||||
return running
|
||||
@ -2170,13 +2172,16 @@ class State(object):
|
||||
if u'duration' in pdat[key]:
|
||||
now = time.time()
|
||||
if now - start > pdat[key][u'duration']:
|
||||
return
|
||||
return u'run'
|
||||
if u'kill' in pdat[key]:
|
||||
return u'kill'
|
||||
else:
|
||||
return
|
||||
return u'run'
|
||||
time.sleep(1)
|
||||
except Exception as exc:
|
||||
log.error('Failed to read in pause data for file located at: %s', pause_path)
|
||||
return
|
||||
return u'run'
|
||||
return u'run'
|
||||
|
||||
def reconcile_procs(self, running):
|
||||
'''
|
||||
|
@ -287,7 +287,8 @@ def present(name=None,
|
||||
if not table_exists:
|
||||
if __opts__['test']:
|
||||
ret['result'] = None
|
||||
comments.append('DynamoDB table {0} is set to be created.'.format(name))
|
||||
ret['comment'] = 'DynamoDB table {0} would be created.'.format(name)
|
||||
return ret
|
||||
else:
|
||||
is_created = __salt__['boto_dynamodb.create_table'](
|
||||
name,
|
||||
|
@ -177,7 +177,7 @@ def sources_add(name, ruby=None, user=None):
|
||||
ret['comment'] = 'Gem source is already added.'
|
||||
return ret
|
||||
if __opts__['test']:
|
||||
ret['comment'] = 'The gem source {0} would have been removed.'.format(name)
|
||||
ret['comment'] = 'The gem source {0} would have been added.'.format(name)
|
||||
return ret
|
||||
if __salt__['gem.sources_add'](source_uri=name, ruby=ruby, runas=user):
|
||||
ret['result'] = True
|
||||
@ -212,7 +212,7 @@ def sources_remove(name, ruby=None, user=None):
|
||||
return ret
|
||||
|
||||
if __opts__['test']:
|
||||
ret['comment'] = 'The gem source would have been removed'
|
||||
ret['comment'] = 'The gem source would have been removed.'
|
||||
return ret
|
||||
|
||||
if __salt__['gem.sources_remove'](source_uri=name, ruby=ruby, runas=user):
|
||||
|
@ -76,7 +76,7 @@ def __virtual__():
|
||||
|
||||
def managed(name,
|
||||
data,
|
||||
models,
|
||||
*models,
|
||||
**kwargs):
|
||||
'''
|
||||
Manage the device configuration given the input data structured
|
||||
@ -142,6 +142,8 @@ def managed(name,
|
||||
config:
|
||||
description: "description example"
|
||||
'''
|
||||
if isinstance(models, tuple) and isinstance(models[0], list):
|
||||
models = models[0]
|
||||
ret = salt.utils.napalm.default_ret(name)
|
||||
test = kwargs.get('test', False) or __opts__.get('test', False)
|
||||
debug = kwargs.get('debug', False) or __opts__.get('debug', False)
|
||||
@ -156,13 +158,13 @@ def managed(name,
|
||||
data = [data]
|
||||
with salt.utils.files.fopen(temp_file, 'w') as file_handle:
|
||||
yaml.safe_dump(json.loads(json.dumps(data)), file_handle, encoding='utf-8', allow_unicode=True)
|
||||
device_config = __salt__['napalm_yang.parse'](models,
|
||||
device_config = __salt__['napalm_yang.parse'](*models,
|
||||
config=True,
|
||||
profiles=profiles)
|
||||
log.debug('Parsed the config from the device:')
|
||||
log.debug(device_config)
|
||||
compliance_report = __salt__['napalm_yang.compliance_report'](device_config,
|
||||
models,
|
||||
*models,
|
||||
filepath=temp_file)
|
||||
log.debug('Compliance report:')
|
||||
log.debug(compliance_report)
|
||||
@ -179,7 +181,7 @@ def managed(name,
|
||||
if '_kwargs' in data:
|
||||
data.pop('_kwargs')
|
||||
loaded_changes = __salt__['napalm_yang.load_config'](data,
|
||||
models,
|
||||
*models,
|
||||
profiles=profiles,
|
||||
test=test,
|
||||
debug=debug,
|
||||
@ -199,7 +201,7 @@ def managed(name,
|
||||
|
||||
def configured(name,
|
||||
data,
|
||||
models,
|
||||
*models,
|
||||
**kwargs):
|
||||
'''
|
||||
Configure the network device, given the input data structured
|
||||
@ -269,6 +271,8 @@ def configured(name,
|
||||
config:
|
||||
description: "description example"
|
||||
'''
|
||||
if isinstance(models, tuple) and isinstance(models[0], list):
|
||||
models = models[0]
|
||||
ret = salt.utils.napalm.default_ret(name)
|
||||
test = kwargs.get('test', False) or __opts__.get('test', False)
|
||||
debug = kwargs.get('debug', False) or __opts__.get('debug', False)
|
||||
@ -278,7 +282,7 @@ def configured(name,
|
||||
if '_kwargs' in data:
|
||||
data.pop('_kwargs')
|
||||
loaded_changes = __salt__['napalm_yang.load_config'](data,
|
||||
models,
|
||||
*models,
|
||||
profiles=profiles,
|
||||
test=test,
|
||||
debug=debug,
|
||||
|
@ -266,12 +266,14 @@ class AsyncTCPReqChannel(salt.transport.client.ReqChannel):
|
||||
resolver = kwargs.get('resolver')
|
||||
|
||||
parse = urlparse.urlparse(self.opts['master_uri'])
|
||||
host, port = parse.netloc.rsplit(':', 1)
|
||||
self.master_addr = (host, int(port))
|
||||
master_host, master_port = parse.netloc.rsplit(':', 1)
|
||||
self.master_addr = (master_host, int(master_port))
|
||||
self._closing = False
|
||||
self.message_client = SaltMessageClientPool(self.opts,
|
||||
args=(self.opts, host, int(port),),
|
||||
kwargs={'io_loop': self.io_loop, 'resolver': resolver})
|
||||
args=(self.opts, master_host, int(master_port),),
|
||||
kwargs={'io_loop': self.io_loop, 'resolver': resolver,
|
||||
'source_ip': self.opts.get('source_ip'),
|
||||
'source_port': self.opts.get('source_ret_port')})
|
||||
|
||||
def close(self):
|
||||
if self._closing:
|
||||
@ -501,7 +503,9 @@ class AsyncTCPPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.tran
|
||||
args=(self.opts, self.opts['master_ip'], int(self.auth.creds['publish_port']),),
|
||||
kwargs={'io_loop': self.io_loop,
|
||||
'connect_callback': self.connect_callback,
|
||||
'disconnect_callback': self.disconnect_callback})
|
||||
'disconnect_callback': self.disconnect_callback,
|
||||
'source_ip': self.opts.get('source_ip'),
|
||||
'source_port': self.opts.get('source_publish_port')})
|
||||
yield self.message_client.connect() # wait for the client to be connected
|
||||
self.connected = True
|
||||
# TODO: better exception handling...
|
||||
@ -833,10 +837,13 @@ class SaltMessageClient(object):
|
||||
Low-level message sending client
|
||||
'''
|
||||
def __init__(self, opts, host, port, io_loop=None, resolver=None,
|
||||
connect_callback=None, disconnect_callback=None):
|
||||
connect_callback=None, disconnect_callback=None,
|
||||
source_ip=None, source_port=None):
|
||||
self.opts = opts
|
||||
self.host = host
|
||||
self.port = port
|
||||
self.source_ip = source_ip
|
||||
self.source_port = source_port
|
||||
self.connect_callback = connect_callback
|
||||
self.disconnect_callback = disconnect_callback
|
||||
|
||||
@ -932,9 +939,21 @@ class SaltMessageClient(object):
|
||||
if self._closing:
|
||||
break
|
||||
try:
|
||||
self._stream = yield self._tcp_client.connect(self.host,
|
||||
self.port,
|
||||
ssl_options=self.opts.get('ssl'))
|
||||
if (self.source_ip or self.source_port) and tornado.version_info >= (4, 5):
|
||||
### source_ip and source_port are supported only in Tornado >= 4.5
|
||||
# See http://www.tornadoweb.org/en/stable/releases/v4.5.0.html
|
||||
# Otherwise will just ignore these args
|
||||
self._stream = yield self._tcp_client.connect(self.host,
|
||||
self.port,
|
||||
ssl_options=self.opts.get('ssl'),
|
||||
source_ip=self.source_ip,
|
||||
source_port=self.source_port)
|
||||
else:
|
||||
if self.source_ip or self.source_port:
|
||||
log.warning('If you need a certain source IP/port, consider upgrading Tornado >= 4.5')
|
||||
self._stream = yield self._tcp_client.connect(self.host,
|
||||
self.port,
|
||||
ssl_options=self.opts.get('ssl'))
|
||||
self._connecting_future.set_result(True)
|
||||
break
|
||||
except Exception as e:
|
||||
|
@ -28,6 +28,8 @@ import salt.payload
|
||||
import salt.transport.client
|
||||
import salt.transport.server
|
||||
import salt.transport.mixins.auth
|
||||
from salt.ext import six
|
||||
from salt.ext.six.moves import map
|
||||
from salt.exceptions import SaltReqTimeoutError
|
||||
|
||||
import zmq
|
||||
@ -58,6 +60,44 @@ except ImportError:
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_master_uri(master_ip,
|
||||
master_port,
|
||||
source_ip=None,
|
||||
source_port=None):
|
||||
'''
|
||||
Return the ZeroMQ URI to connect the Minion to the Master.
|
||||
It supports using a different source IP / port, following the ZeroMQ syntax:
|
||||
|
||||
// Connecting using an IP address and binding to an IP address
|
||||
rc = zmq_connect(socket, "tcp://192.168.1.17:5555;192.168.1.1:5555"); assert (rc == 0);
|
||||
|
||||
Source: http://api.zeromq.org/4-1:zmq-tcp
|
||||
'''
|
||||
libzmq_version_tup = tuple(map(int, zmq.zmq_version().split('.')))
|
||||
pyzmq_version_tup = tuple(map(int, zmq.pyzmq_version().split('.')))
|
||||
if libzmq_version_tup >= (4, 1, 6) and pyzmq_version_tup >= (16, 0, 1):
|
||||
# The source:port syntax for ZeroMQ has been added in libzmq 4.1.6
|
||||
# which is included in the pyzmq wheels starting with 16.0.1.
|
||||
if source_ip or source_port:
|
||||
if source_ip and source_port:
|
||||
return 'tcp://{source_ip}:{source_port};{master_ip}:{master_port}'.format(
|
||||
source_ip=source_ip, source_port=source_port,
|
||||
master_ip=master_ip, master_port=master_port)
|
||||
elif source_ip and not source_port:
|
||||
return 'tcp://{source_ip}:0;{master_ip}:{master_port}'.format(
|
||||
source_ip=source_ip,
|
||||
master_ip=master_ip, master_port=master_port)
|
||||
elif not source_ip and source_port:
|
||||
return 'tcp://0.0.0.0:{source_port};{master_ip}:{master_port}'.format(
|
||||
source_port=source_port,
|
||||
master_ip=master_ip, master_port=master_port)
|
||||
if source_ip or source_port:
|
||||
log.warning('Unable to connect to the Master using a specific source IP / port')
|
||||
log.warning('Consider upgrading to pyzmq >= 16.0.1 and libzmq >= 4.1.6')
|
||||
return 'tcp://{master_ip}:{master_port}'.format(
|
||||
master_ip=master_ip, master_port=master_port)
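# Illustrative results (assuming a master at 10.0.0.1:4506; not part of the patch):
#   _get_master_uri('10.0.0.1', 4506)
#       -> 'tcp://10.0.0.1:4506'
#   _get_master_uri('10.0.0.1', 4506, source_ip='10.0.0.7', source_port=49017)
#       -> 'tcp://10.0.0.7:49017;10.0.0.1:4506'
#          (only when pyzmq >= 16.0.1 and libzmq >= 4.1.6; otherwise the helper
#           falls back to the plain master URI and logs a warning)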
|
||||
|
||||
|
||||
class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel):
|
||||
'''
|
||||
Encapsulate sending routines to ZeroMQ.
|
||||
@ -150,6 +190,7 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel):
|
||||
if self.crypt != 'clear':
|
||||
# we don't need to worry about auth as a kwarg, since its a singleton
|
||||
self.auth = salt.crypt.AsyncAuth(self.opts, io_loop=self._io_loop)
|
||||
log.debug('Connecting the Minion to the Master URI (for the return server): %s', self.master_uri)
|
||||
self.message_client = AsyncReqMessageClientPool(self.opts,
|
||||
args=(self.opts, self.master_uri,),
|
||||
kwargs={'io_loop': self._io_loop})
|
||||
@ -166,6 +207,11 @@ class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel):
|
||||
|
||||
@property
|
||||
def master_uri(self):
|
||||
if 'master_ip' in self.opts:
|
||||
return _get_master_uri(self.opts['master_ip'],
|
||||
self.opts['master_port'],
|
||||
source_ip=self.opts.get('source_ip'),
|
||||
source_port=self.opts.get('source_ret_port'))
|
||||
return self.opts['master_uri']
|
||||
|
||||
def _package_load(self, load):
|
||||
@ -379,6 +425,7 @@ class AsyncZeroMQPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.t
|
||||
if not self.auth.authenticated:
|
||||
yield self.auth.authenticate()
|
||||
self.publish_port = self.auth.creds['publish_port']
|
||||
log.debug('Connecting the Minion to the Master publish port, using the URI: %s', self.master_pub)
|
||||
self._socket.connect(self.master_pub)
|
||||
|
||||
@property
|
||||
@ -386,8 +433,10 @@ class AsyncZeroMQPubChannel(salt.transport.mixins.auth.AESPubClientMixin, salt.t
|
||||
'''
|
||||
Return the master publish port
|
||||
'''
|
||||
return 'tcp://{ip}:{port}'.format(ip=self.opts['master_ip'],
|
||||
port=self.publish_port)
|
||||
return _get_master_uri(self.opts['master_ip'],
|
||||
self.publish_port,
|
||||
source_ip=self.opts.get('source_ip'),
|
||||
source_port=self.opts.get('source_publish_port'))
|
||||
|
||||
@tornado.gen.coroutine
|
||||
def _decode_messages(self, messages):
|
||||
@ -954,6 +1003,7 @@ class AsyncReqMessageClient(object):
|
||||
elif hasattr(zmq, 'IPV4ONLY'):
|
||||
self.socket.setsockopt(zmq.IPV4ONLY, 0)
|
||||
self.socket.linger = self.linger
|
||||
log.debug('Trying to connect to: %s', self.addr)
|
||||
self.socket.connect(self.addr)
|
||||
self.stream = zmq.eventloop.zmqstream.ZMQStream(self.socket, io_loop=self.io_loop)
|
||||
|
||||
|
@ -337,12 +337,12 @@ def sanitize_win_path(winpath):
|
||||
Remove illegal path characters for windows
|
||||
'''
|
||||
intab = '<>:|?*'
|
||||
outtab = '_' * len(intab)
|
||||
trantab = ''.maketrans(intab, outtab) if six.PY3 else string.maketrans(intab, outtab) # pylint: disable=no-member
|
||||
if isinstance(winpath, six.string_types):
|
||||
winpath = winpath.translate(trantab)
|
||||
elif isinstance(winpath, six.text_type):
|
||||
if isinstance(winpath, six.text_type):
|
||||
winpath = winpath.translate(dict((ord(c), u'_') for c in intab))
|
||||
elif isinstance(winpath, six.string_types):
|
||||
outtab = '_' * len(intab)
|
||||
trantab = ''.maketrans(intab, outtab) if six.PY3 else string.maketrans(intab, outtab) # pylint: disable=no-member
|
||||
winpath = winpath.translate(trantab)
|
||||
return winpath
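# Illustrative example (not part of the patch): each character in '<>:|?*' is
# replaced with '_', so e.g.
#   sanitize_win_path(u'salt|minion?.log') -> u'salt_minion_.log'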
|
||||
|
||||
|
||||
|
@ -103,6 +103,20 @@ def _get_vault_connection():
|
||||
if 'vault' in __opts__ and not __opts__.get('__role', 'minion') == 'master':
|
||||
log.debug('Using Vault connection details from local config')
|
||||
try:
|
||||
if __opts__['vault']['auth']['method'] == 'approle':
|
||||
verify = __opts__['vault'].get('verify', None)
|
||||
if _selftoken_expired():
|
||||
log.debug('Vault token expired. Recreating one')
|
||||
# Requesting a short ttl token
|
||||
url = '{0}/v1/auth/approle/login'.format(__opts__['vault']['url'])
|
||||
payload = {'role_id': __opts__['vault']['auth']['role_id']}
|
||||
if 'secret_id' in __opts__['vault']['auth']:
|
||||
payload['secret_id'] = __opts__['vault']['auth']['secret_id']
|
||||
response = requests.post(url, json=payload, verify=verify)
|
||||
if response.status_code != 200:
|
||||
errmsg = 'An error occurred while getting a token from approle'
|
||||
raise salt.exceptions.CommandExecutionError(errmsg)
|
||||
__opts__['vault']['auth']['token'] = response.json()['auth']['client_token']
|
||||
return {
|
||||
'url': __opts__['vault']['url'],
|
||||
'token': __opts__['vault']['auth']['token'],
|
||||
@ -162,3 +176,23 @@ def make_request_with_profile(method, resource, profile, **args):
|
||||
response = requests.request(method, url, headers=headers, **args)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
def _selftoken_expired():
|
||||
'''
|
||||
Validate the current token exists and is still valid
|
||||
'''
|
||||
try:
|
||||
verify = __opts__['vault'].get('verify', None)
|
||||
url = '{0}/v1/auth/token/lookup-self'.format(__opts__['vault']['url'])
|
||||
if 'token' not in __opts__['vault']['auth']:
|
||||
return True
|
||||
headers = {'X-Vault-Token': __opts__['vault']['auth']['token']}
|
||||
response = requests.get(url, headers=headers, verify=verify)
|
||||
if response.status_code != 200:
|
||||
return True
|
||||
return False
|
||||
except Exception as e:
|
||||
raise salt.exceptions.CommandExecutionError(
|
||||
'Error while looking up self token : {0}'.format(str(e))
|
||||
)
|
||||
|
tests/support/napalm.py (new file, 516 lines)
@ -0,0 +1,516 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
Base classes for napalm unit tests
|
||||
|
||||
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
|
||||
'''
|
||||
|
||||
from __future__ import absolute_import
|
||||
from functools import wraps
|
||||
|
||||
|
||||
TEST_INTERFACES = {
|
||||
'Management1': {
|
||||
'is_up': False,
|
||||
'is_enabled': False,
|
||||
'description': u'',
|
||||
'last_flapped': -1,
|
||||
'speed': 1000,
|
||||
'mac_address': u'dead:beef:dead',
|
||||
}
|
||||
}
|
||||
|
||||
# Test data
|
||||
TEST_FACTS = {
|
||||
'__opts__': {},
|
||||
'OPTIONAL_ARGS': {},
|
||||
'uptime': 'Forever',
|
||||
'UP': True,
|
||||
    'HOSTNAME': 'test-device.com',
    'hostname': 'test-device.com',
    'username': 'admin',
    'os_version': '1.2.3',
    'model': 'test_model',
    'serial_number': '123456',
    'vendor': 'cisco',
    'interface_list': TEST_INTERFACES
}

TEST_ENVIRONMENT = {
    'hot': 'yes'
}

TEST_COMMAND_RESPONSE = {
    'show run': 'all the command output'
}

TEST_TRACEROUTE_RESPONSE = {
    'success': {
        1: {
            'probes': {
                1: {
                    'rtt': 1.123,
                    'ip_address': u'206.223.116.21',
                    'host_name': u'eqixsj-google-gige.google.com'
                }
            }
        }
    }
}

TEST_PING_RESPONSE = {
    'success': {
        'probes_sent': 5,
        'packet_loss': 0,
        'rtt_min': 72.158,
        'rtt_max': 72.433,
        'rtt_avg': 72.268,
        'rtt_stddev': 0.094,
        'results': [
            {
                'ip_address': '1.1.1.1',
                'rtt': 72.248
            }
        ]
    }
}

TEST_ARP_TABLE = [
    {
        'interface': 'MgmtEth0/RSP0/CPU0/0',
        'mac': '5C:5E:AB:DA:3C:F0',
        'ip': '172.17.17.1',
        'age': 1454496274.84
    }
]

TEST_IPADDRS = {
    'FastEthernet8': {
        'ipv4': {
            '10.66.43.169': {
                'prefix_length': 22
            }
        }
    }
}

TEST_INTERFACES = {
    'Management1': {
        'is_up': False,
        'is_enabled': False,
        'description': u'',
        'last_flapped': -1,
        'speed': 1000,
        'mac_address': u'dead:beef:dead',
    }
}

TEST_LLDP_NEIGHBORS = {
    u'Ethernet2':
        [
            {
                'hostname': u'junos-unittest',
                'port': u'520',
            }
        ]
}

TEST_MAC_TABLE = [
    {
        'mac': '00:1C:58:29:4A:71',
        'interface': 'Ethernet47',
        'vlan': 100,
        'static': False,
        'active': True,
        'moves': 1,
        'last_move': 1454417742.58
    }
]

TEST_RUNNING_CONFIG = {
    'one': 'two'
}

TEST_OPTICS = {
    'et1': {
        'physical_channels': {
            'channel': [
                {
                    'index': 0,
                    'state': {
                        'input_power': {
                            'instant': 0.0,
                            'avg': 0.0,
                            'min': 0.0,
                            'max': 0.0,
                        },
                        'output_power': {
                            'instant': 0.0,
                            'avg': 0.0,
                            'min': 0.0,
                            'max': 0.0,
                        },
                        'laser_bias_current': {
                            'instant': 0.0,
                            'avg': 0.0,
                            'min': 0.0,
                            'max': 0.0,
                        },
                    }
                }
            ]
        }
    }
}

TEST_BGP_CONFIG = {
    'test': 'value'
}

TEST_BGP_NEIGHBORS = {
    'default': {
        8121: [
            {
                'up': True,
                'local_as': 13335,
                'remote_as': 8121,
                'local_address': u'172.101.76.1',
                'local_address_configured': True,
                'local_port': 179,
                'remote_address': u'192.247.78.0',
                'router_id': u'192.168.0.1',
                'remote_port': 58380,
                'multihop': False,
                'import_policy': u'4-NTT-TRANSIT-IN',
                'export_policy': u'4-NTT-TRANSIT-OUT',
                'input_messages': 123,
                'output_messages': 13,
                'input_updates': 123,
                'output_updates': 5,
                'messages_queued_out': 23,
                'connection_state': u'Established',
                'previous_connection_state': u'EstabSync',
                'last_event': u'RecvKeepAlive',
                'suppress_4byte_as': False,
                'local_as_prepend': False,
                'holdtime': 90,
                'configured_holdtime': 90,
                'keepalive': 30,
                'configured_keepalive': 30,
                'active_prefix_count': 132808,
                'received_prefix_count': 566739,
                'accepted_prefix_count': 566479,
                'suppressed_prefix_count': 0,
                'advertise_prefix_count': 0,
                'flap_count': 27
            }
        ]
    }
}

TEST_TERM_CONFIG = {
    'result': True,
    'already_configured': False
}

TEST_NTP_PEERS = {
    '192.168.0.1': 1,
    '172.17.17.1': 2,
    '172.17.17.2': 3,
    '2400:cb00:6:1024::c71b:840a': 4
}

TEST_NTP_SERVERS = {
    '192.168.0.1': 1,
    '172.17.17.1': 2,
    '172.17.17.2': 3,
    '2400:cb00:6:1024::c71b:840a': 4
}

TEST_NTP_STATS = [
    {
        'remote': u'188.114.101.4',
        'referenceid': u'188.114.100.1',
        'synchronized': True,
        'stratum': 4,
        'type': u'-',
        'when': u'107',
        'hostpoll': 256,
        'reachability': 377,
        'delay': 164.228,
        'offset': -13.866,
        'jitter': 2.695
    }
]

TEST_PROBES_CONFIG = {
    'probe1': {
        'test1': {
            'probe_type': 'icmp-ping',
            'target': '192.168.0.1',
            'source': '192.168.0.2',
            'probe_count': 13,
            'test_interval': 3
        },
        'test2': {
            'probe_type': 'http-ping',
            'target': '172.17.17.1',
            'source': '192.17.17.2',
            'probe_count': 5,
            'test_interval': 60
        }
    }
}

TEST_PROBES_RESULTS = {
    'probe1': {
        'test1': {
            'last_test_min_delay': 63.120,
            'global_test_min_delay': 62.912,
            'current_test_avg_delay': 63.190,
            'global_test_max_delay': 177.349,
            'current_test_max_delay': 63.302,
            'global_test_avg_delay': 63.802,
            'last_test_avg_delay': 63.438,
            'last_test_max_delay': 65.356,
            'probe_type': 'icmp-ping',
            'rtt': 63.138,
            'last_test_loss': 0,
            'round_trip_jitter': -59.0,
            'target': '192.168.0.1',
            'source': '192.168.0.2',
            'probe_count': 15,
            'current_test_min_delay': 63.138
        },
        'test2': {
            'last_test_min_delay': 176.384,
            'global_test_min_delay': 169.226,
            'current_test_avg_delay': 177.098,
            'global_test_max_delay': 292.628,
            'current_test_max_delay': 180.055,
            'global_test_avg_delay': 177.959,
            'last_test_avg_delay': 177.178,
            'last_test_max_delay': 184.671,
            'probe_type': 'icmp-ping',
            'rtt': 176.449,
            'last_test_loss': 0,
            'round_trip_jitter': -34.0,
            'target': '172.17.17.1',
            'source': '172.17.17.2',
            'probe_count': 15,
            'current_test_min_delay': 176.402
        }
    }
}

TEST_ROUTE = {
    '172.16.0.0/25': [
        {
            'protocol': 'BGP',
            'last_active': True,
            'current_active': True,
            'age': 1178693,
            'routing_table': 'inet.0',
            'next_hop': '192.168.0.11',
            'outgoing_interface': 'xe-1/1/1.100',
            'preference': 170,
            'selected_next_hop': False,
            'protocol_attributes': {
                'remote_as': 65001,
                'metric': 5,
                'local_as': 13335,
                'as_path': '',
                'remote_address': '192.168.0.11',
                'metric2': 0,
                'local_preference': 0,
                'communities': [
                    '0:2',
                    'no-export'
                ],
                'preference2': -1
            },
            'inactive_reason': ''
        },
        {
            'protocol': 'BGP',
            'last_active': False,
            'current_active': False,
            'age': 2359429,
            'routing_table': 'inet.0',
            'next_hop': '192.168.0.17',
            'outgoing_interface': 'xe-1/1/1.100',
            'preference': 170,
            'selected_next_hop': True,
            'protocol_attributes': {
                'remote_as': 65001,
                'metric': 5,
                'local_as': 13335,
                'as_path': '',
                'remote_address': '192.168.0.17',
                'metric2': 0,
                'local_preference': 0,
                'communities': [
                    '0:3',
                    'no-export'
                ],
                'preference2': -1
            },
            'inactive_reason': 'Not Best in its group - Router ID'
        }
    ]
}

TEST_SNMP_INFO = {
    'test_': 'value'
}

TEST_USERS = {
    'mircea': {
        'level': 15,
        'password': '$1$0P70xKPa$4jt5/10cBTckk6I/w/',
        'sshkeys': [
            'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4pFn+shPwTb2yELO4L7NtQrKOJXNeCl1je\
l9STXVaGnRAnuc2PXl35vnWmcUq6YbUEcgUTRzzXfmelJKuVJTJIlMXii7h2xkbQp0YZIEs4P\
8ipwnRBAxFfk/ZcDsdfsdfsdfsdN56ejk345jhk345jk345jk341p3A/9LIL7l6YewLBCwJj6\
D+fWSJ0/YW+7oH17Fk2HH+tw0L5PcWLHkwA4t60iXn16qDbIk/ze6jv2hDGdCdz7oYQeCE55C\
CHOHMJWYfN3jcL4s0qv8/u6Ka1FVkV7iMmro7ChThoV/5snI4Ljf2wKqgHH7TfNaCfpU0WvHA\
nTs8zhOrGScSrtb mircea@master-roshi'
        ]
    }
}


class MockNapalmDevice(object):
    '''Setup a mock device for our tests'''
    def get_facts(self):
        return TEST_FACTS

    def get_environment(self):
        return TEST_ENVIRONMENT

    def get_arp_table(self):
        return TEST_ARP_TABLE

    def get(self, key, default=None, *args, **kwargs):
        try:
            if key == 'DRIVER':
                return self
            return TEST_FACTS[key]
        except KeyError:
            return default

    def cli(self, commands, *args, **kwargs):
        assert commands[0] == 'show run'
        return TEST_COMMAND_RESPONSE

    def traceroute(self, destination, **kwargs):
        assert destination == 'destination.com'
        return TEST_TRACEROUTE_RESPONSE

    def ping(self, destination, **kwargs):
        assert destination == 'destination.com'
        return TEST_PING_RESPONSE

    def get_config(self, retrieve='all'):
        assert retrieve == 'running'
        return TEST_RUNNING_CONFIG

    def get_interfaces_ip(self, **kwargs):
        return TEST_IPADDRS

    def get_interfaces(self, **kwargs):
        return TEST_INTERFACES

    def get_lldp_neighbors_detail(self, **kwargs):
        return TEST_LLDP_NEIGHBORS

    def get_mac_address_table(self, **kwargs):
        return TEST_MAC_TABLE

    def get_optics(self, **kwargs):
        return TEST_OPTICS

    def load_merge_candidate(self, filename=None, config=None):
        assert config == 'new config'
        return TEST_RUNNING_CONFIG

    def load_replace_candidate(self, filename=None, config=None):
        assert config == 'new config'
        return TEST_RUNNING_CONFIG

    def commit_config(self, **kwargs):
        return TEST_RUNNING_CONFIG

    def discard_config(self, **kwargs):
        return TEST_RUNNING_CONFIG

    def compare_config(self, **kwargs):
        return TEST_RUNNING_CONFIG

    def rollback(self, **kwargs):
        return TEST_RUNNING_CONFIG

    def get_bgp_config(self, **kwargs):
        return TEST_BGP_CONFIG

    def get_bgp_neighbors_detail(self, neighbor_address=None, **kwargs):
        assert neighbor_address in (None, 'test_address')
        return TEST_BGP_NEIGHBORS

    def get_ntp_peers(self, **kwargs):
        return TEST_NTP_PEERS

    def get_ntp_servers(self, **kwargs):
        return TEST_NTP_SERVERS

    def get_ntp_stats(self, **kwargs):
        return TEST_NTP_STATS

    def get_probes_config(self, **kwargs):
        return TEST_PROBES_CONFIG

    def get_probes_results(self, **kwargs):
        return TEST_PROBES_RESULTS

    def get_route_to(self, destination, protocol=None, **kwargs):
        assert destination == '1.2.3.4'
        return TEST_ROUTE

    def get_snmp_information(self, **kwargs):
        return TEST_SNMP_INFO

    def get_users(self, **kwargs):
        return TEST_USERS


def mock_proxy_napalm_wrap(func):
    '''
    The proper decorator checks for proxy minions. We don't care
    so just pass straight through to the original function
    '''

    @wraps(func)
    def func_wrapper(*args, **kwargs):
        func.__globals__['napalm_device'] = MockNapalmDevice()
        return func(*args, **kwargs)
    return func_wrapper


import salt.utils.napalm as napalm_utils  # NOQA
napalm_utils.proxy_napalm_wrap = mock_proxy_napalm_wrap  # NOQA
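# NOTE: this patch is applied at import time so that the napalm execution/grains
# modules imported afterwards pick up the pass-through decorator above and run
# against MockNapalmDevice instead of a real proxy connection.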


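# Minimal stand-ins for the __salt__ helpers (file.file_exists, file.join,
# file.get_managed, random.hash) that the napalm unit tests wire into their
# loader setup.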
def true(name):
    return True


def random_hash(source, method):
    return 12346789


def join(*files):
    return True


def get_managed_file(*args, **kwargs):
    return 'True'

@ -456,7 +456,7 @@ class SaltTestingParser(optparse.OptionParser):
            logging_level = logging.INFO
        else:
            logging_level = logging.ERROR
        os.environ['TESTS_LOG_LEVEL'] = six.text_type(self.options.verbosity)
        os.environ['TESTS_LOG_LEVEL'] = six.binary_type(self.options.verbosity)
        consolehandler.setLevel(logging_level)
        logging.root.addHandler(consolehandler)
        log.info('Runtests logging has been setup')

@ -619,6 +619,30 @@ class VMwareTestCase(ExtendedTestCase):
            call='function'
        )

    def test_remove_snapshot_call(self):
        '''
        Tests that a SaltCloudSystemExit is raised when trying to call remove_snapshot
        with anything other than --action or -a.
        '''
        self.assertRaises(
            SaltCloudSystemExit,
            vmware.remove_snapshot,
            name=VM_NAME,
            kwargs={'snapshot_name': 'mySnapshot'},
            call='function'
        )

    def test_remove_snapshot_call_no_snapshot_name_in_kwargs(self):
        '''
        Tests that a SaltCloudSystemExit is raised when snapshot_name is not present in kwargs.
        '''
        self.assertRaises(
            SaltCloudSystemExit,
            vmware.remove_snapshot,
            name=VM_NAME,
            call='action'
        )

    def test_remove_all_snapshots_call(self):
        '''
        Tests that a SaltCloudSystemExit is raised when trying to call remove_all_snapshots

99  tests/unit/grains/test_napalm.py  Normal file
@ -0,0 +1,99 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON,
    patch
)

import tests.support.napalm as napalm_test_support
import salt.grains.napalm as napalm_grains  # NOQA
import salt.proxy.napalm as napalm_proxy  # NOQA
napalm_grains.salt.utils.napalm.is_proxy = MagicMock(return_value=True)

TEST_DEVICE_CACHE = {
    'DRIVER': napalm_test_support.MockNapalmDevice(),
    'DRIVER_NAME': 'cisco',
    'OS_VERSION': '1.2.3',
    'HOSTNAME': 'test-device.com',
    'USERNAME': 'admin'
}

TEST_CACHE = {
    'result': True,
    'out': napalm_test_support.TEST_FACTS
}
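# The class-level patches below replace the module's DEVICE_CACHE and
# GRAINS_CACHE with the canned values above, so the grains functions are
# exercised without a proxy connection.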


@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch('salt.grains.napalm.DEVICE_CACHE', TEST_DEVICE_CACHE)
@patch('salt.grains.napalm.GRAINS_CACHE', TEST_CACHE)
class NapalmGrainsTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
            }
        }

        return {napalm_grains: module_globals}

    def test_os(self):
        ret = napalm_grains.getos(proxy=napalm_proxy)
        assert ret['os'] == 'cisco'

    def test_os_version(self):
        ret = napalm_grains.version(proxy=napalm_proxy)
        assert ret['version'] == '1.2.3'

    def test_model(self):
        ret = napalm_grains.model(proxy=napalm_proxy)
        assert ret['model'] == 'test_model'

    def test_serial(self):
        ret = napalm_grains.serial(proxy=napalm_proxy)
        assert ret['serial'] == '123456'

    def test_vendor(self):
        ret = napalm_grains.vendor(proxy=napalm_proxy)
        assert ret['vendor'] == 'cisco'

    def test_uptime(self):
        ret = napalm_grains.uptime(proxy=napalm_proxy)
        assert ret['uptime'] == 'Forever'

    def test_interfaces(self):
        ret = napalm_grains.interfaces(proxy=napalm_proxy)
        assert ret['interfaces'] == napalm_test_support.TEST_INTERFACES

    def test_username(self):
        ret = napalm_grains.username(proxy=napalm_proxy)
        assert ret['username'] == 'admin'

    def test_hostname(self):
        ret = napalm_grains.hostname(proxy=napalm_proxy)
        assert ret['hostname'] == 'test-device.com'

    def test_host(self):
        ret = napalm_grains.host(proxy=napalm_proxy)
        assert ret['host'] == 'test-device.com'

@ -150,3 +150,25 @@ class DefaultsTestCase(TestCase, LoaderModuleMockMixin):
        final = defaults.merge(dest, src, in_place=False)
        self.assertEqual(dest, dest_orig)
        self.assertEqual(final, merged)

    def test_deepcopy(self):
        '''
        Test a deep copy of object.
        '''

        src = {
            'A': 'A',
            'B': 'B'
        }

        dist = defaults.deepcopy(src)
        dist.update({'C': 'C'})

        result = {
            'A': 'A',
            'B': 'B',
            'C': 'C'
        }

        self.assertFalse(src == dist)
        self.assertTrue(dist == result)

106  tests/unit/modules/test_napalm_acl.py  Normal file
@ -0,0 +1,106 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_acl as napalm_acl  # NOQA
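
# Fakes for the capirca.* and net.load_config helpers wired into __salt__ below;
# each one checks the arguments it receives from napalm_acl and returns a canned
# value.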
def mock_capirca_term_config(platform, filter_name, term_name, *args, **kwargs):
    assert platform == 'cisco'
    assert filter_name == 'test_filter'
    assert term_name == 'test_term'
    return 'test_config'


def mock_capirca_filter_config(platform, filter_name, *args, **kwargs):
    assert platform == 'cisco'
    assert filter_name == 'test_filter'
    return 'test_config'


def mock_capirca_policy_config(platform, *args, **kwargs):
    assert platform == 'cisco'
    return 'test_config'


def mock_net_load_config(text, *args, **kwargs):
    assert text == 'test_config'
    return napalm_test_support.TEST_TERM_CONFIG


def mock_capirca_get_filter_pillar(filter_, *args, **kwargs):
    assert filter_ == 'test_filter'
    return {'test': 'value'}


def mock_capirca_get_term_pillar(filter_, term, *args, **kwargs):
    assert filter_ == 'test_filter'
    assert term == 'test_term'
    return {'test': 'value'}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmAclModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
                'capirca.get_term_config': mock_capirca_term_config,
                'capirca.get_policy_config': mock_capirca_policy_config,
                'capirca.get_filter_config': mock_capirca_filter_config,
                'capirca.get_filter_pillar': mock_capirca_get_filter_pillar,
                'capirca.get_term_pillar': mock_capirca_get_term_pillar,
                'net.load_config': mock_net_load_config
            },
            '__grains__': {
                'os': 'ios',
                'vendor': 'cisco',
                'model': '3750X'
            }
        }

        return {napalm_acl: module_globals}

    def test_load_term_config(self):
        ret = napalm_acl.load_term_config('test_filter', 'test_term')
        assert ret['already_configured'] is False

    def test_load_filter_config(self):
        ret = napalm_acl.load_filter_config('test_filter', 'test_term')
        assert ret['already_configured'] is False

    def test_load_policy_config(self):
        ret = napalm_acl.load_policy_config('test_filter', 'test_term')
        assert ret['already_configured'] is False

    def test_get_filter_pillar(self):
        ret = napalm_acl.get_filter_pillar('test_filter')
        assert ret['test'] == 'value'

    def test_get_term_pillar(self):
        ret = napalm_acl.get_term_pillar('test_filter', 'test_term')
        assert ret['test'] == 'value'

49  tests/unit/modules/test_napalm_bgp.py  Normal file
@ -0,0 +1,49 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_bgp as napalm_bgp  # NOQA


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmBgpModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash
            }
        }

        return {napalm_bgp: module_globals}

    def test_config(self):
        ret = napalm_bgp.config("test_group")
        assert ret['out'] is napalm_test_support.TEST_BGP_CONFIG

    def test_neighbors(self):
        ret = napalm_bgp.neighbors("test_address")
        assert ret['out'] is napalm_test_support.TEST_BGP_NEIGHBORS

140  tests/unit/modules/test_napalm_network.py  Normal file
@ -0,0 +1,140 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_network as napalm_network  # NOQA


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmNetworkModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash
            }
        }

        return {napalm_network: module_globals}

    def test_connected_pass(self):
        ret = napalm_network.connected()
        assert ret['out'] is True

    def test_facts(self):
        ret = napalm_network.facts()
        assert ret['out'] == napalm_test_support.TEST_FACTS

    def test_environment(self):
        ret = napalm_network.environment()
        assert ret['out'] == napalm_test_support.TEST_ENVIRONMENT

    def test_cli_single_command(self):
        '''
        Test that CLI works with 1 arg
        '''
        ret = napalm_network.cli("show run")
        assert ret['out'] == napalm_test_support.TEST_COMMAND_RESPONSE

    def test_cli_multi_command(self):
        '''
        Test that CLI works with 2 arg
        '''
        ret = napalm_network.cli("show run", "show run")
        assert ret['out'] == napalm_test_support.TEST_COMMAND_RESPONSE

    def test_traceroute(self):
        ret = napalm_network.traceroute('destination.com')
        assert list(ret['out'].keys())[0] == 'success'

    def test_ping(self):
        ret = napalm_network.ping('destination.com')
        assert list(ret['out'].keys())[0] == 'success'

    def test_arp(self):
        ret = napalm_network.arp()
        assert ret['out'] == napalm_test_support.TEST_ARP_TABLE

    def test_ipaddrs(self):
        ret = napalm_network.ipaddrs()
        assert ret['out'] == napalm_test_support.TEST_IPADDRS

    def test_interfaces(self):
        ret = napalm_network.interfaces()
        assert ret['out'] == napalm_test_support.TEST_INTERFACES

    def test_lldp(self):
        ret = napalm_network.lldp()
        assert ret['out'] == napalm_test_support.TEST_LLDP_NEIGHBORS

    def test_mac(self):
        ret = napalm_network.mac()
        assert ret['out'] == napalm_test_support.TEST_MAC_TABLE

    def test_config(self):
        ret = napalm_network.config('running')
        assert ret['out'] == napalm_test_support.TEST_RUNNING_CONFIG

    def test_optics(self):
        ret = napalm_network.optics()
        assert ret['out'] == napalm_test_support.TEST_OPTICS

    def test_load_config(self):
        ret = napalm_network.load_config(text='new config')
        assert ret['result']

    def test_load_config_replace(self):
        ret = napalm_network.load_config(text='new config', replace=True)
        assert ret['result']

    def test_load_template(self):
        ret = napalm_network.load_template('set_ntp_peers',
                                           peers=['192.168.0.1'])
        assert ret['out'] is None

    def test_commit(self):
        ret = napalm_network.commit()
        assert ret['out'] == napalm_test_support.TEST_RUNNING_CONFIG

    def test_discard_config(self):
        ret = napalm_network.discard_config()
        assert ret['out'] == napalm_test_support.TEST_RUNNING_CONFIG

    def test_compare_config(self):
        ret = napalm_network.compare_config()
        assert ret['out'] == napalm_test_support.TEST_RUNNING_CONFIG

    def test_rollback(self):
        ret = napalm_network.rollback()
        assert ret['out'] == napalm_test_support.TEST_RUNNING_CONFIG

    def test_config_changed(self):
        ret = napalm_network.config_changed()
        assert ret == (True, '')

    def test_config_control(self):
        ret = napalm_network.config_control()
        assert ret == (True, '')

77  tests/unit/modules/test_napalm_ntp.py  Normal file
@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_ntp as napalm_ntp  # NOQA
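
# Stand-in for net.load_template: checks that the peers/servers passed by the
# test methods below are forwarded to the template call.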
def mock_net_load_template(template, *args, **kwargs):
    if template == 'set_ntp_peers' or template == 'delete_ntp_peers':
        assert '1.2.3.4' in kwargs['peers']
    if template == 'set_ntp_servers' or template == 'delete_ntp_servers':
        assert '2.2.3.4' in kwargs['servers']


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmNtpModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
                'net.load_template': mock_net_load_template
            }
        }

        return {napalm_ntp: module_globals}

    def test_peers(self):
        ret = napalm_ntp.peers()
        assert '172.17.17.1' in ret['out']

    def test_servers(self):
        ret = napalm_ntp.servers()
        assert '172.17.17.1' in ret['out']

    def test_stats(self):
        ret = napalm_ntp.stats()
        assert ret['out'][0]['reachability'] == 377

    def test_set_peers(self):
        ret = napalm_ntp.set_peers('1.2.3.4', '5.6.7.8')
        assert ret is None

    def test_set_servers(self):
        ret = napalm_ntp.set_servers('2.2.3.4', '6.6.7.8')
        assert ret is None

    def test_delete_servers(self):
        ret = napalm_ntp.delete_servers('2.2.3.4', '6.6.7.8')
        assert ret is None

    def test_delete_peers(self):
        ret = napalm_ntp.delete_peers('1.2.3.4', '5.6.7.8')
        assert ret is None

104  tests/unit/modules/test_napalm_probes.py  Normal file
@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_probes as napalm_probes  # NOQA


TEST_PROBES = {
    'new_probe': {
        'new_test1': {
            'probe_type': 'icmp-ping',
            'target': '192.168.0.1',
            'source': '192.168.0.2',
            'probe_count': 13,
            'test_interval': 3
        }
    }
}


TEST_DELETE_PROBES = {
    'existing_probe': {
        'existing_test1': {},
        'existing_test2': {}
    }
}


TEST_SCHEDULE_PROBES = {
    'test_probe': {
        'existing_test1': {},
        'existing_test2': {}
    }
}
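
# Fake net.load_template: checks that each template name is called with the
# matching probes payload and returns the canned term config.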
def mock_net_load(template, *args, **kwargs):
    if template == 'set_probes':
        assert kwargs['probes'] == TEST_PROBES
        return napalm_test_support.TEST_TERM_CONFIG
    if template == 'delete_probes':
        assert kwargs['probes'] == TEST_DELETE_PROBES
        return napalm_test_support.TEST_TERM_CONFIG
    if template == 'schedule_probes':
        assert kwargs['probes'] == TEST_SCHEDULE_PROBES
        return napalm_test_support.TEST_TERM_CONFIG
    raise ValueError("incorrect template {0}".format(template))


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmProbesModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
                'net.load_template': mock_net_load
            }
        }

        return {napalm_probes: module_globals}

    def test_probes_config(self):
        ret = napalm_probes.config()
        assert ret['out'] == napalm_test_support.TEST_PROBES_CONFIG

    def test_probes_results(self):
        ret = napalm_probes.results()
        assert ret['out'] == napalm_test_support.TEST_PROBES_RESULTS

    def test_set_probes(self):
        ret = napalm_probes.set_probes(TEST_PROBES)
        assert ret['result'] is True

    def test_delete_probes(self):
        ret = napalm_probes.delete_probes(TEST_DELETE_PROBES)
        assert ret['result'] is True

    def test_schedule_probes(self):
        ret = napalm_probes.schedule_probes(TEST_SCHEDULE_PROBES)
        assert ret['result'] is True

50  tests/unit/modules/test_napalm_route.py  Normal file
@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_route as napalm_route  # NOQA


def mock_net_load(template, *args, **kwargs):
    raise ValueError("incorrect template {0}".format(template))


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmRouteModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
                'net.load_template': mock_net_load
            }
        }

        return {napalm_route: module_globals}

    def test_show(self):
        ret = napalm_route.show('1.2.3.4')
        assert ret['out'] == napalm_test_support.TEST_ROUTE

55  tests/unit/modules/test_napalm_snmp.py  Normal file
@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_snmp as napalm_snmp  # NOQA
import salt.modules.napalm_network as napalm_network  # NOQA


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmSnmpModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
                'net.load_template': napalm_network.load_template
            }
        }

        return {napalm_snmp: module_globals, napalm_network: module_globals}

    def test_config(self):
        ret = napalm_snmp.config()
        assert ret['out'] == napalm_test_support.TEST_SNMP_INFO

    def test_remove_config(self):
        ret = napalm_snmp.remove_config('1.2.3.4')
        assert ret['result'] is False

    def test_update_config(self):
        ret = napalm_snmp.update_config('1.2.3.4')
        assert ret['result'] is False

55  tests/unit/modules/test_napalm_users.py  Normal file
@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_users as napalm_users  # NOQA
import salt.modules.napalm_network as napalm_network  # NOQA


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmUsersModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
                'net.load_template': napalm_network.load_template
            }
        }

        return {napalm_users: module_globals, napalm_network: module_globals}

    def test_config(self):
        ret = napalm_users.config()
        assert ret['out'] == napalm_test_support.TEST_USERS

    def test_set_users(self):
        ret = napalm_users.set_users({'mircea': {}})
        assert ret['result'] is False

    def test_delete_users(self):
        ret = napalm_users.delete_users({'mircea': {}})
        assert ret['result'] is False

108  tests/unit/modules/test_napalm_yang_mod.py  Normal file
@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON
)

import tests.support.napalm as napalm_test_support
import salt.modules.napalm_yang_mod as napalm_yang_mod  # NOQA
import salt.modules.napalm_network as napalm_network  # NOQA


TEST_DIFF = {
    'diff1': 'value'
}
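
# Minimal stand-ins for the external napalm_yang package; an instance of
# MockNapalmYangModule is injected into the module globals in
# setup_loader_modules() below.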
class MockNapalmYangModel(object):
    def Root(self):
        return MagicMock()


class MockNapalmYangModels(object):
    openconfig_interfaces = MockNapalmYangModel()


class MockUtils(object):
    def diff(self, *args):
        return TEST_DIFF


class MockNapalmYangModule(object):
    base = MockNapalmYangModel()
    models = MockNapalmYangModels()
    utils = MockUtils()

TEST_CONFIG = {
    'comment': 'Configuration discarded.',
    'already_configured': False,
    'result': True,
    'diff': '[edit interfaces xe-0/0/5]+ description "Adding a description";'
}


def mock_net_load_config(**kwargs):
    return TEST_CONFIG


@skipIf(NO_MOCK, NO_MOCK_REASON)
class NapalmYangModModuleTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                }),
                'file.file_exists': napalm_test_support.true,
                'file.join': napalm_test_support.join,
                'file.get_managed': napalm_test_support.get_managed_file,
                'random.hash': napalm_test_support.random_hash,
                'net.load_template': napalm_network.load_template,
                'net.load_config': mock_net_load_config
            }
        }
        module_globals['napalm_yang'] = MockNapalmYangModule()

        return {napalm_yang_mod: module_globals, napalm_network: module_globals}

    def test_diff(self):
        ret = napalm_yang_mod.diff({}, {'test': True}, 'models.openconfig_interfaces')
        assert ret == TEST_DIFF

    def test_diff_list(self):
        '''
        Test it with an actual list
        '''
        ret = napalm_yang_mod.diff({}, {'test': True}, ['models.openconfig_interfaces'])
        assert ret == TEST_DIFF

    def test_parse(self):
        ret = napalm_yang_mod.parse('models.openconfig_interfaces')
        assert ret is not None

    def test_get_config(self):
        ret = napalm_yang_mod.get_config({}, 'models.openconfig_interfaces')
        assert ret is not None

    def test_load_config(self):
        ret = napalm_yang_mod.load_config({}, 'models.openconfig_interfaces')
        assert ret is TEST_CONFIG

    def test_compliance_report(self):
        ret = napalm_yang_mod.compliance_report({}, 'models.openconfig_interfaces')
        assert ret is not None

134  tests/unit/pillar/test_file_tree.py  Normal file
@ -0,0 +1,134 @@
# -*- coding: utf-8 -*-
'''test for pillar file_tree.py'''

# Import python libs
from __future__ import absolute_import

import os
import tempfile
import shutil

# Import Salt Testing libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, MagicMock
from tests.support.paths import TMP
from tests.support.helpers import TestsLoggingHandler

# Import Salt Libs
import salt.utils.files
import salt.pillar.file_tree as file_tree


MINION_ID = 'test-host'
NODEGROUP_PATH = os.path.join('nodegroups', 'test-group', 'files')
HOST_PATH = os.path.join('hosts', MINION_ID, 'files')

BASE_PILLAR_CONTENT = {'files': {'hostfile': b'base', 'groupfile': b'base'}}
DEV_PILLAR_CONTENT = {'files': {'hostfile': b'base', 'groupfile': b'dev2',
                                'hostfile1': b'dev1', 'groupfile1': b'dev1',
                                'hostfile2': b'dev2'}}
PARENT_PILLAR_CONTENT = {'files': {'hostfile': b'base', 'groupfile': b'base',
                                   'hostfile2': b'dev2'}}
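
# Fixture files written to disk by _create_pillar_files(); dev2 redefines
# 'groupfile' so the tests can check that environments are merged correctly.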
FILE_DATA = {
    os.path.join('base', HOST_PATH, 'hostfile'): 'base',
    os.path.join('dev1', HOST_PATH, 'hostfile1'): 'dev1',
    os.path.join('dev2', HOST_PATH, 'hostfile2'): 'dev2',
    os.path.join('base', NODEGROUP_PATH, 'groupfile'): 'base',
    os.path.join('dev1', NODEGROUP_PATH, 'groupfile1'): 'dev1',
    os.path.join('dev2', NODEGROUP_PATH, 'groupfile'): 'dev2'  # test merging
}

_CHECK_MINIONS_RETURN = {'minions': [MINION_ID], 'missing': []}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class FileTreePillarTestCase(TestCase, LoaderModuleMockMixin):
    'test file_tree pillar'
    maxDiff = None

    def setup_loader_modules(self):
        self.tmpdir = tempfile.mkdtemp(dir=TMP)
        self.addCleanup(shutil.rmtree, self.tmpdir)
        cachedir = os.path.join(self.tmpdir, 'cachedir')
        os.makedirs(os.path.join(cachedir, 'file_tree'))
        self.pillar_path = self._create_pillar_files()
        return {
            file_tree: {
                '__opts__': {
                    'cachedir': cachedir,
                    'pillar_roots': {
                        'base': [os.path.join(self.pillar_path, 'base')],
                        'dev': [os.path.join(self.pillar_path, 'base'),
                                os.path.join(self.pillar_path, 'dev1'),
                                os.path.join(self.pillar_path, 'dev2')
                                ],
                        'parent': [os.path.join(self.pillar_path, 'base', 'sub1'),
                                   os.path.join(self.pillar_path, 'dev2', 'sub'),
                                   os.path.join(self.pillar_path, 'base', 'sub2'),
                                   ],
                    },
                    'pillarenv': 'base',
                    'nodegroups': {'test-group': [MINION_ID]},
                    'file_buffer_size': 262144,
                    'file_roots': {'base': '', 'dev': '', 'parent': ''},
                    'extension_modules': '',
                    'renderer': 'yaml_jinja',
                    'renderer_blacklist': [],
                    'renderer_whitelist': []
                }
            }
        }

    def _create_pillar_files(self):
        'create files in tempdir'
        pillar_path = os.path.join(self.tmpdir, 'file_tree')
        for filename in FILE_DATA:
            filepath = os.path.join(pillar_path, filename)
            os.makedirs(os.path.dirname(filepath))
            with salt.utils.files.fopen(filepath, 'w') as data_file:
                data_file.write(FILE_DATA[filename])
        return pillar_path

    def test_absolute_path(self):
        'check file tree is imported correctly with an absolute path'
        absolute_path = os.path.join(self.pillar_path, 'base')
        with patch('salt.utils.minions.CkMinions.check_minions', MagicMock(return_value=_CHECK_MINIONS_RETURN)):
            mypillar = file_tree.ext_pillar(MINION_ID, None, absolute_path)
            self.assertEqual(BASE_PILLAR_CONTENT, mypillar)

            with patch.dict(file_tree.__opts__, {'pillarenv': 'dev'}):
                mypillar = file_tree.ext_pillar(MINION_ID, None, absolute_path)
                self.assertEqual(BASE_PILLAR_CONTENT, mypillar)

    def test_relative_path(self):
        'check file tree is imported correctly with a relative path'
        with patch('salt.utils.minions.CkMinions.check_minions', MagicMock(return_value=_CHECK_MINIONS_RETURN)):
            mypillar = file_tree.ext_pillar(MINION_ID, None, '.')
            self.assertEqual(BASE_PILLAR_CONTENT, mypillar)

            with patch.dict(file_tree.__opts__, {'pillarenv': 'dev'}):
                mypillar = file_tree.ext_pillar(MINION_ID, None, '.')
                self.assertEqual(DEV_PILLAR_CONTENT, mypillar)

    def test_parent_path(self):
        'check if file tree is merged correctly with a .. path'
        with patch('salt.utils.minions.CkMinions.check_minions', MagicMock(return_value=_CHECK_MINIONS_RETURN)):
            with patch.dict(file_tree.__opts__, {'pillarenv': 'parent'}):
                mypillar = file_tree.ext_pillar(MINION_ID, None, '..')
                self.assertEqual(PARENT_PILLAR_CONTENT, mypillar)

    def test_no_pillarenv(self):
        'confirm that file_tree yells when pillarenv is missing for a relative path'
        with patch('salt.utils.minions.CkMinions.check_minions', MagicMock(return_value=_CHECK_MINIONS_RETURN)):
            with patch.dict(file_tree.__opts__, {'pillarenv': None}):
                with TestsLoggingHandler() as handler:
                    mypillar = file_tree.ext_pillar(MINION_ID, None, '.')
                    self.assertEqual({}, mypillar)

                    for message in handler.messages:
                        if message.startswith('ERROR:') and 'pillarenv is not set' in message:
                            break
                    else:
                        raise AssertionError('Did not find error message')

100  tests/unit/proxy/test_napalm.py  Normal file
@ -0,0 +1,100 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Anthony Shaw <anthonyshaw@apache.org>`
'''

# Import Python Libs
from __future__ import absolute_import

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
    MagicMock,
    NO_MOCK,
    NO_MOCK_REASON,
    patch
)

import tests.support.napalm as napalm_test_support
import salt.proxy.napalm as napalm_proxy  # NOQA


TEST_OPTS = {
    'proxytype': 'napalm',
    'driver': 'junos',
    'host': 'core05.nrt02'
}


def mock_get_device(opts, *args, **kwargs):
    assert opts == TEST_OPTS
    return {
        'DRIVER': napalm_test_support.MockNapalmDevice(),
        'UP': True
    }
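# Patched in for salt.utils.napalm.get_device below, so the proxy is initialised
# against the mock device instead of opening a real connection.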


@skipIf(NO_MOCK, NO_MOCK_REASON)
@patch('salt.utils.napalm.get_device', mock_get_device)
class NapalmProxyTestCase(TestCase, LoaderModuleMockMixin):

    def setup_loader_modules(self):
        module_globals = {
            '__salt__': {
                'config.option': MagicMock(return_value={
                    'test': {
                        'driver': 'test',
                        'key': '2orgk34kgk34g'
                    }
                })
            }
        }
        module_globals['napalm_base'] = MagicMock()
        return {napalm_proxy: module_globals}

    def test_init(self):
        ret = napalm_proxy.init(TEST_OPTS)
        assert ret is True

    def test_alive(self):
        ret = napalm_proxy.alive(TEST_OPTS)
        assert ret is True

    def test_ping(self):
        napalm_proxy.init(TEST_OPTS)
        ret = napalm_proxy.ping()
        assert ret is True

    def test_initialized(self):
        napalm_proxy.init(TEST_OPTS)
        ret = napalm_proxy.initialized()
        assert ret is True

    def test_get_device(self):
        napalm_proxy.init(TEST_OPTS)
        ret = napalm_proxy.get_device()
        assert ret['UP'] is True

    def test_get_grains(self):
        napalm_proxy.init(TEST_OPTS)
        ret = napalm_proxy.get_grains()
        assert ret['out'] == napalm_test_support.TEST_FACTS

    def test_grains_refresh(self):
        napalm_proxy.init(TEST_OPTS)
        ret = napalm_proxy.grains_refresh()
        assert ret['out'] == napalm_test_support.TEST_FACTS

    def test_fns(self):
        ret = napalm_proxy.fns()
        assert 'details' in ret.keys()

    def test_shutdown(self):
        ret = napalm_proxy.shutdown(TEST_OPTS)
        assert ret is True

    def test_call(self):
        napalm_proxy.init(TEST_OPTS)
        ret = napalm_proxy.call('get_arp_table')
        assert ret['out'] == napalm_test_support.TEST_ARP_TABLE

@ -56,9 +56,7 @@ class BotoDynamodbTestCase(TestCase, LoaderModuleMockMixin):
        self.assertDictEqual(boto_dynamodb.present(name), ret)

        with patch.dict(boto_dynamodb.__opts__, {'test': True}):
            comt = ('DynamoDB table {0} is set to be created.,\n'
                    'DynamoDB table {0} throughput matches,\n'
                    'All global secondary indexes match,\n'.format(name))
            comt = ('DynamoDB table {0} would be created.'.format(name))
            ret.update({'comment': comt, 'result': None})
            self.assertDictEqual(boto_dynamodb.present(name), ret)