Mirror of https://github.com/valitydev/salt.git (synced 2024-11-07 00:55:19 +00:00)

Merge pull request #25772 from s0undt3ch/features/raas-5-salt-ssh
Salt-SSH Py3 thin & Config schema

Commit df5aaeef61
@@ -45,7 +45,6 @@ The information which can be stored in a roster `target` is the following:
     priv:        # File path to ssh private key, defaults to salt-ssh.rsa
     timeout:     # Number of seconds to wait for response when establishing
                  # an SSH connection
-    timeout:     # Number of seconds to wait for response
     minion_opts: # Dictionary of minion opts
     thin_dir:    # The target system's storage directory for Salt
                  # components. Defaults to /tmp/salt-<hash>.

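For context, a single roster entry combining the documented fields might look like the following sketch (host name and values are purely illustrative):

    web1:
      host: 192.168.42.1
      user: root
      passwd: hunter2
      sudo: True
      timeout: 30
      thin_dir: /tmp/salt-thin
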
@@ -4,6 +4,7 @@ Create ssh executor system
 '''
 # Import python libs
 from __future__ import absolute_import, print_function
+import base64
 import copy
 import getpass
 import json

@@ -126,9 +127,9 @@ if [ -n "{{SUDO}}" ]
 then SUDO="sudo "
 fi
 EX_PYTHON_INVALID={EX_THIN_PYTHON_INVALID}
-PYTHON_CMDS="python27 python2.7 python26 python2.6 python2 python"
+PYTHON_CMDS="python3 python27 python2.7 python26 python2.6 python2 python"
 for py_cmd in $PYTHON_CMDS
-do if "$py_cmd" -c "import sys; sys.exit(not (sys.hexversion >= 0x02060000 and sys.version_info[0] == {{HOST_PY_MAJOR}}));" >/dev/null 2>&1
+do if "$py_cmd" -c "import sys; sys.exit(not (sys.version_info >= (2, 6) and sys.version_info[0] == {{HOST_PY_MAJOR}}));"
 then py_cmd_path=`"$py_cmd" -c 'from __future__ import print_function; import sys; print(sys.executable);'`
 exec $SUDO "$py_cmd_path" -c 'import base64; exec(base64.b64decode("""{{SSH_PY_CODE}}""").decode("utf-8"))'
 exit 0

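The shim change above is the user-visible half of the Py3 support: python3 is now probed first, but an interpreter is only accepted when its major version matches that of the master which generated the thin tarball. A minimal Python sketch of that acceptance test (HOST_PY_MAJOR is the template placeholder the master fills in; the value below is illustrative):

    import sys

    HOST_PY_MAJOR = 3  # illustrative; the shim substitutes the master's sys.version_info[0]

    def interpreter_is_acceptable(version_info=sys.version_info):
        # Mirrors the shim: at least Python 2.6 and the same major version as the master.
        return version_info >= (2, 6) and version_info[0] == HOST_PY_MAJOR
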
@@ -200,7 +201,13 @@ class SSH(object):
         try:
             salt.client.ssh.shell.gen_key(priv)
         except OSError:
-            raise salt.exceptions.SaltClientError('salt-ssh could not be run because it could not generate keys.\n\nYou can probably resolve this by executing this script with increased permissions via sudo or by running as root.\nYou could also use the \'-c\' option to supply a configuration directory that you have permissions to read and write to.')
+            raise salt.exceptions.SaltClientError(
+                'salt-ssh could not be run because it could not generate keys.\n\n'
+                'You can probably resolve this by executing this script with '
+                'increased permissions via sudo or by running as root.\n'
+                'You could also use the \'-c\' option to supply a configuration '
+                'directory that you have permissions to read and write to.'
+            )
         self.defaults = {
             'user': self.opts.get(
                 'ssh_user',

@@ -239,7 +246,9 @@ class SSH(object):
         self.serial = salt.payload.Serial(opts)
         self.returners = salt.loader.returners(self.opts, {})
         self.fsclient = salt.fileclient.FSClient(self.opts)
-        self.thin = salt.utils.thin.gen_thin(self.opts['cachedir'])
+        self.thin = salt.utils.thin.gen_thin(self.opts['cachedir'],
+                                             python2_bin=self.opts['python2_bin'],
+                                             python3_bin=self.opts['python3_bin'])
         self.mods = mod_data(self.fsclient)

     def get_pubkey(self):

@@ -437,7 +446,7 @@ class SSH(object):
             if len(running) >= self.opts.get('ssh_max_procs', 25) or len(self.targets) >= len(running):
                 time.sleep(0.1)

-    def run_iter(self, mine=False):
+    def run_iter(self, mine=False, jid=None):
         '''
         Execute and yield returns as they come in, do not print to the display

@@ -447,7 +456,7 @@ class SSH(object):
        will modify the argv with the arguments from mine_functions
        '''
        fstr = '{0}.prep_jid'.format(self.opts['master_job_cache'])
-        jid = self.returners[fstr]()
+        jid = self.returners[fstr](passed_jid=jid or self.opts.get('jid', None))

        # Save the invocation information
        argv = self.opts['argv']

@@ -491,12 +500,12 @@ class SSH(object):
                 'return': ret,
                 'fun': fun})

-    def run(self):
+    def run(self, jid=None):
         '''
         Execute the overall routine, print results via outputters
         '''
         fstr = '{0}.prep_jid'.format(self.opts['master_job_cache'])
-        jid = self.returners[fstr]()
+        jid = self.returners[fstr](passed_jid=jid or self.opts.get('jid', None))

         # Save the invocation information
         argv = self.opts['argv']

@@ -519,8 +528,11 @@ class SSH(object):

         # save load to the master job cache
         try:
+            if isinstance(jid, bytes):
+                jid = jid.decode('utf-8')
             self.returners['{0}.save_load'.format(self.opts['master_job_cache'])](jid, job_load)
         except Exception as exc:
+            log.exception(exc)
             log.error('Could not save load with returner {0}: {1}'.format(self.opts['master_job_cache'], exc))

         if self.opts.get('verbose'):

@@ -911,8 +923,10 @@ ARGS = {9}\n'''.format(self.minion_config,
                        self.tty,
                        self.argv)
         py_code = SSH_PY_SHIM.replace('#%%OPTS', arg_str)
-        py_code_enc = py_code.encode('base64')
+        if six.PY2:
+            py_code_enc = py_code.encode('base64')
+        else:
+            py_code_enc = base64.encodebytes(py_code.encode('utf-8')).decode('utf-8')
         cmd = SSH_SH_SHIM.format(
             DEBUG=debug,
             SUDO=sudo,

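str.encode('base64') is a Python 2-only codec, hence the six.PY2 branch; under Python 3 the shim source is encoded to UTF-8 bytes first and base64.encodebytes() is used. For comparison, a hedged sketch of one version-agnostic alternative (not what the patch does; the output differs only in line wrapping, which b64decode tolerates):

    import base64

    def encode_shim(py_code):
        # b64encode accepts bytes on both Python 2 and 3; decode back to text
        # so the result can be interpolated into the shell shim.
        return base64.b64encode(py_code.encode('utf-8')).decode('ascii')
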
@@ -1233,9 +1247,16 @@ def ssh_version():
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE).communicate()
     try:
-        return ret[1].split(b',')[0].split(b'_')[1]
+        version_parts = ret[1].split(b',')[0].split(b'_')[1]
+        parts = []
+        for part in version_parts:
+            try:
+                parts.append(int(part))
+            except ValueError:
+                return tuple(parts)
+        return tuple(parts)
     except IndexError:
-        return '2.0'
+        return (2, 0)


 def _convert_args(args):

@@ -82,7 +82,7 @@ class SSHClient(object):
                 expr_form,
                 kwarg,
                 **kwargs)
-        for ret in ssh.run_iter():
+        for ret in ssh.run_iter(jid=kwargs.get('jid', None)):
             yield ret

     def cmd(

@@ -109,7 +109,7 @@ class SSHClient(object):
                 kwarg,
                 **kwargs)
         final = {}
-        for ret in ssh.run_iter():
+        for ret in ssh.run_iter(jid=kwargs.get('jid', None)):
             final.update(ret)
         return final

@@ -96,7 +96,7 @@ class Shell(object):
             options.append('PasswordAuthentication=yes')
         else:
             options.append('PasswordAuthentication=no')
-        if self.opts.get('_ssh_version', '') > '4.9':
+        if self.opts.get('_ssh_version', (0,)) > (4, 9):
             options.append('GSSAPIAuthentication=no')
         options.append('ConnectTimeout={0}'.format(self.timeout))
         if self.opts.get('ignore_host_keys'):

@@ -131,7 +131,7 @@ class Shell(object):
         options = ['ControlMaster=auto',
                    'StrictHostKeyChecking=no',
                    ]
-        if self.opts['_ssh_version'] > '4.9':
+        if self.opts['_ssh_version'] > (4, 9):
             options.append('GSSAPIAuthentication=no')
         options.append('ConnectTimeout={0}'.format(self.timeout))
         if self.opts.get('ignore_host_keys'):

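Keeping _ssh_version as a tuple (produced by the reworked ssh_version() above) is what makes these '> (4, 9)' checks dependable; string comparison is lexicographic and misorders multi-digit components:

    '6.10' > '6.9'    # False -- the string comparison gives the wrong answer
    (6, 10) > (6, 9)  # True  -- tuples compare element by element
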
New file: salt/config/schemas/__init__.py (9 lines)

# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`

    salt.config.schemas
    ~~~~~~~~~~~~~~~~~~~

    Salt configuration related schemas for future validation
'''

New file: salt/config/schemas/common.py (63 lines)

# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`


    salt.config.schemas.common
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    Common salt configuration schemas
'''

# Import Pythosn libs
from __future__ import absolute_import

# Import salt libs
from salt.utils.schema import (Schema,
                               StringItem,
                               ArrayItem,
                               OneOfItem)


class DefaultIncludeConfig(StringItem):
    '''
    Per default, the {0}, will automatically include all config files
    from '{1}/*.conf' ('{1}' is a sub-directory in the same directory
    as the main {0} config file).
    '''
    __target__ = None
    __confd_directory__ = None

    title = 'Include Config'
    description = __doc__

    def __init__(self, default=None, pattern=None, **kwargs):
        default = '{0}/*.conf'.format(self.__confd_directory__)
        pattern = r'(?:.*)/\*\.conf'
        super(DefaultIncludeConfig, self).__init__(default=default, pattern=pattern, **kwargs)

    def __validate_attributes__(self):
        self.__doc__ = DefaultIncludeConfig.__doc__.format(self.__target__,
                                                           self.__confd_directory__)
        super(DefaultIncludeConfig, self).__validate_attributes__()

    def __get_description__(self):
        return self.__doc__.format(self.__target__, self.__confd_directory__)


class MinionDefaultInclude(DefaultIncludeConfig):
    __target__ = 'minion'
    __confd_directory__ = 'minion.d'


class MasterDefaultInclude(DefaultIncludeConfig):
    __target__ = 'master'
    __confd_directory = 'master.d'


class IncludeConfig(Schema):
    title = 'Include Configuration File(s)'
    description = 'Include one or more specific configuration files'

    string_or_array = OneOfItem(items=(StringItem(),
                                       ArrayItem(items=StringItem())))(flatten=True)

New file: salt/config/schemas/minion.py (35 lines)

# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`

    salt.config.schemas.minion
    ~~~~~~~~~~~~~~~~~~~~~~~~~~

    Minion configuration schema
'''

# Import python libs
from __future__ import absolute_import

# Import salt libs
from salt.utils.schema import (Schema,
                               IPv4Item,
                               )
from salt.config.schemas.common import (MinionDefaultInclude,
                                        IncludeConfig
                                        )

# XXX: THIS IS WAY TOO MINIMAL, BUT EXISTS TO IMPLEMENT salt-ssh


class MinionConfiguration(Schema):

    # Because salt's configuration is very permissive with additioal
    # configuration settings, let's allow them in the schema or validation
    # would fail
    __allow_additional_items__ = True

    interface = IPv4Item(title='Interface')

    default_include = MinionDefaultInclude()
    include = IncludeConfig()

New file: salt/config/schemas/ssh.py (79 lines)

# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`


    salt.config.schemas.ssh
    ~~~~~~~~~~~~~~~~~~~~~~~

    Salt SSH related configuration schemas
'''

# Import Python libs
from __future__ import absolute_import

# Import Salt libs
from salt.utils.schema import (Schema,
                               StringItem,
                               IntegerItem,
                               SecretItem,
                               PortItem,
                               BooleanItem,
                               RequirementsItem,
                               DictItem,
                               AnyOfItem
                               )
from salt.config.schemas.minion import MinionConfiguration


class RosterEntryConfig(Schema):
    '''
    Schema definition of a Salt SSH Roster entry
    '''

    title = 'Roster Entry'
    description = 'Salt SSH roster entry definition'

    host = StringItem(title='Host',
                      description='The IP address or DNS name of the remote host',
                      # Pretty naive pattern matching
                      pattern=r'^((\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})|([A-Za-z0-9][A-Za-z0-9\.\-]{1,255}))$',
                      min_length=1,
                      required=True)
    port = PortItem(title='Port',
                    description='The target system\'s ssh port number',
                    default=22)
    user = StringItem(title='User',
                      description='The user to log in as. Defaults to root',
                      default='root',
                      min_length=1,
                      required=True)
    passwd = SecretItem(title='Password',
                        description='The password to log in with',
                        min_length=1)
    priv = StringItem(title='Private Key',
                      description='File path to ssh private key, defaults to salt-ssh.rsa',
                      min_length=1)
    passwd_or_priv_requirement = AnyOfItem(items=(RequirementsItem(requirements=['passwd']),
                                                  RequirementsItem(requirements=['priv'])))(flatten=True)
    sudo = BooleanItem(title='Sudo',
                       description='run command via sudo. Defaults to False',
                       default=False)
    timeout = IntegerItem(title='Timeout',
                          description=('Number of seconds to wait for response '
                                       'when establishing an SSH connection'))
    thin_dir = StringItem(title='Thin Directory',
                          description=('The target system\'s storage directory for Salt '
                                       'components. Defaults to /tmp/salt-<hash>.'))
    minion_opts = DictItem(title='Minion Options',
                           description='Dictionary of minion options',
                           properties=MinionConfiguration())


class RosterItem(Schema):
    title = 'Roster Configuration'
    description = 'Roster entries definition'

    roster_entries = DictItem(
        pattern_properties={
            r'^([^:]+)$': RosterEntryConfig()})(flatten=True)

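A minimal sketch of exercising the new schema (mirroring what the unit tests added below do; the roster data is made up):

    import jsonschema
    from salt.config.schemas.ssh import RosterItem

    roster = {
        'web1': {'host': '192.168.42.1', 'user': 'root', 'passwd': 'hunter2'},
    }

    # Raises jsonschema.exceptions.ValidationError if an entry lacks both
    # 'passwd' and 'priv', or violates any other constraint.
    jsonschema.validate(roster, RosterItem.serialize(),
                        format_checker=jsonschema.FormatChecker())
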
@@ -19,6 +19,10 @@ import salt.payload
 import salt.utils
 import salt.utils.jid
+
+# Import 3rd-party libs
+import salt.ext.six as six
+

 log = logging.getLogger(__name__)

 # load is the published job

@@ -45,8 +49,10 @@ def _jid_dir(jid):
     '''
     Return the jid_dir for the given job id
     '''
-    jid = str(jid)
-    jhash = getattr(hashlib, __opts__['hash_type'])(jid).hexdigest()
+    if six.PY3:
+        jhash = getattr(hashlib, __opts__['hash_type'])(jid.encode('utf-8')).hexdigest()
+    else:
+        jhash = getattr(hashlib, __opts__['hash_type'])(str(jid)).hexdigest()
     return os.path.join(_job_dir(),
                         jhash[:2],
                         jhash[2:])

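The six.PY3 branch is needed because hashlib on Python 3 refuses text input, so the jid must be encoded before hashing, while Python 2 hashes the byte string str(jid) directly. For illustration:

    import hashlib

    hashlib.sha256('20150625123456789012'.encode('utf-8')).hexdigest()  # fine on Py2 and Py3
    # hashlib.sha256('20150625123456789012')  # TypeError on Python 3: Unicode must be encoded
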
@@ -96,7 +102,10 @@ def prep_jid(nocache=False, passed_jid=None):
         return prep_jid(nocache=nocache)

     with salt.utils.fopen(os.path.join(jid_dir_, 'jid'), 'wb+') as fn_:
-        fn_.write(jid)
+        if six.PY2:
+            fn_.write(jid)
+        else:
+            fn_.write(bytes(jid, 'utf-8'))
     if nocache:
         with salt.utils.fopen(os.path.join(jid_dir_, 'nocache'), 'wb+') as fn_:
             fn_.write('')

@@ -15,7 +15,8 @@ from __future__ import absolute_import
 import salt.utils.thin


-def generate(extra_mods='', overwrite=False, so_mods=''):
+def generate(extra_mods='', overwrite=False, so_mods='',
+             python2_bin='python2', python3_bin='python3'):
     '''
     Generate the salt-thin tarball and print the location of the tarball
     Optional additional mods to include (e.g. mako) can be supplied as a comma

@@ -30,4 +31,9 @@ def generate(extra_mods='', overwrite=False, so_mods=''):
         salt-run thin.generate mako,wempy 1
         salt-run thin.generate overwrite=1
     '''
-    return salt.utils.thin.gen_thin(__opts__['cachedir'], extra_mods, overwrite, so_mods)
+    return salt.utils.thin.gen_thin(__opts__['cachedir'],
+                                    extra_mods,
+                                    overwrite,
+                                    so_mods,
+                                    python2_bin,
+                                    python3_bin)

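Assuming the usual key=value argument parsing of salt-run, the new parameters can be supplied from the command line, for example (paths are illustrative):

    salt-run thin.generate python2_bin=/usr/bin/python2.7 python3_bin=/usr/bin/python3.4
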
@@ -218,12 +218,12 @@ def query(url,
     # proper cookie jar. Unfortunately, since session cookies do not
     # contain expirations, they can't be stored in a proper cookie jar.
     if os.path.isfile(session_cookie_jar):
-        with salt.utils.fopen(session_cookie_jar, 'r') as fh_:
+        with salt.utils.fopen(session_cookie_jar, 'rb') as fh_:
             session_cookies = msgpack.load(fh_)
             if isinstance(session_cookies, dict):
                 header_dict.update(session_cookies)
     else:
-        with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
+        with salt.utils.fopen(session_cookie_jar, 'wb') as fh_:
             msgpack.dump('', fh_)

     for header in header_list:

@@ -462,7 +462,7 @@ def query(url,
     if persist_session is True and HAS_MSGPACK:
         # TODO: See persist_session above
         if 'set-cookie' in result_headers:
-            with salt.utils.fopen(session_cookie_jar, 'w') as fh_:
+            with salt.utils.fopen(session_cookie_jar, 'wb') as fh_:
                 session_cookies = result_headers.get('set-cookie', None)
                 if session_cookies is not None:
                     msgpack.dump({'Cookie': session_cookies}, fh_)

@@ -32,6 +32,7 @@ import salt.utils as utils
 import salt.version as version
 import salt.utils.args
 import salt.utils.xdg
+import salt.utils.jid
 from salt.utils import kinds
 from salt.defaults import DEFAULT_TARGET_DELIM
 from salt.utils.validate.path import is_writeable

@@ -2446,6 +2447,21 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
             action='store_true',
             help=('Select a random temp dir to deploy on the remote system. '
                   'The dir will be cleaned after the execution.'))
+        self.add_option(
+            '--python2-bin',
+            default='python2',
+            help='Path to a python2 binary which has salt installed'
+        )
+        self.add_option(
+            '--python3-bin',
+            default='python3',
+            help='Path to a python3 binary which has salt installed'
+        )
+        self.add_option(
+            '--jid',
+            default=None,
+            help='Pass a JID to be used instead of generating one'
+        )

         auth_group = optparse.OptionGroup(
             self, 'Authentication Options',

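These options feed the opts keys consumed earlier by the SSH class (python2_bin, python3_bin) and by run()/run_iter() (jid). An illustrative invocation (target and paths are made up):

    salt-ssh 'web*' test.ping --python3-bin=/opt/py3/bin/python3 --jid=20150625123456789012
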
@@ -2556,6 +2572,11 @@ class SaltSSHOptionParser(six.with_metaclass(OptionParserMeta,
     def setup_config(self):
         return config.master_config(self.get_config_file_path())

+    def process_jid(self):
+        if self.options.jid is not None:
+            if not salt.utils.jid.is_jid(self.options.jid):
+                self.error('\'{0}\' is not a valid JID'.format(self.options.jid))
+

 class SaltCloudParser(six.with_metaclass(OptionParserMeta,
                                          OptionParser,

@@ -520,7 +520,7 @@ class Schema(six.with_metaclass(SchemaMeta, object)):
     # Define some class level attributes to make PyLint happier
     title = None
     description = None
-    _items = _sections = None
+    _items = _sections = _order = None
     __flatten__ = False
     __allow_additional_items__ = False

@@ -7,16 +7,20 @@ Generate the salt thin tarball from the installed python files
 from __future__ import absolute_import

 import os
+import sys
+import json
 import shutil
 import tarfile
 import zipfile
 import tempfile
+import subprocess

 # Import third party libs
 import jinja2
 import yaml
 import salt.ext.six as six
 import tornado
+import msgpack

 # pylint: disable=import-error,no-name-in-module
 try:

@@ -56,6 +60,17 @@ import salt
 import salt.utils

 SALTCALL = '''
+import os
+import sys
+
+sys.path.insert(
+    0,
+    os.path.join(
+        os.path.dirname(__file__),
+        'py{0[0]}'.format(sys.version_info)
+    )
+)
+
 from salt.scripts import salt_call
 if __name__ == '__main__':
     salt_call()

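The 'py{0[0]}'.format(sys.version_info) expression selects the thin sub-directory matching whichever interpreter ends up running salt-call on the target:

    import sys
    'py{0[0]}'.format(sys.version_info)  # 'py2' under Python 2.x, 'py3' under Python 3.x
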
@@ -69,50 +84,13 @@ def thin_path(cachedir):
     return os.path.join(cachedir, 'thin', 'thin.tgz')


-def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
-    '''
-    Generate the salt-thin tarball and print the location of the tarball
-    Optional additional mods to include (e.g. mako) can be supplied as a comma
-    delimited string. Permits forcing an overwrite of the output file as well.
-
-    CLI Example:
-
-    .. code-block:: bash
-
-        salt-run thin.generate
-        salt-run thin.generate mako
-        salt-run thin.generate mako,wempy 1
-        salt-run thin.generate overwrite=1
-    '''
-    thindir = os.path.join(cachedir, 'thin')
-    if not os.path.isdir(thindir):
-        os.makedirs(thindir)
-    thintar = os.path.join(thindir, 'thin.tgz')
-    thinver = os.path.join(thindir, 'version')
-    salt_call = os.path.join(thindir, 'salt-call')
-    with salt.utils.fopen(salt_call, 'w+') as fp_:
-        fp_.write(SALTCALL)
-    if os.path.isfile(thintar):
-        if not overwrite:
-            if os.path.isfile(thinver):
-                with salt.utils.fopen(thinver) as fh_:
-                    overwrite = fh_.read() != salt.version.__version__
-            else:
-                overwrite = True
-
-        if overwrite:
-            try:
-                os.remove(thintar)
-            except OSError:
-                pass
-        else:
-            return thintar
-
+def get_tops(extra_mods='', so_mods=''):
     tops = [
         os.path.dirname(salt.__file__),
         os.path.dirname(jinja2.__file__),
         os.path.dirname(yaml.__file__),
         os.path.dirname(tornado.__file__),
+        os.path.dirname(msgpack.__file__)
     ]

     tops.append(six.__file__.replace('.pyc', '.py'))

@@ -152,41 +130,135 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
         pass  # As per comment above
     if HAS_MARKUPSAFE:
         tops.append(os.path.dirname(markupsafe.__file__))
+
+    return tops
+
+
+def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods='',
+             python2_bin='python2', python3_bin='python3'):
+    '''
+    Generate the salt-thin tarball and print the location of the tarball
+    Optional additional mods to include (e.g. mako) can be supplied as a comma
+    delimited string. Permits forcing an overwrite of the output file as well.
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt-run thin.generate
+        salt-run thin.generate mako
+        salt-run thin.generate mako,wempy 1
+        salt-run thin.generate overwrite=1
+    '''
+    thindir = os.path.join(cachedir, 'thin')
+    if not os.path.isdir(thindir):
+        os.makedirs(thindir)
+    thintar = os.path.join(thindir, 'thin.tgz')
+    thinver = os.path.join(thindir, 'version')
+    pythinver = os.path.join(thindir, '.thin-gen-py-version')
+    salt_call = os.path.join(thindir, 'salt-call')
+    with salt.utils.fopen(salt_call, 'w+') as fp_:
+        fp_.write(SALTCALL)
+    if os.path.isfile(thintar):
+        if not overwrite:
+            if os.path.isfile(thinver):
+                with salt.utils.fopen(thinver) as fh_:
+                    overwrite = fh_.read() != salt.version.__version__
+                if overwrite is False and os.path.isfile(pythinver):
+                    with salt.utils.fopen(pythinver) as fh_:
+                        overwrite = fh_.read() != str(sys.version_info[0])
+            else:
+                overwrite = True
+
+        if overwrite:
+            try:
+                os.remove(thintar)
+            except OSError:
+                pass
+        else:
+            return thintar
+
+    tops_py_version_mapping = {}
+    tops = get_tops(extra_mods=extra_mods, so_mods=so_mods)
+    if six.PY2:
+        tops_py_version_mapping['2'] = tops
+    else:
+        tops_py_version_mapping['3'] = tops
+
+    # TODO: Consider putting known py2 and py3 compatible libs in it's own sharable directory.
+    # This would reduce the thin size.
+    if six.PY2 and sys.version_info[0] == 2:
+        # Get python 3 tops
+        py_shell_cmd = (
+            python3_bin + ' -c \'import sys; import json; import salt.utils.thin; '
+            'print(json.dumps(salt.utils.thin.get_tops(**(json.loads(sys.argv[1]))))); exit(0);\' '
+            '\'{0}\''.format(json.dumps({'extra_mods': extra_mods, 'so_mods': so_mods}))
+        )
+        cmd = subprocess.Popen(py_shell_cmd, stdout=subprocess.PIPE, shell=True)
+        stdout, stderr = cmd.communicate()
+        if cmd.returncode == 0:
+            try:
+                tops = json.loads(stdout)
+                tops_py_version_mapping['3'] = tops
+            except ValueError:
+                pass
+    if six.PY3 and sys.version_info[0] == 3:
+        # Get python 2 tops
+        py_shell_cmd = (
+            python2_bin + ' -c \'from __future__ import print_function; '
+            'import sys; import json; import salt.utils.thin; '
+            'print(json.dumps(salt.utils.thin.get_tops(**(json.loads(sys.argv[1]))))); exit(0);\' '
+            '\'{0}\''.format(json.dumps({'extra_mods': extra_mods, 'so_mods': so_mods}))
+        )
+        cmd = subprocess.Popen(py_shell_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+        stdout, stderr = cmd.communicate()
+        if cmd.returncode == 0:
+            try:
+                tops = json.loads(stdout.decode('utf-8'))
+                tops_py_version_mapping['2'] = tops
+            except ValueError:
+                pass
+
     tfp = tarfile.open(thintar, 'w:gz', dereference=True)
     try:  # cwd may not exist if it was removed but salt was run from it
         start_dir = os.getcwd()
     except OSError:
         start_dir = None
     tempdir = None
-    for top in tops:
-        base = os.path.basename(top)
-        top_dirname = os.path.dirname(top)
-        if os.path.isdir(top_dirname):
-            os.chdir(top_dirname)
-        else:
-            # This is likely a compressed python .egg
-            tempdir = tempfile.mkdtemp()
-            egg = zipfile.ZipFile(top_dirname)
-            egg.extractall(tempdir)
-            top = os.path.join(tempdir, base)
-            os.chdir(tempdir)
-        if not os.path.isdir(top):
-            # top is a single file module
-            tfp.add(base)
-            continue
-        for root, dirs, files in os.walk(base, followlinks=True):
-            for name in files:
-                if not name.endswith(('.pyc', '.pyo')):
-                    tfp.add(os.path.join(root, name))
-        if tempdir is not None:
-            shutil.rmtree(tempdir)
-            tempdir = None
+    for py_ver, tops in six.iteritems(tops_py_version_mapping):
+        for top in tops:
+            base = os.path.basename(top)
+            top_dirname = os.path.dirname(top)
+            if os.path.isdir(top_dirname):
+                os.chdir(top_dirname)
+            else:
+                # This is likely a compressed python .egg
+                tempdir = tempfile.mkdtemp()
+                egg = zipfile.ZipFile(top_dirname)
+                egg.extractall(tempdir)
+                top = os.path.join(tempdir, base)
+                os.chdir(tempdir)
+            if not os.path.isdir(top):
+                # top is a single file module
+                tfp.add(base, arcname=os.path.join('py{0}'.format(py_ver), base))
+                continue
+            for root, dirs, files in os.walk(base, followlinks=True):
+                for name in files:
+                    if not name.endswith(('.pyc', '.pyo')):
+                        tfp.add(os.path.join(root, name),
+                                arcname=os.path.join('py{0}'.format(py_ver), root, name))
+            if tempdir is not None:
+                shutil.rmtree(tempdir)
+                tempdir = None
     os.chdir(thindir)
     tfp.add('salt-call')
     with salt.utils.fopen(thinver, 'w+') as fp_:
         fp_.write(salt.version.__version__)
+    with salt.utils.fopen(pythinver, 'w+') as fp_:
+        fp_.write(str(sys.version_info[0]))
     os.chdir(os.path.dirname(thinver))
     tfp.add('version')
+    tfp.add('.thin-gen-py-version')
     if start_dir:
         os.chdir(start_dir)
     tfp.close()

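The arcname changes give the tarball one top-level directory per Python major version, alongside the loader script and the two version stamps. A small inspection sketch (the path is an example; the real archive lives under <cachedir>/thin/thin.tgz on the master):

    import tarfile

    with tarfile.open('/var/cache/salt/master/thin/thin.tgz', 'r:gz') as thin:
        for name in thin.getnames()[:5]:
            print(name)  # expect members like py2/salt/..., py3/salt/...,
                         # salt-call, version, .thin-gen-py-version
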
@@ -1200,6 +1200,12 @@ class ShellCase(AdaptedConfigurationTestCaseMixIn, ShellTestCase):
     _script_dir_ = SCRIPT_DIR
     _python_executable_ = PYEXEC

+    def chdir(self, dirname):
+        try:
+            os.chdir(dirname)
+        except OSError:
+            os.chdir(INTEGRATION_TEST_DIR)
+
     def run_salt(self, arg_str, with_retcode=False, catch_stderr=False):
         '''
         Execute salt

@@ -297,7 +297,7 @@ class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             )
             self.assertEqual(ret[2], 2)
         finally:
-            os.chdir(old_cwd)
+            self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -159,7 +159,7 @@ class CopyTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             self.assertEqual(ret[2], 2)
         finally:
             if old_cwd is not None:
-                os.chdir(old_cwd)
+                self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -254,7 +254,7 @@ class KeyTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             self.assertIn('minion', '\n'.join(ret))
             self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:')))
         finally:
-            os.chdir(old_cwd)
+            self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -74,7 +74,7 @@ class MasterTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             )
             self.assertEqual(ret[2], 2)
         finally:
-            os.chdir(old_cwd)
+            self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -345,7 +345,7 @@ class MatchTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             )
             self.assertEqual(ret[2], 2)
         finally:
-            os.chdir(old_cwd)
+            self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -71,7 +71,7 @@ class MinionTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             )
             self.assertEqual(ret[2], 2)
         finally:
-            os.chdir(old_cwd)
+            self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -94,7 +94,7 @@ class RunTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             )
             self.assertEqual(ret[2], 2)
         finally:
-            os.chdir(old_cwd)
+            self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -75,7 +75,7 @@ class SyndicTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
             )
             self.assertEqual(ret[2], 2)
         finally:
-            os.chdir(old_cwd)
+            self.chdir(old_cwd)
             if os.path.isdir(config_dir):
                 shutil.rmtree(config_dir)

@@ -14,13 +14,13 @@ import time

 # Import salt libs
 from integration import TestDaemon, TMP  # pylint: disable=W0403
+from integration import INTEGRATION_TEST_DIR
+from integration import CODE_DIR as SALT_ROOT

 # Import Salt Testing libs
 from salttesting.parser import PNUM, print_header
 from salttesting.parser.cover import SaltCoverageTestingParser

-TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
-SALT_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
 XML_OUTPUT_DIR = os.environ.get(
     'SALT_XML_TEST_REPORTS_DIR',
     os.path.join(TMP, 'xml-test-reports')

@@ -30,7 +30,7 @@ HTML_OUTPUT_DIR = os.environ.get(
     os.path.join(TMP, 'html-test-reports')
 )

+TEST_DIR = os.path.dirname(INTEGRATION_TEST_DIR)
 try:
     if SALT_ROOT:
         os.chdir(SALT_ROOT)

New file: tests/unit/config/__init__.py (1 line)

# -*- coding: utf-8 -*-

New file: tests/unit/config/schemas/__init__.py (1 line)

# -*- coding: utf-8 -*-

New file: tests/unit/config/schemas/ssh_test.py (302 lines)

# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`

    tests.unit.config.schemas.test_ssh
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'''
# Import python libs
from __future__ import absolute_import, print_function

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.helpers import ensure_in_syspath

ensure_in_syspath('../../')

# Import Salt Libs
from salt.config.schemas import ssh as ssh_schemas
from salt.config.schemas.minion import MinionConfiguration

# Import 3rd-party libs
try:
    import jsonschema
    import jsonschema.exceptions
    HAS_JSONSCHEMA = True
except ImportError:
    HAS_JSONSCHEMA = False


class RoosterEntryConfigTest(TestCase):
    def test_config(self):
        config = ssh_schemas.RosterEntryConfig()

        expected = {
            '$schema': 'http://json-schema.org/draft-04/schema#',
            'title': 'Roster Entry',
            'description': 'Salt SSH roster entry definition',
            'type': 'object',
            'properties': {
                'host': {
                    'title': 'Host',
                    'description': 'The IP address or DNS name of the remote host',
                    'type': 'string',
                    'pattern': r'^((\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})|([A-Za-z0-9][A-Za-z0-9\.\-]{1,255}))$',
                    'minLength': 1
                },
                'port': {
                    'description': 'The target system\'s ssh port number',
                    'title': 'Port',
                    'default': 22,
                    'maximum': 65535,
                    'minimum': 0,
                    'type': 'integer'
                },
                'user': {
                    'default': 'root',
                    'type': 'string',
                    'description': 'The user to log in as. Defaults to root',
                    'title': 'User',
                    'minLength': 1
                },
                'passwd': {
                    'title': 'Password',
                    'type': 'string',
                    'description': 'The password to log in with',
                    'format': 'secret',
                    'minLength': 1
                },
                'priv': {
                    'type': 'string',
                    'description': 'File path to ssh private key, defaults to salt-ssh.rsa',
                    'title': 'Private Key',
                    'minLength': 1
                },
                'sudo': {
                    'default': False,
                    'type': 'boolean',
                    'description': 'run command via sudo. Defaults to False',
                    'title': 'Sudo'
                },
                'timeout': {
                    'type': 'integer',
                    'description': 'Number of seconds to wait for response when establishing an SSH connection',
                    'title': 'Timeout'
                },
                'thin_dir': {
                    'type': 'string',
                    'description': 'The target system\'s storage directory for Salt components. Defaults to /tmp/salt-<hash>.',
                    'title': 'Thin Directory'
                },
                # The actuall representation of the minion options would make this HUGE!
                'minion_opts': ssh_schemas.DictItem(title='Minion Options',
                                                    description='Dictionary of minion options',
                                                    properties=MinionConfiguration()).serialize(),
            },
            'anyOf': [
                {
                    'required': [
                        'passwd'
                    ]
                },
                {
                    'required': [
                        'priv'
                    ]
                }
            ],
            'required': [
                'host',
                'user',
            ],
            'x-ordering': [
                'host',
                'port',
                'user',
                'passwd',
                'priv',
                'sudo',
                'timeout',
                'thin_dir',
                'minion_opts'
            ],
            'additionalProperties': False
        }
        try:
            self.assertDictContainsSubset(expected['properties'], config.serialize()['properties'])
            self.assertDictContainsSubset(expected, config.serialize())
        except AssertionError:
            import json
            print(json.dumps(config.serialize(), indent=4))
            raise

    @skipIf(HAS_JSONSCHEMA is False, 'The \'jsonschema\' library is missing')
    def test_config_validate(self):
        try:
            jsonschema.validate(
                {
                    'host': 'localhost',
                    'user': 'root',
                    'passwd': 'foo'
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        except jsonschema.exceptions.ValidationError as exc:
            self.fail('ValidationError raised: {0}'.format(exc))

        try:
            jsonschema.validate(
                {
                    'host': '127.0.0.1',
                    'user': 'root',
                    'passwd': 'foo'
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        except jsonschema.exceptions.ValidationError as exc:
            self.fail('ValidationError raised: {0}'.format(exc))

        try:
            jsonschema.validate(
                {
                    'host': '127.1.0.1',
                    'user': 'root',
                    'priv': 'foo',
                    'passwd': 'foo'
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        except jsonschema.exceptions.ValidationError as exc:
            self.fail('ValidationError raised: {0}'.format(exc))

        try:
            jsonschema.validate(
                {
                    'host': '127.1.0.1',
                    'user': 'root',
                    'passwd': 'foo',
                    'sudo': False
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        except jsonschema.exceptions.ValidationError as exc:
            self.fail('ValidationError raised: {0}'.format(exc))

        try:
            jsonschema.validate(
                {
                    'host': '127.1.0.1',
                    'user': 'root',
                    'priv': 'foo',
                    'passwd': 'foo',
                    'thin_dir': '/foo/bar'
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        except jsonschema.exceptions.ValidationError as exc:
            self.fail('ValidationError raised: {0}'.format(exc))

        try:
            jsonschema.validate(
                {
                    'host': '127.1.0.1',
                    'user': 'root',
                    'passwd': 'foo',
                    'minion_opts': {
                        'interface': '0.0.0.0'
                    }
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        except jsonschema.exceptions.ValidationError as exc:
            self.fail('ValidationError raised: {0}'.format(exc))

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate(
                {
                    'host': '127.1.0.1',
                    'user': '',
                    'passwd': 'foo',
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        self.assertIn('is too short', excinfo.exception.message)

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate(
                {
                    'host': '127.1.0.1',
                    'user': 'root',
                    'passwd': 'foo',
                    'minion_opts': {
                        'interface': 0
                    }
                },
                ssh_schemas.RosterEntryConfig.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        self.assertIn('is not of type', excinfo.exception.message)


class RosterItemTest(TestCase):

    def test_roster_config(self):
        try:
            self.assertDictContainsSubset(
                {
                    "$schema": "http://json-schema.org/draft-04/schema#",
                    "title": "Roster Configuration",
                    "description": "Roster entries definition",
                    "type": "object",
                    "patternProperties": {
                        r"^([^:]+)$": ssh_schemas.RosterEntryConfig.serialize()
                    },
                    "additionalProperties": False
                },
                ssh_schemas.RosterItem.serialize()
            )
        except AssertionError:
            import json
            print(json.dumps(ssh_schemas.RosterItem.serialize(), indent=4))
            raise

    @skipIf(HAS_JSONSCHEMA is False, 'The \'jsonschema\' library is missing')
    def test_roster_config_validate(self):
        try:
            jsonschema.validate(
                {'target-1':
                 {
                     'host': 'localhost',
                     'user': 'root',
                     'passwd': 'foo'
                 }
                },
                ssh_schemas.RosterItem.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        except jsonschema.exceptions.ValidationError as exc:
            self.fail('ValidationError raised: {0}'.format(exc))

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate(
                {'target-1:1':
                 {
                     'host': 'localhost',
                     'user': 'root',
                     'passwd': 'foo'
                 }
                },
                ssh_schemas.RosterItem.serialize(),
                format_checker=jsonschema.FormatChecker()
            )
        self.assertIn(
            'Additional properties are not allowed (\'target-1:1\' was unexpected)',
            excinfo.exception.message
        )

@@ -20,7 +20,7 @@ ensure_in_syspath('../../')

 # Import salt libs
 from salt.utils import cloud
-from integration import TMP
+from integration import TMP, CODE_DIR

 GPG_KEYDIR = os.path.join(TMP, 'gpg-keydir')

@@ -63,6 +63,8 @@ try:
 except ImportError:
     HAS_KEYRING = False

+os.chdir(CODE_DIR)
+

 class CloudUtilsTestCase(TestCase):

@ -1,5 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
# pylint: disable=function-redefined
|
# pylint: disable=function-redefined,missing-docstring
|
||||||
# TODO: Remove the following PyLint disable as soon as we support YAML and RST rendering
|
# TODO: Remove the following PyLint disable as soon as we support YAML and RST rendering
|
||||||
# pylint: disable=abstract-method
|
# pylint: disable=abstract-method
|
||||||
|
|
||||||
@ -67,8 +67,8 @@ class ConfigTestCase(TestCase):
|
|||||||
'default': True,
|
'default': True,
|
||||||
'type': 'boolean',
|
'type': 'boolean',
|
||||||
'title': 'base'
|
'title': 'base'
|
||||||
},
|
},
|
||||||
'hungry': {
|
'hungry': {
|
||||||
'type': 'boolean',
|
'type': 'boolean',
|
||||||
'description': 'Are you hungry?',
|
'description': 'Are you hungry?',
|
||||||
'title': 'Hungry'
|
'title': 'Hungry'
|
||||||
@ -519,8 +519,8 @@ class ConfigTestCase(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
item = schema.BooleanItem(title='Hungry',
|
item = schema.BooleanItem(title='Hungry',
|
||||||
description='Are you hungry?',
|
description='Are you hungry?',
|
||||||
default=False)
|
default=False)
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
item.serialize(), {
|
item.serialize(), {
|
||||||
'type': 'boolean',
|
'type': 'boolean',
|
||||||
@ -531,8 +531,8 @@ class ConfigTestCase(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
item = schema.BooleanItem(title='Hungry',
|
item = schema.BooleanItem(title='Hungry',
|
||||||
description='Are you hungry?',
|
description='Are you hungry?',
|
||||||
default=schema.Null)
|
default=schema.Null)
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
item.serialize(), {
|
item.serialize(), {
|
||||||
'type': 'boolean',
|
'type': 'boolean',
|
||||||
@ -578,10 +578,10 @@ class ConfigTestCase(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
item = schema.StringItem(title='Foo',
|
item = schema.StringItem(title='Foo',
|
||||||
description='Foo Item',
|
description='Foo Item',
|
||||||
min_length=1,
|
min_length=1,
|
||||||
max_length=3,
|
max_length=3,
|
||||||
default='foo')
|
default='foo')
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
item.serialize(), {
|
item.serialize(), {
|
||||||
'type': 'string',
|
'type': 'string',
|
||||||
@ -594,10 +594,10 @@ class ConfigTestCase(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
item = schema.StringItem(title='Foo',
|
item = schema.StringItem(title='Foo',
|
||||||
description='Foo Item',
|
description='Foo Item',
|
||||||
min_length=1,
|
min_length=1,
|
||||||
max_length=3,
|
max_length=3,
|
||||||
enum=('foo', 'bar'))
|
enum=('foo', 'bar'))
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
item.serialize(), {
|
item.serialize(), {
|
||||||
'type': 'string',
|
'type': 'string',
|
||||||
@ -610,11 +610,11 @@ class ConfigTestCase(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
item = schema.StringItem(title='Foo',
|
item = schema.StringItem(title='Foo',
|
||||||
description='Foo Item',
|
description='Foo Item',
|
||||||
min_length=1,
|
min_length=1,
|
||||||
max_length=3,
|
max_length=3,
|
||||||
enum=('foo', 'bar'),
|
enum=('foo', 'bar'),
|
||||||
enumNames=('Foo', 'Bar'))
|
enumNames=('Foo', 'Bar'))
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
item.serialize(), {
|
item.serialize(), {
|
||||||
'type': 'string',
|
'type': 'string',
|
||||||
@ -628,8 +628,8 @@ class ConfigTestCase(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
item = schema.StringItem(title='Foo',
|
item = schema.StringItem(title='Foo',
|
||||||
description='Foo Item',
|
description='Foo Item',
|
||||||
pattern=r'^([\w_-]+)$')
|
pattern=r'^([\w_-]+)$')
|
||||||
self.assertDictEqual(
|
self.assertDictEqual(
|
||||||
item.serialize(), {
|
item.serialize(), {
|
||||||
'type': 'string',
|
'type': 'string',
|
||||||
@@ -651,7 +651,7 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.StringItem(title='Foo', description='Foo Item',
                                     min_length=1, max_length=10)

        try:
            jsonschema.validate({'item': 'the item'}, TestConf.serialize())
@@ -668,7 +668,7 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.StringItem(title='Foo', description='Foo Item',
                                     min_length=10, max_length=100)

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 'the item'}, TestConf.serialize())
@@ -676,8 +676,8 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.StringItem(title='Foo',
                                     description='Foo Item',
                                     enum=('foo', 'bar'))

        try:
            jsonschema.validate({'item': 'foo'}, TestConf.serialize())
@@ -686,15 +686,15 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.StringItem(title='Foo',
                                     description='Foo Item',
                                     enum=('foo', 'bar'))
        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 'bin'}, TestConf.serialize())
        self.assertIn('is not one of', excinfo.exception.message)

        class TestConf(schema.Schema):
            item = schema.StringItem(title='Foo', description='Foo Item',
                                     pattern=r'^([\w_-]+)$')

        try:
            jsonschema.validate({'item': 'the-item'}, TestConf.serialize(),
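Every validation hunk in this file follows the same shape: declare a schema.Schema subclass, serialize it, and hand a plain config dict to jsonschema.validate(). A condensed sketch of that flow, mirroring the string cases above (TestConf is illustrative):

    import jsonschema
    from salt.utils import schema

    class TestConf(schema.Schema):
        item = schema.StringItem(title='Foo', description='Foo Item',
                                 min_length=1, max_length=10)

    try:
        jsonschema.validate({'item': 'the item'}, TestConf.serialize())
        print('accepted')
    except jsonschema.exceptions.ValidationError as exc:
        # the min_length=10 variant above rejects this value as too short
        print('rejected: {0}'.format(exc.message))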
@@ -893,9 +893,9 @@ class ConfigTestCase(TestCase):
        )

        item = schema.NumberItem(title='How many dogs',
                                 description='Question',
                                 minimum=0,
                                 maximum=10)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'number',
@@ -907,8 +907,8 @@ class ConfigTestCase(TestCase):
        )

        item = schema.NumberItem(title='How many dogs',
                                 description='Question',
                                 multiple_of=2)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'number',
@@ -919,11 +919,11 @@ class ConfigTestCase(TestCase):
        )

        item = schema.NumberItem(title='How many dogs',
                                 description='Question',
                                 minimum=0,
                                 exclusive_minimum=True,
                                 maximum=10,
                                 exclusive_maximum=True)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'number',
@@ -937,10 +937,10 @@ class ConfigTestCase(TestCase):
        )

        item = schema.NumberItem(title='How many dogs',
                                 description='Question',
                                 minimum=0,
                                 maximum=10,
                                 default=0)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'number',
@@ -953,11 +953,11 @@ class ConfigTestCase(TestCase):
        )

        item = schema.NumberItem(title='How many dogs',
                                 description='Question',
                                 minimum=0,
                                 maximum=10,
                                 default=0,
                                 enum=(0, 2, 4, 6))
        self.assertDictEqual(
            item.serialize(), {
                'type': 'number',
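NumberItem repeats the pattern with numeric keywords. The snake_case constructor arguments presumably map onto the camelCase JSON Schema names; that mapping is inferred from the JSON Schema vocabulary rather than from the truncated assertions:

    from salt.utils import schema

    item = schema.NumberItem(title='How many dogs',
                             description='Question',
                             minimum=0,
                             exclusive_minimum=True,
                             maximum=10,
                             exclusive_maximum=True)
    serialized = item.serialize()
    assert serialized['type'] == 'number'
    # Assumed keys: minimum, maximum, exclusiveMinimum, exclusiveMaximum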
@@ -986,8 +986,8 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.NumberItem(title='How many dogs',
                                     description='Question',
                                     multiple_of=2.2)

        try:
            jsonschema.validate({'item': 4.4}, TestConf.serialize())
@@ -1000,7 +1000,7 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.NumberItem(title='Foo', description='Foo Item',
                                     minimum=1, maximum=10)

        try:
            jsonschema.validate({'item': 3}, TestConf.serialize())
@@ -1013,7 +1013,7 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.NumberItem(title='Foo', description='Foo Item',
                                     minimum=10, maximum=100)

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 3}, TestConf.serialize())
@@ -1021,11 +1021,11 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.NumberItem(title='How many dogs',
                                     description='Question',
                                     minimum=0,
                                     exclusive_minimum=True,
                                     maximum=10,
                                     exclusive_maximum=True)

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 0}, TestConf.serialize())
@@ -1037,8 +1037,8 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.NumberItem(title='Foo',
                                     description='Foo Item',
                                     enum=(0, 2, 4, 6))

        try:
            jsonschema.validate({'item': 4}, TestConf.serialize())
@@ -1047,8 +1047,8 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.NumberItem(title='Foo',
                                     description='Foo Item',
                                     enum=(0, 2, 4, 6))
        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 3}, TestConf.serialize())
        self.assertIn('is not one of', excinfo.exception.message)
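The exclusive-bound case deserves a note: with minimum=0 and exclusive_minimum=True the boundary value 0 itself must be rejected. The same behaviour can be checked with plain jsonschema against a hand-written draft-04 fragment standing in for what TestConf.serialize() should produce:

    import jsonschema

    config_schema = {
        '$schema': 'http://json-schema.org/draft-04/schema#',
        'type': 'object',
        'properties': {
            'item': {
                'type': 'number',
                'minimum': 0,
                'exclusiveMinimum': True,
                'maximum': 10,
                'exclusiveMaximum': True,
            }
        }
    }

    try:
        jsonschema.validate({'item': 0}, config_schema)
    except jsonschema.exceptions.ValidationError as exc:
        print(exc.message)  # 0 sits on the excluded lower bound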
@@ -1064,9 +1064,9 @@ class ConfigTestCase(TestCase):
        )

        item = schema.IntegerItem(title='How many dogs',
                                  description='Question',
                                  minimum=0,
                                  maximum=10)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'integer',
@@ -1078,8 +1078,8 @@ class ConfigTestCase(TestCase):
        )

        item = schema.IntegerItem(title='How many dogs',
                                  description='Question',
                                  multiple_of=2)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'integer',
@@ -1090,11 +1090,11 @@ class ConfigTestCase(TestCase):
        )

        item = schema.IntegerItem(title='How many dogs',
                                  description='Question',
                                  minimum=0,
                                  exclusive_minimum=True,
                                  maximum=10,
                                  exclusive_maximum=True)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'integer',
@@ -1108,10 +1108,10 @@ class ConfigTestCase(TestCase):
        )

        item = schema.IntegerItem(title='How many dogs',
                                  description='Question',
                                  minimum=0,
                                  maximum=10,
                                  default=0)
        self.assertDictEqual(
            item.serialize(), {
                'type': 'integer',
@@ -1124,11 +1124,11 @@ class ConfigTestCase(TestCase):
        )

        item = schema.IntegerItem(title='How many dogs',
                                  description='Question',
                                  minimum=0,
                                  maximum=10,
                                  default=0,
                                  enum=(0, 2, 4, 6))
        self.assertDictEqual(
            item.serialize(), {
                'type': 'integer',
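IntegerItem mirrors NumberItem but serializes as 'type': 'integer', which changes what validation accepts: JSON Schema's integer type rejects non-integral values that number would allow. A quick standalone illustration with plain jsonschema:

    import jsonschema

    int_schema = {'type': 'integer'}
    jsonschema.validate(4, int_schema)        # an integral value passes
    try:
        jsonschema.validate(4.5, int_schema)  # a fractional value does not
    except jsonschema.exceptions.ValidationError as exc:
        print(exc.message)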
@@ -1157,8 +1157,8 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.IntegerItem(title='How many dogs',
                                      description='Question',
                                      multiple_of=2)

        try:
            jsonschema.validate({'item': 4}, TestConf.serialize())
@@ -1171,7 +1171,7 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.IntegerItem(title='Foo', description='Foo Item',
                                      minimum=1, maximum=10)

        try:
            jsonschema.validate({'item': 3}, TestConf.serialize())
@@ -1184,7 +1184,7 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.IntegerItem(title='Foo', description='Foo Item',
                                      minimum=10, maximum=100)

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 3}, TestConf.serialize())
@@ -1192,11 +1192,11 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.IntegerItem(title='How many dogs',
                                      description='Question',
                                      minimum=0,
                                      exclusive_minimum=True,
                                      maximum=10,
                                      exclusive_maximum=True)

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 0}, TestConf.serialize())
@@ -1208,8 +1208,8 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.IntegerItem(title='Foo',
                                      description='Foo Item',
                                      enum=(0, 2, 4, 6))

        try:
            jsonschema.validate({'item': 4}, TestConf.serialize())
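The multiple_of variants above presumably serialize to the multipleOf keyword, under which only exact multiples validate. Checked in isolation with plain jsonschema (the inline fragment approximates the serialized IntegerItem):

    import jsonschema

    fragment = {'type': 'integer', 'multipleOf': 2}
    jsonschema.validate(4, fragment)       # 4 is a multiple of 2
    try:
        jsonschema.validate(3, fragment)   # 3 is not
    except jsonschema.exceptions.ValidationError as exc:
        print(exc.message)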
@@ -1218,18 +1218,18 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.IntegerItem(title='Foo',
                                      description='Foo Item',
                                      enum=(0, 2, 4, 6))
        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': 3}, TestConf.serialize())
        self.assertIn('is not one of', excinfo.exception.message)

    def test_array_config(self):
        string_item = schema.StringItem(title='Dog Name',
                                        description='The dog name')
        item = schema.ArrayItem(title='Dog Names',
                                description='Name your dogs',
                                items=string_item)

        self.assertDictEqual(
            item.serialize(), {
@@ -1245,10 +1245,10 @@ class ConfigTestCase(TestCase):
        )

        integer_item = schema.IntegerItem(title='Dog Age',
                                          description='The dog age')
        item = schema.ArrayItem(title='Dog Names',
                                description='Name your dogs',
                                items=(string_item, integer_item))

        self.assertDictEqual(
            item.serialize(), {
@@ -1271,13 +1271,13 @@ class ConfigTestCase(TestCase):
        )

        item = schema.ArrayItem(title='Dog Names',
                                description='Name your dogs',
                                items=(schema.StringItem(),
                                       schema.IntegerItem()),
                                min_items=1,
                                max_items=3,
                                additional_items=False,
                                unique_items=True)

        self.assertDictEqual(
            item.serialize(), {
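The ArrayItem hunks show two shapes of items: a single item schema and a tuple of item schemas. Presumably a single item serializes to one nested schema while a tuple becomes a positional list, with min_items, max_items, additional_items and unique_items mapping to the usual camelCase keywords; the assertions are truncated, so that mapping is an assumption:

    from salt.utils import schema

    single = schema.ArrayItem(title='Dog Names',
                              description='Name your dogs',
                              items=schema.StringItem())
    positional = schema.ArrayItem(items=(schema.StringItem(),
                                         schema.IntegerItem()),
                                  min_items=1,
                                  max_items=3,
                                  additional_items=False,
                                  unique_items=True)

    assert single.serialize()['type'] == 'array'
    # Assumed: positional.serialize() carries 'items' as a list plus
    # minItems, maxItems, additionalItems and uniqueItems.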
@@ -1303,8 +1303,8 @@ class ConfigTestCase(TestCase):
            item = schema.IntegerItem(title='How many dogs', description='Question')

        item = schema.ArrayItem(title='Dog Names',
                                description='Name your dogs',
                                items=HowManyConfig())
        self.assertDictEqual(
            item.serialize(), {
                'type': 'array',
@@ -1318,8 +1318,8 @@ class ConfigTestCase(TestCase):
            item = schema.IntegerItem()

        item = schema.ArrayItem(title='Dog Names',
                                description='Name your dogs',
                                items=(HowManyConfig(), AgesConfig()))
        self.assertDictEqual(
            item.serialize(), {
                'type': 'array',
@@ -1336,8 +1336,8 @@ class ConfigTestCase(TestCase):
    def test_array_config_validation(self):
        class TestConf(schema.Schema):
            item = schema.ArrayItem(title='Dog Names',
                                    description='Name your dogs',
                                    items=schema.StringItem())

        try:
            jsonschema.validate({'item': ['Tobias', 'Óscar']}, TestConf.serialize(),
@@ -1352,10 +1352,10 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.ArrayItem(title='Dog Names',
                                    description='Name your dogs',
                                    items=schema.StringItem(),
                                    min_items=1,
                                    max_items=2)

        try:
            jsonschema.validate({'item': ['Tobias', 'Óscar']}, TestConf.serialize(),
@@ -1375,9 +1375,9 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.ArrayItem(title='Dog Names',
                                    description='Name your dogs',
                                    items=schema.StringItem(),
                                    uniqueItems=True)

        with self.assertRaises(jsonschema.exceptions.ValidationError) as excinfo:
            jsonschema.validate({'item': ['Tobias', 'Tobias']}, TestConf.serialize(),
@@ -1386,7 +1386,7 @@ class ConfigTestCase(TestCase):

        class TestConf(schema.Schema):
            item = schema.ArrayItem(items=(schema.StringItem(),
                                           schema.IntegerItem()))
        try:
            jsonschema.validate({'item': ['Óscar', 4]}, TestConf.serialize(),
                                format_checker=jsonschema.FormatChecker())
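test_array_config_validation then drives those serialized arrays through jsonschema; the uniqueItems case is the one that rejects otherwise well-typed input. A condensed version of that check, mirroring the hunk above:

    import jsonschema
    from salt.utils import schema

    class TestConf(schema.Schema):
        item = schema.ArrayItem(title='Dog Names',
                                description='Name your dogs',
                                items=schema.StringItem(),
                                uniqueItems=True)

    try:
        jsonschema.validate({'item': ['Tobias', 'Tobias']}, TestConf.serialize(),
                            format_checker=jsonschema.FormatChecker())
    except jsonschema.exceptions.ValidationError as exc:
        print(exc.message)  # duplicate entries are rejected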
@@ -1480,7 +1480,7 @@ class ConfigTestCase(TestCase):
            title='Poligon',
            description='Describe the Poligon',
            pattern_properties={
-               's*': schema.IntegerItem()
+               's.*': schema.IntegerItem()
            },
            min_properties=1,
            max_properties=2
@@ -1491,7 +1491,7 @@ class ConfigTestCase(TestCase):
                'title': item.title,
                'description': item.description,
                'patternProperties': {
-                   's*': {'type': 'integer'}
+                   's.*': {'type': 'integer'}
                },
                'minProperties': 1,
                'maxProperties': 2
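These two hunks carry the one substantive change visible in this part of the diff: the pattern-properties regex is corrected from 's*' to 's.*'. As a regular expression, 's*' matches the empty string and therefore constrains every property name, while 's.*' only constrains names containing an 's'. The difference is easy to see with plain jsonschema:

    import jsonschema

    def poligon_schema(pattern):
        # Hypothetical minimal stand-in for the serialized DictItem above.
        return {
            'type': 'object',
            'patternProperties': {pattern: {'type': 'integer'}},
        }

    # With 's.*', only keys containing an 's' must be integers.
    jsonschema.validate({'sides': 4, 'name': 'square'}, poligon_schema('s.*'))

    # With 's*', every key matches (zero or more 's'), so 'name' is checked too.
    try:
        jsonschema.validate({'sides': 4, 'name': 'square'}, poligon_schema('s*'))
    except jsonschema.exceptions.ValidationError as exc:
        print(exc.message)  # 'square' is not of type 'integer'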
@@ -1568,7 +1568,7 @@ class ConfigTestCase(TestCase):
                'sides': schema.IntegerItem()
            },
            additional_properties=schema.OneOfItem(items=[schema.BooleanItem(),
                                                          schema.StringItem()])
        )
        self.assertDictEqual(
            item.serialize(), {
@@ -1597,7 +1597,6 @@ class ConfigTestCase(TestCase):
                    'sides': schema.IntegerItem()
                }
            )

        try:
            jsonschema.validate({'item': {'sides': 1}}, TestConf.serialize())
        except jsonschema.exceptions.ValidationError as exc:
@@ -1663,7 +1662,6 @@ class ConfigTestCase(TestCase):
                additional_properties=schema.OneOfItem(items=[
                    schema.BooleanItem(),
                    schema.IntegerItem()
                ])
            )

@@ -1688,7 +1686,6 @@ class ConfigTestCase(TestCase):
                additional_properties=schema.OneOfItem(items=[
                    schema.BooleanItem(),
                    schema.IntegerItem()
                ]),
                min_properties=2,
                max_properties=3
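The closing hunks pair declared properties with additional_properties given as a OneOfItem, meaning any key outside the declared set must match one of the listed item types. A plain-jsonschema approximation of what the serialized form should enforce (the exact serialized structure is assumed; only the pieces visible above are certain):

    import jsonschema

    poligon = {
        'type': 'object',
        'properties': {'sides': {'type': 'integer'}},
        'additionalProperties': {
            'oneOf': [{'type': 'boolean'}, {'type': 'integer'}]
        },
    }

    jsonschema.validate({'sides': 4, 'regular': True}, poligon)       # extra boolean: accepted
    try:
        jsonschema.validate({'sides': 4, 'name': 'square'}, poligon)  # extra string: rejected
    except jsonschema.exceptions.ValidationError as exc:
        print(exc.message)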