Commit 98ebad06c5: Merge remote-tracking branch 'upstream/develop' into sam_raet_49
@@ -4544,3 +4544,51 @@ source_file = _build/locale/topics/targeting/ipcidr.pot
source_lang = en
source_name = topics/targeting/ipcidr.rst

[salt.ref--modules--all--salt_modules_boto_elasticache]
file_filter = locale/<lang>/LC_MESSAGES/ref/modules/all/salt.modules.boto_elasticache.po
source_file = _build/locale/ref/modules/all/salt.modules.boto_elasticache.pot
source_lang = en
source_name = ref/modules/all/salt.modules.boto_elasticache.rst

[salt.ref--roster--all--index]
file_filter = locale/<lang>/LC_MESSAGES/ref/roster/all/index.po
source_file = _build/locale/ref/roster/all/index.pot
source_lang = en
source_name = ref/roster/all/index.rst

[salt.ref--roster--all--salt_roster_flat]
file_filter = locale/<lang>/LC_MESSAGES/ref/roster/all/salt.roster.flat.po
source_file = _build/locale/ref/roster/all/salt.roster.flat.pot
source_lang = en
source_name = ref/roster/all/salt.roster.flat.rst

[salt.ref--roster--all--salt_roster_scan]
file_filter = locale/<lang>/LC_MESSAGES/ref/roster/all/salt.roster.scan.po
source_file = _build/locale/ref/roster/all/salt.roster.scan.pot
source_lang = en
source_name = ref/roster/all/salt.roster.scan.rst

[salt.ref--states--all--salt_states_apache_module]
file_filter = locale/<lang>/LC_MESSAGES/ref/states/all/salt.states.apache_module.po
source_file = _build/locale/ref/states/all/salt.states.apache_module.pot
source_lang = en
source_name = ref/states/all/salt.states.apache_module.rst

[salt.ref--states--all--salt_states_boto_elasticache]
file_filter = locale/<lang>/LC_MESSAGES/ref/states/all/salt.states.boto_elasticache.po
source_file = _build/locale/ref/states/all/salt.states.boto_elasticache.pot
source_lang = en
source_name = ref/states/all/salt.states.boto_elasticache.rst

[salt.topics--releases--2014_1_6]
file_filter = locale/<lang>/LC_MESSAGES/topics/releases/2014.1.6.po
source_file = _build/locale/topics/releases/2014.1.6.pot
source_lang = en
source_name = topics/releases/2014.1.6.rst

[salt.topics--releases--version_numbers]
file_filter = locale/<lang>/LC_MESSAGES/topics/releases/version_numbers.po
source_file = _build/locale/topics/releases/version_numbers.pot
source_lang = en
source_name = topics/releases/version_numbers.rst

@@ -142,7 +142,7 @@ latexpdfja: translations
xetexpdf: translations
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through xelatex..."
	cd $(BUILDDIR)/latex; xelatex *.tex && xelatex *.tex; cd -
	cd $(BUILDDIR)/latex; xelatex *.tex && xelatex *.tex && xelatex *.tex; cd -
	@echo "xelatex finished; the PDF files are in $(BUILDDIR)/latex."

text: translations

@@ -401,4 +401,4 @@ executed.

Other types can be synced by replacing ``cmd.saltutil.sync_grains`` with
``cmd.saltutil.sync_modules``, ``cmd.saltutil.sync_all``, or whatever else
suits your particular use case.
suits the intended use case.

pkg/rpm/logrotate.salt (new file, 39 lines)
@@ -0,0 +1,39 @@
/var/log/salt/master {
    weekly
    missingok
    rotate 5
    compress
    notifempty
}

/var/log/salt/minion {
    weekly
    missingok
    rotate 5
    compress
    notifempty
}

/var/log/salt/key {
    weekly
    missingok
    rotate 5
    compress
    notifempty
}

/var/log/salt/cloud {
    weekly
    missingok
    rotate 5
    compress
    notifempty
}

/var/log/salt/ssh {
    weekly
    missingok
    rotate 5
    compress
    notifempty
}

@@ -31,6 +31,7 @@ Source5: %{name}-master.service
Source6: %{name}-syndic.service
Source7: %{name}-minion.service
Source8: README.fedora
Source9: logrotate.salt

BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)

@@ -161,6 +162,7 @@ install -p -m 0644 %{SOURCE7} $RPM_BUILD_ROOT%{_unitdir}/
%endif

install -p %{SOURCE8} .
install -p %{SOURCE9} $RPM_BUILD_ROOT%{_sysconfdir}/logrotate.d/salt

mkdir -p $RPM_BUILD_ROOT%{_sysconfdir}/salt/
install -p -m 0640 conf/minion $RPM_BUILD_ROOT%{_sysconfdir}/salt/minion

@@ -574,8 +574,9 @@ class Cloud(object):
# If driver has function list_nodes_min, just replace it
# with query param to check existing vms on this driver
# for minimum information, otherwise still use query param.
if '{0}.list_nodes_min'.format(driver) in self.clouds:
    this_query = 'list_nodes_min'
if 'selected_query_option' not in opts:
    if '{0}.list_nodes_min'.format(driver) in self.clouds:
        this_query = 'list_nodes_min'
fun = '{0}.{1}'.format(driver, this_query)
if fun not in self.clouds:
    log.error(

@@ -1464,7 +1464,7 @@ def request_instance(vm_=None, call=None):
] = str(set_del_root_vol_on_destroy).lower()

set_del_all_vols_on_destroy = config.get_cloud_config_value(
    'del_all_vols_on_destroy', vm_, __opts__, search_global=False
    'del_all_vols_on_destroy', vm_, __opts__, search_global=False, default=False
)

if set_del_all_vols_on_destroy is not None:
@@ -2015,7 +2015,7 @@ def create(vm_=None, call=None):
    'volumes': volumes,
    'zone': ret['placement']['availabilityZone'],
    'instance_id': ret['instanceId'],
    'del_all_vols_on_destroy': vm_['set_del_all_vols_on_destroy']
    'del_all_vols_on_destroy': vm_.get('set_del_all_vols_on_destroy', False)
},
call='action'
)

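The two EC2 hunks above replace hard requirements with defaults: ``config.get_cloud_config_value`` now falls back to ``default=False`` when the profile omits ``del_all_vols_on_destroy``, and the dictionary lookup becomes ``dict.get`` with a fallback. A minimal standalone sketch of the difference (illustrative only, not salt code):

.. code-block:: python

    vm_ = {'name': 'web1'}  # profile without 'set_del_all_vols_on_destroy'

    # Old behaviour: a missing key raises KeyError and aborts the action.
    try:
        flag = vm_['set_del_all_vols_on_destroy']
    except KeyError:
        flag = None  # the caller had to handle this explicitly

    # New behaviour: an absent key quietly means "do not delete volumes".
    flag = vm_.get('set_del_all_vols_on_destroy', False)
    assert flag is False
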
@@ -146,6 +146,7 @@ class WorkerSetup(ioflo.base.deeding.Deed):
        self.stack.server.close()
        self.stack.clearAllDir()


class WorkerRouter(ioflo.base.deeding.Deed):
    '''
    FloScript:

@@ -642,7 +642,7 @@ class RemoteFuncs(object):
    if the requesting minion also initiated the execution.
    '''
    if not skip_verify and any(key not in load for key in ('jid', 'id')):
        return {}
        return {}
    else:
        auth_cache = os.path.join(
            self.opts['cachedir'],
@@ -652,7 +652,7 @@ class RemoteFuncs(object):
    jid_fn = os.path.join(auth_cache, load['jid'])
    with salt.utils.fopen(jid_fn, 'r') as fp_:
        if not load['id'] == fp_.read():
            return {}
            return {}

    return self.local.get_cache_returns(load['jid'])

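The guard above uses a generator expression with ``any()`` to reject payloads that lack required keys unless verification is explicitly skipped. A small self-contained sketch of the same pattern (hypothetical payloads, not salt's code):

.. code-block:: python

    REQUIRED = ('jid', 'id')

    def validate(load, skip_verify=False):
        # Reject the request outright if any required key is missing.
        if not skip_verify and any(key not in load for key in REQUIRED):
            return {}
        return load

    print(validate({'jid': '2014', 'id': 'minion1'}))  # accepted
    print(validate({'jid': '2014'}))                   # {} - missing 'id'
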
@@ -495,6 +495,42 @@ def set_health_check(name, health_check, region=None, key=None, keyid=None,
    return True


def register_instances(name, instances, region=None, key=None, keyid=None,
                        profile=None):
    '''
    Register instances with an ELB. ``instances`` is either a single
    instance ID string or a list of instance ID strings.

    CLI example to register instances with an ELB::

        salt myminion boto_elb.register_instances myelb instance_id
        salt myminion boto_elb.register_instances myelb "[instance_id,instance_id]"
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    elb = conn.get_all_load_balancers(name)[0]
    return elb.register_instances(instances)


def deregister_instances(name, instances, region=None, key=None, keyid=None,
                         profile=None):
    '''
    Deregister instances from an ELB. ``instances`` is either a single
    instance ID string or a list of instance ID strings.

    CLI example to deregister instances from an ELB::

        salt myminion boto_elb.deregister_instances myelb instance_id
        salt myminion boto_elb.deregister_instances myelb "[instance_id,instance_id]"
    '''
    conn = _get_conn(region, key, keyid, profile)
    if not conn:
        return False
    elb = conn.get_all_load_balancers(name)[0]
    return elb.deregister_instances(instances)


def _get_conn(region, key, keyid, profile):
    '''
    Get a boto connection to ELB.

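For context, a minimal standalone sketch of the boto calls the new module functions wrap; the region handling here is an assumption standing in for what ``_get_conn`` does, so treat it as an illustration rather than the module code:

.. code-block:: python

    import boto.ec2.elb

    def register(elb_name, instance_ids, region='us-east-1'):
        # Connect to the ELB endpoint for the region.
        conn = boto.ec2.elb.connect_to_region(region)
        # Look the load balancer up by name and attach the instances.
        elb = conn.get_all_load_balancers(load_balancer_names=[elb_name])[0]
        return elb.register_instances(instance_ids)

    # register('myelb', ['i-0123abcd', 'i-4567cdef'])
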
@@ -3,7 +3,7 @@
A module for shelling out

Keep in mind that this module is insecure, in that it can give whomever has
access to the master root execution access to all salt minions
access to the master root execution access to all salt minions.
'''

# Import python libs

@@ -2444,19 +2444,24 @@ def check_perms(name, ret, user, group, mode, follow_symlinks=False):

# Mode changes if needed
if mode is not None:
    mode = __salt__['config.manage_mode'](mode)
    if mode != perms['lmode']:
        if __opts__['test'] is True:
            ret['changes']['mode'] = mode
        else:
            set_mode(name, mode)
            if mode != __salt__['config.manage_mode'](get_mode(name)):
                ret['result'] = False
                ret['comment'].append(
                    'Failed to change mode to {0}'.format(mode)
                )
            else:
    # File is a symlink, ignore the mode setting
    # if follow_symlinks is False
    if os.path.islink(name) and not follow_symlinks:
        pass
    else:
        mode = __salt__['config.manage_mode'](mode)
        if mode != perms['lmode']:
            if __opts__['test'] is True:
                ret['changes']['mode'] = mode
            else:
                set_mode(name, mode)
                if mode != __salt__['config.manage_mode'](get_mode(name)):
                    ret['result'] = False
                    ret['comment'].append(
                        'Failed to change mode to {0}'.format(mode)
                    )
                else:
                    ret['changes']['mode'] = mode
# user/group changes if needed, then check if it worked
if user:
    if user != perms['luser']:

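The new branch makes ``check_perms`` skip mode changes on symlinks unless ``follow_symlinks`` is set, since a chmod on a symlink path actually affects the link's target. A small standalone sketch of that guard (illustrative, with a hypothetical helper name):

.. code-block:: python

    import os

    def maybe_chmod(path, mode, follow_symlinks=False):
        # Leave symlinks untouched unless the caller explicitly follows them.
        if os.path.islink(path) and not follow_symlinks:
            return False
        os.chmod(path, mode)
        return True

    # maybe_chmod('/tmp/link-to-config', 0o640)  -> False, link left alone
    # maybe_chmod('/tmp/real-config', 0o640)     -> True, mode applied
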
@@ -15,6 +15,7 @@ def finger():
    Return the minion's public key fingerprint

    CLI Example:
    dfasdf

    .. code-block:: bash

@@ -23,3 +24,18 @@ def finger():
    return salt.utils.pem_finger(
        os.path.join(__opts__['pki_dir'], 'minion.pub')
    )


def finger_master():
    '''
    Return the fingerprint of the master's public key on the minion.

    CLI Example:

    .. code-block:: bash

        salt '*' key.finger_master
    '''
    return salt.utils.pem_finger(
        os.path.join(__opts__['pki_dir'], 'minion_master.pub')
    )

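Both functions delegate to ``salt.utils.pem_finger``, which reduces a PEM file to a colon-separated digest. A rough standalone sketch of that idea (an assumption about the helper's behaviour, not its actual source):

.. code-block:: python

    import hashlib

    def pem_finger(path, sum_type='md5'):
        # Strip the BEGIN/END armour lines and hash the base64 body.
        with open(path, 'rb') as fp_:
            body = b''.join(fp_.read().splitlines()[1:-1])
        digest = hashlib.new(sum_type, body).hexdigest()
        # Render as aa:bb:cc:... like ssh-keygen -l output.
        return ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2))

    # pem_finger('/etc/salt/pki/minion/minion_master.pub')
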
@@ -4,13 +4,11 @@

Provides access to randomness generators.
'''

# Import python libs
import base64
import hashlib
import os

# Import salt libs
import salt.utils.pycrypto
from salt.exceptions import SaltInvocationError

# Define the module's virtual name
@@ -24,62 +22,98 @@ def __virtual__():
    return __virtualname__


def encode(value, encoder='sha256'):
def hash(value, algorithm='sha512'):
    '''
    .. versionadded:: Helium

    Encodes a value with the specified encoder.

    value
        The value to be encoded.
        The value to be hashed.

    encoder : sha256
        The encoder to use. May be any valid algorithm supported by hashlib or
        ``base64``.
    algorithm : sha512
        The algorithm to use. May be any valid algorithm supported by
        hashlib.

    CLI Example:

    .. code-block:: bash

        salt '*' random.encode 'I am a string' md5
        salt '*' random.hash 'I am a string' md5
    '''
    if encoder == 'base64':
        out = base64.b64encode(value)
    elif encoder in hashlib.algorithms:
        hasher = hashlib.new(encoder)
    if algorithm in hashlib.algorithms:
        hasher = hashlib.new(algorithm)
        hasher.update(value)
        out = hasher.hexdigest()
    else:
        raise SaltInvocationError('You must specify a valid encoder.')
        raise SaltInvocationError('You must specify a valid algorithm.')

    return out


def urandom(length=256, encoder=None):
def str_encode(value, encoder='base64'):
    '''
    .. versionadded:: Helium

    Returns a random string of the specified length, optionally encoded. The
    truncation takes place prior to encoding so final output may be larger or
    smaller according to the encoder output.
    value
        The value to be encoded.

    length : 256
        Any valid number of bytes.

    encoder : None
        An optional encoder. May be any valid algorithm supported by hashlib
        or ``base64``.
    encoder : base64
        The encoder to use on the subsequent string.

    CLI Example:

    .. code-block:: bash

        salt '*' random.get 128 sha512
        salt '*' random.str_encode 'I am a new string' base64
    '''
    try:
        out = value.encode(encoder)
    except LookupError:
        raise SaltInvocationError('You must specify a valid encoder')
    except AttributeError:
        raise SaltInvocationError('Value must be an encode-able string')

    rand = os.urandom(length)
    return out

    if encoder is not None:
        rand = encode(rand, encoder)

    return rand
def get_str(length=20):
    '''
    .. versionadded:: Helium

    Returns a random string of the specified length.

    length : 20
        Any valid number of bytes.

    CLI Example:

    .. code-block:: bash

        salt '*' random.get_str 128
    '''
    return salt.utils.pycrypto.secure_password(length)


def shadow_hash(crypt_salt=None, password=None, algorithm='sha512'):
    '''
    Generates a salted hash suitable for /etc/shadow.

    crypt_salt : None
        Salt to be used in the generation of the hash. If one is not
        provided, a random salt will be generated.

    password : None
        Value to be salted and hashed. If one is not provided, a random
        password will be generated.

    algorithm : sha512
        Hash algorithm to use.

    CLI Example:

    .. code-block:: bash

        salt '*' random.shadow_hash 'My5alT' 'MyP@asswd' md5
    '''
    return salt.utils.pycrypto.gen_hash(crypt_salt, password, algorithm)

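The new ``shadow_hash`` function is a thin wrapper over ``salt.utils.pycrypto.gen_hash``. As a rough illustration of what such a helper boils down to (assuming the usual crypt(3) prefixes; a sketch, not salt's implementation):

.. code-block:: python

    import crypt
    import random
    import string

    def shadow_hash(password, algorithm='sha512'):
        # Map algorithm names to crypt(3) method prefixes.
        prefixes = {'md5': '$1$', 'sha256': '$5$', 'sha512': '$6$'}
        chars = string.ascii_letters + string.digits
        crypt_salt = ''.join(random.choice(chars) for _ in range(8))
        return crypt.crypt(password, prefixes[algorithm] + crypt_salt)

    # shadow_hash('MyP@asswd', 'sha512')  ->  '$6$Ab3dEf9h$...'
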
@@ -40,7 +40,8 @@ SCHEDULE_CONF = [
    'minutes',
    'hours',
    'days',
    'enabled'
    'enabled',
    'cron'
]


@@ -203,9 +204,17 @@ def build_schedule_item(name, **kwargs):
        if item in kwargs and 'when' in kwargs:
            time_conflict = True

        if item in kwargs and 'cron' in kwargs:
            time_conflict = True

    if time_conflict:
        ret['result'] = False
        ret['comment'] = 'Unable to use "seconds", "minutes", "hours", or "days" with "when" option.'
        ret['comment'] = 'Unable to use "seconds", "minutes", "hours", or "days" with "when" or "cron" options.'
        return ret

    if 'when' in kwargs and 'cron' in kwargs:
        ret['result'] = False
        ret['comment'] = 'Unable to use "when" and "cron" options together. Ignoring.'
        return ret

    for item in ['seconds', 'minutes', 'hours', 'days']:
@@ -240,7 +249,7 @@ def build_schedule_item(name, **kwargs):
    else:
        schedule[name]['splay'] = kwargs['splay']

    for item in ['range', 'when', 'returner']:
    for item in ['range', 'when', 'cron', 'returner']:
        if item in kwargs:
            schedule[name][item] = kwargs[item]

@@ -278,10 +287,17 @@ def add(name, **kwargs):
    for item in ['seconds', 'minutes', 'hours', 'days']:
        if item in kwargs and 'when' in kwargs:
            time_conflict = True
        if item in kwargs and 'cron' in kwargs:
            time_conflict = True

    if time_conflict:
        ret['result'] = False
        ret['comment'] = 'Error: Unable to use "seconds", "minutes", "hours", or "days" with "when" option.'
        ret['comment'] = 'Error: Unable to use "seconds", "minutes", "hours", or "days" with "when" or "cron" options.'
        return ret

    if 'when' in kwargs and 'cron' in kwargs:
        ret['result'] = False
        ret['comment'] = 'Unable to use "when" and "cron" options together. Ignoring.'
        return ret

    _new = build_schedule_item(name, **kwargs)
@@ -321,11 +337,19 @@ def modify(name, **kwargs):
        if item in kwargs and 'when' in kwargs:
            time_conflict = True

        if item in kwargs and 'cron' in kwargs:
            time_conflict = True

    if time_conflict:
        ret['result'] = False
        ret['comment'] = 'Error: Unable to use "seconds", "minutes", "hours", or "days" with "when" option.'
        return ret

    if 'when' in kwargs and 'cron' in kwargs:
        ret['result'] = False
        ret['comment'] = 'Unable to use "when" and "cron" options together. Ignoring.'
        return ret

    current_schedule = __opts__['schedule'].copy()
    if 'schedule' in __pillar__:
        current_schedule.update(__pillar__['schedule'])

@@ -16,6 +16,7 @@ or use Self-Signed certificates.
# Import python libs
import os
import time
import datetime
import logging
import hashlib

@@ -118,6 +119,56 @@ def _write_cert_to_database(ca_name, cert):
        ofile.write(index_data)


def maybe_fix_ssl_version(ca_name):
    '''
    Check that the X509 version is correct
    (was incorrectly set in previous salt versions).
    This will fix the version if needed.

    ca_name
        ca authority name
    '''
    certp = '{0}/{1}/{2}_ca_cert.crt'.format(
        _cert_base_path(),
        ca_name,
        ca_name)
    ca_keyp = '{0}/{1}/{2}_ca_cert.key'.format(
        _cert_base_path(), ca_name, ca_name)
    with open(certp) as fic:
        cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,
                                               fic.read())
        if cert.get_version() == 3:
            log.info(
                'Regenerating wrong x509 version '
                'for certificate {0}'.format(certp))
            with open(ca_keyp) as fic2:
                try:
                    # try to determine the key bits
                    key = OpenSSL.crypto.load_privatekey(
                        OpenSSL.crypto.FILETYPE_PEM, fic2.read())
                    bits = key.bits()
                except Exception:
                    bits = 2048
                try:
                    days = (datetime.datetime.strptime(cert.get_notAfter(),
                                                       '%Y%m%d%H%M%SZ') -
                            datetime.datetime.now()).days
                except (ValueError, TypeError):
                    days = 365
                subj = cert.get_subject()
                create_ca(
                    ca_name,
                    bits=bits,
                    days=days,
                    CN=subj.CN,
                    C=subj.C,
                    ST=subj.ST,
                    L=subj.L,
                    O=subj.O,
                    OU=subj.OU,
                    emailAddress=subj.emailAddress,
                    fixmode=True)


def _ca_exists(ca_name):
    '''
    Verify whether a Certificate Authority (CA) already exists
@@ -125,12 +176,12 @@ def _ca_exists(ca_name):
    ca_name
        name of the CA
    '''

    if os.path.exists('{0}/{1}/{2}_ca_cert.crt'.format(
    certp = '{0}/{1}/{2}_ca_cert.crt'.format(
        _cert_base_path(),
        ca_name,
        ca_name
    )):
        ca_name)
    if os.path.exists(certp):
        maybe_fix_ssl_version(ca_name)
        return True
    return False

@@ -145,7 +196,8 @@ def create_ca(
        L='Salt Lake City',
        O='SaltStack',
        OU=None,
        emailAddress='xyz@pdq.net'):
        emailAddress='xyz@pdq.net',
        fixmode=False):
    '''
    Create a Certificate Authority (CA)

@@ -190,17 +242,34 @@ def create_ca(

        salt '*' tls.create_ca test_ca
    '''
    if _ca_exists(ca_name):
        return 'Certificate for CA named "{0}" already exists'.format(ca_name)
    certp = '{0}/{1}/{2}_ca_cert.crt'.format(
        _cert_base_path(), ca_name, ca_name)
    ca_keyp = '{0}/{1}/{2}_ca_cert.key'.format(
        _cert_base_path(), ca_name, ca_name)
    if (not fixmode) and _ca_exists(ca_name):
        return (
            'Certificate for CA named "{0}" '
            'already exists').format(ca_name)

    if fixmode and not os.path.exists(certp):
        raise ValueError('{0} does not exists, can\'t fix'.format(certp))

    if not os.path.exists('{0}/{1}'.format(_cert_base_path(), ca_name)):
        os.makedirs('{0}/{1}'.format(_cert_base_path(), ca_name))

    key = OpenSSL.crypto.PKey()
    key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)
    # try to reuse existing ssl key
    key = None
    if os.path.exists(ca_keyp):
        with open(ca_keyp) as fic2:
            # try to determine the key bits
            key = OpenSSL.crypto.load_privatekey(
                OpenSSL.crypto.FILETYPE_PEM, fic2.read())
    if not key:
        key = OpenSSL.crypto.PKey()
        key.generate_key(OpenSSL.crypto.TYPE_RSA, bits)

    ca = OpenSSL.crypto.X509()
    ca.set_version(3)
    ca.set_version(2)
    ca.set_serial_number(_new_serial(ca_name, CN))
    ca.get_subject().C = C
    ca.get_subject().ST = ST
@@ -222,59 +291,48 @@ def create_ca(
        OpenSSL.crypto.X509Extension('keyUsage', True,
                                     'keyCertSign, cRLSign'),
        OpenSSL.crypto.X509Extension('subjectKeyIdentifier', False, 'hash',
                                     subject=ca)
        ])
                                     subject=ca)])

    ca.add_extensions([
        OpenSSL.crypto.X509Extension(
            'authorityKeyIdentifier',
            False,
            'issuer:always,keyid:always',
            issuer=ca
        )
    ])
            issuer=ca)])
    ca.sign(key, 'sha1')

    ca_key = salt.utils.fopen(
        '{0}/{1}/{2}_ca_cert.key'.format(
            _cert_base_path(),
            ca_name,
            ca_name
        ),
        'w'
    )
    ca_key.write(
        OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
    )
    ca_key.close()
    # always backup existing keys in case
    keycontent = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM,
                                                key)
    write_key = True
    if os.path.exists(ca_keyp):
        bck = "{0}.{1}".format(ca_keyp, datetime.datetime.now().strftime(
            "%Y%m%d%H%M%S"))
        with open(ca_keyp) as fic:
            old_key = fic.read().strip()
            if old_key.strip() == keycontent.strip():
                write_key = False
            else:
                log.info('Saving old CA ssl key in {0}'.format(bck))
                with open(bck, 'w') as bckf:
                    bckf.write(old_key)
                    os.chmod(bck, 0600)
    if write_key:
        ca_key = salt.utils.fopen(ca_keyp, 'w')
        ca_key.write(keycontent)
        ca_key.close()

    ca_crt = salt.utils.fopen(
        '{0}/{1}/{2}_ca_cert.crt'.format(
            _cert_base_path(),
            ca_name,
            ca_name
        ),
        'w'
    )
    ca_crt = salt.utils.fopen(certp, 'w')
    ca_crt.write(
        OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca)
    )
        OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
    ca_crt.close()

    _write_cert_to_database(ca_name, ca)

    ret = ('Created Private Key: "{1}/{2}/{3}_ca_cert.key." ').format(
        ca_name,
        _cert_base_path(),
        ca_name,
        ca_name
    )
        ca_name, _cert_base_path(), ca_name, ca_name)
    ret += ('Created CA "{0}": "{1}/{2}/{3}_ca_cert.crt."').format(
        ca_name,
        _cert_base_path(),
        ca_name,
        ca_name
    )
        ca_name, _cert_base_path(), ca_name, ca_name)

    return ret

@@ -580,6 +638,7 @@ def create_ca_signed_cert(ca_name, CN, days=365):
        return 'Certificate "{0}" already exists'.format(ca_name)

    try:
        maybe_fix_ssl_version(ca_name)
        ca_cert = OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_PEM,
            salt.utils.fopen('{0}/{1}/{2}_ca_cert.crt'.format(

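The core of this change is an off-by-one in the certificate version field: pyOpenSSL's ``X509.set_version()`` takes the zero-based ASN.1 value, so an X.509 v3 certificate must be created with ``set_version(2)``; ``maybe_fix_ssl_version`` regenerates CAs that were written with the wrong value. A tiny standalone illustration (not salt code):

.. code-block:: python

    import OpenSSL.crypto

    cert = OpenSSL.crypto.X509()
    cert.set_version(2)              # zero-based: this encodes X.509 version 3
    assert cert.get_version() == 2   # a stored value of 3 is the bug being fixed
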
@@ -37,7 +37,7 @@ def _conn(queue):
    '''
    Return an sqlite connection
    '''
    queue_dir = __opts__['queue_dir']
    queue_dir = __opts__['sqlite_queue_dir']
    db = os.path.join(queue_dir, '{0}.db'.format(queue))
    log.debug('Connecting to: {0}'.format(db))

@@ -86,7 +86,7 @@ def _list_queues():
    '''
    Return a list of sqlite databases in the queue_dir
    '''
    queue_dir = __opts__['queue_dir']
    queue_dir = __opts__['sqlite_queue_dir']
    files = os.path.join(queue_dir, '*.db')
    paths = glob.glob(files)
    queues = [os.path.splitext(os.path.basename(item))[0] for item in paths]

@@ -1830,13 +1830,6 @@ def recurse(name,
        'comment': {}  # { path: [comment, ...] }
    }

    try:
        source = source.rstrip('/')
    except AttributeError:
        ret['result'] = False
        ret['comment'] = '\'source\' parameter must be a string'
        return ret

    if 'mode' in kwargs:
        ret['result'] = False
        ret['comment'] = (
@@ -1887,14 +1880,35 @@ def recurse(name,
                .format(precheck))
            return ret

    if isinstance(source, list):
        sources = source
    else:
        sources = [source]

    try:
        for idx, val in enumerate(sources):
            sources[idx] = val.rstrip('/')
    except AttributeError:
        ret['result'] = False
        ret['comment'] = '\'source\' parameter(s) must be a string'
        return ret

    # If source is a list, find which in the list actually exists
    try:
        source, source_hash = __salt__['file.source_list'](source, '', __env__)
        source, source_hash = __salt__['file.source_list'](sources, '', __env__)
    except CommandExecutionError as exc:
        ret['result'] = False
        ret['comment'] = 'Recurse failed: {0}'.format(exc)
        return ret

    try:
        for idx, val in enumerate(sources):
            sources[idx] = val.rstrip('/')
    except AttributeError:
        ret['result'] = False
        ret['comment'] = '\'source\' parameter must be a string'
        return ret

    # Check source path relative to fileserver root, make sure it is a
    # directory
    source_rel = source.partition('://')[2]

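The hunk above lets ``file.recurse`` accept either a single ``source`` string or a list of them, stripping trailing slashes before ``file.source_list`` picks the first one that exists. A minimal sketch of that normalisation step on its own (illustration only, with a hypothetical helper name):

.. code-block:: python

    def normalise_sources(source):
        # Accept a single string or a list of strings.
        sources = source if isinstance(source, list) else [source]
        try:
            return [s.rstrip('/') for s in sources]
        except AttributeError:
            raise ValueError("'source' parameter(s) must be strings")

    print(normalise_sources('salt://files/app/'))        # ['salt://files/app']
    print(normalise_sources(['salt://a/', 'salt://b']))  # ['salt://a', 'salt://b']
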
@@ -42,10 +42,21 @@ Management of the Salt scheduler
This will schedule the command: state.sls httpd test=True at 5pm on Monday,
Wednesday and Friday, and 3pm on Tuesday and Thursday.

'''
    job1:
      schedule.present:
        - function: state.sls
        - args:
          - httpd
        - kwargs:
            test: True
        - cron: '*/5 * * * *'

import logging
log = logging.getLogger(__name__)
Scheduled jobs can also be specified using the format used by cron. This will
schedule the command: state.sls httpd test=True to run every 5 minutes. Requires
that python-croniter is installed.


'''


def present(name,
@@ -77,6 +88,10 @@ def present(name,
        The when parameter must be a single value or a dictionary
        with the date string(s) using the dateutil format.

    cron
        This will schedule the job at the specified time(s)
        using the crontab format.

    function
        The function that should be executed by the scheduled job.

@@ -115,6 +115,7 @@ class RAETChannel(Channel):
        self.stack.server.close()
        self.stack.clearAllDir()


class ZeroMQChannel(Channel):
    '''
    Encapsulate sending routines to ZeroMQ.

@@ -151,6 +151,12 @@ except ImportError:
    _WHEN_SUPPORTED = False
    _RANGE_SUPPORTED = False

try:
    import croniter
    _CRON_SUPPORTED = True
except ImportError:
    _CRON_SUPPORTED = False

# Import Salt libs
import salt.utils
import salt.utils.process
@@ -448,7 +454,6 @@ class Schedule(object):
        Evaluate and execute the schedule
        '''
        schedule = self.option('schedule')
        #log.debug('calling eval {0}'.format(schedule))
        if not isinstance(schedule, dict):
            return
        if 'enabled' in schedule and not schedule['enabled']:
@@ -479,18 +484,25 @@ class Schedule(object):
            # Add up how many seconds between now and then
            when = 0
            seconds = 0
            cron = 0

            time_conflict = False
            for item in ['seconds', 'minutes', 'hours', 'days']:
                if item in data and 'when' in data:
                    time_conflict = True
                if item in data and 'cron' in data:
                    time_conflict = True

            if time_conflict:
                log.error('Unable to use "seconds", "minutes", "hours", or "days" with "when" option. Ignoring.')
                log.error('Unable to use "seconds", "minutes", "hours", or "days" with "when" or "cron" options. Ignoring.')
                continue

            # clean this up
            if 'seconds' in data or 'minutes' in data or 'hours' in data or 'days' in data:
            if 'when' in data and 'cron' in data:
                log.error('Unable to use "when" and "cron" options together. Ignoring.')
                continue

            time_elements = ['seconds', 'minutes', 'hours', 'days']
            if True in [True for item in time_elements if item in data]:
                # Add up how many seconds between now and then
                seconds += int(data.get('seconds', 0))
                seconds += int(data.get('minutes', 0)) * 60
@@ -570,8 +582,21 @@ class Schedule(object):
                    data['_when'] = when
                    data['_when_run'] = True

            elif 'cron' in data:
                if not _CRON_SUPPORTED:
                    log.error('Missing python-croniter. Ignoring job {0}'.format(job))
                    continue

                now = int(datetime.datetime.now().strftime('%s'))
                try:
                    cron = int(croniter.croniter(data['cron'], now).get_next())
                except (ValueError, KeyError):
                    log.error('Invalid cron string. Ignoring')
                    continue
                seconds = cron - now
            else:
                continue

            # Check if the seconds variable is lower than current lowest
            # loop interval needed. If it is lower then overwrite variable
            # external loops using can then check this variable for how often
@@ -580,20 +605,25 @@ class Schedule(object):
                self.loop_interval = seconds
            now = int(time.time())
            run = False

            if job in self.intervals:
                if 'when' in data:
                    if now - when >= seconds:
                        if data['_when_run']:
                            data['_when_run'] = False
                            run = True
                if 'cron' in data:
                    if seconds == 1:
                        run = True
                else:
                    if now - self.intervals[job] >= seconds:
                        run = True

            else:
                if 'splay' in data:
                    if 'when' in data:
                        log.error('Unable to use "splay" with "when" option at this time. Ignoring.')
                    elif 'cron' in data:
                        log.error('Unable to use "splay" with "cron" option at this time. Ignoring.')
                    else:
                        data['_seconds'] = data['seconds']

@@ -602,6 +632,9 @@ class Schedule(object):
                    if data['_when_run']:
                        data['_when_run'] = False
                        run = True
                if 'cron' in data:
                    if seconds == 1:
                        run = True
                else:
                    run = True

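The new cron branch converts a crontab expression into a delay by asking python-croniter for the next fire time and subtracting the current timestamp; the job then runs when that delay has elapsed. A small standalone sketch of that computation (assuming croniter's ``croniter(expr, start).get_next()`` API, as used in the hunk):

.. code-block:: python

    import time
    import croniter

    def seconds_until_next(cron_expr, now=None):
        # Timestamp of the next matching minute, minus "now", gives the delay.
        now = now or int(time.time())
        next_fire = int(croniter.croniter(cron_expr, now).get_next())
        return next_fire - now

    print(seconds_until_next('*/5 * * * *'))  # seconds until the next 5-minute mark
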
@@ -19,7 +19,7 @@ import integration
from salttesting import skipIf

import random

import pwd

class AuthTest(integration.ShellCase):
    '''
@@ -32,7 +32,14 @@ class AuthTest(integration.ShellCase):

    @destructiveTest
    @skipIf(is_root, 'You must be logged in as root to run this test')
    # @with_system_user('saltdev') - doesn't work with ShellCase
    def setUp(self):
        # This is a little wasteful but shouldn't be a problem
        try:
            pwd.getpwnam('saltdev')
        except KeyError:
            self.run_call('user.add saltdev createhome=False')


    def test_pam_auth_valid_user(self):
        '''
        test pam auth mechanism is working with a valid user
@@ -59,7 +66,6 @@ class AuthTest(integration.ShellCase):
            'minion:' in resp
        )

    @skipIf(is_root, 'You must be logged in as root to run this test')
    def test_pam_auth_invalid_user(self):
        '''
        test pam auth mechanism errors for an invalid user
@@ -72,6 +78,12 @@ class AuthTest(integration.ShellCase):
            'Failed to authenticate' in ''.join(resp)
        )

    @destructiveTest
    @skipIf(is_root, 'You must be logged in as root to run this test')
    def test_zzzz_tearDown(self):
        if pwd.getpwnam('saltdev'):
            self.run_call('user.delete saltdev')

if __name__ == '__main__':
    from integration import run_tests
    run_tests(AuthTest)