mirror of
https://github.com/valitydev/salt.git
synced 2024-11-06 08:35:21 +00:00
Merge branch '2018.3' into '2019.2'
Conflicts: - salt/cloud/clouds/azurearm.py - salt/minion.py - salt/utils/network.py
This commit is contained in:
commit
ffe23fa258
@ -2,7 +2,7 @@
|
||||
What is SaltStack?
|
||||
==================
|
||||
|
||||
SaltStack makes software for complex systems management at scale.
|
||||
SaltStack makes software for complex systems management at scale.
|
||||
SaltStack is the company that created and maintains the Salt Open
|
||||
project and develops and sells SaltStack Enterprise software, services
|
||||
and support. Easy enough to get running in minutes, scalable enough to
|
||||
|
@ -338,7 +338,14 @@ def groups(username, **kwargs):
|
||||
|
||||
'''
|
||||
group_list = []
|
||||
bind = auth(username, kwargs.get('password', None))
|
||||
|
||||
# If bind credentials are configured, use them instead of user's
|
||||
if _config('binddn', mandatory=False) and _config('bindpw', mandatory=False):
|
||||
bind = _bind_for_search(anonymous=_config('anonymous', mandatory=False))
|
||||
else:
|
||||
bind = _bind(username, kwargs.get('password', ''),
|
||||
anonymous=_config('auth_by_group_membership_only', mandatory=False)
|
||||
and _config('anonymous', mandatory=False))
|
||||
|
||||
if bind:
|
||||
log.debug('ldap bind to determine group membership succeeded!')
|
||||
|
@ -1,4 +1,9 @@
|
||||
#!/bin/sh -
|
||||
|
||||
# WARNING: Changes to this file in the salt repo will be overwritten!
|
||||
# Please submit pull requests against the salt-bootstrap repo:
|
||||
# https://github.com/saltstack/salt-bootstrap
|
||||
|
||||
#======================================================================================================================
|
||||
# vim: softtabstop=4 shiftwidth=4 expandtab fenc=utf-8 spell spelllang=en cc=120
|
||||
#======================================================================================================================
|
||||
@ -18,7 +23,7 @@
|
||||
#======================================================================================================================
|
||||
set -o nounset # Treat unset variables as an error
|
||||
|
||||
__ScriptVersion="2018.08.15"
|
||||
__ScriptVersion="2019.01.08"
|
||||
__ScriptName="bootstrap-salt.sh"
|
||||
|
||||
__ScriptFullName="$0"
|
||||
@ -585,14 +590,14 @@ elif [ "$ITYPE" = "stable" ]; then
|
||||
if [ "$#" -eq 0 ];then
|
||||
STABLE_REV="latest"
|
||||
else
|
||||
if [ "$(echo "$1" | grep -E '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3|2016\.11|2017\.7|2018\.3)$')" != "" ]; then
|
||||
if [ "$(echo "$1" | grep -E '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3|2016\.11|2017\.7|2018\.3|2019\.2)$')" != "" ]; then
|
||||
STABLE_REV="$1"
|
||||
shift
|
||||
elif [ "$(echo "$1" | grep -E '^([0-9]*\.[0-9]*\.[0-9]*)$')" != "" ]; then
|
||||
STABLE_REV="archive/$1"
|
||||
shift
|
||||
else
|
||||
echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, 2016.11, 2017.7, 2018.3, latest, \$MAJOR.\$MINOR.\$PATCH)"
|
||||
echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, 2016.11, 2017.7, 2018.3, 2019.2, latest, \$MAJOR.\$MINOR.\$PATCH)"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
@ -1269,6 +1274,7 @@ __ubuntu_derivatives_translation() {
|
||||
linuxmint_13_ubuntu_base="12.04"
|
||||
linuxmint_17_ubuntu_base="14.04"
|
||||
linuxmint_18_ubuntu_base="16.04"
|
||||
linuxmint_19_ubuntu_base="18.04"
|
||||
linaro_12_ubuntu_base="12.04"
|
||||
elementary_os_02_ubuntu_base="12.04"
|
||||
neon_16_ubuntu_base="16.04"
|
||||
@ -1632,7 +1638,8 @@ __check_end_of_life_versions() {
|
||||
|
||||
amazon*linux*ami)
|
||||
# Amazon Linux versions lower than 2012.0X no longer supported
|
||||
if [ "$DISTRO_MAJOR_VERSION" -lt 2012 ]; then
|
||||
# Except for Amazon Linux 2, which reset the major version counter
|
||||
if [ "$DISTRO_MAJOR_VERSION" -lt 2012 ] && [ "$DISTRO_MAJOR_VERSION" -gt 10 ]; then
|
||||
echoerror "End of life distributions are not supported."
|
||||
echoerror "Please consider upgrading to the next stable. See:"
|
||||
echoerror " https://aws.amazon.com/amazon-linux-ami/"
|
||||
@ -1797,24 +1804,32 @@ __function_defined() {
|
||||
# process is finished so the script doesn't exit on a locked proc.
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__wait_for_apt(){
|
||||
echodebug "Checking if apt process is currently running."
|
||||
|
||||
# Timeout set at 15 minutes
|
||||
WAIT_TIMEOUT=900
|
||||
|
||||
while ps -C apt,apt-get,aptitude,dpkg >/dev/null; do
|
||||
sleep 1
|
||||
WAIT_TIMEOUT=$((WAIT_TIMEOUT - 1))
|
||||
# Run our passed in apt command
|
||||
"${@}"
|
||||
APT_RETURN=$?
|
||||
|
||||
# If timeout reaches 0, abort.
|
||||
if [ "$WAIT_TIMEOUT" -eq 0 ]; then
|
||||
echoerror "Apt, apt-get, aptitude, or dpkg process is taking too long."
|
||||
echoerror "Bootstrap script cannot proceed. Aborting."
|
||||
return 1
|
||||
fi
|
||||
# If our exit code from apt is 100, then we're waiting on a lock
|
||||
while [ $APT_RETURN -eq 100 ]; do
|
||||
echoinfo "Aware of the lock. Patiently waiting $WAIT_TIMEOUT more seconds..."
|
||||
sleep 1
|
||||
WAIT_TIMEOUT=$((WAIT_TIMEOUT - 1))
|
||||
|
||||
# If timeout reaches 0, abort.
|
||||
if [ "$WAIT_TIMEOUT" -eq 0 ]; then
|
||||
echoerror "Apt, apt-get, aptitude, or dpkg process is taking too long."
|
||||
echoerror "Bootstrap script cannot proceed. Aborting."
|
||||
return 1
|
||||
else
|
||||
# Try running apt again until our return code != 100
|
||||
"${@}"
|
||||
APT_RETURN=$?
|
||||
fi
|
||||
done
|
||||
|
||||
echodebug "No apt processes are currently running."
|
||||
return $APT_RETURN
|
||||
}
|
||||
|
||||
#--- FUNCTION -------------------------------------------------------------------------------------------------------
|
||||
@ -1823,8 +1838,7 @@ __wait_for_apt(){
|
||||
# PARAMETERS: packages
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__apt_get_install_noinput() {
|
||||
__wait_for_apt
|
||||
apt-get install -y -o DPkg::Options::=--force-confold "${@}"; return $?
|
||||
__wait_for_apt apt-get install -y -o DPkg::Options::=--force-confold "${@}"; return $?
|
||||
} # ---------- end of function __apt_get_install_noinput ----------
|
||||
|
||||
|
||||
@ -1833,8 +1847,7 @@ __apt_get_install_noinput() {
|
||||
# DESCRIPTION: (DRY) apt-get upgrade with noinput options
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__apt_get_upgrade_noinput() {
|
||||
__wait_for_apt
|
||||
apt-get upgrade -y -o DPkg::Options::=--force-confold; return $?
|
||||
__wait_for_apt apt-get upgrade -y -o DPkg::Options::=--force-confold; return $?
|
||||
} # ---------- end of function __apt_get_upgrade_noinput ----------
|
||||
|
||||
|
||||
@ -1844,11 +1857,10 @@ __apt_get_upgrade_noinput() {
|
||||
# PARAMETERS: url
|
||||
#----------------------------------------------------------------------------------------------------------------------
|
||||
__apt_key_fetch() {
|
||||
__wait_for_apt
|
||||
url=$1
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
apt-key adv ${_GPG_ARGS} --fetch-keys "$url"; return $?
|
||||
__wait_for_apt apt-key adv ${_GPG_ARGS} --fetch-keys "$url"; return $?
|
||||
} # ---------- end of function __apt_key_fetch ----------
|
||||
|
||||
|
||||
@ -2633,8 +2645,7 @@ __install_saltstack_ubuntu_repository() {
|
||||
|
||||
__apt_key_fetch "$SALTSTACK_UBUNTU_URL/SALTSTACK-GPG-KEY.pub" || return 1
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
__wait_for_apt apt-get update || return 1
|
||||
}
|
||||
|
||||
install_ubuntu_deps() {
|
||||
@ -2646,8 +2657,7 @@ install_ubuntu_deps() {
|
||||
|
||||
__enable_universe_repository || return 1
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
__wait_for_apt apt-get update || return 1
|
||||
fi
|
||||
|
||||
__PACKAGES=''
|
||||
@ -2703,8 +2713,7 @@ install_ubuntu_stable_deps() {
|
||||
# No user interaction, libc6 restart services for example
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
__wait_for_apt apt-get update || return 1
|
||||
|
||||
if [ "${_UPGRADE_SYS}" -eq $BS_TRUE ]; then
|
||||
if [ "${_INSECURE_DL}" -eq $BS_TRUE ]; then
|
||||
@ -2724,8 +2733,7 @@ install_ubuntu_stable_deps() {
|
||||
}
|
||||
|
||||
install_ubuntu_git_deps() {
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
__wait_for_apt apt-get update || return 1
|
||||
|
||||
if ! __check_command_exists git; then
|
||||
__apt_get_install_noinput git-core || return 1
|
||||
@ -3032,8 +3040,7 @@ __install_saltstack_debian_repository() {
|
||||
|
||||
__apt_key_fetch "$SALTSTACK_DEBIAN_URL/SALTSTACK-GPG-KEY.pub" || return 1
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
__wait_for_apt apt-get update || return 1
|
||||
}
|
||||
|
||||
install_debian_deps() {
|
||||
@ -3044,8 +3051,7 @@ install_debian_deps() {
|
||||
# No user interaction, libc6 restart services for example
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
__wait_for_apt apt-get update || return 1
|
||||
|
||||
if [ "${_UPGRADE_SYS}" -eq $BS_TRUE ]; then
|
||||
# Try to update GPG keys first if allowed
|
||||
@ -3164,8 +3170,7 @@ install_debian_8_git_deps() {
|
||||
/etc/apt/sources.list.d/backports.list
|
||||
fi
|
||||
|
||||
__wait_for_apt
|
||||
apt-get update || return 1
|
||||
__wait_for_apt apt-get update || return 1
|
||||
|
||||
# python-tornado package should be installed from backports repo
|
||||
__PACKAGES="${__PACKAGES} python-backports.ssl-match-hostname python-tornado/jessie-backports"
|
||||
@ -3415,36 +3420,33 @@ install_debian_check_services() {
|
||||
#
|
||||
|
||||
install_fedora_deps() {
|
||||
if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
|
||||
dnf -y update || return 1
|
||||
fi
|
||||
|
||||
__PACKAGES="${__PACKAGES:=}"
|
||||
if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then
|
||||
# Packages are named python3-<whatever>
|
||||
PY_PKG_VER=3
|
||||
__PACKAGES="python3-m2crypto python3-PyYAML"
|
||||
__PACKAGES="${__PACKAGES} python3-m2crypto python3-PyYAML"
|
||||
else
|
||||
PY_PKG_VER=2
|
||||
__PACKAGES="m2crypto"
|
||||
__PACKAGES="${__PACKAGES} m2crypto"
|
||||
if [ "$DISTRO_MAJOR_VERSION" -ge 28 ]; then
|
||||
__PACKAGES="${__PACKAGES} python2-pyyaml"
|
||||
else
|
||||
__PACKAGES="${__PACKAGES} PyYAML"
|
||||
fi
|
||||
fi
|
||||
|
||||
__PACKAGES="${__PACKAGES} procps-ng dnf-utils libyaml python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2"
|
||||
__PACKAGES="${__PACKAGES} dnf-utils libyaml procps-ng python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2"
|
||||
__PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests python${PY_PKG_VER}-zmq"
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
dnf install -y ${__PACKAGES} || return 1
|
||||
|
||||
if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
|
||||
dnf -y update || return 1
|
||||
fi
|
||||
|
||||
if [ "${_EXTRA_PACKAGES}" != "" ]; then
|
||||
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
|
||||
# shellcheck disable=SC2086
|
||||
dnf install -y ${_EXTRA_PACKAGES} || return 1
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
dnf install -y ${__PACKAGES} ${_EXTRA_PACKAGES} || return 1
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
@ -3494,36 +3496,38 @@ install_fedora_git_deps() {
|
||||
PY_PKG_VER=2
|
||||
fi
|
||||
|
||||
__PACKAGES="${__PACKAGES:=}"
|
||||
if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
|
||||
dnf install -y ca-certificates || return 1
|
||||
__PACKAGES="${__PACKAGES} ca-certificates"
|
||||
fi
|
||||
if ! __check_command_exists git; then
|
||||
__PACKAGES="${__PACKAGES} git"
|
||||
fi
|
||||
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
|
||||
__PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud python${PY_PKG_VER}-netaddr"
|
||||
fi
|
||||
__PACKAGES="${__PACKAGES} python${PY_PKG_VER}-systemd"
|
||||
|
||||
# Fedora 28+ ships with tornado 5.0+ which is broken for salt on py3
|
||||
# https://github.com/saltstack/salt-bootstrap/issues/1220
|
||||
if [ "${PY_PKG_VER}" -lt 3 ] || [ "$DISTRO_MAJOR_VERSION" -lt 28 ]; then
|
||||
__PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado"
|
||||
fi
|
||||
|
||||
install_fedora_deps || return 1
|
||||
|
||||
if ! __check_command_exists git; then
|
||||
dnf install -y git || return 1
|
||||
fi
|
||||
|
||||
__git_clone_and_checkout || return 1
|
||||
|
||||
__PACKAGES="python${PY_PKG_VER}-systemd"
|
||||
# Fedora 28+ needs tornado <5.0 from pip
|
||||
# https://github.com/saltstack/salt-bootstrap/issues/1220
|
||||
if [ "${PY_PKG_VER}" -eq 3 ] && [ "$DISTRO_MAJOR_VERSION" -ge 28 ]; then
|
||||
__check_pip_allowed "You need to allow pip based installations (-P) for Tornado <5.0 in order to install Salt on Python 3"
|
||||
grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" | while IFS='
|
||||
' read -r dep; do
|
||||
"${_PY_EXE}" -m pip install "${dep}" || return 1
|
||||
done
|
||||
else
|
||||
__PACKAGES="${__PACKAGES} python${PY_PKG_VER}-tornado"
|
||||
fi
|
||||
|
||||
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
|
||||
__PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud python${PY_PKG_VER}-netaddr"
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
dnf install -y ${__PACKAGES} || return 1
|
||||
|
||||
# Let's trigger config_salt()
|
||||
if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
|
||||
_TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
|
||||
@ -4681,6 +4685,138 @@ install_amazon_linux_ami_git_deps() {
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_git_deps() {
|
||||
if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then
|
||||
yum -y install ca-certificates || return 1
|
||||
fi
|
||||
|
||||
PIP_EXE='pip'
|
||||
if __check_command_exists python2.7; then
|
||||
if ! __check_command_exists pip2.7; then
|
||||
__yum_install_noinput python2-pip
|
||||
fi
|
||||
PIP_EXE='/bin/pip'
|
||||
_PY_EXE='python2.7'
|
||||
fi
|
||||
|
||||
install_amazon_linux_ami_2_deps || return 1
|
||||
|
||||
if ! __check_command_exists git; then
|
||||
__yum_install_noinput git || return 1
|
||||
fi
|
||||
|
||||
__git_clone_and_checkout || return 1
|
||||
|
||||
__PACKAGES=""
|
||||
__PIP_PACKAGES=""
|
||||
|
||||
if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then
|
||||
__check_pip_allowed "You need to allow pip based installations (-P) in order to install apache-libcloud"
|
||||
__PACKAGES="${__PACKAGES} python27-pip"
|
||||
__PIP_PACKAGES="${__PIP_PACKAGES} apache-libcloud>=$_LIBCLOUD_MIN_VERSION"
|
||||
fi
|
||||
|
||||
if [ -f "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" ]; then
|
||||
# We're on the develop branch, install whichever tornado is on the requirements file
|
||||
__REQUIRED_TORNADO="$(grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt")"
|
||||
if [ "${__REQUIRED_TORNADO}" != "" ]; then
|
||||
__PACKAGES="${__PACKAGES} ${pkg_append}-tornado"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "${__PACKAGES}" != "" ]; then
|
||||
# shellcheck disable=SC2086
|
||||
__yum_install_noinput ${__PACKAGES} || return 1
|
||||
fi
|
||||
|
||||
if [ "${__PIP_PACKAGES}" != "" ]; then
|
||||
# shellcheck disable=SC2086
|
||||
${PIP_EXE} install ${__PIP_PACKAGES} || return 1
|
||||
fi
|
||||
|
||||
# Let's trigger config_salt()
|
||||
if [ "$_TEMP_CONFIG_DIR" = "null" ]; then
|
||||
_TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/"
|
||||
CONFIG_SALT_FUNC="config_salt"
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_deps() {
|
||||
# Shim to figure out if we're using old (rhel) or new (aws) rpms.
|
||||
_USEAWS=$BS_FALSE
|
||||
pkg_append="python"
|
||||
|
||||
if [ "$ITYPE" = "stable" ]; then
|
||||
repo_rev="$STABLE_REV"
|
||||
else
|
||||
repo_rev="latest"
|
||||
fi
|
||||
|
||||
if echo $repo_rev | grep -E -q '^archive'; then
|
||||
year=$(echo "$repo_rev" | cut -d '/' -f 2 | cut -c1-4)
|
||||
else
|
||||
year=$(echo "$repo_rev" | cut -c1-4)
|
||||
fi
|
||||
|
||||
if echo "$repo_rev" | grep -E -q '^(latest|2016\.11)$' || \
|
||||
[ "$year" -gt 2016 ]; then
|
||||
_USEAWS=$BS_TRUE
|
||||
pkg_append="python"
|
||||
fi
|
||||
|
||||
# We need to install yum-utils before doing anything else when installing on
|
||||
# Amazon Linux ECS-optimized images. See issue #974.
|
||||
__yum_install_noinput yum-utils
|
||||
|
||||
# Do upgrade early
|
||||
if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then
|
||||
yum -y update || return 1
|
||||
fi
|
||||
|
||||
if [ $_DISABLE_REPOS -eq $BS_FALSE ] || [ "$_CUSTOM_REPO_URL" != "null" ]; then
|
||||
__REPO_FILENAME="saltstack-repo.repo"
|
||||
|
||||
base_url="$HTTP_VAL://${_REPO_URL}/yum/redhat/7/\$basearch/$repo_rev/"
|
||||
base_url="$HTTP_VAL://${_REPO_URL}/yum/amazon/2/\$basearch/latest/"
|
||||
gpg_key="${base_url}SALTSTACK-GPG-KEY.pub
|
||||
${base_url}base/RPM-GPG-KEY-CentOS-7"
|
||||
repo_name="SaltStack repo for Amazon Linux 2.0"
|
||||
|
||||
# This should prob be refactored to use __install_saltstack_rhel_repository()
|
||||
# With args passed in to do the right thing. Reformatted to be more like the
|
||||
# amazon linux yum file.
|
||||
if [ ! -s "/etc/yum.repos.d/${__REPO_FILENAME}" ]; then
|
||||
cat <<_eof > "/etc/yum.repos.d/${__REPO_FILENAME}"
|
||||
[saltstack-repo]
|
||||
name=$repo_name
|
||||
failovermethod=priority
|
||||
priority=10
|
||||
gpgcheck=1
|
||||
gpgkey=$gpg_key
|
||||
baseurl=$base_url
|
||||
_eof
|
||||
fi
|
||||
|
||||
fi
|
||||
|
||||
# Package python-ordereddict-1.1-2.el6.noarch is obsoleted by python26-2.6.9-2.88.amzn1.x86_64
|
||||
# which is already installed
|
||||
__PACKAGES="m2crypto ${pkg_append}-crypto ${pkg_append}-jinja2 PyYAML"
|
||||
__PACKAGES="${__PACKAGES} ${pkg_append}-msgpack ${pkg_append}-requests ${pkg_append}-zmq"
|
||||
__PACKAGES="${__PACKAGES} ${pkg_append}-futures"
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
__yum_install_noinput ${__PACKAGES} || return 1
|
||||
|
||||
if [ "${_EXTRA_PACKAGES}" != "" ]; then
|
||||
echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}"
|
||||
# shellcheck disable=SC2086
|
||||
__yum_install_noinput ${_EXTRA_PACKAGES} || return 1
|
||||
fi
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_stable() {
|
||||
install_centos_stable || return 1
|
||||
return 0
|
||||
@ -4715,6 +4851,41 @@ install_amazon_linux_ami_testing_post() {
|
||||
install_centos_testing_post || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_stable() {
|
||||
install_centos_stable || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_stable_post() {
|
||||
install_centos_stable_post || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_restart_daemons() {
|
||||
install_centos_restart_daemons || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_git() {
|
||||
install_centos_git || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_git_post() {
|
||||
install_centos_git_post || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_testing() {
|
||||
install_centos_testing || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
install_amazon_linux_ami_2_testing_post() {
|
||||
install_centos_testing_post || return 1
|
||||
return 0
|
||||
}
|
||||
#
|
||||
# Ended Amazon Linux AMI Install Functions
|
||||
#
|
||||
@ -5336,7 +5507,8 @@ install_openbsd_restart_daemons() {
|
||||
# SmartOS Install Functions
|
||||
#
|
||||
install_smartos_deps() {
|
||||
pkgin -y install zeromq py27-crypto py27-m2crypto py27-msgpack py27-yaml py27-jinja2 py27-zmq py27-requests || return 1
|
||||
smartos_deps="$(pkgin show-deps salt | grep '^\s' | grep -v '\snot' | xargs) py27-m2crypto"
|
||||
pkgin -y install "${smartos_deps}" || return 1
|
||||
|
||||
# Set _SALT_ETC_DIR to SmartOS default if they didn't specify
|
||||
_SALT_ETC_DIR=${BS_SALT_ETC_DIR:-/opt/local/etc/salt}
|
||||
|
@ -9,6 +9,7 @@ authenticating peers
|
||||
# the Array class, which has incompatibilities with it.
|
||||
from __future__ import absolute_import, print_function
|
||||
import os
|
||||
import random
|
||||
import sys
|
||||
import copy
|
||||
import time
|
||||
@ -740,6 +741,10 @@ class AsyncAuth(object):
|
||||
'minion.\nOr restart the Salt Master in open mode to '
|
||||
'clean out the keys. The Salt Minion will now exit.'
|
||||
)
|
||||
# Add a random sleep here for systems that are using a
|
||||
# a service manager to immediately restart the service
|
||||
# to avoid overloading the system
|
||||
time.sleep(random.randint(10, 20))
|
||||
sys.exit(salt.defaults.exitcodes.EX_NOPERM)
|
||||
# has the master returned that its maxed out with minions?
|
||||
elif payload['load']['ret'] == 'full':
|
||||
|
@ -303,8 +303,8 @@ def _file_lists(load, form):
|
||||
except os.error:
|
||||
log.critical('Unable to make cachedir %s', list_cachedir)
|
||||
return []
|
||||
list_cache = os.path.join(list_cachedir, '{0}.p'.format(load['saltenv']))
|
||||
w_lock = os.path.join(list_cachedir, '.{0}.w'.format(load['saltenv']))
|
||||
list_cache = os.path.join(list_cachedir, '{0}.p'.format(salt.utils.files.safe_filename_leaf(load['saltenv'])))
|
||||
w_lock = os.path.join(list_cachedir, '.{0}.w'.format(salt.utils.files.safe_filename_leaf(load['saltenv'])))
|
||||
cache_match, refresh_cache, save_cache = \
|
||||
salt.fileserver.check_file_list_cache(
|
||||
__opts__, form, list_cache, w_lock
|
||||
|
@ -1004,10 +1004,11 @@ def _virtual(osdata):
|
||||
if 'QEMU Virtual CPU' in model:
|
||||
grains['virtual'] = 'kvm'
|
||||
elif osdata['kernel'] == 'OpenBSD':
|
||||
if osdata['manufacturer'] in ['QEMU', 'Red Hat']:
|
||||
grains['virtual'] = 'kvm'
|
||||
if osdata['manufacturer'] == 'OpenBSD':
|
||||
grains['virtual'] = 'vmm'
|
||||
if 'manufacturer' in osdata:
|
||||
if osdata['manufacturer'] in ['QEMU', 'Red Hat', 'Joyent']:
|
||||
grains['virtual'] = 'kvm'
|
||||
if osdata['manufacturer'] == 'OpenBSD':
|
||||
grains['virtual'] = 'vmm'
|
||||
elif osdata['kernel'] == 'SunOS':
|
||||
if grains['virtual'] == 'LDOM':
|
||||
roles = []
|
||||
|
@ -1725,7 +1725,7 @@ class LazyLoader(salt.utils.lazy.LazyDict):
|
||||
if not isinstance(key, six.string_types):
|
||||
raise KeyError('The key must be a string.')
|
||||
if '.' not in key:
|
||||
raise KeyError('The key \'%s\' should contain a \'.\'', key)
|
||||
raise KeyError('The key \'{0}\' should contain a \'.\''.format(key))
|
||||
mod_name, _ = key.split('.', 1)
|
||||
with self._lock:
|
||||
# It is possible that the key is in the dictionary after
|
||||
|
@ -136,9 +136,7 @@ def setup_handlers():
|
||||
transport_registry = TransportRegistry(default_transports)
|
||||
url = urlparse(dsn)
|
||||
if not transport_registry.supported_scheme(url.scheme):
|
||||
raise ValueError(
|
||||
'Unsupported Sentry DSN scheme: %s', url.scheme
|
||||
)
|
||||
raise ValueError('Unsupported Sentry DSN scheme: {0}'.format(url.scheme))
|
||||
except ValueError as exc:
|
||||
log.info(
|
||||
'Raven failed to parse the configuration provided DSN: %s', exc
|
||||
|
@ -25,6 +25,8 @@ from binascii import crc32
|
||||
# Import Salt Libs
|
||||
# pylint: disable=import-error,no-name-in-module,redefined-builtin
|
||||
from salt.ext import six
|
||||
from salt._compat import ipaddress
|
||||
from salt.utils.network import parse_host_port
|
||||
from salt.ext.six.moves import range
|
||||
from salt.utils.zeromq import zmq, ZMQDefaultLoop, install_zmq, ZMQ_VERSION_INFO
|
||||
import salt.transport.client
|
||||
@ -238,27 +240,29 @@ def resolve_dns(opts, fallback=True):
|
||||
|
||||
|
||||
def prep_ip_port(opts):
|
||||
'''
|
||||
parse host:port values from opts['master'] and return valid:
|
||||
master: ip address or hostname as a string
|
||||
master_port: (optional) master returner port as integer
|
||||
|
||||
e.g.:
|
||||
- master: 'localhost:1234' -> {'master': 'localhost', 'master_port': 1234}
|
||||
- master: '127.0.0.1:1234' -> {'master': '127.0.0.1', 'master_port' :1234}
|
||||
- master: '[::1]:1234' -> {'master': '::1', 'master_port': 1234}
|
||||
- master: 'fe80::a00:27ff:fedc:ba98' -> {'master': 'fe80::a00:27ff:fedc:ba98'}
|
||||
'''
|
||||
ret = {}
|
||||
# Use given master IP if "ip_only" is set or if master_ip is an ipv6 address without
|
||||
# a port specified. The is_ipv6 check returns False if brackets are used in the IP
|
||||
# definition such as master: '[::1]:1234'.
|
||||
if opts['master_uri_format'] == 'ip_only' or salt.utils.network.is_ipv6(opts['master']):
|
||||
ret['master'] = opts['master']
|
||||
if opts['master_uri_format'] == 'ip_only':
|
||||
ret['master'] = ipaddress.ip_address(opts['master'])
|
||||
else:
|
||||
ip_port = opts['master'].rsplit(':', 1)
|
||||
if len(ip_port) == 1:
|
||||
# e.g. master: mysaltmaster
|
||||
ret['master'] = ip_port[0]
|
||||
else:
|
||||
# e.g. master: localhost:1234
|
||||
# e.g. master: 127.0.0.1:1234
|
||||
# e.g. master: [::1]:1234
|
||||
# Strip off brackets for ipv6 support
|
||||
ret['master'] = ip_port[0].strip('[]')
|
||||
host, port = parse_host_port(opts['master'])
|
||||
ret = {'master': host}
|
||||
if port:
|
||||
ret.update({'master_port': port})
|
||||
|
||||
# Cast port back to an int! Otherwise a TypeError is thrown
|
||||
# on some of the socket calls elsewhere in the minion and utils code.
|
||||
ret['master_port'] = int(ip_port[1])
|
||||
return ret
|
||||
|
||||
|
||||
|
@ -1899,9 +1899,11 @@ def get_network_settings():
|
||||
|
||||
hostname = _parse_hostname()
|
||||
domainname = _parse_domainname()
|
||||
searchdomain = _parse_searchdomain()
|
||||
|
||||
settings['hostname'] = hostname
|
||||
settings['domainname'] = domainname
|
||||
settings['searchdomain'] = searchdomain
|
||||
|
||||
else:
|
||||
settings = _parse_current_network_settings()
|
||||
|
@ -81,7 +81,11 @@ __grants__ = [
|
||||
'ALL PRIVILEGES',
|
||||
'ALTER',
|
||||
'ALTER ROUTINE',
|
||||
'BACKUP_ADMIN',
|
||||
'BINLOG_ADMIN',
|
||||
'CONNECTION_ADMIN',
|
||||
'CREATE',
|
||||
'CREATE ROLE',
|
||||
'CREATE ROUTINE',
|
||||
'CREATE TABLESPACE',
|
||||
'CREATE TEMPORARY TABLES',
|
||||
@ -89,26 +93,37 @@ __grants__ = [
|
||||
'CREATE VIEW',
|
||||
'DELETE',
|
||||
'DROP',
|
||||
'DROP ROLE',
|
||||
'ENCRYPTION_KEY_ADMIN',
|
||||
'EVENT',
|
||||
'EXECUTE',
|
||||
'FILE',
|
||||
'GRANT OPTION',
|
||||
'GROUP_REPLICATION_ADMIN',
|
||||
'INDEX',
|
||||
'INSERT',
|
||||
'LOCK TABLES',
|
||||
'PERSIST_RO_VARIABLES_ADMIN',
|
||||
'PROCESS',
|
||||
'REFERENCES',
|
||||
'RELOAD',
|
||||
'REPLICATION CLIENT',
|
||||
'REPLICATION SLAVE',
|
||||
'REPLICATION_SLAVE_ADMIN',
|
||||
'RESOURCE_GROUP_ADMIN',
|
||||
'RESOURCE_GROUP_USER',
|
||||
'ROLE_ADMIN',
|
||||
'SELECT',
|
||||
'SET_USER_ID',
|
||||
'SHOW DATABASES',
|
||||
'SHOW VIEW',
|
||||
'SHUTDOWN',
|
||||
'SUPER',
|
||||
'SYSTEM_VARIABLES_ADMIN',
|
||||
'TRIGGER',
|
||||
'UPDATE',
|
||||
'USAGE'
|
||||
'USAGE',
|
||||
'XA_RECOVER_ADMIN'
|
||||
]
|
||||
|
||||
__ssl_options_parameterized__ = [
|
||||
@ -121,6 +136,52 @@ __ssl_options__ = __ssl_options_parameterized__ + [
|
||||
'X509'
|
||||
]
|
||||
|
||||
__all_privileges__ = [
|
||||
'ALTER',
|
||||
'ALTER ROUTINE',
|
||||
'BACKUP_ADMIN',
|
||||
'BINLOG_ADMIN',
|
||||
'CONNECTION_ADMIN',
|
||||
'CREATE',
|
||||
'CREATE ROLE',
|
||||
'CREATE ROUTINE',
|
||||
'CREATE TABLESPACE',
|
||||
'CREATE TEMPORARY TABLES',
|
||||
'CREATE USER',
|
||||
'CREATE VIEW',
|
||||
'DELETE',
|
||||
'DROP',
|
||||
'DROP ROLE',
|
||||
'ENCRYPTION_KEY_ADMIN',
|
||||
'EVENT',
|
||||
'EXECUTE',
|
||||
'FILE',
|
||||
'GROUP_REPLICATION_ADMIN',
|
||||
'INDEX',
|
||||
'INSERT',
|
||||
'LOCK TABLES',
|
||||
'PERSIST_RO_VARIABLES_ADMIN',
|
||||
'PROCESS',
|
||||
'REFERENCES',
|
||||
'RELOAD',
|
||||
'REPLICATION CLIENT',
|
||||
'REPLICATION SLAVE',
|
||||
'REPLICATION_SLAVE_ADMIN',
|
||||
'RESOURCE_GROUP_ADMIN',
|
||||
'RESOURCE_GROUP_USER',
|
||||
'ROLE_ADMIN',
|
||||
'SELECT',
|
||||
'SET_USER_ID',
|
||||
'SHOW DATABASES',
|
||||
'SHOW VIEW',
|
||||
'SHUTDOWN',
|
||||
'SUPER',
|
||||
'SYSTEM_VARIABLES_ADMIN',
|
||||
'TRIGGER',
|
||||
'UPDATE',
|
||||
'XA_RECOVER_ADMIN'
|
||||
]
|
||||
|
||||
r'''
|
||||
DEVELOPER NOTE: ABOUT arguments management, escapes, formats, arguments and
|
||||
security of SQL.
|
||||
@ -1787,12 +1848,12 @@ def user_grants(user,
|
||||
|
||||
|
||||
def grant_exists(grant,
|
||||
database,
|
||||
user,
|
||||
host='localhost',
|
||||
grant_option=False,
|
||||
escape=True,
|
||||
**connection_args):
|
||||
database,
|
||||
user,
|
||||
host='localhost',
|
||||
grant_option=False,
|
||||
escape=True,
|
||||
**connection_args):
|
||||
'''
|
||||
Checks to see if a grant exists in the database
|
||||
|
||||
@ -1803,6 +1864,14 @@ def grant_exists(grant,
|
||||
salt '*' mysql.grant_exists \
|
||||
'SELECT,INSERT,UPDATE,...' 'database.*' 'frank' 'localhost'
|
||||
'''
|
||||
|
||||
server_version = version(**connection_args)
|
||||
if 'ALL' in grant:
|
||||
if salt.utils.versions.version_cmp(server_version, '8.0') >= 0:
|
||||
grant = ','.join([i for i in __all_privileges__])
|
||||
else:
|
||||
grant = 'ALL PRIVILEGES'
|
||||
|
||||
try:
|
||||
target = __grant_generate(
|
||||
grant, database, user, host, grant_option, escape
|
||||
@ -1818,15 +1887,27 @@ def grant_exists(grant,
|
||||
'this could also indicate a connection error. Check your configuration.')
|
||||
return False
|
||||
|
||||
target_tokens = None
|
||||
# Combine grants that match the same database
|
||||
_grants = {}
|
||||
for grant in grants:
|
||||
try:
|
||||
if not target_tokens: # Avoid the overhead of re-calc in loop
|
||||
target_tokens = _grant_to_tokens(target)
|
||||
grant_tokens = _grant_to_tokens(grant)
|
||||
grant_token = _grant_to_tokens(grant)
|
||||
if grant_token['database'] not in _grants:
|
||||
_grants[grant_token['database']] = {'user': grant_token['user'],
|
||||
'database': grant_token['database'],
|
||||
'host': grant_token['host'],
|
||||
'grant': grant_token['grant']}
|
||||
else:
|
||||
_grants[grant_token['database']]['grant'].extend(grant_token['grant'])
|
||||
|
||||
target_tokens = _grant_to_tokens(target)
|
||||
for database, grant_tokens in _grants.items():
|
||||
try:
|
||||
_grant_tokens = {}
|
||||
_target_tokens = {}
|
||||
|
||||
_grant_matches = [True if i in grant_tokens['grant']
|
||||
else False for i in target_tokens['grant']]
|
||||
|
||||
for item in ['user', 'database', 'host']:
|
||||
_grant_tokens[item] = grant_tokens[item].replace('"', '').replace('\\', '').replace('`', '')
|
||||
_target_tokens[item] = target_tokens[item].replace('"', '').replace('\\', '').replace('`', '')
|
||||
@ -1834,7 +1915,7 @@ def grant_exists(grant,
|
||||
if _grant_tokens['user'] == _target_tokens['user'] and \
|
||||
_grant_tokens['database'] == _target_tokens['database'] and \
|
||||
_grant_tokens['host'] == _target_tokens['host'] and \
|
||||
set(grant_tokens['grant']) >= set(target_tokens['grant']):
|
||||
all(_grant_matches):
|
||||
return True
|
||||
else:
|
||||
log.debug('grants mismatch \'%s\'<>\'%s\'', grant_tokens, target_tokens)
|
||||
|
@ -29,7 +29,7 @@ import requests
|
||||
import salt.exceptions
|
||||
import salt.utils.json
|
||||
|
||||
API_ENDPOINT = "https://api.opsgenie.com/v1/json/saltstack?apiKey="
|
||||
API_ENDPOINT = "https://api.opsgenie.com/v2/alerts"
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@ -68,14 +68,14 @@ def post_data(api_key=None, name='OpsGenie Execution Module', reason=None,
|
||||
functionality you must provide name field for both states like in
|
||||
this case.
|
||||
'''
|
||||
if api_key is None or reason is None or action_type is None:
|
||||
if api_key is None or reason is None:
|
||||
raise salt.exceptions.SaltInvocationError(
|
||||
'API Key or Reason or Action Type cannot be None.')
|
||||
'API Key or Reason cannot be None.')
|
||||
|
||||
data = dict()
|
||||
data['name'] = name
|
||||
data['reason'] = reason
|
||||
data['actionType'] = action_type
|
||||
data['alias'] = name
|
||||
data['message'] = reason
|
||||
# data['actions'] = action_type
|
||||
data['cpuModel'] = __grains__['cpu_model']
|
||||
data['cpuArch'] = __grains__['cpuarch']
|
||||
data['fqdn'] = __grains__['fqdn']
|
||||
@ -93,8 +93,17 @@ def post_data(api_key=None, name='OpsGenie Execution Module', reason=None,
|
||||
log.debug('Below data will be posted:\n%s', data)
|
||||
log.debug('API Key: %s \t API Endpoint: %s', api_key, API_ENDPOINT)
|
||||
|
||||
response = requests.post(
|
||||
url=API_ENDPOINT + api_key,
|
||||
data=salt.utils.json.dumps(data),
|
||||
headers={'Content-Type': 'application/json'})
|
||||
if action_type == "Create":
|
||||
response = requests.post(
|
||||
url=API_ENDPOINT,
|
||||
data=salt.utils.json.dumps(data),
|
||||
headers={'Content-Type': 'application/json',
|
||||
'Authorization': 'GenieKey ' + api_key})
|
||||
else:
|
||||
response = requests.post(
|
||||
url=API_ENDPOINT + "/" + name + "/close?identifierType=alias",
|
||||
data=salt.utils.json.dumps(data),
|
||||
headers={'Content-Type': 'application/json',
|
||||
'Authorization': 'GenieKey ' + api_key})
|
||||
|
||||
return response.status_code, response.text
|
||||
|
@ -412,7 +412,7 @@ def create_snapshot(config='root', snapshot_type='single', pre_number=None,
|
||||
cleanup_algorithm, userdata)
|
||||
else:
|
||||
raise CommandExecutionError(
|
||||
"Invalid snapshot type '{0}'", format(snapshot_type))
|
||||
"Invalid snapshot type '{0}'".format(snapshot_type))
|
||||
except dbus.DBusException as exc:
|
||||
raise CommandExecutionError(
|
||||
'Error encountered while listing changed files: {0}'
|
||||
|
@ -357,6 +357,9 @@ def gets_service_instance_via_proxy(fn):
|
||||
local_service_instance = \
|
||||
salt.utils.vmware.get_service_instance(
|
||||
*connection_details)
|
||||
# Tuples are immutable, so if we want to change what
|
||||
# was passed in, we need to first convert to a list.
|
||||
args = list(args)
|
||||
args[idx] = local_service_instance
|
||||
else:
|
||||
# case 2: Not enough positional parameters so
|
||||
|
179
salt/modules/win_auditpol.py
Normal file
179
salt/modules/win_auditpol.py
Normal file
@ -0,0 +1,179 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
'''
|
||||
A salt module for modifying the audit policies on the machine
|
||||
|
||||
Though this module does not set group policy for auditing, it displays how all
|
||||
auditing configuration is applied on the machine, either set directly or via
|
||||
local or domain group policy.
|
||||
|
||||
.. versionadded:: 2018.3.4
|
||||
.. versionadded:: 2019.2.1
|
||||
|
||||
This module allows you to view and modify the audit settings as they are applied
|
||||
on the machine. The audit settings are broken down into nine categories:
|
||||
|
||||
- Account Logon
|
||||
- Account Management
|
||||
- Detailed Tracking
|
||||
- DS Access
|
||||
- Logon/Logoff
|
||||
- Object Access
|
||||
- Policy Change
|
||||
- Privilege Use
|
||||
- System
|
||||
|
||||
The ``get_settings`` function will return the subcategories for all nine of
|
||||
the above categories in one dictionary along with their auditing status.
|
||||
|
||||
To modify a setting you only need to specify the subcategory name and the value
|
||||
you wish to set. Valid settings are:
|
||||
|
||||
- No Auditing
|
||||
- Success
|
||||
- Failure
|
||||
- Success and Failure
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Get current state of all audit settings
|
||||
salt * auditpol.get_settings
|
||||
|
||||
# Get the current state of all audit settings in the "Account Logon"
|
||||
# category
|
||||
salt * auditpol.get_settings category="Account Logon"
|
||||
|
||||
# Get current state of the "Credential Validation" setting
|
||||
salt * auditpol.get_setting name="Credential Validation"
|
||||
|
||||
# Set the state of the "Credential Validation" setting to Success and
|
||||
# Failure
|
||||
salt * auditpol.set_setting name="Credential Validation" value="Success and Failure"
|
||||
|
||||
# Set the state of the "Credential Validation" setting to No Auditing
|
||||
salt * auditpol.set_setting name="Credential Validation" value="No Auditing"
|
||||
'''
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
|
||||
# Import Salt libs
|
||||
import salt.utils.platform
|
||||
|
||||
# Define the module's virtual name
|
||||
__virtualname__ = 'auditpol'
|
||||
|
||||
|
||||
def __virtual__():
|
||||
'''
|
||||
Only works on Windows systems
|
||||
'''
|
||||
if not salt.utils.platform.is_windows():
|
||||
return False, "Module win_auditpol: module only available on Windows"
|
||||
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def get_settings(category='All'):
|
||||
'''
|
||||
Get the current configuration for all audit settings specified in the
|
||||
category
|
||||
|
||||
Args:
|
||||
category (str):
|
||||
One of the nine categories to return. Can also be ``All`` to return
|
||||
the settings for all categories. Valid options are:
|
||||
|
||||
- Account Logon
|
||||
- Account Management
|
||||
- Detailed Tracking
|
||||
- DS Access
|
||||
- Logon/Logoff
|
||||
- Object Access
|
||||
- Policy Change
|
||||
- Privilege Use
|
||||
- System
|
||||
- All
|
||||
|
||||
Default value is ``All``
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing all subcategories for the specified
|
||||
category along with their current configuration
|
||||
|
||||
Raises:
|
||||
KeyError: On invalid category
|
||||
CommandExecutionError: If an error is encountered retrieving the settings
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Get current state of all audit settings
|
||||
salt * auditipol.get_settings
|
||||
|
||||
# Get the current state of all audit settings in the "Account Logon"
|
||||
# category
|
||||
salt * auditpol.get_settings "Account Logon"
|
||||
'''
|
||||
return __utils__['auditpol.get_settings'](category=category)
|
||||
|
||||
|
||||
def get_setting(name):
|
||||
'''
|
||||
Get the current configuration for the named audit setting
|
||||
|
||||
Args:
|
||||
name (str): The name of the setting to retrieve
|
||||
|
||||
Returns:
|
||||
str: The current configuration for the named setting
|
||||
|
||||
Raises:
|
||||
KeyError: On invalid setting name
|
||||
CommandExecutionError: If an error is encountered retrieving the settings
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
# Get current state of the "Credential Validation" setting
|
||||
salt * auditpol.get_setting "Credential Validation"
|
||||
'''
|
||||
return __utils__['auditpol.get_setting'](name=name)
|
||||
|
||||
|
||||
def set_setting(name, value):
|
||||
'''
|
||||
Set the configuration for the named audit setting
|
||||
|
||||
Args:
|
||||
|
||||
name (str):
|
||||
The name of the setting to configure
|
||||
|
||||
value (str):
|
||||
The configuration for the named value. Valid options are:
|
||||
|
||||
- No Auditing
|
||||
- Success
|
||||
- Failure
|
||||
- Success and Failure
|
||||
|
||||
Returns:
|
||||
bool: True if successful
|
||||
|
||||
Raises:
|
||||
KeyError: On invalid ``name`` or ``value``
|
||||
CommandExecutionError: If an error is encountered modifying the setting
|
||||
|
||||
CLI Example:
|
||||
|
||||
.. code-block:: bash
|
||||
# Set the state of the "Credential Validation" setting to Success and
|
||||
# Failure
|
||||
salt * auditpol.set_setting "Credential Validation" "Success and Failure"
|
||||
|
||||
# Set the state of the "Credential Validation" setting to No Auditing
|
||||
salt * auditpol.set_setting "Credential Validation" "No Auditing"
|
||||
'''
|
||||
return __utils__['auditpol.set_setting'](name=name, value=value)
|
@ -39,12 +39,14 @@ Current known limitations
|
||||
'''
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
import csv
|
||||
import io
|
||||
import os
|
||||
import logging
|
||||
import re
|
||||
import locale
|
||||
import ctypes
|
||||
import tempfile
|
||||
import time
|
||||
|
||||
# Import Salt libs
|
||||
@ -280,6 +282,19 @@ class _policy_info(object):
|
||||
netsh advfirewall>set help
|
||||
netsh advfirewall>set domain help
|
||||
|
||||
AdvAudit Mechanism
|
||||
------------------
|
||||
|
||||
The Advanced Audit Policies are configured using a combination of the
|
||||
auditpol command-line utility and modifying the audit.csv file in two
|
||||
locations. The value of this key is a dict with the following make-up:
|
||||
|
||||
====== ===================================
|
||||
Key Value
|
||||
====== ===================================
|
||||
Option The Advanced Audit Policy to modify
|
||||
====== ===================================
|
||||
|
||||
Transforms
|
||||
----------
|
||||
|
||||
@ -310,6 +325,13 @@ class _policy_info(object):
|
||||
'Not Defined': 'Not Defined',
|
||||
None: 'Not Defined',
|
||||
}
|
||||
self.advanced_audit_lookup = {
|
||||
0: 'No Auditing',
|
||||
1: 'Success',
|
||||
2: 'Failure',
|
||||
3: 'Success and Failure',
|
||||
None: 'Not Configured',
|
||||
}
|
||||
self.sc_removal_lookup = {
|
||||
0: 'No Action',
|
||||
1: 'Lock Workstation',
|
||||
@ -372,6 +394,18 @@ class _policy_info(object):
|
||||
'value_lookup': True,
|
||||
},
|
||||
}
|
||||
self.advanced_audit_transform = {
|
||||
'Get': '_dict_lookup',
|
||||
'Put': '_dict_lookup',
|
||||
'GetArgs': {
|
||||
'lookup': self.advanced_audit_lookup,
|
||||
'value_lookup': False,
|
||||
},
|
||||
'PutArgs': {
|
||||
'lookup': self.advanced_audit_lookup,
|
||||
'value_lookup': True,
|
||||
},
|
||||
}
|
||||
self.enabled_one_disabled_zero_strings = {
|
||||
'0': 'Disabled',
|
||||
'1': 'Enabled',
|
||||
@ -418,6 +452,13 @@ class _policy_info(object):
|
||||
'Local Policies',
|
||||
'Audit Policy'
|
||||
]
|
||||
self.advanced_audit_policy_gpedit_path = [
|
||||
'Computer Configuration',
|
||||
'Windows Settings',
|
||||
'Security Settings',
|
||||
'Advanced Audit Policy Configuration',
|
||||
'System Audit Policies - Local Group Policy Object'
|
||||
]
|
||||
self.account_lockout_policy_gpedit_path = [
|
||||
'Computer Configuration',
|
||||
'Windows Settings',
|
||||
@ -2603,6 +2644,11 @@ class _policy_info(object):
|
||||
'Put': '_minutes_to_seconds'
|
||||
},
|
||||
},
|
||||
########## LEGACY AUDIT POLICIES ##########
|
||||
# To use these set the following policy to DISABLED
|
||||
# "Audit: Force audit policy subcategory settings (Windows Vista or later) to override audit policy category settings"
|
||||
# or it's alias...
|
||||
# SceNoApplyLegacyAuditPolicy
|
||||
'AuditAccountLogon': {
|
||||
'Policy': 'Audit account logon events',
|
||||
'lgpo_section': self.audit_policy_gpedit_path,
|
||||
@ -2693,6 +2739,557 @@ class _policy_info(object):
|
||||
},
|
||||
'Transform': self.audit_transform,
|
||||
},
|
||||
########## END OF LEGACY AUDIT POLICIES ##########
|
||||
########## ADVANCED AUDIT POLICIES ##########
|
||||
# Advanced Audit Policies
|
||||
# To use these set the following policy to ENABLED
|
||||
# "Audit: Force audit policy subcategory settings (Windows
|
||||
# Vista or later) to override audit policy category
|
||||
# settings"
|
||||
# or it's alias...
|
||||
# SceNoApplyLegacyAuditPolicy
|
||||
|
||||
# Account Logon Section
|
||||
'AuditCredentialValidation': {
|
||||
'Policy': 'Audit Credential Validation',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Credential Validation',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditKerberosAuthenticationService': {
|
||||
'Policy': 'Audit Kerberos Authentication Service',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Kerberos Authentication Service',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditKerberosServiceTicketOperations': {
|
||||
'Policy': 'Audit Kerberos Service Ticket Operations',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Kerberos Service Ticket Operations',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditOtherAccountLogonEvents': {
|
||||
'Policy': 'Audit Other Account Logon Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Other Account Logon Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# Account Management Section
|
||||
'AuditApplicationGroupManagement': {
|
||||
'Policy': 'Audit Application Group Management',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Application Group Management',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditComputerAccountManagement': {
|
||||
'Policy': 'Audit Computer Account Management',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Computer Account Management',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditDistributionGroupManagement': {
|
||||
'Policy': 'Audit Distribution Group Management',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Distribution Group Management',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditOtherAccountManagementEvents': {
|
||||
'Policy': 'Audit Other Account Management Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Other Account Management Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditSecurityGroupManagement': {
|
||||
'Policy': 'Audit Security Group Management',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Security Group Management',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditUserAccountManagement': {
|
||||
'Policy': 'Audit User Account Management',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit User Account Management',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# Detailed Tracking Settings
|
||||
'AuditDPAPIActivity': {
|
||||
'Policy': 'Audit DPAPI Activity',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit DPAPI Activity',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditPNPActivity': {
|
||||
'Policy': 'Audit PNP Activity',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit PNP Activity',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditProcessCreation': {
|
||||
'Policy': 'Audit Process Creation',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Process Creation',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditProcessTermination': {
|
||||
'Policy': 'Audit Process Termination',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Process Termination',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditRPCEvents': {
|
||||
'Policy': 'Audit RPC Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit RPC Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditTokenRightAdjusted': {
|
||||
'Policy': 'Audit Token Right Adjusted',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Token Right Adjusted',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# DS Access Section
|
||||
'AuditDetailedDirectoryServiceReplication': {
|
||||
'Policy': 'Audit Detailed Directory Service Replication',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Detailed Directory Service Replication',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditDirectoryServiceAccess': {
|
||||
'Policy': 'Audit Directory Service Access',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Directory Service Access',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditDirectoryServiceChanges': {
|
||||
'Policy': 'Audit Directory Service Changes',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Directory Service Changes',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditDirectoryServiceReplication': {
|
||||
'Policy': 'Audit Directory Service Replication',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Directory Service Replication',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# Logon/Logoff Section
|
||||
'AuditAccountLockout': {
|
||||
'Policy': 'Audit Account Lockout',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Account Lockout',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditUserDeviceClaims': {
|
||||
'Policy': 'Audit User / Device Claims',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit User / Device Claims',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditGroupMembership': {
|
||||
'Policy': 'Audit Group Membership',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Group Membership',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditIPsecExtendedMode': {
|
||||
'Policy': 'Audit IPsec Extended Mode',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit IPsec Extended Mode',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditIPsecMainMode': {
|
||||
'Policy': 'Audit IPsec Main Mode',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit IPsec Main Mode',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditIPsecQuickMode': {
|
||||
'Policy': 'Audit IPsec Quick Mode',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit IPsec Quick Mode',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditLogoff': {
|
||||
'Policy': 'Audit Logoff',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Logoff',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditLogon': {
|
||||
'Policy': 'Audit Logon',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Logon',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditNetworkPolicyServer': {
|
||||
'Policy': 'Audit Network Policy Server',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Network Policy Server',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditOtherLogonLogoffEvents': {
|
||||
'Policy': 'Audit Other Logon/Logoff Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Other Logon/Logoff Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditSpecialLogon': {
|
||||
'Policy': 'Audit Special Logon',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Special Logon',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# Object Access Section
|
||||
'AuditApplicationGenerated': {
|
||||
'Policy': 'Audit Application Generated',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Application Generated',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditCertificationServices': {
|
||||
'Policy': 'Audit Certification Services',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Certification Services',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditDetailedFileShare': {
|
||||
'Policy': 'Audit Detailed File Share',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Detailed File Share',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditFileShare': {
|
||||
'Policy': 'Audit File Share',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit File Share',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditFileSystem': {
|
||||
'Policy': 'Audit File System',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit File System',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditFilteringPlatformConnection': {
|
||||
'Policy': 'Audit Filtering Platform Connection',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Filtering Platform Connection',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditFilteringPlatformPacketDrop': {
|
||||
'Policy': 'Audit Filtering Platform Packet Drop',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Filtering Platform Packet Drop',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditHandleManipulation': {
|
||||
'Policy': 'Audit Handle Manipulation',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Handle Manipulation',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditKernelObject': {
|
||||
'Policy': 'Audit Kernel Object',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Kernel Object',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditOtherObjectAccessEvents': {
|
||||
'Policy': 'Audit Other Object Access Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Other Object Access Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditRegistry': {
|
||||
'Policy': 'Audit Registry',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Registry',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditRemovableStorage': {
|
||||
'Policy': 'Audit Removable Storage',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Removable Storage',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditSAM': {
|
||||
'Policy': 'Audit SAM',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit SAM',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditCentralAccessPolicyStaging': {
|
||||
'Policy': 'Audit Central Access Policy Staging',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Central Access Policy Staging',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# Policy Change Section
|
||||
'AuditAuditPolicyChange': {
|
||||
'Policy': 'Audit Audit Policy Change',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Audit Policy Change',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditAuthenticationPolicyChange': {
|
||||
'Policy': 'Audit Authentication Policy Change',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Authentication Policy Change',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditAuthorizationPolicyChange': {
|
||||
'Policy': 'Audit Authorization Policy Change',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Authorization Policy Change',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditFilteringPlatformPolicyChange': {
|
||||
'Policy': 'Audit Filtering Platform Policy Change',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Filtering Platform Policy Change',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditMPSSVCRuleLevelPolicyChange': {
|
||||
'Policy': 'Audit MPSSVC Rule-Level Policy Change',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit MPSSVC Rule-Level Policy Change',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditOtherPolicyChangeEvents': {
|
||||
'Policy': 'Audit Other Policy Change Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Other Policy Change Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# Privilege Use Section
|
||||
'AuditNonSensitivePrivilegeUse': {
|
||||
'Policy': 'Audit Non Sensitive Privilege Use',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Non Sensitive Privilege Use',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditOtherPrivilegeUseEvents': {
|
||||
'Policy': 'Audit Other Privilege Use Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Other Privilege Use Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditSensitivePrivilegeUse': {
|
||||
'Policy': 'Audit Sensitive Privilege Use',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Sensitive Privilege Use',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
# System Section
|
||||
'AuditIPsecDriver': {
|
||||
'Policy': 'Audit IPsec Driver',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit IPsec Driver',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditOtherSystemEvents': {
|
||||
'Policy': 'Audit Other System Events',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Other System Events',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditSecurityStateChange': {
|
||||
'Policy': 'Audit Security State Change',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Security State Change',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditSecuritySystemExtension': {
|
||||
'Policy': 'Audit Security System Extension',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit Security System Extension',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
'AuditSystemIntegrity': {
|
||||
'Policy': 'Audit System Integrity',
|
||||
'lgpo_section': self.advanced_audit_policy_gpedit_path,
|
||||
'Settings': self.advanced_audit_lookup.keys(),
|
||||
'AdvAudit': {
|
||||
'Option': 'Audit System Integrity',
|
||||
},
|
||||
'Transform': self.advanced_audit_transform,
|
||||
},
|
||||
########## END OF ADVANCED AUDIT POLICIES ##########
|
||||
'SeTrustedCredManAccessPrivilege': {
|
||||
'Policy': 'Access Credential Manager as a trusted '
|
||||
'caller',
|
||||
@ -4349,6 +4946,296 @@ def _buildElementNsmap(using_elements):
|
||||
return thisMap
|
||||
|
||||
|
||||
def _get_audit_defaults(option=None):
|
||||
'''
|
||||
Loads audit.csv defaults into a dict in __context__ called
|
||||
'lgpo.audit_defaults'. The dictionary includes fieldnames and all
|
||||
configurable policies as keys. The values are used to create/modify the
|
||||
``audit.csv`` file. The first entry is `fieldnames` used to create the
|
||||
header for the csv file. The rest of the entries are the audit policy names.
|
||||
Sample data follows:
|
||||
|
||||
{
|
||||
'fieldnames': ['Machine Name',
|
||||
'Policy Target',
|
||||
'Subcategory',
|
||||
'Subcategory GUID',
|
||||
'Inclusion Setting',
|
||||
'Exclusion Setting',
|
||||
'Setting Value'],
|
||||
'Audit Sensitive Privilege Use': {'Auditpol Name': 'Sensitive Privilege Use',
|
||||
'Exclusion Setting': '',
|
||||
'Inclusion Setting': 'No Auditing',
|
||||
'Machine Name': 'WIN-8FGT3E045SE',
|
||||
'Policy Target': 'System',
|
||||
'Setting Value': '0',
|
||||
'Subcategory': u'Audit Sensitive Privilege Use',
|
||||
'Subcategory GUID': '{0CCE9228-69AE-11D9-BED3-505054503030}'},
|
||||
'Audit Special Logon': {'Auditpol Name': 'Special Logon',
|
||||
'Exclusion Setting': '',
|
||||
'Inclusion Setting': 'No Auditing',
|
||||
'Machine Name': 'WIN-8FGT3E045SE',
|
||||
'Policy Target': 'System',
|
||||
'Setting Value': '0',
|
||||
'Subcategory': u'Audit Special Logon',
|
||||
'Subcategory GUID': '{0CCE921B-69AE-11D9-BED3-505054503030}'},
|
||||
'Audit System Integrity': {'Auditpol Name': 'System Integrity',
|
||||
'Exclusion Setting': '',
|
||||
'Inclusion Setting': 'No Auditing',
|
||||
'Machine Name': 'WIN-8FGT3E045SE',
|
||||
'Policy Target': 'System',
|
||||
'Setting Value': '0',
|
||||
'Subcategory': u'Audit System Integrity',
|
||||
'Subcategory GUID': '{0CCE9212-69AE-11D9-BED3-505054503030}'},
|
||||
...
|
||||
}
|
||||
|
||||
.. note::
|
||||
`Auditpol Name` designates the value to use when setting the value with
|
||||
the auditpol command
|
||||
|
||||
Args:
|
||||
option (str): The item from the dictionary to return. If ``None`` the
|
||||
entire dictionary is returned. Default is ``None``
|
||||
|
||||
Returns:
|
||||
dict: If ``None`` or one of the audit settings is passed
|
||||
list: If ``fieldnames`` is passed
|
||||
'''
|
||||
if 'lgpo.audit_defaults' not in __context__:
|
||||
# Get available setting names and GUIDs
|
||||
# This is used to get the fieldnames and GUIDs for individual policies
|
||||
log.debug('Loading auditpol defaults into __context__')
|
||||
dump = __utils__['auditpol.get_auditpol_dump']()
|
||||
reader = csv.DictReader(dump)
|
||||
audit_defaults = {'fieldnames': reader.fieldnames}
|
||||
for row in reader:
|
||||
row['Machine Name'] = ''
|
||||
row['Auditpol Name'] = row['Subcategory']
|
||||
# Special handling for snowflake scenarios where the audit.csv names
|
||||
# don't match the auditpol names
|
||||
if row['Subcategory'] == 'Central Policy Staging':
|
||||
row['Subcategory'] = 'Audit Central Access Policy Staging'
|
||||
elif row['Subcategory'] == 'Plug and Play Events':
|
||||
row['Subcategory'] = 'Audit PNP Activity'
|
||||
elif row['Subcategory'] == 'Token Right Adjusted Events':
|
||||
row['Subcategory'] = 'Audit Token Right Adjusted'
|
||||
else:
|
||||
row['Subcategory'] = 'Audit {0}'.format(row['Subcategory'])
|
||||
audit_defaults[row['Subcategory']] = row
|
||||
|
||||
__context__['lgpo.audit_defaults'] = audit_defaults
|
||||
|
||||
if option:
|
||||
return __context__['lgpo.audit_defaults'][option]
|
||||
else:
|
||||
return __context__['lgpo.audit_defaults']
|
||||
|
||||
|
||||
def _findOptionValueAdvAudit(option):
|
||||
'''
|
||||
Get the Advanced Auditing policy as configured in
|
||||
``C:\\Windows\\Security\\Audit\\audit.csv``
|
||||
|
||||
Args:
|
||||
option (str): The name of the setting as it appears in audit.csv
|
||||
|
||||
Returns:
|
||||
bool: ``True`` if successful, otherwise ``False``
|
||||
'''
|
||||
if 'lgpo.adv_audit_data' not in __context__:
|
||||
system_root = os.environ.get('SystemRoot', 'C:\\Windows')
|
||||
f_audit = os.path.join(system_root, 'security', 'audit', 'audit.csv')
|
||||
f_audit_gpo = os.path.join(system_root, 'System32', 'GroupPolicy',
|
||||
'Machine', 'Microsoft', 'Windows NT',
|
||||
'Audit', 'audit.csv')
|
||||
|
||||
# Make sure there is an existing audit.csv file on the machine
|
||||
if not __salt__['file.file_exists'](f_audit):
|
||||
if __salt__['file.file_exists'](f_audit_gpo):
|
||||
# If the GPO audit.csv exists, we'll use that one
|
||||
__salt__['file.copy'](f_audit_gpo, f_audit)
|
||||
else:
|
||||
field_names = _get_audit_defaults('fieldnames')
|
||||
# If the file doesn't exist anywhere, create it with default
|
||||
# fieldnames
|
||||
__salt__['file.touch'](f_audit)
|
||||
__salt__['file.append'](f_audit, ','.join(field_names))
|
||||
|
||||
audit_settings = {}
|
||||
with salt.utils.files.fopen(f_audit, mode='r') as csv_file:
|
||||
reader = csv.DictReader(csv_file)
|
||||
|
||||
for row in reader:
|
||||
audit_settings.update(
|
||||
{row['Subcategory']: row['Setting Value']})
|
||||
|
||||
__context__['lgpo.adv_audit_data'] = audit_settings
|
||||
|
||||
return __context__['lgpo.adv_audit_data'].get(option, None)
|
||||
|
||||
|
||||
def _set_audit_file_data(option, value):
|
||||
'''
|
||||
Helper function that sets the Advanced Audit settings in the two .csv files
|
||||
on Windows. Those files are located at:
|
||||
C:\\Windows\\Security\\Audit\\audit.csv
|
||||
C:\\Windows\\System32\\GroupPolicy\\Machine\\Microsoft\\Windows NT\\Audit\\audit.csv
|
||||
|
||||
Args:
|
||||
option (str): The name of the option to set
|
||||
value (str): The value to set. ['None', '0', '1', '2', '3']
|
||||
|
||||
Returns:
|
||||
bool: ``True`` if successful, otherwise ``False``
|
||||
'''
|
||||
# Set up some paths here
|
||||
system_root = os.environ.get('SystemRoot', 'C:\\Windows')
|
||||
f_audit = os.path.join(system_root, 'security', 'audit', 'audit.csv')
|
||||
f_audit_gpo = os.path.join(system_root, 'System32', 'GroupPolicy',
|
||||
'Machine', 'Microsoft', 'Windows NT',
|
||||
'Audit', 'audit.csv')
|
||||
f_temp = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.csv',
|
||||
prefix='audit')
|
||||
|
||||
# Lookup dict for "Inclusion Setting" field
|
||||
auditpol_values = {'None': 'No Auditing',
|
||||
'0': 'No Auditing',
|
||||
'1': 'Success',
|
||||
'2': 'Failure',
|
||||
'3': 'Success and Failure'}
|
||||
|
||||
try:
|
||||
# Open the existing audit.csv and load the csv `reader`
|
||||
with salt.utils.files.fopen(f_audit, mode='r') as csv_file:
|
||||
reader = csv.DictReader(csv_file)
|
||||
|
||||
# Open the temporary .csv and load the csv `writer`
|
||||
with salt.utils.files.fopen(f_temp.name, mode='w') as tmp_file:
|
||||
writer = csv.DictWriter(tmp_file, fieldnames=reader.fieldnames)
|
||||
|
||||
# Write the header values (labels)
|
||||
writer.writeheader()
|
||||
|
||||
value_written = False
|
||||
# Loop through the current audit.csv and write the changes to
|
||||
# the temp csv file for existing settings
|
||||
for row in reader:
|
||||
# If the row matches the value we're setting, update it with
|
||||
# the new value
|
||||
if row['Subcategory'] == option:
|
||||
if not value == 'None':
|
||||
# The value is not None, make the change
|
||||
row['Inclusion Setting'] = auditpol_values[value]
|
||||
row['Setting Value'] = value
|
||||
log.debug('LGPO: Setting {0} to {1}'
|
||||
''.format(option, value))
|
||||
writer.writerow(row)
|
||||
else:
|
||||
# value is None, remove it by not writing it to the
|
||||
# temp file
|
||||
log.debug('LGPO: Removing {0}'.format(option))
|
||||
value_written = True
|
||||
# If it's not the value we're setting, just write it
|
||||
else:
|
||||
writer.writerow(row)
|
||||
|
||||
# If a value was not written, it is a new setting not found in
|
||||
# the existing audit.csv file. Add the new setting with values
|
||||
# from the defaults
|
||||
if not value_written:
|
||||
if not value == 'None':
|
||||
# value is not None, write the new value
|
||||
log.debug('LGPO: Setting {0} to {1}'
|
||||
''.format(option, value))
|
||||
defaults = _get_audit_defaults(option)
|
||||
writer.writerow({
|
||||
'Machine Name': defaults['Machine Name'],
|
||||
'Policy Target': defaults['Policy Target'],
|
||||
'Subcategory': defaults['Subcategory'],
|
||||
'Subcategory GUID': defaults['Subcategory GUID'],
|
||||
'Inclusion Setting': auditpol_values[value],
|
||||
'Exclusion Setting': defaults['Exclusion Setting'],
|
||||
'Setting Value': value})
|
||||
value_written = True
|
||||
|
||||
if value_written:
|
||||
# Copy the temporary csv file over the existing audit.csv in both
|
||||
# locations if a value was written
|
||||
__salt__['file.copy'](f_temp.name, f_audit, remove_existing=True)
|
||||
__salt__['file.copy'](f_temp.name, f_audit_gpo, remove_existing=True)
|
||||
finally:
|
||||
f_temp.close()
|
||||
__salt__['file.remove'](f_temp.name)
|
||||
|
||||
return value_written
|
||||
|
||||
|
||||
def _set_auditpol_data(option, value):
|
||||
'''
|
||||
Helper function that updates the current applied settings to match what has
|
||||
just been set in the audit.csv files. We're doing it this way instead of
|
||||
running `gpupdate`
|
||||
|
||||
Args:
|
||||
option (str): The name of the option to set
|
||||
value (str): The value to set. ['None', '0', '1', '2', '3']
|
||||
|
||||
Returns:
|
||||
bool: ``True`` if successful, otherwise ``False``
|
||||
'''
|
||||
auditpol_values = {'None': 'No Auditing',
|
||||
'0': 'No Auditing',
|
||||
'1': 'Success',
|
||||
'2': 'Failure',
|
||||
'3': 'Success and Failure'}
|
||||
defaults = _get_audit_defaults(option)
|
||||
return __utils__['auditpol.set_setting'](
|
||||
name=defaults['Auditpol Name'],
|
||||
value=auditpol_values[value])
|
||||
|
||||
|
||||
def _setOptionValueAdvAudit(option, value):
|
||||
'''
|
||||
Helper function to update the Advanced Audit policy on the machine. This
|
||||
function modifies the two ``audit.csv`` files in the following locations:
|
||||
|
||||
C:\\Windows\\Security\\Audit\\audit.csv
|
||||
C:\\Windows\\System32\\GroupPolicy\\Machine\\Microsoft\\Windows NT\\Audit\\audit.csv
|
||||
|
||||
Then it applies those settings using ``auditpol``
|
||||
|
||||
After that, it updates ``__context__`` with the new setting
|
||||
|
||||
Args:
|
||||
option (str): The name of the option to set
|
||||
value (str): The value to set. ['None', '0', '1', '2', '3']
|
||||
|
||||
Returns:
|
||||
bool: ``True`` if successful, otherwise ``False``
|
||||
'''
|
||||
# Set the values in both audit.csv files
|
||||
if not _set_audit_file_data(option=option, value=value):
|
||||
raise CommandExecutionError('Failed to set audit.csv option: {0}'
|
||||
''.format(option))
|
||||
# Apply the settings locally
|
||||
if not _set_auditpol_data(option=option, value=value):
|
||||
# Only log this error, it will be in effect the next time the machine
|
||||
# updates its policy
|
||||
log.debug('Failed to apply audit setting: {0}'.format(option))
|
||||
|
||||
# Update __context__
|
||||
if value is None:
|
||||
log.debug('LGPO: Removing Advanced Audit data: {0}'.format(option))
|
||||
__context__['lgpo.adv_audit_data'].pop(option)
|
||||
else:
|
||||
log.debug('LGPO: Updating Advanced Audit data: {0}: {1}'
|
||||
''.format(option, value))
|
||||
__context__['lgpo.adv_audit_data'][option] = value
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _findOptionValueNetSH(profile, option):
|
||||
if 'lgpo.netsh_data' not in __context__:
|
||||
__context__['lgpo.netsh_data'] = {}
|
||||
@ -6770,7 +7657,10 @@ def get(policy_class=None, return_full_policy_names=True,
|
||||
class_vals[policy_name] = _findOptionValueNetSH(
|
||||
profile=_pol['NetSH']['Profile'],
|
||||
option=_pol['NetSH']['Option'])
|
||||
|
||||
elif 'AdvAudit' in _pol:
|
||||
# get value from auditpol
|
||||
class_vals[policy_name] = _findOptionValueAdvAudit(
|
||||
option=_pol['AdvAudit']['Option'])
|
||||
elif 'NetUserModal' in _pol:
|
||||
# get value from UserNetMod
|
||||
if _pol['NetUserModal']['Modal'] not in modal_returns:
|
||||
@ -6993,6 +7883,7 @@ def set_(computer_policy=None, user_policy=None,
|
||||
for p_class in policies:
|
||||
_secedits = {}
|
||||
_netshs = {}
|
||||
_advaudits = {}
|
||||
_modal_sets = {}
|
||||
_admTemplateData = {}
|
||||
_regedits = {}
|
||||
@ -7041,6 +7932,12 @@ def set_(computer_policy=None, user_policy=None,
|
||||
'option': _pol['NetSH']['Option'],
|
||||
'value': six.text_type(_value)
|
||||
})
|
||||
elif 'AdvAudit' in _pol:
|
||||
# set value with advaudit
|
||||
_advaudits.setdefault(policy_name, {
|
||||
'option': _pol['AdvAudit']['Option'],
|
||||
'value': six.text_type(_value)
|
||||
})
|
||||
elif 'NetUserModal' in _pol:
|
||||
# set value via NetUserModal
|
||||
log.debug('%s is a NetUserModal policy', policy_name)
|
||||
@ -7237,6 +8134,13 @@ def set_(computer_policy=None, user_policy=None,
|
||||
log.debug(_netshs[setting])
|
||||
_setOptionValueNetSH(**_netshs[setting])
|
||||
|
||||
if _advaudits:
|
||||
# We've got AdvAudit settings to make
|
||||
for setting in _advaudits:
|
||||
log.debug('Setting Advanced Audit policy: {0}'.format(setting))
|
||||
log.debug(_advaudits[setting])
|
||||
_setOptionValueAdvAudit(**_advaudits[setting])
|
||||
|
||||
if _modal_sets:
|
||||
# we've got modalsets to make
|
||||
log.debug(_modal_sets)
|
||||
|
@ -671,7 +671,7 @@ def read_crl(crl):
|
||||
text = get_pem_entry(text, pem_type='X509 CRL')
|
||||
|
||||
crltempfile = tempfile.NamedTemporaryFile()
|
||||
crltempfile.write(text)
|
||||
crltempfile.write(salt.utils.stringutils.to_str(text))
|
||||
crltempfile.flush()
|
||||
crlparsed = _parse_openssl_crl(crltempfile.name)
|
||||
crltempfile.close()
|
||||
@ -776,17 +776,18 @@ def write_pem(text, path, overwrite=True, pem_type=None):
|
||||
text = get_pem_entry(text, pem_type=pem_type)
|
||||
_dhparams = ''
|
||||
_private_key = ''
|
||||
if pem_type and pem_type == 'CERTIFICATE' and os.path.isfile(path) and \
|
||||
not overwrite:
|
||||
if pem_type and pem_type == 'CERTIFICATE' and os.path.isfile(path) and not overwrite:
|
||||
_filecontents = _text_or_file(path)
|
||||
try:
|
||||
_dhparams = get_pem_entry(_filecontents, 'DH PARAMETERS')
|
||||
except salt.exceptions.SaltInvocationError:
|
||||
pass
|
||||
except salt.exceptions.SaltInvocationError as err:
|
||||
log.debug("Error when getting DH PARAMETERS: %s", err)
|
||||
log.trace(err, exc_info=err)
|
||||
try:
|
||||
_private_key = get_pem_entry(_filecontents, '(?:RSA )?PRIVATE KEY')
|
||||
except salt.exceptions.SaltInvocationError:
|
||||
pass
|
||||
except salt.exceptions.SaltInvocationError as err:
|
||||
log.debug("Error when getting PRIVATE KEY: %s", err)
|
||||
log.trace(err, exc_info=err)
|
||||
with salt.utils.files.fopen(path, 'w') as _fp:
|
||||
if pem_type and pem_type == 'CERTIFICATE' and _private_key:
|
||||
_fp.write(salt.utils.stringutils.to_str(_private_key))
|
||||
@ -1378,9 +1379,9 @@ def create_certificate(
|
||||
pem_type='CERTIFICATE REQUEST').replace('\n', '')
|
||||
if 'public_key' in kwargs:
|
||||
# Strip newlines to make passing through as cli functions easier
|
||||
kwargs['public_key'] = get_public_key(
|
||||
kwargs['public_key'] = salt.utils.stringutils.to_str(get_public_key(
|
||||
kwargs['public_key'],
|
||||
passphrase=kwargs['public_key_passphrase']).replace('\n', '')
|
||||
passphrase=kwargs['public_key_passphrase'])).replace('\n', '')
|
||||
|
||||
# Remove system entries in kwargs
|
||||
# Including listen_in and prerequired because they are not included
|
||||
@ -1781,13 +1782,13 @@ def verify_crl(crl, cert):
|
||||
crltext = _text_or_file(crl)
|
||||
crltext = get_pem_entry(crltext, pem_type='X509 CRL')
|
||||
crltempfile = tempfile.NamedTemporaryFile()
|
||||
crltempfile.write(crltext)
|
||||
crltempfile.write(salt.utils.stringutils.to_str(crltext))
|
||||
crltempfile.flush()
|
||||
|
||||
certtext = _text_or_file(cert)
|
||||
certtext = get_pem_entry(certtext, pem_type='CERTIFICATE')
|
||||
certtempfile = tempfile.NamedTemporaryFile()
|
||||
certtempfile.write(certtext)
|
||||
certtempfile.write(salt.utils.stringutils.to_str(certtext))
|
||||
certtempfile.flush()
|
||||
|
||||
cmd = ('openssl crl -noout -in {0} -CAfile {1}'.format(
|
||||
|
@ -768,6 +768,12 @@ def latest(name,
|
||||
ret,
|
||||
'Failed to check remote refs: {0}'.format(_strip_exc(exc))
|
||||
)
|
||||
except NameError as exc:
|
||||
if 'global name' in exc.message:
|
||||
raise CommandExecutionError(
|
||||
'Failed to check remote refs: You may need to install '
|
||||
'GitPython or PyGit2')
|
||||
raise
|
||||
|
||||
if 'HEAD' in all_remote_refs:
|
||||
head_rev = all_remote_refs['HEAD']
|
||||
|
@ -86,9 +86,7 @@ def create_alert(name=None, api_key=None, reason=None, action_type="Create"):
|
||||
if __opts__['test'] is True:
|
||||
ret[
|
||||
'comment'] = 'Test: {0} alert request will be processed ' \
|
||||
'using the API Key="{1}".'.format(
|
||||
action_type,
|
||||
api_key)
|
||||
'using the API Key="{1}".'.format(action_type, api_key)
|
||||
|
||||
# Return ``None`` when running with ``test=true``.
|
||||
ret['result'] = None
|
||||
|
@ -35,6 +35,7 @@ import salt.transport.server
|
||||
import salt.transport.mixins.auth
|
||||
from salt.ext import six
|
||||
from salt.exceptions import SaltReqTimeoutError
|
||||
from salt._compat import ipaddress
|
||||
|
||||
from salt.utils.zeromq import zmq, ZMQDefaultLoop, install_zmq, ZMQ_VERSION_INFO, LIBZMQ_VERSION_INFO
|
||||
import zmq.error
|
||||
@ -73,33 +74,38 @@ def _get_master_uri(master_ip,
|
||||
'''
|
||||
Return the ZeroMQ URI to connect the Minion to the Master.
|
||||
It supports different source IP / port, given the ZeroMQ syntax:
|
||||
|
||||
// Connecting using an IP address and binding to an IP address
|
||||
rc = zmq_connect(socket, "tcp://192.168.1.17:5555;192.168.1.1:5555"); assert (rc == 0);
|
||||
|
||||
Source: http://api.zeromq.org/4-1:zmq-tcp
|
||||
'''
|
||||
if LIBZMQ_VERSION_INFO >= (4, 1, 6) and ZMQ_VERSION_INFO >= (16, 0, 1):
|
||||
# The source:port syntax for ZeroMQ has been added in libzmq 4.1.6
|
||||
# which is included in the pyzmq wheels starting with 16.0.1.
|
||||
if source_ip or source_port:
|
||||
if source_ip and source_port:
|
||||
return 'tcp://{source_ip}:{source_port};{master_ip}:{master_port}'.format(
|
||||
source_ip=source_ip, source_port=source_port,
|
||||
master_ip=master_ip, master_port=master_port)
|
||||
elif source_ip and not source_port:
|
||||
return 'tcp://{source_ip}:0;{master_ip}:{master_port}'.format(
|
||||
source_ip=source_ip,
|
||||
master_ip=master_ip, master_port=master_port)
|
||||
elif not source_ip and source_port:
|
||||
return 'tcp://0.0.0.0:{source_port};{master_ip}:{master_port}'.format(
|
||||
source_port=source_port,
|
||||
master_ip=master_ip, master_port=master_port)
|
||||
from salt.utils.zeromq import ip_bracket
|
||||
|
||||
master_uri = 'tcp://{master_ip}:{master_port}'.format(
|
||||
master_ip=ip_bracket(master_ip), master_port=master_port)
|
||||
|
||||
if source_ip or source_port:
|
||||
log.warning('Unable to connect to the Master using a specific source IP / port')
|
||||
log.warning('Consider upgrading to pyzmq >= 16.0.1 and libzmq >= 4.1.6')
|
||||
return 'tcp://{master_ip}:{master_port}'.format(
|
||||
master_ip=master_ip, master_port=master_port)
|
||||
if LIBZMQ_VERSION_INFO >= (4, 1, 6) and ZMQ_VERSION_INFO >= (16, 0, 1):
|
||||
# The source:port syntax for ZeroMQ has been added in libzmq 4.1.6
|
||||
# which is included in the pyzmq wheels starting with 16.0.1.
|
||||
if source_ip and source_port:
|
||||
master_uri = 'tcp://{source_ip}:{source_port};{master_ip}:{master_port}'.format(
|
||||
source_ip=ip_bracket(source_ip), source_port=source_port,
|
||||
master_ip=ip_bracket(master_ip), master_port=master_port)
|
||||
elif source_ip and not source_port:
|
||||
master_uri = 'tcp://{source_ip}:0;{master_ip}:{master_port}'.format(
|
||||
source_ip=ip_bracket(source_ip),
|
||||
master_ip=ip_bracket(master_ip), master_port=master_port)
|
||||
elif source_port and not source_ip:
|
||||
ip_any = '0.0.0.0' if ipaddress.ip_address(master_ip).version == 4 else ip_bracket('::')
|
||||
master_uri = 'tcp://{ip_any}:{source_port};{master_ip}:{master_port}'.format(
|
||||
ip_any=ip_any, source_port=source_port,
|
||||
master_ip=ip_bracket(master_ip), master_port=master_port)
|
||||
else:
|
||||
log.warning('Unable to connect to the Master using a specific source IP / port')
|
||||
log.warning('Consider upgrading to pyzmq >= 16.0.1 and libzmq >= 4.1.6')
|
||||
log.warning('Specific source IP / port for connecting to master returner port: configuration ignored')
|
||||
|
||||
return master_uri
|
||||
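As a rough sketch of the resulting URI formats (these mirror the test expectations added further down in this change):

    _get_master_uri(master_ip='127.0.0.1', master_port=4505)
    # 'tcp://127.0.0.1:4505'

    _get_master_uri(master_ip='127.0.0.1', master_port=4505,
                    source_ip='111.1.0.1', source_port=4058)
    # 'tcp://111.1.0.1:4058;127.0.0.1:4505'

    _get_master_uri(master_ip='1234:5678::9abc', master_port=4505,
                    source_ip='1234:5678::1:9abc', source_port=4058)
    # 'tcp://[1234:5678::1:9abc]:4058;[1234:5678::9abc]:4505'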
|
||||
|
||||
class AsyncZeroMQReqChannel(salt.transport.client.ReqChannel):
|
||||
|
@ -8,7 +8,6 @@ Utilities for accessing storage container blobs on Azure
|
||||
# Import python libs
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
import logging
|
||||
import inspect
|
||||
|
||||
# Import azure libs
|
||||
HAS_LIBS = False
|
||||
@ -19,7 +18,6 @@ except ImportError:
|
||||
pass
|
||||
|
||||
# Import salt libs
|
||||
from salt.ext import six
|
||||
from salt.exceptions import SaltSystemExit
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@ -178,25 +176,13 @@ def object_to_dict(obj):
|
||||
if isinstance(obj, list) or isinstance(obj, tuple):
|
||||
ret = []
|
||||
for item in obj:
|
||||
#ret.append(obj.__dict__[item])
|
||||
ret.append(object_to_dict(obj))
|
||||
elif isinstance(obj, six.text_type):
|
||||
ret = obj.encode('ascii', 'replace'),
|
||||
elif isinstance(obj, six.string_types):
|
||||
ret = obj
|
||||
else:
|
||||
ret.append(object_to_dict(item))
|
||||
elif hasattr(obj, '__dict__'):
|
||||
ret = {}
|
||||
for item in obj.__dict__:
|
||||
if item.startswith('_'):
|
||||
continue
|
||||
# This is ugly, but inspect.isclass() doesn't seem to work
|
||||
try:
|
||||
if inspect.isclass(obj) or 'class' in six.text_type(type(obj.__dict__.get(item))):
|
||||
ret[item] = object_to_dict(obj.__dict__[item])
|
||||
elif isinstance(obj.__dict__[item], six.text_type):
|
||||
ret[item] = obj.__dict__[item].encode('ascii', 'replace')
|
||||
else:
|
||||
ret[item] = obj.__dict__[item]
|
||||
except AttributeError:
|
||||
ret[item] = obj.get(item)
|
||||
ret[item] = object_to_dict(obj.__dict__[item])
|
||||
else:
|
||||
ret = obj
|
||||
return ret
|
||||
|
@ -58,10 +58,10 @@ except (ImportError, OSError, AttributeError, TypeError):
|
||||
def sanitize_host(host):
|
||||
'''
|
||||
Sanitize host string.
|
||||
https://tools.ietf.org/html/rfc1123#section-2.1
|
||||
'''
|
||||
return ''.join([
|
||||
c for c in host[0:255] if c in (ascii_letters + digits + '.-')
|
||||
])
|
||||
RFC952_characters = ascii_letters + digits + ".-"
|
||||
return "".join([c for c in host[0:255] if c in RFC952_characters])
|
||||
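In effect the rewritten helper just filters the hostname down to the RFC 952 character set (letters, digits, '.' and '-'), truncated to 255 characters; roughly:

    sanitize_host('salt_master!.example.com')   # 'saltmaster.example.com'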
|
||||
|
||||
def isportopen(host, port):
|
||||
@ -137,7 +137,11 @@ def _generate_minion_id():
|
||||
def first(self):
|
||||
return self and self[0] or None
|
||||
|
||||
hosts = DistinctList().append(socket.getfqdn()).append(platform.node()).append(socket.gethostname())
|
||||
hostname = socket.gethostname()
|
||||
|
||||
hosts = DistinctList().append(
|
||||
salt.utils.stringutils.to_unicode(socket.getfqdn(salt.utils.stringutils.to_bytes(hostname)))
|
||||
).append(platform.node()).append(hostname)
|
||||
if not hosts:
|
||||
try:
|
||||
for a_nfo in socket.getaddrinfo(hosts.first() or 'localhost', None, socket.AF_INET,
|
||||
@ -1870,12 +1874,19 @@ def dns_check(addr, port, safe=False, ipv6=None):
|
||||
resolved = salt.utils.zeromq.ip_bracket(addr)
|
||||
break
|
||||
|
||||
candidate_addr = salt.utils.zeromq.ip_bracket(h[4][0])
|
||||
candidates.append(candidate_addr)
|
||||
if h[0] == socket.AF_INET and ipv6 is True:
|
||||
continue
|
||||
if h[0] == socket.AF_INET6 and ipv6 is False:
|
||||
continue
|
||||
|
||||
candidate_addr = h[4][0]
|
||||
|
||||
if h[0] != socket.AF_INET6 or ipv6 is not None:
|
||||
candidates.append(candidate_addr)
|
||||
|
||||
try:
|
||||
s = socket.socket(h[0], socket.SOCK_STREAM)
|
||||
s.connect((candidate_addr.strip('[]'), h[4][1]))
|
||||
s.connect((candidate_addr, port))
|
||||
s.close()
|
||||
|
||||
resolved = candidate_addr
|
||||
@ -1904,3 +1915,55 @@ def dns_check(addr, port, safe=False, ipv6=None):
|
||||
raise SaltClientError()
|
||||
raise SaltSystemExit(code=42, msg=err)
|
||||
return resolved
|
||||
|
||||
|
||||
def parse_host_port(host_port):
|
||||
"""
|
||||
Takes a string argument specifying host or host:port.
|
||||
|
||||
Returns a (hostname, port) or (ip_address, port) tuple. If no port is given,
|
||||
the second (port) element of the returned tuple will be None.
|
||||
|
||||
host:port argument, for example, is accepted in the forms of:
|
||||
- hostname
|
||||
- hostname:1234
|
||||
- hostname.domain.tld
|
||||
- hostname.domain.tld:5678
|
||||
- [1234::5]:5678
|
||||
- 1234::5
|
||||
- 10.11.12.13:4567
|
||||
- 10.11.12.13
|
||||
"""
|
||||
host, port = None, None # default
|
||||
|
||||
_s_ = host_port[:]
|
||||
if _s_[0] == "[":
|
||||
if "]" in host_port:
|
||||
host, _s_ = _s_.lstrip("[").rsplit("]", 1)
|
||||
host = ipaddress.IPv6Address(host)
|
||||
if _s_[0] == ":":
|
||||
port = int(_s_.lstrip(":"))
|
||||
else:
|
||||
if len(_s_) > 1:
|
||||
raise ValueError('found ambiguous "{}" port in "{}"'.format(_s_, host_port))
|
||||
else:
|
||||
if _s_.count(":") == 1:
|
||||
host, _hostport_separator_, port = _s_.partition(":")
|
||||
try:
|
||||
port = int(port)
|
||||
except ValueError as _e_:
|
||||
log.error('host_port "%s" port value "%s" is not an integer.', host_port, port)
|
||||
raise _e_
|
||||
else:
|
||||
host = _s_
|
||||
try:
|
||||
if not isinstance(host, ipaddress._BaseAddress):
|
||||
host_ip = ipaddress.ip_address(host)
|
||||
host = host_ip
|
||||
except ValueError:
|
||||
log.debug('"%s" Not an IP address? Assuming it is a hostname.', host)
|
||||
if host != sanitize_host(host):
|
||||
log.error('bad hostname: "%s"', host)
|
||||
raise ValueError('bad hostname: "{}"'.format(host))
|
||||
|
||||
return host, port
|
||||
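A quick sketch of the expected behaviour, matching the unit tests added for this helper further down:

    from salt.utils.network import parse_host_port

    parse_host_port('10.11.12.13:4567')       # (IPv4Address('10.11.12.13'), 4567)
    parse_host_port('[2001:db8::5]:5678')     # (IPv6Address('2001:db8::5'), 5678)
    parse_host_port('hostname.domain.tld')    # ('hostname.domain.tld', None)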
|
salt/utils/win_lgpo_auditpol.py (new file, 308 lines)
@ -0,0 +1,308 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
r'''
|
||||
A salt util for modifying the audit policies on the machine. This util is used
|
||||
by the ``win_auditpol`` and ``win_lgpo`` modules.
|
||||
|
||||
Though this utility does not set group policy for auditing, it displays how all
|
||||
auditing configuration is applied on the machine, either set directly or via
|
||||
local or domain group policy.
|
||||
|
||||
.. versionadded:: 2018.3.4
|
||||
.. versionadded:: 2019.2.1
|
||||
|
||||
This util allows you to view and modify the audit settings as they are applied
|
||||
on the machine. The audit settings are broken down into nine categories:
|
||||
|
||||
- Account Logon
|
||||
- Account Management
|
||||
- Detailed Tracking
|
||||
- DS Access
|
||||
- Logon/Logoff
|
||||
- Object Access
|
||||
- Policy Change
|
||||
- Privilege Use
|
||||
- System
|
||||
|
||||
The ``get_settings`` function will return the subcategories for all nine of
|
||||
the above categories in one dictionary along with their auditing status.
|
||||
|
||||
To modify a setting you only need to specify the subcategory name and the value
|
||||
you wish to set. Valid settings are:
|
||||
|
||||
- No Auditing
|
||||
- Success
|
||||
- Failure
|
||||
- Success and Failure
|
||||
|
||||
Usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import salt.utils.win_lgpo_auditpol
|
||||
|
||||
# Get current state of all audit settings
|
||||
salt.utils.win_lgpo_auditpol.get_settings()
|
||||
|
||||
# Get the current state of all audit settings in the "Account Logon"
|
||||
# category
|
||||
salt.utils.win_lgpo_auditpol.get_settings(category="Account Logon")
|
||||
|
||||
# Get current state of the "Credential Validation" setting
|
||||
salt.utils.win_lgpo_auditpol.get_setting(name='Credential Validation')
|
||||
|
||||
# Set the state of the "Credential Validation" setting to Success and
|
||||
# Failure
|
||||
salt.utils.win_lgpo_auditpol.set_setting(name='Credential Validation',
|
||||
value='Success and Failure')
|
||||
|
||||
# Set the state of the "Credential Validation" setting to No Auditing
|
||||
salt.utils.win_lgpo_auditpol.set_setting(name='Credential Validation',
|
||||
value='No Auditing')
|
||||
'''
|
||||
# Import Python libs
|
||||
from __future__ import absolute_import, unicode_literals, print_function
|
||||
import logging
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
# Import Salt libs
|
||||
import salt.modules.cmdmod
|
||||
import salt.utils.files
|
||||
import salt.utils.platform
|
||||
from salt.exceptions import CommandExecutionError
|
||||
|
||||
# Import 3rd Party libs
|
||||
from salt.ext.six.moves import zip
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
__virtualname__ = 'auditpol'
|
||||
|
||||
categories = ['Account Logon',
|
||||
'Account Management',
|
||||
'Detailed Tracking',
|
||||
'DS Access',
|
||||
'Logon/Logoff',
|
||||
'Object Access',
|
||||
'Policy Change',
|
||||
'Privilege Use',
|
||||
'System']
|
||||
|
||||
settings = {'No Auditing': '/success:disable /failure:disable',
|
||||
'Success': '/success:enable /failure:disable',
|
||||
'Failure': '/success:disable /failure:enable',
|
||||
'Success and Failure': '/success:enable /failure:enable'}
|
||||
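As a rough illustration of how these lookups are used: a call such as set_setting('Credential Validation', 'Success and Failure') combines the subcategory name with the flags above and, via _auditpol_cmd(), shells out along the lines of:

    auditpol /set /subcategory:"Credential Validation" /success:enable /failure:enable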
|
||||
|
||||
# Although utils are often directly imported, it is also possible to use the
|
||||
# loader.
|
||||
def __virtual__():
|
||||
'''
|
||||
Only load if on a Windows system
|
||||
'''
|
||||
if not salt.utils.platform.is_windows():
|
||||
return False, 'This utility is only available on Windows'
|
||||
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def _auditpol_cmd(cmd):
|
||||
'''
|
||||
Helper function for running the auditpol command
|
||||
|
||||
Args:
|
||||
cmd (str): the auditpol command to run
|
||||
|
||||
Returns:
|
||||
list: A list containing each line of the return (splitlines)
|
||||
|
||||
Raises:
|
||||
CommandExecutionError: If the command encounters an error
|
||||
'''
|
||||
ret = salt.modules.cmdmod.run_all(cmd='auditpol {0}'.format(cmd),
|
||||
python_shell=True)
|
||||
if ret['retcode'] == 0:
|
||||
return ret['stdout'].splitlines()
|
||||
|
||||
msg = 'Error executing auditpol command: {0}\n'.format(cmd)
|
||||
msg += '\n'.join(ret['stdout'])
|
||||
raise CommandExecutionError(msg)
|
||||
|
||||
|
||||
def get_settings(category='All'):
|
||||
'''
|
||||
Get the current configuration for all audit settings specified in the
|
||||
category
|
||||
|
||||
Args:
|
||||
category (str):
|
||||
One of the nine categories to return. Can also be ``All`` to return
|
||||
the settings for all categories. Valid options are:
|
||||
|
||||
- Account Logon
|
||||
- Account Management
|
||||
- Detailed Tracking
|
||||
- DS Access
|
||||
- Logon/Logoff
|
||||
- Object Access
|
||||
- Policy Change
|
||||
- Privilege Use
|
||||
- System
|
||||
- All
|
||||
|
||||
Default value is ``All``
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing all subcategories for the specified
|
||||
category along with their current configuration
|
||||
|
||||
Raises:
|
||||
KeyError: On invalid category
|
||||
CommandExecutionError: If an error is encountered retrieving the settings
|
||||
|
||||
Usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import salt.utils.win_lgpo_auditpol
|
||||
|
||||
# Get current state of all audit settings
|
||||
salt.utils.win_lgpo_auditpol.get_settings()
|
||||
|
||||
# Get the current state of all audit settings in the "Account Logon"
|
||||
# category
|
||||
salt.utils.win_lgpo_auditpol.get_settings(category="Account Logon")
|
||||
'''
|
||||
# Parameter validation
|
||||
if category.lower() in ['all', '*']:
|
||||
category = '*'
|
||||
elif category.lower() not in [x.lower() for x in categories]:
|
||||
raise KeyError('Invalid category: "{0}"'.format(category))
|
||||
|
||||
cmd = '/get /category:"{0}"'.format(category)
|
||||
results = _auditpol_cmd(cmd)
|
||||
|
||||
ret = {}
|
||||
# Skip the first three lines (header output)
|
||||
for line in results[3:]:
|
||||
if ' ' in line.strip():
|
||||
ret.update(dict(list(zip(*[iter(re.split(r"\s{2,}", line.strip()))]*2))))
|
||||
return ret
|
||||
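The zip(*[iter(...)]*2) expression above pairs consecutive tokens after splitting each line on runs of two or more spaces; a small standalone sketch of the same idea:

    import re

    line = '  Credential Validation                    Success and Failure'
    tokens = re.split(r'\s{2,}', line.strip())
    # tokens == ['Credential Validation', 'Success and Failure']
    pairs = dict(zip(*[iter(tokens)] * 2))
    # pairs == {'Credential Validation': 'Success and Failure'}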
|
||||
|
||||
def get_setting(name):
|
||||
'''
|
||||
Get the current configuration for the named audit setting
|
||||
|
||||
Args:
|
||||
name (str): The name of the setting to retrieve
|
||||
|
||||
Returns:
|
||||
str: The current configuration for the named setting
|
||||
|
||||
Raises:
|
||||
KeyError: On invalid setting name
|
||||
CommandExecutionError: If an error is encountered retrieving the settings
|
||||
|
||||
Usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import salt.utils.win_lgpo_auditpol
|
||||
|
||||
# Get current state of the "Credential Validation" setting
|
||||
salt.utils.win_lgpo_auditpol.get_setting(name='Credential Validation')
|
||||
'''
|
||||
current_settings = get_settings(category='All')
|
||||
for setting in current_settings:
|
||||
if name.lower() == setting.lower():
|
||||
return current_settings[setting]
|
||||
raise KeyError('Invalid name: {0}'.format(name))
|
||||
|
||||
|
||||
def _get_valid_names():
|
||||
if 'auditpol.valid_names' not in __context__:
|
||||
settings = get_settings(category='All')
|
||||
__context__['auditpol.valid_names'] = [k.lower() for k in settings]
|
||||
return __context__['auditpol.valid_names']
|
||||
|
||||
|
||||
def set_setting(name, value):
|
||||
'''
|
||||
Set the configuration for the named audit setting
|
||||
|
||||
Args:
|
||||
|
||||
name (str):
|
||||
The name of the setting to configure
|
||||
|
||||
value (str):
|
||||
The configuration for the named value. Valid options are:
|
||||
|
||||
- No Auditing
|
||||
- Success
|
||||
- Failure
|
||||
- Success and Failure
|
||||
|
||||
Returns:
|
||||
bool: True if successful
|
||||
|
||||
Raises:
|
||||
KeyError: On invalid ``name`` or ``value``
|
||||
CommandExecutionError: If an error is encountered modifying the setting
|
||||
|
||||
Usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import salt.utils.win_lgpo_auditpol
|
||||
|
||||
# Set the state of the "Credential Validation" setting to Success and
|
||||
# Failure
|
||||
salt.utils.win_lgpo_auditpol.set_setting(name='Credential Validation',
|
||||
value='Success and Failure')
|
||||
|
||||
# Set the state of the "Credential Validation" setting to No Auditing
|
||||
salt.utils.win_lgpo_auditpol.set_setting(name='Credential Validation',
|
||||
value='No Auditing')
|
||||
'''
|
||||
# Input validation
|
||||
if name.lower() not in _get_valid_names():
|
||||
raise KeyError('Invalid name: {0}'.format(name))
|
||||
for setting in settings:
|
||||
if value.lower() == setting.lower():
|
||||
cmd = '/set /subcategory:"{0}" {1}'.format(name, settings[setting])
|
||||
break
|
||||
else:
|
||||
raise KeyError('Invalid setting value: {0}'.format(value))
|
||||
|
||||
_auditpol_cmd(cmd)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_auditpol_dump():
|
||||
'''
|
||||
Gets the contents of an auditpol /backup. Used by the LGPO module to get
|
||||
fieldnames and GUIDs for Advanced Audit policies.
|
||||
|
||||
Returns:
|
||||
list: A list of lines from the backup file
|
||||
|
||||
Usage:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
import salt.utils.win_lgpo_auditpol
|
||||
|
||||
dump = salt.utils.win_lgpo_auditpol.get_auditpol_dump()
|
||||
'''
|
||||
# Just get a temporary file name
|
||||
# NamedTemporaryFile will delete the file it creates by default on Windows
|
||||
with tempfile.NamedTemporaryFile(suffix='.csv') as tmp_file:
|
||||
csv_file = tmp_file.name
|
||||
|
||||
cmd = '/backup /file:{0}'.format(csv_file)
|
||||
_auditpol_cmd(cmd)
|
||||
|
||||
with salt.utils.files.fopen(csv_file) as fp:
|
||||
return fp.readlines()
|
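The backup is plain CSV, so the first line of the returned list is the header row that _get_audit_defaults() in win_lgpo feeds to csv.DictReader; roughly:

    dump = salt.utils.win_lgpo_auditpol.get_auditpol_dump()
    # dump[0] is approximately:
    # 'Machine Name,Policy Target,Subcategory,Subcategory GUID,Inclusion Setting,Exclusion Setting,Setting Value\n'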
@ -81,6 +81,19 @@ from salt.ext.six.moves import zip
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
__hostname__ = socket.gethostname()
|
||||
__virtualname__ = 'netsh'
|
||||
|
||||
|
||||
# Although utils are often directly imported, it is also possible to use the
|
||||
# loader.
|
||||
def __virtual__():
|
||||
'''
|
||||
Only load if on a Windows system
|
||||
'''
|
||||
if not salt.utils.platform.is_windows():
|
||||
return False, 'This utility only available on Windows'
|
||||
|
||||
return __virtualname__
|
||||
|
||||
|
||||
def _netsh_file(content):
|
||||
|
@ -8,6 +8,7 @@ from __future__ import absolute_import, print_function, unicode_literals
|
||||
import logging
|
||||
import tornado.ioloop
|
||||
from salt.exceptions import SaltSystemExit
|
||||
from salt._compat import ipaddress
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@ -82,6 +83,5 @@ def ip_bracket(addr):
|
||||
Convert IP address representation to ZMQ (URL) format. ZMQ expects
|
||||
brackets around IPv6 literals, since they are used in URLs.
|
||||
'''
|
||||
if addr and ':' in addr and not addr.startswith('['):
|
||||
return '[{0}]'.format(addr)
|
||||
return addr
|
||||
addr = ipaddress.ip_address(addr)
|
||||
return ('[{}]' if addr.version == 6 else '{}').format(addr)
|
||||
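The reworked helper now validates the address with the ipaddress module and only adds brackets around IPv6 literals, roughly:

    from salt.utils.zeromq import ip_bracket

    ip_bracket('127.0.0.1')      # '127.0.0.1'
    ip_bracket('2001:db8::1')    # '[2001:db8::1]'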
|
setup.py (5 lines changed)
@ -855,7 +855,10 @@ class SaltDistribution(distutils.dist.Distribution):
|
||||
self.name = 'salt-ssh' if PACKAGED_FOR_SALT_SSH else 'salt'
|
||||
self.salt_version = __version__ # pylint: disable=undefined-variable
|
||||
self.description = 'Portable, distributed, remote execution and configuration management system'
|
||||
with open(SALT_LONG_DESCRIPTION_FILE) as f:
|
||||
kwargs = {}
|
||||
if IS_PY3:
|
||||
kwargs['encoding'] = 'utf-8'
|
||||
with open(SALT_LONG_DESCRIPTION_FILE, **kwargs) as f:
|
||||
self.long_description = f.read()
|
||||
self.long_description_content_type = 'text/x-rst'
|
||||
self.author = 'Thomas S Hatch'
|
||||
|
@ -67,7 +67,7 @@ class LDAPAuthTestCase(TestCase):
|
||||
'''
|
||||
self.opts['auth.ldap.freeipa'] = True
|
||||
with patch.dict(salt.auth.ldap.__opts__, self.opts):
|
||||
with patch('salt.auth.ldap.auth', return_value=Bind):
|
||||
with patch('salt.auth.ldap._bind', return_value=Bind):
|
||||
self.assertIn('saltusers', salt.auth.ldap.groups('saltuser', password='password'))
|
||||
|
||||
def test_groups(self):
|
||||
@ -75,7 +75,7 @@ class LDAPAuthTestCase(TestCase):
|
||||
test groups in ldap
|
||||
'''
|
||||
with patch.dict(salt.auth.ldap.__opts__, self.opts):
|
||||
with patch('salt.auth.ldap.auth', return_value=Bind):
|
||||
with patch('salt.auth.ldap._bind', return_value=Bind):
|
||||
self.assertIn('saltusers', salt.auth.ldap.groups('saltuser', password='password'))
|
||||
|
||||
def test_groups_activedirectory(self):
|
||||
@ -84,7 +84,7 @@ class LDAPAuthTestCase(TestCase):
|
||||
'''
|
||||
self.opts['auth.ldap.activedirectory'] = True
|
||||
with patch.dict(salt.auth.ldap.__opts__, self.opts):
|
||||
with patch('salt.auth.ldap.auth', return_value=Bind):
|
||||
with patch('salt.auth.ldap._bind', return_value=Bind):
|
||||
self.assertIn('saltusers', salt.auth.ldap.groups('saltuser', password='password'))
|
||||
|
||||
def test_auth_nopass(self):
|
||||
|
@ -153,6 +153,17 @@ class RootsTest(TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMix
|
||||
ret = roots.file_list_emptydirs({'saltenv': 'base'})
|
||||
self.assertIn('empty_dir', ret)
|
||||
|
||||
def test_file_list_with_slash(self):
|
||||
opts = {'file_roots': copy.copy(self.opts['file_roots'])}
|
||||
opts['file_roots']['foo/bar'] = opts['file_roots']['base']
|
||||
load = {
|
||||
'saltenv': 'foo/bar',
|
||||
}
|
||||
with patch.dict(roots.__opts__, opts):
|
||||
ret = roots.file_list(load)
|
||||
self.assertIn('testfile', ret)
|
||||
self.assertIn(UNICODE_FILENAME, ret)
|
||||
|
||||
def test_dir_list(self):
|
||||
ret = roots.dir_list({'saltenv': 'base'})
|
||||
self.assertIn('empty_dir', ret)
|
||||
|
@ -476,14 +476,17 @@ class DebianIpTestCase(TestCase, LoaderModuleMockMixin):
|
||||
patch('salt.modules.debian_ip._parse_hostname',
|
||||
MagicMock(return_value='SaltStack')), \
|
||||
patch('salt.modules.debian_ip._parse_domainname',
|
||||
MagicMock(return_value='saltstack.com')):
|
||||
MagicMock(return_value='saltstack.com')), \
|
||||
patch('salt.modules.debian_ip._parse_searchdomain',
|
||||
MagicMock(return_value='test.saltstack.com')):
|
||||
mock_avai = MagicMock(return_value=True)
|
||||
with patch.dict(debian_ip.__salt__, {'service.available': mock_avai,
|
||||
'service.status': mock_avai}):
|
||||
self.assertEqual(debian_ip.get_network_settings(),
|
||||
['NETWORKING=yes\n',
|
||||
'HOSTNAME=SaltStack\n',
|
||||
'DOMAIN=saltstack.com\n'])
|
||||
[u'NETWORKING=yes\n',
|
||||
u'HOSTNAME=SaltStack\n',
|
||||
u'DOMAIN=saltstack.com\n',
|
||||
u'SEARCH=test.saltstack.com\n'])
|
||||
|
||||
mock = MagicMock(side_effect=jinja2.exceptions.TemplateNotFound
|
||||
('error'))
|
||||
|
@ -24,6 +24,52 @@ try:
|
||||
except Exception:
|
||||
NO_MYSQL = True
|
||||
|
||||
__all_privileges__ = [
|
||||
'ALTER',
|
||||
'ALTER ROUTINE',
|
||||
'BACKUP_ADMIN',
|
||||
'BINLOG_ADMIN',
|
||||
'CONNECTION_ADMIN',
|
||||
'CREATE',
|
||||
'CREATE ROLE',
|
||||
'CREATE ROUTINE',
|
||||
'CREATE TABLESPACE',
|
||||
'CREATE TEMPORARY TABLES',
|
||||
'CREATE USER',
|
||||
'CREATE VIEW',
|
||||
'DELETE',
|
||||
'DROP',
|
||||
'DROP ROLE',
|
||||
'ENCRYPTION_KEY_ADMIN',
|
||||
'EVENT',
|
||||
'EXECUTE',
|
||||
'FILE',
|
||||
'GROUP_REPLICATION_ADMIN',
|
||||
'INDEX',
|
||||
'INSERT',
|
||||
'LOCK TABLES',
|
||||
'PERSIST_RO_VARIABLES_ADMIN',
|
||||
'PROCESS',
|
||||
'REFERENCES',
|
||||
'RELOAD',
|
||||
'REPLICATION CLIENT',
|
||||
'REPLICATION SLAVE',
|
||||
'REPLICATION_SLAVE_ADMIN',
|
||||
'RESOURCE_GROUP_ADMIN',
|
||||
'RESOURCE_GROUP_USER',
|
||||
'ROLE_ADMIN',
|
||||
'SELECT',
|
||||
'SET_USER_ID',
|
||||
'SHOW DATABASES',
|
||||
'SHOW VIEW',
|
||||
'SHUTDOWN',
|
||||
'SUPER',
|
||||
'SYSTEM_VARIABLES_ADMIN',
|
||||
'TRIGGER',
|
||||
'UPDATE',
|
||||
'XA_RECOVER_ADMIN'
|
||||
]
|
||||
|
||||
|
||||
@skipIf(NO_MOCK, NO_MOCK_REASON)
|
||||
@skipIf(NO_MYSQL, 'Install MySQL bindings before running MySQL unit tests.')
|
||||
@ -256,15 +302,16 @@ class MySQLTestCase(TestCase, LoaderModuleMockMixin):
|
||||
"GRANT SELECT ON `testdb`.`testtabletwo` TO 'testuer'@'%'",
|
||||
"GRANT SELECT ON `testdb`.`testtablethree` TO 'testuser'@'%'",
|
||||
]
|
||||
mock = MagicMock(return_value=mock_grants)
|
||||
with patch.object(mysql, 'user_grants', return_value=mock_grants) as mock_user_grants:
|
||||
ret = mysql.grant_exists(
|
||||
'SELECT, INSERT, UPDATE',
|
||||
'testdb.testtableone',
|
||||
'testuser',
|
||||
'%'
|
||||
)
|
||||
self.assertEqual(ret, True)
|
||||
with patch.object(mysql, 'version', return_value='5.6.41'):
|
||||
mock = MagicMock(return_value=mock_grants)
|
||||
with patch.object(mysql, 'user_grants', return_value=mock_grants) as mock_user_grants:
|
||||
ret = mysql.grant_exists(
|
||||
'SELECT, INSERT, UPDATE',
|
||||
'testdb.testtableone',
|
||||
'testuser',
|
||||
'%'
|
||||
)
|
||||
self.assertEqual(ret, True)
|
||||
|
||||
def test_grant_exists_false(self):
|
||||
'''
|
||||
@ -275,15 +322,47 @@ class MySQLTestCase(TestCase, LoaderModuleMockMixin):
|
||||
"GRANT SELECT, INSERT, UPDATE ON `testdb`.`testtableone` TO 'testuser'@'%'",
|
||||
"GRANT SELECT ON `testdb`.`testtablethree` TO 'testuser'@'%'",
|
||||
]
|
||||
mock = MagicMock(return_value=mock_grants)
|
||||
with patch.object(mysql, 'user_grants', return_value=mock_grants) as mock_user_grants:
|
||||
ret = mysql.grant_exists(
|
||||
'SELECT',
|
||||
'testdb.testtabletwo',
|
||||
'testuser',
|
||||
'%'
|
||||
)
|
||||
self.assertEqual(ret, False)
|
||||
with patch.object(mysql, 'version', return_value='5.6.41'):
|
||||
mock = MagicMock(return_value=mock_grants)
|
||||
with patch.object(mysql, 'user_grants', return_value=mock_grants) as mock_user_grants:
|
||||
ret = mysql.grant_exists(
|
||||
'SELECT',
|
||||
'testdb.testtabletwo',
|
||||
'testuser',
|
||||
'%'
|
||||
)
|
||||
self.assertEqual(ret, False)
|
||||
|
||||
def test_grant_exists_all(self):
|
||||
'''
|
||||
Test to ensure that we can find a grant that exists
|
||||
'''
|
||||
mock_grants = [
|
||||
"GRANT SELECT, INSERT, UPDATE, DELETE, CREATE, DROP, RELOAD, SHUTDOWN, PROCESS, FILE, REFERENCES, INDEX, ALTER, SHOW DATABASES, SUPER, CREATE TEMPORARY TABLES, LOCK TABLES, EXECUTE, REPLICATION SLAVE, REPLICATION CLIENT, CREATE VIEW, SHOW VIEW, CREATE ROUTINE, ALTER ROUTINE, CREATE USER, EVENT, TRIGGER, CREATE TABLESPACE, CREATE ROLE, DROP ROLE ON testdb.testtableone TO `testuser`@`%`",
|
||||
"GRANT BACKUP_ADMIN,BINLOG_ADMIN,CONNECTION_ADMIN,ENCRYPTION_KEY_ADMIN,GROUP_REPLICATION_ADMIN,PERSIST_RO_VARIABLES_ADMIN,REPLICATION_SLAVE_ADMIN,RESOURCE_GROUP_ADMIN,RESOURCE_GROUP_USER,ROLE_ADMIN,SET_USER_ID,SYSTEM_VARIABLES_ADMIN,XA_RECOVER_ADMIN ON testdb.testtableone TO `testuser`@`%`"
|
||||
]
|
||||
with patch.object(mysql, 'version', return_value='8.0.10'):
|
||||
mock = MagicMock(return_value=mock_grants)
|
||||
with patch.object(mysql, 'user_grants', return_value=mock_grants) as mock_user_grants:
|
||||
ret = mysql.grant_exists(
|
||||
'ALL',
|
||||
'testdb.testtableone',
|
||||
'testuser',
|
||||
'%'
|
||||
)
|
||||
self.assertEqual(ret, True)
|
||||
|
||||
mock_grants = ["GRANT ALL PRIVILEGES ON testdb.testtableone TO `testuser`@`%`"]
|
||||
with patch.object(mysql, 'version', return_value='5.6.41'):
|
||||
mock = MagicMock(return_value=mock_grants)
|
||||
with patch.object(mysql, 'user_grants', return_value=mock_grants) as mock_user_grants:
|
||||
ret = mysql.grant_exists(
|
||||
'ALL PRIVILEGES',
|
||||
'testdb.testtableone',
|
||||
'testuser',
|
||||
'%'
|
||||
)
|
||||
self.assertEqual(ret, True)
|
||||
|
||||
@skipIf(True, 'TODO: Mock up user_grants()')
|
||||
def test_grant_add(self):
|
||||
|
@ -305,17 +305,13 @@ class LazyLoaderSingleItem(TestCase):
|
||||
'''
|
||||
Checks that a KeyError is raised when the function key does not contain a '.'
|
||||
'''
|
||||
key = 'testing_no_dot'
|
||||
expected = "The key '{0}' should contain a '.'".format(key)
|
||||
with self.assertRaises(KeyError) as err:
|
||||
inspect.isfunction(self.loader['testing_no_dot'])
|
||||
|
||||
if six.PY2:
|
||||
self.assertEqual(err.exception[0],
|
||||
'The key \'%s\' should contain a \'.\'')
|
||||
else:
|
||||
self.assertEqual(
|
||||
six.text_type(err.exception),
|
||||
six.text_type(("The key '%s' should contain a '.'", 'testing_no_dot'))
|
||||
)
|
||||
result = err.exception.args[0]
|
||||
assert result == expected, result
|
||||
|
||||
|
||||
module_template = '''
|
||||
|
@ -317,11 +317,15 @@ class ZMQConfigTest(TestCase):
|
||||
'''
|
||||
test _get_master_uri method
|
||||
'''
|
||||
|
||||
m_ip = '127.0.0.1'
|
||||
m_port = 4505
|
||||
s_ip = '111.1.0.1'
|
||||
s_port = 4058
|
||||
|
||||
m_ip6 = '1234:5678::9abc'
|
||||
s_ip6 = '1234:5678::1:9abc'
|
||||
|
||||
with patch('salt.transport.zeromq.LIBZMQ_VERSION_INFO', (4, 1, 6)), \
|
||||
patch('salt.transport.zeromq.ZMQ_VERSION_INFO', (16, 0, 1)):
|
||||
# pass in both source_ip and source_port
|
||||
@ -330,15 +334,27 @@ class ZMQConfigTest(TestCase):
|
||||
source_ip=s_ip,
|
||||
source_port=s_port) == 'tcp://{0}:{1};{2}:{3}'.format(s_ip, s_port, m_ip, m_port)
|
||||
|
||||
assert salt.transport.zeromq._get_master_uri(master_ip=m_ip6,
|
||||
master_port=m_port,
|
||||
source_ip=s_ip6,
|
||||
source_port=s_port) == 'tcp://[{0}]:{1};[{2}]:{3}'.format(s_ip6, s_port, m_ip6, m_port)
|
||||
|
||||
# source ip and source_port empty
|
||||
assert salt.transport.zeromq._get_master_uri(master_ip=m_ip,
|
||||
master_port=m_port) == 'tcp://{0}:{1}'.format(m_ip, m_port)
|
||||
|
||||
assert salt.transport.zeromq._get_master_uri(master_ip=m_ip6,
|
||||
master_port=m_port) == 'tcp://[{0}]:{1}'.format(m_ip6, m_port)
|
||||
|
||||
# pass in only source_ip
|
||||
assert salt.transport.zeromq._get_master_uri(master_ip=m_ip,
|
||||
master_port=m_port,
|
||||
source_ip=s_ip) == 'tcp://{0}:0;{1}:{2}'.format(s_ip, m_ip, m_port)
|
||||
|
||||
assert salt.transport.zeromq._get_master_uri(master_ip=m_ip6,
|
||||
master_port=m_port,
|
||||
source_ip=s_ip6) == 'tcp://[{0}]:0;[{1}]:{2}'.format(s_ip6, m_ip6, m_port)
|
||||
|
||||
# pass in only source_port
|
||||
assert salt.transport.zeromq._get_master_uri(master_ip=m_ip,
|
||||
master_port=m_port,
|
||||
|
@ -18,6 +18,7 @@ from tests.support.mock import (
|
||||
|
||||
# Import salt libs
|
||||
import salt.utils.network as network
|
||||
from salt._compat import ipaddress
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
@ -202,6 +203,35 @@ class NetworkTestCase(TestCase):
        self.assertFalse(network.is_ipv6('10.0.1.2'))
        self.assertFalse(network.is_ipv6('2001.0db8.85a3.0000.0000.8a2e.0370.7334'))

    def test_parse_host_port(self):
        _ip = ipaddress.ip_address
        good_host_ports = {
            '10.10.0.3': (_ip('10.10.0.3'), None),
            '10.10.0.3:1234': (_ip('10.10.0.3'), 1234),
            '2001:0db8:85a3::8a2e:0370:7334': (_ip('2001:0db8:85a3::8a2e:0370:7334'), None),
            '[2001:0db8:85a3::8a2e:0370:7334]:1234': (_ip('2001:0db8:85a3::8a2e:0370:7334'), 1234),
            '2001:0db8:85a3::7334': (_ip('2001:0db8:85a3::7334'), None),
            '[2001:0db8:85a3::7334]:1234': (_ip('2001:0db8:85a3::7334'), 1234)
        }
        bad_host_ports = [
            '10.10.0.3/24',
            '10.10.0.3::1234',
            '2001:0db8:0370:7334',
            '2001:0db8:0370::7334]:1234',
            '2001:0db8:0370:0:a:b:c:d:1234'
        ]
        for host_port, assertion_value in good_host_ports.items():
            host = port = None
            host, port = network.parse_host_port(host_port)
            self.assertEqual((host, port), assertion_value)

        for host_port in bad_host_ports:
            try:
                self.assertRaises(ValueError, network.parse_host_port, host_port)
            except AssertionError as _e_:
                log.error('bad host_port value: "%s" failed to trigger ValueError exception', host_port)
                raise _e_

    def test_is_subnet(self):
        for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS):
            for item in subnet_data[True]:
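A short usage sketch based on the pairs above, assuming, as the assertions imply, that parse_host_port returns an ipaddress object plus an optional integer port:

    import salt.utils.network as network

    host, port = network.parse_host_port('[2001:0db8:85a3::7334]:1234')
    # host == IPv6Address('2001:db8:85a3::7334'), port == 1234
    host, port = network.parse_host_port('10.10.0.3')
    # host == IPv4Address('10.10.0.3'), port is None
    # Malformed values such as '10.10.0.3::1234' raise ValueError.
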
@ -538,3 +568,15 @@ class NetworkTestCase(TestCase):
        self.assertRaises(ValueError, network.mac_str_to_bytes, 'a0:b0:c0:d0:e0:fg')
        self.assertEqual(b'\x10\x08\x06\x04\x02\x00', network.mac_str_to_bytes('100806040200'))
        self.assertEqual(b'\xf8\xe7\xd6\xc5\xb4\xa3', network.mac_str_to_bytes('f8e7d6c5b4a3'))

    def test_generate_minion_id_with_long_hostname(self):
        '''
        Validate the fix for:

        https://github.com/saltstack/salt/issues/51160
        '''
        long_name = 'localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz'
        with patch('socket.gethostname', MagicMock(return_value=long_name)):
            # An exception is raised if unicode is passed to socket.getfqdn
            minion_id = network.generate_minion_id()
        assert minion_id != '', minion_id
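The test only asserts that a non-empty id comes back for an over-long hostname; the fix itself is not shown in this hunk. A hypothetical guard in the spirit of the inline comment above, for illustration only and not the actual patch:

    import socket

    def safe_fqdn(hostname):
        # On Python 2, a unicode hostname with a label longer than 63 chars
        # can make the idna codec raise inside socket.getfqdn; fall back to
        # the bare hostname instead of propagating the error.
        try:
            return socket.getfqdn(hostname)
        except UnicodeError:
            return hostname
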
99  tests/unit/utils/test_win_lgpo_auditpol.py  Normal file
@ -0,0 +1,99 @@
# -*- coding: utf-8 -*-

# Import Python Libs
from __future__ import absolute_import, unicode_literals, print_function
import random

# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, MagicMock
from tests.support.unit import TestCase, skipIf

# Import Salt Libs
import salt.modules.cmdmod
import salt.utils.platform
import salt.utils.win_lgpo_auditpol as win_lgpo_auditpol

settings = ['No Auditing', 'Success', 'Failure', 'Success and Failure']


@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not salt.utils.platform.is_windows(), 'System is not Windows')
class WinLgpoAuditpolTestCase(TestCase, LoaderModuleMockMixin):
    def setup_loader_modules(self):
        return {
            win_lgpo_auditpol: {
                '__context__': {},
                '__salt__': {
                    'cmd.run_all': salt.modules.cmdmod.run_all
                }}}

    def test_get_settings(self):
        names = win_lgpo_auditpol._get_valid_names()
        ret = win_lgpo_auditpol.get_settings(category='All')
        for name in names:
            self.assertIn(name, [k.lower() for k in ret])

    def test_get_settings_invalid_category(self):
        self.assertRaises(
            KeyError,
            win_lgpo_auditpol.get_settings,
            category='Fake Category')

    def test_get_setting(self):
        names = win_lgpo_auditpol._get_valid_names()
        for name in names:
            ret = win_lgpo_auditpol.get_setting(name)
            self.assertIn(ret, settings)

    def test_get_setting_invalid_name(self):
        self.assertRaises(
            KeyError,
            win_lgpo_auditpol.get_setting,
            name='Fake Name')

    def test_set_setting(self):
        names = ['Credential Validation', 'IPsec Driver', 'File System', 'SAM']
        mock_set = MagicMock(return_value={'retcode': 0, 'stdout': 'Success'})
        with patch.object(salt.modules.cmdmod, 'run_all', mock_set):
            with patch.object(win_lgpo_auditpol, '_get_valid_names',
                              return_value=[k.lower() for k in names]):
                for name in names:
                    value = random.choice(settings)
                    win_lgpo_auditpol.set_setting(name=name, value=value)
                    switches = win_lgpo_auditpol.settings[value]
                    cmd = 'auditpol /set /subcategory:"{0}" {1}' \
                          ''.format(name, switches)
                    mock_set.assert_called_once_with(cmd=cmd, python_shell=True)
                    mock_set.reset_mock()

    def test_set_setting_invalid_setting(self):
        names = ['Credential Validation', 'IPsec Driver', 'File System']
        with patch.object(win_lgpo_auditpol, '_get_valid_names',
                          return_value=[k.lower() for k in names]):
            self.assertRaises(
                KeyError,
                win_lgpo_auditpol.set_setting,
                name='Fake Name',
                value='No Auditing')

    def test_set_setting_invalid_value(self):
        names = ['Credential Validation', 'IPsec Driver', 'File System']
        with patch.object(win_lgpo_auditpol, '_get_valid_names',
                          return_value=[k.lower() for k in names]):
            self.assertRaises(
                KeyError,
                win_lgpo_auditpol.set_setting,
                name='Credential Validation',
                value='Fake Value')

    def test_get_auditpol_dump(self):
        names = win_lgpo_auditpol._get_valid_names()
        dump = win_lgpo_auditpol.get_auditpol_dump()
        for name in names:
            found = False
            for line in dump:
                if name.lower() in line.lower():
                    found = True
                    break
            self.assertTrue(found)
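test_set_setting pins down the shell command set_setting is expected to build. Roughly like the following; the switch strings are an assumed mapping for illustration, the real values come from win_lgpo_auditpol.settings:

    switches = {
        'No Auditing': '/success:disable /failure:disable',
        'Success': '/success:enable /failure:disable',
        'Failure': '/success:disable /failure:enable',
        'Success and Failure': '/success:enable /failure:enable',
    }
    cmd = 'auditpol /set /subcategory:"{0}" {1}'.format('Credential Validation',
                                                        switches['Success'])
    # -> auditpol /set /subcategory:"Credential Validation" /success:enable /failure:disable
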
@ -50,6 +50,7 @@ class ValidateNetTestCase(TestCase):
        Test IPv6 address validation
        '''
        true_addrs = [
            '::',
            '::1',
            '::1/32',
            '::1/32',
@ -62,6 +63,8 @@ class ValidateNetTestCase(TestCase):
            '::1/0',
            '::1/32d',
            '::1/129',
            '2a03:4000:c:10aa:1017:f00d:aaaa:a:4506',
            '2a03::1::2',
        ]

        for addr in true_addrs:
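The two trailing entries in that invalid list fail for different reasons: the first has nine groups (IPv6 allows at most eight), and the second uses '::' twice, which the grammar permits only once. The stdlib rejects both as well, for example:

    import ipaddress

    for bad in ('2a03:4000:c:10aa:1017:f00d:aaaa:a:4506', '2a03::1::2'):
        try:
            ipaddress.ip_address(bad)
        except ValueError as exc:
            print('{0}: {1}'.format(bad, exc))
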