Mirror of https://github.com/valitydev/salt.git (synced 2024-11-07 17:09:03 +00:00)

Merge pull request #18530 from s0undt3ch/develop
Merge branch '2014.7' forward into develop

Commit 347ad77e53
@ -42,7 +42,9 @@ load-plugins=salttesting.pylintplugins.pep8,salttesting.pylintplugins.pep263,sal
# multiple time (only on the command line, not in the configuration file where
# it should appear only once).
disable=W0142,
        C0330
        C0330,
        I0011,
        I0012
#       E8121,
#       E8122,
#       E8123,
@ -58,6 +60,8 @@ disable=W0142,
#       E812* All PEP8 E12*
#       E8501 PEP8 line too long
#       C0330 (bad-continuation)
#       I0011 (locally-disabling)
#       I0012 (locally-enabling)


[REPORTS]
@ -34,6 +34,7 @@ load-plugins=salttesting.pylintplugins.pep8,salttesting.pylintplugins.pep263,sal
# --disable=W"
disable=R,
        I0011,
        I0012,
        I0013,
        E1101,
        E1103,
@ -77,6 +78,7 @@ disable=R,
# Disabled:
# R* [refactoring suggestions & reports]
# I0011 (locally-disabling)
# I0012 (locally-enabling)
# I0013 (file-ignored)
# E1101 (no-member) [pylint isn't smart enough]
# E1103 (maybe-no-member)
@ -4868,3 +4868,9 @@ source_file = _build/locale/topics/releases/2014.1.13.pot
source_lang = en
source_name = topics/releases/2014.1.13.rst

[salt.topics--releases--2014_7_1]
file_filter = locale/<lang>/LC_MESSAGES/topics/releases/2014.7.1.po
source_file = _build/locale/topics/releases/2014.7.1.pot
source_lang = en
source_name = topics/releases/2014.7.1.rst
@ -16,7 +16,7 @@ How it Works
The best example is the `pkg` state. One of the major requests in Salt has long
been adding the ability to install all packages defined at the same time. The
mod_aggregate system makes this a reality. While executing Salt's state system,
when a `pkg` state is reached the ``mod_agregate`` function in the state module
when a `pkg` state is reached the ``mod_aggregate`` function in the state module
is called. For `pkg` this function scans all of the other states that are slated
to run, and picks up the references to ``name`` and ``pkgs``, then adds them to
``pkgs`` in the first state. The result is calling yum/apt-get/pacman etc. just
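For context, here is a minimal sketch of the aggregation idea described above. It is illustrative only (hypothetical helper names, not Salt's actual mod_aggregate signature): every pending pkg chunk is folded into the first one so the package manager runs a single transaction.

def aggregate_pkgs(low, chunks):
    # Fold the package lists of all pending pkg states into ``low``,
    # so one yum/apt-get/pacman call installs everything at once.
    pkgs = low.setdefault('pkgs', [low['name']])
    for chunk in chunks:
        if chunk is low or chunk.get('state') != 'pkg':
            continue
        # Honor both single-package (name) and multi-package (pkgs) forms
        pkgs.extend(chunk.get('pkgs', [chunk['name']]))
        chunk['__agg__'] = True  # mark as aggregated so it is skipped later
    return low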
@ -926,7 +926,7 @@ the network interfaces of your virtual machines, for example:-
# interface (will be associated with the primary private ip address
# of the interface
#
# allocation_new_eip: True
# allocate_new_eip: True

# Uncomment this instead to allocate a new Elastic IP Address to
# both the primary private ip address and each of the secondary ones
@ -7,5 +7,7 @@ Salt 2014.7.1 Release Notes
Version 2014.7.1 is a bugfix release for :doc:`2014.7.0
</topics/releases/2014.7.0>`. The changes include:

- Fix gitfs serving of symlinked files (:issue:`17700`)

- Fixed gitfs serving symlinks in :mod:`file.recurse
  <salt.states.file.recurse>` states (:issue:`17700`)
- Fix holding of multiple packages (YUM) when combined with version pinning
  (:issue:`18468`)
Binary file not shown.
@ -27,6 +27,9 @@ except ImportError:
# Import python libs
import logging

# Import salt libs
import salt.utils

log = logging.getLogger(__name__)


@ -60,7 +63,7 @@ def auth(pem, **kwargs):
    cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, pem)

    cacert_file = __salt__['config.get']('external_auth:pki:ca_file')
    with open(cacert_file) as f:
    with salt.utils.fopen(cacert_file) as f:
        cacert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, f.read())

    log.debug('Attempting to authenticate via pki.')
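This hunk is the first of many in this merge that swap the builtin open() for salt.utils.fopen(). As a rough sketch of why (simplified illustration; the real implementation lives in salt/utils/__init__.py), fopen wraps open() and marks the descriptor close-on-exec so processes spawned by the daemon do not inherit open handles:

import fcntl

def fopen(*args, **kwargs):
    # Behaves like open(), but hardens the returned handle.
    fhandle = open(*args, **kwargs)
    if hasattr(fcntl, 'FD_CLOEXEC'):
        old_flags = fcntl.fcntl(fhandle.fileno(), fcntl.F_GETFD)
        # Close the descriptor automatically when the process exec()s
        fcntl.fcntl(fhandle.fileno(), fcntl.F_SETFD,
                    old_flags | fcntl.FD_CLOEXEC)
    return fhandle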
@ -140,7 +140,7 @@ if not is_windows():
    if not os.path.exists(shim_file):
        # On esky builds we only have the .pyc file
        shim_file += "c"
    with open(shim_file) as ssh_py_shim:
    with salt.utils.fopen(shim_file) as ssh_py_shim:
        SSH_PY_SHIM = ssh_py_shim.read()

log = logging.getLogger(__name__)
@ -232,7 +232,7 @@ class SSH(object):
                )
            )
        pub = '{0}.pub'.format(priv)
        with open(pub, 'r') as fp_:
        with salt.utils.fopen(pub, 'r') as fp_:
            return '{0} rsa root@master'.format(fp_.read().split()[1])

    def key_deploy(self, host, ret):
@ -135,10 +135,10 @@ def prep_trans_tar(file_client, chunks, file_refs, pillar=None):
        ['salt://_outputters'],
        ['salt://_utils'],
    ]
    with open(lowfn, 'w+') as fp_:
    with salt.utils.fopen(lowfn, 'w+') as fp_:
        fp_.write(json.dumps(chunks))
    if pillar:
        with open(pillarfn, 'w+') as fp_:
        with salt.utils.fopen(pillarfn, 'w+') as fp_:
            fp_.write(json.dumps(pillar))
    for saltenv in file_refs:
        file_refs[saltenv].extend(sync_refs)
@ -120,8 +120,8 @@ def ssh_pub(vm_):
    ssh = os.path.expanduser(ssh)
    if os.path.isfile(ssh):
        return None

    return SSHKeyDeployment(open(ssh).read())
    with salt.utils.fopen(ssh) as fhr:
        return SSHKeyDeployment(fhr.read())


def avail_locations(conn=None, call=None):
@ -339,12 +339,17 @@ def _init():
    cache_file = _get_buckets_cache_filename()
    exp = time.time() - _s3_cache_expire

    metadata = None

    # check mtime of the buckets files cache
    if os.path.isfile(cache_file) and os.path.getmtime(cache_file) > exp:
        return _read_buckets_cache_file(cache_file)
    else:
        metadata = _read_buckets_cache_file(cache_file)

    if metadata is None:
        # bucket files cache expired
        return _refresh_buckets_cache_file(cache_file)
        metadata = _refresh_buckets_cache_file(cache_file)

    return metadata


def _get_cache_dir():
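The reworked _init() implements a common mtime-based cache policy: serve the cached metadata while it is fresh, and fall back to a refresh when it is stale or unreadable. A generic sketch of the pattern (read_cache and refresh_cache are stand-ins for the s3fs helpers, not Salt API names):

import os
import time

def load_with_cache(cache_file, max_age, read_cache, refresh_cache):
    if os.path.isfile(cache_file):
        age = time.time() - os.path.getmtime(cache_file)
        if age < max_age:
            # Cache is fresh; trust it if it deserializes cleanly
            metadata = read_cache(cache_file)
            if metadata is not None:
                return metadata
    # Missing, stale, or corrupt cache: rebuild from the source of truth
    return refresh_cache(cache_file)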
@ -467,7 +472,11 @@ def _read_buckets_cache_file(cache_file):
    log.debug('Reading buckets cache file')

    with salt.utils.fopen(cache_file, 'rb') as fp_:
        data = pickle.load(fp_)
        try:
            data = pickle.load(fp_)
        except (pickle.UnpicklingError, AttributeError, EOFError, ImportError,
                IndexError, KeyError):
            data = None

    return data
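The wide except clause here is deliberate: beyond pickle.UnpicklingError, pickle.load() can raise AttributeError or ImportError while resolving the pickled class, and EOFError, IndexError, or KeyError on truncated or corrupt data. Returning None for all of these lets the caller fall back to refreshing the cache instead of crashing the fileserver.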
@ -595,14 +595,12 @@ def _virtual(osdata):
            pass
        if os.path.isfile('/proc/1/cgroup'):
            try:
                if ':/lxc/' in salt.utils.fopen(
                    '/proc/1/cgroup', 'r'
                ).read():
                    grains['virtual_subtype'] = 'LXC'
                if ':/docker/' in salt.utils.fopen(
                    '/proc/1/cgroup', 'r'
                ).read():
                    grains['virtual_subtype'] = 'Docker'
                with salt.utils.fopen('/proc/1/cgroup', 'r') as fhr:
                    if ':/lxc/' in fhr.read():
                        grains['virtual_subtype'] = 'LXC'
                with salt.utils.fopen('/proc/1/cgroup', 'r') as fhr:
                    if ':/docker/' in fhr.read():
                        grains['virtual_subtype'] = 'Docker'
            except IOError:
                pass
        if isdir('/proc/vz'):
@ -635,14 +633,13 @@ def _virtual(osdata):
                grains['virtual_subtype'] = 'Xen HVM DomU'
            elif os.path.isfile('/proc/xen/capabilities') and \
                    os.access('/proc/xen/capabilities', os.R_OK):
                caps = salt.utils.fopen('/proc/xen/capabilities')
                if 'control_d' not in caps.read():
                    # Tested on CentOS 5.5 / 2.6.18-194.3.1.el5xen
                    grains['virtual_subtype'] = 'Xen PV DomU'
                else:
                    # Shouldn't get to this, but just in case
                    grains['virtual_subtype'] = 'Xen Dom0'
                caps.close()
                with salt.utils.fopen('/proc/xen/capabilities') as fhr:
                    if 'control_d' not in fhr.read():
                        # Tested on CentOS 5.5 / 2.6.18-194.3.1.el5xen
                        grains['virtual_subtype'] = 'Xen PV DomU'
                    else:
                        # Shouldn't get to this, but just in case
                        grains['virtual_subtype'] = 'Xen Dom0'
            # Tested on Fedora 10 / 2.6.27.30-170.2.82 with xen
            # Tested on Fedora 15 / 2.6.41.4-1 without running xen
            elif isdir('/sys/bus/xen'):
@ -656,9 +653,9 @@ def _virtual(osdata):
        if 'dom' in grains.get('virtual_subtype', '').lower():
            grains['virtual'] = 'xen'
        if os.path.isfile('/proc/cpuinfo'):
            if 'QEMU Virtual CPU' in \
                    salt.utils.fopen('/proc/cpuinfo', 'r').read():
                grains['virtual'] = 'kvm'
            with salt.utils.fopen('/proc/cpuinfo', 'r') as fhr:
                if 'QEMU Virtual CPU' in fhr.read():
                    grains['virtual'] = 'kvm'
    elif osdata['kernel'] == 'FreeBSD':
        kenv = salt.utils.which('kenv')
        if kenv:
@ -1024,10 +1021,10 @@ def os_data():
                    grains['lsb_distrib_id'] = value.strip()
        elif os.path.isfile('/etc/SuSE-release'):
            grains['lsb_distrib_id'] = 'SUSE'
            rel = open('/etc/SuSE-release').read().split('\n')[1]
            patch = open('/etc/SuSE-release').read().split('\n')[2]
            rel = re.sub("[^0-9]", "", rel)
            patch = re.sub("[^0-9]", "", patch)
            with salt.utils.fopen('/etc/SuSE-release') as fhr:
                rel = re.sub("[^0-9]", "", fhr.read().split('\n')[1])
            with salt.utils.fopen('/etc/SuSE-release') as fhr:
                patch = re.sub("[^0-9]", "", fhr.read().split('\n')[2])
            release = rel + " SP" + patch
            grains['lsb_distrib_release'] = release
            grains['lsb_distrib_codename'] = "n.a"
@ -1413,7 +1410,7 @@ def path():
    '''
    # Provides:
    #   path
    return {'path': os.environ['PATH'].strip()}
    return {'path': os.environ.get('PATH', '').strip()}


def pythonversion():
@ -1,341 +1,344 @@
# -*- coding: utf-8 -*-
'''
Module for fetching artifacts from Artifactory
'''

# Import python libs
import urllib2
import os
import xml.etree.ElementTree as ET
from urllib2 import HTTPError
import logging

# Import Salt libs
import salt.utils

log = logging.getLogger(__name__)


def get_latest_snapshot(artifactory_url, repository, group_id, artifact_id, packaging, target_dir='/tmp', target_file=None):
    '''
    Gets latest snapshot of the given artifact

    artifactory_url
        URL of artifactory instance
    repository
        Snapshot repository in artifactory to retrieve artifact from, for example: libs-snapshots
    group_id
        Group Id of the artifact
    artifact_id
        Artifact Id of the artifact
    packaging
        Packaging type (jar,war,ear,etc)
    target_dir
        Target directory to download artifact to (default: /tmp)
    target_file
        Target file to download artifact to (by default it is target_dir/artifact_id-snapshot_version.packaging)
    '''
    log.debug("======================== MODULE FUNCTION: artifactory.get_latest_snapshot, artifactory_url=%s, repository=%s, group_id=%s, artifact_id=%s, packaging=%s, target_dir=%s)",
              artifactory_url, repository, group_id, artifact_id, packaging, target_dir)
    artifact_metadata = _get_artifact_metadata(artifactory_url=artifactory_url, repository=repository, group_id=group_id, artifact_id=artifact_id)
    version = artifact_metadata['latest_version']

    snapshot_url, file_name = _get_snapshot_url(artifactory_url, repository, group_id, artifact_id, version, packaging)
    target_file = __resolve_target_file(file_name, target_dir, target_file)

    return __save_artifact(snapshot_url, target_file)


def get_snapshot(artifactory_url, repository, group_id, artifact_id, packaging, version, snapshot_version=None, target_dir='/tmp', target_file=None):
    '''
    Gets snapshot of the desired version of the artifact

    artifactory_url
        URL of artifactory instance
    repository
        Snapshot repository in artifactory to retrieve artifact from, for example: libs-snapshots
    group_id
        Group Id of the artifact
    artifact_id
        Artifact Id of the artifact
    packaging
        Packaging type (jar,war,ear,etc)
    version
        Version of the artifact
    target_dir
        Target directory to download artifact to (default: /tmp)
    target_file
        Target file to download artifact to (by default it is target_dir/artifact_id-snapshot_version.packaging)
    '''
    log.debug('======================== MODULE FUNCTION: artifactory.get_snapshot(artifactory_url=%s, repository=%s, group_id=%s, artifact_id=%s, packaging=%s, version=%s, target_dir=%s)',
              artifactory_url, repository, group_id, artifact_id, packaging, version, target_dir)

    snapshot_url, file_name = _get_snapshot_url(artifactory_url, repository, group_id, artifact_id, version, packaging, snapshot_version)
    target_file = __resolve_target_file(file_name, target_dir, target_file)

    return __save_artifact(snapshot_url, target_file)


def get_release(artifactory_url, repository, group_id, artifact_id, packaging, version, target_dir='/tmp', target_file=None):
    '''
    Gets the specified release of the artifact

    artifactory_url
        URL of artifactory instance
    repository
        Release repository in artifactory to retrieve artifact from, for example: libs-releases
    group_id
        Group Id of the artifact
    artifact_id
        Artifact Id of the artifact
    packaging
        Packaging type (jar,war,ear,etc)
    version
        Version of the artifact
    target_dir
        Target directory to download artifact to (default: /tmp)
    target_file
        Target file to download artifact to (by default it is target_dir/artifact_id-version.packaging)
    '''
    log.debug('======================== MODULE FUNCTION: artifactory.get_release(artifactory_url=%s, repository=%s, group_id=%s, artifact_id=%s, packaging=%s, version=%s, target_dir=%s)',
              artifactory_url, repository, group_id, artifact_id, packaging, version, target_dir)

    release_url, file_name = _get_release_url(repository, group_id, artifact_id, packaging, version, artifactory_url)
    target_file = __resolve_target_file(file_name, target_dir, target_file)

    return __save_artifact(release_url, target_file)


def __resolve_target_file(file_name, target_dir, target_file=None):
    if target_file is None:
        target_file = os.path.join(target_dir, file_name)
    return target_file


def _get_snapshot_url(artifactory_url, repository, group_id, artifact_id, version, packaging, snapshot_version=None):
    if snapshot_version is None:
        snapshot_version_metadata = _get_snapshot_version_metadata(artifactory_url=artifactory_url, repository=repository, group_id=group_id, artifact_id=artifact_id, version=version)
        if packaging not in snapshot_version_metadata['snapshot_versions']:
            error_message = '''Cannot find requested packaging '{packaging}' in the snapshot version metadata.
                      artifactory_url: {artifactory_url}
                      repository: {repository}
                      group_id: {group_id}
                      artifact_id: {artifact_id}
                      packaging: {packaging}
                      version: {version}'''.format(
                artifactory_url=artifactory_url,
                repository=repository,
                group_id=group_id,
                artifact_id=artifact_id,
                packaging=packaging,
                version=version)
            raise ArtifactoryError(error_message)
        snapshot_version = snapshot_version_metadata['snapshot_versions'][packaging]

    group_url = __get_group_id_subpath(group_id)

    file_name = '{artifact_id}-{snapshot_version}.{packaging}'.format(
        artifact_id=artifact_id,
        snapshot_version=snapshot_version,
        packaging=packaging)
    snapshot_url = '{artifactory_url}/{repository}/{group_url}/{artifact_id}/{version}/{file_name}'.format(
        artifactory_url=artifactory_url,
        repository=repository,
        group_url=group_url,
        artifact_id=artifact_id,
        version=version,
        file_name=file_name)
    log.debug('snapshot_url=%s', snapshot_url)

    return snapshot_url, file_name


def _get_release_url(repository, group_id, artifact_id, packaging, version, artifactory_url):
    group_url = __get_group_id_subpath(group_id)
    # for released versions the suffix for the file is same as version
    file_name = '{artifact_id}-{version}.{packaging}'.format(
        artifact_id=artifact_id,
        version=version,
        packaging=packaging)

    release_url = '{artifactory_url}/{repository}/{group_url}/{artifact_id}/{version}/{file_name}'.format(
        artifactory_url=artifactory_url,
        repository=repository,
        group_url=group_url,
        artifact_id=artifact_id,
        version=version,
        file_name=file_name)
    log.debug('release_url=%s', release_url)
    return release_url, file_name


def _get_artifact_metadata_url(artifactory_url, repository, group_id, artifact_id):
    group_url = __get_group_id_subpath(group_id)
    # for released versions the suffix for the file is same as version
    artifact_metadata_url = '{artifactory_url}/{repository}/{group_url}/{artifact_id}/maven-metadata.xml'.format(
        artifactory_url=artifactory_url,
        repository=repository,
        group_url=group_url,
        artifact_id=artifact_id)
    log.debug('artifact_metadata_url=%s', artifact_metadata_url)
    return artifact_metadata_url


def _get_artifact_metadata_xml(artifactory_url, repository, group_id, artifact_id):
    artifact_metadata_url = _get_artifact_metadata_url(artifactory_url=artifactory_url, repository=repository, group_id=group_id, artifact_id=artifact_id)
    try:
        artifact_metadata_xml = urllib2.urlopen(artifact_metadata_url).read()
    except HTTPError as e:
        raise Exception("Could not fetch data from url: {url}, HTTPError: {message}".format(url=artifact_metadata_url, message=e.message))

    log.debug('artifact_metadata_xml=%s', artifact_metadata_xml)
    return artifact_metadata_xml


def _get_artifact_metadata(artifactory_url, repository, group_id, artifact_id):
    metadata_xml = _get_artifact_metadata_xml(artifactory_url=artifactory_url, repository=repository, group_id=group_id, artifact_id=artifact_id)
    root = ET.fromstring(metadata_xml)

    assert group_id == root.find('groupId').text
    assert artifact_id == root.find('artifactId').text
    latest_version = root.find('versioning').find('latest').text
    return {
        'latest_version': latest_version
    }


# functions for handling snapshots
def _get_snapshot_version_metadata_url(artifactory_url, repository, group_id, artifact_id, version):
    group_url = __get_group_id_subpath(group_id)
    # for released versions the suffix for the file is same as version
    snapshot_version_metadata_url = '{artifactory_url}/{repository}/{group_url}/{artifact_id}/{version}/maven-metadata.xml'.format(
        artifactory_url=artifactory_url,
        repository=repository,
        group_url=group_url,
        artifact_id=artifact_id,
        version=version)
    log.debug('snapshot_version_metadata_url=%s', snapshot_version_metadata_url)
    return snapshot_version_metadata_url


def _get_snapshot_version_metadata_xml(artifactory_url, repository, group_id, artifact_id, version):
    snapshot_version_metadata_url = _get_snapshot_version_metadata_url(artifactory_url=artifactory_url, repository=repository, group_id=group_id, artifact_id=artifact_id, version=version)
    try:
        snapshot_version_metadata_xml = urllib2.urlopen(snapshot_version_metadata_url).read()
    except HTTPError as e:
        raise Exception("Could not fetch data from url: {url}, HTTPError: {message}".format(url=snapshot_version_metadata_url, message=e.message))
    log.debug('snapshot_version_metadata_xml=%s', snapshot_version_metadata_xml)
    return snapshot_version_metadata_xml


def _get_snapshot_version_metadata(artifactory_url, repository, group_id, artifact_id, version):
    metadata_xml = _get_snapshot_version_metadata_xml(artifactory_url=artifactory_url, repository=repository, group_id=group_id, artifact_id=artifact_id, version=version)
    metadata = ET.fromstring(metadata_xml)

    assert group_id == metadata.find('groupId').text
    assert artifact_id == metadata.find('artifactId').text
    assert version == metadata.find('version').text

    snapshot_versions = metadata.find('versioning').find('snapshotVersions')
    extension_version_dict = {}
    for snapshot_version in snapshot_versions:
        extension = snapshot_version.find('extension').text
        value = snapshot_version.find('value').text
        extension_version_dict[extension] = value

    return {
        'snapshot_versions': extension_version_dict
    }


def __save_artifact(artifact_url, target_file):
    log.debug("__save_artifact(%s, %s)", artifact_url, target_file)
    result = {
        'status': False,
        'changes': {},
        'comment': ''
    }

    if os.path.isfile(target_file):
        log.debug("File %s already exists, checking checksum...", target_file)
        checksum_url = artifact_url + ".sha1"

        checksum_success, artifact_sum, checksum_comment = __download(checksum_url)
        if checksum_success:
            log.debug("Downloaded SHA1 SUM: %s", artifact_sum)
            file_sum = __salt__['file.get_hash'](path=target_file, form='sha1')
            log.debug("Target file (%s) SHA1 SUM: %s", target_file, file_sum)

            if artifact_sum == file_sum:
                result['status'] = True
                result['target_file'] = target_file
                result['comment'] = 'File {0} already exists, checksum matches with Artifactory.\n' \
                                    'Checksum URL: {1}'.format(target_file, checksum_url)
                return result
            else:
                result['comment'] = 'File {0} already exists, checksum does not match with Artifactory!\n'\
                                    'Checksum URL: {1}'.format(target_file, checksum_url)

        else:
            result['status'] = False
            result['comment'] = checksum_comment
            return result

    log.debug('Downloading: {url} -> {target_file}'.format(url=artifact_url, target_file=target_file))
    try:
        f = urllib2.urlopen(artifact_url)
        with open(target_file, "wb") as local_file:
        with salt.utils.fopen(target_file, "wb") as local_file:
            local_file.write(f.read())
        result['status'] = True
        result['comment'] = __append_comment(('Artifact downloaded from URL: {0}'.format(artifact_url)), result['comment'])
        result['changes']['downloaded_file'] = target_file
        result['target_file'] = target_file
    except (HTTPError, urllib2.URLError) as e:
        result['status'] = False
        result['comment'] = __get_error_comment(e, artifact_url)

    return result


def __get_group_id_subpath(group_id):
    group_url = group_id.replace('.', '/')
    return group_url


def __download(request_url):
    log.debug('Downloading content from {0}'.format(request_url))

    success = False
    content = None
    comment = None
    try:
        url = urllib2.urlopen(request_url)
        content = url.read()
        success = True
    except HTTPError as e:
        comment = __get_error_comment(e, request_url)

    return success, content, comment


def __get_error_comment(http_error, request_url):
    if http_error.code == 404:
        comment = 'HTTP Error 404. Request URL: ' + request_url
    elif http_error.code == 409:
        comment = 'HTTP Error 409: Conflict. Requested URL: {0}. \n' \
                  'This error may be caused by reading snapshot artifact from non-snapshot repository.'.format(request_url)
    else:
        comment = 'HTTP Error {err_code}. Request URL: {url}'.format(err_code=http_error.code, url=request_url)

    return comment


def __append_comment(new_comment, current_comment=''):
    return current_comment+'\n'+new_comment


class ArtifactoryError(Exception):

    def __init__(self, value):
        super(ArtifactoryError, self).__init__()
        self.value = value

    def __str__(self):
        return repr(self.value)
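For illustration, a call into the module's release path looks like this (all values hypothetical); the return value is the status dictionary built by __save_artifact():

result = get_release(
    artifactory_url='http://artifactory.example.com/artifactory',
    repository='libs-releases',
    group_id='com.example',
    artifact_id='myapp',
    packaging='war',
    version='1.0.0',
    target_dir='/tmp',
)
# e.g. {'status': True,
#       'changes': {'downloaded_file': '/tmp/myapp-1.0.0.war'},
#       'comment': '\nArtifact downloaded from URL: ...',
#       'target_file': '/tmp/myapp-1.0.0.war'}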
@ -38,15 +38,17 @@ from __future__ import absolute_import
# Import Python libs
import logging
import re
from distutils.version import LooseVersion as _LooseVersion
from distutils.version import LooseVersion as _LooseVersion  # pylint: disable=import-error,no-name-in-module
import salt.ext.six as six

log = logging.getLogger(__name__)

# Import third party libs
try:
    # pylint: disable=import-error
    import boto
    import boto.ec2
    # pylint: enable=import-error
    logging.getLogger('boto').setLevel(logging.CRITICAL)
    HAS_BOTO = True
except ImportError:
@ -38,16 +38,17 @@ from __future__ import absolute_import

# Import Python libs
import logging
from distutils.version import LooseVersion as _LooseVersion
from distutils.version import LooseVersion as _LooseVersion  # pylint: disable=import-error,no-name-in-module
from salt.exceptions import SaltInvocationError, CommandExecutionError

log = logging.getLogger(__name__)

# Import third party libs
try:
    # pylint: disable=import-error
    import boto
    import boto.vpc

    # pylint: enable=import-error
    logging.getLogger('boto').setLevel(logging.CRITICAL)
    HAS_BOTO = True
except ImportError:
@ -182,7 +182,7 @@ def _exec_cmd(*args, **kwargs):
    result = __salt__['cmd.run_all']('script -q -c "{0}" {1}'.format(cmd_exec, filename))

    # Read the output from the script command, stripping the first line
    with open(filename, 'r') as outfile:
    with salt.utils.fopen(filename, 'r') as outfile:
        stdout = outfile.readlines()
    result['stdout'] = ''.join(stdout[1:])
    os.remove(filename)
@ -12,7 +12,7 @@ import logging
import os.path
import re
import tempfile
from distutils.version import LooseVersion as _LooseVersion
from distutils.version import LooseVersion as _LooseVersion  # pylint: disable=import-error,no-name-in-module

# Import salt libs
import salt.utils
@ -129,7 +129,8 @@ def _run_silent_cygwin(cyg_arch='x86_64',
        os.remove(cyg_setup_path)

    file_data = _urlopen(cyg_setup_source)
    open(cyg_setup_path, "wb").write(file_data.read())
    with salt.utils.fopen(cyg_setup_path, "wb") as fhw:
        fhw.write(file_data.read())

    setup_command = cyg_setup_path
    options = []
@ -260,7 +260,7 @@ def _parse_current_network_settings():
    opts['networking'] = ''

    if os.path.isfile(_DEB_NETWORKING_FILE):
        with open(_DEB_NETWORKING_FILE) as contents:
        with salt.utils.fopen(_DEB_NETWORKING_FILE) as contents:
            for line in contents:
                if line.startswith('#'):
                    continue
@ -531,7 +531,7 @@ def _parse_interfaces(interface_files=None):
    method = -1

    for interface_file in interface_files:
        with open(interface_file) as interfaces:
        with salt.utils.fopen(interface_file) as interfaces:
            for line in interfaces:
                # Identify the clauses by the first word of each line.
                # Go to the next line if the current line is a comment
@ -1448,9 +1448,8 @@ def _write_file_ppp_ifaces(iface, data):
        msg = msg.format(os.path.dirname(filename))
        log.error(msg)
        raise AttributeError(msg)
    fout = salt.utils.fopen(filename, 'w')
    fout.write(ifcfg)
    fout.close()
    with salt.utils.fopen(filename, 'w') as fout:
        fout.write(ifcfg)

    # Return as a array so the difflib works
    return filename
@ -46,7 +46,8 @@ def _load(pillar_name, defaults_path):
    for loader in json, yaml:
        defaults_file = os.path.join(defaults_path, 'defaults.' + loader.__name__)
        if os.path.exists(defaults_file):
            defaults = loader.load(open(defaults_file))
            with salt.utils.fopen(defaults_file) as fhr:
                defaults = loader.load(fhr)
            return defaults
@ -536,20 +536,17 @@ def export(container, path):
    '''
    try:
        ppath = os.path.abspath(path)
        fic = open(ppath, 'w')
        status = base_status.copy()
        client = _get_client()
        response = client.export(_get_container_infos(container)['Id'])
        try:
        with salt.utils.fopen(ppath, 'w') as fic:
            status = base_status.copy()
            client = _get_client()
            response = client.export(_get_container_infos(container)['Id'])
            byte = response.read(4096)
            fic.write(byte)
            while byte != '':
                # Do stuff with byte.
                byte = response.read(4096)
                fic.write(byte)
        finally:
            fic.flush()
            fic.close()
        _valid(status,
               id_=container, out=ppath,
               comment='Exported to {0}'.format(ppath))
@ -2065,11 +2062,9 @@ def get_container_root(container):
    default_rootfs = os.path.join(default_path, 'roofs')
    rootfs_re = re.compile(r'^lxc.rootfs\s*=\s*(.*)\s*$', re.U)
    try:
        lxcconfig = os.path.join(
            default_path, 'config.lxc')
        f = open(lxcconfig)
        try:
            lines = f.readlines()
        lxcconfig = os.path.join(default_path, 'config.lxc')
        with salt.utils.fopen(lxcconfig) as fhr:
            lines = fhr.readlines()
        rlines = lines[:]
        rlines.reverse()
        for rl in rlines:
@ -2077,8 +2072,6 @@ def get_container_root(container):
            if robj:
                rootfs = robj.groups()[0]
                break
        finally:
            f.close()
    except Exception:
        rootfs = default_rootfs
    return rootfs
@ -897,16 +897,13 @@ def psed(path,

    shutil.copy2(path, '{0}{1}'.format(path, backup))

    try:
        ofile = salt.utils.fopen(path, 'w')
    with salt.utils.fopen(path, 'w') as ofile:
        with salt.utils.fopen('{0}{1}'.format(path, backup), 'r') as ifile:
            if multi is True:
                for line in ifile.readline():
                    ofile.write(_psed(line, before, after, limit, flags))
            else:
                ofile.write(_psed(ifile.read(), before, after, limit, flags))
    finally:
        ofile.close()


RE_FLAG_TABLE = {'I': re.I,
@ -1758,7 +1755,8 @@ def prepend(path, *args, **kwargs):
        args = [kwargs['args']]

    try:
        contents = salt.utils.fopen(path).readlines()
        with salt.utils.fopen(path) as fhr:
            contents = fhr.readlines()
    except IOError:
        contents = []
@ -1846,7 +1844,8 @@ def touch(name, atime=None, mtime=None):
        mtime = int(mtime)
    try:
        if not os.path.exists(name):
            salt.utils.fopen(name, 'a')
            with salt.utils.fopen(name, 'a') as fhw:
                fhw.write('')

        if not atime and not mtime:
            times = None
@ -1948,11 +1947,8 @@ def truncate(path, length):
        salt '*' file.truncate /path/to/file 512
    '''
    path = os.path.expanduser(path)
    try:
        seek_fh = open(path, 'r+')
    with salt.utils.fopen(path, 'r+') as seek_fh:
        seek_fh.truncate(int(length))
    finally:
        seek_fh.close()


def link(src, path):
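Besides the fopen() swap, this rewrite fixes a latent bug: in the old try/finally form, if open() itself raised, the finally block referenced seek_fh before assignment and masked the original error with a NameError. The with statement only registers the cleanup once the file is actually open, and still guarantees the handle is closed on any exception from truncate().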
@ -2702,34 +2698,34 @@ def extract_hash(hash_fn, hash_type='sha256', file_name=''):
    name_sought = os.path.basename(file_name)
    log.debug('modules.file.py - extract_hash(): Extracting hash for file '
              'named: {0}'.format(name_sought))
    hash_fn_fopen = salt.utils.fopen(hash_fn, 'r')
    for hash_variant in HASHES:
        if hash_type == '' or hash_type == hash_variant[0]:
            log.debug('modules.file.py - extract_hash(): Will use regex to get'
                      ' a purely hexadecimal number of length ({0}), presumably hash'
                      ' type : {1}'.format(hash_variant[1], hash_variant[0]))
            hash_fn_fopen.seek(0)
            for line in hash_fn_fopen.read().splitlines():
                hash_array = re.findall(r'(?i)(?<![a-z0-9])[a-f0-9]{' + str(hash_variant[1]) + '}(?![a-z0-9])', line)
                log.debug('modules.file.py - extract_hash(): From "line": {0} '
                          'got : {1}'.format(line, hash_array))
                if hash_array:
                    if not partial_id:
                        source_sum = {'hsum': hash_array[0], 'hash_type': hash_variant[0]}
                        partial_id = True
    with salt.utils.fopen(hash_fn, 'r') as hash_fn_fopen:
        for hash_variant in HASHES:
            if hash_type == '' or hash_type == hash_variant[0]:
                log.debug('modules.file.py - extract_hash(): Will use regex to get'
                          ' a purely hexadecimal number of length ({0}), presumably hash'
                          ' type : {1}'.format(hash_variant[1], hash_variant[0]))
                hash_fn_fopen.seek(0)
                for line in hash_fn_fopen.read().splitlines():
                    hash_array = re.findall(r'(?i)(?<![a-z0-9])[a-f0-9]{' + str(hash_variant[1]) + '}(?![a-z0-9])', line)
                    log.debug('modules.file.py - extract_hash(): From "line": {0} '
                              'got : {1}'.format(line, hash_array))
                    if hash_array:
                        if not partial_id:
                            source_sum = {'hsum': hash_array[0], 'hash_type': hash_variant[0]}
                            partial_id = True

                    log.debug('modules.file.py - extract_hash(): Found: {0} '
                              '-- {1}'.format(source_sum['hash_type'],
                                              source_sum['hsum']))
                        log.debug('modules.file.py - extract_hash(): Found: {0} '
                                  '-- {1}'.format(source_sum['hash_type'],
                                                  source_sum['hsum']))

                if re.search(name_sought, line):
                    source_sum = {'hsum': hash_array[0], 'hash_type': hash_variant[0]}
                    log.debug('modules.file.py - extract_hash: For {0} -- '
                              'returning the {1} hash "{2}".'.format(
                                  name_sought,
                                  source_sum['hash_type'],
                                  source_sum['hsum']))
                    return source_sum
                        if re.search(name_sought, line):
                            source_sum = {'hsum': hash_array[0], 'hash_type': hash_variant[0]}
                            log.debug('modules.file.py - extract_hash: For {0} -- '
                                      'returning the {1} hash "{2}".'.format(
                                          name_sought,
                                          source_sum['hash_type'],
                                          source_sum['hsum']))
                            return source_sum

    if partial_id:
        log.debug('modules.file.py - extract_hash: Returning the partially '
@ -11,6 +11,7 @@ import re
import logging

# Import Salt libs
import salt.utils
from salt.exceptions import CommandExecutionError

log = logging.getLogger(__name__)
@ -35,17 +36,18 @@ def _get_mounts(fs_type):
    List mounted filesystems.
    '''
    mounts = {}
    for line in open("/proc/mounts").readlines():
        device, mntpnt, fstype, options, fs_freq, fs_passno = line.strip().split(" ")
        if fstype != fs_type:
            continue
        if mounts.get(device) is None:
            mounts[device] = []
    with salt.utils.fopen("/proc/mounts") as fhr:
        for line in fhr.readlines():
            device, mntpnt, fstype, options, fs_freq, fs_passno = line.strip().split(" ")
            if fstype != fs_type:
                continue
            if mounts.get(device) is None:
                mounts[device] = []

        mounts[device].append({
            'mount_point': mntpnt,
            'options': options.split(",")
        })
            mounts[device].append({
                'mount_point': mntpnt,
                'options': options.split(",")
            })

    return mounts
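For reference, each /proc/mounts record has six whitespace-separated fields, which is why the unpacking above expects exactly six names. A worked example with an illustrative line:

line = '/dev/sda1 / ext4 rw,relatime 0 0'
device, mntpnt, fstype, options, fs_freq, fs_passno = line.strip().split(' ')
mounts = {device: [{'mount_point': mntpnt, 'options': options.split(',')}]}
# -> {'/dev/sda1': [{'mount_point': '/', 'options': ['rw', 'relatime']}]}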
@ -12,7 +12,7 @@ from keyservers. Sign, encrypt and sign & encrypt text and files.
from __future__ import absolute_import

# Import python libs
import distutils.version
import distutils.version  # pylint: disable=import-error,no-name-in-module
import logging
import re
import time
@ -16,6 +16,9 @@ from __future__ import print_function
from __future__ import absolute_import
import re

# Import Salt libs
import salt.utils

__virtualname__ = 'ini'


@ -279,7 +282,7 @@ class _Ini(object):
        self.sections = []
        current_section = _Section('DEFAULT_IMPLICIT')
        self.sections.append(current_section)
        with open(self.file_name, 'r') as inifile:
        with salt.utils.fopen(self.file_name, 'r') as inifile:
            previous_line = None
            for line in inifile.readlines():
                # Make sure the empty lines between options are preserved
@ -295,7 +298,7 @@ class _Ini(object):
            previous_line = line

    def flush(self):
        with open(self.file_name, 'w') as outfile:
        with salt.utils.fopen(self.file_name, 'w') as outfile:
            outfile.write(self.current_contents())

    def dump(self):
@ -11,6 +11,7 @@ import os
import plistlib

# Import salt libs
import salt.utils
import salt.utils.decorators as decorators
import salt.ext.six as six

@ -57,7 +58,7 @@ def _available_services():

    try:
        # This assumes most of the plist files will be already in XML format
        with open(file_path):
        with salt.utils.fopen(file_path):
            plist = plistlib.readPlist(true_path)

    except Exception:
@ -567,7 +567,7 @@ class _LXCConfig(object):
        if self.name:
            self.path = '/var/lib/lxc/{0}/config'.format(self.name)
            if os.path.isfile(self.path):
                with open(self.path) as f:
                with salt.utils.fopen(self.path) as f:
                    for l in f.readlines():
                        match = self.pattern.findall((l.strip()))
                        if match:
@ -607,7 +607,7 @@ class _LXCConfig(object):
        content = self.as_string()
        # 2 step rendering to be sure not to open/wipe the config
        # before as_string succeeds.
        with open(self.path, 'w') as fic:
        with salt.utils.fopen(self.path, 'w') as fic:
            fic.write(content)
            fic.flush()

@ -1643,9 +1643,10 @@ def info(name):

    ret = {}

    config = [(v[0].strip(), v[1].strip()) for v in
              [l.split('#', 1)[0].strip().split('=', 1) for l in
               open(f).readlines()] if len(v) == 2]
    with salt.utils.fopen(f) as fhr:
        config = [(v[0].strip(), v[1].strip()) for v in
                  [l.split('#', 1)[0].strip().split('=', 1) for l in
                   fhr.readlines()] if len(v) == 2]

    ifaces = []
    current = None
@ -1791,7 +1792,7 @@ def update_lxc_conf(name, lxc_conf, lxc_conf_unset):
        ret['comment'] = (
            'Configuration does not exist: {0}'.format(lxc_conf_p))
    else:
        with open(lxc_conf_p, 'r') as fic:
        with salt.utils.fopen(lxc_conf_p, 'r') as fic:
            filtered_lxc_conf = []
            for row in lxc_conf:
                if not row:
@ -1848,12 +1849,10 @@ def update_lxc_conf(name, lxc_conf, lxc_conf_unset):
        conf_changed = conf != orig_config
        chrono = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
        if conf_changed:
            wfic = open('{0}.{1}'.format(lxc_conf_p, chrono), 'w')
            wfic.write(conf)
            wfic.close()
            wfic = open(lxc_conf_p, 'w')
            wfic.write(conf)
            wfic.close()
            with salt.utils.fopen('{0}.{1}'.format(lxc_conf_p, chrono), 'w') as wfic:
                wfic.write(conf)
            with salt.utils.fopen(lxc_conf_p, 'w') as wfic:
                wfic.write(conf)
            ret['comment'] = 'Updated'
            ret['result'] = True
        if (
@ -9,6 +9,9 @@ import os
import logging
from salt.ext.six.moves import map

# Import Salt libs
import salt.utils

log = logging.getLogger(__name__)

# XXX enable/disable support would be nice
@ -131,7 +134,7 @@ def _get_rc():
    try:
        # now read the system startup script /etc/rc
        # to know what are the system enabled daemons
        with open('/etc/rc', 'r') as handle:
        with salt.utils.fopen('/etc/rc', 'r') as handle:
            lines = handle.readlines()
    except IOError:
        log.error('Unable to read /etc/rc')
@ -31,7 +31,7 @@ __func_alias__ = {

def _file_or_content(file_):
    if os.path.exists(file_):
        with open(file_) as fic:
        with salt.utils.fopen(file_) as fic:
            return fic.read()
    return file_

@ -206,9 +206,9 @@ def mkconfig(config=None, tmp=None, id_=None, approve_key=True,
    privkeyfn = os.path.join(tmp, 'minion.pem')
    preseeded = pub_key and priv_key
    if preseeded:
        with open(pubkeyfn, 'w') as fic:
        with salt.utils.fopen(pubkeyfn, 'w') as fic:
            fic.write(_file_or_content(pub_key))
        with open(privkeyfn, 'w') as fic:
        with salt.utils.fopen(privkeyfn, 'w') as fic:
            fic.write(_file_or_content(priv_key))
        os.chmod(pubkeyfn, 0o600)
        os.chmod(privkeyfn, 0o600)
@ -343,8 +343,9 @@ def list_downloads():
    ret = []
    for update in _get_upgradable():
        for f in dist_files:
            if update.rsplit('-', 1)[0] in open(f).read():
                ret.append(update)
            with salt.utils.fopen(f) as fhr:
                if update.rsplit('-', 1)[0] in fhr.read():
                    ret.append(update)

    return ret
@ -322,7 +322,7 @@ def check_request(name=None):
    notify_path = os.path.join(__opts__['cachedir'], 'req_state.p')
    serial = salt.payload.Serial(__opts__)
    if os.path.isfile(notify_path):
        with open(notify_path, 'rb') as fp_:
        with salt.utils.fopen(notify_path, 'rb') as fp_:
            req = serial.load(fp_)
        if name:
            return req[name]
@ -25,8 +25,7 @@ configuration file.

'''

from __future__ import generators, with_statement
from __future__ import absolute_import
from __future__ import absolute_import, generators, with_statement

import time
import logging
@ -780,7 +779,7 @@ def _add_to_path_envvar(directory):
Adds directory to the PATH environment variable and returns the original
one.
'''
orig_path = os.environ["PATH"]
orig_path = os.environ.get('PATH', '')
if directory:
if not os.path.isdir(directory):
log.error("The given parameter is not a directory")
@ -1140,9 +1139,7 @@ def _write_config(config, newlines=2):
text = config[key]

try:
open_flags = 'a'

with open(__SYSLOG_NG_CONFIG_FILE, open_flags) as f:
with salt.utils.fopen(__SYSLOG_NG_CONFIG_FILE, 'a') as f:
f.write(text)

for i in range(0, newlines):

@ -185,14 +185,14 @@ def maybe_fix_ssl_version(ca_name, cacert_path=None):
ca_name)
ca_keyp = '{0}/{1}/{2}_ca_cert.key'.format(
cert_base_path(), ca_name, ca_name)
with open(certp) as fic:
with salt.utils.fopen(certp) as fic:
cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,
fic.read())
if cert.get_version() == 3:
log.info(
'Regenerating wrong x509 version '
'for certificate {0}'.format(certp))
with open(ca_keyp) as fic2:
with salt.utils.fopen(ca_keyp) as fic2:
try:
# try to determine the key bits
key = OpenSSL.crypto.load_privatekey(
@ -278,7 +278,7 @@ def get_ca(ca_name, as_text=False, cacert_path=None):
raise ValueError('Certificate does not exists for {0}'.format(ca_name))
else:
if as_text:
with open(certp) as fic:
with salt.utils.fopen(certp) as fic:
certp = fic.read()
return certp

@ -372,7 +372,7 @@ def create_ca(ca_name,
# try to reuse existing ssl key
key = None
if os.path.exists(ca_keyp):
with open(ca_keyp) as fic2:
with salt.utils.fopen(ca_keyp) as fic2:
# try to determine the key bits
key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM, fic2.read())
@ -420,24 +420,22 @@ def create_ca(ca_name,
if os.path.exists(ca_keyp):
bck = "{0}.{1}".format(ca_keyp, datetime.datetime.now().strftime(
"%Y%m%d%H%M%S"))
with open(ca_keyp) as fic:
with salt.utils.fopen(ca_keyp) as fic:
old_key = fic.read().strip()
if old_key.strip() == keycontent.strip():
write_key = False
else:
log.info('Saving old CA ssl key in {0}'.format(bck))
with open(bck, 'w') as bckf:
with salt.utils.fopen(bck, 'w') as bckf:
bckf.write(old_key)
os.chmod(bck, 0o600)
if write_key:
ca_key = salt.utils.fopen(ca_keyp, 'w')
ca_key.write(keycontent)
ca_key.close()
with salt.utils.fopen(ca_keyp, 'w') as ca_key:
ca_key.write(keycontent)

ca_crt = salt.utils.fopen(certp, 'w')
ca_crt.write(
OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))
ca_crt.close()
with salt.utils.fopen(certp, 'w') as ca_crt:
ca_crt.write(
OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, ca))

_write_cert_to_database(ca_name, ca)

@ -558,24 +556,20 @@ def create_csr(ca_name,
req.sign(key, digest)

# Write private key and request
priv_key = salt.utils.fopen(
'{0}/{1}/certs/{2}.key'.format(cert_base_path(),
ca_name, CN),
'w+'
)
priv_key.write(
OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
)
priv_key.close()

csr = salt.utils.fopen(csr_f, 'w+')
csr.write(
OpenSSL.crypto.dump_certificate_request(
OpenSSL.crypto.FILETYPE_PEM,
req
with salt.utils.fopen('{0}/{1}/certs/{2}.key'.format(
cert_base_path(),
ca_name, CN), 'w+') as priv_key:
priv_key.write(
OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
)

with salt.utils.fopen(csr_f, 'w+') as csr:
csr.write(
OpenSSL.crypto.dump_certificate_request(
OpenSSL.crypto.FILETYPE_PEM,
req
)
)
)
csr.close()

ret = 'Created Private Key: "{0}/{1}/certs/{2}.key." '.format(
cert_base_path(),

@ -697,28 +691,25 @@ def create_self_signed_cert(tls_dir='tls',
cert.sign(key, digest)

# Write private key and cert
priv_key = salt.utils.fopen(
'{0}/{1}/certs/{2}.key'.format(cert_base_path(),
tls_dir, CN),
'w+'
)
priv_key.write(
OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
)
priv_key.close()

crt = salt.utils.fopen('{0}/{1}/certs/{2}.crt'.format(
cert_base_path(),
tls_dir,
CN
), 'w+')
crt.write(
OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
cert
with salt.utils.fopen(
'{0}/{1}/certs/{2}.key'.format(cert_base_path(),
tls_dir, CN),
'w+'
) as priv_key:
priv_key.write(
OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
)

with salt.utils.fopen('{0}/{1}/certs/{2}.crt'.format(cert_base_path(),
tls_dir,
CN
), 'w+') as crt:
crt.write(
OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
cert
)
)
)
crt.close()

_write_cert_to_database(tls_dir, cert)

@ -800,33 +791,30 @@ def create_ca_signed_cert(ca_name, CN, days=365, cacert_path=None, digest='sha25

try:
maybe_fix_ssl_version(ca_name)
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/{2}_ca_cert.crt'.format(
cert_base_path(),
ca_name, ca_name
)).read()
with salt.utils.fopen('{0}/{1}/{2}_ca_cert.crt'.format(cert_base_path(),
ca_name,
ca_name)) as fhr:
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, fhr.read()
)
ca_key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/{2}_ca_cert.key'.format(
cert_base_path(),
ca_name,
ca_name
)).read()
with salt.utils.fopen('{0}/{1}/{2}_ca_cert.key'.format(cert_base_path(),
ca_name,
ca_name)) as fhr:
ca_key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
fhr.read()
)
except IOError:
return 'There is no CA named "{0}"'.format(ca_name)

try:
req = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/certs/{2}.csr'.format(
cert_base_path(),
ca_name,
CN
)).read()
)
with salt.utils.fopen('{0}/{1}/certs/{2}.csr'.format(cert_base_path(),
ca_name,
CN)) as fhr:
req = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_PEM,
fhr.read()
)
except IOError:
return 'There is no CSR that matches the CN "{0}"'.format(CN)

@ -856,18 +844,15 @@ def create_ca_signed_cert(ca_name, CN, days=365, cacert_path=None, digest='sha25
cert.set_pubkey(req.get_pubkey())
cert.sign(ca_key, digest)

crt = salt.utils.fopen('{0}/{1}/certs/{2}.crt'.format(
cert_base_path(),
ca_name,
CN
), 'w+')
crt.write(
with salt.utils.fopen('{0}/{1}/certs/{2}.crt'.format(cert_base_path(),
ca_name,
CN), 'w+') as crt:
crt.write(
OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
cert
)
)
crt.close()

_write_cert_to_database(ca_name, cert)

@ -924,34 +909,31 @@ def create_pkcs12(ca_name, CN, passphrase='', cacert_path=None):
return 'Certificate "{0}" already exists'.format(CN)

try:
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/{2}_ca_cert.crt'.format(
cert_base_path(),
ca_name,
ca_name
)).read()
with salt.utils.fopen('{0}/{1}/{2}_ca_cert.crt'.format(cert_base_path(),
ca_name,
ca_name)) as fhr:
ca_cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
fhr.read()
)
except IOError:
return 'There is no CA named "{0}"'.format(ca_name)

try:
cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/certs/{2}.crt'.format(
cert_base_path(),
ca_name,
CN
)).read()
)
key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
salt.utils.fopen('{0}/{1}/certs/{2}.key'.format(
cert_base_path(),
ca_name,
CN
)).read()
with salt.utils.fopen('{0}/{1}/certs/{2}.crt'.format(cert_base_path(),
ca_name,
CN)) as fhr:
cert = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM,
fhr.read()
)
with salt.utils.fopen('{0}/{1}/certs/{2}.key'.format(cert_base_path(),
ca_name,
CN)) as fhr:
key = OpenSSL.crypto.load_privatekey(
OpenSSL.crypto.FILETYPE_PEM,
fhr.read()
)
except IOError:
return 'There is no certificate that matches the CN "{0}"'.format(CN)

@ -961,11 +943,9 @@ def create_pkcs12(ca_name, CN, passphrase='', cacert_path=None):
pkcs12.set_ca_certificates([ca_cert])
pkcs12.set_privatekey(key)

with salt.utils.fopen('{0}/{1}/certs/{2}.p12'.format(
cert_base_path(),
ca_name,
CN
), 'w') as ofile:
with salt.utils.fopen('{0}/{1}/certs/{2}.p12'.format(cert_base_path(),
ca_name,
CN), 'w') as ofile:
ofile.write(pkcs12.export(passphrase=passphrase))

return ('Created PKCS#12 Certificate for "{0}": '

@ -427,7 +427,7 @@ def _upstart_disable(name):
Disable an Upstart service.
'''
override = '/etc/init/{0}.override'.format(name)
with open(override, 'w') as ofile:
with salt.utils.fopen(override, 'w') as ofile:
ofile.write('manual')
return _upstart_is_disabled(name)

@ -10,6 +10,10 @@ import shutil
import logging
import os
import os.path
try:
from shlex import quote as _cmd_quote # pylint: disable=E0611
except ImportError:
from pipes import quote as _cmd_quote

# Import salt libs
import salt.utils
@ -126,7 +130,7 @@ def create(path,
elif runas is not None and not user:
user = str(runas)

cmd = [venv_bin]
cmd = [_cmd_quote(venv_bin)]

if 'pyvenv' not in venv_bin:
# ----- Stop the user if pyvenv only options are used --------------->
@ -154,7 +158,7 @@ def create(path,
)
except ImportError:
# Unable to import?? Let's parse the version from the console
version_cmd = '{0} --version'.format(venv_bin)
version_cmd = '{0} --version'.format(_cmd_quote(venv_bin))
ret = __salt__['cmd.run_all'](version_cmd, runas=user)
if ret['retcode'] > 0 or not ret['stdout'].strip():
raise salt.exceptions.CommandExecutionError(
@ -183,7 +187,7 @@ def create(path,
'Requested python ({0}) does not appear '
'executable.'.format(python)
)
cmd.append('--python={0}'.format(python))
cmd.append('--python={0}'.format(_cmd_quote(python)))
if extra_search_dir is not None:
if isinstance(extra_search_dir, string_types) and \
extra_search_dir.strip() != '':
@ -191,7 +195,7 @@ def create(path,
e.strip() for e in extra_search_dir.split(',')
]
for entry in extra_search_dir:
cmd.append('--extra-search-dir={0}'.format(entry))
cmd.append('--extra-search-dir={0}'.format(_cmd_quote(entry)))
if never_download is True:
if virtualenv_version_info >= (1, 10):
log.info(
@ -203,7 +207,7 @@ def create(path,
else:
cmd.append('--never-download')
if prompt is not None and prompt.strip() != '':
cmd.append('--prompt={0!r}'.format(prompt))
cmd.append('--prompt={0!r}'.format(_cmd_quote(prompt)))
else:
# venv module from the Python >= 3.3 standard library

@ -244,7 +248,7 @@ def create(path,
cmd.append('--system-site-packages')

# Finally the virtualenv path
cmd.append(path)
cmd.append(_cmd_quote(path))

# Let's create the virtualenv
ret = __salt__['cmd.run_all'](' '.join(cmd), runas=user)
@ -310,7 +314,7 @@ def get_site_packages(venv):
raise salt.exceptions.CommandExecutionError(
"Path does not appear to be a virtualenv: '{0}'".format(bin_path))

return __salt__['cmd.exec_code'](bin_path,
return __salt__['cmd.exec_code'](_cmd_quote(bin_path),
'from distutils import sysconfig; print sysconfig.get_python_lib()')


@ -327,7 +331,7 @@ def _install_script(source, cwd, python, user, saltenv='base', use_vt=False):
os.chown(tmppath, __salt__['file.user_to_uid'](user), -1)
try:
return __salt__['cmd.run_all'](
'{0} {1}'.format(python, tmppath),
'{0} {1}'.format(_cmd_quote(python), _cmd_quote(tmppath)),
runas=user,
cwd=cwd,
env={'VIRTUAL_ENV': cwd},

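Besides the fopen conversions, the virtualenv hunks above route every user-supplied fragment (binary path, prompt, search dirs, target path) through _cmd_quote before the argv list is joined into one shell command string. A short illustration of what the quoting buys, using the same shlex/pipes import dance as the diff; the prompt value is a made-up example:

try:
    from shlex import quote as _cmd_quote  # Python 3.3+
except ImportError:
    from pipes import quote as _cmd_quote  # Python 2 fallback

# Without quoting, a malicious prompt would be interpreted by the
# shell once the argv list is joined with spaces.
prompt = "dev'; rm -rf /tmp/victim; '"
cmd = ['virtualenv', '--prompt={0}'.format(_cmd_quote(prompt)), '/tmp/venv']
print(' '.join(cmd))  # the prompt survives as one inert argument
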
@ -502,14 +502,15 @@ def _get_mounts():
List mounted filesystems.
'''
mounts = {}
for line in open("/proc/mounts").readlines():
device, mntpnt, fstype, options, fs_freq, fs_passno = line.strip().split(" ")
if fstype != 'xfs':
continue
mounts[device] = {
'mount_point': mntpnt,
'options': options.split(","),
}
with salt.utils.fopen("/proc/mounts") as fhr:
for line in fhr.readlines():
device, mntpnt, fstype, options, fs_freq, fs_passno = line.strip().split(" ")
if fstype != 'xfs':
continue
mounts[device] = {
'mount_point': mntpnt,
'options': options.split(","),
}

return mounts

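The _get_mounts rewrite keeps the same /proc/mounts parse, just inside a with block. Each row of that file is six whitespace-separated fields; only xfs rows are kept. A standalone version of the parse for reference, with the same field layout as the diff:

def get_xfs_mounts(mtab='/proc/mounts'):
    # Map each mounted xfs device to its mount point and options.
    mounts = {}
    with open(mtab) as fhr:
        for line in fhr:
            # device, mount point, fstype, options, dump freq, fsck passno
            device, mntpnt, fstype, options, _, _ = line.strip().split(' ')
            if fstype != 'xfs':
                continue
            mounts[device] = {
                'mount_point': mntpnt,
                'options': options.split(','),
            }
    return mounts
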
@ -1173,7 +1173,7 @@ def hold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W0613

targets = []
if pkgs:
for pkg in pkgs:
for pkg in salt.utils.repack_dictlist(pkgs):
ret = check_db(pkg)
if not ret[pkg]['found']:
raise SaltInvocationError(
@ -1266,7 +1266,8 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06

targets = []
if pkgs:
targets.extend(pkgs)
for pkg in salt.utils.repack_dictlist(pkgs):
targets.append(pkg)
elif sources:
for source in sources:
targets.append(next(iter(source)))

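The hold/unhold hunks stop iterating the raw pkgs list and go through salt.utils.repack_dictlist instead, so entries pinned to a version ({'pkgname': 'version'}) no longer break iteration; looping over the repacked dict yields bare package names, which is the fix for holding multiple packages combined with version pinning. A sketch of the assumed repacking behavior; this re-implementation is illustrative, not Salt's exact helper:

def repack_dictlist(data):
    # Collapse a list of strings and single-key dicts into one dict.
    repacked = {}
    for item in data:
        if isinstance(item, dict):
            repacked.update(item)
        else:
            repacked[item] = None
    return repacked


pkgs = ['bash', {'httpd': '2.4.10'}]  # mixed plain and version-pinned
for pkg in repack_dictlist(pkgs):
    print(pkg)  # 'bash', 'httpd'
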
@ -41,6 +41,7 @@ from salt.ext.six.moves.urllib.request import urlopen as _urlopen
# pylint: enable=import-error,no-name-in-module,redefined-builtin

# Import salt libs
import salt.utils
from salt.exceptions import CommandExecutionError


@ -73,9 +74,7 @@ def __virtual__():
'''
Only load if buildout libs are present
'''
if True:
return __virtualname__
return False
return __virtualname__


def _salt_callback(func, **kwargs):
@ -392,11 +391,10 @@ def _get_bootstrap_content(directory='.'):
Get the current bootstrap.py script content
'''
try:
fic = open(
os.path.join(
os.path.abspath(directory), 'bootstrap.py'))
oldcontent = fic.read()
fic.close()
with salt.utils.fopen(os.path.join(
os.path.abspath(directory),
'bootstrap.py')) as fic:
oldcontent = fic.read()
except (OSError, IOError):
oldcontent = ''
return oldcontent
@ -418,16 +416,15 @@ def _get_buildout_ver(directory='.'):
try:
files = _find_cfgs(directory)
for f in files:
fic = open(f)
buildout1re = re.compile(r'^zc\.buildout\s*=\s*1', RE_F)
dfic = fic.read()
if (
('buildout.dumppick' in dfic)
or
(buildout1re.search(dfic))
):
buildoutver = 1
fic.close()
with salt.utils.fopen(f) as fic:
buildout1re = re.compile(r'^zc\.buildout\s*=\s*1', RE_F)
dfic = fic.read()
if (
('buildout.dumppick' in dfic)
or
(buildout1re.search(dfic))
):
buildoutver = 1
bcontent = _get_bootstrap_content(directory)
if (
'--download-base' in bcontent
@ -521,7 +518,7 @@ def upgrade_bootstrap(directory='.',
if not os.path.isdir(dbuild):
os.makedirs(dbuild)
# only try to download once per buildout checkout
open(os.path.join(
salt.utils.fopen(os.path.join(
dbuild,
'{0}.updated_bootstrap'.format(buildout_ver)))
except (OSError, IOError):
@ -536,20 +533,17 @@ def upgrade_bootstrap(directory='.',
data = '\n'.join(ldata)
if updated:
comment = 'Bootstrap updated'
fic = open(b_py, 'w')
fic.write(data)
fic.close()
with salt.utils.fopen(b_py, 'w') as fic:
fic.write(data)
if dled:
afic = open(os.path.join(
dbuild, '{0}.updated_bootstrap'.format(buildout_ver)
), 'w')
afic.write('foo')
afic.close()
with salt.utils.fopen(os.path.join(dbuild,
'{0}.updated_bootstrap'.format(
buildout_ver)), 'w') as afic:
afic.write('foo')
except (OSError, IOError):
if oldcontent:
fic = open(b_py, 'w')
fic.write(oldcontent)
fic.close()
with salt.utils.fopen(b_py, 'w') as fic:
fic.write(oldcontent)

return {'comment': comment}

@ -739,9 +733,8 @@ def bootstrap(directory='.',
buildout_ver=buildout_ver)
# be sure which buildout bootstrap we have
b_py = os.path.join(directory, 'bootstrap.py')
fic = open(b_py)
content = fic.read()
fic.close()
with salt.utils.fopen(b_py) as fic:
content = fic.read()
if (
(False != test_release)
and ' --accept-buildout-test-releases' in content

@ -790,13 +790,14 @@ def list_locks():
return False

locks = {}
for meta in [item.split('\n') for item in open(LOCKS).read().split('\n\n')]:
lock = {}
for element in [el for el in meta if el]:
if ':' in element:
lock.update(dict([tuple([i.strip() for i in element.split(':', 1)]), ]))
if lock.get('solvable_name'):
locks[lock.pop('solvable_name')] = lock
with salt.utils.fopen(LOCKS) as fhr:
for meta in [item.split('\n') for item in fhr.read().split('\n\n')]:
lock = {}
for element in [el for el in meta if el]:
if ':' in element:
lock.update(dict([tuple([i.strip() for i in element.split(':', 1)]), ]))
if lock.get('solvable_name'):
locks[lock.pop('solvable_name')] = lock

return locks

@ -84,6 +84,14 @@ def _format_host(host, data):
hstrs = []
nchanges = 0
strip_colors = __opts__.get('strip_colors', True)

if isinstance(data, int) or isinstance(data, str):
# Data in this format is from saltmod.function,
# so it is always a 'change'
nchanges = 1
hstrs.append((u'{0} {1}{2[ENDC]}'
.format(hcolor, data, colors)))
hcolor = colors['CYAN'] # Print the minion name in cyan
if isinstance(data, list):
# Errors have been detected, list them in RED!
hcolor = colors['RED_BOLD']
@ -101,7 +109,7 @@ def _format_host(host, data):
data = _strip_clean(data)
# Verify that the needed data is present
for tname, info in data.items():
if '__run_num__' not in info:
if isinstance(info, dict) and '__run_num__' not in info:
err = (u'The State execution failed to record the order '
'in which all states were executed. The state '
'return missing data is:')

@ -173,9 +173,10 @@ def _construct_pillar(top_dir, follow_dir_links, raw_data=False):
continue

try:
pillar_node[file_name] = open(file_path, 'rb').read()
if raw_data is False and pillar_node[file_name].endswith('\n'):
pillar_node[file_name] = pillar_node[file_name][:-1]
with salt.utils.fopen(file_path, 'rb') as fhr:
pillar_node[file_name] = fhr.read()
if raw_data is False and pillar_node[file_name].endswith('\n'):
pillar_node[file_name] = pillar_node[file_name][:-1]
except IOError as err:
log.error('%s', str(err))

@ -281,6 +281,9 @@ import re
from os.path import isfile, join
from salt.ext.six.moves import input

# Import Salt libs
import salt.utils

# Only used when called from a terminal
log = None
if __name__ == '__main__':
@ -413,7 +416,8 @@ def ext_pillar(minion_id, pillar, resource, sequence, subkey=False, subkey_only=
fn = join(templdir, re.sub(r'\W', '_', entry.lower()) + '.yaml')
if isfile(fn):
log.info("Loading template: {0}".format(fn))
template = jinja2.Template(open(fn).read())
with salt.utils.fopen(fn) as fhr:
template = jinja2.Template(fhr.read())
output['pepa_templates'].append(fn)

try:
@ -509,7 +513,8 @@ def validate(output, resource):
pepa_schemas = []
for fn in glob.glob(valdir + '/*.yaml'):
log.info("Loading schema: {0}".format(fn))
template = jinja2.Template(open(fn).read())
with salt.utils.fopen(fn) as fhr:
template = jinja2.Template(fhr.read())
data = output
data['grains'] = __grains__.copy()
data['pillar'] = __pillar__.copy()
@ -534,7 +539,8 @@ if __name__ == '__main__':
sys.exit(1)

# Get configuration
__opts__.update(yaml.load(open(args.config).read()))
with salt.utils.fopen(args.config) as fh_:
__opts__.update(yaml.load(fh_.read()))

loc = 0
for name in [next(iter(list(e.keys()))) for e in __opts__['ext_pillar']]:

@ -118,8 +118,10 @@ def _do_search(conf):
'''
# Build LDAP connection args
connargs = {}
for name in ['server', 'port', 'tls', 'binddn', 'bindpw']:
for name in ['server', 'port', 'tls', 'binddn', 'bindpw', 'anonymous']:
connargs[name] = _config(name, conf)
if connargs['binddn'] and connargs['bindpw']:
connargs['anonymous'] = False
# Build search args
try:
_filter = conf['filter']
@ -136,12 +138,12 @@ def _do_search(conf):
try:
result = __salt__['ldap.search'](_filter, _dn, scope, attrs,
**connargs)['results'][0][1]
except IndexError: # we got no results for this search
log.debug(
'LDAP search returned no results for filter {0}'.format(
_filter
)
)
except IndexError: # we got no results for this search
result = {}
except Exception:
log.critical(

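The _do_search change pulls anonymous from the ext_pillar config alongside the other connection settings, then overrides it whenever explicit bind credentials are present. The precedence rule in isolation; the _config lookup is reduced to a plain dict get here for the sake of the sketch:

def build_connargs(conf, defaults):
    # Explicit bind credentials always win over anonymous binds.
    keys = ['server', 'port', 'tls', 'binddn', 'bindpw', 'anonymous']
    connargs = dict((name, conf.get(name, defaults.get(name))) for name in keys)
    if connargs['binddn'] and connargs['bindpw']:
        connargs['anonymous'] = False
    return connargs


conf = {'server': 'ldap.example.com', 'binddn': 'cn=admin,dc=example,dc=com',
        'bindpw': 's3cret', 'anonymous': True}
print(build_connargs(conf, {'port': 389})['anonymous'])  # False
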
@ -265,6 +265,7 @@ import logging
import re
from salt.ext.six import exec_

import salt.utils
from salt.loader import _create_loader
from salt.fileclient import get_file_client
from salt.utils.pyobjects import Registry, StateFactory, SaltObject, Map
@ -403,7 +404,7 @@ def render(template, saltenv='base', sls='', salt_data=True, **kwargs):
if not state_file:
raise ImportError("Could not find the file {0!r}".format(import_file))

with open(state_file) as f:
with salt.utils.fopen(state_file) as f:
state_contents = f.read()

state_locals = {}

@ -55,7 +55,7 @@ XMPP settings may also be configured as::
from __future__ import absolute_import

# Import python libs
import distutils.version
import distutils.version # pylint: disable=import-error,no-name-in-module
import logging
import pprint

@ -63,7 +63,7 @@ import salt.returners

HAS_LIBS = False
try:
from sleekxmpp import ClientXMPP as _ClientXMPP
from sleekxmpp import ClientXMPP as _ClientXMPP # pylint: disable=import-error
HAS_LIBS = True
except ImportError:
class _ClientXMPP(object):

@ -6,22 +6,22 @@ Control Linux Containers via Salt
'''

# Import python libs
from __future__ import print_function
from __future__ import absolute_import
from __future__ import absolute_import, print_function
import time
import os
import copy
import logging
from salt.ext.six import string_types

# Import Salt libs
from salt.utils.odict import OrderedDict as _OrderedDict
import salt.client
import salt.utils
import salt.utils.virt
import salt.utils.cloud
import salt.key
import salt.ext.six as six
from salt.utils.odict import OrderedDict as _OrderedDict

# Import 3rd-party lib
import salt.ext.six as six

log = logging.getLogger(__name__)

@ -190,7 +190,7 @@ def init(names, host=None, saltcloud_mode=False, quiet=False, **kwargs):
ret['comment'] = 'A host must be provided'
ret['result'] = False
return ret
if isinstance(names, string_types):
if isinstance(names, six.string_types):
names = names.split(',')
if not isinstance(names, list):
ret['comment'] = 'Container names are not formed as a list'
@ -305,10 +305,10 @@ def init(names, host=None, saltcloud_mode=False, quiet=False, **kwargs):
if explicit_auth:
fcontent = ''
if os.path.exists(key):
with open(key) as fic:
with salt.utils.fopen(key) as fic:
fcontent = fic.read().strip()
if pub_key.strip() != fcontent:
with open(key, 'w') as fic:
with salt.utils.fopen(key, 'w') as fic:
fic.write(pub_key)
fic.flush()
mid = j_ret.get('mid', None)

@ -19,6 +19,7 @@ from salt.ext.six.moves.urllib.request import urlopen as _urlopen # pylint: dis
# Import salt libs
import salt.key
import salt.client
import salt.utils
import salt.utils.minions
import salt.wheel
import salt.version
@ -457,9 +458,8 @@ objShell.Exec("{1}{2}")'''
' >>' + x + '.vbs\ncscript.exe /NoLogo ' + x + '.vbs'

batch_path = tempfile.mkstemp(suffix='.bat')[1]
batch_file = open(batch_path, 'wb')
batch_file.write(batch)
batch_file.close()
with salt.utils.fopen(batch_path, 'wb') as batch_file:
batch_file.write(batch)

for host in hosts.split(","):
argv = ['psexec', '\\\\' + host]

@ -44,6 +44,9 @@ from __future__ import absolute_import
# Import python libs
import os.path

# Import Salt libs
import salt.utils


def __virtual__():
return 'apache.config' in __salt__
@ -58,7 +61,7 @@ def configfile(name, config):
configs = __salt__['apache.config'](name, config, edit=False)
current_configs = ''
if os.path.exists(name):
with open(name) as config_file:
with salt.utils.fopen(name) as config_file:
current_configs = config_file.read()

if configs == current_configs.strip():
@ -75,7 +78,7 @@ def configfile(name, config):
return ret

try:
with open(name, 'w') as config_file:
with salt.utils.fopen(name, 'w') as config_file:
print(configs, file=config_file)
ret['changes'] = {
'old': current_configs,

@ -34,6 +34,9 @@ import re
import os.path
import difflib

# Import Salt libs
import salt.utils


def __virtual__():
return 'augeas' if 'augeas.execute' in __salt__ else False
@ -175,9 +178,8 @@ def change(name, context=None, changes=None, lens=None, **kwargs):
if context:
filename = re.sub('^/files|/$', '', context)
if os.path.isfile(filename):
file_ = open(filename, 'r')
old_file = file_.readlines()
file_.close()
with salt.utils.fopen(filename, 'r') as file_:
old_file = file_.readlines()

result = __salt__['augeas.execute'](context=context, lens=lens, commands=changes)
ret['result'] = result['retval']
@ -187,9 +189,8 @@ def change(name, context=None, changes=None, lens=None, **kwargs):
return ret

if old_file:
file_ = open(filename, 'r')
diff = ''.join(difflib.unified_diff(old_file, file_.readlines(), n=0))
file_.close()
with salt.utils.fopen(filename, 'r') as file_:
diff = ''.join(difflib.unified_diff(old_file, file_.readlines(), n=0))

if diff:
ret['comment'] = 'Changes have been saved'

@ -3872,9 +3872,11 @@ def serialize(name,
if merge_if_exists:
if os.path.isfile(name):
if formatter == 'yaml':
existing_data = yaml.safe_load(open(name, 'r'))
with salt.utils.fopen(name, 'r') as fhr:
existing_data = yaml.safe_load(fhr.read())
elif formatter == 'json':
existing_data = json.load(open(name, 'r'))
with salt.utils.fopen(name, 'r') as fhr:
existing_data = json.load(fhr)
else:
return {'changes': {},
'comment': ('{0} format is not supported for merging'

@ -21,9 +21,13 @@ Its output may be stored in a file or in a grain.
'''
from __future__ import absolute_import

# Import python libs
import sys
import os.path

# Import Salt libs
import salt.utils


def __virtual__():
'''
@ -169,7 +173,7 @@ def run(name,
+ grain + ":" + key
elif output is not None:
ret['changes']['query'] = "Executed. Output into " + output
with open(output, 'w') as output_file:
with salt.utils.fopen(output, 'w') as output_file:
if 'results' in query_result:
for res in query_result['results']:
for col, val in res:

@ -311,10 +311,10 @@ def daemonize(redirect_out=True):
# not cleanly redirected and the parent process dies when the
# multiprocessing process attempts to access stdout or err.
if redirect_out:
dev_null = open('/dev/null', 'r+')
os.dup2(dev_null.fileno(), sys.stdin.fileno())
os.dup2(dev_null.fileno(), sys.stdout.fileno())
os.dup2(dev_null.fileno(), sys.stderr.fileno())
with fopen('/dev/null', 'r+') as dev_null:
os.dup2(dev_null.fileno(), sys.stdin.fileno())
os.dup2(dev_null.fileno(), sys.stdout.fileno())
os.dup2(dev_null.fileno(), sys.stderr.fileno())


def daemonize_if(opts):
@ -2192,7 +2192,7 @@ def is_bin_file(path):
if not os.path.isfile(path):
return None
try:
with open(path, 'r') as fp_:
with fopen(path, 'r') as fp_:
return is_bin_str(fp_.read(2048))
except os.error:
return None

@ -2267,7 +2267,7 @@ def update_bootstrap(config, url=None):
else:
script_name = os.path.basename(url)
elif os.path.exists(url):
with open(url) as fic:
with salt.utils.fopen(url) as fic:
script_content = fic.read()
script_name = os.path.basename(url)
# in last case, assuming we got a script content

@ -858,7 +858,7 @@ def active_tcp():
'''
ret = {}
if os.path.isfile('/proc/net/tcp'):
with open('/proc/net/tcp', 'rb') as fp_:
with salt.utils.fopen('/proc/net/tcp', 'rb') as fp_:
for line in fp_:
if line.strip().startswith('sl'):
continue
@ -873,7 +873,7 @@ def local_port_tcp(port):
'''
ret = set()
if os.path.isfile('/proc/net/tcp'):
with open('/proc/net/tcp', 'rb') as fp_:
with salt.utils.fopen('/proc/net/tcp', 'rb') as fp_:
for line in fp_:
if line.strip().startswith('sl'):
continue
@ -893,7 +893,7 @@ def remote_port_tcp(port):
'''
ret = set()
if os.path.isfile('/proc/net/tcp'):
with open('/proc/net/tcp', 'rb') as fp_:
with salt.utils.fopen('/proc/net/tcp', 'rb') as fp_:
for line in fp_:
if line.strip().startswith('sl'):
continue

@ -13,6 +13,9 @@ from os import makedirs
from os.path import dirname, isdir
from errno import EEXIST

# Import Salt libs
import salt.utils

# Get logging started
log = logging.getLogger(__name__)

@ -174,7 +177,7 @@ class SaltSwift(object):
dirpath = dirname(local_file)
if dirpath and not isdir(dirpath):
mkdirs(dirpath)
fp = open(local_file, 'wb')
fp = salt.utils.fopen(local_file, 'wb')

read_length = 0
for chunk in body:
@ -197,9 +200,8 @@ class SaltSwift(object):
Upload a file to Swift
'''
try:
fp = open(local_file, 'rb')
self.conn.put_object(cont, obj, fp)
fp.close()
with salt.utils.fopen(local_file, 'rb') as fp_:
self.conn.put_object(cont, obj, fp_)
return True
except Exception as exc:
log.error('There was an error::')

@ -133,6 +133,20 @@ scheduler to skip this first run and wait until the next scheduled run.
kwargs:
test: True

.. versionadded:: 2014.7.0

schedule:
job1:
function: state.sls
cron: '*/15 * * * *'
args:
- httpd
kwargs:
test: True

The scheduler also supports scheduling jobs using a cron like format. This requires the
python-croniter library.

The scheduler also supports ensuring that there are no more than N copies of
a particular routine running. Use this for jobs that may be long-running
and could step on each other or pile up in case of infrastructure outage.

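The docstring above documents the new cron-style scheduling, which relies on the python-croniter library to turn an expression into concrete run times. Roughly how that resolution works, assuming croniter is installed:

from datetime import datetime

from croniter import croniter

# '*/15 * * * *' fires at minutes 0, 15, 30 and 45 of every hour.
base = datetime(2014, 11, 7, 12, 7)
itr = croniter('*/15 * * * *', base)
print(itr.get_next(datetime))  # 2014-11-07 12:15:00
print(itr.get_next(datetime))  # 2014-11-07 12:30:00
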
@ -560,8 +574,9 @@ class Schedule(object):
when = 0
seconds = 0
cron = 0

now = int(time.time())
time_conflict = False

for item in ['seconds', 'minutes', 'hours', 'days']:
if item in data and 'when' in data:
time_conflict = True
@ -594,7 +609,6 @@ class Schedule(object):

if isinstance(data['when'], list):
_when = []
now = int(time.time())
for i in data['when']:
if ('whens' in self.opts['pillar'] and
i in self.opts['pillar']['whens']):
@ -643,7 +657,7 @@ class Schedule(object):
if '_when' in data and data['_when'] != when:
data['_when_run'] = True
data['_when'] = when
seconds = when - int(time.time())
seconds = when - now

# scheduled time is in the past
if seconds < 0:
@ -733,14 +747,16 @@ class Schedule(object):
# loop interval needed. If it is lower then overwrite variable
# external loops using can then check this variable for how often
# they need to reschedule themselves
if seconds < self.loop_interval:
self.loop_interval = seconds
now = int(time.time())
# Not used with 'when' parameter, causes run away jobs and CPU
# spikes.
if 'when' not in data:
if seconds < self.loop_interval:
self.loop_interval = seconds
run = False

if job in self.intervals:
if 'when' in data:
if now - when >= seconds:
if seconds == 0:
if data['_when_run']:
data['_when_run'] = False
run = True
@ -763,7 +779,7 @@ class Schedule(object):
data['_seconds'] = 0

if 'when' in data:
if now - when >= seconds:
if seconds == 0:
if data['_when_run']:
data['_when_run'] = False
run = True
@ -868,7 +884,7 @@ class Schedule(object):
if self.opts.get('multiprocessing', True):
proc.join()
finally:
self.intervals[job] = int(time.time())
self.intervals[job] = now


def clean_proc_dir(opts):

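A detail worth noting in the Schedule hunks: now = int(time.time()) is read once per evaluation pass and reused for every comparison (and for self.intervals[job]) instead of re-reading the clock at each site. Re-reading mid-pass can put inconsistent timestamps on the two sides of a comparison; a toy demonstration of the hazard:

import time


def drifty():
    # Two separate reads can straddle a second boundary:
    # usually 0, occasionally -1.
    return int(time.time()) - int(time.time())


def stable():
    now = int(time.time())  # one read, reused everywhere
    return now - now        # always 0
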
@ -96,16 +96,17 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
thintar = os.path.join(thindir, 'thin.tgz')
thinver = os.path.join(thindir, 'version')
salt_call = os.path.join(thindir, 'salt-call')
with open(salt_call, 'w+') as fp_:
with salt.utils.fopen(salt_call, 'w+') as fp_:
fp_.write(SALTCALL)
if os.path.isfile(thintar):
if overwrite or not os.path.isfile(thinver):
try:
os.remove(thintar)
except OSError:
pass
elif open(thinver).read() == salt.__version__:
return thintar
with salt.utils.fopen(thinver) as fh_:
if overwrite or not os.path.isfile(thinver):
try:
os.remove(thintar)
except OSError:
pass
elif fh_.read() == salt.__version__:
return thintar
tops = [
os.path.dirname(salt.__file__),
os.path.dirname(jinja2.__file__),
@ -179,7 +180,7 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods=''):
tempdir = None
os.chdir(thindir)
tfp.add('salt-call')
with open(thinver, 'w+') as fp_:
with salt.utils.fopen(thinver, 'w+') as fp_:
fp_.write(salt.__version__)
os.chdir(os.path.dirname(thinver))
tfp.add('version')

@ -407,7 +407,7 @@ class TestDaemon(object):
if keygen_ed25519_err:
print('ssh-keygen had errors: {0}'.format(keygen_ed25519_err))

with open(os.path.join(TMP_CONF_DIR, 'sshd_config'), 'a') as ssh_config:
with salt.utils.fopen(os.path.join(TMP_CONF_DIR, 'sshd_config'), 'a') as ssh_config:
ssh_config.write('AuthorizedKeysFile {0}\n'.format(auth_key_file))
ssh_config.write('HostKey {0}\n'.format(server_dsa_priv_key_file))
ssh_config.write('HostKey {0}\n'.format(server_ecdsa_priv_key_file))
@ -426,7 +426,7 @@ class TestDaemon(object):
print('sshd had errors on startup: {0}'.format(sshd_err))
roster_path = os.path.join(FILES, 'conf/_ssh/roster')
shutil.copy(roster_path, TMP_CONF_DIR)
with open(os.path.join(TMP_CONF_DIR, 'roster'), 'a') as roster:
with salt.utils.fopen(os.path.join(TMP_CONF_DIR, 'roster'), 'a') as roster:
roster.write(' user: {0}\n'.format(pwd.getpwuid(os.getuid()).pw_name))
roster.write(' priv: {0}/{1}'.format(TMP_CONF_DIR, 'key_test'))

@ -551,7 +551,7 @@ class TestDaemon(object):

for entry in ('master', 'minion', 'sub_minion', 'syndic_master'):
computed_config = copy.deepcopy(locals()['{0}_opts'.format(entry)])
open(os.path.join(TMP_CONF_DIR, entry), 'w').write(
salt.utils.fopen(os.path.join(TMP_CONF_DIR, entry), 'w').write(
yaml.dump(computed_config, default_flow_style=False)
)
# <---- Transcribe Configuration -----------------------------------------------------------------------------
@ -738,11 +738,12 @@ class TestDaemon(object):
except OSError as exc:
if exc.errno != 3:
raise
try:
os.kill(int(open(self.sshd_pidfile).read()), signal.SIGKILL)
except OSError as exc:
if exc.errno != 3:
raise
with salt.utils.fopen(self.sshd_pidfile) as fhr:
try:
os.kill(int(fhr.read()), signal.SIGKILL)
except OSError as exc:
if exc.errno != 3:
raise

def _exit_mockbin(self):
path = os.environ.get('PATH', '')

@ -1,12 +1,15 @@
# -*- coding: utf-8 -*-

# Import python libs
import os

# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import salt libs
import integration
import os
import salt.utils


class StdTest(integration.ModuleCase):
@ -41,7 +44,7 @@ class StdTest(integration.ModuleCase):
# create fake minion
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'footest')
# touch the file
open(key_file, 'a').close()
salt.utils.fopen(key_file, 'a').close()
# ping that minion and ensure it times out
try:
cmd_iter = self.client.cmd_cli(

@ -9,7 +9,7 @@ import random

# Import Salt Libs
import integration
from salt.utils import mkstemp, fopen
import salt.utils
from salt.exceptions import CommandExecutionError

# Import Salt Testing Libs
@ -155,16 +155,12 @@ class DarwinSysctlModuleTest(integration.ModuleCase):
file will be restored in tearDown
'''
# Create new temporary file path and open needed files
org_conf = fopen(CONFIG, 'r')
temp_path = mkstemp()
temp_sysconf = open(temp_path, 'w')

# write sysctl lines to temp file
for line in org_conf:
temp_sysconf.write(line)
org_conf.close()
temp_sysconf.close()

temp_path = salt.utils.mkstemp()
with salt.utils.fopen(CONFIG, 'r') as org_conf:
with salt.utils.fopen(temp_path, 'w') as temp_sysconf:
# write sysctl lines to temp file
for line in org_conf:
temp_sysconf.write(line)
return temp_path

def __restore_sysctl(self):
@ -174,14 +170,12 @@ class DarwinSysctlModuleTest(integration.ModuleCase):
# If sysctl testing file exists, delete it
if os.path.isfile(CONFIG):
os.remove(CONFIG)
temp_sysctl = open(self.conf, 'r')
sysctl = open(CONFIG, 'w')

# write temp lines to sysctl file to restore
for line in temp_sysctl:
sysctl.write(line)
temp_sysctl.close()
sysctl.close()
with salt.utils.fopen(self.conf, 'r') as temp_sysctl:
with salt.utils.fopen(CONFIG, 'w') as sysctl:
for line in temp_sysctl:
sysctl.write(line)

# delete temporary file
os.remove(self.conf)
@ -190,13 +184,11 @@ class DarwinSysctlModuleTest(integration.ModuleCase):
'''
Returns True if given line is present in file
'''
f_in = open(conf_file, 'r')
for line in f_in:
if to_find in line:
f_in.close()
return True
f_in.close()
return False
with salt.utils.fopen(conf_file, 'r') as f_in:
for line in f_in:
if to_find in line:
return True
return False

@destructiveTest
@skipIf(os.geteuid() != 0, 'You must be logged in as root to run this test')

@ -3,7 +3,7 @@
from __future__ import absolute_import

# Import Python Libs
from distutils.version import LooseVersion
from distutils.version import LooseVersion # pylint: disable=import-error,no-name-in-module

# Import Salt Testing libs
from salttesting import skipIf

@ -67,9 +67,9 @@ class PipModuleTest(integration.ModuleCase):
# Create a requirements file that depends on another one.
req1_filename = os.path.join(self.venv_dir, 'requirements.txt')
req2_filename = os.path.join(self.venv_dir, 'requirements2.txt')
with open(req1_filename, 'wb') as f:
with salt.utils.fopen(req1_filename, 'wb') as f:
f.write('-r requirements2.txt')
with open(req2_filename, 'wb') as f:
with salt.utils.fopen(req2_filename, 'wb') as f:
f.write('pep8')

this_user = pwd.getpwuid(os.getuid())[0]

@ -139,7 +139,9 @@ class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
if not os.path.isdir(config_dir):
os.makedirs(config_dir)

master_config = yaml.load(open(self.get_config_file_path('master')).read())
with salt.utils.fopen(self.get_config_file_path('master')) as fhr:
master_config = yaml.load(fhr.read())

master_root_dir = master_config['root_dir']
this_minion_key = os.path.join(
master_root_dir, 'pki', 'minions', 'minion_test_issue_2731'
@ -165,9 +167,10 @@ class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

start = datetime.now()
# Let's first test with a master running
open(minion_config_file, 'w').write(
yaml.dump(minion_config, default_flow_style=False)
)
with salt.utils.fopen(minion_config_file, 'w') as fh_:
fh_.write(
yaml.dump(minion_config, default_flow_style=False)
)
ret = self.run_script(
'salt-call',
'--config-dir {0} cmd.run "echo foo"'.format(
@ -194,9 +197,10 @@ class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
# Now let's remove the master configuration
minion_config.pop('master')
minion_config.pop('master_port')
open(minion_config_file, 'w').write(
yaml.dump(minion_config, default_flow_style=False)
)
with salt.utils.fopen(minion_config_file, 'w') as fh_:
fh_.write(
yaml.dump(minion_config, default_flow_style=False)
)

out = self.run_script(
'salt-call',
@ -241,9 +245,10 @@ class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

# Should work with local file client
minion_config['file_client'] = 'local'
open(minion_config_file, 'w').write(
yaml.dump(minion_config, default_flow_style=False)
)
with salt.utils.fopen(minion_config_file, 'w') as fh_:
fh_.write(
yaml.dump(minion_config, default_flow_style=False)
)
ret = self.run_script(
'salt-call',
'--config-dir {0} cmd.run "echo foo"'.format(
@ -268,13 +273,13 @@ class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

os.chdir(config_dir)

minion_config = yaml.load(
open(self.get_config_file_path('minion'), 'r').read()
)
minion_config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
open(os.path.join(config_dir, 'minion'), 'w').write(
yaml.dump(minion_config, default_flow_style=False)
)
with salt.utils.fopen(self.get_config_file_path('minion'), 'r') as fh_:
minion_config = yaml.load(fh_.read())
minion_config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
with salt.utils.fopen(os.path.join(config_dir, 'minion'), 'w') as fh_:
fh_.write(
yaml.dump(minion_config, default_flow_style=False)
)
ret = self.run_script(
'salt-call',
'--config-dir {0} cmd.run "echo foo"'.format(

@ -45,7 +45,8 @@ class CopyTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
'files', 'file', 'base', 'testfile'
)
)
testfile_contents = salt.utils.fopen(testfile, 'r').read()
with salt.utils.fopen(testfile, 'r') as fh_:
testfile_contents = fh_.read()

for idx, minion in enumerate(minions):
ret = self.run_salt(
@ -121,13 +122,13 @@ class CopyTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
os.chdir(config_dir)

config_file_name = 'master'
config = yaml.load(
open(self.get_config_file_path(config_file_name), 'r').read()
)
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
open(os.path.join(config_dir, config_file_name), 'w').write(
yaml.dump(config, default_flow_style=False)
)
with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = yaml.load(fhr.read())
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)

ret = self.run_script(
self._call_binary_,

@ -13,6 +13,7 @@ ensure_in_syspath('../../')

# Import salt libs
import integration
import salt.utils


class KeyTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
@ -234,13 +235,13 @@ class KeyTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
os.chdir(config_dir)

config_file_name = 'master'
config = yaml.load(
open(self.get_config_file_path(config_file_name), 'r').read()
)
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
open(os.path.join(config_dir, config_file_name), 'w').write(
yaml.dump(config, default_flow_style=False)
)
with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = yaml.load(fhr.read())
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)
ret = self.run_script(
self._call_binary_,
'--config-dir {0} -L'.format(

@ -19,6 +19,7 @@ ensure_in_syspath('../../')

# Import salt libs
import integration
import salt.utils


class MasterTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
@ -35,17 +36,17 @@ class MasterTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

config_file_name = 'master'
pid_path = os.path.join(config_dir, '{0}.pid'.format(config_file_name))
config = yaml.load(
open(self.get_config_file_path(config_file_name), 'r').read()
)
config['root_dir'] = config_dir
config['log_file'] = 'file:///tmp/log/LOG_LOCAL3'
config['ret_port'] = config['ret_port'] + 10
config['publish_port'] = config['publish_port'] + 10
with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = yaml.load(fhr.read())
config['root_dir'] = config_dir
config['log_file'] = 'file:///tmp/log/LOG_LOCAL3'
config['ret_port'] = config['ret_port'] + 10
config['publish_port'] = config['publish_port'] + 10

open(os.path.join(config_dir, config_file_name), 'w').write(
yaml.dump(config, default_flow_style=False)
)
with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)

ret = self.run_script(
self._call_binary_,
@ -60,10 +61,11 @@ class MasterTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

# Now kill it if still running
if os.path.exists(pid_path):
try:
os.kill(int(open(pid_path).read()), signal.SIGKILL)
except OSError:
pass
with salt.utils.fopen(pid_path) as fhr:
try:
os.kill(int(fhr.read()), signal.SIGKILL)
except OSError:
pass
try:
self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:')))
self.assertIn(

@ -12,6 +12,7 @@ ensure_in_syspath('../../')

# Import salt libs
import integration
import salt.utils


class MatchTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
@ -248,13 +249,15 @@ class MatchTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
os.chdir(config_dir)

config_file_name = 'master'
config = yaml.load(
open(self.get_config_file_path(config_file_name), 'r').read()
)
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
open(os.path.join(config_dir, config_file_name), 'w').write(
yaml.dump(config, default_flow_style=False)
)
with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = yaml.load(
fhr.read()
)
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)
ret = self.run_script(
self._call_binary_,
'--config-dir {0} minion test.ping'.format(

@ -19,6 +19,7 @@ ensure_in_syspath('../../')

# Import salt libs
import integration
import salt.utils


class MinionTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
@ -35,14 +36,14 @@ class MinionTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

config_file_name = 'minion'
pid_path = os.path.join(config_dir, '{0}.pid'.format(config_file_name))
config = yaml.load(
open(self.get_config_file_path(config_file_name), 'r').read()
)
config['log_file'] = 'file:///tmp/log/LOG_LOCAL3'
with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = yaml.load(fhr.read())
config['log_file'] = 'file:///tmp/log/LOG_LOCAL3'

open(os.path.join(config_dir, config_file_name), 'w').write(
yaml.dump(config, default_flow_style=False)
)
with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)

ret = self.run_script(
self._call_binary_,
@ -57,10 +58,11 @@ class MinionTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

# Now kill it if still running
if os.path.exists(pid_path):
try:
os.kill(int(open(pid_path).read()), signal.SIGKILL)
except OSError:
pass
with salt.utils.fopen(pid_path) as fhr:
try:
os.kill(int(fhr.read()), signal.SIGKILL)
except OSError:
pass
try:
self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:')))
self.assertIn(

@ -15,6 +15,7 @@ ensure_in_syspath('../../')

# Import salt libs
import integration
import salt.utils


class RunTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
@ -62,13 +63,13 @@ class RunTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
os.chdir(config_dir)

config_file_name = 'master'
config = yaml.load(
open(self.get_config_file_path(config_file_name), 'r').read()
)
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
open(os.path.join(config_dir, config_file_name), 'w').write(
yaml.dump(config, default_flow_style=False)
)
with salt.utils.fopen(self.get_config_file_path(config_file_name), 'r') as fhr:
config = yaml.load(fhr.read())
config['log_file'] = 'file:///dev/log/LOG_LOCAL3'
with salt.utils.fopen(os.path.join(config_dir, config_file_name), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)
ret = self.run_script(
self._call_binary_,
'--config-dir {0} -d'.format(

@ -19,6 +19,7 @@ ensure_in_syspath('../../')

# Import salt libs
import integration
import salt.utils


class SyndicTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):
@ -35,18 +36,18 @@ class SyndicTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

for fname in ('master', 'minion'):
pid_path = os.path.join(config_dir, '{0}.pid'.format(fname))
config = yaml.load(
open(self.get_config_file_path(fname), 'r').read()
)
config['log_file'] = config['syndic_log_file'] = 'file:///tmp/log/LOG_LOCAL3'
config['root_dir'] = config_dir
if 'ret_port' in config:
config['ret_port'] = int(config['ret_port']) + 10
config['publish_port'] = int(config['publish_port']) + 10
with salt.utils.fopen(self.get_config_file_path(fname), 'r') as fhr:
config = yaml.load(fhr.read())
config['log_file'] = config['syndic_log_file'] = 'file:///tmp/log/LOG_LOCAL3'
config['root_dir'] = config_dir
if 'ret_port' in config:
config['ret_port'] = int(config['ret_port']) + 10
config['publish_port'] = int(config['publish_port']) + 10

open(os.path.join(config_dir, fname), 'w').write(
yaml.dump(config, default_flow_style=False)
)
with salt.utils.fopen(os.path.join(config_dir, fname), 'w') as fhw:
fhw.write(
yaml.dump(config, default_flow_style=False)
)

ret = self.run_script(
self._call_binary_,
@ -61,10 +62,11 @@ class SyndicTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

# Now kill it if still running
if os.path.exists(pid_path):
try:
os.kill(int(open(pid_path).read()), signal.SIGKILL)
except OSError:
pass
with salt.utils.fopen(pid_path) as fhr:
try:
os.kill(int(fhr.read()), signal.SIGKILL)
except OSError:
pass
try:
self.assertFalse(os.path.isdir(os.path.join(config_dir, 'file:')))
self.assertIn(

@ -55,7 +55,8 @@ class CompileTest(integration.ModuleCase):
        ]
        try:
            ret = self.run_function('state.template_str', ['\n'.join(template)], timeout=120)
            self.assertEqual('base', open(managed_file).read())
            with salt.utils.fopen(managed_file) as fhr:
                self.assertEqual('base', fhr.read())
        finally:
            if os.path.isfile(managed_file):
                os.unlink(managed_file)
@ -69,7 +70,8 @@ class CompileTest(integration.ModuleCase):
        ]
        try:
            ret = self.run_function('state.template_str', ['\n'.join(template)], timeout=120)
            self.assertEqual('base', open(managed_file).read())
            with salt.utils.fopen(managed_file) as fhr:
                self.assertEqual('base', fhr.read())
        finally:
            if os.path.isfile(managed_file):
                os.unlink(managed_file)

@ -182,11 +182,12 @@ class SSHAuthStateTests(integration.ModuleCase,
        self.assertSaltStateChangesEqual(
            ret, {'AAAAB3NzaC1kcQ9J5bYTEyZ==': 'New'}
        )
        self.assertEqual(
            open(authorized_keys_file, 'r').read(),
            'ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root\n'
            'ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {0}\n'.format(username)
        )
        with salt.utils.fopen(authorized_keys_file, 'r') as fhr:
            self.assertEqual(
                fhr.read(),
                'ssh-rsa AAAAB3NzaC1kc3MAAACBAL0sQ9fJ5bYTEyY== root\n'
                'ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {0}\n'.format(username)
            )

    @destructiveTest
    @skipIf(os.geteuid() != 0, 'you must be root to run this test')
@ -219,10 +220,11 @@ class SSHAuthStateTests(integration.ModuleCase,
            comment=username
        )
        self.assertSaltTrueReturn(ret)
        self.assertEqual(
            open(authorized_keys_file, 'r').read(),
            'ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {0}\n'.format(username)
        )
        with salt.utils.fopen(authorized_keys_file, 'r') as fhr:
            self.assertEqual(
                fhr.read(),
                'ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {0}\n'.format(username)
            )

        os.unlink(authorized_keys_file)

@ -236,10 +238,11 @@ class SSHAuthStateTests(integration.ModuleCase,
            saltenv='prod'
        )
        self.assertSaltTrueReturn(ret)
        self.assertEqual(
            open(authorized_keys_file, 'r').read(),
            'ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {0}\n'.format(username)
        )
        with salt.utils.fopen(authorized_keys_file, 'r') as fhr:
            self.assertEqual(
                fhr.read(),
                'ssh-rsa AAAAB3NzaC1kcQ9J5bYTEyZ== {0}\n'.format(username)
            )


if __name__ == '__main__':

@ -77,7 +77,8 @@ class VirtualenvTest(integration.ModuleCase,
        ]

        # Let's populate the requirements file, just pep-8 for now
        open(requirements_file_path, 'a').write('pep8==1.3.3\n')
        with salt.utils.fopen(requirements_file_path, 'a') as fhw:
            fhw.write('pep8==1.3.3\n')

        # Let's run our state!!!
        try:
@ -104,7 +105,8 @@ class VirtualenvTest(integration.ModuleCase,
        self.assertNotIn('zope.interface==4.0.1', ret)

        # Now let's update the requirements file, which is now cached.
        open(requirements_file_path, 'w').write('zope.interface==4.0.1\n')
        with salt.utils.fopen(requirements_file_path, 'w') as fhw:
            fhw.write('zope.interface==4.0.1\n')

        # Let's run our state!!!
        try:

@ -7,7 +7,8 @@
import copy
import os
import shutil
import tempfile
from salt.utils import files
import salt.utils
from salt.utils import files as util_files
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
@ -30,7 +31,7 @@ class FilesTestCase(TestCase):
            os.makedirs(current_directory)
            for name, content in files.items():
                path = os.path.join(temp_directory, folder, name)
                with open(path, 'w+') as fh:
                with salt.utils.fopen(path, 'w+') as fh:
                    fh.write(content)

    def _validate_folder_structure_and_contents(self, target_directory,
@ -38,7 +39,7 @@ class FilesTestCase(TestCase):
        for folder, files in desired_structure.items():
            for name, content in files.items():
                path = os.path.join(target_directory, folder, name)
                with open(path) as fh:
                with salt.utils.fopen(path) as fh:
                    assert fh.read().strip() == content

    def setUp(self):
@ -63,7 +64,7 @@ class FilesTestCase(TestCase):
        }
        self._create_temp_structure(test_target_directory, TARGET_STRUCTURE)
        try:
            files.recursive_copy(self.temp_dir, test_target_directory)
            util_files.recursive_copy(self.temp_dir, test_target_directory)
            DESIRED_STRUCTURE = copy.copy(TARGET_STRUCTURE)
            DESIRED_STRUCTURE.update(self.STRUCTURE)
            self._validate_folder_structure_and_contents(

@ -4,9 +4,10 @@
import random
import string
from copy import deepcopy
from distutils.version import LooseVersion
from distutils.version import LooseVersion  # pylint: disable=import-error,no-name-in-module

# import Python Third Party Libs
# pylint: disable=import-error
try:
    import boto
    HAS_BOTO = True
@ -29,6 +30,7 @@ except ImportError:
        def stub_function(self):
            pass
        return stub_function
# pylint: enable=import-error

# Import Salt Libs
from salt.utils.odict import OrderedDict

@ -1,13 +1,8 @@
# -*- coding: utf-8 -*-

# import Python Third Party Libs

from mock import patch

from salt.exceptions import SaltInvocationError, CommandExecutionError
from salt.modules.boto_vpc import _maybe_set_name_tag, _maybe_set_tags


# import Third Party Libs
from salttesting.mock import patch
# pylint: disable=import-error,no-name-in-module
try:
    import boto
    from boto.exception import BotoServerError
@ -37,10 +32,13 @@ except ImportError:
        return stub_function

# Import Python libs
from distutils.version import LooseVersion
from distutils.version import LooseVersion  # pylint: disable=no-name-in-module
# pylint: enable=import-error

# Import Salt Libs
from salt.modules import boto_vpc
from salt.exceptions import SaltInvocationError, CommandExecutionError
from salt.modules.boto_vpc import _maybe_set_name_tag, _maybe_set_tags

# Import Salt Testing Libs
from salttesting import skipIf, TestCase
@ -1450,6 +1448,5 @@ class BotoVpcRouteTablesTestCase(BotoVpcTestCaseBase):


if __name__ == '__main__':
    from integration import run_tests

    from integration import run_tests  # pylint: disable=import-error
    run_tests(BotoVpcTestCase, needs_daemon=False)

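[Reviewer note] Both boto test modules above adopt the same guard: the third-party import is wrapped in localized pylint directives, so linting stays clean on machines where boto is not installed, and the disable is scoped rather than file-wide. A minimal sketch of that guard, with `somelib`/`HAS_SOMELIB` as stand-in names (the real hunks guard `boto`):

    # Sketch of the guarded third-party import pattern from the boto test
    # modules; 'somelib' and HAS_SOMELIB are hypothetical stand-ins.
    # pylint: disable=import-error
    try:
        import somelib
        HAS_SOMELIB = True
    except ImportError:
        # Tests that need somelib can check this flag and skip themselves.
        HAS_SOMELIB = False
    # pylint: enable=import-error
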
@ -13,6 +13,7 @@ from salttesting.mock import MagicMock
ensure_in_syspath('../../')

# Import Salt libs
import salt.utils
from salt.modules import file as filemod
from salt.modules import config as configmod
from salt.modules import cmdmod
@ -61,7 +62,7 @@ class FileReplaceTestCase(TestCase):
    def test_replace(self):
        filemod.replace(self.tfile.name, r'Etiam', 'Salticus', backup=False)

        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            self.assertIn('Salticus', fp.read())

    def test_replace_append_if_not_found(self):
@ -80,19 +81,19 @@ class FileReplaceTestCase(TestCase):
            tfile.write(base + '\n')
            tfile.flush()
            filemod.replace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), expected)
        # File not ending with a newline, no match
        with tempfile.NamedTemporaryFile() as tfile:
            tfile.write(base)
            tfile.flush()
            filemod.replace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), expected)
        # A newline should not be added in empty files
        with tempfile.NamedTemporaryFile() as tfile:
            filemod.replace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), args['repl'] + '\n')
        # Using not_found_content, rather than repl
        with tempfile.NamedTemporaryFile() as tfile:
@ -101,7 +102,7 @@ class FileReplaceTestCase(TestCase):
            tfile.write(base)
            tfile.flush()
            filemod.replace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), expected)
        # not appending if matches
        with tempfile.NamedTemporaryFile() as tfile:
@ -110,7 +111,7 @@ class FileReplaceTestCase(TestCase):
            tfile.write(base)
            tfile.flush()
            filemod.replace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), expected)

    def test_backup(self):
@ -222,7 +223,7 @@ class FileBlockReplaceTestCase(TestCase):
            new_multiline_content,
            backup=False)

        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            filecontent = fp.read()
        self.assertIn('#-- START BLOCK 1'
                      + "\n" + new_multiline_content
@ -244,7 +245,7 @@ class FileBlockReplaceTestCase(TestCase):
            append_if_not_found=False,
            backup=False
        )
        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            self.assertNotIn('#-- START BLOCK 2'
                             + "\n" + new_content + "\n"
                             + '#-- END BLOCK 2', fp.read())
@ -256,7 +257,7 @@ class FileBlockReplaceTestCase(TestCase):
            backup=False,
            append_if_not_found=True)

        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            self.assertIn('#-- START BLOCK 2'
                          + "\n" + new_content
                          + "\n" + '#-- END BLOCK 2', fp.read())
@ -280,19 +281,19 @@ class FileBlockReplaceTestCase(TestCase):
            tfile.write(base + '\n')
            tfile.flush()
            filemod.blockreplace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), expected)
        # File not ending with a newline
        with tempfile.NamedTemporaryFile() as tfile:
            tfile.write(base)
            tfile.flush()
            filemod.blockreplace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), expected)
        # A newline should not be added in empty files
        with tempfile.NamedTemporaryFile() as tfile:
            filemod.blockreplace(tfile.name, **args)
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), block)

    def test_replace_prepend(self):
@ -308,7 +309,7 @@ class FileBlockReplaceTestCase(TestCase):
            prepend_if_not_found=False,
            backup=False
        )
        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            self.assertNotIn(
                '#-- START BLOCK 2' + "\n"
                + new_content + "\n" + '#-- END BLOCK 2',
@ -320,7 +321,7 @@ class FileBlockReplaceTestCase(TestCase):
            backup=False,
            prepend_if_not_found=True)

        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            self.assertTrue(
                fp.read().startswith(
                    '#-- START BLOCK 2'
@ -334,7 +335,7 @@ class FileBlockReplaceTestCase(TestCase):
            'new content 1',
            backup=False)

        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            filecontent = fp.read()
        self.assertIn('new content 1', filecontent)
        self.assertNotIn('to be removed', filecontent)
@ -435,7 +436,7 @@ class FileModuleTestCase(TestCase):

        filemod.sed(path, before, after, limit=limit)

        with open(path, 'rb') as newfile:
        with salt.utils.fopen(path, 'rb') as newfile:
            self.assertEqual(
                SED_CONTENT.replace(before, ''),
                newfile.read()
@ -451,19 +452,19 @@ class FileModuleTestCase(TestCase):
            tfile.write('foo\n')
            tfile.flush()
            filemod.append(tfile.name, 'bar')
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), 'foo\nbar\n')
        # File not ending with a newline
        with tempfile.NamedTemporaryFile() as tfile:
            tfile.write('foo')
            tfile.flush()
            filemod.append(tfile.name, 'bar')
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), 'foo\nbar\n')
        # A newline should not be added in empty files
        with tempfile.NamedTemporaryFile() as tfile:
            filemod.append(tfile.name, 'bar')
            with open(tfile.name) as tfile2:
            with salt.utils.fopen(tfile.name) as tfile2:
                self.assertEqual(tfile2.read(), 'bar\n')

    def test_extract_hash(self):

@ -2,8 +2,6 @@

# Import salt testing libs
from salttesting.case import ModuleCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import Salt libs
import salt.loader

@ -2,7 +2,12 @@
import os
import tempfile

from salttesting import TestCase
from salttesting.unit import TestCase
from salttesting.helpers import ensure_in_syspath

ensure_in_syspath('../../')

import salt.utils
from salt.modules import ini_manage as ini


@ -112,7 +117,7 @@ empty_option=
        ini.set_option(self.tfile.name, {
            'SectionB': {'test3': 'new value 3B'},
        })
        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            file_content = fp.read()
        self.assertIn('\nempty_option=\n', file_content,
                      'empty_option was not preserved')
@ -121,7 +126,7 @@ empty_option=
        ini.set_option(self.tfile.name, {
            'SectionB': {'test3': 'new value 3B'},
        })
        with open(self.tfile.name, 'rb') as fp:
        with salt.utils.fopen(self.tfile.name, 'rb') as fp:
            file_content = fp.read()
        self.assertEqual('''\
# Comment on the first line

@ -18,6 +18,7 @@ ensure_in_syspath('../')
import integration
import salt.loader
import salt.config
import salt.utils
from salt.state import HighState
from salt.utils.pydsl import PyDslError

@ -291,7 +292,7 @@ class PyDSLRendererTestCase(TestCase):
            '''.format(output, output, output)))

            state_highstate({'base': ['aaa']}, dirpath)
            with open(output, 'r') as f:
            with salt.utils.fopen(output, 'r') as f:
                self.assertEqual(''.join(f.read().split()), "XYZABCDEF")

        finally:
@ -384,7 +385,7 @@ class PyDSLRendererTestCase(TestCase):
                hello blue 3
                ''')

            with open(output, 'r') as f:
            with salt.utils.fopen(output, 'r') as f:
                self.assertEqual(sorted(f.read()), sorted(expected))

        finally:
@ -417,10 +418,10 @@ class PyDSLRendererTestCase(TestCase):
                A()
                '''.format(dirpath, dirpath, dirpath, dirpath)))
            state_highstate({'base': ['aaa']}, dirpath)
            with open(os.path.join(dirpath, 'yyy.txt'), 'r') as f:
            with salt.utils.fopen(os.path.join(dirpath, 'yyy.txt'), 'r') as f:

                self.assertEqual(f.read(), 'hehe\nhoho\n')
            with open(os.path.join(dirpath, 'xxx.txt'), 'r') as f:
            with salt.utils.fopen(os.path.join(dirpath, 'xxx.txt'), 'r') as f:
                self.assertEqual(f.read(), 'hehe\n')
        finally:
            shutil.rmtree(dirpath, ignore_errors=True)
@ -493,7 +494,7 @@ class PyDSLRendererTestCase(TestCase):


def write_to(fpath, content):
    with open(fpath, 'w') as f:
    with salt.utils.fopen(fpath, 'w') as f:
        f.write(content)

@ -11,6 +11,7 @@ from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../')

import salt.state
import salt.utils
from salt.config import minion_config
from salt.template import compile_template
from salt.utils.odict import OrderedDict
@ -209,7 +210,7 @@ class RendererMixin(object):

    def write_template_file(self, filename, content):
        full_path = os.path.join(self.root_dir, filename)
        with open(full_path, 'w') as f:
        with salt.utils.fopen(full_path, 'w') as f:
            f.write(content)
        return full_path

@ -14,6 +14,7 @@ from salttesting.mock import NO_MOCK, NO_MOCK_REASON, MagicMock, patch

ensure_in_syspath('../../')

import salt.utils
from salt.states import syslog_ng
from salt.modules import syslog_ng as syslog_ng_module

@ -360,7 +361,7 @@ class SyslogNGTestCase(TestCase):
        got = syslog_ng.config(id, config=parsed_yaml_config, write=True)

        written_config = ""
        with open(config_file_name, "r") as f:
        with salt.utils.fopen(config_file_name, "r") as f:
            written_config = f.read()

        config_without_whitespaces = remove_whitespaces(written_config)

@ -8,7 +8,7 @@ import datetime
import pprint

# Import Salt Testing libs
from salttesting import skipIf, TestCase
from salttesting.unit import skipIf, TestCase
from salttesting.case import ModuleCase
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

@ -109,8 +109,7 @@ class TestVerify(TestCase):
        # not support IPv6.
        pass

    @skipIf(os.environ.get('TRAVIS_PYTHON_VERSION', None) is not None,
            'Travis environment does not like too many open files')
    @skipIf(True, 'Skipping until we can find why Jenkins is bailing out')
    def test_max_open_files(self):

        with TestsLoggingHandler() as handler:
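
[Reviewer note] Nearly every test-suite hunk in this merge applies one refactor: a bare `open()` call, whose handle is only released whenever the interpreter gets around to collecting it, becomes `salt.utils.fopen` used as a context manager, so the handle is closed as soon as the block exits. A minimal sketch of the before/after, assuming only that `salt.utils.fopen` accepts the same arguments as the builtin `open` (which is how every hunk above uses it); the config path and values are illustrative, not taken from the diff:

    # -*- coding: utf-8 -*-
    # Illustrative sketch of the open() -> salt.utils.fopen refactor.
    # The path and config contents below are hypothetical.
    import os
    import tempfile

    import yaml

    import salt.utils

    config_path = os.path.join(tempfile.mkdtemp(), 'minion')  # hypothetical path

    # Before: the handle returned by open() is never explicitly closed.
    #     config = yaml.load(open(config_path, 'r').read())

    # After: fopen as a context manager closes the handle when the block exits.
    with salt.utils.fopen(config_path, 'w') as fhw:
        fhw.write(yaml.dump({'log_file': '/tmp/minion.log'},
                            default_flow_style=False))
    with salt.utils.fopen(config_path, 'r') as fhr:
        config = yaml.load(fhr.read())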