Mirror of https://github.com/valitydev/salt.git
Add generator functions for reading files
This commit is contained in:
parent 2969153097
commit fc401c9eb4
salt/utils/gzip_util.py
@@ -10,6 +10,9 @@ from __future__ import absolute_import
 # Import python libs
 import gzip
 
+# Import Salt libs
+import salt.utils
+
 # Import 3rd-party libs
 from salt.ext.six.moves import StringIO  # pylint: disable=import-error
 
@@ -63,3 +66,38 @@ def uncompress(data):
     with open_fileobj(buf, 'rb') as igz:
         unc = igz.read()
     return unc
+
+
+def compress_file(fh_, compresslevel=9, chunk_size=1048576):
+    '''
+    Generator that reads chunk_size bytes at a time from a file/filehandle and
+    yields the compressed result of each read.
+
+    .. note::
+        Each chunk is compressed separately. They cannot be stitched together
+        to form a compressed file. This function is designed to break up a file
+        into compressed chunks for transport and decompression/reassembly on a
+        remote host.
+    '''
+    try:
+        bytes_read = int(chunk_size)
+        if bytes_read != chunk_size:
+            raise ValueError
+    except ValueError:
+        raise ValueError('chunk_size must be an integer')
+    try:
+        while bytes_read == chunk_size:
+            buf = StringIO()
+            with open_fileobj(buf, 'wb', compresslevel) as ogz:
+                try:
+                    bytes_read = ogz.write(fh_.read(chunk_size))
+                except AttributeError:
+                    # Open the file and re-attempt the read
+                    fh_ = salt.utils.fopen(fh_, 'rb')
+                    bytes_read = ogz.write(fh_.read(chunk_size))
+            yield buf.getvalue()
+    finally:
+        try:
+            fh_.close()
+        except AttributeError:
+            pass
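The note in the docstring describes the intended round trip: every chunk yielded by compress_file is an independent gzip stream, so the receiving side decompresses each chunk on its own and concatenates the plaintext. The sketch below is not part of the commit; it assumes the module path salt.utils.gzip_util for the file being modified, uses the existing uncompress(data) helper shown in the hunk header, and the file name is a placeholder.

# Illustrative sketch only: compress a local file in independently
# gzip'd chunks and immediately reassemble it, standing in for the
# send/receive round trip. 'some_file.txt' is a placeholder path.
import salt.utils.gzip_util as gzip_util

pieces = []
for chunk in gzip_util.compress_file('some_file.txt', chunk_size=65536):
    # Each chunk is a complete gzip stream; decompress it on its own.
    pieces.append(gzip_util.uncompress(chunk))

reassembled = ''.join(pieces)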
salt/utils/itertools.py
@@ -7,6 +7,9 @@ Helpful generators and other tools
 from __future__ import absolute_import
 import re
 
+# Import Salt libs
+import salt.utils
+
 
 def split(orig, sep=None):
     '''
@@ -32,3 +35,31 @@ def split(orig, sep=None):
         if pos < match.start() or sep is not None:
             yield orig[pos:match.start()]
         pos = match.end()
+
+
+def read_file(fh_, chunk_size=1048576):
+    '''
+    Generator that reads chunk_size bytes at a time from a file/filehandle and
+    yields it.
+    '''
+    try:
+        if chunk_size != int(chunk_size):
+            raise ValueError
+    except ValueError:
+        raise ValueError('chunk_size must be an integer')
+    try:
+        while True:
+            try:
+                chunk = fh_.read(chunk_size)
+            except AttributeError:
+                # Open the file and re-attempt the read
+                fh_ = salt.utils.fopen(fh_, 'rb')
+                chunk = fh_.read(chunk_size)
+            if not chunk:
+                break
+            yield chunk
+    finally:
+        try:
+            fh_.close()
+        except AttributeError:
+            pass
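As a quick usage sketch (again, not part of the commit): read_file accepts either an open file handle or a path, and when given a path it opens the file itself via salt.utils.fopen, yielding raw chunks until the file is exhausted. The sketch assumes the module path salt.utils.itertools for the file being modified; the file name is a placeholder.

# Illustrative sketch only: count the bytes in a file by iterating over
# it in 4 KiB chunks. 'large_file.bin' is a placeholder path.
import salt.utils.itertools

total_bytes = 0
for chunk in salt.utils.itertools.read_file('large_file.bin', chunk_size=4096):
    # Each chunk is at most chunk_size bytes of raw file content.
    total_bytes += len(chunk)
print(total_bytes)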