def decompress(self):
    """Return the fully decompressed contents of the gzip file at ``self.path``."""
    with gzip.open(self.path, 'rb') as handle:
        payload = handle.read()
    return payload
Example source code for Python's `open()`.
def decompress(self, outpath):
    """Return the fully decompressed contents of the bz2 file at ``self.path``.

    :param outpath: Unused by this implementation — presumably intended as a
        destination path; kept only for interface compatibility (TODO confirm).
    """
    with bz2.open(self.path, 'rb') as handle:
        payload = handle.read()
    return payload
def _open_file(self, mode, encoding=None):
"""
Opens the next current file.
:param str mode: The mode for opening the file.
:param str encoding: The encoding of the file.
"""
if self._filename[-4:] == '.bz2':
self._file = bz2.open(self._filename, mode=mode, encoding=encoding)
else:
self._file = open(self._filename, mode=mode, encoding=encoding)
# ------------------------------------------------------------------------------------------------------------------
def _get_sample(self, mode, encoding):
    """
    Get a sample from the next current input file and store it in
    ``self._sample`` (at most ``UniversalCsvReader.sample_size`` units).

    :param str mode: The mode for opening the file.
    :param str|None encoding: The encoding of the file. None for open the file in binary mode.
    """
    self._open_file(mode, encoding)
    try:
        self._sample = self._file.read(UniversalCsvReader.sample_size)
    finally:
        # Close even when read() raises (e.g. decode errors on a bad
        # encoding); the original leaked the handle on that path.
        self._file.close()
# ------------------------------------------------------------------------------------------------------------------
def load_perceptual(self):
    """Load pre-trained VGG19 convolution weights into the perceptual layers.

    Reads ``vgg19_conv.pkl.bz2`` from this module's directory and copies each
    stored array into the matching layer parameter.
    """
    vgg19_file = os.path.join(os.path.dirname(__file__), 'vgg19_conv.pkl.bz2')
    if not os.path.exists(vgg19_file):
        # Fixed typo in the user-facing message ("MOdel" -> "Model").
        error("Model was not found",
              "Reformat model directory")
    data = pickle.load(bz2.open(vgg19_file, 'rb'))
    layers = lasagne.layers.get_all_layers(self.last_layer(), treat_as_input=[self.network['percept']])
    # Pair every parameter tensor (in layer order) with its stored value.
    for p, d in zip(itertools.chain(*[l.get_params() for l in layers]), data):
        p.set_value(d)
def save_generator(self):
    """Serialize the generator's configuration and float16-cast weights to a
    bz2-compressed pickle file at ``self.get_filename()``."""
    def cast(p):
        return p.get_value().astype(np.float16)
    params = {k: [cast(p) for p in l.get_params()] for (k, l) in self.list_generator_layers()}
    config_keys = ['generator_blocks', 'generator_residual', 'generator_filters',
                   'generator_upscale', 'generator_downscale']
    config = {k: getattr(args, k) for k in config_keys}
    # Use a context manager: the original never closed the bz2 handle, so the
    # compressed stream was only flushed at interpreter GC, risking a
    # truncated file.
    with bz2.open(self.get_filename(), 'wb') as f:
        pickle.dump((config, params), f)
    print(' - Saved model as `{}` after training.'.format(self.get_filename()))
def load_model(self):
    """Load ``(config, params)`` from the bz2-compressed pickle snapshot.

    :return: The unpickled ``(config, params)`` tuple, or ``({}, {})`` when
        training from scratch and no snapshot file exists.
    """
    if not os.path.exists(self.get_filename()):
        if args.train:
            return {}, {}
        error("Model Xfile with pre-trained convolution layers not found. Download it here...")
    print(' - Loaded file `{}`.'.format(self.get_filename()))
    # Context manager closes the handle; the original leaked it until GC.
    with bz2.open(self.get_filename(), 'rb') as f:
        return pickle.load(f)
def open_f(filename):
    """Open *filename* for reading text, transparently decompressing ``.bz2``.

    :return: A text-mode file object regardless of compression.
    """
    if filename.endswith('.bz2'):
        # 'rt' so both branches yield text-mode handles: for bz2.open a bare
        # 'r' means *binary*, which made the caller's data type depend on the
        # file extension.
        return bz2.open(filename, 'rt')
    else:  # assume it's normal text
        return open(filename, 'r')
# prints to stdout for piping into kenlm
def __enter__(self):
if self.compression == 'gzip':
self.f = gzip.open(self.filename, 'rt' if self.read else 'wt')
elif self.compression == 'bzip2':
self.f = bz2.open(self.filename, 'rt' if self.read else 'wt')
else:
self.f = open(self.filename, 'r' if self.read else 'w')
return self.f
def load_pickle(filename):
    """Deserialize and return the object stored in pickle file *filename*."""
    with open(filename, 'rb') as handle:
        obj = pickle.load(handle)
    return obj
def save_pickle(filename, obj):
    """Serialize *obj* to pickle file *filename*."""
    with open(filename, 'wb') as handle:
        pickle.dump(obj, handle)
def save_json(filename, obj):
    """Serialize *obj* as JSON to file *filename*."""
    with open(filename, 'w') as handle:
        json.dump(obj, handle)
def load(self, metadata_filename):
    """Restore the extension metadata list from a bz2-compressed JSON file.

    A missing file is tolerated: a warning is logged and the list stays empty.

    :param str metadata_filename: Path to the compressed metadata file.
    """
    global logger
    self.__ext = []
    try:
        with bz2.open(metadata_filename, "r") as f:
            logger.debug("Retrieving metadata state from `%s`" % metadata_filename)
            self.__ext.extend(Extension(e) for e in json.load(f))
    except FileNotFoundError:
        logger.warning("No metadata state stored in `%s`" % metadata_filename)
def save(self):
    """Persist the extension metadata list as bz2-compressed, UTF-8 JSON."""
    global logger
    logger.debug("Writing metadata state to `%s`" % self.__filename)
    with bz2.open(self.__filename, "w") as f:
        encoded = json.dumps(self.__ext).encode("utf-8")
        f.write(encoded)
def detect_archive_format_and_open(path):
    """Open *path* for reading text, transparently decompressing ``.bz2`` and
    ``.gz`` archives based on the file extension.

    :return: A text-mode file object regardless of compression.
    """
    if path.endswith(".bz2"):
        # mode='rt' keeps this branch consistent with the gzip and plain
        # branches below; bz2.open's default mode is binary ('rb').
        return bz2.open(path, mode='rt')
    if path.endswith(".gz"):
        return gzip.open(path, mode='rt')
    return open(path)
def save_json(data, path):
    """Write *data* to *path* as pretty-printed, key-sorted JSON with
    non-ASCII characters preserved.

    :param data: Any JSON-serializable value.
    :param str path: Destination file path.
    """
    serialized = json.dumps(data, ensure_ascii=False, indent=4, sort_keys=True)
    # Context manager guarantees the handle is closed even if the write fails;
    # the original left the file open on any exception.
    with open(path, 'w') as f:
        f.write(serialized)
def load_json(path):
    """Read *path* and return the parsed JSON value.

    :param str path: Path to the JSON file.
    """
    # 'with' closes the handle even if parsing raises; the original leaked
    # the file object on a json.loads error. json.load reads the stream
    # directly instead of materializing the text first.
    with open(path) as f:
        return json.load(f)
parsers.py — source file
Project: PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda
Author: SignalMedia
Views: 30 · Bookmarks: 0 · Likes: 0 · Comments: 0
def _wrap_compressed(f, compression, encoding=None):
    """wraps compressed fileobject in a decompressing fileobject
    NOTE: For all files in Python 3.2 and for bzip'd files under all Python
    versions, this means reading in the entire file and then re-wrapping it in
    StringIO.
    """
    compression = compression.lower()
    encoding = encoding or get_option('display.encoding')

    if compression == 'gzip':
        import gzip
        wrapped = gzip.GzipFile(fileobj=f)
        if compat.PY3:
            from io import TextIOWrapper
            wrapped = TextIOWrapper(wrapped)
        return wrapped

    if compression == 'bz2':
        import bz2
        if compat.PY3:
            return bz2.open(f, 'rt', encoding=encoding)
        # Python 2's bz2 module can't take file objects, so have to
        # run through decompress manually
        return StringIO(bz2.decompress(f.read()))

    raise ValueError('do not recognize compression method %s'
                     % compression)
parsers.py — source file
Project: PyDataLondon29-EmbarrassinglyParallelDAWithAWSLambda
Author: SignalMedia
Views: 86 · Bookmarks: 0 · Likes: 0 · Comments: 0
def __next__(self):
if self.buffer is not None:
try:
line = next(self.buffer)
except StopIteration:
self.buffer = None
line = next(self.f)
else:
line = next(self.f)
# Note: 'colspecs' is a sequence of half-open intervals.
return [line[fromm:to].strip(self.delimiter)
for (fromm, to) in self.colspecs]
def load_cached_storage(cache_file, default_size=1000):
    """Load a pickled cache from *cache_file* if it exists; otherwise build a
    fresh ``ThingCache``.

    :param cache_file: Path to a bz2-compressed pickle, or None.
    :param default_size: Capacity used when creating a new cache.
    :return: The unpickled cache, a new ``ThingCache``, or None when the
        stored cache is corrupt or truncated.
    """
    if cache_file is not None and os.path.exists(cache_file):
        print("Loading cache: {0}".format(cache_file))
        with bz2.open(cache_file, "rb") as file:
            try:
                return pickle.load(file)
            except (pickle.PickleError, EOFError):
                # BUG FIX: the original `except pickle.PickleError and
                # EOFError:` evaluates to just EOFError, so corrupt pickles
                # raised instead of returning None. A tuple catches both.
                return None
    return ThingCache(cache_size=default_size, file=cache_file)