def read_snapshots(filename):
    """Sequentially reads the sets of signatures from a file. For
    each set of signatures, a GCSnapshot is created with the
    stored name. Returns a dict mapping set name -> GCSnapshot object."""
    result = dict()
    f = open(filename, 'r')
    while 1:
        try:
            snap = GCSnapshot(f)
            result[snap.name] = snap
        except (EOFError, pickle.UnpicklingError):
            break
    f.close()
    return result
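For context, here is a self-contained sketch (illustrative names only; GCSnapshot and the signature format are not shown above) of the read-until-EOFError pattern that read_snapshots() relies on: several pickles are appended to one file and then read back sequentially until the stream ends or turns out to be corrupt.

import pickle

def demo_sequential_pickles(path='snapshots_demo.pkl'):
    # Write a few pickled records back to back into a single file.
    with open(path, 'wb') as f:
        for name in ('gc-run-1', 'gc-run-2', 'gc-run-3'):
            pickle.dump({'name': name, 'signatures': []}, f)
    # Read them back one by one; EOFError marks the end of the stream and
    # UnpicklingError covers a truncated or corrupted tail.
    result = {}
    with open(path, 'rb') as f:
        while True:
            try:
                snap = pickle.load(f)
            except (EOFError, pickle.UnpicklingError):
                break
            result[snap['name']] = snap
    return result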
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    if ':' not in data:
        return None
    if not hash_key:
        hash_key = hashlib.sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    actual_signature = hmac.new(hash_key, encrypted_data).hexdigest()
    if not compare(signature, actual_signature):
        return None
    key = pad(encryption_key[:32])
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = cipher.decrypt(encrypted_data)
        data = data.rstrip(' ')
        if compression_level:
            data = zlib.decompress(data)
        return pickle.loads(data)
    except (TypeError, pickle.UnpicklingError):
        return None
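The defensive pattern worth noting in secure_loads() is that pickle.loads() only runs after the HMAC signature has been verified. Below is a minimal, self-contained sketch of that sign-then-verify gate; it deliberately omits web2py's AES layer, padding, and compression, and the helper names (sign_blob, verify_blob) are made up for illustration.

import hashlib
import hmac
import pickle

def sign_blob(obj, hash_key):
    # Serialize, then prepend an HMAC of the serialized payload.
    payload = pickle.dumps(obj)
    signature = hmac.new(hash_key, payload, hashlib.sha256).hexdigest()
    return signature.encode() + b':' + payload

def verify_blob(blob, hash_key):
    # Refuse to unpickle anything whose signature does not check out.
    signature, payload = blob.split(b':', 1)
    expected = hmac.new(hash_key, payload, hashlib.sha256).hexdigest().encode()
    if not hmac.compare_digest(signature, expected):
        return None
    try:
        return pickle.loads(payload)
    except pickle.UnpicklingError:
        return None

# Usage sketch: verify_blob(sign_blob({'a': 1}, b'key'), b'key') -> {'a': 1}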
def load_models(models_dir):
    """
    Load saved models from disk. This will attempt to unpickle all files in a
    directory; any files that give errors on unpickling (such as README.txt)
    will be skipped.

    Inputs:
    - models_dir: String giving the path to a directory containing model files.
      Each model file is a pickled dictionary with a 'model' field.

    Returns:
    A dictionary mapping model file names to models.
    """
    models = {}
    for model_file in os.listdir(models_dir):
        with open(os.path.join(models_dir, model_file), 'rb') as f:
            try:
                models[model_file] = pickle.load(f)['model']
            except pickle.UnpicklingError:
                continue
    return models
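A short usage sketch for load_models() above (file names and model contents are made up): it builds a throwaway models directory holding two pickled model dicts plus a README.txt, which the UnpicklingError handler typically skips because the text file is not a valid pickle.

import os
import pickle
import tempfile

models_dir = tempfile.mkdtemp()
for name in ('net_a.pkl', 'net_b.pkl'):
    with open(os.path.join(models_dir, name), 'wb') as f:
        pickle.dump({'model': {'weights': [0.0, 1.0]}}, f)
with open(os.path.join(models_dir, 'README.txt'), 'w') as f:
    f.write('not a pickle')

models = load_models(models_dir)  # expect keys 'net_a.pkl' and 'net_b.pkl' only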
def waitForNextObs(obsNum, statusUrl, sessionId, maxWaitTime, checkInterval=60, isgleam=False):
    max_time = 0
    while (max_time <= maxWaitTime):
        time.sleep(checkInterval)
        max_time += checkInterval
        try:
            strRes = urllib2.urlopen(statusUrl + sessionId).read()
            myRes = pickle.loads(strRes)
            if (0 == myRes.number_files_to_be_delivered):
                # modify database
                markObsDeliveredStatus(obsNum, isGleam=isgleam)
                break
            elif (myRes.errorcode):
                markObsDeliveredStatus(obsNum, -1, isGleam=isgleam)
                break
        except (UnpicklingError, socket.timeout) as uerr:
            logger.error("Something wrong while getting status for obsNum %s, %s" % (obsNum, str(uerr)))
            continue
def load_link_list(self):
    try:
        input = open('link_list.dat', 'rb')
        data = cPickle.load(input)
        disc, bkmark = data
    except IOError:
        log.msg("No link_list.dat found.")
    except ValueError:
        log.msg("File link_list.dat incomplete.")
    except cPickle.UnpicklingError:
        log.msg("File link_list.dat bad or corrupt.")
    else:
        for key, value in disc.iteritems():
            self.add_link_list(value, key)
        for key, value in bkmark.iteritems():
            self.add_bookmark(key)
def __init__(self, index):
    """Initialize the TokI object from a MongoDB or load from disk."""
    self.index = index
    if pymongo:
        if 'toki' in self.index.mongo_db.collection_names():
            self.mongo_toki = self.index.mongo_db['toki']
            if self.mongo_toki.count() == 0:
                raise IndexLoadError
        else:
            raise IndexLoadError
    else:
        # Load into memory (not suitable for large corpora!)
        try:
            with open(self.index.base_fname + '.toki', mode='rb') as f:
                self.toki = pickle.load(f)
            if not self.toki:
                raise IndexLoadError
        except (IOError, pickle.UnpicklingError):
            raise IndexLoadError
def __init__(self, log, journaledService, path, loadedCallback):
    self.path = path
    if os.path.exists(path):
        try:
            self.lastSync, obj = pickle.load(open(path, "rb"))
        except (IOError, OSError, pickle.UnpicklingError):
            self.lastSync, obj = 0, None
        loadedCallback(obj)
    else:
        self.lastSync = 0
        loadedCallback(None)
    Journal.__init__(self, log, journaledService)
def get(self, *args, **kw):
    # We do it with *args and **kw so if the default value wasn't
    # given nothing is passed to the extension module. That way
    # an exception can be raised if set_get_returns_none is turned
    # off.
    data = self.db.get(*args, **kw)
    try:
        return cPickle.loads(data)
    except (EOFError, TypeError, cPickle.UnpicklingError):
        return data  # we may be getting the default value, or None,
                     # so it doesn't need unpickled.
def load_object(file_path):
    file_path = os.path.expanduser(file_path)
    # Reading the whole file into a string and using loads() is about 2.5x
    # faster than passing the file handle to load().
    with open(file_path, 'rb') as fh:
        data = fh.read()
    try:
        return pickle.loads(data, encoding='bytes')
    except pickle.UnpicklingError as e:
        raise ValueError from e
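A hypothetical write-side counterpart (save_object is not part of the original module) that mirrors load_object(): serialize to a bytes string first, then write the whole buffer in one call.

import os
import pickle

def save_object(obj, file_path):
    # Dump to an in-memory bytes string, then write it out in a single call,
    # mirroring the read-whole-file-then-loads() approach used above.
    file_path = os.path.expanduser(file_path)
    data = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
    with open(file_path, 'wb') as fh:
        fh.write(data)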
def get(self, timeout=None):
    """Receive, decode and return data from the pipe. Block
    gevent-cooperatively until data is available or timeout expires. The
    default decoder is ``pickle.loads``.

    :arg timeout: ``None`` (default) or a ``gevent.Timeout``
        instance. The timeout must be started to take effect and is
        canceled when the first byte of a new message arrives (i.e.
        providing a timeout does not guarantee that the method completes
        within the timeout interval).

    :returns: a Python object.

    Raises:
        - :exc:`gevent.Timeout` (if provided)
        - :exc:`GIPCError`
        - :exc:`GIPCClosed`
        - :exc:`pickle.UnpicklingError`

    Recommended usage for silent timeout control::

        with gevent.Timeout(TIME_SECONDS, False) as t:
            reader.get(timeout=t)

    .. warning::

        The timeout control is currently not available on Windows,
        because Windows can't apply select() to pipe handles.
        An ``OSError`` is expected to be raised in case you set a
        timeout.
    """
    self._validate()
    with self._lock:
        if timeout:
            # Wait for ready-to-read event.
            h = gevent.get_hub()
            h.wait(h.loop.io(self._fd, 1))
            timeout.cancel()
        msize, = struct.unpack("!i", self._recv_in_buffer(4).getvalue())
        bindata = self._recv_in_buffer(msize).getvalue()
    return self._decoder(bindata)
def getMessage(self):
    if len(self.__readBuffer) < 4:
        return None
    l = struct.unpack('i', self.__readBuffer[:4])[0]
    if len(self.__readBuffer) - 4 < l:
        return None
    data = self.__readBuffer[4:4 + l]
    try:
        message = cPickle.loads(zlib.decompress(data))
    except (zlib.error, cPickle.UnpicklingError):
        self.__disconnected = True
        return None
    self.__readBuffer = self.__readBuffer[4 + l:]
    return message
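getMessage() expects frames of the form: a 4-byte native-order length prefix followed by a zlib-compressed pickle. Below is a minimal sketch of the matching sender-side framing (pack_message is a made-up name; the original project's sender is not shown).

import pickle
import struct
import zlib

def pack_message(message):
    # Compressed pickle payload, preceded by its length as a native-order
    # 4-byte integer, matching struct.unpack('i', ...) on the receiving side.
    payload = zlib.compress(pickle.dumps(message))
    return struct.pack('i', len(payload)) + payload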
def load_status(self):
    """Read the persistent state file and load the state it contains."""
    try:
        status_format_version, status = self._load_status()
        if (status_format_version != self.status_format_version or
                status['comp_vn'] != self.competition.status_format_version):
            raise StandardError
        self.void_game_count = status['void_game_count']
        self.games_in_progress = {}
        self.games_to_replay = {}
        competition_status = status['comp']
    except pickle.UnpicklingError:
        raise RingmasterError("corrupt status file")
    except EnvironmentError, e:
        raise RingmasterError("error loading status file:\n%s" % e)
    except KeyError, e:
        raise RingmasterError("incompatible status file: missing %s" % e)
    except Exception, e:
        # Probably an exception from __setstate__ somewhere
        raise RingmasterError("incompatible status file")
    try:
        self.competition.set_status(competition_status)
    except CompetitionError, e:
        raise RingmasterError("error loading competition state: %s" % e)
    except KeyError, e:
        raise RingmasterError(
            "error loading competition state: missing %s" % e)
    except Exception, e:
        raise RingmasterError("error loading competition state:\n%s" %
                              compact_tracebacks.format_traceback(skip=1))
    self.status_is_loaded = True
def pollmessage(self, wait):
    packet = self.pollpacket(wait)
    if packet is None:
        return None
    try:
        message = pickle.loads(packet)
    except pickle.UnpicklingError:
        print >>sys.__stderr__, "-----------------------"
        print >>sys.__stderr__, "cannot unpickle packet:", repr(packet)
        traceback.print_stack(file=sys.__stderr__)
        print >>sys.__stderr__, "-----------------------"
        raise
    return message
def unpickle(pickled_string):
    """Unpickles a string, but raises a unified UnpickleError in case anything
    fails.

    This is a helper method to not have to deal with the fact that `loads()`
    potentially raises many types of exceptions (e.g. AttributeError,
    IndexError, TypeError, KeyError, etc.)
    """
    try:
        obj = loads(pickled_string)
    except (StandardError, UnpicklingError):
        raise UnpickleError('Could not unpickle.', pickled_string)
    return obj
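A self-contained sketch of the same collapse-everything-into-one-exception pattern (the names here are stand-ins, not rq's loads/UnpickleError): callers only need to handle a single exception type no matter which of the many possible unpickling errors occurred.

import pickle

class UnifiedUnpickleError(Exception):
    pass

def unpickle_or_raise(raw):
    try:
        return pickle.loads(raw)
    except Exception as e:
        # AttributeError, EOFError, UnpicklingError, ... all end up here.
        raise UnifiedUnpickleError('Could not unpickle.', raw) from e

# Usage: a single except clause instead of a long tuple of exception types.
try:
    job = unpickle_or_raise(b'definitely not a pickle')
except UnifiedUnpickleError:
    job = None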
def test_bad_input(self):
    # Test issue4298
    s = '\x58\0\0\0\x54'
    self.assertRaises(EOFError, self.module.loads, s)
    # Test issue7455
    s = '0'
    # XXX Why doesn't pickle raise UnpicklingError?
    self.assertRaises((IndexError, cPickle.UnpicklingError),
                      self.module.loads, s)
def _restore_drover(workdir):
    """Restores a saved drover state contained within a workdir.

    Args:
        workdir: A string containing the path to the workdir used by drover.
    """
    try:
        with open(os.path.join(workdir, '.git', 'drover'), 'rb') as f:
            drover = cPickle.load(f)
            drover._process_options()
            return drover
    except (IOError, cPickle.UnpicklingError):
        raise Error('%r is not a git drover workdir' % workdir)
def _load_binstring(self):
    len, = unpack('<i', self.read(4))
    if len < 0:
        raise pickle.UnpicklingError("BINSTRING pickle has negative byte count")
    data = self.read(len)
    try:
        data = str(data, self.encoding, self.errors)
    except:
        pass
    self.append(data)
def init_from_string(self, s):
    """Initialize values from string. Return 0 if there is a problem."""
    try:
        val_dict = cPickle.loads(s)
    except cPickle.UnpicklingError:
        return 0
    try:
        self.index = val_dict['index']
        self.testfile_type = val_dict['testfile_type']
        self.testfile_option = val_dict['testfile_option']
        self.temp_index = val_dict['temp_index']
    except (TypeError, KeyError):
        return 0
    return 1
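A hypothetical serializer counterpart (not shown in the original class) that produces the string init_from_string() expects: a pickled dict with exactly these four keys. It assumes the same module-level cPickle import the snippet uses.

def state_to_string(self):
    # Pickle the same four fields that init_from_string() reads back.
    return cPickle.dumps({
        'index': self.index,
        'testfile_type': self.testfile_type,
        'testfile_option': self.testfile_option,
        'temp_index': self.temp_index,
    })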