def __setstate__(self, state):
    OLDEST_SUPPORTED_STATE = 4
    version = state.pop(VERSION_LABEL)

    if version < OLDEST_SUPPORTED_STATE:
        raise BaseException("PerformanceTracker saved state is too old.")

    self.__dict__.update(state)

    # Handle the dividend frame specially
    self.dividend_frame = pickle.loads(state['dividend_frame'])

    # properly setup the perf periods
    p_types = ['cumulative', 'todays']
    for p_type in p_types:
        name = p_type + '_performance'
        period = getattr(self, name, None)
        if period is None:
            continue
        period._position_tracker = self.position_tracker
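# The pattern above, popping a version label out of the pickled state and
# refusing anything older than a cutoff, generalizes to any class. A minimal,
# self-contained sketch of the same idea (the class, the VERSION_LABEL value,
# and the version numbers here are illustrative, not taken from the project above):
import pickle

VERSION_LABEL = '_state_version'  # assumed label; the project defines its own

class Counter(object):
    OLDEST_SUPPORTED_STATE = 2
    STATE_VERSION = 3

    def __init__(self, count=0):
        self.count = count

    def __getstate__(self):
        state = dict(self.__dict__)
        state[VERSION_LABEL] = self.STATE_VERSION  # stamp the state with its version
        return state

    def __setstate__(self, state):
        version = state.pop(VERSION_LABEL)
        if version < self.OLDEST_SUPPORTED_STATE:
            raise ValueError("Counter saved state is too old.")
        self.__dict__.update(state)

assert pickle.loads(pickle.dumps(Counter(5))).count == 5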
def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256):
    """ Return the content of a cookie. To read a `Signed Cookie`, the
        `secret` must match the one used to create the cookie (see
        :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
        cookie or wrong signature), return a default value. """
    value = self.cookies.get(key)
    if secret:
        # See BaseResponse.set_cookie for details on signed cookies.
        if value and value.startswith('!') and '?' in value:
            sig, msg = map(tob, value[1:].split('?', 1))
            hash = hmac.new(tob(secret), msg, digestmod=digestmod).digest()
            if _lscmp(sig, base64.b64encode(hash)):
                dst = pickle.loads(base64.b64decode(msg))
                if dst and dst[0] == key:
                    return dst[1]
        return default
    return value or default
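# The verification above only makes sense next to the signing side. Below is a
# standalone sketch of the same "!signature?message" scheme, using the standard
# library's constant-time comparison in place of bottle's _lscmp helper:
import base64
import hashlib
import hmac
import pickle

def sign_cookie(key, value, secret, digestmod=hashlib.sha256):
    # pickle the (name, value) pair, base64 it, and prepend an HMAC signature
    msg = base64.b64encode(pickle.dumps((key, value), -1))
    sig = base64.b64encode(hmac.new(secret, msg, digestmod=digestmod).digest())
    return b'!' + sig + b'?' + msg

def read_cookie(key, raw, secret, digestmod=hashlib.sha256):
    sig, msg = raw[1:].split(b'?', 1)
    digest = base64.b64encode(hmac.new(secret, msg, digestmod=digestmod).digest())
    if hmac.compare_digest(sig, digest):  # constant-time comparison
        name, value = pickle.loads(base64.b64decode(msg))
        if name == key:
            return value
    return None

raw = sign_cookie('session', {'user': 42}, b'my-secret')
assert read_cookie('session', raw, b'my-secret') == {'user': 42}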
def entity_history(request):
    data = []
    mimetype = 'application/json'
    if request.method == 'POST' and 'entity' in request.POST and request.POST['entity'] != '':
        entity = request.POST['entity']
        logger.debug("view entity_history user: %s entity: %s" % (request.user.username, entity))
        for history_data in r.lrange('history_entity_' + entity, 0, 100):
            data.append(pickle.loads(history_data))
    return HttpResponse(json.dumps(data), mimetype)
#@login_required(login_url=reverse_lazy('login'))
def entity_notify_history(request):
    data = []
    mimetype = 'application/json'
    if request.method == 'POST' and 'entity' in request.POST and request.POST['entity'] != '':
        entity = request.POST['entity']
        logger.debug("view entity_notify_history user: %s entity: %s" % (request.user.username, entity))
        for history_data in r.lrange('notifyhistory_entity_' + entity, 0, 100):
            data.append(pickle.loads(history_data))
    return HttpResponse(json.dumps(data), mimetype)
def command_openweb(current_buffer, args):
    trigger = w.config_get_plugin('trigger_value')
    if trigger != "0":
        if args is None:
            channel = channels.find(current_buffer)
            url = "{}/messages/{}".format(channel.server.server_buffer_name, channel.name)
            topic = w.buffer_get_string(channel.channel_buffer, "title")
            w.buffer_set(channel.channel_buffer, "title", "{}:{}".format(trigger, url))
            w.hook_timer(1000, 0, 1, "command_openweb", json.dumps({"topic": topic, "buffer": current_buffer}))
        else:
            # TODO: fix this dirty hack because i don't know the right way to send multiple args.
            args = current_buffer
            data = json.loads(args)
            channel_buffer = channels.find(data["buffer"]).channel_buffer
            w.buffer_set(channel_buffer, "title", data["topic"])
    return w.WEECHAT_RC_OK
def _restore_state(self):
    """ Restore user state. """
    try:
        state = self._state_store.get_value(self._state_store_key)
        state_dict = pickle.loads(
            binascii.unhexlify(state.encode("utf-8")))
        self._name = state_dict['name']
        self.enrollment_secret = state_dict['enrollment_secret']
        enrollment = state_dict['enrollment']
        if enrollment:
            private_key = serialization.load_pem_private_key(
                enrollment['private_key'],
                password=None,
                backend=default_backend()
            )
            cert = enrollment['cert']
            self.enrollment = Enrollment(private_key, cert)
        self.affiliation = state_dict['affiliation']
        self.account = state_dict['account']
        self.roles = state_dict['roles']
        self._org = state_dict['org']
        self.msp_id = state_dict['msp_id']
    except Exception as e:
        raise IOError("Cannot deserialize the user", e)
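# _restore_state implies a writer that stores a hex-encoded pickle of the same
# dict. The sketch below is an assumed counterpart showing only that encoding
# round trip; set_value is assumed to mirror get_value, and the enrollment
# key/cert handling is elided:
import binascii
import pickle

def _save_state(self):
    state_dict = {
        'name': self._name,
        'enrollment_secret': self.enrollment_secret,
        'enrollment': None,  # real code would serialize the key and cert here
        'affiliation': self.affiliation,
        'account': self.account,
        'roles': self.roles,
        'org': self._org,
        'msp_id': self.msp_id,
    }
    # pickle, then hexlify so the state store only ever sees text
    blob = binascii.hexlify(pickle.dumps(state_dict)).decode("utf-8")
    self._state_store.set_value(self._state_store_key, blob)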
def _testStruct(self, Struct, values={}, delattrs=()):
    schema = mapped_struct.Schema.from_typed_slots(Struct)
    x = Struct()

    for k in delattrs:
        delattr(x, k)
    for k, v in values.iteritems():
        setattr(x, k, v)

    px = schema.pack(x)

    old_schema = schema
    schema = cPickle.loads(cPickle.dumps(schema, 2))
    self.assertTrue(old_schema.compatible(schema))
    self.assertTrue(schema.compatible(old_schema))

    dx = schema.unpack(px)
    for k in Struct.__slots__:
        if k in values or k not in delattrs:
            self.assertEquals(getattr(dx, k, None), getattr(x, k, None))
        else:
            self.assertFalse(hasattr(dx, k))
def testPackPickleUnpack(self):
    # hack - unregister subschema (can't register twice)
    mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct, None)
    mapped_struct.mapped_object.OBJ_PACKERS.pop('}', None)
    for TEST_VALUES in self.TEST_VALUES:
        # re-register subschema
        mapped_struct.mapped_object.register_schema(self.SubStruct, self.subschema, '}')

        x = self.Struct(**{k: v for k, v in TEST_VALUES.iteritems()})
        pschema = cPickle.dumps(self.schema)

        # Unregister schema to force the need for auto-register
        mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct, None)
        mapped_struct.mapped_object.OBJ_PACKERS.pop('}', None)

        pschema = cPickle.loads(pschema)
        dx = pschema.unpack(self.schema.pack(x))
        for k, v in TEST_VALUES.iteritems():
            self.assertTrue(hasattr(dx, k))
            self.assertEqual(getattr(dx, k), v)
        for k in self.Struct.__slots__:
            if k not in TEST_VALUES:
                self.assertFalse(hasattr(dx, k))
def load_object(self, value):
    """The reversal of :meth:`dump_object`. This might be called with
    None.
    """
    if value is None:
        return None
    if value.startswith(b'!'):
        try:
            return pickle.loads(value[1:])
        except pickle.PickleError:
            return None
    try:
        return int(value)
    except ValueError:
        # before 0.8 we did not have serialization. Still support that.
        return value
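# For context, the dump_object counterpart in the same cache class stores
# integers as plain ASCII bytes and everything else as a '!'-prefixed pickle,
# which is exactly what the two branches above undo. A sketch of that
# convention (werkzeug's actual implementation varies slightly across versions):
import pickle

def dump_object(self, value):
    if type(value) is int:
        return str(value).encode('ascii')  # read back via int(value)
    return b'!' + pickle.dumps(value)      # read back via pickle.loads(value[1:])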
def get(self, key):
    go = '/'
    member = CheckAuth(self)
    if member:
        member = Member.get(member.id)
        one = Member.get(key)
        if one:
            if one.num != member.num:
                try:
                    blocked = pickle.loads(member.blocked.encode('utf-8'))
                except:
                    blocked = []
                if len(blocked) == 0:
                    blocked = []
                if one.num not in blocked:
                    blocked.append(one.num)
                    member.blocked = pickle.dumps(blocked)
                    member.sync()
                    store.commit()  # jon add
                    memcache.set('Member_' + str(member.num), member, 86400)
    self.redirect(go)
def get(self, key):
    go = '/'
    member = CheckAuth(self)
    if member:
        member = Member.get(member.id)
        one = Member.get(key)
        if one:
            if one.num != member.num:
                try:
                    blocked = pickle.loads(member.blocked.encode('utf-8'))
                except:
                    blocked = []
                if len(blocked) == 0:
                    blocked = []
                if one.num in blocked:
                    blocked.remove(one.num)
                    member.blocked = pickle.dumps(blocked)
                    member.sync()
                    store.commit()  # jon add
                    memcache.set('Member_' + str(member.num), member, 86400)
    self.redirect(go)
def export_to_csv(request, variants):
    # export to csv
    export = request.GET.get('export', '')
    if export != '':
        if export == 'csv':
            response = HttpResponse(content_type='text/csv')
            response['Content-Disposition'] = 'attachment; filename=export.csv'
            writer = csv.writer(response)
        elif export == 'txt':
            response = HttpResponse(content_type='text/plain')
            response['Content-Disposition'] = 'attachment; filename=export.txt'
            writer = csv.writer(response, delimiter='\t', quoting=csv.QUOTE_NONE)
        writer.writerow(['Individual', 'Index', 'Pos_index', 'Chr', 'Pos', 'Variant_id', 'Ref', 'Alt', 'Qual', 'Filter', 'Info', 'Format', 'Genotype_col', 'Genotype', 'Read_depth', 'Gene', 'Mutation_type', 'Vartype', 'Genomes1k_maf', 'Dbsnp_maf', 'Esp_maf', 'Dbsnp_build', 'Sift', 'Sift_pred', 'Polyphen2', 'Polyphen2_pred', 'Condel', 'Condel_pred', 'DANN', 'CADD', 'Is_at_omim', 'Is_at_hgmd', 'Hgmd_entries', 'Effect', 'Impact', 'Func_class', 'Codon_change', 'Aa_change', 'Aa_len', 'Gene_name', 'Biotype', 'Gene_coding', 'Transcript_id', 'Exon_rank', 'Genotype_number', 'Allele', 'Gene', 'Feature', 'Feature_type', 'Consequence', 'Cdna_position', 'Cds_position', 'Protein_position', 'Amino_acids', 'Codons', 'Existing_variation', 'Distance', 'Strand', 'Symbol', 'Symbol_source', 'Sift', 'Polyphen', 'Condel'])
        for variant in variants:
            # print 'variant', variant.index
            writer.writerow([variant.individual, variant.index, variant.pos_index, variant.chr, variant.pos, variant.variant_id, variant.ref, variant.alt, variant.qual, variant.filter, pickle.loads(variant.info), variant.format, variant.genotype_col, variant.genotype, variant.read_depth, variant.gene, variant.mutation_type, variant.vartype, variant.genomes1k_maf, variant.dbsnp_maf, variant.esp_maf, variant.dbsnp_build, variant.sift, variant.sift_pred, variant.polyphen2, variant.polyphen2_pred, variant.condel, variant.condel_pred, variant.dann, variant.cadd, variant.is_at_omim, variant.is_at_hgmd, variant.hgmd_entries])
        return response
def cache(key_prefix, timeout=300):
    """
    Decorator for caching functions.

    Returns the function value from cache, or the function if cache disabled
    """
    if timeout is None:
        timeout = 300

    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            key = "%s::%s" % (settings.REDIS_KEYPREFIX, key_prefix)
            if os.environ.get('PYBOSSA_REDIS_CACHE_DISABLED') is None:
                output = sentinel.slave.get(key)
                if output:
                    return pickle.loads(output)
                output = f(*args, **kwargs)
                sentinel.master.setex(key, timeout, pickle.dumps(output))
                return output
            output = f(*args, **kwargs)
            sentinel.master.setex(key, timeout, pickle.dumps(output))
            return output
        return wrapper
    return decorator
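# Usage is the standard decorator-factory pattern; something like the
# following (the function name and key prefix are illustrative):
@cache(key_prefix='front_page_stats', timeout=600)
def front_page_stats():
    # this body runs only on a cache miss; the pickled result is reused after
    return {'tasks': 123, 'users': 45}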
def testOldPickles(self):
    # Ensure that applications serializing pytz instances as pickles
    # have no troubles upgrading to a new pytz release. These pickles
    # were created with pytz2006j
    east1 = pickle.loads(_byte_string(
        "cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
        "I0\nS'EST'\np3\ntRp4\n."
    ))
    east2 = pytz.timezone('US/Eastern')
    self.assertTrue(east1 is east2)

    # Confirm changes in name munging between 2006j and 2007c cause
    # no problems.
    pap1 = pickle.loads(_byte_string(
        "cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
        "\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
    pap2 = pytz.timezone('America/Port-au-Prince')
    self.assertTrue(pap1 is pap2)

    gmt1 = pickle.loads(_byte_string(
        "cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
    gmt2 = pytz.timezone('Etc/GMT+10')
    self.assertTrue(gmt1 is gmt2)
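# The identity assertions hold because pytz pickles a timezone as a call to
# pytz._p, which routes through pytz.timezone() and hands back the interned
# singleton. Assuming pytz is installed, a round trip today behaves the same way:
import pickle
import pytz

eastern = pytz.timezone('US/Eastern')
assert pickle.loads(pickle.dumps(eastern)) is eastern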
def step4():
    key_vec = pickle.loads(open("key_vec.pkl", "rb").read())
    vecs = []
    for ev, vec in enumerate(key_vec.values()):
        x = np.array(vec)
        if np.isnan(x).any():
            # print(vec)
            continue
        vecs.append(x)
    vecs = np.array(vecs)
    kmeans = KMeans(n_clusters=128, init='k-means++', n_init=10, max_iter=300,
                    tol=0.0001, precompute_distances='auto', verbose=0,
                    random_state=None, copy_x=True, n_jobs=1)
    print("now fitting...")
    kmeans.fit(vecs)
    open("kmeans.model", "wb").write(pickle.dumps(kmeans))
    for p in kmeans.predict(vecs):
        print(p)
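# A minimal, self-contained version of the same train/dump/load/predict cycle
# on synthetic data. Note that precompute_distances and n_jobs were removed
# from KMeans in newer scikit-learn releases, so this sketch sticks to
# arguments that still exist:
import pickle

import numpy as np
from sklearn.cluster import KMeans

rng = np.random.RandomState(0)
vecs = rng.rand(200, 16)  # stand-in for the fastText key vectors

kmeans = KMeans(n_clusters=8, init='k-means++', n_init=10, max_iter=300, random_state=0)
kmeans.fit(vecs)

with open("kmeans.model", "wb") as f:  # same dump-to-file pattern as step4
    f.write(pickle.dumps(kmeans))
with open("kmeans.model", "rb") as f:
    restored = pickle.loads(f.read())

assert (restored.predict(vecs) == kmeans.predict(vecs)).all()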
def _step5(arr):
    kmeans = pickle.loads(open("kmeans.model", "rb").read())
    key, lines, tipe = arr
    print(key)
    open("./tmp/tmp.{tipe}.{key}.txt".format(tipe=tipe, key=key), "w").write("\n".join(lines))
    res = os.popen("./fasttext print-sentence-vectors ./models/model.bin < tmp/tmp.{tipe}.{key}.txt".format(tipe=tipe, key=key)).read()
    w = open("tmp/tmp.{tipe}.{key}.json".format(tipe=tipe, key=key), "w")
    for line in res.split("\n"):
        try:
            vec = list(map(float, line.split()[-100:]))
        except:
            print(line)
            print(res)
            continue
        x = np.array(vec)
        if np.isnan(x).any():
            continue
        cluster = kmeans.predict([vec])
        txt = line.split()[:-100]
        obj = {"txt": txt, "cluster": cluster.tolist()}
        data = json.dumps(obj, ensure_ascii=False)
        w.write(data + "\n")
def step6():
    for tipe in ["news", "nocturne"]:
        names = [name for name in reversed(sorted(glob.glob("./tmp/tmp.{tipe}.*.json".format(tipe=tipe))))]
        size = len(names)
        for en, name in enumerate(names):
            term_clus = {}
            oss = []
            with open(name) as f:
                for line in f:
                    line = line.strip()
                    oss.append(json.loads(line))
            for i in range(3, len(oss) - 3):
                terms = set(oss[i]["txt"])
                for term in terms:
                    if term_clus.get(term) is None:
                        term_clus[term] = [0.0] * 128
                    cd = [oss[i + d]["cluster"][0] for d in [-3, -2, -1, 1, 2, 3]]
                    for c in cd:
                        term_clus[term][c] += 1.0
            print("{}/{} finished {}".format(en, size, name))
        open("{tipe}.term_clus.pkl".format(tipe=tipe), "wb").write(pickle.dumps(term_clus))
def step7():
    term_clus = pickle.loads(open("./news.term_clus.pkl", "rb").read())
    term_clus = {term: clus for term, clus in filter(lambda x: sum(x[1]) > 30, term_clus.items())}
    for term in term_clus.keys():
        vec = term_clus[term]
        acc = sum(vec)
        term_clus[term] = list(map(lambda x: x / acc, vec))
    open("news.term_dist.pkl", "wb").write(pickle.dumps(term_clus))

    term_clus = pickle.loads(open("./nocturne.term_clus.pkl", "rb").read())
    term_clus = {term: clus for term, clus in filter(lambda x: sum(x[1]) > 30, term_clus.items())}
    for term in term_clus.keys():
        vec = term_clus[term]
        acc = sum(vec)
        term_clus[term] = list(map(lambda x: x / acc, vec))
    open("nocturne.term_dist.pkl", "wb").write(pickle.dumps(term_clus))
def setProtected(self, name):
    '''
    Set a name in the table to be protected from removal
    because of limits.
    '''
    # generate the filepath to the protected values list
    filePath = pathJoin(self.path, 'protected.table')
    # check if the path exists
    if pathExists(filePath):
        # read the protected list from the file
        protectedList = unpickle(loadFile(filePath))
    else:
        # create a new, empty protected list
        protectedList = []
    # append the new value to the list
    protectedList.append(name)
    # pickle the protected list for storage
    protectedList = pickle(protectedList)
    # write the changes back to the protected list
    writeFile(filePath, protectedList)
################################################################################
def loadValue(self, name):
    '''
    Loads a saved value and returns it.
    '''
    # find the file path in the names array
    if name in self.names:
        filePath = self.namePaths[name]
    else:
        return False
    # check if the path exists
    if pathExists(filePath):
        # load the data
        fileData = loadFile(filePath)
    else:
        # return false if the value does not exist
        return False
    # unpickle the filedata
    fileData = unpickle(fileData)
    debug.add('loading value ' + str(name), fileData)
    # returns the value of a table stored on disk
    return fileData
################################################################################
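# setProtected and loadValue lean on module-level pickle/unpickle wrappers
# (note the table module shadows the stdlib name). A plausible reading of
# those helpers, assumed here rather than taken from the project:
import pickle as _pickle

def pickle(obj):
    # serialize an object to bytes for writeFile()
    return _pickle.dumps(obj)

def unpickle(data):
    # inverse of pickle(); returns the original object
    return _pickle.loads(data)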
def __init__(self):
    '''
    This object loads up the youtubeTV session for
    cache functionality and automated work.
    '''
    # create the video cache for this session
    self.cache = tables.table(_datadir + 'cache/')
    # cache timer
    self.timer = tables.table(_datadir + 'timer/')
    # load the channels cache
    self.channelCache = tables.table(_datadir + 'channelCache/')
    # playlist cache
    self.playlistCache = tables.table(_datadir + 'playlistCache/')
    # channel blocklist
    self.channelBlocklist = tables.table(_datadir + 'channelBlocklist/')
    # webpage cache
    self.webCache = tables.table(_datadir + 'webCache/')
def loadConfig(self, config, blankType):
    '''
    Used for loading objects from xbmc settings that were
    stored using Python's pickle functionality.

    :return: array/dict
    '''
    # open the pickled settings using xbmc's settings API
    configObject = addonObject.getSetting(config)
    #configObject = xbmcplugin.getSettings(_handle, config)
    #configObject = self.loadFile(config)
    if bool(configObject):
        # if config exists, load up the config into channels
        return pickle.loads(configObject)
    else:
        if blankType == 'array':
            # otherwise create a blank array
            return []
        elif blankType == 'dict':
            # return a blank dict
            return {}
        else:
            # default to returning an array
            return []
def _receive_message(self, name, socket):
    """
    Receives an incoming message from a ZMQ socket.

    :param str name: The name of the end point the message originates from.
    :param zmq.sugar.socket.Socket socket: The ZMQ socket.
    """
    buffer = socket.recv()
    if buffer[:1] == b'{':
        tmp = jsonapi.loads(buffer)
        if tmp['type'] not in self._json_message_creators:
            raise ValueError("Received JSON message with unknown message type '{0}'".format(tmp['type']))
        message = self._json_message_creators[tmp['type']](tmp)
    else:
        message = pickle.loads(buffer)
        """:type: enarksh.message.Message.Message"""
    message.message_source = name

    if message.message_type not in self.__message_types:
        raise ValueError("Received message with unknown message type '{0}'".format(message.message_type))

    event = self.__message_types[message.message_type]
    event.fire(message)

# ------------------------------------------------------------------------------------------------------------------
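# The dispatch above hinges on a one-byte sniff: a buffer that starts with '{'
# is JSON, anything else is a pickle. A standalone sketch of that framing
# decision without ZMQ (the payloads are illustrative):
import json
import pickle

def decode_frame(buffer):
    if buffer[:1] == b'{':
        return json.loads(buffer)   # JSON messages are self-describing text
    return pickle.loads(buffer)     # everything else is a pickled object

assert decode_frame(b'{"type": "ping"}') == {"type": "ping"}
assert decode_frame(pickle.dumps({"type": "pong"})) == {"type": "pong"}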
def test_pickle(self):
    # ticket #135
    import pickle
    tz11 = FixedOffsetTimezone(60)
    tz12 = FixedOffsetTimezone(120)
    for proto in [-1, 0, 1, 2]:
        tz21, tz22 = pickle.loads(pickle.dumps([tz11, tz12], proto))
        self.assertEqual(tz11, tz21)
        self.assertEqual(tz12, tz22)

    tz11 = FixedOffsetTimezone(60, name='foo')
    tz12 = FixedOffsetTimezone(120, name='bar')
    for proto in [-1, 0, 1, 2]:
        tz21, tz22 = pickle.loads(pickle.dumps([tz11, tz12], proto))
        self.assertEqual(tz11, tz21)
        self.assertEqual(tz12, tz22)
def test_no_conn_curs(self):
    from psycopg2._json import _get_json_oids
    oid, array_oid = _get_json_oids(self.conn)

    old = psycopg2.extensions.string_types.get(114)
    olda = psycopg2.extensions.string_types.get(199)

    def loads(s):
        return psycopg2.extras.json.loads(s, parse_float=Decimal)

    try:
        new, newa = psycopg2.extras.register_json(
            loads=loads, oid=oid, array_oid=array_oid)
        curs = self.conn.cursor()
        curs.execute("""select '{"a": 100.0, "b": null}'::json""")
        data = curs.fetchone()[0]
        self.assertTrue(isinstance(data['a'], Decimal))
        self.assertEqual(data['a'], Decimal('100.0'))
    finally:
        psycopg2.extensions.string_types.pop(new.values[0])
        psycopg2.extensions.string_types.pop(newa.values[0])
        if old:
            psycopg2.extensions.register_type(old)
        if olda:
            psycopg2.extensions.register_type(olda)
def test_register_default(self):
    curs = self.conn.cursor()

    def loads(s):
        return psycopg2.extras.json.loads(s, parse_float=Decimal)

    psycopg2.extras.register_default_json(curs, loads=loads)

    curs.execute("""select '{"a": 100.0, "b": null}'::json""")
    data = curs.fetchone()[0]
    self.assertTrue(isinstance(data['a'], Decimal))
    self.assertEqual(data['a'], Decimal('100.0'))

    curs.execute("""select array['{"a": 100.0, "b": null}']::json[]""")
    data = curs.fetchone()[0]
    self.assertTrue(isinstance(data[0]['a'], Decimal))
    self.assertEqual(data[0]['a'], Decimal('100.0'))
def test_register_globally(self):
    old = psycopg2.extensions.string_types.get(3802)
    olda = psycopg2.extensions.string_types.get(3807)
    try:
        new, newa = psycopg2.extras.register_json(self.conn,
            loads=self.myloads, globally=True, name='jsonb')
        curs = self.conn.cursor()
        curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
        self.assertEqual(curs.fetchone()[0], {'a': 100.0, 'b': None, 'test': 1})
    finally:
        psycopg2.extensions.string_types.pop(new.values[0])
        psycopg2.extensions.string_types.pop(newa.values[0])
        if old:
            psycopg2.extensions.register_type(old)
        if olda:
            psycopg2.extensions.register_type(olda)
def test_loads(self):
    json = psycopg2.extras.json

    def loads(s):
        return json.loads(s, parse_float=Decimal)

    psycopg2.extras.register_json(self.conn, loads=loads, name='jsonb')
    curs = self.conn.cursor()
    curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
    data = curs.fetchone()[0]
    self.assertTrue(isinstance(data['a'], Decimal))
    self.assertEqual(data['a'], Decimal('100.0'))

    # sure we are not mangling json too?
    curs.execute("""select '{"a": 100.0, "b": null}'::json""")
    data = curs.fetchone()[0]
    self.assertTrue(isinstance(data['a'], float))
    self.assertEqual(data['a'], 100.0)
def test_register_default(self):
    curs = self.conn.cursor()

    def loads(s):
        return psycopg2.extras.json.loads(s, parse_float=Decimal)

    psycopg2.extras.register_default_jsonb(curs, loads=loads)

    curs.execute("""select '{"a": 100.0, "b": null}'::jsonb""")
    data = curs.fetchone()[0]
    self.assertTrue(isinstance(data['a'], Decimal))
    self.assertEqual(data['a'], Decimal('100.0'))

    curs.execute("""select array['{"a": 100.0, "b": null}']::jsonb[]""")
    data = curs.fetchone()[0]
    self.assertTrue(isinstance(data[0]['a'], Decimal))
    self.assertEqual(data[0]['a'], Decimal('100.0'))