def push(self, environ):
    ct = environ.get('CONTENT_TYPE')
    stream = environ['wsgi.input']
    content = stream.read(int(environ['CONTENT_LENGTH']))
    if ct == 'application/json':
        try:
            task = json.loads(content if PY2 else content.decode('utf-8'))
        except Exception:
            return Error('400 BAD REQUEST', 'invalid-encoding', 'Can\'t decode body')
    elif ct == 'application/x-msgpack':
        try:
            task = msgpack.loads(content, encoding='utf-8')
        except Exception:
            return Error('400 BAD REQUEST', 'invalid-encoding', 'Can\'t decode body')
    else:
        return Error('400 BAD REQUEST', 'invalid-content-type',
                     'Content must be json or msgpack')

    if not task.get('queue'):
        return Error('400 BAD REQUEST', 'bad-params', 'queue required')

    if not task.get('name'):
        return Error('400 BAD REQUEST', 'bad-params', 'name required')

    return {'id': self.manager.push(**task).id}
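A minimal sketch of exercising the handler above directly with a hand-built WSGI environ; the handler instance name and the example task fields are assumptions for illustration only.

import io
import msgpack

body = msgpack.dumps({'queue': 'normal', 'name': 'send-email'})  # hypothetical task
environ = {
    'CONTENT_TYPE': 'application/x-msgpack',
    'CONTENT_LENGTH': str(len(body)),
    'wsgi.input': io.BytesIO(body),
}
result = handler.push(environ)  # expected: {'id': ...} on success, an Error otherwise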
def _read_header(self, header_path):
    """
    Reads the header file located at `header_path` and loads it from its
    msgpack format into the self.header dict.

    :param bytes/string header_path: The path to the header file
    :return: The loaded dict from the header file
    :rtype: Dict
    """
    with open(header_path, mode='rb') as f:
        # TODO: Use custom Exception (invalid or corrupt header)
        try:
            header = msgpack.loads(f.read())
        except ValueError as e:
            raise e
    return header
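A hedged round-trip sketch for the helper above; the temporary path and the header contents are made up for illustration.

import msgpack

header = {b'version': 100, b'keys': [b'k1', b'k2']}  # hypothetical header dict
with open('/tmp/header.msgpack', 'wb') as f:
    f.write(msgpack.dumps(header))
# reader._read_header('/tmp/header.msgpack') should then return the same dict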
def test_build_header_prealpha(self):
    enc_keys = [random(148), random(148), random(148)]
    version = 100
    header, length = self.client._build_header(enc_keys, version=version)
    self.assertEqual(len(header), length)
    try:
        msgpack.loads(header)
    except Exception as E:
        self.fail("Failed to unpack header:\n{}".format(E))
    self.assertIn((3).to_bytes(4, byteorder='big'), header)
    for key in enc_keys:
        self.assertIn(key, header)
    self.assertIn(version.to_bytes(4, byteorder='big'), header)
def test_read_header_prealpha(self):
    enc_keys = [random(148), random(148), random(148)]
    version = 100
    header, length = self.client._build_header(enc_keys, version=version)
    self.assertEqual(len(header), length)
    try:
        msgpack.loads(header)
    except Exception as E:
        self.fail("Failed to unpack header: {}".format(E))
    for key in enc_keys:
        self.assertIn(key, header)
    self.assertIn(version.to_bytes(4, byteorder='big'), header)
    header = self.client._read_header(header)
    self.assertEqual(int, type(header[0]))
    self.assertEqual(100, header[0])
    self.assertEqual(list, type(header[1]))
    self.assertEqual(3, len(header[1]))
    for key in header[1]:
        self.assertIn(key, enc_keys)
async def test_base_api_item_get_msgpack(http_client, base_url, test_data):
    resp = await http_client.fetch(base_url + '/test/api_test_model/%s' % test_data[0].id,
                                   headers={'Accept': 'application/x-msgpack'})
    assert resp.code == 200
    import msgpack
    data = msgpack.loads(resp.body)
    print(data)
    assert data[b'success']
    assert data[b'errors'] == []
    for k, v in TEST_DATA[0].items():
        if isinstance(v, datetime.datetime):
            assert data[b'result'][k.encode()] == v.isoformat().encode()
        elif isinstance(v, (bool, int)):
            assert data[b'result'][k.encode()] == v
        else:
            assert data[b'result'][k.encode()] == v.encode()
def main():
    context = zmq.Context()
    socket = zmq.Socket(context, zmq.SUB)
    monitor = socket.get_monitor_socket()
    socket.connect(ipc_sub_url)
    while True:
        status = recv_monitor_message(monitor)
        if status['event'] == zmq.EVENT_CONNECTED:
            break
        elif status['event'] == zmq.EVENT_CONNECT_DELAYED:
            pass
    print('connected')
    socket.subscribe('pupil')
    while True:
        topic = socket.recv_string()
        payload = serializer.loads(socket.recv(), encoding='utf-8')
        print(topic, payload)
def recv(self):
    '''Recv a message with topic, payload.

    Topic is a utf-8 encoded string. Returned as a unicode object.
    Payload is a msgpack-serialized dict. Returned as a python dict.

    Any additional message frames will be added as a list
    in the payload dict with key: '__raw_data__'.
    '''
    topic = self.socket.recv_string()
    payload = serializer.loads(self.socket.recv(), encoding='utf-8')
    extra_frames = []
    while self.socket.get(zmq.RCVMORE):
        extra_frames.append(self.socket.recv())
    if extra_frames:
        payload['__raw_data__'] = extra_frames
    return topic, payload
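For context, a sketch of the sending side of the same convention; the function name and the use_bin_type choice are assumptions, not part of the class above. The topic goes out as a plain string frame, the payload as a msgpack-encoded dict frame.

import zmq
import msgpack as serializer

def send(socket, topic, payload):
    # topic frame first, payload frame second (multipart message)
    socket.send_string(topic, flags=zmq.SNDMORE)
    socket.send(serializer.dumps(payload, use_bin_type=True))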
def get(self):
    try:
        packet = self._socket2.recv(copy=False)
        sample = msgpack.loads(packet)
        self._retry_count = 0
    except zmq.error.Again:
        sample = None
        self._retry_count += 1
    if sample is not None:
        identifier = sample.pop('__process_id__')
        if identifier not in self._conn1_recv_count:
            self._conn1_recv_count[identifier] = 0
            self._conn2_send_count[identifier] = 0
        self._conn1_recv_count[identifier] = max(self._conn1_recv_count[identifier],
                                                 sample.pop('__recv_count__'))
        self._conn2_send_count[identifier] = max(self._conn2_send_count[identifier],
                                                 sample.pop('__send_count__'))
        self._conn2_recv_count += 1
        self.status = WAITING
    return sample
def get(self):
    try:
        packet = self._socket2.recv(copy=False)
        sample = msgpack.loads(packet)
        self._retry_count = 0
    except zmq.error.Again:
        self._retry_count += 1
        sample = None
    if sample is not None:
        identifier = sample.pop('__process_id__')
        if identifier not in self._conn1_recv_count:
            self._conn1_recv_count[identifier] = 0
            self._conn2_send_count[identifier] = 0
        self._conn1_recv_count[identifier] = max(self._conn1_recv_count[identifier],
                                                 sample.pop('__recv_count__'))
        self._conn2_send_count[identifier] = max(self._conn2_send_count[identifier],
                                                 sample.pop('__send_count__'))
        self._conn2_recv_count += 1
        self.status = WAITING
    return sample
def __init__(self, socket, sflags=0):
    self.sflags = sflags
    self.socket = socket
    self.dumps = pickledumps
    self.loads = pickle.loads

    if sflags & SFLAG_MSGPACK:
        if not msgpack:
            raise Exception('Missing "msgpack" python module ( http://visi.kenshoto.com/viki/Msgpack )')

        def msgpackloads(b):
            return msgpack.loads(b, **loadargs)

        def msgpackdumps(b):
            return msgpack.dumps(b, **dumpargs)

        self.dumps = msgpackdumps
        self.loads = msgpackloads

    if sflags & SFLAG_JSON:
        self.dumps = jsondumps
        self.loads = jsonloads
def recvMessage(self):
    """
    Returns a tuple of mtype, objname, and data.

    This method is *NOT* responsible for re-connection, because there
    is no context on the server side for what to send on re-connect.
    Client-side uses of the CobraSocket object should use cobraTransaction
    to ensure re-transmission of the request on reception errors.
    """
    s = self.socket
    hdr = self.recvExact(12)
    mtype, nsize, dsize = struct.unpack("<III", hdr)
    name = self.recvExact(nsize)
    data = self.loads(self.recvExact(dsize))
    # NOTE: for errors while using msgpack, we must send only the str
    if mtype == COBRA_ERROR and self.sflags & (SFLAG_MSGPACK | SFLAG_JSON):
        data = CobraErrorException(data)
    return (mtype, name, data)
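The send side implied by the 12-byte header above might look like the sketch below; the method name and socket attribute are assumptions, only the "<III" header layout (mtype, name size, data size) comes from the code.

import struct

def sendMessage(self, mtype, objname, data):
    body = self.dumps(data)
    # little-endian: message type, name length, payload length
    hdr = struct.pack("<III", mtype, len(objname), len(body))
    self.socket.sendall(hdr + objname + body)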
def msgpack_lz4_to_series(data):
    try:
        import msgpack
        import lz4
    except ImportError:
        logging.info('To load lz4-msgpacked data, '
                     'install packages "python-msgpack" and "lz4"')
        raise
    content = msgpack.loads(lz4.decompress(data))
    series_load = lambda d: pd.Series(
        data=d['values'],
        index=d['index'] if d['index'][-1] <= 1e9
              else pd.DatetimeIndex(d['index']),
        name=d['id']
    )
    seria = list(map(series_load, content))
    return seria
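A hedged sketch of producing input this loader accepts; the field names 'values', 'index' and 'id' come from the function above, while the plain-numeric-index assumption and the legacy lz4.compress call simply mirror its decode path.

import lz4
import msgpack

def series_to_msgpack_lz4(series_list):
    # assumes plain numeric indexes and native-Python values
    payload = [{'values': s.values.tolist(), 'index': s.index.tolist(), 'id': s.name}
               for s in series_list]
    return lz4.compress(msgpack.dumps(payload))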
def task_names(tasks):
    return [msgpack.loads(r) for r in tasks]
def stask_names(tasks):
    return [msgpack.loads(r[0].partition(b':')[2]) for r in tasks]
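A hedged illustration of the storage layout this helper assumes: each scheduled entry is the queue name, a b':' separator, then the msgpack-packed task (the queue and task names here are made up).

import msgpack

entry = b'normal:' + msgpack.dumps({'name': 'send-email'})
print(stask_names([(entry, 1500000000.0)]))  # -> [{'name': 'send-email'}] (bytes keys on older msgpack)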
def test_msgpack_404(app):
    res = Request.blank('/not-found', headers={'Accept': 'application/x-msgpack'}).get_response(app)
    assert res.status_code == 404
    assert msgpack.loads(res.body, encoding='utf-8') == {'message': 'Not found', 'error': 'not-found'}
def task_names(tasks):
    return [msgpack.loads(r)['name'] for r in tasks]
def get_queue(self, queue, offset=0, limit=100):
    items = self.client.lrange(rqname(queue), offset, offset + limit - 1)
    return [loads(r, encoding='utf-8') for r in items]
def get_schedule(self, offset=0, limit=100):
    items = [(ts, r.partition(b':'))
             for r, ts in self.client.zrange(SCHEDULE_KEY, offset,
                                             offset + limit - 1, withscores=True)]
    return [(ts, q if PY2 else q.decode('utf-8'), loads(r, encoding='utf-8'))
            for ts, (q, _, r) in items]
def get(self, id):
    value = self.client.get(id)
    if value is not None:
        return loads(value, encoding='utf-8')
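The natural write counterpart of this result getter would look roughly like the sketch below; the method name, TTL handling and use_bin_type choice are assumptions, not part of the snippet above.

from msgpack import dumps

def set(self, id, value, ttl=None):
    # store the msgpack-packed result, optionally with an expiry in seconds
    self.client.set(id, dumps(value, use_bin_type=True), ex=ttl)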
def json_decode(data_type, serialized_obj, caller_permissions=None,
                alias_validators=None, strict=True, old_style=False):
    """Performs the reverse operation of json_encode.

    Args:
        data_type (Validator): Validator for serialized_obj.
        serialized_obj (str): The JSON string to deserialize.
        caller_permissions (list): The list of raw-string caller permissions
            with which to serialize.
        alias_validators (Optional[Mapping[bv.Validator, Callable[[], None]]]):
            Custom validation functions. These must raise bv.ValidationError on
            failure.
        strict (bool): If strict, then unknown struct fields will raise an
            error, and unknown union variants will raise an error even if a
            catch-all field is specified. strict should only be used by a
            recipient of serialized JSON if it's guaranteed that its Stone
            specs are at least as recent as the senders it receives messages
            from.

    Returns:
        The returned object depends on the input data_type.
            - Boolean -> bool
            - Bytes -> bytes
            - Float -> float
            - Integer -> long
            - List -> list
            - Map -> dict
            - Nullable -> None or its wrapped type.
            - String -> unicode (PY2) or str (PY3)
            - Struct -> An instance of its definition attribute.
            - Timestamp -> datetime.datetime
            - Union -> An instance of its definition attribute.
    """
    try:
        deserialized_obj = json.loads(serialized_obj)
    except ValueError:
        raise bv.ValidationError('could not decode input as JSON')
    else:
        return json_compat_obj_decode(
            data_type, deserialized_obj, caller_permissions=caller_permissions,
            alias_validators=alias_validators, strict=strict, old_style=old_style)
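A hedged usage sketch for the decoder above; `bv` is assumed to be the Stone validators module referenced in the docstring, and its import path differs between the stone and dropbox packages.

from dropbox import stone_validators as bv  # one possible location of the validators

assert json_decode(bv.String(), '"hello"') == 'hello'          # primitive round trip
assert json_decode(bv.Nullable(bv.String()), 'null') is None   # Nullable -> None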
def msgpack_decode(
        data_type, serialized_obj, alias_validators=None, strict=True):
    # We decode everything as utf-8 because we want all object keys to be
    # unicode. Otherwise, we need to do a lot more refactoring to make
    # json/msgpack share the same code. We expect byte arrays to fail
    # decoding, but when they don't, we have to convert them to bytes.
    deserialized_obj = msgpack.loads(
        serialized_obj, encoding='utf-8', unicode_errors='ignore')
    return msgpack_compat_obj_decode(
        data_type, deserialized_obj, alias_validators, strict)
def unpack(msg):
    return msgpack.loads(msg)
def json_decode(
        data_type, serialized_obj, alias_validators=None, strict=True,
        old_style=False):
    """Performs the reverse operation of json_encode.

    Args:
        data_type (Validator): Validator for serialized_obj.
        serialized_obj (str): The JSON string to deserialize.
        alias_validators (Optional[Mapping[bv.Validator, Callable[[], None]]]):
            Custom validation functions. These must raise bv.ValidationError on
            failure.
        strict (bool): If strict, then unknown struct fields will raise an
            error, and unknown union variants will raise an error even if a
            catch-all field is specified. strict should only be used by a
            recipient of serialized JSON if it's guaranteed that its Stone
            specs are at least as recent as the senders it receives messages
            from.

    Returns:
        The returned object depends on the input data_type.
            - Boolean -> bool
            - Bytes -> bytes
            - Float -> float
            - Integer -> long
            - List -> list
            - Nullable -> None or its wrapped type.
            - String -> unicode (PY2) or str (PY3)
            - Struct -> An instance of its definition attribute.
            - Timestamp -> datetime.datetime
            - Union -> An instance of its definition attribute.
    """
    try:
        deserialized_obj = json.loads(serialized_obj)
    except ValueError:
        raise bv.ValidationError('could not decode input as JSON')
    else:
        return json_compat_obj_decode(
            data_type, deserialized_obj, alias_validators, strict, old_style)
def msgpack_decode(
        data_type, serialized_obj, alias_validators=None, strict=True):
    # We decode everything as utf-8 because we want all object keys to be
    # unicode. Otherwise, we need to do a lot more refactoring to make
    # json/msgpack share the same code. We expect byte arrays to fail
    # decoding, but when they don't, we have to convert them to bytes.
    deserialized_obj = msgpack.loads(
        serialized_obj, encoding='utf-8', unicode_errors='ignore')
    return msgpack_compat_obj_decode(
        data_type, deserialized_obj, alias_validators, strict)
def __getitem__(self, key):
    with self.db.begin(write=False) as tx:
        result = tx.get(key)
        if result is None:
            raise KeyError(key)
        else:
            return msgpack.loads(result)
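A matching write path for this LMDB-backed mapping might look like the sketch below; the method name and use_bin_type flag are assumptions, only the self.db transaction pattern comes from the code above.

import msgpack

def __setitem__(self, key, value):
    with self.db.begin(write=True) as tx:
        # pack the value and store it under the given byte key
        tx.put(key, msgpack.dumps(value, use_bin_type=True))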
def decrypt(self, edata, path=None, owner=None):
    """
    Decrypt data encrypted by its owner. If the owner != ourselves, a
    re-encryption request is automatically submitted. The function
    automatically splits out encrypted symmetric keys.

    :param bytes edata: Encrypted data
    :param tuple(str) path: Path to the data or its identifier
    :param bytes owner: If the path is None, owner can be used to identify
        the re-encryption key. The owner is specified by his pubkey
    :return: Unencrypted data
    :rtype: bytes
    """
    enc_file = BytesIO(edata)
    header_length = int.from_bytes(enc_file.read(4), byteorder='big')
    header = enc_file.read(header_length)
    version, enc_keys = self._read_header(header)
    ciphertext = msgpack.loads(enc_file.read())
    if version < 1000:
        valid_key = None
        for enc_key in enc_keys:
            dec_key = self.decrypt_key(enc_key, path=path)
            if len(dec_key) == 32:
                valid_key = dec_key
                break
        plaintext = self.decrypt_bulk(ciphertext, valid_key)
        return plaintext
def reencrypt(self, rekey, emsg):
    rk, epriv = msgpack.loads(rekey)
    remsg = super(PRE, self).reencrypt(rk, emsg)
    return msgpack.dumps([2, epriv, remsg])  # type 2 emsg
def decrypt(self, priv, emsg, padding=True):
    # This is non-optimal b/c of double-deserialization
    # but this cipher is for development/tests, not production
    # so be it
    emsg_l = msgpack.loads(emsg)
    if emsg_l[0] == 2:
        _, epriv_to, emsg = emsg_l
        priv_to = self.decrypt(priv, epriv_to)
        priv = priv_to
    return super(PRE, self).decrypt(
        convert_priv(priv), emsg, padding=padding)
def __call__(self, splittable, return_remainder=False, msgpack_remainder=False):
    if not any((return_remainder, msgpack_remainder)) and len(self) != len(splittable):
        raise ValueError(
            "Wrong number of bytes to constitute message types {} - need {}, got {} \n Did you mean to return the remainder?".format(
                self.message_types, len(self), len(splittable)))
    if len(self) > len(splittable):
        raise ValueError(
            "Not enough bytes to constitute message types {} - need {}, got {}".format(self.message_types,
                                                                                        len(self),
                                                                                        len(splittable)))
    cursor = 0
    message_objects = []
    for message_type in self.message_types:
        message_class, message_length = self.get_message_meta(message_type)
        expected_end_of_object_bytes = cursor + message_length
        bytes_for_this_object = splittable[cursor:expected_end_of_object_bytes]
        message = message_class(bytes_for_this_object)
        message_objects.append(message)
        cursor = expected_end_of_object_bytes
    remainder = splittable[cursor:]
    if msgpack_remainder:
        message_objects.append(msgpack.loads(remainder))
    elif return_remainder:
        message_objects.append(remainder)
    return message_objects
def from_rest_payload(cls, kfrag_hrac, rest_payload):
    payload_splitter = BytestringSplitter(Signature, PublicKey)
    signature, bob_pubkey_sig, (receipt_bytes, packed_pfrags) = payload_splitter(rest_payload,
                                                                                 msgpack_remainder=True)
    pfrags = [PFrag(p) for p in msgpack.loads(packed_pfrags)]
    verified = signature.verify(receipt_bytes, bob_pubkey_sig)
    if not verified:
        raise ValueError("This doesn't appear to be from Bob.")
    bob = Bob.from_pubkey_sig_bytes(bob_pubkey_sig)
    return cls(bob, kfrag_hrac, pfrags, receipt_bytes, signature)