def test_adapt_dumps(self):
    """Json must honour a caller-supplied ``dumps`` (here Decimal-aware)."""
    from psycopg2.extras import json, Json

    class DecimalEncoder(json.JSONEncoder):
        # Turn Decimal into a plain float; defer everything else.
        def default(self, obj):
            if isinstance(obj, Decimal):
                return float(obj)
            return json.JSONEncoder.default(self, obj)

    def dumps(obj):
        return json.dumps(obj, cls=DecimalEncoder)

    curs = self.conn.cursor()
    value = Decimal('123.45')
    adapted = curs.mogrify("%s", (Json(value, dumps=dumps),))
    self.assertEqual(adapted, b"'123.45'")
Example source code of Python class `dumps()` usage
def setUserprop(self, key, value):
    """
    Store a single user property (pickled) under ``key``.
    Updates the row in place when the key already exists.
    Returns True when the commit succeeded.
    """
    serialized = pickle.dumps(value)
    row = self.session.query(Userprop).filter_by(key=key).first()
    if row is None:
        self.session.add(Userprop(key, serialized))
    else:
        row.value = serialized
    self.session.commit()
    return True
def setSettings(self, settings):
    """
    Store multiple settings (pickled). ``settings`` must be a dict with
    truthy values; raises on bad input. Returns True when done.
    """
    if not isinstance(settings, dict):
        raise Exception("Wrong settings")
    for key, value in settings.items():
        if not value:
            raise Exception("Wrong setting's item")
        row = self.session.query(Setting).filter_by(key=key).first()
        if row is None:
            self.session.add(Setting(key, pickle.dumps(value)))
        else:
            row.value = pickle.dumps(value)
    self.session.commit()
    return True
def setSearch(self, search_obj):
    """
    Store the current search object (pickled), dropping any previously
    stored search rows first. Returns True when done.
    """
    for stale in self.session.query(Search).all():
        self.session.delete(stale)
    self.session.add(Search(pickle.dumps(search_obj)))
    self.session.commit()
    return True
def do_GET(self):
    """Serve coordinator queries: next index (plain text) or a job (pickled)."""
    # Coordinator not running yet: ask the client to retry (202).
    if not COORD.started:
        self.send_response(202)
        self.end_headers()
        return
    path = self.path
    if path.startswith(PREFIX_NEXT_INDEX):
        index = COORD.get_next_index(path[len(PREFIX_NEXT_INDEX):])
        if index >= 0:
            self._send_answer(str(index))
            return
    elif path.startswith(PREFIX_GET_JOB):
        job = COORD.get_job(worker=int(path[len(PREFIX_GET_JOB):]))
        if job:
            # Jobs are arbitrary objects; ship them pickled.
            self._send_answer(pickle.dumps(job))
            return
    self.send_response(404)
    self.end_headers()
def do_GET(self):
    """Serve coordinator queries: next index (plain text) or a job (pickled)."""
    # Coordinator not running yet: ask the client to retry (202).
    if not COORD.started:
        self.send_response(202)
        self.end_headers()
        return
    path = self.path
    if path.startswith(PREFIX_NEXT_INDEX):
        index = COORD.get_next_index(path[len(PREFIX_NEXT_INDEX):])
        if index >= 0:
            self._send_answer(str(index))
            return
    elif path.startswith(PREFIX_GET_JOB):
        job = COORD.get_job(worker=int(path[len(PREFIX_GET_JOB):]))
        if job:
            # Jobs are arbitrary objects; ship them pickled.
            self._send_answer(pickle.dumps(job))
            return
    self.send_response(404)
    self.end_headers()
def do_GET(self):
    """Serve coordinator queries: next index (plain text) or a job (pickled)."""
    # Coordinator not running yet: ask the client to retry (202).
    if not COORD.started:
        self.send_response(202)
        self.end_headers()
        return
    path = self.path
    if path.startswith(PREFIX_NEXT_INDEX):
        index = COORD.get_next_index(path[len(PREFIX_NEXT_INDEX):])
        if index >= 0:
            self._send_answer(str(index))
            return
    elif path.startswith(PREFIX_GET_JOB):
        job = COORD.get_job(worker=int(path[len(PREFIX_GET_JOB):]))
        if job:
            # Jobs are arbitrary objects; ship them pickled.
            self._send_answer(pickle.dumps(job))
            return
    self.send_response(404)
    self.end_headers()
def apply(self, callback, route):
    """Bottle plugin hook: wrap *callback* so that dict results (and dict
    bodies of an ``HTTPResponse``) are serialized to JSON with the plugin's
    ``json_dumps``. Any other result type is passed through untouched.

    Returns the original callback unchanged when JSON support is disabled.
    """
    dumps = self.json_dumps
    if not self.json_dumps:
        return callback

    def wrapper(*a, **ka):
        try:
            rv = callback(*a, **ka)
        except HTTPResponse as resp:
            rv = resp
        if isinstance(rv, dict):
            # Attempt to serialize, raises exception on failure
            json_response = dumps(rv)
            # Set content type only if serialization successful
            response.content_type = 'application/json'
            return json_response
        elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
            rv.body = dumps(rv.body)
            rv.content_type = 'application/json'
            return rv
        # BUG FIX: the wrapper previously fell off the end and returned
        # None for any non-dict result (strings, lists, file bodies, ...).
        return rv
    return wrapper
def resolve(request):
    """Resolve a sensu event for the POSTed entity and notify listeners."""
    mimetype = 'application/json'
    data = {}
    if request.method == 'POST' and request.POST.get('entity'):
        data['entity'] = request.POST['entity']
        data['status'] = 0
        data['timestamp'] = datetime.datetime.now().timestamp()
        data['output'] = "resolve request by %s" % (request.user.username)
        data['result'] = 'okay'
        sensu_event_resolve(data)
        # Fan the resolution out to the background alert channel.
        Channel('background-alert').send(dict(data))
    return HttpResponse(json.dumps(data), mimetype)
#@login_required(login_url=reverse_lazy('login'))
def rmResult(request):
    """Delete a sensu check result ("client:check") via the sensu API."""
    mimetype = 'application/json'
    data = {}
    if request.method == 'POST' and request.POST.get('entity'):
        data['client'], data['check'] = request.POST['entity'].split(':')
        data['status'] = 0
        data['timestamp'] = datetime.datetime.now().timestamp()
        if sensu_result_delete(data):
            data['result'] = 'okay'
        else:
            data['result'] = 'failed deleting result using sensu api for: ' + request.POST['entity']
    return HttpResponse(json.dumps(data), mimetype)
#@login_required(login_url=reverse_lazy('login'))
def user_settings(request):
    """Persist the POSTed contact form for the current user.

    Returns HTTP 200 on success, 409 with the serialized form errors when
    validation or saving fails.
    """
    logger.debug('settings view triggered by %s' % (request.user.username))
    form = ContactForm(request.POST, instance=Contact.objects.get(user=request.user.id))
    # BUG FIX: ``form.is_valid`` (no parentheses) is a bound method and thus
    # always truthy, so invalid forms were being saved. Call it.
    if form.is_valid():
        try:
            form.save()
            return HttpResponse('Done', status=200)
        except Exception:  # narrowed from a bare except
            return HttpResponse(json.dumps(form.errors), status=409)
    else:
        return HttpResponse(json.dumps(form.errors), status=409)
    # NOTE: an unreachable render() call referencing an undefined ``data``
    # variable was removed here — both branches above already return.
def entity_history(request):
    """Return up to 100 stored history records for the POSTed entity."""
    data = []
    mimetype = 'application/json'
    if request.method == 'POST' and request.POST.get('entity'):
        entity = request.POST['entity']
        logger.debug("view entity_history user: %s entity: %s" % (request.user.username, entity))
        # History entries live pickled in a redis list, newest first.
        data = [pickle.loads(item)
                for item in r.lrange('history_entity_' + entity, 0, 100)]
    return HttpResponse(json.dumps(data), mimetype)
#@login_required(login_url=reverse_lazy('login'))
def check_config(request):
    """Return the cached sensu check definition for the POSTed entity."""
    mimetype = 'application/json'
    data = {}
    if request.method == 'POST' and request.POST.get('entity'):
        entity = request.POST['entity']
        # The split doubles as validation of the "client:check" format.
        client_name, check_name = entity.split(':')
        data = cache.get('check_' + entity)
    return HttpResponse(json.dumps(data), mimetype)
#@login_required(login_url=reverse_lazy('login'))
def sensu_event_resolve(message):
    """POST a resolve request for ``message['entity']`` to the sensu API.

    ``message['entity']`` must look like ``"<client>:<check>"``. Logs and
    re-raises on any failure.
    """
    API_URL = settings.SENSU_API_URL + '/resolve'
    # Basic-auth header built from the configured API credentials.
    userAndPass = base64.b64encode(str.encode("%s:%s" % (settings.SENSU_API_USER, settings.SENSU_API_PASSWORD))).decode("ascii")
    headers = { 'X_REQUESTED_WITH' :'XMLHttpRequest',
                'Accept': 'application/json, text/javascript, */*; q=0.01',
                'Authorization' : 'Basic %s' % userAndPass }
    try:
        client_name, check_name = message['entity'].split(':')
        post_params = {"client": client_name, "check": check_name}
        request = http.request('POST', API_URL, body=json.dumps(post_params), headers=headers)
        response = request.status
        if response == 202:
            # 202 means accepted; release the connection back to the pool.
            request.release_conn()
        else:
            logger.error('response: %s' % str(response))
    except Exception:
        # BUG FIX: narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer intercepted before the re-raise.
        logger.error("sensu_event_resolve failed resolving entity: %s" % message['entity'])
        raise
def command_openweb(current_buffer, args):
    """Briefly swap the buffer title for the channel's web URL.

    Invoked twice: directly (args is None) to show the URL and schedule a
    restore, then via the timer callback (args set) to put the topic back.
    """
    trigger = w.config_get_plugin('trigger_value')
    if trigger == "0":
        return w.WEECHAT_RC_OK
    if args is None:
        channel = channels.find(current_buffer)
        url = "{}/messages/{}".format(channel.server.server_buffer_name, channel.name)
        topic = w.buffer_get_string(channel.channel_buffer, "title")
        w.buffer_set(channel.channel_buffer, "title", "{}:{}".format(trigger, url))
        w.hook_timer(1000, 0, 1, "command_openweb", json.dumps({"topic": topic, "buffer": current_buffer}))
    else:
        # TODO: fix this dirty hack because i don't know the right way to send multiple args.
        # (timer callbacks get the payload in current_buffer)
        data = json.loads(current_buffer)
        channel_buffer = channels.find(data["buffer"]).channel_buffer
        w.buffer_set(channel_buffer, "title", data["topic"])
    return w.WEECHAT_RC_OK
def run(self):
    """
    Entry point for the live plotting when started as a separate process. This starts the loop
    """
    self.entity_name = current_process().name
    plogger.info("Starting new thread %s", self.entity_name)
    # Subscribe over zmq to the publisher on localhost; the topic is the
    # pickled variable name, matching what the publisher sends.
    self.context = zmq.Context()
    self.socket = self.context.socket(zmq.SUB)
    self.socket.connect("tcp://localhost:%d" % self.port)
    topic = pickle.dumps(self.var_name, protocol=pickle.HIGHEST_PROTOCOL)
    self.socket.setsockopt(zmq.SUBSCRIBE, topic)
    plogger.info("Subscribed to topic %s on port %d", self.var_name, self.port)
    # Run subclass-specific plot setup before entering the update loop.
    self.init(**self.init_kwargs)
    # Reference to animation required so that GC doesn't clean it up.
    # WILL NOT work if you remove it!!!!!
    # See: http://matplotlib.org/api/animation_api.html
    ani = animation.FuncAnimation(self.fig, self.loop, interval=100)
    self.plt.show()
def _make_flow(request, scopes, return_url=None):
    """Creates a Web Server Flow"""
    # Generate a CSRF token to prevent malicious requests.
    csrf_token = hashlib.sha256(os.urandom(1024)).hexdigest()
    request.session[_CSRF_KEY] = csrf_token
    # The token rides along in the OAuth2 ``state`` parameter together
    # with the post-auth return URL.
    state = json.dumps({
        'csrf_token': csrf_token,
        'return_url': return_url,
    })
    flow = client.OAuth2WebServerFlow(
        client_id=django_util.oauth2_settings.client_id,
        client_secret=django_util.oauth2_settings.client_secret,
        scope=scopes,
        state=state,
        redirect_uri=request.build_absolute_uri(
            urlresolvers.reverse("google_oauth:callback")))
    # Stash the pickled flow in the session, keyed by the CSRF token, so
    # the callback view can retrieve exactly this flow.
    flow_key = _FLOW_KEY.format(csrf_token)
    request.session[flow_key] = pickle.dumps(flow)
    return flow
def _testStruct(self, Struct, values = None, delattrs = ()):
    """Pack/unpack round-trip check for ``Struct``.

    Also pickles the schema itself in between and asserts old and new
    schemas remain mutually compatible.

    :param values: attribute values to set before packing (default: none)
    :param delattrs: attribute names to delete before packing
    """
    # BUG FIX: ``values = {}`` was a shared mutable default argument.
    if values is None:
        values = {}
    schema = mapped_struct.Schema.from_typed_slots(Struct)
    x = Struct()
    for k in delattrs:
        delattr(x, k)
    for k,v in values.iteritems():
        setattr(x, k, v)
    px = schema.pack(x)
    # Round-trip the schema through pickle; both directions must stay
    # compatible with the original.
    old_schema = schema
    schema = cPickle.loads(cPickle.dumps(schema, 2))
    self.assertTrue(old_schema.compatible(schema))
    self.assertTrue(schema.compatible(old_schema))
    dx = schema.unpack(px)
    for k in Struct.__slots__:
        if k in values or k not in delattrs:
            self.assertEquals(getattr(dx, k, None), getattr(x, k, None))
        else:
            # Deleted, never-set attributes must stay absent after unpack.
            self.assertFalse(hasattr(dx, k))
def testPackPickleUnpack(self):
    """A schema whose sub-schema registration is lost must auto-re-register
    it when unpickled, and still unpack data packed by the original."""
    # hack - unregister subschema (can't register twice)
    mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
    mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)
    for TEST_VALUES in self.TEST_VALUES:
        # re-register subschema
        mapped_struct.mapped_object.register_schema(self.SubStruct, self.subschema, '}')
        x = self.Struct(**{k:v for k,v in TEST_VALUES.iteritems()})
        pschema = cPickle.dumps(self.schema)
        # Unregister schema to force the need for auto-register
        mapped_struct.mapped_object.TYPE_CODES.pop(self.SubStruct,None)
        mapped_struct.mapped_object.OBJ_PACKERS.pop('}',None)
        pschema = cPickle.loads(pschema)
        # The unpickled schema must unpack data packed by the original one.
        dx = pschema.unpack(self.schema.pack(x))
        for k,v in TEST_VALUES.iteritems():
            self.assertTrue(hasattr(dx, k))
            self.assertEqual(getattr(dx, k), v)
        # Attributes absent from the test values must stay absent.
        for k in self.Struct.__slots__:
            if k not in TEST_VALUES:
                self.assertFalse(hasattr(dx, k))
def save(self):
"""Serialize this mesh to a string appropriate for disk storage"""
import pickle
if self._faces is not None:
names = ['_vertexes', '_faces']
else:
names = ['_vertexesIndexedByFaces']
if self._vertexColors is not None:
names.append('_vertexColors')
elif self._vertexColorsIndexedByFaces is not None:
names.append('_vertexColorsIndexedByFaces')
if self._faceColors is not None:
names.append('_faceColors')
elif self._faceColorsIndexedByFaces is not None:
names.append('_faceColorsIndexedByFaces')
state = dict([(n,getattr(self, n)) for n in names])
return pickle.dumps(state)
def save(self):
"""Serialize this mesh to a string appropriate for disk storage"""
import pickle
if self._faces is not None:
names = ['_vertexes', '_faces']
else:
names = ['_vertexesIndexedByFaces']
if self._vertexColors is not None:
names.append('_vertexColors')
elif self._vertexColorsIndexedByFaces is not None:
names.append('_vertexColorsIndexedByFaces')
if self._faceColors is not None:
names.append('_faceColors')
elif self._faceColorsIndexedByFaces is not None:
names.append('_faceColorsIndexedByFaces')
state = dict([(n,getattr(self, n)) for n in names])
return pickle.dumps(state)
def distinct(self):
    "Return copy of table having only distinct rows."
    copy = type(self)()
    copy.__columns = self.__columns
    copy.__data_area = self.__data_area.copy()
    copy.__row_index = self.__row_index
    keep = set()
    seen = set()
    for row in copy.__data_area:
        # Pickle the tuple of cell values so possibly-unhashable contents
        # can still act as a set member for duplicate detection.
        fingerprint = pickle.dumps(tuple(copy.__data_area[row][index]
                                         for index, name, data_type
                                         in self.__columns))
        if fingerprint not in seen:
            seen.add(fingerprint)
            keep.add(row)
    # Drop every row whose fingerprint was already seen earlier.
    for row in tuple(copy.__data_area):
        if row not in keep:
            del copy.__data_area[row]
    return copy
def get(self, key):
    """Block member ``key``: append their num to the current member's
    pickled block list, persist, then redirect home."""
    go = '/'
    member = CheckAuth(self)
    if member:
        member = Member.get(member.id)
        one = Member.get(key)
        if one:
            # A member cannot block themselves.
            if one.num != member.num:
                try:
                    # SECURITY NOTE: unpickling stored data — safe only as
                    # long as ``blocked`` is written exclusively by this app.
                    blocked = pickle.loads(member.blocked.encode('utf-8'))
                except Exception:
                    # BUG FIX: narrowed from a bare ``except:``; any
                    # decode/unpickle failure means "no blocks yet".
                    blocked = []
                if len(blocked) == 0:
                    blocked = []
                if one.num not in blocked:
                    blocked.append(one.num)
                    member.blocked = pickle.dumps(blocked)
                    member.sync()
                    store.commit() #jon add
                    memcache.set('Member_' + str(member.num), member, 86400)
    self.redirect(go)
def get(self, key):
    """Unblock member ``key``: remove their num from the current member's
    pickled block list, persist, then redirect home."""
    go = '/'
    member = CheckAuth(self)
    if member:
        member = Member.get(member.id)
        one = Member.get(key)
        if one:
            # A member cannot unblock themselves.
            if one.num != member.num:
                try:
                    # SECURITY NOTE: unpickling stored data — safe only as
                    # long as ``blocked`` is written exclusively by this app.
                    blocked = pickle.loads(member.blocked.encode('utf-8'))
                except Exception:
                    # BUG FIX: narrowed from a bare ``except:``; any
                    # decode/unpickle failure means "no blocks yet".
                    blocked = []
                if len(blocked) == 0:
                    blocked = []
                if one.num in blocked:
                    blocked.remove(one.num)
                    member.blocked = pickle.dumps(blocked)
                    member.sync()
                    store.commit() #jon add
                    memcache.set('Member_' + str(member.num), member, 86400)
    self.redirect(go)
def news():
    """Get news from different ATOM RSS feeds."""
    import feedparser
    from pybossa.core import sentinel
    from pybossa.news import get_news, notify_news_admins, FEED_KEY
    try:
        import cPickle as pickle
    except ImportError: # pragma: no cover
        import pickle
    urls = ['https://github.com/pybossa/pybossa/releases.atom',
            'http://scifabric.com/blog/all.atom.xml']
    if current_app.config.get('NEWS_URL'):
        urls += current_app.config.get('NEWS_URL')
    notify = False
    # Each feed gets its own score slot in the sorted set.
    for score, url in enumerate(urls):
        feed = feedparser.parse(url)
        stored = get_news(score)
        # Store the newest entry when nothing is cached yet or it changed.
        if not stored or stored[0]['updated'] != feed.entries[0]['updated']:
            sentinel.master.zadd(FEED_KEY, float(score),
                                 pickle.dumps(feed.entries[0]))
            notify = True
    if notify:
        notify_news_admins()
def cache(key_prefix, timeout=300):
    """
    Decorator for caching functions.
    Returns the function value from cache, or the function if cache disabled
    """
    if timeout is None:
        timeout = 300
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            key = "%s::%s" % (settings.REDIS_KEYPREFIX, key_prefix)
            # Only the lookup is skipped when caching is disabled via the
            # environment; the store below still runs (matches the original
            # behavior, which wrote through even when disabled).
            if os.environ.get('PYBOSSA_REDIS_CACHE_DISABLED') is None:
                output = sentinel.slave.get(key)
                if output:
                    return pickle.loads(output)
            # CLEANUP: compute/store/return was duplicated in two branches;
            # this single path is behaviorally identical.
            output = f(*args, **kwargs)
            sentinel.master.setex(key, timeout, pickle.dumps(output))
            return output
        return wrapper
    return decorator
def listCursors(self, backupkey=None, cursor=None, kind=None, *args, **kwargs):
    """Walk ``kind`` in hops of ~1000 entities and return the list of
    intermediate cursors, pickled and hex-encoded."""
    assert safeStringComparison(backupKey, backupkey)
    c = datastore_query.Cursor(urlsafe=cursor) if cursor else None
    r = []
    # At most ten hops per call; stop early when the cursor stops moving.
    for _ in range(10):
        q = datastore.Query(kind, cursor=c)
        q.Get(1, offset=999)
        newCursor = q.GetCursor()
        if newCursor == c:
            break
        c = newCursor
        r.append(c.urlsafe())
    return (pickle.dumps({"cursors": r}).encode("HEX"))
def exportDb(self, cursor=None, backupkey=None, endcursor=None, kind=None, *args, **kwargs):
    """Return up to five entities of ``kind`` between the two cursors,
    pickled and hex-encoded together with the cursor to continue from."""
    global backupKey
    assert safeStringComparison(backupKey, backupkey)
    c = datastore_query.Cursor(urlsafe=cursor) if cursor else None
    endCursor = datastore_query.Cursor(urlsafe=endcursor) if endcursor else None
    q = datastore.Query(kind, cursor=c, end_cursor=endCursor)
    logging.error((cursor, backupkey, endcursor, kind))
    r = [self.genDict(res) for res in q.Run(limit=5)]
    return (pickle.dumps({"cursor": str(q.GetCursor().urlsafe()), "values": r}).encode("HEX"))
def storeEntry(self, modul, entry):
    """Validate ``entry``'s datastore key against ``modul`` and this app,
    then push the pickled, hex-encoded entry to the remote endpoint."""
    if not entry:
        return
    id = entry["id"]
    k = Key(encoded=id)
    if k.kind() != modul:
        raise ValueError("Invalid key! Key's kind should be %s, is %s" % (modul,k.kind()))
    if k.app() != self.getAppId():
        raise ValueError("Invalid key! Key's app should be %s, is %s" % (self.getAppId(),k.app()))
    try:
        # Re-encode unicode values as UTF-8 byte strings before pickling.
        # (Loop variable renamed so it no longer shadows the Key ``k``.)
        t = {}
        for name, value in entry.items():
            if isinstance(value, unicode):
                value = value.encode("UTF-8")
            t[name] = value
        self.ns.request("/dbtransfer/storeEntry", {"e":pickle.dumps(t).encode("HEX"),"key":self.importKey})
    except:
        print("------")
        print( entry )
        raise
def step3():
    """Compute fastText sentence vectors for ./dataset/bind.txt in chunks
    of 10000 lines and pickle the sentence->vector map to key_vec.pkl."""
    key_vec = {}
    maxx = 12505807   # total line count of the dataset
    size = 10000      # lines fed to fastText per invocation
    for i in range(size, maxx, size):
        print(i, maxx)
        res = os.popen("head -n {i} ./dataset/bind.txt | tail -n {size} | ./fasttext print-sentence-vectors ./models/model.bin".format(i=i, size=size)).read()
        for line in res.split("\n"):
            if line == "":
                continue
            fields = line.split()
            # The trailing 100 fields are the vector; the rest is the text.
            vec = list(map(float, fields[-100:]))
            key = " ".join(fields[:-100])
            # Keep only the first vector seen for each sentence.
            if key_vec.get(key) is None:
                key_vec[key] = vec
    # BUG FIX: the output file handle was never closed (data could be lost
    # on interpreter teardown); use a context manager.
    with open("key_vec.pkl", "wb") as out:
        out.write(pickle.dumps(key_vec))