def __repr__(self):
    """Human-readable summary: command name, payload length, hex checksum."""
    checksum_hex = base64.b16encode(self.payload_checksum).decode()
    return '{}, payload_length: {}, payload_checksum: {}'.format(
        self.command.decode(), self.payload_length, checksum_hex)
# Collected Python examples of base64.b16encode() usage.
def __repr__(self):
    """Human-readable summary of a version message's fields."""
    nonce_hex = base64.b16encode(self.nonce).decode()
    return 'version, protocol_version: {}, services: {}, host: {}, port: {}, nonce: {}, user_agent: {}'.format(
        self.protocol_version, self.services, self.host, self.port,
        nonce_hex, self.user_agent)
def __repr__(self):
    """Human-readable summary: the inventory vector as uppercase hex."""
    vector_hex = base64.b16encode(self.vector).decode()
    return 'object, vector: {}'.format(vector_hex)
def _bytes_to_int(bts):
    """Interpret *bts* as a big-endian unsigned integer.

    Returns 0 for empty input; the unguarded int(base64.b16encode(b''), 16)
    would raise ValueError because int('') has no digits to parse.
    """
    if not bts:
        return 0
    return int(base64.b16encode(bts), 16)
def test_b16encode(self):
eq = self.assertEqual
eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF')
eq(base64.b16encode(b'\x00'), b'00')
# Non-bytes
eq(base64.b16encode(bytearray(b'\x01\x02\xab\xcd\xef')), b'0102ABCDEF')
self.assertRaises(TypeError, base64.b16encode, "")
def signature(self):
    """Return a hex digest of the grammar pieces (start symbol, precedence,
    tokens, rule docstrings); used to detect stale generated tables."""
    try:
        from hashlib import md5
    except ImportError:  # very old Python 2 without hashlib
        from md5 import md5
    hasher = md5()
    try:
        if self.start:
            hasher.update(self.start.encode('latin-1'))
        if self.prec:
            prec_text = ''.join(''.join(rule) for rule in self.prec)
            hasher.update(prec_text.encode('latin-1'))
        if self.tokens:
            hasher.update(' '.join(self.tokens).encode('latin-1'))
        for func in self.pfuncs:
            if func[3]:
                hasher.update(func[3].encode('latin-1'))
    except (TypeError, ValueError):
        # Malformed pieces: fall back to whatever was hashed so far.
        pass
    digest = base64.b16encode(hasher.digest())
    if sys.version_info[0] >= 3:
        digest = digest.decode('latin-1')
    return digest
# -----------------------------------------------------------------------------
# validate_modules()
#
# This method checks to see if there are duplicated p_rulename() functions
# in the parser module file. Without this function, it is really easy for
# users to make mistakes by cutting and pasting code fragments (and it's a real
# bugger to try and figure out why the resulting parser doesn't work). Therefore,
# we just do a little regular expression pattern matching of def statements
# to try and detect duplicates.
# -----------------------------------------------------------------------------
def signature(self):
    """Digest summarizing the grammar specification, so regenerated
    parser tables can be matched against the spec that produced them."""
    try:
        from hashlib import md5
    except ImportError:  # pre-hashlib Python 2 fallback
        from md5 import md5
    checksum = md5()
    try:
        if self.start:
            checksum.update(self.start.encode('latin-1'))
        if self.prec:
            flattened = [''.join(entry) for entry in self.prec]
            checksum.update(''.join(flattened).encode('latin-1'))
        if self.tokens:
            checksum.update(' '.join(self.tokens).encode('latin-1'))
        for pfunc in self.pfuncs:
            if pfunc[3]:
                checksum.update(pfunc[3].encode('latin-1'))
    except (TypeError, ValueError):
        # Best effort: hash whatever pieces were usable.
        pass
    encoded = base64.b16encode(checksum.digest())
    if sys.version_info[0] >= 3:
        encoded = encoded.decode('latin-1')
    return encoded
# -----------------------------------------------------------------------------
# validate_modules()
#
# This method checks to see if there are duplicated p_rulename() functions
# in the parser module file. Without this function, it is really easy for
# users to make mistakes by cutting and pasting code fragments (and it's a real
# bugger to try and figure out why the resulting parser doesn't work). Therefore,
# we just do a little regular expression pattern matching of def statements
# to try and detect duplicates.
# -----------------------------------------------------------------------------
def generate_confirmation_nonce():
    """Return 32 random bytes encoded as a 64-character Base-16 bytes token.

    Base-16 output is URL-safe, so the token never needs URL encoding.
    """
    random_bytes = urandom(32)
    return b16encode(random_bytes)
def to_ascii(s_bytes, prefix="", encoding="base64"):
    """Return a version-prefixed ASCII representation of the given binary
    string. 'encoding' indicates how to do the encoding, and can be one of:
    * base64
    * base32
    * base16 (or hex)
    This function handles bytes, not bits, so it does not append any trailing
    '=' (unlike standard base64.b64encode). It also lowercases the base32
    output.
    'prefix' will be prepended to the encoded form, and is useful for
    distinguishing the purpose and version of the binary string. E.g. you
    could prepend 'pub0-' to a VerifyingKey string to allow the receiving
    code to raise a useful error if someone pasted in a signature string by
    mistake.

    Raises TypeError if s_bytes is not bytes, and NotImplementedError for
    an unrecognized encoding name.
    """
    # Explicit check instead of `assert`: asserts vanish under `python -O`,
    # which would let non-bytes input fail later with a confusing error.
    if not isinstance(s_bytes, bytes):
        raise TypeError('s_bytes must be bytes, got %r' % (type(s_bytes),))
    if not isinstance(prefix, bytes):
        prefix = prefix.encode('ascii')
    if encoding == "base64":
        s_ascii = base64.b64encode(s_bytes).decode('ascii').rstrip("=")
    elif encoding == "base32":
        s_ascii = base64.b32encode(s_bytes).decode('ascii').rstrip("=").lower()
    elif encoding in ("base16", "hex"):
        s_ascii = base64.b16encode(s_bytes).decode('ascii').lower()
    else:
        # Name the offending value so the caller can tell what went wrong.
        raise NotImplementedError('unknown encoding: %r' % (encoding,))
    return prefix + s_ascii.encode('ascii')
def __init__(self, zmqtype, oag):
    """Create a ZeroMQ endpoint of *zmqtype* associated with *oag*.

    Each instance owns a fresh context/socket pair plus a short random
    identifier (5 random bytes, Base-16 encoded).
    """
    self.zmqtype = zmqtype
    self._oag = oag
    self._ctx = zmq.Context()
    self._ctxsoc = self._ctx.socket(zmqtype)
    self._hash = base64.b16encode(os.urandom(5))
def signature(self):
    """Hash the grammar's defining pieces into a hex string.

    Covers the start symbol, precedence table, token list and rule
    docstrings; a change in any of them changes the signature.
    """
    try:
        from hashlib import md5
    except ImportError:  # ancient Python 2 without hashlib
        from md5 import md5
    h = md5()
    try:
        if self.start:
            h.update(self.start.encode('latin-1'))
        if self.prec:
            h.update(''.join(''.join(p) for p in self.prec).encode('latin-1'))
        if self.tokens:
            h.update(' '.join(self.tokens).encode('latin-1'))
        for entry in self.pfuncs:
            if entry[3]:
                h.update(entry[3].encode('latin-1'))
    except (TypeError, ValueError):
        # Unhashable pieces are simply skipped from this point on.
        pass
    result = base64.b16encode(h.digest())
    if sys.version_info[0] >= 3:
        result = result.decode('latin-1')
    return result
# -----------------------------------------------------------------------------
# validate_modules()
#
# This method checks to see if there are duplicated p_rulename() functions
# in the parser module file. Without this function, it is really easy for
# users to make mistakes by cutting and pasting code fragments (and it's a real
# bugger to try and figure out why the resulting parser doesn't work). Therefore,
# we just do a little regular expression pattern matching of def statements
# to try and detect duplicates.
# -----------------------------------------------------------------------------
def findDuplicateImages(photo = None, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Compute the SHA1 hash of *photo* and ask the MediaWiki API for files
    sharing that hash.
    TODO: Add exception handling, fix site thing
    '''
    digest = hashlib.sha1(photo.getvalue()).digest()
    return site.getFilesFromAnHash(base64.b16encode(digest))
def findDuplicateImages(photo = None, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Ask the MediaWiki API for files whose SHA1 hash matches *photo*'s.
    TODO: Add exception handling, fix site thing
    '''
    sha1sum = hashlib.sha1()
    sha1sum.update(photo.getvalue())
    fingerprint = base64.b16encode(sha1sum.digest())
    return site.getFilesFromAnHash(fingerprint)
def findDuplicateImages(filename, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Compute the SHA1 hash of the file at *filename* and ask the MediaWiki
    API for files sharing that hash.
    TODO: Add exception handling, fix site thing
    '''
    hashObject = hashlib.sha1()
    # `with` guarantees the handle is closed; the original leaked it.
    # read() with no argument reads to EOF, same as the old read(-1).
    with open(filename, 'rb') as f:
        hashObject.update(f.read())
    return site.getFilesFromAnHash(base64.b16encode(hashObject.digest()))
def findDuplicateImages(photo = None, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Return the MediaWiki API's list of files with the same SHA1 hash as
    *photo*.
    TODO: Add exception handling, fix site thing
    '''
    return site.getFilesFromAnHash(
        base64.b16encode(hashlib.sha1(photo.getvalue()).digest()))
def findDuplicateImages(photo = None, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Look up duplicates of *photo* on *site* by SHA1 hash via the
    MediaWiki API.
    TODO: Add exception handling, fix site thing
    '''
    checksum = hashlib.sha1(photo.getvalue())
    return site.getFilesFromAnHash(base64.b16encode(checksum.digest()))
def findDuplicateImages(photo = None, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Compute the SHA1 hash of *photo* and query the MediaWiki API for
    files carrying the same hash.
    TODO: Add exception handling, fix site thing
    '''
    digest = hashlib.sha1(photo.getvalue()).digest()
    return site.getFilesFromAnHash(base64.b16encode(digest))
def findDuplicateImages(photo = None, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Find files on *site* that duplicate *photo*, matching on SHA1 hash
    through the MediaWiki API.
    TODO: Add exception handling, fix site thing
    '''
    sha1sum = hashlib.sha1()
    sha1sum.update(photo.getvalue())
    return site.getFilesFromAnHash(base64.b16encode(sha1sum.digest()))
def findDuplicateImages(photo = None, site = wikipedia.getSite(u'commons', u'commons')):
    '''
    Hash *photo* with SHA1 and ask the MediaWiki API which files share
    that hash.
    TODO: Add exception handling, fix site thing
    '''
    return site.getFilesFromAnHash(
        base64.b16encode(hashlib.sha1(photo.getvalue()).digest()))
def findDuplicateImages(self):
    """
    Find duplicates of the photo.

    Downloads the photo, computes its SHA1 hash, and asks the MediaWiki
    API for all images with that hash, returning their titles without
    namespace.
    TODO: Add exception handling, fix site thing
    """
    sha1sum = hashlib.sha1()
    sha1sum.update(self.downloadPhoto().getvalue())
    fingerprint = base64.b16encode(sha1sum.digest())
    return [page.title(withNamespace=False)
            for page in self.site.allimages(sha1=fingerprint)]