def dnshostencode(data, zone):
"""
    encodes the data as a DNS-transmittable hostname (hex, 0-9A-F)
    :param data: data to encode
    :param zone: DNS zone to append at the end
:return: encoded form
"""
# TODO: sending 0-9A-Z would be better
res = b""
sdata = base64.b16encode(data)
    # insert a dot every 60 characters to stay under the 63-byte DNS label limit
for i in range(len(sdata)):
res += sdata[i:i+1]
if (i+1) % 60 == 0 and (i+1) < len(sdata):
res += b'.'
return res + b'.' + zone.encode('utf-8') + b'.'
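# A decoding counterpart for reference (a sketch, not from the original
# source): strip the trailing zone, drop the label separators, then
# base16-decode. Assumes an ASCII zone and input produced by dnshostencode.
import base64

def dnshostdecode(hostname, zone):
    """decodes a hostname produced by dnshostencode back into raw bytes"""
    labels = hostname[:-(len(zone) + 2)]  # drop b'.' + zone + b'.'
    return base64.b16decode(labels.replace(b'.', b''))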
def dnsip6encode(data):
"""
encodes the data as a single IPv6 address
:param data: data to encode
:return: encoded form
"""
if len(data) != 16:
print_error("dnsip6encode: data is more or less than 16 bytes, cannot encode")
return None
res = b''
reslen = 0
for i in range(len(data)):
res += base64.b16encode(data[i:i+1])
reslen += 1
if reslen % 2 == 0:
res += b':'
return res[:-1]
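# Usage sketch (Python 3): sixteen bytes encode to one uncompressed
# IPv6-style address, four hex digits per group.
assert dnsip6encode(bytes(range(16))) == \
    b'0001:0203:0405:0607:0809:0A0B:0C0D:0E0F'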
def parse_tree(blob, path=b''):
"""Parses a git tree"""
res = []
leng, _, tree = blob.partition(b'\0')
    if isinstance(path, str):
path = path.encode('utf-8')
if not tree:
return Result.Err('Invalid tree')
while len(tree) > 0:
mode, _, tree = tree.partition(b' ')
name, _, tree = tree.partition(b'\0')
sha = b16encode(tree[0:20]).decode('utf-8').lower()
tree = tree[20:]
if not name or not sha:
            return Result.Err('Invalid tree entry')
is_dir = mode[0:1] != b'1'
res.append((is_dir, sha, path + b'/' + name))
return Result.Ok(res)
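# parse_tree expects a Result type that is not shown here; a minimal
# stand-in (an assumption, not the original class) would be:
class Result:
    def __init__(self, ok, value=None):
        self.ok = ok        # True for Ok, False for Err
        self.value = value  # parsed entries, or an error message

    @classmethod
    def Ok(cls, value=None):
        return cls(True, value)

    @classmethod
    def Err(cls, value=None):
        return cls(False, value)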
def main():
    # reverse the input, apply the BASE substitution table, then base16-encode
    submission = raw_input("flag> ")
low_sub = submission.lower()
temp_list = list(low_sub)
temp_list.reverse()
flag = "".join(temp_list)
new_string = ""
for char in flag:
if char in BASE:
new_char = BASE[char]
else:
new_char = char
new_string += new_char
to_encode = new_string.encode('utf8')
to_give = base64.b16encode(to_encode)
#to_give = base64.b32encode(to_encode)
#to_give = base64.b64encode(to_encode)
print to_give
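# A decoding sketch for the routine above (assumes BASE is the
# character-substitution dict used by main, defined elsewhere):
# undo the base16 step, invert the table, then re-reverse the string.
def decode(to_give):
    decoded = base64.b16decode(to_give).decode('utf8')
    inverse = dict((v, k) for k, v in BASE.items())
    restored = "".join(inverse.get(c, c) for c in decoded)
    return restored[::-1]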
def main():
modulus = int("A9625641EE2E381A4A887EA3A8AE900DD1E27FD9184F2E01EA486A234909A22475F34B034B48E9B6FB407895B9EA66694A4951B032FBE60F11DFB1D145BB765F66B72FC7E0E1F938455620F141A5A85B2EF049F55C5B33E351943506889F826D6936DEC658B8926A26273C7B8E8AC9AF7123D106515F76ED37FC7C513AC19DA9",16)
e = 65537
p = int("00:b6:a0:fc:62:b3:d4:cd:68:06:cb:54:e5:0e:65:08:4a:49:b3:49:12:40:95:cb:9c:2c:de:40:f9:31:b5:6c:d9:1d:a6:80:e4:0c:ca:75:a0:0b:2c:4a:38:8b:5d:d9:15:87:71:6e:fd:c7:cf:5a:5b:90:88:37:83:b4:f3:fd:3d".replace(":",""),16)
assert modulus % p == 0
    q = modulus // p
d = modinv(e,(p-1)*(q-1))
a = ""
c = b"HPDng5QcgvfhFuVLfMrs1+kg3cyo2GEGwxdTICOsAYgpTA2qNuTBHetrfmVMDr1n2Iu1D7lTqSGY/eH/ZGbryyBr3MuOoc+R7m2ipxl1cL/5e/UaPP0rplohjCxduEDil7WlrLfwFR8GCGhF1usgV9gzo3Ok12v8J4veejWPe4k="
import base64
hexString = base64.b16encode(base64.standard_b64decode(c))
print(hexString)
c = int(hexString,16)
msg = pow(c,d,modulus)
print(msg)
    import binascii
    # pad to an even number of hex digits; hex(msg)[2:] can be odd-length
    hex_msg = '%x' % msg
    if len(hex_msg) % 2:
        hex_msg = '0' + hex_msg
    print(binascii.unhexlify(hex_msg))
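# modinv is not defined in this snippet; a standard extended-Euclid
# implementation it presumably relies on (a sketch):
def modinv(a, m):
    """Return x such that (a * x) % m == 1."""
    old_r, r = a % m, m
    old_s, s = 1, 0
    while r:
        q = old_r // r
        old_r, r = r, old_r - q * r
        old_s, s = s, old_s - q * s
    if old_r != 1:
        raise ValueError('a is not invertible modulo m')
    return old_s % m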
def getStatus(self, param, value=""):
if self.whichCam() == "gpcontrol":
try:
req=urllib.request.urlopen("http://10.5.5.9/gp/gpControl/status", timeout=5)
data = req.read()
encoding = req.info().get_content_charset('utf-8')
json_data = json.loads(data.decode(encoding))
return json_data[param][value]
        except (HTTPError, URLError) as error:
            print("Error code:" + str(error.code) + "\nMake sure the connection to the WiFi camera is still active.")
            return ""
        except timeout:
            print("HTTP Timeout\nMake sure the connection to the WiFi camera is still active.")
            return ""
else:
response = urllib.request.urlopen("http://10.5.5.9/camera/sx?t=" + self.getPassword(), timeout=5).read()
        response_hex = base64.b16encode(response).decode('utf-8')
return str(response_hex[param[0]:param[1]])
def findDuplicateImages(photo, site=None):
"""Find duplicate images.
Take the photo, calculate the SHA1 hash and ask the MediaWiki api
for a list of duplicates.
TODO: Add exception handling.
@param photo: Photo
@type photo: io.BytesIO
@param site: Site to search for duplicates.
Defaults to using Wikimedia Commons if not supplied.
@type site: APISite or None
"""
if not site:
site = pywikibot.Site('commons', 'commons')
hashObject = hashlib.sha1()
hashObject.update(photo.getvalue())
return site.getFilesFromAnHash(base64.b16encode(hashObject.digest()))
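# Usage sketch (the file name is hypothetical; needs a configured
# pywikibot install):
import io

with open('photo.jpg', 'rb') as f:
    duplicates = findDuplicateImages(io.BytesIO(f.read()))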
def findDuplicateImages(filename):
'''
Takes the photo, calculates the SHA1 hash and asks the mediawiki api for a list of duplicates.
TODO: Add exception handling, fix site thing
'''
    result = []
    hashObject = hashlib.sha1()
    with open(filename, 'rb') as f:
        hashObject.update(f.read())
sha1Hash = base64.b16encode(hashObject.digest())
params = {
'action' : 'query',
'list' : 'allimages',
'aisha1' : sha1Hash,
'aiprop' : '',
}
data = query.GetData(params, site=wikipedia.getSite(), useAPI = True, encodeTitle = False)
for image in data['query']['allimages']:
result.append(image['name'])
return result
def findDuplicateImages(photo=None, site=None):
    '''
    Takes the photo, calculates the SHA1 hash and asks the mediawiki api for a list of duplicates.
    TODO: Add exception handling, fix site thing
    '''
    if site is None:
        site = wikipedia.getSite()
result = []
hashObject = hashlib.sha1()
hashObject.update(photo.getvalue())
sha1Hash = base64.b16encode(hashObject.digest())
params = {
'action' : 'query',
'list' : 'allimages',
'aisha1' : sha1Hash,
'aiprop' : '',
}
    data = query.GetData(params, site=site, useAPI=True, encodeTitle=False)
for image in data['query']['allimages']:
result.append(image['name'])
return result
def get_gravatar(self):
"""Creates gravatar URL from email address."""
m = hashlib.md5()
m.update(self.user.email())
encoded_hash = base64.b16encode(m.digest()).lower()
return 'https://gravatar.com/avatar/{}?s=200'.format(encoded_hash)
def hex_key(self):
"""
secret key encoded as hexadecimal string
"""
return bascii_to_str(base64.b16encode(self.key)).lower()
def _oso(self, ob):
ser = self.dumps(ob)
try:
o2 = self.loads(ser)
assert ob == o2, '%r != %r from %s' % (ob, o2, base64.b16encode(ser))
except Exception as e:
sys.stderr.write('failure on buf len={0} {1!r} ob={2!r} {3!r}; {4}\n'.format(len(ser), hexstr(ser), ob, ser, e))
raise
def _oso_bytearray(self, ob):
ser = self.dumps(ob)
try:
o2 = self.loads(bytearray(ser))
assert ob == o2, '%r != %r from %s' % (ob, o2, base64.b16encode(ser))
except Exception as e:
sys.stderr.write('failure on buf len={0} {1!r} ob={2!r} {3!r}; {4}\n'.format(len(ser), hexstr(ser), ob, ser, e))
raise
def _oso(self, ob):
ser = self.tx.dumps(ob)
try:
o2 = self.tx.loads(ser)
assert ob == o2, '%r != %r from %s' % (ob, o2, base64.b16encode(ser))
except Exception as e:
sys.stderr.write('failure on buf len={0} {1!r} ob={2!r} {3!r}; {4}\n'.format(len(ser), hexstr(ser), ob, ser, e))
raise
def _check(row, decoded):
    global anyerr  # assumed module-level failure flag; without this the assignments below are local no-ops
    cbdata = base64.b64decode(row['cbor'])
if cloads is not None:
cb = cloads(cbdata)
if cb != decoded:
anyerr = True
sys.stderr.write('expected {0!r} got {1!r} c failed to decode cbor {2}\n'.format(decoded, cb, base64.b16encode(cbdata)))
cb = pyloads(cbdata)
if cb != decoded:
anyerr = True
sys.stderr.write('expected {0!r} got {1!r} py failed to decode cbor {2}\n'.format(decoded, cb, base64.b16encode(cbdata)))
def _check_foo(row, checkf):
    global anyerr  # assumed module-level failure flag
    cbdata = base64.b64decode(row['cbor'])
if cloads is not None:
cb = cloads(cbdata)
if not checkf(cb):
anyerr = True
            sys.stderr.write('check failed on {0!r}; c-decoded cbor {1}\n'.format(cb, base64.b16encode(cbdata)))
cb = pyloads(cbdata)
if not checkf(cb):
anyerr = True
        sys.stderr.write('check failed on {0!r}; py-decoded cbor {1}\n'.format(cb, base64.b16encode(cbdata)))
def signature(self):
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
sig = md5()
if self.start:
sig.update(self.start.encode('latin-1'))
if self.prec:
sig.update(''.join([''.join(p) for p in self.prec]).encode('latin-1'))
if self.tokens:
sig.update(' '.join(self.tokens).encode('latin-1'))
for f in self.pfuncs:
if f[3]:
sig.update(f[3].encode('latin-1'))
except (TypeError, ValueError):
pass
digest = base64.b16encode(sig.digest())
if sys.version_info[0] >= 3:
digest = digest.decode('latin-1')
return digest
# -----------------------------------------------------------------------------
# validate_modules()
#
# This method checks to see if there are duplicated p_rulename() functions
# in the parser module file. Without this function, it is really easy for
# users to make mistakes by cutting and pasting code fragments (and it's a real
# bugger to try and figure out why the resulting parser doesn't work). Therefore,
# we just do a little regular expression pattern matching of def statements
# to try and detect duplicates.
# -----------------------------------------------------------------------------
def b32_to_b16(s):
return b16encode(b32decode(s.upper()))
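# Round-trip sketch: base32 text in, uppercase base16 out.
from base64 import b16encode, b32decode, b32encode

assert b32_to_b16(b32encode(b'\x01\xab').decode()) == b'01AB'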
def test_b16encode(self):
eq = self.assertEqual
eq(base64.b16encode(b'\x01\x02\xab\xcd\xef'), b'0102ABCDEF')
eq(base64.b16encode(b'\x00'), b'00')
self.assertRaises(TypeError, base64.b16encode, "")
def K(a):
# def I has been replaced with hashlib.md5.digest
# def rstr2hex has been replaced with b16encode
# note that def rstr2hex outputs in lower
digest = hashlib.md5(a + H[1:]).digest()
return base64.b16encode(digest).replace('=', '')
def get_symkey(link):
md5 = hashlib.md5()
md5.update(link.encode("utf-8"))
return base64.b16encode(md5.digest()).decode("utf-8")
# this pattern matches substrings of reserved and non-ASCII characters
def test_b16encode(self):
eq = self.assertEqual
eq(base64.b16encode('\x01\x02\xab\xcd\xef'), '0102ABCDEF')
eq(base64.b16encode('\x00'), '00')
# Non-bytes
eq(base64.b16encode(bytearray('\x01\x02\xab\xcd\xef')), '0102ABCDEF')
def clean_objects():
for vector in set(shared.objects):
if shared.objects[vector].is_expired():
with shared.objects_lock:
del shared.objects[vector]
logging.debug('Deleted expired object: {}'.format(base64.b16encode(vector).decode()))
def __repr__(self):
return 'type: header, command: "{}", payload_length: {}, payload_checksum: {}'\
.format(self.command.decode(), self.payload_length, base64.b16encode(self.payload_checksum).decode())