def __init__(self, codestr: str, astdict: AstDict) -> None:
self._astdict = astdict
# Tokenize and create the noop extractor and the position fixer
self._tokens: List[Token] = [Token(*i) for i in tokenize.tokenize(BytesIO(codestr.encode('utf-8')).readline)]
token_lines = _create_tokenized_lines(codestr, self._tokens)
self.noops_sync = NoopExtractor(codestr, token_lines)
self.pos_sync = LocationFixer(codestr, token_lines)
self.codestr = codestr
# This will store a dict mapping nodes to end positions; it is filled
# in on parse()
self._node2endpos = None
self.visit_Global = self.visit_Nonlocal = self._promote_names
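The constructor above builds its token stream with the standard tokenize module. A minimal standalone sketch of that pattern follows; the Token wrapper, NoopExtractor and LocationFixer are project-specific and not assumed here.

import tokenize
from io import BytesIO

def tokenize_source(codestr):
    # tokenize.tokenize() wants a readline callable that yields bytes, so the
    # source text is encoded to UTF-8 and wrapped in a BytesIO first.
    return list(tokenize.tokenize(BytesIO(codestr.encode('utf-8')).readline))

for tok in tokenize_source("x = 1\n"):
    print(tok.type, tok.string)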
Python encode() usage examples
def visit_Bytes(self, node: Node) -> VisitResult:
try:
s = node["s"].decode()
encoding = 'utf8'
except UnicodeDecodeError:
# try with base64
s = encode(node["s"], 'base64').decode().strip()
encoding = 'base64'
node.update({"s": s, "encoding": encoding})
return node
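visit_Bytes falls back to base64 when the raw bytes are not valid UTF-8. A small sketch of that decision using only the stdlib codecs module; the Node/VisitResult types above are project-specific and the helper name here is illustrative.

import codecs

def bytes_to_text(raw):
    # Prefer a plain UTF-8 decode; fall back to a base64 representation for
    # arbitrary binary payloads, recording which encoding was used.
    try:
        return raw.decode('utf-8'), 'utf8'
    except UnicodeDecodeError:
        return codecs.encode(raw, 'base64').decode('ascii').strip(), 'base64'

print(bytes_to_text(b'hello'))         # ('hello', 'utf8')
print(bytes_to_text(b'\xff\xfe\x01'))  # falls back to base64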
def encode(self, input, errors='strict'):
return (input.translate(rot13_map), len(input))
def encode(self, input, final=False):
return input.translate(rot13_map)
def getregentry():
return codecs.CodecInfo(
name='rot-13',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
_is_text_encoding=False,
)
### Map
def rot13(infile, outfile):
outfile.write(codecs.encode(infile.read(), 'rot-13'))
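CPython itself ships a rot-13 codec along the same lines as the registration above; because it is marked as a non-text encoding (_is_text_encoding=False), it is used through codecs.encode()/codecs.decode() rather than str.encode(). A quick usage sketch on Python 3:

import codecs

scrambled = codecs.encode('Hello, World!', 'rot13')
print(scrambled)                          # Uryyb, Jbeyq!
print(codecs.decode(scrambled, 'rot13'))  # round-trips back to Hello, World!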
def bytes_to_hex_str(arg_bytes):
return codecs.encode(arg_bytes,'hex').decode('ascii')
def hex_str_to_bytes(arg_str):
return codecs.decode(arg_str.encode('ascii'),'hex')
def create_42_guid(sensor_id, proc_pid, proc_createtime):
full_guid = codecs.encode(struct.pack('>IIQ', sensor_id, proc_pid, proc_createtime), "hex").decode('ascii')
return '%s-%s-%s-%s-%s' % (full_guid[:8], full_guid[8:12], full_guid[12:16],
full_guid[16:20], full_guid[20:])
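create_42_guid packs three integers big-endian and hex-encodes the result (codecs.encode(..., 'hex') returns bytes on Python 3, which is why the result is decoded to text before slicing). A small sketch of the same pattern with made-up values, using the same UUID-style 8-4-4-4-12 grouping of the 32 hex characters:

import codecs
import struct

# Illustrative values only: a 32-bit sensor id, a 32-bit pid and a 64-bit
# creation timestamp, packed big-endian into 16 bytes (32 hex characters).
packed = struct.pack('>IIQ', 1, 4242, 1500000000)
hexed = codecs.encode(packed, 'hex').decode('ascii')
guid = '-'.join([hexed[:8], hexed[8:12], hexed[12:16], hexed[16:20], hexed[20:]])
print(guid)  # 00000001-0000-1092-0000-000059682f00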
def get(self, name, year='0'):
try:
offset = '0'
if control.setting('bookmarks') != 'true': raise Exception()
idFile = hashlib.md5()
for i in name: idFile.update(str(i))
for i in year: idFile.update(str(i))
idFile = str(idFile.hexdigest())
dbcon = database.connect(control.bookmarksFile)
dbcur = dbcon.cursor()
dbcur.execute("SELECT * FROM bookmark WHERE idFile = '%s'" % idFile)
match = dbcur.fetchone()
self.offset = str(match[1])
dbcon.commit()
if self.offset == '0': raise Exception()
minutes, seconds = divmod(float(self.offset), 60)
hours, minutes = divmod(minutes, 60)
label = '%02d:%02d:%02d' % (hours, minutes, seconds)
label = (control.lang(32502) % label).encode('utf-8')
try: yes = control.dialog.contextmenu([label, control.lang(32501).encode('utf-8'), ])
except: yes = control.yesnoDialog(label, '', '', str(name), control.lang(32503).encode('utf-8'), control.lang(32501).encode('utf-8'))
if yes: self.offset = '0'
return self.offset
except:
return offset
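The idFile used in the query above is an MD5 built from the title and year; under Python 3, hashlib only accepts bytes, so each piece has to be encoded first. A minimal, illustrative sketch of that hashing step (the function and argument names here are made up):

import hashlib

def bookmark_id(name, year='0'):
    # hashlib digests operate on bytes, so the text is encoded to UTF-8.
    digest = hashlib.md5()
    digest.update(name.encode('utf-8'))
    digest.update(year.encode('utf-8'))
    return digest.hexdigest()

print(bookmark_id('Some Movie', '1999'))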
def __init__(self, name, base_encoding, mapping):
self.name = name
self.base_encoding = base_encoding
self.mapping = mapping
self.reverse = {v:k for k,v in mapping.items()}
self.max_len = max(len(v) for v in mapping.values())
self.info = codecs.CodecInfo(name=self.name, encode=self.encode, decode=self.decode)
codecs.register_error(name, self.error)
def encode(self, input, errors='strict'):
assert errors == 'strict'
#return codecs.encode(input, self.base_encoding, self.name), len(input)
# The above line could totally be all we needed, relying on the error
# handling to replace the unencodable Unicode characters with our extended
# byte sequences.
#
# However, there seems to be a design bug in Python (probably intentional):
# the error handler for encoding is expected to return a **Unicode** string,
# which then needs to be encodable itself... Ugh.
#
# So we implement what codecs.encode() should have been doing: expect the
# error handler to return bytes() to be appended to the output.
#
# This seems to have been fixed in Python 3.3. We should try that first and
# fall back to this implementation only if it fails.
# https://docs.python.org/3.3/library/codecs.html#codecs.register_error
length = len(input)
out = b''
while input:
try:
part = codecs.encode(input, self.base_encoding)
out += part
input = '' # All converted
except UnicodeEncodeError as e:
# Convert the correct part
out += codecs.encode(input[:e.start], self.base_encoding)
replacement, pos = self.error(e)
out += replacement
input = input[pos:]
return out, length
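The manual loop above exists because, per the comment, older Pythons required an encode error handler registered with codecs.register_error() to return a replacement *string* that itself had to be encodable (Python 3.3+ reportedly also accepts bytes). A minimal sketch of the handler protocol, with a made-up handler name:

import codecs

def question_mark_handler(exc):
    # An encode error handler receives a UnicodeEncodeError and must return a
    # (replacement, resume_position) tuple.
    if isinstance(exc, UnicodeEncodeError):
        return ('?', exc.end)
    raise exc

# 'ascii_question' is an illustrative handler name, not part of the stdlib.
codecs.register_error('ascii_question', question_mark_handler)
print('naïve café'.encode('ascii', errors='ascii_question'))  # b'na?ve caf?'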
def safe_str_cmp(a, b):
"""This function compares strings in somewhat constant time. This
requires that the length of at least one string is known in advance.
Returns `True` if the two strings are equal, or `False` if they are not.
.. versionadded:: 0.7
"""
if isinstance(a, text_type):
a = a.encode('utf-8')
if isinstance(b, text_type):
b = b.encode('utf-8')
if _builtin_safe_str_cmp is not None:
return _builtin_safe_str_cmp(a, b)
if len(a) != len(b):
return False
rv = 0
if PY2:
for x, y in izip(a, b):
rv |= ord(x) ^ ord(y)
else:
for x, y in izip(a, b):
rv |= x ^ y
return rv == 0
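safe_str_cmp above is the Werkzeug-style constant-time comparison with a pure-Python fallback for old interpreters. On modern Python the standard library offers the same guarantee directly; a short sketch (hmac.compare_digest is stdlib, the wrapper is illustrative):

import hmac

def tokens_match(expected, provided):
    # hmac.compare_digest compares in constant time to resist timing attacks;
    # both sides are encoded so it always receives objects of the same type.
    return hmac.compare_digest(expected.encode('utf-8'), provided.encode('utf-8'))

print(tokens_match('s3cret', 's3cret'))   # True
print(tokens_match('s3cret', 's3cret!'))  # False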
def _hash_internal(method, salt, password):
"""Internal password hash helper. Supports plaintext without salt,
unsalted and salted passwords. In case salted passwords are used
hmac is used.
"""
if method == 'plain':
return password, method
if isinstance(password, text_type):
password = password.encode('utf-8')
if method.startswith('pbkdf2:'):
args = method[7:].split(':')
if len(args) not in (1, 2):
raise ValueError('Invalid number of arguments for PBKDF2')
method = args.pop(0)
iterations = args and int(args[0] or 0) or DEFAULT_PBKDF2_ITERATIONS
is_pbkdf2 = True
actual_method = 'pbkdf2:%s:%d' % (method, iterations)
else:
is_pbkdf2 = False
actual_method = method
hash_func = _hash_funcs.get(method)
if hash_func is None:
raise TypeError('invalid method %r' % method)
if is_pbkdf2:
if not salt:
raise ValueError('Salt is required for PBKDF2')
rv = pbkdf2_hex(password, salt, iterations,
hashfunc=hash_func)
elif salt:
if isinstance(salt, text_type):
salt = salt.encode('utf-8')
rv = hmac.HMAC(salt, password, hash_func).hexdigest()
else:
h = hash_func()
h.update(password)
rv = h.hexdigest()
return rv, actual_method
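In the PBKDF2 branch above, pbkdf2_hex and _hash_funcs are helpers from the surrounding library and are not shown here; the standard library's hashlib.pbkdf2_hmac covers the same derivation. A minimal sketch with illustrative parameters and output format:

import hashlib
import os

password = 'correct horse battery staple'.encode('utf-8')
salt = os.urandom(16)   # per-password random salt
iterations = 260000     # illustrative work factor

digest = hashlib.pbkdf2_hmac('sha256', password, salt, iterations)
print('pbkdf2:sha256:%d$%s$%s' % (iterations, salt.hex(), digest.hex()))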
def explain_code():
code = request.form.get("code", "")
error = ""
try:
hexified = (codecs.encode(bytes([byte]), "hex_codec") for byte in explainer.optimise(bytearray(code, "utf-8")))
except Exception:
error = "Error whilst optimising hex"
hexified = (codecs.encode(bytes([byte]), "hex_codec") for byte in bytearray(code, "utf-8"))
hex_code = b" ".join(hexified).decode("ascii").upper()
try:
return "\n{}\n{}\n{}".format(explainer.Explainer(bytearray(code, "utf-8"), []), error, hex_code).replace("\n", "\n ")
except Exception:
return "\n Error formatting explanation\n {}\n {}".format(error, hex_code)
def bytesToHexStr(bytesInput):
"""Converts byte array to hex str
:param bytesInput: byte array to convert
:type bytesInput: byte-array
:return: Hex string representing bytesInput
"""
return codecs.encode(bytesInput, 'hex').decode('ascii')
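bytesToHexStr (and the bytes_to_hex_str/hex_str_to_bytes pair earlier) is the codecs spelling of a bytes/hex round trip; the bytes type can do the same directly. A short comparison sketch:

import codecs

payload = b'\xde\xad\xbe\xef'

# codecs-based round trip, as in the helpers above.
as_hex = codecs.encode(payload, 'hex').decode('ascii')
assert codecs.decode(as_hex.encode('ascii'), 'hex') == payload

# Equivalent built-in methods: bytes.hex() (Python 3.5+) and bytes.fromhex().
assert payload.hex() == as_hex            # 'deadbeef'
assert bytes.fromhex(as_hex) == payload
print(as_hex)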
def test_dont_update_when_nothing_changes(self):
"""
runner = CliRunner()
self.template.content = codecs.encode(b"some foo", "base64")
result = runner.invoke(update, ["--name", "testrepo", "--token", "token"])
self.assertEqual(result.exit_code, 0)
self.gh.assert_called_with("token")
self.gh().get_user().get_repo.assert_called_with(name="testrepo")
self.gh().get_user().get_repo().get_labels.assert_called_once_with()
self.gh().get_user().get_repo().update_file.assert_not_called()
"""
def test_dont_upgrade_when_nothing_changes(self):
runner = CliRunner()
self.template.content = codecs.encode(b"some foo", "base64")
result = runner.invoke(upgrade, ["--name", "testrepo", "--token", "token"])
self.assertEqual(result.exit_code, 0)
self.gh.assert_called_with("token")
self.gh().get_user().get_repo.assert_called_with(name="testrepo")
self.gh().get_user().get_repo().update_file.assert_not_called()
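The tests above stub self.template.content with base64-encoded bytes. Note that the codecs base64 encoder appends a trailing newline, unlike base64.b64encode; a quick sketch of the difference:

import base64
import codecs

via_codecs = codecs.encode(b'some foo', 'base64')   # b'c29tZSBmb28=\n'
via_base64 = base64.b64encode(b'some foo')          # b'c29tZSBmb28='

assert via_codecs.strip() == via_base64
assert base64.b64decode(via_codecs) == b'some foo'
print(via_codecs, via_base64)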