Python: usage examples of to_unicode()

def test_streaming_follow_redirects(self):
    # When following redirects, header and streaming callbacks
    # should only be called for the final result.
    # TODO(bdarnell): this test belongs in httpclient_test instead of
    # simple_httpclient_test, but it fails with the version of libcurl
    # available on travis-ci. Move it when that has been upgraded
    # or we have a better framework to skip tests based on curl version.
    headers = []
    chunks = []
    self.fetch("/redirect?url=/hello",
               header_callback=headers.append,
               streaming_callback=chunks.append)
    chunks = list(map(to_unicode, chunks))
    self.assertEqual(chunks, ['Hello world!'])
    # Make sure we only got one set of headers.
    num_start_lines = len([h for h in headers if h.startswith("HTTP/")])
    self.assertEqual(num_start_lines, 1)
Example sources: simple_httpclient_test.py and locale_test.py — projects: noc-orchestrator (author: DirceuSilvaLabs) and My-Web-Server-Framework-With-Python2.7 (author: syjsu).
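Every example on this page funnels byte strings through to_unicode before comparing or manipulating them as text. For reference, a minimal sketch of the behaviour these tests rely on, assuming Tornado's tornado.escape.to_unicode (bytes are decoded as UTF-8; unicode strings and None pass through unchanged):

from tornado.escape import to_unicode

assert to_unicode(b"Hello world!") == u"Hello world!"            # bytes -> unicode (UTF-8)
assert to_unicode(u"d\u00e9j\u00e0 vu") == u"d\u00e9j\u00e0 vu"  # unicode passes through
assert to_unicode(None) is None                                  # None passes through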
def test_csv_bom(self):
    with open(os.path.join(os.path.dirname(__file__), 'csv_translations',
                           'fr_FR.csv'), 'rb') as f:
        char_data = to_unicode(f.read())
    # Re-encode our input data (which is utf-8 without BOM) in
    # encodings that use the BOM and ensure that we can still load
    # it. Note that utf-16-le and utf-16-be do not write a BOM,
    # so we only test whichever variant is native to our platform.
    for encoding in ['utf-8-sig', 'utf-16']:
        tmpdir = tempfile.mkdtemp()
        try:
            with open(os.path.join(tmpdir, 'fr_FR.csv'), 'wb') as f:
                f.write(char_data.encode(encoding))
            tornado.locale.load_translations(tmpdir)
            locale = tornado.locale.get('fr_FR')
            self.assertIsInstance(locale, tornado.locale.CSVLocale)
            self.assertEqual(locale.translate("school"), u("\u00e9cole"))
        finally:
            shutil.rmtree(tmpdir)
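The re-encoding loop above works because the 'utf-8-sig' and 'utf-16' codecs write a byte-order mark when encoding and strip it again when decoding. A quick standard-library sketch of that property (independent of Tornado):

import codecs

data = u"\u00e9cole"
assert data.encode("utf-8-sig").startswith(codecs.BOM_UTF8)
assert data.encode("utf-16").startswith((codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE))
assert data.encode("utf-8-sig").decode("utf-8-sig") == data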
def __init__(self, template_string, name="<string>", loader=None,
             compress_whitespace=None, autoescape=_UNSET):
    self.name = name
    if compress_whitespace is None:
        compress_whitespace = name.endswith(".html") or \
            name.endswith(".js")
    if autoescape is not _UNSET:
        self.autoescape = autoescape
    elif loader:
        self.autoescape = loader.autoescape
    else:
        self.autoescape = _DEFAULT_AUTOESCAPE
    self.namespace = loader.namespace if loader else {}
    reader = _TemplateReader(name, escape.native_str(template_string))
    self.file = _File(_parse(reader, self))
    # The parsed template is turned into Python source; to_unicode()
    # normalizes that generated source to unicode before compile().
    self.code = self._generate_python(loader, compress_whitespace)
    try:
        self.compiled = compile(escape.to_unicode(self.code),
                                "<template %s>" % self.name,
                                "exec")
    except Exception:
        formatted_code = _format_code(self.code).rstrip()
        logging.error("%s code:\n%s", self.name, formatted_code)
        raise
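The constructor above follows Tornado's template pipeline: the template source is parsed, Python source is generated from the parse tree, and that source is compiled for later generate() calls, with to_unicode() normalizing the generated code first. A minimal usage sketch against Tornado's documented template API (rendered output is UTF-8 bytes, so to_unicode() is the natural way to get text back):

from tornado.template import Template
from tornado.escape import to_unicode

t = Template("Hello {{ name }}!")      # parsed and compiled in __init__
rendered = t.generate(name="world")    # generate() returns UTF-8-encoded bytes
assert to_unicode(rendered) == u"Hello world!"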
def response_handler(self, msg):
    ident, resp_bytes = msg
    # Decode the raw reply (bytes -> unicode -> dict) before forwarding it.
    resp = json_decode(to_unicode(resp_bytes))
    app_log.debug('resp: %s', resp)
    subid = self.subscriptions.get('commandExecute')
    if subid is not None:
        # Push the decoded reply to the matching subscription, if one exists.
        self.write_message(json_encode({
            'type': 'subscription_data',
            'id': subid,
            'payload': {
                'data': resp
            }
        }))
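The bytes -> unicode -> dict chain in response_handler is the usual pattern when a transport hands back raw bytes; the handler, subscription names, and message layout are specific to that project. A minimal round-trip sketch using tornado.escape helpers with an illustrative payload:

from tornado.escape import json_decode, json_encode, to_unicode

resp_bytes = b'{"status": "ok"}'                 # illustrative payload, not the project's format
resp = json_decode(to_unicode(resp_bytes))       # bytes -> unicode -> dict
assert resp["status"] == "ok"

envelope = json_encode({'type': 'subscription_data', 'payload': {'data': resp}})
assert isinstance(envelope, str)                 # json_encode returns a native str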
def test_unicode_apply(self):
    def upper(s):
        return to_unicode(s).upper()
    template = Template(utf8(u("{% apply upper %}foo \u00e9{% end %}")))
    self.assertEqual(template.generate(upper=upper), utf8(u("FOO \u00c9")))
def test_bytes_apply(self):
    def upper(s):
        return utf8(to_unicode(s).upper())
    template = Template(utf8(u("{% apply upper %}foo \u00e9{% end %}")))
    self.assertEqual(template.generate(upper=upper), utf8(u("FOO \u00c9")))
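Both apply tests exercise the {% apply %} template block, which runs a named function over the rendered output of the enclosed block; the function is supplied through generate()'s keyword arguments. A small sketch under that assumption (the shout filter is illustrative):

from tornado.template import Template
from tornado.escape import to_unicode

t = Template("{% apply shout %}hello {{ name }}{% end %}")
out = t.generate(shout=lambda s: to_unicode(s).upper(), name="world")
assert out == b"HELLO WORLD"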
def test_utf8_in_file(self):
    tmpl = self.loader.load("utf8.html")
    result = tmpl.generate()
    self.assertEqual(to_unicode(result).strip(), u("H\u00e9llo"))
def decode_argument(self, value, name=None):
    if type(value) != bytes:
        raise Exception("unexpected type for value: %r" % type(value))
    # use self.request.arguments directly to avoid recursion
    if 'encoding' in self.request.arguments:
        return value.decode(to_unicode(self.request.arguments['encoding'][0]))
    else:
        return value
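decode_argument is the tornado.web.RequestHandler hook that turns each raw argument (percent-decoded bytes) into the value returned by get_argument(); the override above picks the charset from an 'encoding' request argument. A minimal sketch of a custom override (the handler name and charset are illustrative, not from the original project):

import tornado.web


class Latin1Handler(tornado.web.RequestHandler):
    def decode_argument(self, value, name=None):
        # value arrives as percent-decoded bytes; the return value is what
        # get_argument()/get_arguments() hand back to application code.
        return value.decode("latin1")

    def get(self):
        self.write(self.get_argument("q", default=u""))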
def test_types(self):
    cookie_value = to_unicode(create_signed_value(self.COOKIE_SECRET,
                                                  "asdf", "qwer"))
    response = self.fetch("/typecheck/asdf?foo=bar",
                          headers={"Cookie": "asdf=" + cookie_value})
    data = json_decode(response.body)
    self.assertEqual(data, {})
    response = self.fetch("/typecheck/asdf?foo=bar", method="POST",
                          headers={"Cookie": "asdf=" + cookie_value},
                          body="foo=bar")