def test_compiler_recursion_limit(self):
    # Expected limit is sys.getrecursionlimit() * the scaling factor
    # in symtable.c (currently 3)
    # We expect to fail *at* that limit, because we use up some of
    # the stack depth limit in the test suite code
    # So we check the expected limit and 75% of that
    # XXX (ncoghlan): duplicating the scaling factor here is a little
    # ugly. Perhaps it should be exposed somewhere...
    fail_depth = sys.getrecursionlimit() * 3
    success_depth = int(fail_depth * 0.75)

    def check_limit(prefix, repeated):
        expect_ok = prefix + repeated * success_depth
        self.compile_single(expect_ok)
        broken = prefix + repeated * fail_depth
        details = "Compiling ({!r} + {!r} * {})".format(
            prefix, repeated, fail_depth)
        with self.assertRaises(RuntimeError, msg=details):
            self.compile_single(broken)

    check_limit("a", "()")
    check_limit("a", ".b")
    check_limit("a", "[0]")
    check_limit("a", "*a")
Example source code for Python's getrecursionlimit()
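Before the project snippets below, here is a minimal, self-contained sketch of the pattern they all share: read the current limit with sys.getrecursionlimit(), raise it temporarily with sys.setrecursionlimit(), and restore it afterwards (the depth() helper is purely illustrative):

import sys

def depth(n):
    # Recurse n times; exceeding the limit raises RecursionError
    # (a subclass of RuntimeError, which is what the older snippets catch).
    return 0 if n == 0 else 1 + depth(n - 1)

old_limit = sys.getrecursionlimit()        # typically 1000 by default
try:
    sys.setrecursionlimit(old_limit * 2)   # temporarily allow deeper recursion
    depth(old_limit + 100)                 # would fail under the original limit
finally:
    sys.setrecursionlimit(old_limit)       # always restore the previous limit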
def test_recursionlimit_recovery(self):
    # NOTE: this test is slightly fragile in that it depends on the current
    # recursion count when executing the test being low enough so as to
    # trigger the recursion recovery detection in the _Py_MakeEndRecCheck
    # macro (see ceval.h).
    oldlimit = sys.getrecursionlimit()

    def f():
        f()

    try:
        for i in (50, 1000):
            # Issue #5392: stack overflow after hitting recursion limit twice
            sys.setrecursionlimit(i)
            self.assertRaises(RuntimeError, f)
            self.assertRaises(RuntimeError, f)
    finally:
        sys.setrecursionlimit(oldlimit)
def test_repr(self):
    l0 = []
    l2 = [0, 1, 2]
    a0 = self.type2test(l0)
    a2 = self.type2test(l2)

    self.assertEqual(str(a0), str(l0))
    self.assertEqual(repr(a0), repr(l0))
    self.assertEqual(repr(a2), repr(l2))
    self.assertEqual(str(a2), "[0, 1, 2]")
    self.assertEqual(repr(a2), "[0, 1, 2]")

    a2.append(a2)
    a2.append(3)
    self.assertEqual(str(a2), "[0, 1, 2, [...], 3]")
    self.assertEqual(repr(a2), "[0, 1, 2, [...], 3]")

    l0 = []
    for i in range(sys.getrecursionlimit() + 100):
        l0 = [l0]
    self.assertRaises(RuntimeError, repr, l0)
From read_write.py (project: visual_turing_test-tutorial, author: mateuszmalinowski):
def pickle_model(
        path,
        model,
        word2index_x,
        word2index_y,
        index2word_x,
        index2word_y):
    import sys
    import cPickle as pickle  # Python 2; on Python 3 use the built-in pickle module
    modifier = 10
    tmp = sys.getrecursionlimit()
    sys.setrecursionlimit(tmp * modifier)
    with open(path, 'wb') as f:
        p_dict = {'model': model,
                  'word2index_x': word2index_x,
                  'word2index_y': word2index_y,
                  'index2word_x': index2word_x,
                  'index2word_y': index2word_y}
        pickle.dump(p_dict, f, protocol=2)
    sys.setrecursionlimit(tmp)
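The pattern above (raise the limit, dump, restore) comes up whenever pickling deeply nested or recursive object graphs. A minimal self-contained sketch of the same idea with the standard pickle module; the dump_deep() helper and the nested-list data are purely illustrative:

import pickle
import sys

def dump_deep(obj, path, factor=10):
    # Pickling walks the object graph recursively, so very deep structures
    # can exceed the default recursion limit; raise it temporarily.
    old = sys.getrecursionlimit()
    sys.setrecursionlimit(old * factor)
    try:
        with open(path, 'wb') as f:
            pickle.dump(obj, f, protocol=pickle.HIGHEST_PROTOCOL)
    finally:
        sys.setrecursionlimit(old)    # restore even if pickling fails

nested = []
for _ in range(2000):                 # deeper than the default limit of 1000
    nested = [nested]
dump_deep(nested, 'nested.pkl')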
def __call__(self, *args):
    ## Start by extending recursion depth just a bit.
    ## If the error we are catching is due to recursion, we don't want to generate another one here.
    recursionLimit = sys.getrecursionlimit()
    try:
        sys.setrecursionlimit(recursionLimit + 100)

        ## call original exception handler first (prints exception)
        global original_excepthook, callbacks, clear_tracebacks
        try:
            # %M (minutes) rather than %m (month) in the time portion
            print("===== %s =====" % str(time.strftime("%Y.%m.%d %H:%M:%S", time.localtime(time.time()))))
        except Exception:
            sys.stderr.write("Warning: stdout is broken! Falling back to stderr.\n")
            sys.stdout = sys.stderr

        ret = original_excepthook(*args)

        for cb in callbacks:
            try:
                cb(*args)
            except Exception:
                print(" --------------------------------------------------------------")
                print(" Error occurred during exception callback %s" % str(cb))
                print(" --------------------------------------------------------------")
                traceback.print_exception(*sys.exc_info())

        ## Clear long-term storage of last traceback to prevent memory-hogging.
        ## (If an exception occurs while a lot of data is present on the stack,
        ## such as when loading large files, the data would ordinarily be kept
        ## until the next exception occurs. We would rather release this memory
        ## as soon as possible.)
        if clear_tracebacks is True:
            sys.last_traceback = None
    finally:
        sys.setrecursionlimit(recursionLimit)
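The handler above is meant to be installed as sys.excepthook so it runs for every uncaught exception, chaining to the original hook. A simplified sketch of that wiring, assuming a trimmed-down handler (the handler function and its extra print are illustrative, not part of the original module):

import sys

original_excepthook = sys.excepthook   # keep a reference so we can chain to it

def handler(exc_type, exc_value, exc_tb):
    # Give ourselves a little headroom in case the failure was a RecursionError.
    limit = sys.getrecursionlimit()
    try:
        sys.setrecursionlimit(limit + 100)
        original_excepthook(exc_type, exc_value, exc_tb)   # default traceback printout
        print("Unhandled %s: %s" % (exc_type.__name__, exc_value))
    finally:
        sys.setrecursionlimit(limit)

sys.excepthook = handler               # install the custom hook for uncaught exceptions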
def test_recursion_limited(self):
    """
    Verify that recursion is controlled when raise_on_first_error=False and something is wrong with the query.

    PYTHON-585
    """
    max_recursion = sys.getrecursionlimit()
    s = Session(Cluster(), [Host("127.0.0.1", SimpleConvictionPolicy)])
    self.assertRaises(TypeError, execute_concurrent_with_args, s, "doesn't matter", [('param',)] * max_recursion, raise_on_first_error=True)

    results = execute_concurrent_with_args(s, "doesn't matter", [('param',)] * max_recursion, raise_on_first_error=False)  # previously
    self.assertEqual(len(results), max_recursion)
    for r in results:
        self.assertFalse(r[0])
        self.assertIsInstance(r[1], TypeError)
def _getobjects():
    # modules first, globals and stack
    # objects (may contain duplicates)
    return tuple(_values(sys.modules)) + (
        globals(), stack(sys.getrecursionlimit())[2:])
From bsp_tree.py (project: Blender-WMO-import-export-scripts, author: WowDevTools):
def GenerateBSP(self, vertices, indices, max_face_count):
    resurs_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(100000)

    faces = []
    for iFace in range(len(indices) // 3):
        faces.append(iFace)

    box = calculate_bounding_box(vertices)
    self.add_node(box, faces, vertices, indices, max_face_count)

    sys.setrecursionlimit(resurs_limit)
def test_allow_recursion_deeper_than_the_recursion_limit(self):
    limit = 2 * sys.getrecursionlimit()
    rule = Match("a", "b")
    for _ in xrange(limit):
        rule = No(rule)
    self.assertEqual(format(rule), "no " * limit + "a=b")
def test_allow_recursion_deeper_than_the_recursion_limit(self):
    expr = parsing.forward_ref()
    expr.set(
        parsing.union(
            parsing.seq(parsing.txt("("), expr, parsing.txt(")"), pick=1),
            parsing.txt("a")
        )
    )
    limit = 2 * sys.getrecursionlimit()
    string = (limit * "(") + "a" + (limit * ")")
    self.assertEqual(expr.parse(string), ('a', ''))
def _GetCallingModuleObjectAndName():
    """Returns the module that's calling into this module.

    We generally use this function to get the name of the module calling a
    DEFINE_foo... function.
    """
    # Walk down the stack to find the first globals dict that's not ours.
    for depth in range(1, sys.getrecursionlimit()):
        if not sys._getframe(depth).f_globals is globals():
            globals_for_frame = sys._getframe(depth).f_globals
            module, module_name = _GetModuleObjectAndName(globals_for_frame)
            if module_name is not None:
                return module, module_name
    raise AssertionError("No module was found")
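A stripped-down sketch of the same stack-walking idea using only the standard library; caller_module_name is an illustrative name, not part of the gflags module:

import sys

def caller_module_name():
    # Walk outward through the call stack until we leave this module's globals;
    # the recursion limit is a safe upper bound on the possible stack depth.
    for depth in range(1, sys.getrecursionlimit()):
        try:
            frame_globals = sys._getframe(depth).f_globals
        except ValueError:          # walked past the bottom of the stack
            break
        if frame_globals is not globals():
            return frame_globals.get('__name__', '<unknown>')
    return None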
def test_copy(self):
    current = sys.getrecursionlimit()
    self.addCleanup(sys.setrecursionlimit, current)

    # can't use sys.maxint as this doesn't exist in Python 3
    sys.setrecursionlimit(int(10e8))
    # this segfaults without the fix in place
    copy.copy(Mock())
def serialize(self, file_name):
    """
    Serialize this HMM to a file.

    @param file_name: target file name
    @type file_name: str
    """
    rec = sys.getrecursionlimit()
    sys.setrecursionlimit(10000)
    csb.io.Pickle.dump(self, open(file_name, 'wb'))
    sys.setrecursionlimit(rec)
def deserialize(file_name):
    """
    De-serialize an HMM from a file.

    @param file_name: source file name (pickle)
    @type file_name: str
    """
    rec = sys.getrecursionlimit()
    sys.setrecursionlimit(10000)
    try:
        return csb.io.Pickle.load(open(file_name, 'rb'))
    finally:
        sys.setrecursionlimit(rec)
def runTest(self):
    rec = sys.getrecursionlimit()
    obj = ['X']
    copy = utils.deepcopy(obj, recursion=(rec + 1))
    self.assertEqual(obj, copy)
    self.assertNotEquals(id(obj), id(copy))
def limited_recursion(recursion_limit):
    """
    Prevent unlimited recursion.
    """
    old_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(recursion_limit)
    try:
        yield
    finally:
        sys.setrecursionlimit(old_limit)
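This generator follows the context-manager pattern (yield inside try/finally); in its project it is presumably decorated with contextlib.contextmanager. A self-contained sketch of that usage, with an illustrative deep_count() helper:

import sys
from contextlib import contextmanager

@contextmanager
def limited_recursion(recursion_limit):
    """Temporarily change the recursion limit inside a with-block."""
    old_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(recursion_limit)
    try:
        yield
    finally:
        sys.setrecursionlimit(old_limit)

def deep_count(n):
    return 0 if n == 0 else 1 + deep_count(n - 1)

with limited_recursion(100):
    try:
        deep_count(500)                    # exceeds the temporary limit of 100
    except RecursionError:
        print("recursion capped at 100 frames")
print(sys.getrecursionlimit())             # the original limit is restored afterwards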
def get_recursion_limit():
    return sys.getrecursionlimit()
def test_accessibility_on_very_deep_graph():
    gr = graph()
    gr.add_nodes(range(0, 311))   # 2001
    for i in range(0, 310):       # 2000
        gr.add_edge((i, i + 1))
    recursionlimit = getrecursionlimit()
    accessibility(gr)
    assert getrecursionlimit() == recursionlimit
def main(gr, n):
    recursionlimit = getrecursionlimit()
    for i in range(n):
        accessibility(gr)
    assert getrecursionlimit() == recursionlimit