我们从Python开源项目中,提取了以下50个代码示例,用于说明如何使用 sys.setrecursionlimit()。
def setUp(self):
    """Install stealth-address test fixtures and ensure an adequate recursion limit."""
    # The derivation code exercised by these tests can recurse deeply;
    # guarantee at least 1000 frames before running them.
    if sys.getrecursionlimit() < 1000:
        sys.setrecursionlimit(1000)
    # Mainnet stealth address plus the key material it was built from.
    self.addr = 'vJmtjxSDxNPXL4RNapp9ARdqKz3uJyf1EDGjr1Fgqs9c8mYsVH82h8wvnA4i5rtJ57mr3kor1EVJrd4e5upACJd588xe52yXtzumxj'
    self.scan_pub = '025e58a31122b38c86abc119b9379fe247410aee87a533f9c07b189aef6c3c1f52'
    self.scan_priv = '3e49e7257cb31db997edb1cf8299af0f37e2663e2260e4b8033e49d39a6d02f2'
    self.spend_pub = '03616562c98e7d7b74be409a787cec3a912122f3fb331a9bee9b0b73ce7b9f50af'
    self.spend_priv = 'aa3db0cfb3edc94de4d10f873f8190843f2a17484f6021a95a7742302c744748'
    self.ephem_pub = '03403d306ec35238384c7e340393335f9bc9bb4a2e574eb4e419452c4ea19f14b0'
    self.ephem_priv = '9e63abaf8dcd5ea3919e6de0b6c544e00bf51bf92496113a01d6e369944dc091'
    self.shared_secret = 'a4047ee231f4121e3a99a3a3378542e34a384b865a9917789920e1f13ffd91c6'
    self.pay_pub = '02726112ad39cb6bf848b1b1ef30b88e35286bf99f746c2be575f96c0e02a9357c'
    self.pay_priv = '4e422fb1e5e1db6c1f6ab32a7706d368ceb385e7fab098e633c5c5949c3b97cd'
    # Testnet counterpart of the same stealth address.
    self.testnet_addr = 'waPUuLLykSnY3itzf1AyrQZm42F7KyB7SR5zpfqmnzPXWhx9kXLzV3EcyqzDdpTwngiyCCMUqztS9S1d7XJs3JMt3MsHPDpBCudvx9'
def deepcopy(obj, recursion=100000):
    """
    Perform a deep copy of obj using cPickle. Faster than copy.deepcopy()
    for large objects.

    @param obj: the object to copy
    @param recursion: maximum recursion limit while (un)pickling
    @type recursion: int
    @return: a deep copy of obj
    """
    from csb.io import Pickle

    current = sys.getrecursionlimit()
    sys.setrecursionlimit(recursion)
    try:
        # Round-trip through pickle. The try/finally guarantees the limit
        # is restored even if obj is unpicklable (the original leaked the
        # raised limit on error).
        tmp = Pickle.dumps(obj, Pickle.HIGHEST_PROTOCOL)
        copy = Pickle.loads(tmp)
    finally:
        sys.setrecursionlimit(current)
    return copy
def test_recursionlimit_recovery(self):
    """Issue #5392: hitting the recursion limit twice must not overflow the
    C stack.

    NOTE: this test is slightly fragile in that it depends on the current
    recursion count when executing the test being low enough so as to
    trigger the recursion recovery detection in the _Py_MakeEndRecCheck
    macro (see ceval.h).
    """
    saved_limit = sys.getrecursionlimit()

    def bottomless():
        bottomless()

    try:
        for limit in (50, 1000):
            sys.setrecursionlimit(limit)
            self.assertRaises(RuntimeError, bottomless)
            self.assertRaises(RuntimeError, bottomless)
    finally:
        sys.setrecursionlimit(saved_limit)
def test_recursionlimit_fatalerror(self):
    """A second stack overflow hit while recovering from a first one must
    abort with a fatal error rather than crash arbitrarily."""
    if os.name == "nt":
        # The aborting child process would pop up a crash dialog on Windows.
        raise unittest.SkipTest(
            "under Windows, test would generate a spurious crash dialog")
    code = textwrap.dedent("""
        import sys
        def f():
            try:
                f()
            except RuntimeError:
                f()
        sys.setrecursionlimit(%d)
        f()""")
    for limit in (50, 1000):
        proc = subprocess.Popen([sys.executable, '-c', code % limit],
                                stderr=subprocess.PIPE)
        err = proc.communicate()[1]
        self.assertTrue(proc.returncode, proc.returncode)
        self.assertTrue(
            b"Fatal Python error: Cannot recover from stack overflow" in err,
            err)
def __deepcopy__(self, memo):
    """
    Hack sorting double chained task lists by task_id to avoid hitting
    max_depth on deepcopy operations.
    """
    sys.setrecursionlimit(5000)  # TODO fix this in a better way
    cls = self.__class__
    clone = cls.__new__(cls)
    memo[id(self)] = clone
    # These attributes are shared with the original instead of deep-copied.
    shallow = ('user_defined_macros', 'user_defined_filters', 'params')
    for key, value in list(self.__dict__.items()):
        if key in shallow:
            continue
        setattr(clone, key, copy.deepcopy(value, memo))
    clone.params = self.params
    if hasattr(self, 'user_defined_macros'):
        clone.user_defined_macros = self.user_defined_macros
    if hasattr(self, 'user_defined_filters'):
        clone.user_defined_filters = self.user_defined_filters
    return clone
def test_recursionlimit(self): self.assertRaises(TypeError, sys.getrecursionlimit, 42) oldlimit = sys.getrecursionlimit() self.assertRaises(TypeError, sys.setrecursionlimit) self.assertRaises(ValueError, sys.setrecursionlimit, -42) sys.setrecursionlimit(10000) self.assertEqual(sys.getrecursionlimit(), 10000) sys.setrecursionlimit(oldlimit) self.assertRaises(OverflowError, sys.setrecursionlimit, 1 << 31) try: sys.setrecursionlimit((1 << 31) - 5) try: # issue13546: isinstance(e, ValueError) used to fail # when the recursion limit is close to 1<<31 raise ValueError() except ValueError, e: pass finally: sys.setrecursionlimit(oldlimit)
def check_limit(n, test_func_name): sys.setrecursionlimit(n) if test_func_name.startswith("test_"): print test_func_name[5:] else: print test_func_name test_func = globals()[test_func_name] try: test_func() # AttributeError can be raised because of the way e.g. PyDict_GetItem() # silences all exceptions and returns NULL, which is usually interpreted # as "missing attribute". except (RuntimeError, AttributeError): pass else: print "Yikes!"
def populate_group_command_tree(group, groups_map, cmds_map):
    """Recursively replace the subgroup/command NAME lists stored on `group`
    with the actual group/command objects looked up in the maps.

    Names missing from the maps are silently dropped.
    """
    ## Increase the limit just in case recursive calls use up the
    ## recursion limit:
    #import sys
    #sys.setrecursionlimit(10000)
    subgroups = []
    for subgroup_name in group['subgroups']:
        # dict.has_key() was removed in Python 3; `in` works everywhere.
        if subgroup_name in groups_map:
            subgroup = groups_map[subgroup_name]
            populate_group_command_tree(subgroup, groups_map, cmds_map)
            subgroups.append(subgroup)
    group['subgroups'] = subgroups
    group['commands'] = [cmds_map[name]
                         for name in group['commands'] if name in cmds_map]
def test_recursionlimit_fatalerror(self):
    """A second stack overflow hit while recovering from a first one must
    abort with a fatal error rather than crash arbitrarily."""
    code = textwrap.dedent("""
        import sys
        def f():
            try:
                f()
            except RuntimeError:
                f()
        sys.setrecursionlimit(%d)
        f()""")
    with test.support.suppress_crash_popup():
        for limit in (50, 1000):
            proc = subprocess.Popen([sys.executable, '-c', code % limit],
                                    stderr=subprocess.PIPE)
            err = proc.communicate()[1]
            self.assertTrue(proc.returncode, proc.returncode)
            self.assertIn(
                b"Fatal Python error: Cannot recover from stack overflow",
                err)
def test_worst_case(self):
    """Worst case: every node has exactly one child, i.e. the tree
    degenerates into a list, so the height equals the node count."""
    sys.setrecursionlimit(10 ** 7)  # max depth of recursion
    resource.setrlimit(resource.RLIMIT_STACK,
                       (resource.RLIM_INFINITY, resource.RLIM_INFINITY))
    # node i's parent is i-1; node 0 is the root (parent -1).
    test_input = [node - 1 for node in range(10 ** 5)]
    tree = tree_height.TreeHeight(len(test_input), test_input)
    assert len(test_input) == tree.compute_height()

# def test_provided_input_data(self):
#     files_wildcard = os.path.dirname(__file__) + "/tree_height_test_input/*"
#     files = glob.glob(files_wildcard)
#     for file in files:
#         with open(file) as f:
#             size = f.readline()
#             parent = f.readline()
#             test_input = [int(x) for x in parent.split()]
#             tree = tree_height.TreeHeight(len(test_input), test_input)
def __init__(self, n, e):
    """Run Wiener's continued-fraction attack on the RSA public key (n, e),
    filling in self.d / self.p / self.q on success (left as None otherwise)."""
    self.d = None
    self.p = None
    self.q = None
    # The continued-fraction expansion recurses deeply for large keys.
    sys.setrecursionlimit(100000)
    frac = self.rational_to_contfrac(e, n)
    convergents = self.convergents_from_contfrac(frac)
    for (k, d) in convergents:
        # A usable convergent k/d satisfies e*d ≡ 1 (mod k), yielding phi(n).
        if k == 0 or (e * d - 1) % k != 0:
            continue
        phi = (e * d - 1) // k
        s = n - phi + 1           # would equal p + q for a correct guess
        discr = s * s - 4 * n     # would equal (p - q)**2
        if discr < 0:
            continue
        t = self.is_perfect_square(discr)
        if t != -1 and (s + t) % 2 == 0:
            self.d = d
            x = Symbol('x')
            roots = solve(x ** 2 - s * x + n, x)
            if len(roots) == 2:
                self.p = roots[0]
                self.q = roots[1]
            break
def save(s):
    """Pickle structure `s` to the path given by s.url.pickle.

    Raises the recursion limit while dumping to avoid pickle's "maximum
    recursion depth exceeded" error, see:
    http://stackoverflow.com/q/2134706/1030312

    Fixes over the original: the limit is restored even if pickling fails,
    and the output file handle is closed deterministically.
    """
    current_limit = sys.getrecursionlimit()
    if current_limit < fypickling_recursion_limit:
        sys.setrecursionlimit(fypickling_recursion_limit)
    try:
        with open(s.url.pickle, 'wb') as handle:
            dump(s, handle, protocol=HIGHEST_PROTOCOL)
    finally:
        if current_limit < fypickling_recursion_limit:
            sys.setrecursionlimit(current_limit)
def test_recursionlimit_recovery(self):
    """Issue #5392: hitting the recursion limit twice must not overflow the
    C stack.

    NOTE: this test is slightly fragile in that it depends on the current
    recursion count when executing the test being low enough so as to
    trigger the recursion recovery detection in the _Py_MakeEndRecCheck
    macro (see ceval.h).
    """
    if hasattr(sys, 'gettrace') and sys.gettrace():
        self.skipTest('fatal error if run with a trace function')
    saved_limit = sys.getrecursionlimit()

    def bottomless():
        bottomless()

    try:
        for limit in (50, 1000):
            sys.setrecursionlimit(limit)
            self.assertRaises(RuntimeError, bottomless)
            self.assertRaises(RuntimeError, bottomless)
    finally:
        sys.setrecursionlimit(saved_limit)
def test_recursionlimit_fatalerror(self):
    """A second stack overflow hit while recovering from a first one must
    abort with a fatal error rather than crash arbitrarily."""
    code = textwrap.dedent("""
        import sys
        def f():
            try:
                f()
            except RuntimeError:
                f()
        sys.setrecursionlimit(%d)
        f()""")
    with test.support.SuppressCrashReport():
        for limit in (50, 1000):
            proc = subprocess.Popen([sys.executable, '-c', code % limit],
                                    stderr=subprocess.PIPE)
            err = proc.communicate()[1]
            self.assertTrue(proc.returncode, proc.returncode)
            self.assertIn(
                b"Fatal Python error: Cannot recover from stack overflow",
                err)
def __deepcopy__(self, memo):
    """
    Hack sorting double chained task lists by task_id to avoid hitting
    max_depth on deepcopy operations.
    """
    sys.setrecursionlimit(5000)  # TODO fix this in a better way
    cls = self.__class__
    clone = cls.__new__(cls)
    memo[id(self)] = clone
    for key, value in list(self.__dict__.items()):
        # These two attributes are shared, not deep-copied.
        if key in ('user_defined_macros', 'params'):
            continue
        setattr(clone, key, copy.deepcopy(value, memo))
    clone.params = self.params
    if hasattr(self, 'user_defined_macros'):
        clone.user_defined_macros = self.user_defined_macros
    return clone
def test_getWorkerArguments(self):
    """
    C{_getWorkerArguments} discards options like C{random} as they only
    matter in the manager, and forwards options like C{recursionlimit} or
    C{disablegc}.
    """
    self.addCleanup(sys.setrecursionlimit, sys.getrecursionlimit())
    if gc.isenabled():
        self.addCleanup(gc.enable)
    self.options.parseOptions(["--recursionlimit", "2000",
                               "--random", "4", "--disablegc"])
    worker_args = self.options._getWorkerArguments()
    self.assertIn("--disablegc", worker_args)
    worker_args.remove("--disablegc")
    self.assertEqual(["--recursionlimit", "2000"], worker_args)
def test_g_zero(): # recursion limit :( sys.setrecursionlimit(2000) # generate signature m = "hello, world" h = int(sha1_hexdigest(m), 16) S = DSA_signer(g = 0) r, s = S.DSA_sign(h) # verify signature V = DSA_verifier(g = 0) print V.verify(S, r, s, h) forged_signature = "omgwtfbbq" h = int(sha1_hexdigest(forged_signature), 16) print V.verify(S, r, s, h)
def pickle_model(
        path, model, word2index_x, word2index_y, index2word_x, index2word_y):
    """Pickle a model together with its vocabularies to `path` (Python 2).

    The recursion limit is raised tenfold for the duration of the dump
    (model graphs can nest deeply). Fixes over the original: the limit is
    restored even when pickling raises, and the file handle is closed via
    `with` in all cases.
    """
    import sys
    import cPickle as pickle
    modifier = 10
    saved = sys.getrecursionlimit()
    sys.setrecursionlimit(saved * modifier)
    try:
        with open(path, 'wb') as f:
            p_dict = {'model': model,
                      'word2index_x': word2index_x,
                      'word2index_y': word2index_y,
                      'index2word_x': index2word_x,
                      'index2word_y': index2word_y}
            pickle.dump(p_dict, f, protocol=2)
    finally:
        sys.setrecursionlimit(saved)
def __call__(self, *args):
    """sys.excepthook replacement: print a timestamped header, delegate to
    the original excepthook, run the registered callbacks, and optionally
    drop sys.last_traceback to release memory.

    @param args: the (type, value, traceback) triple an excepthook receives
    """
    ## Start by extending recursion depth just a bit.
    ## If the error we are catching is due to recursion, we don't want to
    ## generate another one here.
    recursionLimit = sys.getrecursionlimit()
    try:
        sys.setrecursionlimit(recursionLimit+100)
        ## call original exception handler first (prints exception)
        global original_excepthook, callbacks, clear_tracebacks
        try:
            # BUG FIX: the timestamp format used %m (month) in the minutes
            # slot; %M is the correct strftime directive for minutes.
            print("===== %s =====" % str(time.strftime("%Y.%m.%d %H:%M:%S", time.localtime(time.time()))))
        except Exception:
            sys.stderr.write("Warning: stdout is broken! Falling back to stderr.\n")
            sys.stdout = sys.stderr
        ret = original_excepthook(*args)
        for cb in callbacks:
            try:
                cb(*args)
            except Exception:
                print(" --------------------------------------------------------------")
                print(" Error occurred during exception callback %s" % str(cb))
                print(" --------------------------------------------------------------")
                traceback.print_exception(*sys.exc_info())
        ## Clear long-term storage of last traceback to prevent memory-hogging.
        ## (If an exception occurs while a lot of data is present on the stack,
        ## such as when loading large files, the data would ordinarily be kept
        ## until the next exception occurs. We would rather release this memory
        ## as soon as possible.)
        if clear_tracebacks is True:
            sys.last_traceback = None
    finally:
        sys.setrecursionlimit(recursionLimit)
def GenerateBSP(self, vertices, indices, max_face_count):
    """Build the BSP tree for the given triangle mesh.

    @param vertices: mesh vertex data
    @param indices: flat triangle index list (3 entries per face)
    @param max_face_count: split threshold forwarded to add_node

    The recursion limit is raised while the recursive node insertion runs;
    the try/finally now restores it even if add_node raises (the original
    leaked the raised limit on error).
    """
    saved_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(100000)
    try:
        # One entry per triangle: indices holds 3 vertex indices per face.
        faces = list(range(len(indices) // 3))
        box = calculate_bounding_box(vertices)
        self.add_node(box, faces, vertices, indices, max_face_count)
    finally:
        sys.setrecursionlimit(saved_limit)
def opt_recursionlimit(self, arg):
    """see sys.setrecursionlimit()"""
    try:
        # int() rejects non-numeric input; setrecursionlimit() rejects
        # out-of-range values -- both surface as a usage error.
        sys.setrecursionlimit(int(arg))
    except (TypeError, ValueError):
        raise usage.UsageError(
            "argument to recursionlimit must be an integer")
def cnn_save(self, savepath='cnn.pkl'):
    """Save the trained network

    input
    =====
    savepath: str
        Path of the net to be saved
    """
    import sys
    # Network graphs pickle via deep recursion; raise the limit first.
    sys.setrecursionlimit(1000000)
    import pickle
    # `with` guarantees the handle is closed even if pickling fails
    # (the original leaked the handle on error).
    with open(savepath, 'wb') as fp:  # write
        pickle.dump(self.net, fp)
def main(src_program, src_function, dst_program, f_silent, f_image, f_overwrite, f_top):
    """Search dst_program for functions similar to src_function of
    src_program. Returns (results, stats) on success, False otherwise."""
    sys.setrecursionlimit(3000)
    start_time = time.time()
    search_results = search_function(src_function, src_program, dst_program, f_top)
    if not search_results:
        return False
    results, stats = search_results
    stats["time"] = time.time() - start_time
    if not f_silent:
        print_results(results, stats, src_program, src_function, dst_program)
    """
    if f_image:
        def image_dump(program, function, f_overwrite):
            function_short = get_short_function_name(function)
            if not f_overwrite:
                if os.path.exists(os.path.join(get_dump_png_path(program), function_short + ".png")):
                    return
            flag = ["-o"] if f_overwrite else []
            cmd = ["python", "idascript.py", program, "bingrep_dump2.py", "-f", function, "-i"] + flag
            p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            p.communicate()
        print "Top 10 function images were dumped."
    """
    return (results, stats)
def main(function, f_image, f_all, f_overwrite): sys.setrecursionlimit(3000) program = idaapi.get_root_filename() start_time = time.time() cfgs = get_cfgs() dump_function_info(cfgs, program, function, f_image, f_all, f_overwrite) result_time = time.time() - start_time print "Dump finished." print "result_time: " + str(result_time) + " sec."
def __copy_theano(self, theano_f):
    """Deep-copy a compiled theano function by pickling it through a file.

    Fixes over the original: the file handles are closed via `with`, and
    the recursion limit is restored to its previous value (not a
    hard-coded 1000) even when (un)pickling fails.
    """
    saved_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(5000)
    try:
        # NOTE(review): the fixed /tmp path is kept for compatibility, but
        # it is race-prone if two processes copy concurrently -- consider
        # tempfile.NamedTemporaryFile.
        with open('/tmp/theano_model.p', 'wb') as handle:
            pickle.dump(theano_f, handle, protocol=pickle.HIGHEST_PROTOCOL)
        with open('/tmp/theano_model.p', 'rb') as handle:
            copied_f = pickle.load(handle)
    finally:
        sys.setrecursionlimit(saved_limit)
    return copied_f
def test_copy(self):
    """copy.copy(Mock()) used to segfault when the recursion limit was huge."""
    original_limit = sys.getrecursionlimit()
    self.addCleanup(sys.setrecursionlimit, original_limit)
    # can't use sys.maxint as this doesn't exist in Python 3
    sys.setrecursionlimit(int(10e8))
    # this segfaults without the fix in place
    copy.copy(Mock())
def main():
    """Generate an N x N maze via depth-first search and write it to
    maze_gen.out (one space-separated row per line)."""
    # The DFS may visit every cell, so allow N*N nested calls plus slack.
    sys.setrecursionlimit(N * N + 10)
    for _ in range(N):
        visited.append([False] * N)
        maze.append([0] * N)
    depth_first_search(0, 0, 0)
    with open('maze_gen.out', 'w') as f:
        for row in maze:
            f.write(' '.join(map(str, row)))
            f.write('\n')
def serialize(self, file_name):
    """
    Serialize this HMM to a file.

    @param file_name: target file name
    @type file_name: str
    """
    rec = sys.getrecursionlimit()
    sys.setrecursionlimit(10000)
    try:
        # `with` closes the handle and `finally` restores the limit even if
        # pickling fails (the original leaked both on error).
        with open(file_name, 'wb') as handle:
            csb.io.Pickle.dump(self, handle)
    finally:
        sys.setrecursionlimit(rec)
def deserialize(file_name):
    """
    De-serialize an HMM from a file.

    @param file_name: source file name (pickle)
    @type file_name: str
    """
    rec = sys.getrecursionlimit()
    sys.setrecursionlimit(10000)
    try:
        # `with` guarantees the handle is closed (the original never
        # closed it).
        with open(file_name, 'rb') as handle:
            return csb.io.Pickle.load(handle)
    finally:
        sys.setrecursionlimit(rec)
def save_model(model, file=None):
    """
    Save the model to file with cPickle

    This function is used by the training function to save the model.

    Parameters
    ----------
    model : :class:`yadll.model.Model`
        model to be saved in file
    file : `string`
        file name
    """
    d_file = file if file is not None else model.file
    if d_file is None:
        logger.error('No file name. Model not saved.')
        return
    try:
        with open(d_file, 'wb') as f:
            pickle.dump(model, f, pickle.HIGHEST_PROTOCOL)
    except RuntimeError:
        # Deeply nested graphs can exhaust the pickler's recursion budget;
        # retry once with a raised limit. Fixes over the original: the
        # limit is never *lowered* (max), and it is restored afterwards
        # instead of being left at 5000 for the rest of the process.
        saved_limit = sys.getrecursionlimit()
        sys.setrecursionlimit(max(saved_limit, 5000))
        try:
            with open(d_file, 'wb') as f:
                pickle.dump(model, f, pickle.HIGHEST_PROTOCOL)
        finally:
            sys.setrecursionlimit(saved_limit)
def limited_recursion(recursion_limit):
    """
    Prevent unlimited recursion: temporarily cap the interpreter recursion
    limit, restoring the previous value on exit.
    """
    previous = sys.getrecursionlimit()
    sys.setrecursionlimit(recursion_limit)
    try:
        yield
    finally:
        sys.setrecursionlimit(previous)
def set_recursion_limit(n=1 * 10 ** 8):
    """Raise the interpreter recursion limit to `n` (a very large value by
    default) and report success."""
    sys.setrecursionlimit(n)
    return True
def test_recursionlimit(self): self.assertRaises(TypeError, sys.getrecursionlimit, 42) oldlimit = sys.getrecursionlimit() self.assertRaises(TypeError, sys.setrecursionlimit) self.assertRaises(ValueError, sys.setrecursionlimit, -42) sys.setrecursionlimit(10000) self.assertEqual(sys.getrecursionlimit(), 10000) sys.setrecursionlimit(oldlimit)
def solve(self):
    """Run the knapsack heuristic once per cache server."""
    # increase recursion depth up front to avoid hitting the limit
    sys.setrecursionlimit(10000)
    for server in self.servers:
        logging.info("Server number " + str(server.id))
        self.knapsack_list = [ChoiceStructure(video, server)
                              for video in self.videos]
        self.knapsack_iterative()
def writeToStream(self, stream):
    """Write the assembled PDF to `stream`.

    For certain large pdf files, PdfFileWriter.write() causes:
    "maximum recursion depth exceeded while calling a Python object".
    This issue is present in pyPdf as well as PyPDF2 1.23, so the
    recursion limit is raised temporarily. The try/finally now restores
    the limit even if write() raises (the original leaked it on error).
    """
    old_reclimit = sys.getrecursionlimit()
    sys.setrecursionlimit(10000)
    try:
        self.output.write(stream)
    finally:
        sys.setrecursionlimit(old_reclimit)
def setup_tests():
    """Locate the XML/JSON idiom fixture directories next to this file and
    load their contents into the module-level test registries."""
    sys.setrecursionlimit(2000)
    directory = os.path.dirname(__file__)
    xml_idioms_dir = find_dir(directory, "idioms-xml")
    json_idioms_dir = find_dir(directory, "idioms-json")
    print("Setting up tests from following directories...")
    print(xml_idioms_dir)
    print(json_idioms_dir)
    for json_filename in sorted(os.listdir(json_idioms_dir)):
        if json_filename.endswith(".json"):
            path = os.path.join(json_idioms_dir, json_filename)
            # `with` closes the handle even on a parse error (the original
            # leaked it), and json.load can consume the file object
            # directly -- the read()/StringIO round-trip was redundant.
            with open(path, "r") as json_file:
                loaded_json = json.load(json_file)
            MASTER_JSON_FILES.append(loaded_json)
    for xml_filename in sorted(os.listdir(xml_idioms_dir)):
        if xml_filename.endswith(".xml"):
            path = os.path.join(xml_idioms_dir, xml_filename)
            XML_FILENAMES.append(xml_filename.split(".")[0])
            TESTED_XML_FILES.append(path)
def main():
    """Benchmark quick sort on mixed input; the other benchmark runs are
    kept here disabled for easy re-enabling."""
    sys.setrecursionlimit(100000)
    #take_data( quick_sort_helper , "quick_sort-ordered.csv" , generate_ordered_vector )
    #take_data( quick_sort_helper , "quick_sort-unordered.csv" , generate_inverse_vector )
    take_data(quick_sort_helper, "quick_sort-mixed.csv", generate_vector)
    #take_data( merge_sort_helper , "merge_sort-mixed.csv" , generate_vector )
    #take_data( merge_sort_helper , "merge_sort-ordered.csv" , generate_ordered_vector )
    #take_data( merge_sort_helper , "merge_sort-unordered.csv" , generate_inverse_vector )
    #take_data( insertion_sort , "insertion_sort-mixed.csv" , generate_vector )
    #take_data( insertion_sort , "insertion_sort-ordered.csv" , generate_ordered_vector )
    #take_data( insertion_sort , "insertion_sort-unordered.csv" , generate_inverse_vector )
def setUpClass(cls):
    """Clamp the recursion limit so that an accidental infinite recursion
    in the dictionary code yields a short, readable traceback."""
    cls._old_limit = sys.getrecursionlimit()
    sys.setrecursionlimit(100)
def tearDownClass(cls):
    """Restore the recursion limit saved by setUpClass."""
    sys.setrecursionlimit(cls._old_limit)