The following code examples, extracted from Python open source projects, illustrate how to use inspect.BlockFinder().
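As background for the examples: inspect.BlockFinder is an undocumented internal helper of the inspect module. inspect.getblock() feeds it source tokens one at a time through its tokeneater() method; once the first complete def/class/lambda block has been seen, tokeneater() raises inspect.EndOfBlock, leaving the 1-based line number of the block's last line in the finder's last attribute. Below is a minimal sketch of that pattern (the source_lines and greet function are invented for illustration; since BlockFinder is an internal class, its behaviour can change between Python versions):

import inspect
import tokenize

# Invented example input: a function definition followed by unrelated code.
source_lines = [
    "def greet(name):\n",
    "    return 'hello ' + name\n",
    "\n",
    "print(greet('world'))\n",
]

finder = inspect.BlockFinder()
try:
    # generate_tokens() expects a readline-style callable returning one line per call.
    for tok in tokenize.generate_tokens(iter(source_lines).__next__):
        finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
    pass

# finder.last is the line number of the last line of the first block,
# so this prints just the two lines of the greet() definition.
print("".join(source_lines[:finder.last]))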
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
    if astnode is None:
        content = str(source)
        if sys.version_info < (2, 7):
            content += "\n"
        try:
            astnode = compile(content, "source", "exec", 1024)  # 1024 for AST
        except ValueError:
            start, end = getstatementrange_old(lineno, source, assertion)
            return None, start, end
    start, end = get_statement_startend2(lineno, astnode)
    # we need to correct the end:
    # - ast-parsing strips comments
    # - there might be empty lines
    # - we might have lesser indented code blocks at the end
    if end is None:
        end = len(source.lines)

    if end > start + 1:
        # make sure we don't span differently indented code blocks
        # by using the BlockFinder helper that inspect.getsource() uses itself
        block_finder = inspect.BlockFinder()
        # if we start with an indented line, put blockfinder in "started" mode
        block_finder.started = source.lines[start][0].isspace()
        it = ((x + "\n") for x in source.lines[start:end])
        try:
            for tok in tokenize.generate_tokens(lambda: next(it)):
                block_finder.tokeneater(*tok)
        except (inspect.EndOfBlock, IndentationError):
            end = block_finder.last + start
        except Exception:
            pass

    # the end might still point to a comment or empty line, correct it
    while end:
        line = source.lines[end - 1].lstrip()
        if line.startswith("#") or not line:
            end -= 1
        else:
            break
    return astnode, start, end
def getblock(lines):
    """Extract the block of code at the top of the given list of lines."""
    blockfinder = inspect.BlockFinder()
    try:
        tokens = inspect.tokenize.generate_tokens(iter(lines).__next__)
        for _token in tokens:
            blockfinder.tokeneater(*_token)
    except (inspect.EndOfBlock, IndentationError):
        pass
    # differs from the builtin inspect.getblock, which returns lines[:blockfinder.last]
    return lines
def getblock(lines):
    """Extract the block of code at the top of the given list of lines."""
    blockfinder = inspect.BlockFinder()
    try:
        inspect.tokenize.tokenize(iter(lines).next, blockfinder.tokeneater)
    except (inspect.EndOfBlock, IndentationError):
        pass
    return lines
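The two getblock variants above appear to be the Python 3 and Python 2 forms of the same helper: Python 3's tokenize.generate_tokens() is a generator driven by the readline callable iter(lines).__next__, while the older tokenize.tokenize() signature used here took a readline function plus a tokeneater callback (and list iterators exposed .next rather than .__next__), an API that was removed in Python 3.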
def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
    if astnode is None:
        content = str(source)
        try:
            astnode = compile(content, "source", "exec", 1024)  # 1024 for AST
        except ValueError:
            start, end = getstatementrange_old(lineno, source, assertion)
            return None, start, end
    start, end = get_statement_startend2(lineno, astnode)
    # we need to correct the end:
    # - ast-parsing strips comments
    # - there might be empty lines
    # - we might have lesser indented code blocks at the end
    if end is None:
        end = len(source.lines)

    if end > start + 1:
        # make sure we don't span differently indented code blocks
        # by using the BlockFinder helper that inspect.getsource() uses itself
        block_finder = inspect.BlockFinder()
        # if we start with an indented line, put blockfinder in "started" mode
        block_finder.started = source.lines[start][0].isspace()
        it = ((x + "\n") for x in source.lines[start:end])
        try:
            for tok in tokenize.generate_tokens(lambda: next(it)):
                block_finder.tokeneater(*tok)
        except (inspect.EndOfBlock, IndentationError):
            end = block_finder.last + start
        except Exception:
            pass

    # the end might still point to a comment or empty line, correct it
    while end:
        line = source.lines[end - 1].lstrip()
        if line.startswith("#") or not line:
            end -= 1
        else:
            break
    return astnode, start, end
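A detail worth noting in both getstatementrange_ast examples is the assignment block_finder.started = source.lines[start][0].isspace(): when the statement slice begins with indented code (i.e. it sits inside a function body), the finder is put straight into "started" mode instead of waiting for a def/class/lambda keyword, so the next dedent back to the left margin still terminates the block. A small sketch of just that trick, using an invented slice of source lines:

import inspect
import tokenize

# Invented slice, as getstatementrange_ast would produce it: the statement of
# interest is indented, and less-indented code follows it.
lines = [
    "    if flag:\n",
    "        do_something()\n",
    "print('done')\n",
]

finder = inspect.BlockFinder()
# The slice starts with an indented line, so switch the finder to "started" mode.
finder.started = lines[0][0].isspace()

it = (line for line in lines)
try:
    for tok in tokenize.generate_tokens(lambda: next(it)):
        finder.tokeneater(*tok)
except (inspect.EndOfBlock, IndentationError):
    pass

print(finder.last)  # -> 2: the block ends on the second line of the slice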