The following 16 code examples, extracted from open-source Python projects, illustrate how to use pyparsing.SkipTo().
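Before the project examples, here is a minimal standalone sketch (the input line below is invented for illustration, not taken from any of the projects) of the core SkipTo() idiom most of them rely on: skip everything up to a marker, consume the marker itself with include=True, then read the value token that follows.

import pyparsing as pp

# Hypothetical tc/netem-style output line used only for illustration.
line = "qdisc netem 8001: root refcnt 2 limit 1000 delay 100.0ms loss 0.1%"

# Skip ahead to the keyword "loss" (include=True also consumes it),
# then grab the value token that follows.
pattern = pp.SkipTo("loss", include=True) + pp.Word(pp.nums + ".%")
print(pattern.parseString(line)[-1])  # -> 0.1%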
def compile(self):
    manipulation_set = pp.Optional(pp.Suppress(pp.Keyword("THEN")) +
                                   pp.Suppress("|") +
                                   pp.SkipTo(pp.Suppress(";"), include=True))
    manipulation_set.setParseAction(lambda x: self._add_manipulation_set(x[0]))
    parser = (pp.Keyword("CONNECT") + self.connect_block.parser() +
              pp.Keyword("RETRIEVE") + self.retrieve_block.parser() +
              pp.Optional(pp.Keyword("JOIN") + self.join_block.parser()))
    try:
        parser.parseString(self.qgl_str)
    except pp.ParseException as e:
        raise QGLSyntaxError("Couldn't parse query: \n %s" % e)
    self._create_connectors()
    self._create_query_nodes()
    if self.join_block:
        self._create_joins()
    if self.manipulation_set_str:
        self.query_graph.manipulation_set.append_from_str(self.manipulation_set_str)
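The THEN-block fragment above (also used in the RETRIEVE block parser further below) can be exercised on its own. A self-contained sketch, with an invented manipulation string: SkipTo with a suppressed ";" target and include=True consumes the terminating ";" but returns only the skipped text.

import pyparsing as pp

manipulation_set = (pp.Suppress(pp.Keyword("THEN")) +
                    pp.Suppress("|") +
                    pp.SkipTo(pp.Suppress(";"), include=True))

# include=True consumes the ";" as well, but Suppress keeps it out of the results.
print(manipulation_set.parseString("THEN | rename node_1 AS n1;")[0].strip())
# -> rename node_1 AS n1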
def __parse_tc_filter_network(text):
    network_pattern = (
        pp.SkipTo("{:s}=".format(Tc.Param.DST_NETWORK), include=True) +
        pp.Word(pp.alphanums + "." + "/"))

    return network_pattern.parseString(text)[-1]
def __parse_tc_filter_src_port(text):
    port_pattern = (
        pp.SkipTo("{:s}=".format(Tc.Param.SRC_PORT), include=True) +
        pp.Word(pp.nums))

    return port_pattern.parseString(text)[-1]
def __parse_tc_filter_dst_port(text):
    port_pattern = (
        pp.SkipTo("{:s}=".format(Tc.Param.DST_PORT), include=True) +
        pp.Word(pp.nums))

    return port_pattern.parseString(text)[-1]
def __parse_netem_delay_distro(self, line):
    parse_param_name = "delay"
    pattern = (
        pp.SkipTo(parse_param_name, include=True) +
        pp.Word(pp.nums + ".msu") + pp.Word(pp.nums + ".msu"))

    try:
        parsed_list = pattern.parseString(line)
        self.__parsed_param[parse_param_name] = parsed_list[2]
        self.__parsed_param["delay-distro"] = parsed_list[3]
    except pp.ParseException:
        pass
def __parse_netem_param(
        self, line, parse_param_name, word_pattern, key_name=None):
    pattern = (
        pp.SkipTo(parse_param_name, include=True) + pp.Word(word_pattern))
    if not key_name:
        key_name = parse_param_name

    try:
        result = pattern.parseString(line)[-1]
        if typepy.is_not_null_string(result):
            self.__parsed_param[key_name] = result
    except pp.ParseException:
        pass
def __parse_duplicate(line):
    packet_pattern = (
        pp.SkipTo(pp.Word("+" + pp.nums) + pp.Literal("duplicates,")) +
        pp.Word("+" + pp.nums) +
        pp.Literal("duplicates,")
    )
    try:
        duplicate_parse_list = packet_pattern.parseString(
            _to_unicode(line))
    except pp.ParseException:
        return 0

    return int(duplicate_parse_list[-2].strip("+"))
def __parse_duplicate(line):
    packet_pattern = (
        pp.SkipTo(pp.Word(pp.nums) + pp.Literal("duplicates,")) +
        pp.Word(pp.nums) +
        pp.Literal("duplicates,")
    )
    try:
        duplicate_parse_list = packet_pattern.parseString(
            _to_unicode(line))
    except pp.ParseException:
        return 0

    return int(duplicate_parse_list[-2])
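A standalone way to exercise the duplicate-count pattern used in the two functions above (the sample ping output line is hypothetical): SkipTo scans ahead until "<count> duplicates," matches, and the count is then re-parsed explicitly so it ends up as its own token.

import pyparsing as pp

line = "11 packets transmitted, 10 received, +1 duplicates, 0% packet loss"

packet_pattern = (
    pp.SkipTo(pp.Word("+" + pp.nums) + pp.Literal("duplicates,")) +
    pp.Word("+" + pp.nums) +
    pp.Literal("duplicates,")
)
print(int(packet_pattern.parseString(line)[-2].strip("+")))  # -> 1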
def parser(self):
    query_key = pp.Keyword("QUERY")
    query_value = pp.Suppress("|") + pp.SkipTo(pp.Suppress(";"), include=True)

    fields_key = pp.Keyword("FIELDS")
    field_name = common_parsers.column
    field_name_list = pp.Group(pp.delimitedList(field_name, delim=",")).setParseAction(lambda x: x.asList())
    fields_block = (pp.Suppress(fields_key) + field_name_list)

    connector_name = pp.Word(pp.alphas, pp.alphanums + "_$")
    using_block = pp.Suppress("USING") + connector_name

    then_key = pp.Suppress("THEN")
    manipulation_set = pp.Suppress("|") + pp.SkipTo(pp.Suppress(";"), include=True)
    then_block = then_key + manipulation_set

    as_key = pp.Suppress("AS")
    node_name = pp.Word(pp.alphas, pp.alphanums + "_$")
    as_block = as_key + node_name

    query_node_block = (pp.Suppress(query_key) +
                        query_value +
                        pp.Optional(fields_block, default=None) +
                        using_block +
                        pp.Optional(then_block, default=None) +
                        as_block)
    query_node_block.setParseAction(lambda x: self._add_query_node(query_value=x[0],
                                                                   connector_name=x[2],
                                                                   node_name=x[4],
                                                                   fields=x[1],
                                                                   manipulation_set=x[3]))
    single_query_node = query_node_block + pp.Optional(pp.Suppress("---"))
    retrieve_block = pp.OneOrMore(single_query_node)
    return retrieve_block
def condition_section():
    return (_IDENTIFIER + _COLON +
            pyparsing.SkipTo(_RIGHT_CURLY).setResultsName("statement")
            ).setResultsName("condition")
def _enum_definition(self):
    """Detect an enum definition.

    e.g.
    enum foo {
        OPTION_1: 1 + 2,
        OPTION_2
    }
    """
    return (
        _ENUM
        + pyparsing.Optional(self._identifier())("enum_name")
        + _OPEN_CURLY
        + pyparsing.ZeroOrMore(
            pyparsing.Group(
                self._identifier()("name")
                + pyparsing.Optional(
                    _EQUALS
                    # This allows us to get even invalid expressions.
                    + pyparsing.SkipTo(pyparsing.Word(",}"))("expression")
                )
                + pyparsing.Optional(_COMMA)
            )
        )("fields")
        + _CLOSE_CURLY
        + self._maybe_attributes()("attributes")
    ).setParseAction(self._process_enum_definition)
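The interesting part of this grammar is skipping to a delimiter character class: SkipTo(Word(",}")) captures the raw text of the field expression even when it is not a parseable expression. A reduced, self-contained sketch, where a plain Word identifier and "=" stand in for the project's self._identifier() and _EQUALS tokens:

import pyparsing as pp

identifier = pp.Word(pp.alphas, pp.alphanums + "_")
field = identifier("name") + pp.Optional(
    pp.Suppress("=") +
    # Capture everything up to the next "," or "}" as raw text,
    # even if it is not a valid expression.
    pp.SkipTo(pp.Word(",}"))("expression")
)

result = field.parseString("OPTION_1 = 1 + 2,")
print(result["name"], result["expression"].strip())  # -> OPTION_1 1 + 2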
def _struct_definition():
    return (
        (_STRUCT.setResultsName("type") | _UNION.setResultsName("type"))
        + _IDENTIFIER.setResultsName("name")
        + parsers.anything_in_curly()
        + pyparsing.SkipTo(_SEMICOLON)
        + _SEMICOLON
    ).setResultsName("_struct_definition")
def _struct_typedef():
    return (
        _TYPEDEF
        + (_STRUCT.setResultsName("type") | _UNION.setResultsName("type"))
        + pyparsing.Optional(_IDENTIFIER).setResultsName("id")
        + parsers.anything_in_curly()
        + pyparsing.Optional(_STAR)
        + _IDENTIFIER.setResultsName("typedef_name")
        + pyparsing.SkipTo(_SEMICOLON)
        + _SEMICOLON
    ).setResultsName("_struct_typedef")
def _preprocessor_directive(self):
    return (_SHARP.suppress() +
            _PREPROCESSOR_KEYWORD +
            pyparsing.SkipTo(pyparsing.lineEnd))
def _type_instance(self):
    """A type declaration. The modifiers of a typedef:

        struct s *P[];
                 ^^^^<- The type instance.
    """
    type_instance = (
        # Function pointer (*f)(int foobar)
        pyparsing.ZeroOrMore(_STAR)
        + _OPEN_PARENTHESIS
        + pyparsing.Optional(_STAR("function_pointer"))
        + self._identifier()("type_instance_name")
        + _CLOSE_PARENTHESIS
        + parsers.anything_in_parentheses()("function_args")
    ) | (
        # Function object f(foo bar *)
        pyparsing.ZeroOrMore(_STAR)
        + self._identifier()("type_instance_name")
        + parsers.anything_in_parentheses()("function_args")
    ) | (
        # Simple form: *foo[10];
        pyparsing.ZeroOrMore(_STAR)("type_pointer")
        + self._identifier()("type_instance_name")

        # Possibly array: [], [][]
        + pyparsing.ZeroOrMore(
            _OPEN_BRACKET
            + pyparsing.SkipTo(_CLOSE_BRACKET)(
                "brackets_with_expression_inside*")
            + _CLOSE_BRACKET)

        # Bitfields: int x: 7;
        + pyparsing.Optional(
            _COLON
            + pyparsing.SkipTo(
                _SEMICOLON | _COMMA)("bitfield")
        )
    )

    return pyparsing.Group(
        type_instance
        + self._maybe_attributes()
    )
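Both SkipTo uses in the simple-form branch can be tried in isolation. A reduced sketch in which plain string literals stand in for the project's _OPEN_BRACKET, _CLOSE_BRACKET, _COLON, _SEMICOLON, and _COMMA tokens, showing how array sizes and a bitfield width are captured as raw text:

import pyparsing as pp

identifier = pp.Word(pp.alphas, pp.alphanums + "_")
declarator = (
    identifier("name")
    # Possibly array: [], [][] -- each bracket expression is kept verbatim.
    + pp.ZeroOrMore(
        pp.Suppress("[")
        + pp.SkipTo("]")("brackets_with_expression_inside*")
        + pp.Suppress("]"))
    # Bitfields: int x: 7;
    + pp.Optional(
        pp.Suppress(":")
        + pp.SkipTo(pp.Literal(";") | pp.Literal(","))("bitfield"))
)

print(declarator.parseString("buf[MAX_LEN][2];")["brackets_with_expression_inside"].asList())
# -> ['MAX_LEN', '2']
print(declarator.parseString("flags : 7;")["bitfield"].strip())  # -> 7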
def grammar(backend_keys):
    """Define the main multi-query grammar.

    Cumin provides a user-friendly generic query language that allows combining the results of subqueries for
    multiple backends:

    * Each query part can be composed with the others using boolean operators ``and``, ``or``, ``and not``, ``xor``.
    * Multiple query parts can be grouped together with parentheses ``(``, ``)``.
    * Specific backend query ``I{backend-specific query syntax}``, where ``I`` is an identifier for the specific
      backend.
    * Alias replacement, according to aliases defined in the configuration file ``A:group1``.
    * The identifier ``A`` is reserved for the aliases replacement and cannot be used to identify a backend.
    * A complex query example: ``(D{host1 or host2} and (P{R:Class = Role::MyClass} and not A:group1)) or D{host3}``

    Backus-Naur form (BNF) of the grammar::

        <grammar>       ::= <item> | <item> <boolean> <grammar>
        <item>          ::= <backend_query> | <alias> | "(" <grammar> ")"
        <backend_query> ::= <backend> "{" <query> "}"
        <alias>         ::= A:<alias_name>
        <boolean>       ::= "and not" | "and" | "xor" | "or"

    Given that the pyparsing library defines the grammar in a BNF-like style, for the details of the tokens not
    specified above check directly the source code.

    Arguments:
        backend_keys (list): list of the GRAMMAR_PREFIX for each registered backend.

    Returns:
        pyparsing.ParserElement: the grammar parser.
    """
    # Boolean operators
    boolean = (pp.CaselessKeyword('and not').leaveWhitespace() | pp.CaselessKeyword('and') |
               pp.CaselessKeyword('xor') | pp.CaselessKeyword('or'))('bool')

    # Parentheses
    lpar = pp.Literal('(')('open_subgroup')
    rpar = pp.Literal(')')('close_subgroup')

    # Backend query: P{PuppetDB specific query}
    query_start = pp.Combine(pp.oneOf(backend_keys, caseless=True)('backend') + pp.Literal('{'))
    query_end = pp.Literal('}')
    # Allow the backend-specific query to use the query_end token as well, as long as it's in a quoted string,
    # and fail if there is a query_start token before the first query_end is reached
    query = pp.SkipTo(query_end, ignore=pp.quotedString, failOn=query_start)('query')
    backend_query = pp.Combine(query_start + query + query_end)

    # Alias
    alias = pp.Combine(pp.CaselessKeyword('A') + ':' + pp.Word(pp.alphanums + '-_.+')('alias'))

    # Final grammar, see the docstring for its BNF based on the tokens defined above
    # Groups are used to have an easy dictionary access to the parsed results
    full_grammar = pp.Forward()
    item = backend_query | alias | lpar + full_grammar + rpar
    full_grammar << pp.Group(item) + pp.ZeroOrMore(pp.Group(boolean + item))  # pylint: disable=expression-not-assigned

    return full_grammar
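A hedged usage sketch for the grammar above: the backend prefixes 'D' and 'P', the host names, and the alias name below are invented. Building the parser for two registered backends and parsing a composed query returns one pp.Group per item or per boolean-plus-item pair.

parser = grammar(['D', 'P'])
result = parser.parseString(
    '(D{host1 or host2} and not A:group1) or D{host3}', parseAll=True)
for group in result:
    # Each group is either a backend query / alias / parenthesized subgroup,
    # or a boolean operator followed by such an item.
    print(group)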