The following 32 code examples, extracted from open-source Python projects, illustrate how to use docutils.nodes.definition().
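Before the project examples, here is a minimal sketch (not taken from any of the projects below) of the pattern most of them follow: a definition node is paired with a term inside a definition_list_item, and the items are collected in a definition_list. Element constructors take a raw-source string first, then child nodes, so an empty string is passed when there is no original source text to record.

from docutils import nodes

# Minimal sketch: build a one-entry definition list by hand.
term = nodes.term('', 'spam')  # the word being defined
definition = nodes.definition('', nodes.paragraph(text='A canned meat product.'))
item = nodes.definition_list_item('', term, definition)
def_list = nodes.definition_list('', item)

print(def_list.pformat())  # pretty-print the resulting doctree fragment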
def build_regexp(definition, compile=True):
    """
    Build, compile and return a regular expression based on `definition`.

    :Parameter: `definition`: a 4-tuple (group name, prefix, suffix, parts),
        where "parts" is a list of regular expressions and/or regular
        expression definitions to be joined into an or-group.
    """
    name, prefix, suffix, parts = definition
    part_strings = []
    for part in parts:
        if type(part) is tuple:
            part_strings.append(build_regexp(part, None))
        else:
            part_strings.append(part)
    or_group = '|'.join(part_strings)
    regexp = '%(prefix)s(?P<%(name)s>%(or_group)s)%(suffix)s' % locals()
    if compile:
        return re.compile(regexp, re.UNICODE)
    else:
        return regexp
def definition_list_item(self, termline):
    indented, indent, line_offset, blank_finish = \
        self.state_machine.get_indented()
    itemnode = nodes.definition_list_item(
        '\n'.join(termline + list(indented)))
    lineno = self.state_machine.abs_line_number() - 1
    (itemnode.source,
     itemnode.line) = self.state_machine.get_source_and_line(lineno)
    termlist, messages = self.term(termline, lineno)
    itemnode += termlist
    definition = nodes.definition('', *messages)
    itemnode += definition
    if termline[0][-2:] == '::':
        definition += self.reporter.info(
            'Blank line missing before literal block (after the "::")? '
            'Interpreted as a definition list item.',
            line=lineno+1)
    self.nested_parse(indented, input_offset=line_offset, node=definition)
    return itemnode, blank_finish
def _format_subcommands(self, parser_info):
    assert 'children' in parser_info

    items = []

    for subcmd in parser_info['children']:
        subcmd_items = []

        if subcmd['help']:
            subcmd_items.append(nodes.paragraph(text=subcmd['help']))
        else:
            subcmd_items.append(nodes.paragraph(text='Undocumented'))

        items.append(
            nodes.definition_list_item(
                '',
                nodes.term('', '', nodes.strong(
                    text=subcmd['bare_usage'])),
                nodes.definition('', *subcmd_items)))

    return nodes.definition_list('', *items)
def text(self, match, context, next_state):
    """Titles, definition lists, paragraphs."""
    return [match.string], 'Text', []
def eof(self, context):
    """Not a definition."""
    self.state_machine.previous_line(2)  # so parent SM can reassess
    return []
def add_coqtop_output(self):
    """Add coqtop's responses to a Sphinx AST.

    Finds nodes to process using is_coqtop_block."""
    with CoqTop(color=True) as repl:
        for node in self.document.traverse(CoqtopBlocksTransform.is_coqtop_block):
            options = node['coqtop_options']
            opt_undo, opt_reset, opt_input, opt_output = self.parse_options(options)

            if opt_reset:
                repl.sendone("Reset Initial.")
            pairs = []
            for sentence in self.split_sentences(node.rawsource):
                pairs.append((sentence, repl.sendone(sentence)))
            if opt_undo:
                repl.sendone("Undo {}.".format(len(pairs)))

            dli = nodes.definition_list_item()
            for sentence, output in pairs:
                # Use coqdoc to highlight input
                in_chunks = highlight_using_coqdoc(sentence)
                dli += nodes.term(sentence, '', *in_chunks, classes=self.block_classes(opt_input))
                # Parse ANSI sequences to highlight output
                out_chunks = AnsiColorsParser().colorize_str(output)
                dli += nodes.definition(output, *out_chunks, classes=self.block_classes(opt_output, output))
            node.clear()
            node.rawsource = self.make_rawsource(pairs, opt_input, opt_output)
            node['classes'].extend(self.block_classes(opt_input or opt_output))
            node += nodes.inline('', '', classes=['coqtop-reset'] * opt_reset)
            node += nodes.definition_list(node.rawsource, dli)
def map_nested_definitions(nested_content):
    if nested_content is None:
        raise Exception('Nested content should be iterable, not null')
    # build definition dictionary
    definitions = {}
    for item in nested_content:
        if not isinstance(item, nodes.definition_list):
            continue
        for subitem in item:
            if not isinstance(subitem, nodes.definition_list_item):
                continue
            if not len(subitem.children) > 0:
                continue
            classifier = '@after'
            idx = subitem.first_child_matching_class(nodes.classifier)
            if idx is not None:
                ci = subitem[idx]
                if len(ci.children) > 0:
                    classifier = ci.children[0].astext()
            if classifier is not None and classifier not in (
                    '@replace', '@before', '@after'):
                raise Exception('Unknown classifier: %s' % classifier)
            idx = subitem.first_child_matching_class(nodes.term)
            if idx is not None:
                ch = subitem[idx]
                if len(ch.children) > 0:
                    term = ch.children[0].astext()
                    idx = subitem.first_child_matching_class(nodes.definition)
                    if idx is not None:
                        def_node = subitem[idx]
                        def_node.attributes['classifier'] = classifier
                        definitions[term] = def_node
    return definitions
def print_command_args_and_opts(arg_list, opt_list, sub_list=None):
    items = []
    if arg_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Positional arguments:'),
            nodes.definition('', arg_list)))
    if opt_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Options:'),
            nodes.definition('', opt_list)))
    if sub_list and len(sub_list):
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Sub-commands:'),
            nodes.definition('', sub_list)))
    return nodes.definition_list('', *items)
def apply_definition(definitions, my_def, name):
    if name in definitions:
        definition = definitions[name]
        classifier = definition['classifier']
        if classifier == '@replace':
            return definition.children
        if classifier == '@after':
            return my_def + definition.children
        if classifier == '@before':
            return definition.children + my_def
        raise Exception('Unknown classifier: %s' % classifier)
    return my_def
def print_subcommand_list(data, nested_content):
    definitions = map_nested_definitions(nested_content)
    items = []
    if 'children' in data:
        for child in data['children']:
            my_def = [nodes.paragraph(
                text=child['help'])] if child['help'] else []
            name = child['name']
            my_def = apply_definition(definitions, my_def, name)
            if len(my_def) == 0:
                my_def.append(nodes.paragraph(text='Undocumented'))
            if 'description' in child:
                my_def.append(nodes.paragraph(text=child['description']))
            my_def.append(nodes.literal_block(text=child['usage']))
            my_def.append(print_command_args_and_opts(
                print_arg_list(child, nested_content),
                print_opt_list(child, nested_content),
                print_subcommand_list(child, nested_content)
            ))
            items.append(
                nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.strong(text=name)),
                    nodes.definition('', *my_def)
                )
            )
    return nodes.definition_list('', *items)
def process_service(self, filename):
    data = self.get_json_from_file(os.path.join(self.dir, filename))

    request_filename = self.write_tmp(data['request'] or '')
    response_filename = self.write_tmp(data['response'] or '')
    example = HTTPExample(
        'http:example',
        arguments=['curl', 'httpie', 'python-requests'],
        options={
            'request': request_filename,
            'response': response_filename
        },
        content=self.content,
        lineno=self.lineno,
        content_offset=self.content_offset,
        block_text='.. http:example::',
        state=self.state,
        state_machine=self.state_machine
    )

    method = data['method'].upper()
    service = data['service']
    name = service.get('name') or ''
    path_scheme = data.get('path_scheme') or name
    summary = service.get('summary') or ''
    permission = service.get('permission') or ''

    container = nodes.container('')
    container.append(addnodes.desc_name('', method + ' '))
    container.append(addnodes.desc_name('', path_scheme))
    inner_container = nodes.definition('')
    container.append(inner_container)
    inner_container.append(nodes.paragraph(summary, summary))
    inner_container.append(addnodes.desc_name('permission', 'permission'))
    perm_label = ': ' + permission
    inner_container.append(addnodes.desc_annotation(perm_label, perm_label))
    inner_container.append(example.run()[0])

    # extra = nodes.paragraph('', '')
    # inner_container.append(extra)
    # if service.get('responses'):
    #     extra.append(nodes.strong('', 'Responses'))
    #     blist = nodes.bullet_list('')
    #     extra.append(blist)
    #     for code, config in service['responses'].items():
    #         blist.append(render_response(code, 'Hello'))

    # cleanup
    os.remove(request_filename)
    os.remove(response_filename)

    return container
def _parse_definition_list(
        def_list_node: nodes.definition_list) -> ExtraContentDict:
    """Parse a definition list inside the directive.

    Args:
        def_list_node: A definition list node containing definitions for
            extending the Sphinx output.

    Raises:
        ValueError: The given classifier was unrecognized.

    Returns:
        A dict where keys are item IDs and values contain the classifiers and
        the content as lists of docutils nodes.
    """
    definitions = collections.defaultdict(lambda: None)
    for node in def_list_node:
        if not isinstance(node, nodes.definition_list_item):
            continue

        term = _get_matching_child(node, nodes.term, last=False).astext()

        classifiers = set()
        for child_node in node.children:
            if not isinstance(child_node, nodes.classifier):
                continue
            classifier = child_node.astext()
            if classifier not in ALL_CLASSIFIERS:
                raise ValueError("unknown classifier '{0}'".format(classifier))
            classifiers.add(classifier)
        if not classifiers & CONTENT_CLASSIFIERS:
            classifiers.add("@after")
        if not classifiers & MARKUP_CLASSIFIERS:
            classifiers.add("@auto")

        content = _get_matching_child(
            node, nodes.definition, last=False).children

        if not definitions[term]:
            definitions[term] = []
        definitions[term].append(ExtraContent(classifiers, content))

    return definitions