The following 26 code examples, extracted from open-source Python projects, illustrate how to use docutils.nodes.definition_list().
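Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the term and definition text are made-up placeholders) showing how a definition_list node is assembled by hand and inspected:

from docutils import nodes

# A definition_list contains definition_list_item children; each item pairs a
# term (plus optional classifier nodes) with a definition body.
term = nodes.term('', 'spam')                # placeholder term text
body = nodes.definition('', nodes.paragraph(text='A canned meat product.'))
item = nodes.definition_list_item('', term, body)
dlist = nodes.definition_list('', item)
print(dlist.pformat())                       # dump the subtree as indented pseudo-XML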
def is_compactable(self, node):
    # print "is_compactable %s ?" % node.__class__,
    # explicite class arguments have precedence
    if 'compact' in node['classes']:
        return True
    if 'open' in node['classes']:
        return False
    # check config setting:
    if (isinstance(node, (nodes.field_list, nodes.definition_list))
        and not self.settings.compact_field_lists):
        # print "`compact-field-lists` is False"
        return False
    if (isinstance(node, (nodes.enumerated_list, nodes.bullet_list))
        and not self.settings.compact_lists):
        # print "`compact-lists` is False"
        return False
    # more special cases:
    if (self.topic_classes == ['contents']):  # TODO: self.in_contents
        return True
    # check the list items:
    return self.check_simple_list(node)
def indent(self, match, context, next_state):
    """Definition list item."""
    definitionlist = nodes.definition_list()
    definitionlistitem, blank_finish = self.definition_list_item(context)
    definitionlist += definitionlistitem
    self.parent += definitionlist
    offset = self.state_machine.line_offset + 1   # next line
    newline_offset, blank_finish = self.nested_list_parse(
        self.state_machine.input_lines[offset:],
        input_offset=self.state_machine.abs_line_offset() + 1,
        node=definitionlist, initial_state='DefinitionList',
        blank_finish=blank_finish, blank_finish_state='Definition')
    self.goto_line(newline_offset)
    if not blank_finish:
        self.parent += self.unindent_warning('Definition list')
    return [], 'Body', []
def term(self, lines, lineno):
    """Return a definition_list's term and optional classifiers."""
    assert len(lines) == 1
    text_nodes, messages = self.inline_text(lines[0], lineno)
    term_node = nodes.term()
    (term_node.source,
     term_node.line) = self.state_machine.get_source_and_line(lineno)
    term_node.rawsource = unescape(lines[0])
    node_list = [term_node]
    for i in range(len(text_nodes)):
        node = text_nodes[i]
        if isinstance(node, nodes.Text):
            parts = self.classifier_delimiter.split(node.rawsource)
            if len(parts) == 1:
                node_list[-1] += node
            else:
                node_list[-1] += nodes.Text(parts[0].rstrip())
                for part in parts[1:]:
                    classifier_node = nodes.classifier('', part)
                    node_list.append(classifier_node)
        else:
            node_list[-1] += node
    return node_list, messages
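For context on what the term/classifier splitting above operates on, here is a small sketch (assuming only a plain docutils install, unrelated to the project code) of the reST syntax that produces term, classifier, and definition nodes in the doctree:

from docutils.core import publish_doctree

# "term : classifier" on the first line, with an indented body below, is the
# reST form that parses into a definition_list with classifier children.
source = """\
spam : noun
    A canned meat product.
"""
print(publish_doctree(source).pformat())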
def run(self):
    self.assert_has_content()
    title = self.arguments[0]
    content = '\n'.join(self.content)
    math_node = self.make_math_node(self.prepare_latex(content))

    tid = nodes.make_id(title)
    target = nodes.target('', '', ids=['inference-' + tid])
    self.state.document.note_explicit_target(target)

    term, desc = nodes.term('', title), nodes.description('', math_node)
    dli = nodes.definition_list_item('', term, desc)
    dl = nodes.definition_list(content, target, dli)
    set_source_info(self, dl)
    return [dl]
def _format_subcommands(self, parser_info):
    assert 'children' in parser_info

    items = []
    for subcmd in parser_info['children']:
        subcmd_items = []

        if subcmd['help']:
            subcmd_items.append(nodes.paragraph(text=subcmd['help']))
        else:
            subcmd_items.append(nodes.paragraph(text='Undocumented'))

        items.append(
            nodes.definition_list_item(
                '',
                nodes.term('', '', nodes.strong(
                    text=subcmd['bare_usage'])),
                nodes.definition('', *subcmd_items)))

    return nodes.definition_list('', *items)
def indent(self, match, context, next_state):
    definitionlist = nodes.definition_list()
    definitionlistitem, blank_finish = self.definition_list_item(context)
    definitionlist += definitionlistitem
    self.parent += definitionlist
    offset = self.state_machine.line_offset + 1   # next line
    newline_offset, blank_finish = self.nested_list_parse(
        self.state_machine.input_lines[offset:],
        input_offset=self.state_machine.abs_line_offset() + 1,
        node=definitionlist, initial_state='DefinitionList',
        blank_finish=blank_finish, blank_finish_state='Definition')

    raw = []
    indent = ' '

    def _indent(line):
        if line == '':
            return ''
        return indent + line

    prefix = ' '
    for child in definitionlist.children:
        line = child.rawsource.split('\n')
        raw.append(line[0])
        for definition in line[1:]:
            if definition == '':
                raw.append('')
            else:
                raw.append(prefix + definition)
        raw.append('')
    definitionlist.rawsource = '\n'.join(raw).strip()

    self.goto_line(newline_offset)
    if not blank_finish:
        self.parent += self.unindent_warning('Definition list')
    return [], 'Body', []
def run(self) -> List[nodes.Node]:
    """Run the directive.

    Returns:
        A list of Node objects.
    """
    root_item = self._retrieve_item()

    if "item_id" in self.options:
        root_item = root_item.get_item_by_id(
            self.options["item_id"], raising=True)
    if "children" in self.options:
        root_item.parent = None

    # Parse directive and get content to extend items with.
    nested_nodes = nodes.paragraph()
    self.state.nested_parse(
        self.content, self.content_offset, nested_nodes)
    def_list = _get_matching_child(
        nested_nodes, nodes.definition_list, last=False)
    if def_list is not None:
        definitions = _parse_definition_list(def_list)
    else:
        definitions = collections.defaultdict(lambda: None)

    return self._parse_tree(root_item, definitions)
def add_coqtop_output(self):
    """Add coqtop's responses to a Sphinx AST

    Finds nodes to process using is_coqtop_block."""
    with CoqTop(color=True) as repl:
        for node in self.document.traverse(CoqtopBlocksTransform.is_coqtop_block):
            options = node['coqtop_options']
            opt_undo, opt_reset, opt_input, opt_output = self.parse_options(options)

            if opt_reset:
                repl.sendone("Reset Initial.")
            pairs = []
            for sentence in self.split_sentences(node.rawsource):
                pairs.append((sentence, repl.sendone(sentence)))
            if opt_undo:
                repl.sendone("Undo {}.".format(len(pairs)))

            dli = nodes.definition_list_item()
            for sentence, output in pairs:
                # Use Coqdoq to highlight input
                in_chunks = highlight_using_coqdoc(sentence)
                dli += nodes.term(sentence, '', *in_chunks,
                                  classes=self.block_classes(opt_input))
                # Parse ANSI sequences to highlight output
                out_chunks = AnsiColorsParser().colorize_str(output)
                dli += nodes.definition(output, *out_chunks,
                                        classes=self.block_classes(opt_output, output))
            node.clear()
            node.rawsource = self.make_rawsource(pairs, opt_input, opt_output)
            node['classes'].extend(self.block_classes(opt_input or opt_output))
            node += nodes.inline('', '', classes=['coqtop-reset'] * opt_reset)
            node += nodes.definition_list(node.rawsource, dli)
def map_nested_definitions(nested_content):
    if nested_content is None:
        raise Exception('Nested content should be iterable, not null')
    # build definition dictionary
    definitions = {}
    for item in nested_content:
        if not isinstance(item, nodes.definition_list):
            continue
        for subitem in item:
            if not isinstance(subitem, nodes.definition_list_item):
                continue
            if not len(subitem.children) > 0:
                continue
            classifier = '@after'
            idx = subitem.first_child_matching_class(nodes.classifier)
            if idx is not None:
                ci = subitem[idx]
                if len(ci.children) > 0:
                    classifier = ci.children[0].astext()
            if classifier is not None and classifier not in (
                    '@replace', '@before', '@after'):
                raise Exception('Unknown classifier: %s' % classifier)
            idx = subitem.first_child_matching_class(nodes.term)
            if idx is not None:
                ch = subitem[idx]
                if len(ch.children) > 0:
                    term = ch.children[0].astext()
                    idx = subitem.first_child_matching_class(nodes.definition)
                    if idx is not None:
                        def_node = subitem[idx]
                        def_node.attributes['classifier'] = classifier
                        definitions[term] = def_node
    return definitions
def print_command_args_and_opts(arg_list, opt_list, sub_list=None):
    items = []
    if arg_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Positional arguments:'),
            nodes.definition('', arg_list)))
    if opt_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Options:'),
            nodes.definition('', opt_list)))
    if sub_list and len(sub_list):
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Sub-commands:'),
            nodes.definition('', sub_list)))
    return nodes.definition_list('', *items)
def print_subcommand_list(data, nested_content):
    definitions = map_nested_definitions(nested_content)
    items = []
    if 'children' in data:
        for child in data['children']:
            my_def = [nodes.paragraph(
                text=child['help'])] if child['help'] else []
            name = child['name']
            my_def = apply_definition(definitions, my_def, name)
            if len(my_def) == 0:
                my_def.append(nodes.paragraph(text='Undocumented'))
            if 'description' in child:
                my_def.append(nodes.paragraph(text=child['description']))
            my_def.append(nodes.literal_block(text=child['usage']))
            my_def.append(print_command_args_and_opts(
                print_arg_list(child, nested_content),
                print_opt_list(child, nested_content),
                print_subcommand_list(child, nested_content)
            ))
            items.append(
                nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.strong(text=name)),
                    nodes.definition('', *my_def)
                )
            )
    return nodes.definition_list('', *items)
def _parse_definition_list(
        def_list_node: nodes.definition_list) -> ExtraContentDict:
    """Parse a definition list inside the directive.

    Args:
        def_list_node: A definition list node containing definitions for
            extending the Sphinx output.

    Raises:
        ValueError: The given classifier was unrecognized.

    Returns:
        A dict where keys are item IDs and values contain the classifiers
        and the content as lists of docutils nodes.
    """
    definitions = collections.defaultdict(lambda: None)
    for node in def_list_node:
        if not isinstance(node, nodes.definition_list_item):
            continue

        term = _get_matching_child(node, nodes.term, last=False).astext()

        classifiers = set()
        for child_node in node.children:
            if not isinstance(child_node, nodes.classifier):
                continue
            classifier = child_node.astext()
            if classifier not in ALL_CLASSIFIERS:
                raise ValueError("unknown classifier '{0}'".format(classifier))
            classifiers.add(classifier)
        if not classifiers & CONTENT_CLASSIFIERS:
            classifiers.add("@after")
        if not classifiers & MARKUP_CLASSIFIERS:
            classifiers.add("@auto")

        content = _get_matching_child(
            node, nodes.definition, last=False).children

        if not definitions[term]:
            definitions[term] = []
        definitions[term].append(ExtraContent(classifiers, content))

    return definitions