def print_command_args_and_opts(arg_list, opt_list, sub_list=None):
    items = []
    if arg_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Positional arguments:'),
            nodes.definition('', arg_list)))
    if opt_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Options:'),
            nodes.definition('', opt_list)))
    if sub_list and len(sub_list):
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Sub-commands:'),
            nodes.definition('', sub_list)))
    return nodes.definition_list('', *items)

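# Illustrative usage sketch (not part of the original extension): arg_list and
# opt_list would normally come from companion helpers such as print_arg_list /
# print_opt_list, so the bullet lists below are hand-built stand-ins used only
# to show the doctree this function produces.
from docutils import nodes

_args = nodes.bullet_list(
    '', nodes.list_item('', nodes.paragraph(text='FILE: the input file')))
_opts = nodes.bullet_list(
    '', nodes.list_item('', nodes.paragraph(text='--verbose: print more output')))
print(print_command_args_and_opts(_args, _opts).pformat())
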
def document_permissions(self) -> docutils.nodes.section:
    """Document workflow permissions.

    :returns: A section node containing the doctree for permissions
        description.
    """
    permissions = self.wf_elements.get('permissions')
    wf_name = self.wf_name
    node = nodes.section(
        ids=['workflows-{wf_name}-permissions'.format(wf_name=wf_name)])
    node += nodes.title(text='Permissions')
    dl = nodes.definition_list()
    for permission in permissions:
        permission_id = 'workflows-{wf_name}-permissions-{permission}'.format(
            wf_name=wf_name, permission=permission.name)
        t_node = nodes.definition_list_item()
        t_node += nodes.term(text=permission.name, ids=[permission_id])
        dd = nodes.definition()
        dd += self._rst2node(permission.__doc__)
        if hasattr(permission, 'groups'):
            dd += nodes.paragraph(text='Groups: {groups}'.format(
                groups=', '.join(permission.groups)))
        t_node += dd
        dl += t_node
    node += dl
    return node

def document_transitions(self) -> docutils.nodes.section:
    """Document workflow transitions.

    :returns: A section node containing the doctree for transitions
        description.
    """
    transitions = self.wf_elements.get('transitions')
    wf_name = self.wf_name
    node = nodes.section(
        ids=['workflows-{wf_name}-transitions'.format(wf_name=wf_name)])
    node += nodes.title(text='Transitions')
    dl = nodes.definition_list()
    for transition in transitions:
        transition_id = 'workflows-{wf_name}-transitions-{transition}'.format(
            wf_name=wf_name, transition=transition.name)
        t_node = nodes.definition_list_item()
        title = '{title} ({from_} → {to_})'.format(
            title=transition.title,
            from_=transition.state_from().name,
            to_=transition.state_to().name,
        )
        t_node += nodes.term(text=title, ids=[transition_id])
        dd = nodes.definition()
        dd += self._rst2node(transition.__doc__)
        dd += nodes.paragraph(text='Permission: {permission}'.format(
            permission=transition.permission))
        t_node += dd
        dl += t_node
    node += dl
    return node

def document_states(self) -> docutils.nodes.section:
    """Document workflow states.

    :returns: A section node containing the doctree for states description.
    """
    states = self.wf_elements.get('states')
    wf_name = self.wf_name
    node = nodes.section(
        ids=['workflows-{wf_name}-states'.format(wf_name=wf_name)])
    node += nodes.title(text='States')
    dl = nodes.definition_list()
    for state in states:
        state_id = 'workflows-{wf_name}-states-{state}'.format(
            wf_name=wf_name, state=state.name)
        state_node = nodes.definition_list_item()
        state_node += nodes.term(text=state.title, ids=[state_id])
        dd = nodes.definition()
        dd += self._rst2node(state.description)
        transitions = nodes.bullet_list()
        for t_name in state._transitions:
            t = state._transitions[t_name]
            temp = nodes.list_item()
            temp += nodes.inline(text=t.name)
            temp += nodes.inline(text=' ({state_to})'.format(
                state_to=t.state_to().name))
            transitions += temp
        dd += transitions
        state_node += dd
        dl += state_node
    node += dl
    return node

def contribute_property(self, prop_list, prop_key, prop):
    prop_item = nodes.definition_list_item(
        '', nodes.term('', prop_key))
    prop_list.append(prop_item)

    prop_item.append(nodes.classifier('', prop.type))

    definition = nodes.definition()
    prop_item.append(definition)

    if not prop.implemented:
        para = nodes.inline('', _('Not implemented.'))
        warning = nodes.note('', para)
        definition.append(warning)
        return

    if prop.description:
        para = nodes.paragraph('', prop.description)
        definition.append(para)

    if prop.update_allowed:
        para = nodes.paragraph('',
                               _('Can be updated without replacement.'))
        definition.append(para)
    else:
        para = nodes.paragraph('', _('Updates cause replacement.'))
        definition.append(para)

    if prop.required:
        para = nodes.paragraph('', _('Required property.'))
    elif prop.default is not None:
        para = nodes.paragraph(
            '', _('Optional property, defaults to "%s".') % prop.default)
    else:
        para = nodes.paragraph('', _('Optional property.'))
    definition.append(para)

    for constraint in prop.constraints:
        para = nodes.paragraph('', str(constraint))
        definition.append(para)

    sub_schema = None
    if prop.schema and prop.type == properties.MAP:
        para = nodes.emphasis('', _('Map properties:'))
        definition.append(para)
        sub_schema = prop.schema
    elif prop.schema and prop.type == properties.LIST:
        para = nodes.emphasis(
            '', _('List contents:'))
        definition.append(para)
        sub_schema = prop.schema

    if sub_schema:
        sub_prop_list = nodes.definition_list()
        definition.append(sub_prop_list)
        for sub_prop_key in sorted(sub_schema.keys()):
            sub_prop = sub_schema[sub_prop_key]
            self.contribute_property(sub_prop_list, sub_prop_key, sub_prop)

def _definition_item(self, term, classifier):
    item = nodes.definition_list_item()
    term = nodes.term(text=term)
    item.append(term)
    classifier = nodes.classifier(text=classifier)
    item.append(classifier)
    return item

def contribute_attributes(self, parent):
    if not self.attrs_schemata:
        return
    section = self._section(parent, _('Attributes'), '%s-attrs')
    prop_list = nodes.definition_list()
    section.append(prop_list)
    for prop_key, prop in sorted(self.attrs_schemata.items()):
        description = prop.description
        prop_item = nodes.definition_list_item(
            '', nodes.term('', prop_key))
        prop_list.append(prop_item)

        definition = nodes.definition()
        prop_item.append(definition)

        if prop.support_status.status != support.SUPPORTED:
            sstatus = prop.support_status.to_dict()
            msg = _('%(status)s')
            if sstatus['message'] is not None:
                msg = _('%(status)s - %(message)s')
            para = nodes.inline('', msg % sstatus)
            warning = nodes.note('', para)
            definition.append(warning)

        if description:
            def_para = nodes.paragraph('', description)
            definition.append(def_para)

def make_glossary_term(env, textnodes, index_key, source, lineno, new_id=None):
    # type: (BuildEnvironment, Iterable[nodes.Node], str, str, int, str) -> nodes.term
    # get a text-only representation of the term and register it
    # as a cross-reference target
    term = nodes.term('', '', *textnodes)
    term.source = source
    term.line = lineno

    gloss_entries = env.temp_data.setdefault('gloss_entries', set())
    termtext = term.astext()
    if new_id is None:
        new_id = nodes.make_id('term-' + termtext)
    if new_id in gloss_entries:
        new_id = 'term-' + str(len(gloss_entries))
    gloss_entries.add(new_id)

    std = cast(StandardDomain, env.get_domain('std'))
    std.add_object('term', termtext.lower(), env.docname, new_id)

    # add an index entry too
    indexnode = addnodes.index()
    indexnode['entries'] = [('single', termtext, new_id, 'main', index_key)]
    indexnode.source, indexnode.line = term.source, term.line
    term.append(indexnode)

    term['ids'].append(new_id)
    term['names'].append(new_id)

    return term

def make_glossary_term(env, textnodes, index_key, source, lineno, new_id=None):
    # type: (BuildEnvironment, List[nodes.Node], unicode, unicode, int, unicode) -> nodes.term
    # get a text-only representation of the term and register it
    # as a cross-reference target
    term = nodes.term('', '', *textnodes)
    term.source = source
    term.line = lineno

    gloss_entries = env.temp_data.setdefault('gloss_entries', set())
    objects = env.domaindata['std']['objects']

    termtext = term.astext()
    if new_id is None:
        new_id = nodes.make_id('term-' + termtext)
    if new_id in gloss_entries:
        new_id = 'term-' + str(len(gloss_entries))
    gloss_entries.add(new_id)
    objects['term', termtext.lower()] = env.docname, new_id

    # add an index entry too
    indexnode = addnodes.index()
    indexnode['entries'] = [('single', termtext, new_id, 'main', index_key)]
    indexnode.source, indexnode.line = term.source, term.line
    term.append(indexnode)

    term['ids'].append(new_id)
    term['names'].append(new_id)

    return term

def add_coqtop_output(self):
    """Add coqtop's responses to a Sphinx AST.

    Finds nodes to process using is_coqtop_block."""
    with CoqTop(color=True) as repl:
        for node in self.document.traverse(CoqtopBlocksTransform.is_coqtop_block):
            options = node['coqtop_options']
            opt_undo, opt_reset, opt_input, opt_output = self.parse_options(options)

            if opt_reset:
                repl.sendone("Reset Initial.")
            pairs = []
            for sentence in self.split_sentences(node.rawsource):
                pairs.append((sentence, repl.sendone(sentence)))
            if opt_undo:
                repl.sendone("Undo {}.".format(len(pairs)))

            dli = nodes.definition_list_item()
            for sentence, output in pairs:
                # Use Coqdoc to highlight input
                in_chunks = highlight_using_coqdoc(sentence)
                dli += nodes.term(sentence, '', *in_chunks,
                                  classes=self.block_classes(opt_input))
                # Parse ANSI sequences to highlight output
                out_chunks = AnsiColorsParser().colorize_str(output)
                dli += nodes.definition(output, *out_chunks,
                                        classes=self.block_classes(opt_output, output))
            node.clear()
            node.rawsource = self.make_rawsource(pairs, opt_input, opt_output)
            node['classes'].extend(self.block_classes(opt_input or opt_output))
            node += nodes.inline('', '', classes=['coqtop-reset'] * opt_reset)
            node += nodes.definition_list(node.rawsource, dli)

def _build_definition_list(self, defaults):
    from oauth2_provider import settings

    items = []
    for setting, default_value in defaults:
        text_nodes, messages = self.state.inline_text(
            DESCRIPTIONS.get(setting, "TODO").strip(), self.lineno)

        node_name = nodes.literal(text=setting)

        node_default = nodes.paragraph(text="Default value: ")
        node_default += nodes.literal(text=repr(default_value))

        node_description = nodes.paragraph()
        node_description.extend(text_nodes)

        subitems = [node_default, node_description]

        if setting in settings.MANDATORY:
            notice = nodes.paragraph()
            notice += nodes.strong(text="The value cannot be empty.")
            subitems.append(notice)

        term = nodes.term()
        term += node_name

        items.append(
            nodes.definition_list_item('', term,
                                       nodes.definition('', *subitems)))

    deflist = nodes.definition_list('', *items)
    return [deflist]

def _create_section(self, parent, sectionid, title=None, term=None):
    """Create a new section.

    :returns: If term is specified, returns a definition node contained
        within the newly created section. Otherwise return the newly
        created section node.
    """
    idb = nodes.make_id(sectionid)
    section = nodes.section(ids=[idb])
    parent.append(section)

    if term:
        if term != '**':
            section.append(nodes.term('', term))

        definition = nodes.definition()
        section.append(definition)

        return definition

    if title:
        section.append(nodes.title('', title))

    return section

def run(self):
    # Raise an error if the directive does not have contents.
    self.assert_has_content()

    self.document = self.state_machine.document
    text = '\n'.join(self.content)

    # Create the admonition node, to be populated by `nested_parse`.
    self.name = self.arguments[0].strip()

    term = nodes.term()
    term += nodes.strong(text=self.arguments[0])

    targetnode = self.make_targetnode()

    deflist = nodes.definition_list()
    configuration_def = nodes.definition_list_item()
    configuration_def += term
    defn = nodes.definition()
    configuration_def += defn
    deflist += configuration_def

    # Parse the directive contents.
    self.state.nested_parse(self.content, self.content_offset, defn)

    option_map = {}
    option_map['features'] = 'Required for features'
    field_list = self.options_to_field_list(option_map)
    if field_list is not None:
        defn += field_list

    self.parsed('configuration').append(self)

    return [targetnode, deflist]

def run(self):
    result = nodes.definition_list()
    for option in sorted(self.options.keys()):
        if option == 'added':
            continue
        term = option.capitalize()
        result += nodes.term(text=term)
        definition = nodes.definition()
        if option in ['kerk', 'predikant', 'tags']:
            taglink = {
                'kerk': SERMONCHURHLINK,
                'predikant': SERMONREFERENTLINK,
                'tags': SERMONTAGLINK,
            }[option]
            value = self.options[option]
            values = [value.strip() for value in value.split(',')]
            paragraph = nodes.paragraph()
            for i, value in enumerate(values):
                link = taglink % value
                paragraph += nodes.reference(refuri=link, text=value)
                if not i == len(values) - 1:
                    paragraph += nodes.inline(text=', ')
            definition += paragraph
        else:
            paragraph = nodes.paragraph()
            paragraph += nodes.inline(text=self.options[option])
            definition += paragraph
        result += definition
    return [result]

def print_subcommand_list(data, nested_content):
    definitions = map_nested_definitions(nested_content)
    items = []
    if 'children' in data:
        for child in data['children']:
            if 'description' in child and child['description']:
                my_def = [nodes.paragraph(text=child['description'])]
            elif child['help']:
                my_def = [nodes.paragraph(text=child['help'])]
            else:
                my_def = []
            name = child['name']
            my_def = apply_definition(definitions, my_def, name)
            if len(my_def) == 0 and 'description' not in child:
                my_def.append(nodes.paragraph(text='Undocumented'))
            my_def.append(nodes.literal_block(text=child['usage']))
            my_def.append(
                print_command_args_and_opts(
                    print_arg_list(child, nested_content),
                    print_opt_list(child, nested_content),
                    print_subcommand_list(child, nested_content)))
            items.append(
                nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.strong(text=name)),
                    nodes.definition('', *my_def)))
    return nodes.definition_list('', *items)

def add_coq_output_1(self, repl, node):
    options = self.parse_options(node)

    pairs = []

    if options['restart']:
        repl.sendone("Restart.")
    if options['reset']:
        repl.sendone("Reset Initial.")
        repl.sendone("Set Coqtop Exit On Error.")
    if options['fail']:
        repl.sendone("Unset Coqtop Exit On Error.")
    for sentence in self.split_sentences(node.rawsource):
        pairs.append((sentence, repl.sendone(sentence)))
    if options['abort']:
        repl.sendone("Abort All.")
    if options['fail']:
        repl.sendone("Set Coqtop Exit On Error.")

    dli = nodes.definition_list_item()
    for sentence, output in pairs:
        # Use Coqdoc to highlight input
        in_chunks = highlight_using_coqdoc(sentence)
        dli += nodes.term(sentence, '', *in_chunks,
                          classes=self.block_classes(options['input']))
        # Parse ANSI sequences to highlight output
        out_chunks = AnsiColorsParser().colorize_str(output)
        dli += nodes.definition(output, *out_chunks,
                                classes=self.block_classes(options['output'], output))
    node.clear()
    node.rawsource = self.make_rawsource(pairs, options['input'], options['output'])
    node['classes'].extend(self.block_classes(options['input'] or options['output']))
    node += nodes.inline('', '', classes=['coqtop-reset'] * options['reset'])
    node += nodes.definition_list(node.rawsource, dli)

def print_subcommand_list(data, nested_content):
    definitions = map_nested_definitions(nested_content)
    items = []
    if 'children' in data:
        for child in data['children']:
            my_def = [nodes.paragraph(
                text=child['help'])] if child['help'] else []
            name = child['name']
            my_def = apply_definition(definitions, my_def, name)
            if len(my_def) == 0:
                my_def.append(nodes.paragraph(text='Undocumented'))
            my_def.append(nodes.literal_block(text=child['usage']))
            my_def.append(print_command_args_and_opts(
                print_arg_list(child, nested_content),
                print_opt_list(child, nested_content),
                text_from_rst(child.get('description', ""), is_rst=True),
                print_subcommand_list(child, nested_content),
            ))
            items.append(
                nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.strong(text=name)),
                    nodes.definition('', *my_def)
                )
            )
    return nodes.definition_list('', *items)

def make_glossary_term(env, textnodes, index_key, source, lineno, new_id=None):
    # get a text-only representation of the term and register it
    # as a cross-reference target
    term = nodes.term('', '', *textnodes)
    term.source = source
    term.line = lineno

    gloss_entries = env.temp_data.setdefault('gloss_entries', set())
    objects = env.domaindata['std']['objects']

    termtext = term.astext()
    if new_id is None:
        new_id = nodes.make_id('term-' + termtext)
    if new_id in gloss_entries:
        new_id = 'term-' + str(len(gloss_entries))
    gloss_entries.add(new_id)
    objects['term', termtext.lower()] = env.docname, new_id

    # add an index entry too
    indexnode = addnodes.index()
    indexnode['entries'] = [('single', termtext, new_id, 'main', index_key)]
    indexnode.source, indexnode.line = term.source, term.line
    term.append(indexnode)

    term['ids'].append(new_id)
    term['names'].append(new_id)

    return term

def _process_one(self, doc_expr, document):
    def_list_item = nodes.definition_list_item()

    # Create a target for this documentation entry so that the rest of
    # the documentation can reference it (see `properties_dsl_ref` below).
    target_id = 'properties-dsl-{}'.format(doc_expr.name)
    target_node = nodes.target('', '', ids=[target_id], names=[target_id])
    document.note_explicit_target(target_node)

    term = nodes.term()
    term_label = '**{}**'.format(doc_expr.name)
    if doc_expr.is_attribute:
        term_label = r'{}.\ {}'.format(doc_expr.prefix_name, term_label)

    argspec = doc_expr.argspec
    if argspec is None:
        pass
    elif len(argspec) == 0:
        term_label += r'\ ()'
    else:
        term_label += r'\ (\ *{}*\ )'.format(', '.join(argspec))
    self._parse([term_label], term)

    definition = nodes.definition()
    doc = doc_expr.doc or '*Not yet documented*'
    self._parse(self._prepare_docstring(doc), definition)

    def_list_item.append(target_node)
    def_list_item.append(term)
    def_list_item.append(definition)

    return def_list_item

def make_glossary_term(env: "BuildEnvironment", textnodes: Iterable[Node], index_key: str,
                       source: str, lineno: int, new_id: str = None) -> nodes.term:
    # get a text-only representation of the term and register it
    # as a cross-reference target
    term = nodes.term('', '', *textnodes)
    term.source = source
    term.line = lineno

    gloss_entries = env.temp_data.setdefault('gloss_entries', set())
    termtext = term.astext()
    if new_id is None:
        new_id = nodes.make_id('term-' + termtext)
        if new_id == 'term':
            # the term is not good for node_id. Generate it by sequence number instead.
            new_id = 'term-' + str(len(gloss_entries))
    if new_id in gloss_entries:
        new_id = 'term-' + str(len(gloss_entries))
    gloss_entries.add(new_id)

    std = cast(StandardDomain, env.get_domain('std'))
    std.add_object('term', termtext.lower(), env.docname, new_id)

    # add an index entry too
    indexnode = addnodes.index()
    indexnode['entries'] = [('single', termtext, new_id, 'main', index_key)]
    indexnode.source, indexnode.line = term.source, term.line
    term.append(indexnode)

    term['ids'].append(new_id)
    term['names'].append(new_id)

    return term

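# Illustrative note (not from the original sources): nodes.make_id() lowercases
# the text and collapses any run of characters outside [a-z0-9] into a hyphen,
# so distinct terms can collide, or the generated id can reduce to the bare
# prefix "term", which is exactly what the fallback branches above guard
# against by switching to a sequence number.
from docutils import nodes

print(nodes.make_id('term-Foo Bar'))  # -> 'term-foo-bar'
print(nodes.make_id('term-C++'))      # -> 'term-c'
print(nodes.make_id('term-C--'))      # -> 'term-c'  (collides with the previous id)
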
def print_command_args_and_opts(arg_list, opt_list, sub_list=None):
    items = []
    if arg_list:
        items.append(
            nodes.definition_list_item(
                '', nodes.term(text='Positional arguments:'),
                nodes.definition('', arg_list)))
    if opt_list:
        items.append(
            nodes.definition_list_item('', nodes.term(text='Options:'),
                                       nodes.definition('', opt_list)))
    if sub_list and len(sub_list):
        items.append(
            nodes.definition_list_item('', nodes.term(text='Sub-commands:'),
                                       nodes.definition('', sub_list)))
    return nodes.definition_list('', *items)

def make_glossary_term(env: "BuildEnvironment", textnodes: Iterable[Node], index_key: str,
                       source: str, lineno: int, node_id: str,
                       document: nodes.document) -> nodes.term:
    # get a text-only representation of the term and register it
    # as a cross-reference target
    term = nodes.term('', '', *textnodes)
    term.source = source
    term.line = lineno
    termtext = term.astext()

    if node_id:
        # node_id is given from outside (mainly i18n module), use it forcedly
        term['ids'].append(node_id)
    else:
        node_id = make_id(env, document, 'term', termtext)
        term['ids'].append(node_id)
        document.note_explicit_target(term)

    std = cast(StandardDomain, env.get_domain('std'))
    std._note_term(termtext, node_id, location=term)

    # add an index entry too
    indexnode = addnodes.index()
    indexnode['entries'] = [('single', termtext, node_id, 'main', index_key)]
    indexnode.source, indexnode.line = term.source, term.line
    term.append(indexnode)

    return term

def print_command_args_and_opts(arg_list, opt_list, sub_list=None):
    items = []
    if arg_list:
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Positional arguments:'),
            nodes.definition('', arg_list)))
    for opt_dict in opt_list:
        opts = opt_dict['options']
        if opts is not None:
            items.append(nodes.definition_list_item(
                '', nodes.term(text=opt_dict['title']),
                nodes.definition('', opts)))
    if sub_list and len(sub_list):
        items.append(nodes.definition_list_item(
            '', nodes.term(text='Sub-commands:'),
            nodes.definition('', sub_list)))
    return nodes.definition_list('', *items)

def contribute_property(self, prop_list, prop_key, prop):
    prop_item = nodes.definition_list_item('', nodes.term('', prop_key))
    prop_list.append(prop_item)

    prop_item.append(nodes.classifier('', prop.type))

    definition = nodes.definition()
    prop_item.append(definition)

    if not prop.implemented:
        para = nodes.inline('', _('Not implemented.'))
        warning = nodes.note('', para)
        definition.append(warning)
        return

    if prop.description:
        para = nodes.paragraph('', prop.description)
        definition.append(para)

    if prop.update_allowed:
        para = nodes.paragraph('',
                               _('Can be updated without replacement.'))
        definition.append(para)
    else:
        para = nodes.paragraph('', _('Updates cause replacement.'))
        definition.append(para)

    if prop.required:
        para = nodes.paragraph('', _('Required property.'))
    elif prop.default is not None:
        para = nodes.paragraph(
            '', _('Optional property, defaults to "%s".') % prop.default)
    else:
        para = nodes.paragraph('', _('Optional property.'))
    definition.append(para)

    for constraint in prop.constraints:
        para = nodes.paragraph('', str(constraint))
        definition.append(para)

    sub_schema = None
    if prop.schema and prop.type == properties.MAP:
        para = nodes.emphasis('', _('Map properties:'))
        definition.append(para)
        sub_schema = prop.schema
    elif prop.schema and prop.type == properties.LIST:
        para = nodes.emphasis('', _('List contents:'))
        definition.append(para)
        sub_schema = prop.schema

    if sub_schema:
        sub_prop_list = nodes.definition_list()
        definition.append(sub_prop_list)
        for sub_prop_key in sorted(sub_schema.keys()):
            sub_prop = sub_schema[sub_prop_key]
            self.contribute_property(sub_prop_list, sub_prop_key, sub_prop)

def run(self):
    options = io.get_options_for_format(self.arguments[0])
    field_list_node = nodes.definition_list()
    for name, description, value in options:
        item = nodes.definition_list_item()
        item.append(nodes.term(name + ' ', name + ' '))
        item.append(nodes.definition('', nodes.paragraph('', description)))
        field_list_node.append(item)
    return [field_list_node]

def make_term_from_paragraph_node(termnodes, ids):
    # make a single "term" node with all the terms, separated by termsep
    # nodes (remove the dangling trailing separator)
    term = nodes.term('', '', *termnodes[:-1])
    term.source, term.line = termnodes[0].source, termnodes[0].line
    term.rawsource = term.astext()
    term['ids'].extend(ids)
    term['names'].extend(ids)
    return term

def run(self):
    options = io.get_options_for_format(self.arguments[0])
    field_list_node = nodes.definition_list()
    for name, description, value in options:
        item = nodes.definition_list_item()
        item.append(nodes.term(name + ' ', name + ' '))
        item.append(nodes.definition('', nodes.paragraph('', description)))
        field_list_node.append(item)
    return [field_list_node]

def run(self):
    # Raise an error if the directive does not have contents.
    self.assert_has_content()

    self.document = self.state_machine.document
    text = '\n'.join(self.content)

    # Create the admonition node, to be populated by `nested_parse`.
    self.name = self.arguments[0]

    term = nodes.term()
    term += nodes.strong(text=self.arguments[0])

    targetnode = self.make_targetnode()

    deflist = nodes.definition_list()
    test_def = nodes.definition_list_item()
    test_def += term
    defn = nodes.definition()
    test_def += defn
    deflist += test_def

    # Parse the direction list if provided (comma-separated).
    if 'direction' in self.options:
        input = 0
        output = 0
        for p in self.options['direction'].split(","):
            # print "Testing `" + p.strip() + "' in test_procedure `" + self.name + "'..."
            if p == "input":
                input = 1
            if p == "output":
                output = 2
        self.direction = input + output

    # Parse the directive contents.
    self.state.nested_parse(self.content, self.content_offset, defn)

    option_map = {}
    option_map['setup'] = 'Required setup'
    option_map['direction'] = 'Direction (input|output|both)'
    field_list = self.options_to_field_list(option_map)
    if field_list is not None:
        defn += field_list

    # print "*** TestProcedure options setup = " + self.options['setup']
    if 'setup' in self.options:
        self.setup = self.options['setup']

    self.parsed('test_procedure').append(self)

    return [targetnode, deflist]

def format_properties(model):
    list = nodes.definition_list()
    for prop in model['properties']:
        term = nodes.term('', prop['name'])
        definition = nodes.definition('')
        definition += nodes.strong('', '(' + prop['type'] + ') ')
        definition += format_javadoc(prop['doc'])
        item = nodes.definition_list_item('', term, definition)
        list.append(item)
    return list

def print_command_args_and_opts(arg_list, opt_list, sub_list=None):
    items = []
    if arg_list:
        items.append(
            nodes.definition_list_item(
                '', nodes.term(text='Positional arguments:'),
                nodes.definition('', arg_list)))
    for opt_dict in opt_list:
        opts = opt_dict['options']
        if opts is not None:
            items.append(
                nodes.definition_list_item('', nodes.term(text=opt_dict['title']),
                                           nodes.definition('', opts)))
    if sub_list and len(sub_list):
        items.append(
            nodes.definition_list_item('', nodes.term(text='Sub-commands:'),
                                       nodes.definition('', sub_list)))
    return nodes.definition_list('', *items)

def run(self):
    # self.assert_has_content()
    self.document = self.state_machine.document
    # text = '\n'.join(self.content)

    # Create the admonition node, to be populated by `nested_parse`.
    self.name = self.arguments[0]

    term = nodes.term()
    term += nodes.strong(text=self.arguments[0])

    targetnode = self.make_targetnode()

    deflist = nodes.definition_list()
    test_def = nodes.definition_list_item()
    test_def += term
    defn = nodes.definition()
    test_def += defn
    deflist += test_def

    # Parse the directive contents.
    self.state.nested_parse(self.content, self.content_offset, defn)

    option_map = {}
    option_map['runtests'] = 'Tests to run'
    field_list = self.options_to_field_list(option_map)

    if 'runtests' in self.options:
        self.runtests = []
        for p in self.options['runtests'].split(","):
            # print "Testing for `" + p.strip() + "' in prepare_setup `" + self.name + "'..."
            newruntests = [
                t for t in self.parsed('test') if p.strip() == t.name
            ]
            if len(newruntests) == 0:
                sys.stderr.write(
                    "ERROR : runtests field couldn't expand to any tests for name `"
                    + p.strip() + "'\n")
                if self.check_errors():
                    exit(1)
            # for t in newruntests :
            #     print "Runtests adding test : " + t.name
            self.runtests.extend(newruntests)
    else:
        self.runtests = []

    if field_list is not None:
        defn += field_list

    self.parsed('prepare_setup').append(self)

    return [targetnode, deflist]

def toDefItem(self, key, value='', Paragraph=None):
    item = nodes.definition_list_item()
    term = nodes.term(key, key)
    define = nodes.definition()
    if Paragraph is None:
        define += nodes.paragraph(value, value)
    else:
        define += Paragraph
    item += term
    item += define
    return item

def codeitem_directive(dirname, arguments, options, content, lineno,
                       content_offset, block_text, state, state_machine):
    if not content:
        content = [u""]

    m = _CALLABLE_RE.match(u"".join(arguments))
    m2 = _OTHER_RE.match(u"".join(arguments))
    if m:
        g = m.groupdict()
        if g['rest'] is None:
            g['rest'] = ''
        if g['args'].strip():
            firstline = "%s%s **%s** (``%s``) %s" % (g['pre'].replace('*', r'\*'),
                                                     g['module'], g['name'],
                                                     g['args'], g['rest'])
        else:
            firstline = "%s%s **%s** () %s" % (g['pre'].replace('*', r'\*'),
                                               g['module'], g['name'], g['rest'])
        if g['module']:
            target = '%s%s' % (g['module'], g['name'])
        else:
            target = g['name']
    elif m2:
        g = m2.groupdict()
        firstline = "%s%s **%s**" % (g['pre'].replace('*', r'\*'),
                                     g['module'], g['name'])
        if g['module']:
            target = '%s%s' % (g['module'], g['name'])
        else:
            target = g['name']
    else:
        firstline = u"".join(arguments)
        target = None

    dl = nodes.definition_list()
    di = nodes.definition_list_item()
    dl += di

    title_stuff, messages = state.inline_text(firstline, lineno)
    dt = nodes.term(firstline, *title_stuff)
    di += dt

    dd = nodes.definition()
    di += dd

    if target:
        dt['ids'] += [rst.make_target_id(target)]

    dl['classes'] += [dirname, 'code-item']
    _nested_parse(state, content, dd)
    return [dl]

def codeitem_directive(dirname, arguments, options, content, lineno,
                       content_offset, block_set, state, state_machine):
    if not content:
        content = [u""]

    m = _CALLABLE_RE.match(u"".join(arguments))
    m2 = _OTHER_RE.match(u"".join(arguments))
    if m:
        g = m.groupdict()
        if g["rest"] is None:
            g["rest"] = ""
        if g["args"].strip():
            firstline = "%s%s **%s** (``%s``) %s" % (
                g["pre"].replace("*", r"\*"),
                g["module"],
                g["name"],
                g["args"],
                g["rest"],
            )
        else:
            firstline = "%s%s **%s** () %s" % (g["pre"].replace("*", r"\*"),
                                               g["module"], g["name"], g["rest"])
        if g["module"]:
            target = "%s%s" % (g["module"], g["name"])
        else:
            target = g["name"]
    elif m2:
        g = m2.groupdict()
        firstline = "%s%s **%s**" % (g["pre"].replace("*", r"\*"),
                                     g["module"], g["name"])
        if g["module"]:
            target = "%s%s" % (g["module"], g["name"])
        else:
            target = g["name"]
    else:
        firstline = u"".join(arguments)
        target = None

    dl = nodes.definition_list()
    di = nodes.definition_list_item()
    dl += di

    title_stuff, messages = state.inline_text(firstline, lineno)
    dt = nodes.term(firstline, *title_stuff)
    di += dt

    dd = nodes.definition()
    di += dd

    if target:
        dt["ids"] += [target]

    dl["classes"] += [dirname, "code-item"]
    _nested_parse(state, content, dd)
    return [dl]

def render(self):
    symbol = self.symbol()
    if not symbol.exceptions:
        yield nodes.paragraph(text=_('None.'))
    else:
        definition_list = nodes.definition_list()
        for k, v in symbol.exceptions.iteritems():
            definition_list_item = nodes.definition_list_item(
                '',
                nodes.term('', '', nodes.literal('', k)),
                nodes.definition('', nodes.paragraph(text=v)))
            definition_list.append(definition_list_item)
        yield definition_list

def run(self):
    proptype = self.arguments[0]
    default = self.arguments[1]

    dl = nodes.definition_list()
    dl['classes'].append('propparams')

    term = nodes.term('', 'Type')
    defnode = nodes.definition('', nodes.paragraph('', proptype))
    dl += nodes.definition_list_item('', term, defnode)

    if 'values' in self.options:
        term = nodes.term('', 'Values')
        defnode = nodes.definition('', nodes.paragraph('', self.options['values']))
        dl += nodes.definition_list_item('', term, defnode)

    term = nodes.term('', 'Default')
    defnode = nodes.definition('', nodes.paragraph('', default))
    dl += nodes.definition_list_item('', term, defnode)

    return [dl]

def render(self):
    symbol = self.symbol()
    if not symbol.exceptions:
        yield nodes.paragraph(text=_('None.'))
    else:
        definition_list = nodes.definition_list()
        for k, v in symbol.exceptions.iteritems():
            definition_list_item = nodes.definition_list_item(
                '', nodes.term('', '', nodes.literal('', k)),
                nodes.definition('', nodes.paragraph(text=v)))
            definition_list.append(definition_list_item)
        yield definition_list

def run(self):
    proptype = self.arguments[0]
    default = self.arguments[1]

    dl = nodes.definition_list()
    dl['classes'].append('propparams')

    term = nodes.term('', 'Type')
    defnode = nodes.definition('', nodes.paragraph('', proptype))
    dl += nodes.definition_list_item('', term, defnode)

    if 'values' in self.options:
        term = nodes.term('', 'Values')
        defnode = nodes.definition(
            '', nodes.paragraph('', self.options['values']))
        dl += nodes.definition_list_item('', term, defnode)

    term = nodes.term('', 'Default')
    defnode = nodes.definition('', nodes.paragraph('', default))
    dl += nodes.definition_list_item('', term, defnode)

    return [dl]

def format_arguments(arguments):
    return [nodes.definition_list(
        '',
        *[
            nodes.definition_list_item(
                '',
                nodes.term(
                    # node.Text('') is required because otherwise for some
                    # reason first name node is seen in HTML output as
                    # `<strong>abc</strong>`.
                    '',
                    *([nodes.Text('')] + (
                        insert_separators([
                            nodes.strong('', '', *[nodes.Text(ch) for ch in name])
                            for name in argument.names
                        ], ', ')
                        if argument.is_option else
                        # Unless node.Text('') is here metavar is written in
                        # bold in the man page.
                        [nodes.Text(''), nodes.emphasis(text=argument.metavar)]
                    ) + (
                        []
                        if not argument.is_option or not argument.nargs else
                        [nodes.Text(' '), nodes.emphasis('', argument.metavar)]
                    ))
                ),
                nodes.definition(
                    '',
                    nodes.paragraph('', *parse_argparse_text(argument.help or ''))),
            )
            for argument in flatten_groups(arguments)
        ] + [
            nodes.definition_list_item(
                '',
                nodes.term(
                    '',
                    nodes.Text(''),
                    nodes.strong(text='-h'),
                    nodes.Text(', '),
                    nodes.strong('', '', nodes.Text('-'), nodes.Text('-help')),
                ),
                nodes.definition(
                    '', nodes.paragraph('', nodes.Text('Display help and exit.')))
            )
        ]
    )]

def run(self):
    # self.assert_has_content()
    self.document = self.state_machine.document
    # text = '\n'.join(self.content)

    # Create the admonition node, to be populated by `nested_parse`.
    self.name = self.arguments[0]

    term = nodes.term()
    term += nodes.strong(text=self.arguments[0])

    targetnode = self.make_targetnode()

    deflist = nodes.definition_list()
    test_def = nodes.definition_list_item()
    test_def += term
    defn = nodes.definition()
    test_def += defn
    deflist += test_def

    # Parse the directive contents.
    self.state.nested_parse(self.content, self.content_offset, defn)

    option_map = {}
    option_map['runtests'] = 'Tests to run'
    field_list = self.options_to_field_list(option_map)

    if 'runtests' in self.options:
        self.runtests = []
        for p in self.options['runtests'].split(","):
            # print "Testing for `" + p.strip() + "' in prepare_setup `" + self.name + "'..."
            newruntests = [t for t in self.parsed('test') if p.strip() == t.name]
            if len(newruntests) == 0:
                sys.stderr.write(
                    "ERROR : runtests field couldn't expand to any tests for name `"
                    + p.strip() + "'\n")
                if self.check_errors():
                    exit(1)
            # for t in newruntests :
            #     print "Runtests adding test : " + t.name
            self.runtests.extend(newruntests)
    else:
        self.runtests = []

    if field_list is not None:
        defn += field_list

    self.parsed('prepare_setup').append(self)

    return [targetnode, deflist]

def run(self):
    refid = 'cmdoption-arg-' + nodes.make_id(self.arguments[0])
    target = nodes.target(names=[refid], ids=[refid])

    dl = nodes.definition_list()
    dt = nodes.definition_list_item()
    term = nodes.term()
    term += nodes.literal(self.arguments[0], self.arguments[0],
                          classes=["descname"])
    dt += term

    definition = nodes.definition()
    dt += definition
    definition.document = self.state.document
    self.state.nested_parse(self.content, self.content_offset, definition)

    dl += dt
    return [target, dl]

def make_glossary_term(env: "BuildEnvironment", textnodes: Iterable[Node], index_key: str,
                       source: str, lineno: int, node_id: str = None,
                       document: nodes.document = None) -> nodes.term:
    # get a text-only representation of the term and register it
    # as a cross-reference target
    term = nodes.term('', '', *textnodes)
    term.source = source
    term.line = lineno
    termtext = term.astext()

    if node_id:
        # node_id is given from outside (mainly i18n module), use it forcedly
        term['ids'].append(node_id)
    elif document:
        node_id = make_id(env, document, 'term', termtext)
        term['ids'].append(node_id)
        document.note_explicit_target(term)
    else:
        warnings.warn('make_glossary_term() expects document is passed as an argument.',
                      RemovedInSphinx40Warning)
        gloss_entries = env.temp_data.setdefault('gloss_entries', set())
        node_id = nodes.make_id('term-' + termtext)
        if node_id == 'term':
            # "term" is not good for node_id. Generate it by sequence number instead.
            node_id = 'term-%d' % env.new_serialno('glossary')
        while node_id in gloss_entries:
            node_id = 'term-%d' % env.new_serialno('glossary')
        gloss_entries.add(node_id)
        term['ids'].append(node_id)

    std = cast(StandardDomain, env.get_domain('std'))
    std.note_object('term', termtext.lower(), node_id, location=(env.docname, lineno))

    # add an index entry too
    indexnode = addnodes.index()
    indexnode['entries'] = [('single', termtext, node_id, 'main', index_key)]
    indexnode.source, indexnode.line = term.source, term.line
    term.append(indexnode)

    return term

def render(self):
    symbol = self.symbol()
    if not symbol.params:
        yield nodes.paragraph(text=_('None.'))
    else:
        definition_list = nodes.definition_list()
        for param in symbol.params:
            param_name = param.get('declname')
            if param_name is not None:
                param_desc = param.get('briefdescription', '')
                definition_list_item = nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.literal('', param_name)),
                    nodes.definition('', nodes.paragraph(text=param_desc)))
                definition_list.append(definition_list_item)
        yield definition_list

def run(self):
    # This starts processing and delegates to specific and generic process
    # methods for forms and fields.
    obj_path = self.content[0]
    formclass = import_obj(obj_path)

    root = nodes.definition_list()
    module, classname = obj_path.rsplit('.', 1)
    root.append(
        nodes.term(None, '',
                   nodes.emphasis(text='Form '),
                   nodes.literal(text='%s.' % module),
                   nodes.strong(text=classname)))

    self.process_form(formclass, root)
    return [root]

def render(self):
    symbol = self.symbol()
    if not symbol.params:
        yield nodes.paragraph(text=_('None.'))
    else:
        definition_list = nodes.definition_list()
        for param in symbol.params:
            param_name = param.get('declname')
            if param_name is not None:
                param_desc = param.get('briefdescription', '')
                definition_list_item = nodes.definition_list_item(
                    '', nodes.term('', '', nodes.literal('', param_name)),
                    nodes.definition('', nodes.paragraph(text=param_desc)))
                definition_list.append(definition_list_item)
        yield definition_list

def run(self):
    container = nodes.definition_list()
    env = self.state.document.settings.env
    root = pathlib.Path(env.relfn2path(self.arguments[0])[1])
    for file_path in root.glob('**/*'):
        if file_path.is_dir():
            continue
        with open(str(file_path), encoding='utf-8', errors='ignore') as f:
            data = f.read()
        container += nodes.definition_list_item()
        container[-1] += nodes.term(text=str(file_path)[len(str(root)) + 1:])
        container[-1] += nodes.definition()
        container[-1][-1] += nodes.literal_block(text=data)
    return [container]

def run(self):
    result = []

    yaml_file = self.arguments[0]
    dl_items = []
    with open(yaml_file) as f:
        descriptor = yaml.load(f, Loader=yaml.FullLoader)

    for option_name, option in descriptor["options"].items():
        term_nodes = [
            nodes.literal("", option_name),
            nodes.Text(" ("),
            nodes.Text(option["type"].lower()),
            nodes.Text(")"),
        ]

        definition_nodes = []

        if "deprecationMessage" in option:
            continue

        if "description" in option:
            for para in option["description"]:
                definition_nodes.append(nodes.paragraph(text=para))

        if "default" in option:
            default_value = option["default"]
            if option["type"] == "BOOLEAN":
                # True, False ==> true, false
                default_value = str(default_value).lower()
            default_nodes = [
                nodes.Text("Default: "),
                nodes.literal("", default_value),
            ]
            definition_nodes.append(nodes.paragraph("", "", *default_nodes))

        # TODO, should become rubrics
        if option["type"] == "LIST":
            continue

        dl_items.append(
            nodes.definition_list_item(
                "",
                nodes.term("", "", *term_nodes),
                nodes.definition("", *definition_nodes),
            )
        )

    result += [nodes.definition_list("", *dl_items)]
    return result

def _format_subcommands(self, parser_info):
    assert 'children' in parser_info

    items = []
    for subcmd in parser_info['children']:
        subcmd_items = []

        if subcmd['help']:
            subcmd_items.append(nodes.paragraph(text=subcmd['help']))
        else:
            subcmd_items.append(nodes.paragraph(text='Undocumented'))

        items.append(
            nodes.definition_list_item(
                '',
                nodes.term('', '', nodes.strong(text=subcmd['bare_usage'])),
                nodes.definition('', *subcmd_items)))

    return nodes.definition_list('', *items)

def _prop_section(self, parent, title, id_pattern):
    id = id_pattern % self.resource_type
    section = nodes.section(ids=[id])
    parent.append(section)

    # Ignore title generated for list items
    if title != '*':
        title = nodes.term('', title)
        ref = nodes.reference('', u'\xb6')
        ref['classes'] = ['headerlink']
        ref['refid'] = id
        title.append(ref)
        section.append(title)

    field = nodes.definition()
    section.append(field)
    return field

def handle_item(fieldarg: str, content: List[nodes.inline]) -> nodes.definition_list_item:
    head = nodes.term()
    head += makerefs(self.rolename, fieldarg, addnodes.literal_strong)
    fieldtype = types.pop(fieldarg, None)
    if fieldtype is not None:
        head += nodes.Text(' : ')
        if len(fieldtype) == 1 and isinstance(fieldtype[0], nodes.Text):
            typename = ''.join(n.astext() for n in fieldtype)
            head += makerefs(self.typerolename, typename, addnodes.literal_emphasis)
        else:
            head += fieldtype

    body_content = nodes.paragraph('', '', *content)
    body = nodes.definition('', body_content)

    return nodes.definition_list_item('', head, body)

def run(self):
    self.assert_has_content()

    title = self.arguments[0]
    content = '\n'.join(self.content)
    math_node = self.make_math_node(self.prepare_latex(content))

    tid = nodes.make_id(title)
    target = nodes.target('', '', ids=['inference-' + tid])
    self.state.document.note_explicit_target(target)

    term, desc = nodes.term('', title), nodes.description('', math_node)
    dli = nodes.definition_list_item('', term, desc)
    dl = nodes.definition_list(content, target, dli)
    set_source_info(self, dl)
    return [dl]

def rst_nodes(self):
    nodelist = []
    for entry in self.parameternamelist:
        nodelist.extend(entry.rst_nodes())
    term = nodes.term("", "", *nodelist)

    nodelist = []
    if self.parameterdescription:
        nodelist.extend(self.parameterdescription.rst_nodes())
    definition = nodes.definition("", *nodelist)

    return [nodes.definition_list_item("", term, definition)]

def run(self):
    # This starts processing and delegates to specific and generic process
    # methods for forms and fields.
    obj_path = self.content[0]
    formclass = import_obj(obj_path)

    root = nodes.definition_list()
    module, classname = obj_path.rsplit('.', 1)
    root.append(nodes.term(
        None, '',
        nodes.emphasis(text='Form '),
        nodes.literal(text='%s.' % module),
        nodes.strong(text=classname)))

    self.process_form(formclass, root)
    return [root]

def describing(self, description=MARKER, after=None):
    dl = self._current_node
    assert isinstance(dl, nodes.definition_list), dl

    item = nodes.definition_list_item()
    dl += item

    term = nodes.term()
    item += term

    self._current_node = term
    yield
    # We must now have either a description (so we call
    # described_as) or they must call described_as
    self._current_node = item
    self._describing(description, after)

def generate_flag_summary(flags, category):
    summary_node = nodes.definition_list_item()
    term_node = nodes.term(text=categories[category])
    summary_node += term_node
    block = nodes.definition()
    summary_node += block

    # Fill block with flags
    for flag_info in flags:
        for name in flag_info['names']:
            block += nodes.literal(text=name)
            block += nodes.inline(text=' ')
        block += nodes.inline(text='\n')

    return summary_node

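# Illustrative usage sketch (not part of the original extension): the
# module-level `categories` mapping and the shape of the `flags` entries are
# assumptions made only so the function above can be exercised. Paste this
# below the function in the same module and run it to inspect the doctree it
# produces.
from docutils import nodes

categories = {'debug': 'Debugging options'}   # assumed mapping
flags = [{'names': ['-g', '--debug']},        # assumed entry shape
         {'names': ['-O0']}]

summary = generate_flag_summary(flags, 'debug')
print(nodes.definition_list('', summary).pformat())
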