def run(self):
    """Build doctree nodes for a ``command-block`` or ``download`` directive.

    In command mode the directive content is parsed into shell commands; in
    download mode a ``wget`` command is synthesized from the ``url`` and
    ``saveas`` options.  Unless execution is disabled, the commands run in a
    per-tutorial working directory and output/stream nodes are appended.
    """
    global CURRENT_TUTORIAL
    global CURRENT_WORKING_DIR

    is_command_block = (self.name == 'command-block')
    options = self.options
    download_opts_given = [key in options for key in ['url', 'saveas']]

    if is_command_block:
        self.assert_has_content()
        if any(download_opts_given):
            raise sphinx.errors.ExtensionError(
                'command-block does not support the following options: '
                '`url`, `saveas`.')
        commands = functools.reduce(
            self._parse_multiline_commands, self.content, [])
        nodes = [self._get_literal_block_node(self.content)]
    else:
        if self.content:
            raise sphinx.errors.ExtensionError(
                'Content block not supported for the download directive.')
        if not all(download_opts_given):
            raise sphinx.errors.ExtensionError(
                'Missing options for the download directive. Please specify '
                '`url` and `saveas`.')
        commands = ['wget -O "%s" "%s"' % (options['saveas'], options['url'])]
        serial = self.state.document.settings.env.new_serialno('download')
        nodes = [download_node(serial, options['url'], options['saveas'])]

    env = self._get_env()
    # Execution is skipped when globally disabled (except on the configured
    # debug page) or when this directive carries the `no-exec` option.
    skip_exec = ((env.config.command_block_no_exec
                  and env.config.debug_page != env.docname)
                 or 'no-exec' in options)
    if not skip_exec:
        if env.docname != CURRENT_TUTORIAL:
            # First executed block of a new document: reset the shared state.
            CURRENT_TUTORIAL = env.docname
            CURRENT_WORKING_DIR = os.path.join(
                env.app.command_block_working_dir.name, env.docname)
        working_dir = os.path.join(
            env.app.command_block_working_dir.name, env.docname)
        os.makedirs(working_dir, exist_ok=True)
        procs = self._execute_commands(commands, working_dir)
        if is_command_block:
            for stream in ['stdout', 'stderr']:
                if stream in options:
                    stream_nodes = self._get_stream_node(procs, stream)
                    if stream_nodes is not None:
                        nodes.extend(stream_nodes)
        # NOTE(review): source indentation was mangled here — this assumes the
        # output-links section applies to both modes; confirm against history.
        artifacts, visualizations = self._get_output_paths(working_dir)
        if artifacts or visualizations:
            nodes.append(
                self._get_output_links_node(artifacts, visualizations))
    return nodes
def run(self):
    """Convert a literate Python script into doctree nodes.

    Reads the script named by the first argument, skips an optional shebang,
    and alternates between doc-comment runs (parsed as reStructuredText, with
    ``sh`` roles linked to logs) and code runs (emitted as literal blocks
    with correct starting line numbers).
    """
    logger = sphinx.util.logging.getLogger(__name__)
    nodes = []
    src = project_root/self.arguments[0]
    with src.open('r', encoding='utf-8') as f:
        lines = LineIter(f)
        if lines and lines.peek.startswith('#!'):
            next(lines)  # Skip the shebang; it is not part of the document.
        while lines:
            if lines.peek.rstrip('\n') == '':
                next(lines)
            elif self._isdocline(lines.peek):
                # Collect all doc lines.
                contents = docutils.statemachine.ViewList()
                while lines and self._isdocline(lines.peek):
                    i, line = next(lines)
                    contents.append(line.lstrip()[2:], self.arguments[0], i)
                # Parse as rst into `node`.
                with sphinx.util.docutils.switch_source_input(self.state,
                                                              contents):
                    node = docutils.nodes.container()
                    self.state.nested_parse(contents, 0, node)
                # Process sh roles.  Add links to logs.
                for sh_node in node.traverse(docutils.nodes.literal):
                    if 'nutils_sh' not in sh_node:
                        continue
                    cmdline = sh_node.get('nutils_sh')
                    cmdline_parts = tuple(shlex.split(cmdline))
                    if cmdline_parts[:2] != ('python3', src.name):
                        # `Logger.warn` is a deprecated alias; use `warning`.
                        logger.warning(
                            'Not creating a log for {}.'.format(cmdline))
                        continue
                    log_link = sphinx.addnodes.only(expr='html')
                    log_link.append(docutils.nodes.inline('', ' '))
                    xref = sphinx.addnodes.pending_xref(
                        '', reftype='nutils-log', refdomain='std',
                        reftarget=cmdline_parts[2:], script=src)
                    xref += docutils.nodes.inline(
                        '', '(view log)', classes=['nutils-log-link'])
                    log_link += xref
                    sh_node.parent.insert(
                        sh_node.parent.index(sh_node)+1, log_link)
                nodes.extend(node.children)
            else:
                # Collect all source lines.
                istart, line = next(lines)
                contents = [line]
                while lines and not self._isdocline(lines.peek):
                    i, line = next(lines)
                    contents.append(line)
                # Remove trailing empty lines.
                while contents and contents[-1].rstrip('\n') == '':
                    del contents[-1]
                contents = ''.join(contents)
                # Create literal block with 1-based starting line number.
                literal = docutils.nodes.literal_block(contents, contents)
                literal['language'] = 'python3'
                literal['linenos'] = True
                literal['highlight_args'] = dict(linenostart=istart+1)
                sphinx.util.nodes.set_source_info(self, literal)
                nodes.append(literal)
    return nodes
def run(self):
    """Render a ``doxygenfile`` directive.

    Looks up the named file in the doxygen XML output for the configured
    project; emits a warning node (and reporter warning) when zero or more
    than one match is found, otherwise renders every match.
    """
    filename = self.arguments[0]
    project_info = self.project_info_factory.create_project_info(
        self.options)
    finder = self.finder_factory.create_finder(project_info)

    matches = []
    finder.filter_(
        self.filter_factory.create_file_finder_filter(filename), matches)

    warning = None
    if len(matches) > 1:
        warning = (
            'doxygenfile: Found multiple matches for file "%s" in doxygen '
            'xml output for project "%s" from directory: %s'
            % (filename, project_info.name(), project_info.path()))
    elif not matches:
        warning = (
            'doxygenfile: Cannot find file "%s" in doxygen xml output for '
            'project "%s" from directory: %s'
            % (filename, project_info.name(), project_info.path()))
    if warning is not None:
        # Surface the problem both in the rendered document and on stderr.
        return [
            docutils.nodes.warning(
                "", docutils.nodes.paragraph(
                    "", "", docutils.nodes.Text(warning))),
            self.state.document.reporter.warning(warning, line=self.lineno),
        ]

    target_handler = self.target_handler_factory.create(
        self.options, project_info, self.state.document)
    filter_ = self.filter_factory.create_file_filter(filename, self.options)
    renderer_factory = self.renderer_factory_creator.create_factory(
        project_info,
        self.state,
        self.state.document,
        filter_,
        target_handler,
    )

    nodes = []
    for data_object in matches:
        renderer = renderer_factory.create_renderer(
            self.root_data_object, data_object)
        nodes.extend(renderer.render())
    return nodes
def create_template_node(self, decl):
    """Creates a node for the ``template <...>`` part of the declaration.

    Returns ``None`` when the declaration has no template parameter list.
    """
    if not decl.templateparamlist:
        return None
    keyword = 'template '
    signode = self.node_factory.desc_signature()
    signode.append(self.node_factory.desc_annotation(keyword, keyword))
    signode.append(self.node_factory.Text('<'))
    # Rendered template parameters go between the angle brackets.
    signode.extend(self.render(decl.templateparamlist))
    signode.append(self.node_factory.Text('>'))
    return signode
def title(self, node):
    """Build title nodes: the optional variable/return type, then the name."""
    rendered = list(self.render_optional(node.type_))
    # Only insert the separating space when a type was actually rendered.
    if rendered:
        rendered.append(self.node_factory.Text(" "))
    rendered.append(self.node_factory.desc_name(text=node.name))
    return rendered
def run(self):
    """Render a ``doxygenfile`` directive.

    Finds the named file in the doxygen XML output; warns (document node plus
    reporter warning) on zero or multiple matches, otherwise renders each
    match with a per-object renderer factory.
    """
    filename = self.arguments[0]
    project_info = self.project_info_factory.create_project_info(self.options)
    finder = self.finder_factory.create_finder(project_info)

    matches = []
    finder.filter_(
        self.filter_factory.create_file_finder_filter(filename), matches)

    warning = None
    if len(matches) > 1:
        warning = (
            'doxygenfile: Found multiple matches for file "%s" in doxygen '
            'xml output for project "%s" from directory: %s'
            % (filename, project_info.name(), project_info.path()))
    elif not matches:
        warning = (
            'doxygenfile: Cannot find file "%s" in doxygen xml output for '
            'project "%s" from directory: %s'
            % (filename, project_info.name(), project_info.path()))
    if warning is not None:
        return [
            docutils.nodes.warning(
                "", docutils.nodes.paragraph(
                    "", "", docutils.nodes.Text(warning))),
            self.state.document.reporter.warning(warning, line=self.lineno),
        ]

    target_handler = self.target_handler_factory.create(
        self.options, project_info, self.state.document)
    filter_ = self.filter_factory.create_file_filter(filename, self.options)
    factory_creator = (
        self.renderer_factory_creator_constructor.create_factory_creator(
            project_info, self.state.document, self.options))

    nodes = []
    for data_object in matches:
        # This API variant builds a fresh factory per matched data object.
        factory = factory_creator.create_factory(
            data_object, self.state, self.state.document,
            filter_, target_handler)
        renderer = factory.create_renderer(self.root_data_object, data_object)
        nodes.extend(renderer.render())
    return nodes
def run(self) -> List[docutils.nodes.Node]:
    """Re-render each doctest's expected output through a wrapped directive.

    Parses the directive content as doctests and, for every example that has
    expected output, feeds that output as the content of the directive named
    in ``self.arguments``.
    """
    result: List[docutils.nodes.Node] = []
    source_file, lineno = self.get_source_info()
    parsed = doctest.DocTestParser().parse(
        '\n'.join(self.content), source_file)
    for example in parsed:
        # The parser interleaves plain strings (narrative text) with
        # Example objects; skip strings and examples without output.
        if isinstance(example, str) or not example.want:
            continue
        assert isinstance(example, doctest.Example)
        directive_text = sphinx_utils.format_directive(
            *self.arguments, content=example.want)
        result.extend(
            sphinx_utils.parse_rst(
                state=self.state,
                source_path=source_file,
                source_line=lineno,
                text=directive_text))
    return result
def jupyter_result_list(self, results, stdout, **options):
    """Create a list of results.

    A single matplotlib-module result is delegated to ``jupyter_results``;
    any other non-empty result list is pretty-printed into one literal
    block.  Returns ``(nodes, stdout)``.
    """
    nodes = []
    if not results:
        return nodes, stdout
    is_single_mpl = (
        len(results) == 1
        and 'matplotlib' in results[0].__class__.__module__)
    if is_single_mpl:
        for result in results:
            rendered, stdout = self.jupyter_results(
                result, stdout, **options)
            nodes.extend(rendered)
    else:
        buf = StringIO()
        # depth=4 keeps deeply nested results from flooding the page.
        pprint.pprint(results, stream=buf, indent=1, depth=4)
        text = buf.getvalue()
        buf.close()
        nodes.append(docutils.nodes.literal_block(text, text))
    return nodes, stdout
def run(self):
    """Parse the YAML file named by the directive argument into nodes.

    Resolves the argument relative to ``autoyaml_root`` under the source
    directory, records it as a build dependency, and raises
    AutoYAMLException when the path is not a file or parsing fails.
    """
    settings = self.state.document.settings
    self.config = settings.env.config
    self.env = settings.env
    self.record_dependencies = settings.record_dependencies

    location = os.path.normpath(
        os.path.join(self.env.srcdir,
                     self.config.autoyaml_root + '/' + self.arguments[0]))

    # Guard clause: anything other than a regular file is an error.
    if not os.path.isfile(location):
        raise AutoYAMLException(
            '%s:%s: location "%s" is not a file.' % (
                self.env.doc2path(self.env.docname, None),
                self.content_offset - 1,
                location))

    logger.debug('[autoyaml] parsing file: %s', location)
    try:
        nodes = list(self._parse_file(location))
    except Exception as e:
        raise AutoYAMLException(
            'Failed to parse YAML file: %s' % (location)) from e
    self.record_dependencies.add(location)
    return nodes
def nodes_for_module(self, module):
    """
    Determine nodes for a module/class
    Taking into account nested modules

    Scans ``dir(module)`` for ``Test*`` classes (grouped under their parent
    class, given by ``__childof__`` or the first base class), ``test_*``
    callables, and ``setUp``/``tearDown`` support callables, then renders
    nodes for each via the ``nodes_for_*`` helpers.
    """
    info = {}
    nodes = []
    tests = []
    groups = []
    support = []
    for name in dir(module):
        if name.startswith("Test"):
            item = getattr(module, name)
            info[name] = (name, item, [])
            if hasattr(item, '__childof__'):
                spr = item.__childof__.__name__
            else:
                spr = inspect.getmro(item)[1].__name__
            # Nest under an already-seen parent, else treat as a top group.
            if spr in info:
                info[spr][2].append(info[name])
            else:
                groups.append(name)
        elif name.startswith("test_"):
            tests.append(name)
        elif name in ('setUp', 'tearDown'):
            support.append(name)
    for name in support:
        item = getattr(module, name)
        if callable(item):
            nodes.extend(self.nodes_for_support(name, item))
    # Alphabetical first, then a stable sort by length (shorter names first,
    # ties stay alphabetical).  list.sort() lost its cmp= keyword in
    # Python 3 — the original cmp=lambda a, b: len(a) - len(b) raised
    # TypeError; key=len expresses the same ordering.
    tests.sort()
    tests.sort(key=len)
    for name in tests:
        item = getattr(module, name)
        if callable(item):
            nodes.extend(self.nodes_for_test(name, item))
    for name in groups:
        _, item, children = info[name]
        # isinstance(..., object) is always true; kept for compatibility.
        if isinstance(item, object):
            nodes.extend(self.nodes_for_class(name, item, children))
    return nodes
def nodes_for_module(self, module):
    """
    Determine nodes for a module/class
    Taking into account nested modules

    Collects ``Test*`` classes (nested under the parent named by
    ``__childof__`` or the first base class), ``test_*`` callables, and
    ``setUp``/``tearDown`` support callables from ``dir(module)``, and
    renders each group through the ``nodes_for_*`` helpers.
    """
    info = {}
    nodes = []
    tests = []
    groups = []
    support = []
    for name in dir(module):
        if name.startswith("Test"):
            item = getattr(module, name)
            info[name] = (name, item, [])
            if hasattr(item, "__childof__"):
                spr = item.__childof__.__name__
            else:
                spr = inspect.getmro(item)[1].__name__
            # Attach to a known parent group, otherwise start a new group.
            if spr in info:
                info[spr][2].append(info[name])
            else:
                groups.append(name)
        elif name.startswith("test_"):
            tests.append(name)
        elif name in ("setUp", "tearDown"):
            support.append(name)
    for name in support:
        item = getattr(module, name)
        if callable(item):
            nodes.extend(self.nodes_for_support(name, item))
    # Sort alphabetically, then stably by name length.  Python 3 removed
    # the cmp= keyword from list.sort(), so the original
    # cmp=lambda a, b: len(a) - len(b) raised TypeError at runtime;
    # key=len is the equivalent length comparison.
    tests.sort()
    tests.sort(key=len)
    for name in tests:
        item = getattr(module, name)
        if callable(item):
            nodes.extend(self.nodes_for_test(name, item))
    for name in groups:
        _, item, children = info[name]
        # Always true for any Python object; preserved from the original.
        if isinstance(item, object):
            nodes.extend(self.nodes_for_class(name, item, children))
    return nodes