def _build_markup(self):
    """Build the directive's doctree output: a field list describing the
    repository state (branch, commit) plus warning admonitions for a dirty
    or untracked working tree, each gated by a directive option.

    Returns a one-element list holding the wrapping paragraph.
    """
    field_list = nodes.field_list()
    item = nodes.paragraph()
    item.append(field_list)
    # Branch row only when requested and a branch name was actually
    # resolved (self.branch_name may be None, e.g. detached HEAD --
    # TODO confirm against the code that sets branch_name).
    if 'branch' in self.options and self.branch_name is not None:
        name = nodes.field_name(text="Branch")
        body = nodes.field_body()
        body.append(nodes.emphasis(text=self.branch_name))
        field = nodes.field()
        field += [name, body]
        field_list.append(field)
    if 'commit' in self.options:
        name = nodes.field_name(text="Commit")
        body = nodes.field_body()
        # Either a plain-text commit id or a hyperlink to GitHub.
        if 'no_github_link' in self.options:
            body.append(self._commit_text_node())
        else:
            body.append(self._github_link())
        field = nodes.field()
        field += [name, body]
        field_list.append(field)
    # Warn when the working tree was not clean at build time.
    if 'uncommitted' in self.options and self.repo.is_dirty():
        item.append(nodes.warning('', nodes.inline(
            text="There were uncommitted changes when this was compiled."
        )))
    if 'untracked' in self.options and self.repo.untracked_files:
        item.append(nodes.warning('', nodes.inline(
            text="There were untracked files when this was compiled."
        )))
    return [item]
def _build_markup(self):
    """Build the directive's doctree output: a field list describing the
    repository state (branch, commit) plus warning admonitions for a dirty
    or untracked working tree, each gated by a directive option.

    Returns a one-element list holding the wrapping paragraph.
    """
    field_list = nodes.field_list()
    item = nodes.paragraph()
    item.append(field_list)
    # Fix: also require a resolved branch name, matching the sibling
    # implementation -- self.branch_name can be None (e.g. detached HEAD)
    # and nodes.emphasis(text=None) produces a broken doctree.
    if 'branch' in self.options and self.branch_name is not None:
        name = nodes.field_name(text="Branch")
        body = nodes.field_body()
        body.append(nodes.emphasis(text=self.branch_name))
        field = nodes.field()
        field += [name, body]
        field_list.append(field)
    if 'commit' in self.options:
        name = nodes.field_name(text="Commit")
        body = nodes.field_body()
        # Either a plain-text commit id or a hyperlink to GitHub.
        if 'no_github_link' in self.options:
            body.append(self._commit_text_node())
        else:
            body.append(self._github_link())
        field = nodes.field()
        field += [name, body]
        field_list.append(field)
    # Warn when the working tree was not clean at build time.
    if 'uncommitted' in self.options and self.repo.is_dirty():
        item.append(nodes.warning('', nodes.inline(
            text="There were uncommitted changes when this was compiled."
        )))
    if 'untracked' in self.options and self.repo.untracked_files:
        item.append(nodes.warning('', nodes.inline(
            text="There were untracked files when this was compiled."
        )))
    return [item]
def format_parser_error(name, error, filename, state, lineno, do_unicode_warning):
    """Report a Doxygen XML parse failure.

    Returns a two-element list: a ``warning`` admonition node for the
    doctree, and the system-message node produced by the document reporter
    (which also logs the warning on the console).

    :param name: name of the reporting component, used as message prefix
    :param error: the parser exception / error description
    :param filename: path of the XML file that failed to parse
    :param state: docutils state (used to reach the document reporter)
    :param lineno: source line to attribute the warning to
    :param do_unicode_warning: when true, append a hint that the failure
        is likely caused by invalid characters propagated by Doxygen
    """
    warning = '%s: Unable to parse xml file "%s". ' % (name, filename)
    explanation = "Reported error: %s. " % error
    unicode_explanation_text = ""
    unicode_explanation = []
    if do_unicode_warning:
        # Normalize the dedented multi-line hint into a single line.
        unicode_explanation_text = (
            textwrap.dedent(
                """
                Parsing errors are often due to unicode errors associated with the encoding of the original
                source files. Doxygen propagates invalid characters from the input source files to the
                output xml."""
            )
            .strip()
            .replace("\n", " ")
        )
        unicode_explanation = [nodes.paragraph("", "", nodes.Text(unicode_explanation_text))]
    return [
        nodes.warning(
            "",
            nodes.paragraph("", "", nodes.Text(warning)),
            nodes.paragraph("", "", nodes.Text(explanation)),
            *unicode_explanation
        ),
        state.document.reporter.warning(warning + explanation + unicode_explanation_text, line=lineno),
    ]
def run(self):
    """Directive entry point: load a YAML stereotype description located
    relative to the current document and render it as docutils nodes.

    Returns a one-element list with the rendered stereotype, a warning
    admonition when the file cannot be read, or an empty list when the
    file contains no recognized entity.
    """
    env = self.state.document.settings.env
    logger = logging.getLogger(__name__)
    doc_folder = env.srcdir
    # Folder of the .rst file being parsed, relative to the source dir.
    this_folder = os.path.split(
        os.path.relpath(self.state.document.current_source, doc_folder))[0]
    file = os.path.join(this_folder, self.arguments[0])
    try:
        with open(file) as f:
            # safe_load: doc sources must never trigger arbitrary
            # YAML tag construction (yaml.load without a Loader did).
            data = yaml.safe_load(f)
    # Catch only file-access and YAML errors instead of a bare except,
    # which also swallowed KeyboardInterrupt/SystemExit.
    except (OSError, yaml.YAMLError):
        logger.warning('Could not read file {}'.format(file))
        return [
            # The message must be a *child* of the warning; previously the
            # paragraph was passed as rawsource and the admonition
            # rendered empty.
            nodes.warning('', nodes.paragraph(text='Unable to process file!'))
        ]
    # An empty YAML file yields None; treat it as "no known entity".
    data = data or {}
    result = []
    if 'class_stereotype' in data:
        result.append(self.make_class_stereotype(data['class_stereotype']))
    elif 'relationship_stereotype' in data:
        result.append(
            self.make_relationship_stereotype(
                data['relationship_stereotype']))
    elif 'aggregation_stereotype' in data:
        result.append(
            self.make_relationship_stereotype(
                data['aggregation_stereotype']))
    else:
        logger.warning('Unknown entity in {}'.format(file))
    return result
def format_parser_error(name, error, filename, state, lineno, do_unicode_warning):
    """Build the doctree warning and reporter message for an XML parse failure.

    Returns ``[warning_admonition, reporter_system_message]``.
    """
    warning = '%s: Unable to parse xml file "%s". ' % (name, filename)
    explanation = 'Reported error: %s. ' % error
    unicode_explanation_text = ""
    unicode_explanation = []
    if do_unicode_warning:
        # Collapse the dedented hint onto a single line.
        hint = textwrap.dedent("""
            Parsing errors are often due to unicode errors associated with the encoding of the original
            source files. Doxygen propagates invalid characters from the input source files to the
            output xml.""")
        unicode_explanation_text = hint.strip().replace("\n", " ")
        unicode_explanation = [
            nodes.paragraph("", "", nodes.Text(unicode_explanation_text))
        ]
    doctree_warning = nodes.warning(
        "",
        nodes.paragraph("", "", nodes.Text(warning)),
        nodes.paragraph("", "", nodes.Text(explanation)),
        *unicode_explanation)
    reporter_message = state.document.reporter.warning(
        warning + explanation + unicode_explanation_text, line=lineno)
    return [doctree_warning, reporter_message]
def _directive_checks(self):
    """Validate the directive's preconditions.

    Returns a one-element list with a warning node when a check fails
    (callers return it directly as directive output), or None when all
    checks pass.
    """
    # Check if file insertion is enabled
    if not self.state.document.settings.file_insertion_enabled:
        msg = (
            'File and URL access deactivated. '
            'Ignoring directive "{}".'.format(self._get_directive_name())
        )
        # Wrap the reporter's system message in a warning admonition so
        # the notice is visible in the rendered output as well.
        warning = nodes.warning(
            '',
            self.state_machine.reporter.warning(
                '', nodes.literal_block('', msg), line=self.lineno
            )
        )
        return [warning]
    # Check that no content and argument are used at the same time
    if self.arguments and self.content:
        warning = self.state_machine.reporter.warning(
            '{} directive cannot have both content and a filename '
            'argument.'.format(self._get_directive_name()), line=self.lineno
        )
        return [warning]
    # Check that at least one was provided
    if not (self.arguments or self.content):
        warning = self.state_machine.reporter.warning(
            '{} directive must have content or a filename '
            'argument.'.format(self._get_directive_name()), line=self.lineno
        )
        return [warning]
    return None
def run(self):
    """
    Executes python code for an RST document, taking input from content
    or from a filename.

    Returns the list of nodes to insert: optional captions, the displayed
    code block, and a literal block with the execution results.
    """
    language = self.options.get('language') or 'python'
    output_language = self.options.get('output_language') or 'none'
    filename = self.options.get('filename')
    code = ''
    if not filename:
        code = '\n'.join(self.content)
    if filename:
        try:
            with open(filename, 'r') as code_file:
                code = code_file.read()
            # NOTE(review): emits the whole file as a reporter warning on
            # every successful read -- looks like a debug leftover, kept
            # to preserve behavior.
            self.warning('code is %s' % code)
        except (IOError, OSError) as err:
            # Raise warning instead of a code block
            error = 'Error opening file: %s, working folder: %s' % (
                err, os.getcwd())
            self.warning(error)
            # Fix: the message must be wrapped in a node; passing the raw
            # string as a child produced an invalid doctree.
            return [nodes.warning('', nodes.paragraph(text=error))]
    output = []
    # Show the example code
    if 'hide_code' not in self.options:
        # Optionally strip import statements from the displayed copy.
        if 'hide_import' in self.options:
            m = re.compile(r"import\s+[\.\w]+\s*\n+", re.MULTILINE)
            displayed_code = m.sub("", code)
        else:
            displayed_code = code
        input_code = nodes.literal_block(displayed_code, displayed_code)
        input_code['language'] = language
        input_code['linenos'] = 'linenos' in self.options
        if 'hide_headers' not in self.options and 'hide_code_caption' not in self.options:
            suffix = ''
            if 'hide_filename' not in self.options:
                suffix = '' if filename is None else str(filename)
            code_caption = self.options.get('code_caption') or 'Code'
            output.append(
                nodes.caption(text='%s %s' % (code_caption, suffix)))
        output.append(input_code)
    # Show the code results
    if 'hide_headers' not in self.options and 'hide_results_caption' not in self.options:
        results_caption = self.options.get('results_caption') or 'Results'
        output.append(nodes.caption(text=results_caption))
    inputs = self.options.get('input') or None
    code_results = self.execute_code(code, inputs)
    code_results = nodes.literal_block(code_results, code_results)
    code_results['linenos'] = 'linenos' in self.options
    code_results['language'] = output_language
    output.append(code_results)
    return output
def process_meta(app, doctree, fromdocname):
    """Index per-page ``:version:`` metadata and, when rendering a page
    documenting an outdated version, prepend a warning banner and/or log
    a console warning depending on configuration.
    """
    env = app.builder.env
    env.page_to_version = defaultdict(set)
    env.version_to_page = defaultdict(set)

    # index metadata
    # Fix: dict.iteritems() is Python-2-only and raises AttributeError on
    # Python 3; .items() behaves the same on both.
    for pagename, metadata in env.metadata.items():
        if 'version' in metadata:
            version = metadata['version']
            # NOTE(review): overwrites the defaultdict's set with a scalar;
            # matches the sibling implementation, kept as-is.
            env.page_to_version[pagename] = version
            env.version_to_page[version].add(pagename)

            if fromdocname == pagename:
                # Alert on outdated version
                current_version = env.config['version']
                if version != current_version:
                    text = 'This page documents version {old}. The latest version is {new}'.format(
                        old=version,
                        new=current_version,
                    )
                    if app.config['versionwarning-node']:
                        prose = nodes.paragraph(text, text)
                        warning = nodes.warning(prose, prose)
                        doctree.insert(0, warning)
                    if app.config['versionwarning-console']:
                        app.warn(bold('[Version Warning: %s] ' % pagename) + red(text))
def process_meta(app, doctree, fromdocname):
    """Index per-page ``:version:`` metadata and, when rendering a page
    that has not been updated for the current version, prepend a warning
    banner and/or log a console warning depending on configuration.
    """
    env = app.builder.env
    env.page_to_version = defaultdict(set)
    env.version_to_page = defaultdict(set)

    # index metadata
    # Fix: the iter(...) wrapper around .items() was redundant -- a dict
    # view is already iterable.
    for pagename, metadata in env.metadata.items():
        if 'version' in metadata:
            version = metadata['version']
            # NOTE(review): overwrites the defaultdict's set with a scalar;
            # matches the sibling implementation, kept as-is.
            env.page_to_version[pagename] = version
            env.version_to_page[version].add(pagename)

            if fromdocname == pagename:
                # Alert on outdated version
                current_version = env.config['version']
                if version != current_version:
                    text = 'This page documents version {old} and has not yet been updated for version {new}'.format(
                        old=version,
                        new=current_version,
                    )
                    if app.config['versionwarning_node']:
                        prose = nodes.paragraph(text, text)
                        warning = nodes.warning(prose, prose)
                        doctree.insert(0, warning)
                    if app.config['versionwarning_console']:
                        app.warn(
                            bold('[Version Warning: %s] ' % pagename) + red(text))
def _directive_checks(self):
    """Validate the directive's preconditions.

    Returns a one-element list with a warning node when a check fails
    (callers return it directly as directive output), or None when all
    checks pass.
    """
    # Check if file insertion is enabled
    if not self.state.document.settings.file_insertion_enabled:
        msg = ('File and URL access deactivated. '
               'Ignoring directive "{}".'.format(
                   self._get_directive_name()))
        # Wrap the reporter's system message in a warning admonition so
        # the notice is visible in the rendered output as well.
        warning = nodes.warning(
            '',
            self.state_machine.reporter.warning('', nodes.literal_block(
                '', msg), line=self.lineno))
        return [warning]
    # Check that no content and argument are used at the same time
    if self.arguments and self.content:
        warning = self.state_machine.reporter.warning(
            '{} directive cannot have both content and a filename '
            'argument.'.format(self._get_directive_name()), line=self.lineno)
        return [warning]
    # Check that at least one was provided
    if not (self.arguments or self.content):
        warning = self.state_machine.reporter.warning(
            '{} directive must have content or a filename '
            'argument.'.format(self._get_directive_name()), line=self.lineno)
        return [warning]
    return None
def add_contributing_banner(app, doctree):
    """
    Insert a banner at the top of the index.

    This way, we can easily communicate people to help with the translation,
    pointing them to different resources.
    """
    if app.builder.format != 'html':
        # Do not include the banner when building with other formats
        # (this is useful when using -b gettext)
        return

    from docutils import nodes, core

    # NOTE(review): `version` is expected to be a module-level global
    # interpolated into the f-string -- confirm it is defined at import
    # time in this module.
    message = '¡Ayúdanos a traducir la documentación oficial de Python al Español! ' \
        f'Puedes encontrar más información en `Como contribuir </es/{version}/CONTRIBUTING.html>`_. ' \
        'Ayuda a acercar Python a más personas de habla hispana.'

    # Parse the reST message and take its first node (the paragraph).
    paragraph = core.publish_doctree(message)[0]
    banner = nodes.warning(ids=['contributing-banner'])
    banner.append(paragraph)

    for document in doctree.traverse(nodes.document):
        document.insert(0, banner)
def run(self):
    """Render the next patch from the document's patch queue as a
    ``pq-patch`` container with per-file heading, hunk sections, a raw
    diff, and the resulting file content.

    Returns an error node when file insertion is disabled, a warning node
    when the queue is exhausted, an empty list for hidden patches, or the
    rendered container otherwise.
    """
    document = self.state.document
    env = document.settings.env
    series = env.temp_data[SERIES_KEY]
    app = env.app
    if not document.settings.file_insertion_enabled:
        msg = "File insertion disabled"
        app.warn(msg)
        error = nodes.error('', nodes.inline(text=msg))
        error.lineno = self.lineno
        return [error]
    patch = next(series, None)
    if patch is None:
        msg = "No patch left in queue %s" % series.path
        app.warn(msg)
        warning = nodes.warning('', nodes.inline(text=msg))
        warning.lineno = self.lineno
        return [warning]
    if 'hidden' in self.options:
        return []
    doc_dir = os.path.dirname(env.doc2path(env.docname))
    patch_root = nodes.container(classes=['pq-patch'])
    for fname, path, hunks in patch:
        patch_root.append(nodes.emphasis(text=fname))
        relative_path = os.path.relpath(path, doc_dir)
        try:
            # Fix: close the file promptly instead of leaking the handle
            # left open by open(path, 'rb').read().
            with open(path, 'rb') as patched_file:
                lang = pygments.lexers.guess_lexer_for_filename(
                    fname, patched_file.read()).aliases[0]
        except pygments.util.ClassNotFound:
            lang = 'guess'
        patchlines = []
        section = nodes.container(classes=['pq-section'])
        for hunk in hunks:
            patchlines.extend(line.rstrip('\n') for line in hunk.hunk)
            section.extend(self.run_hunk(hunk, relative_path, lang=lang))
        patch_root.append(section)
        patch_root.append(
            nodes.container(
                '', *self.run_diff(patchlines), classes=['pq-diff']))
        patch_root.append(
            nodes.container(
                '', *self.run_content(relative_path, lang=lang),
                classes=['pq-file']))
        undepend(env, relative_path)
    return [patch_root]
def trac_role(env, name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Inline role resolving a TracLink to a reference node.

    Returns the docutils role tuple ``(nodes, system_messages)``.
    The mutable defaults follow the docutils role signature convention
    and are never mutated here.
    """
    reference = trac_get_reference(env, rawtext, text)
    if reference:
        return [reference], []
    warning = nodes.warning(None, nodes.literal_block(
        text, 'WARNING: %s is not a valid TracLink' % rawtext))
    # Fix: role functions must return a *list* of nodes; returning the
    # warning node bare makes docutils iterate its children instead,
    # dropping the admonition wrapper.
    return [warning], []
def warn(self, raw_text, rendered_nodes=None):
    """Format *raw_text* and emit it both as a doctree warning admonition
    and as a reporter warning attributed to the directive's source line.

    When *rendered_nodes* is None, a plain paragraph wrapping the
    formatted text is used as the admonition body.
    """
    message = self.format(raw_text)
    if rendered_nodes is None:
        rendered_nodes = [nodes.paragraph("", "", nodes.Text(message))]
    admonition = nodes.warning("", *rendered_nodes)
    reporter_message = self.state.document.reporter.warning(
        message, line=self.context['lineno'])
    return [admonition, reporter_message]
def warn(self, raw_text: str, *, rendered_nodes: Sequence[nodes.Node] = None,
         unformatted_suffix: str = '') -> List[nodes.Node]:
    """Format *raw_text* (appending *unformatted_suffix* verbatim) and emit
    it both as a doctree warning admonition and as a reporter warning
    attributed to the directive's source line.

    When *rendered_nodes* is None, a plain paragraph wrapping the
    formatted text is used as the admonition body.
    """
    # NOTE(review): the annotation relies on implicit Optional
    # (default None with a non-Optional type) -- strict type checkers
    # will flag this; should be Optional[Sequence[nodes.Node]].
    raw_text = self.format(raw_text) + unformatted_suffix
    if rendered_nodes is None:
        rendered_nodes = [nodes.paragraph("", "", nodes.Text(raw_text))]
    return [
        nodes.warning("", *rendered_nodes),
        self.state.document.reporter.warning(raw_text, line=self.context['lineno'])
    ]
def trac_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Inline role resolving a TracLink, with optional display text after
    the first space (``:trac:`link label```).

    Returns the docutils role tuple ``(nodes, system_messages)``.
    The mutable defaults follow the docutils role signature convention
    and are never mutated here.
    """
    args = text.split(" ", 1)
    link = args[0]
    if len(args) == 2:
        text = args[1]
    else:
        text = None
    reference = trac_get_reference(rawtext, link, text)
    if reference:
        return [reference], []
    warning = nodes.warning(None, nodes.literal_block(
        text, 'WARNING: %s is not a valid TracLink' % rawtext))
    # Fix: role functions must return a *list* of nodes; returning the
    # warning node bare makes docutils iterate its children instead,
    # dropping the admonition wrapper.
    return [warning], []
def getErrorString(state):
    """Collect queued OpenModelica messages and wrap them in the matching
    docutils admonition: error if any errors, else warning if any
    warnings, else note. Returns [] when there are no messages.

    The *state* parameter is unused here.
    """
    # (total, errors, warnings) message counts from the OMC session.
    (nm,ne,nw) = omc.sendExpression("countMessages()")
    # Fetch the message text before the early return -- presumably
    # getErrorString() also drains OMC's message queue, so it must run
    # unconditionally (TODO confirm against OMC scripting docs).
    s = fixPaths(omc.sendExpression("OpenModelica.Scripting.getErrorString()"))
    if nm==0:
        return []
    node = nodes.paragraph()
    # One sub-paragraph per message line.
    for x in s.split("\n"):
        node += nodes.paragraph(text = x)
    if ne>0:
        return [nodes.error(None, node)]
    elif nw>0:
        return [nodes.warning(None, node)]
    else:
        return [nodes.note(None, node)]
def getErrorString(state):
    """Collect queued OpenModelica messages and wrap them in the matching
    docutils admonition: error if any errors, else warning if any
    warnings, else note. Returns [] when there are no messages.

    The *state* parameter is unused here.
    """
    total, error_count, warning_count = omc.sendExpression("countMessages()")
    # Fetch the text before the early return so the OMC call order is
    # identical regardless of the message count.
    message_text = fixPaths(
        omc.sendExpression("OpenModelica.Scripting.getErrorString()"))
    if total == 0:
        return []
    body = nodes.paragraph()
    for line in message_text.split("\n"):
        body += nodes.paragraph(text=line)
    if error_count > 0:
        return [nodes.error(None, body)]
    if warning_count > 0:
        return [nodes.warning(None, body)]
    return [nodes.note(None, body)]
def trac_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Inline role resolving a TracLink, with optional display text after
    the first space (French error message variant).

    Returns the docutils role tuple ``(nodes, system_messages)``.
    The mutable defaults follow the docutils role signature convention
    and are never mutated here.
    """
    args = text.split(" ",1)
    link = args[0]
    if len(args)==2:
        text = args[1]
    else:
        text = None
    reference = trac_get_reference(rawtext, link, text)
    if reference:
        return [reference], []
    warning = nodes.warning(None, nodes.literal_block(
        text, u'ATTENTION: %s n\'est pas un lien Trac correct' % rawtext))
    # Fix: role functions must return a *list* of nodes; returning the
    # warning node bare makes docutils iterate its children instead,
    # dropping the admonition wrapper.
    return [warning], []
def run(self):
    """
    Executes python code for an RST document, taking input from content
    or from a filename.

    Returns the list of nodes to insert: optional captions, the code
    block, and a literal block with the execution results.
    """
    language = self.options.get('language') or 'python'
    output_language = self.options.get('output_language') or 'none'
    filename = self.options.get('filename')
    code = ''
    if not filename:
        code = '\n'.join(self.content)
    if filename:
        try:
            with open(filename, 'r') as code_file:
                code = code_file.read()
            # NOTE(review): emits the whole file as a reporter warning on
            # every successful read -- looks like a debug leftover, kept
            # to preserve behavior.
            self.warning('code is %s' % code)
        except (IOError, OSError) as err:
            # Raise warning instead of a code block
            error = 'Error opening file: %s, working folder: %s' % (
                err, os.getcwd())
            self.warning(error)
            # Fix: the message must be wrapped in a node; passing the raw
            # string as a child produced an invalid doctree.
            return [nodes.warning('', nodes.paragraph(text=error))]
    output = []
    # Show the example code
    if 'hide_code' not in self.options:
        input_code = nodes.literal_block(code, code)
        input_code['language'] = language
        input_code['linenos'] = 'linenos' in self.options
        if 'hide_headers' not in self.options:
            suffix = ''
            if 'hide_filename' not in self.options:
                suffix = '' if filename is None else str(filename)
            output.append(nodes.caption(text='Code %s' % suffix))
        output.append(input_code)
    # Show the code results
    if 'hide_headers' not in self.options:
        output.append(nodes.caption(text='Results'))
    code_results = self.execute_code(code)
    code_results = nodes.literal_block(code_results, code_results)
    code_results['linenos'] = 'linenos' in self.options
    code_results['language'] = output_language
    output.append(code_results)
    return output
def process_external_version_warning_banner(app, doctree, fromdocname):
    """
    Add warning banner for external versions in every page.

    If the version type is external this will show a warning banner
    at the top of each page of the documentation.
    """
    for document in doctree.traverse(nodes.document):
        # TODO: Link to the Pull Request
        text = 'This page was created from a pull request.'
        # GitLab-hosted projects get "merge request" wording instead.
        if app.builder.config.html_context.get('display_gitlab'):
            text = 'This page was created from a merge request.'
        prose = nodes.paragraph(text, text)
        # The first argument is the rawsource; only the second (the
        # paragraph) becomes the admonition's child.
        warning = nodes.warning(prose, prose)
        document.insert(0, warning)
def create_warning_node(self):
    """
    Method creates warning node when hash of directive content is not
    match to content integrity hash in the rst document
    """
    container = nodes.paragraph()
    message = nodes.inline(
        text="This code example can be not actual for this version")
    container.append(nodes.warning('', message))
    return container
def trac_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Inline role resolving a TracLink using the Trac environment/context
    attached to the inliner; warns when no Trac context is active.

    Returns the docutils role tuple ``(nodes, system_messages)``.
    The mutable defaults follow the docutils role signature convention
    and are never mutated here.
    """
    if hasattr(inliner, 'trac'):
        env, context = inliner.trac
        args = text.split(" ", 1)
        link = args[0]
        if len(args) == 2:
            text = args[1]
        else:
            text = None
        reference = trac_get_reference(env, context, rawtext, link, text)
        if reference:
            return [reference], []
        msg = _("%(link)s is not a valid TracLink", link=rawtext)
    else:
        msg = "No trac context active while rendering"
    # Fix: role functions must return a *list* of nodes; the bare warning
    # node was iterated by docutils, dropping the admonition wrapper.
    return [nodes.warning(None, nodes.literal_block(text, msg))], []
def iter_nodes(self):
    """ Iterate on the docutils nodes generated by this directive.

    Yields one section per discovered setting, sorted by name, containing
    the default value, a source link into the configured repository, the
    description, and an optional warning admonition.
    """
    folder_path = self.options.get("folder_path", "")
    source_path = os.path.join(self.env.config.settings_source_path, folder_path)
    settings = find_settings(source_path)
    # folder_path can point to a file or directory
    root_folder = folder_path if os.path.isdir(source_path) else os.path.dirname(folder_path)
    for setting_name in sorted(settings):
        setting = settings[setting_name]
        # setting["filename"] is relative to the root_path
        setting_filename = os.path.join(root_folder, setting["filename"])
        setting_default_value = setting.get(".. setting_default:", "Not defined")
        setting_default_node = nodes.literal(
            text=quote_value(setting_default_value)
        )
        # Anchor each setting so it can be cross-referenced.
        setting_section = nodes.section("", ids=[f"setting-{setting_name}"])
        setting_section += nodes.title(text=setting_name)
        setting_section += nodes.paragraph("", "Default: ", setting_default_node)
        setting_section += nodes.paragraph(
            "", "Source: ",
            nodes.reference(
                text="{} (line {})".format(
                    setting["filename"], setting["line_number"]
                ),
                refuri="{}/blob/{}/{}#L{}".format(
                    self.env.config.settings_repo_url,
                    self.env.config.settings_repo_version,
                    setting_filename,
                    setting["line_number"],
                ),
            ),
        )
        setting_section += nodes.paragraph(
            text=setting.get(".. setting_description:", "")
        )
        # Only render a warning when the annotation carries real content.
        if setting.get(".. setting_warning:") not in (None, "None", "n/a", "N/A"):
            setting_section += nodes.warning(
                "", nodes.paragraph("", setting[".. setting_warning:"])
            )
        yield setting_section
def process_external_version_warning_banner(app, doctree, fromdocname):
    """
    Add warning banner for external versions in every page.

    If the version type is external this will show a warning banner
    at the top of each page of the documentation.
    """
    # GitLab-hosted projects get "merge request" wording instead.
    is_gitlab = app.config.html_context.get('display_gitlab')
    name = 'merge request' if is_gitlab else 'pull request'

    # Link the words "was created" to the Read the Docs build page.
    build_url = app.config.readthedocs_build_url
    build_url_node = nodes.reference(
        '', '',
        nodes.Text('was created '),
        internal=False,
        refuri=build_url,
    )
    # Link "#<number>" to the pull/merge request on the VCS host;
    # current_version holds the PR number for external builds.
    pr_number = app.config.html_context.get('current_version')
    pr_number = '#{number}'.format(number=pr_number)
    vcs_url = app.config.readthedocs_vcs_url
    vcs_url_node = nodes.reference(
        '', '',
        nodes.Text(pr_number),
        internal=False,
        refuri=vcs_url,
    )

    # "This page [was created] from a <name> ([#123])."
    children = [
        nodes.Text('This page '),
        build_url_node,  # was created
        nodes.Text('from a {name} ('.format(name=name)),
        vcs_url_node,  # #123
        nodes.Text(').'),
    ]
    prose = nodes.paragraph('', '', *children)
    warning_node = nodes.warning(prose, prose)
    for document in doctree.traverse(nodes.document):
        document.insert(0, warning_node)
def create_draft_warning(draft_dependencies=None):
    """Build a warning admonition flagging a draft page.

    When *draft_dependencies* is a non-empty iterable of page names, the
    warning text is extended and a bullet list of those pages is appended.
    Returns the warning node.
    """
    message = DRAFT_DOCS_TEXT
    if draft_dependencies:
        message += " because it links to the following draft pages:"
    intro = nodes.paragraph()
    intro.append(nodes.Text(message))
    warning = nodes.warning()
    warning.append(intro)
    if draft_dependencies:
        bullet_list = nodes.bullet_list()
        for page_name in draft_dependencies:
            entry_body = nodes.paragraph()
            entry_body.append(nodes.Text(page_name))
            entry = nodes.list_item()
            entry.append(entry_body)
            bullet_list.append(entry)
        warning.append(bullet_list)
    return warning
def trac_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Inline role resolving a TracLink, with optional display text after
    the first space (French error message variant).

    Returns the docutils role tuple ``(nodes, system_messages)``.
    The mutable defaults follow the docutils role signature convention
    and are never mutated here.
    """
    args = text.split(" ", 1)
    link = args[0]
    if len(args) == 2:
        text = args[1]
    else:
        text = None
    reference = trac_get_reference(rawtext, link, text)
    if reference:
        return [reference], []
    warning = nodes.warning(
        None,
        nodes.literal_block(
            text, u'ATTENTION: %s n\'est pas un lien Trac correct' % rawtext))
    # Fix: role functions must return a *list* of nodes; returning the
    # warning node bare makes docutils iterate its children instead,
    # dropping the admonition wrapper.
    return [warning], []
def iter_nodes(self):
    """ Iterate on the docutils nodes generated by this directive.

    Yields one section per discovered feature toggle, sorted by name,
    containing the default value, a source link into the configured
    repository, the description, and an optional warning admonition.
    """
    toggles = find_feature_toggles(
        self.env.config.featuretoggles_source_path)
    for toggle_name in sorted(toggles):
        toggle = toggles[toggle_name]
        toggle_default_value = toggle.get(".. toggle_default:", "Not defined")
        toggle_default_node = nodes.literal(
            text=quote_value(toggle_default_value))
        # Anchor each toggle so it can be cross-referenced.
        toggle_section = nodes.section(
            "", ids=[f"featuretoggle-{toggle_name}"])
        toggle_section += nodes.title(text=toggle_name)
        toggle_section += nodes.paragraph("", "Default: ", toggle_default_node)
        toggle_section += nodes.paragraph(
            "", "Source: ",
            nodes.reference(
                text="{} (line {})".format(toggle["filename"],
                                           toggle["line_number"]),
                refuri="{}/blob/{}/{}#L{}".format(
                    self.env.config.featuretoggles_repo_url,
                    self.env.config.featuretoggles_repo_version,
                    toggle["filename"],
                    toggle["line_number"],
                ),
            ),
        )
        toggle_section += nodes.paragraph(
            text=toggle.get(".. toggle_description:", ""))
        # Only render a warning when the annotation carries real content.
        if toggle.get(".. toggle_warnings:") not in (None, "None", "n/a", "N/A"):
            toggle_section += nodes.warning(
                "", nodes.paragraph("", toggle[".. toggle_warnings:"]))
        yield toggle_section
def run(self): self.assert_has_content() # On error, you can ``raise self.error('Error message.')`` # You have access to all arguments and options # self.arguments # self.options print(self.arguments) # the content following directive print(self.options) print( self.content ) # List with each line of the content (text on lines after initial directive call) # You can return one or more nodes that will be inserted at the # location where the directive was called. The output format # will depend on the translator/writer that is used after # the parser is done reading and calling all directives to generate # the node tree. comment_node = nodes.comment(text="Section generated by " + __file__) warning_node = nodes.warning() text_node = nodes.Text("Who dares call my custom directive!?") line_node = nodes.transition() # You could simply return the nodes as a list # return [comment_node, line_node, text_node] # But, if you want to wrap multiple nodes up in a section with # a special class name, create a section and put the nodes in it. # This will wrap them with a <div class="section"> w/ html5 writer section_node = nodes.section() section_node['classes'] = ['my-custom-css-class', 'another-style'] section_node.append(comment_node) section_node.append(warning_node) section_node.append(text_node) section_node.append(line_node) return [section_node]
def run(self):
    """Render the next patch from the document's patch queue as a
    ``pq-patch`` container, emitting only the configured sections
    ('changes', 'diff', 'content'; toggling UI is enabled when more than
    one section is configured).

    Returns an error node when file insertion is disabled, a warning node
    when the queue is exhausted, an empty list for hidden patches, or the
    rendered container otherwise.
    """
    document = self.state.document
    env = document.settings.env
    series = env.temp_data[SERIES_KEY]
    app = env.app
    if not document.settings.file_insertion_enabled:
        msg = "File insertion disabled"
        app.warn(msg)
        error = nodes.error('', nodes.inline(text=msg))
        error.lineno = self.lineno
        return [error]
    patch = next(series, None)
    if patch is None:
        msg = "No patch left in queue %s" % series.path
        app.warn(msg)
        warning = nodes.warning('', nodes.inline(text=msg))
        warning.lineno = self.lineno
        return [warning]
    if 'hidden' in self.options:
        return []
    doc_dir = os.path.dirname(env.doc2path(env.docname))
    sections = getattr(app, 'patchqueue_sections', ['changes'])
    classes = ['pq-patch']
    if len(sections) > 1:
        classes.append('pq-needs-toggle')
    patch_root = nodes.container(classes=classes)
    for fname, path, hunks in patch:
        patch_root.append(nodes.emphasis(text=fname))
        relative_path = os.path.relpath(path, doc_dir)
        try:
            # Fix: close the file promptly instead of leaking the handle
            # left open by open(path, 'rb').read().
            with open(path, 'rb') as patched_file:
                lang = pygments.lexers.guess_lexer_for_filename(
                    fname, patched_file.read()).aliases[0]
        except pygments.util.ClassNotFound:
            lang = 'guess'
        patchlines = []
        if 'changes' in sections:
            section = nodes.container(classes=['pq-section'])
        for hunk in hunks:
            patchlines.extend(line.rstrip('\n') for line in hunk.hunk)
            if 'changes' in sections:
                section.extend(
                    self.run_hunk(hunk, relative_path, lang=lang))
        if 'changes' in sections:
            patch_root.append(section)
        if 'diff' in sections:
            patch_root.append(
                nodes.container('', *self.run_diff(patchlines),
                                classes=['pq-diff']))
        if 'content' in sections:
            patch_root.append(
                nodes.container('',
                                *self.run_content(relative_path, lang=lang),
                                classes=['pq-file']))
        undepend(env, relative_path)
    return [patch_root]
def render(self):
    """Yield a warning admonition when the rendered symbol carries a
    warning message; yield nothing otherwise (generator)."""
    symbol = self.symbol()
    if symbol.warning:
        yield nodes.warning('', nodes.paragraph(text=symbol.warning))
import six def parse_text(text): parser = rst.Parser() settings = frontend.OptionParser( components=(rst.Parser, )).get_default_values() document = utils.new_document(text, settings) parser.parse(text, document) return document.children paragraph = lambda text: parse_text(text)[0] note = lambda msg: nodes.note("", paragraph(msg)) hint = lambda msg: nodes.hint("", *parse_text(msg)) warning = lambda msg: nodes.warning("", paragraph(msg)) category = lambda title: parse_text("%s\n%s" % (title, "-" * len(title)))[0] subcategory = lambda title: parse_text("%s\n%s" % (title, "~" * len(title)))[0] section = lambda title: parse_text("%s\n%s" % (title, "\"" * len(title)))[0] def make_definition(term, ref, descriptions): """Constructs definition with reference to it""" ref = ref.replace("_", "-").replace(" ", "-") definition = parse_text( ".. _%(ref)s:\n\n* *%(term)s* [ref__]\n\n__ #%(ref)s" % { "ref": ref, "term": term }) for descr in descriptions: if descr:
def run(self):
    """Render documentation for a gRPC/HTTP method symbol: leading
    comment, streaming warnings, the URI template, route parameters and
    (for GET routes) query parameters.

    Returns the list of generated docutils nodes.
    """
    result = []
    symbol = self.arguments[0]
    descriptor = descriptors_by_symbol[symbol]
    # Leading doc comment attached to the method symbol, if any.
    comment = find_comment(symbol, prefix='')
    if comment:
        result += produce_nodes(self.state, comment)
    if descriptor.client_streaming:
        text = ('This method uses client-streaming.')
        result.append(
            nodes.warning(
                '',
                nodes.paragraph('', '', nodes.Text(text)),
            ))
    if descriptor.server_streaming:
        text = ('This method uses server-streaming. ' +
                'Yamcs sends an unspecified amount of data ' +
                'using chunked transfer encoding.')
        result.append(
            nodes.warning(
                '',
                nodes.paragraph('', '', nodes.Text(text)),
            ))
    route_options = descriptor.options.Extensions[annotations_pb2.route]
    route_text = get_route_for_method_descriptor(descriptor)
    # Render the URI template through the reST parser as a code block.
    raw = '.. rubric:: URI Template\n'
    raw += '.. code-block:: uritemplate\n\n'
    raw += ' ' + route_text + '\n'
    result += produce_nodes(self.state, raw)
    input_descriptor = descriptors_by_symbol[descriptor.input_type]
    route_params = get_route_params(route_text)
    if route_params:
        # Definition list of the template's path parameters, documented
        # from the input message's field comments.
        dl_items = []
        for param in route_params:
            param_template = get_route_param_template(route_text, param)
            comment = find_comment(descriptor.input_type + '.' + param,
                                   prefix='') or ''
            dl_items.append(
                nodes.definition_list_item(
                    '',
                    nodes.term('', '', nodes.literal('', param_template)),
                    nodes.definition('', nodes.paragraph(text=comment)),
                ))
        result += [nodes.definition_list('', *dl_items)]
    if route_options.get:
        # For GET routes, input fields that are not path parameters are
        # documented as query parameters.
        query_param_fields = []
        for field in input_descriptor.field:
            if field.json_name not in route_params:
                query_param_fields.append(field)
        if query_param_fields:
            dl_items = []
            for field in query_param_fields:
                field_symbol = descriptor.input_type + '.' + field.name
                comment_node = nodes.section()
                comment = find_comment(field_symbol, prefix='')
                if comment:
                    for child in produce_nodes(self.state, comment):
                        comment_node += child
                dl_items.append(
                    nodes.definition_list_item(
                        '',
                        nodes.term('', '', nodes.literal('', field.json_name)),
                        nodes.definition('', comment_node),
                    ))
            result += [
                nodes.rubric('', 'Query Parameters'),
                nodes.definition_list('', *dl_items),
            ]
    return result
def run(self):
    """Emit a standard Beta-stage warning admonition for the API named by
    the directive content."""
    api_name = " ".join(self.content)
    text = f"The {api_name} is in Beta stage, and backward compatibility is not guaranteed."
    body = nodes.paragraph("", "", nodes.Text(text))
    warning = nodes.warning("", body)
    return [warning]
def run(self):
    """Render a PlantUML diagram: write the directive content to a .uml
    file, invoke the ``plantuml`` command on it, then delegate to the
    Image directive to embed the generated image.

    Returns a warning node when file insertion is disabled, an error node
    when the plantuml invocation fails, or the Image directive's output.
    """
    # Check if file insertion is enabled
    if not self.state.document.settings.file_insertion_enabled:
        msg = (
            'File and URL access deactivated. '
            'Ignoring directive "{}".'.format(self.name)
        )
        warning = nodes.warning(
            '',
            self.state_machine.reporter.warning(
                '', nodes.literal_block('', msg), line=self.lineno
            )
        )
        return [warning]

    # Define plantuml file name: explicit argument or the directive's
    # line number, zero-padded.
    if len(self.arguments) > 0:
        fname = self.arguments[0]
    else:
        fname = '{:06d}'.format(self.lineno)
    fname = join(self.uml_out_dir, fname)

    # Create images output folder
    mkpath(abspath(dirname(fname)))

    # Write plantuml content
    uml_file = fname + '.uml'
    if self.content:
        # Fix: open in text mode -- the content and markers are str, and
        # writing str to a file opened 'wb' raises TypeError on Python 3.
        with open(uml_file, 'w') as fd:
            fd.write('@startuml\n')
            try:
                # Optional global header configured on the document.
                fd.write(
                    self.state_machine.document.settings.plantuml_hdr
                )
                fd.write('\n')
            except AttributeError:
                pass
            fd.write('\n'.join(self.content))
            fd.write('\n@enduml\n')

    # Execute plantuml call
    # (plantuml has no output flag, so the image path is implied:
    # image_file = fname + self.uml_out_ext)
    plantuml_cmd = 'plantuml'
    try:
        plantuml_cmd = self.state_machine.document.settings.plantuml_cmd
    except AttributeError:
        pass
    try:
        code = call(shsplit(
            '{} {} "{}"'.format(plantuml_cmd, self.uml_cmd_args, uml_file)
        ))
        if code != 0:
            raise Exception('plantuml call returned {}.'.format(code))
    # Fix: narrowed from a bare except, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        msg = format_exc()
        error = nodes.error(
            '',
            self.state_machine.reporter.error(
                '', nodes.literal_block('', msg), line=self.lineno
            )
        )
        return [error]

    # Default to align center
    if 'align' not in self.options:
        self.options['align'] = 'center'

    # Run Image directive
    self.arguments = [fname + self.uml_emb_ext]
    return Image.run(self)
import six def parse_text(text): parser = rst.Parser() settings = frontend.OptionParser( components=(rst.Parser,)).get_default_values() document = utils.new_document(text, settings) parser.parse(text, document) return document.children paragraph = lambda text: parse_text(text)[0] note = lambda msg: nodes.note("", paragraph(msg)) hint = lambda msg: nodes.hint("", *parse_text(msg)) warning = lambda msg: nodes.warning("", paragraph(msg)) category = lambda title: parse_text("%s\n%s" % (title, "-" * len(title)))[0] subcategory = lambda title: parse_text("%s\n%s" % (title, "~" * len(title)))[0] section = lambda title: parse_text("%s\n%s" % (title, "\"" * len(title)))[0] def make_definition(term, ref, descriptions): """Constructs definition with reference to it.""" ref = ref.replace("_", "-").replace(" ", "-") definition = parse_text( ".. _%(ref)s:\n\n* *%(term)s* [ref__]\n\n__ #%(ref)s" % {"ref": ref, "term": term}) for descr in descriptions: if descr: if isinstance(descr, (six.text_type, six.binary_type)): if descr[0] not in string.ascii_uppercase:
def create_warning(self, message):
    """Wrap *message* in a docutils warning admonition and return it."""
    body = nodes.paragraph(text=message)
    admonition = nodes.warning()
    admonition += body
    return admonition
def warn(self, text):
    """Interpolate *text* with the directive context, then emit it both as
    a doctree warning admonition and as a reporter warning attributed to
    the context's source line."""
    message = text.format(**self.context)
    admonition = nodes.warning("", nodes.paragraph("", "", nodes.Text(message)))
    reporter_message = self.state.document.reporter.warning(
        message, line=self.context['lineno'])
    return [admonition, reporter_message]