def parse_config(self, config, toplevel=False):
    """Parses @config setting up @self state."""
    self.sitemap_path = config.get_path('sitemap')
    if self.sitemap_path is None:
        error('invalid-config', 'No sitemap was provided')

    # Include paths: the index's folder first, then explicit ones.
    self.include_paths = OrderedSet([])
    index = config.get_index()
    if index:
        if not os.path.exists(index):
            error('invalid-config',
                  'The provided index "%s" does not exist' % index)
        self.include_paths |= OrderedSet([os.path.dirname(index)])
    self.include_paths |= OrderedSet(config.get_paths('include_paths'))

    self.is_toplevel = toplevel
    self.tree = Tree(self, self.app)
    self.__create_extensions()

    for extension in list(self.extensions.values()):
        if toplevel:
            extension.parse_toplevel_config(config)
        extension.parse_config(config)

    # Subprojects depend on their own config file for staleness tracking.
    if not toplevel and config.conf_file:
        self.app.change_tracker.add_hard_dependency(config.conf_file)

    self.extra_asset_folders = OrderedSet(config.get_paths('extra_assets'))
def __setup_private_folder(self):
    """Create the private working folder, validating any pre-existing path."""
    folder = self.private_folder
    if not os.path.exists(folder):
        os.mkdir(folder)
    elif not os.path.isdir(folder):
        error('setup-issue', '%s exists but is not a directory' % folder)
def setup(self):
    """Set up the extension: parse C sources and create plugin symbols."""
    if not self.cache_file:
        # No cache file configured: only the aggregated "all plugins"
        # page can be produced, if requested.
        if self.list_plugins_page:
            self.__plugins = self.create_symbol(
                GstPluginsSymbol,
                display_name="All " +
                self.project.project_name.replace("-", " ").title(),
                unique_name=self.project.project_name + "-all-gst-plugins",
                plugins=[], all_plugins=True)
        super().setup()
        return

    gather_links()
    comment_parser = GtkDocParser(self.project, False)
    # Only parse C files not already processed by another instance:
    # the parsed-file set is shared class-wide on GstExtension.
    to_parse_sources = set(self.c_sources) - GstExtension.__parsed_cfiles
    CCommentExtractor(self, comment_parser).parse_comments(to_parse_sources)
    GstExtension.__parsed_cfiles.update(self.c_sources)
    if not self.cache:
        error('setup-issue', "No cache loaded or created for %s" % self.plugin)

    plugins = []
    if self.plugin:
        # Normalize the configured plugin name: drop any file extension,
        # then a leading 'libgst' or 'gst' prefix, to match cache keys.
        pname = self.plugin
        dot_idx = pname.rfind('.')
        if dot_idx > 0:
            pname = self.plugin[:dot_idx]
        if pname.startswith('libgst'):
            pname = pname[6:]
        elif pname.startswith('gst'):
            pname = pname[3:]
        try:
            plugin_node = {pname: self.cache[pname]}
        except KeyError:
            error('setup-issue', "Plugin %s not found" % pname)
    else:
        plugin_node = self.cache

    for libfile, plugin in plugin_node.items():
        plugin_sym = self.__parse_plugin(libfile, plugin)
        if not plugin_sym:
            continue
        plugins.append(plugin_sym)

    if not self.plugin:
        # No single plugin selected: publish the aggregated plugins symbol.
        self.__plugins = self.create_symbol(
            GstPluginsSymbol,
            display_name=self.project.project_name.replace("-", " ").title(),
            unique_name=self.project.project_name + "-gst-plugins",
            plugins=plugins)
    super().setup()
def __init__(self, source_file, ast, meta=None, raw_contents=None):
    """Build a Page from a source file, its parsed AST and optional metadata."""
    assert source_file
    name = os.path.splitext(os.path.basename(source_file))[0]
    pagename = '%s.html' % name

    self.ast = ast
    self.extension_name = None
    self.source_file = source_file
    self.raw_contents = raw_contents
    self.comment = None
    self.generated = False
    self.output_attrs = None
    self.subpages = OrderedSet()
    self.symbols = []
    self.typed_symbols = {}
    self.is_stale = True
    self.formatted_contents = None
    self.detailed_description = None

    meta = meta or {}
    try:
        self.meta = Schema(Page.meta_schema).validate(meta)
    except SchemaError as exc:
        error('invalid-page-metadata',
              '%s: Invalid metadata: \n%s' % (self.source_file, str(exc)))

    self.symbol_names = OrderedSet(meta.get('symbols') or [])
    self.short_description = meta.get('short-description')
    self.title = None
    self.__discover_title(meta)
    self.link = Link(pagename, self.title or name, name)
def __setup_private_folder(self):
    """Ensure the private folder exists and is actually a directory."""
    folder = self.get_private_folder()
    if not os.path.exists(folder):
        os.mkdir(folder)
    elif not os.path.isdir(folder):
        error('setup-issue', '%s exists but is not a directory' % folder)
def check_path(init_dir, name):
    """
    Return the path @init_dir/@name, erroring out if it already exists.
    """
    candidate = os.path.join(init_dir, name)
    if os.path.exists(candidate):
        error('setup-issue', '%s already exists' % candidate)
    return candidate
def error(self, code, message):
    """
    Shortcut for `utils.loggable.error`, forwarding @code and @message.

    Args:
        code: see `utils.loggable.error`
        message: see `utils.loggable.error`
    """
    error(code, message)
def parse_config(doc_repo, config):
    """Parse the devhelp-related configuration keys."""
    activated = bool(config.get('devhelp_activate', False))
    DevhelpExtension.activated = activated
    # Devhelp books are keyed by project name, so it must be set.
    if activated and config.get('project_name', None) is None:
        error(
            'invalid-config',
            'To activate the devhelp extension, --project-name has to be '
            'specified.')
def create_default_layout(config):
    """
    Create the default hotdoc project layout (sitemap, hotdoc.json,
    markdown folder and assets folder) under the init directory.

    Args:
        config: the hotdoc config object; provides project name/version
            and the optional 'init_dir' path.
    """
    project_name = config.get('project_name')
    project_version = config.get('project_version')
    if not project_name or not project_version:
        error('setup-issue',
              '--project-name and --project-version must be specified')

    init_dir = config.get_path('init_dir')
    if not init_dir:
        init_dir = config.get_invoke_dir()
    else:
        if os.path.exists(init_dir) and not os.path.isdir(init_dir):
            error(
                'setup-issue',
                'Init directory exists but is not a directory: %s' % init_dir)

    # None of these may pre-exist: check_path() errors out otherwise.
    sitemap_path = check_path(init_dir, 'sitemap.txt')
    conf_path = check_path(init_dir, 'hotdoc.json')
    md_folder_path = check_path(init_dir, 'markdown_files')
    assets_folder_path = check_path(init_dir, 'assets')
    check_path(init_dir, 'built_doc')
    cat_path = os.path.join(assets_folder_path, 'cat.gif')

    # Bug fix: an existing init_dir is explicitly allowed above, but a
    # plain os.makedirs(init_dir) raised FileExistsError in that case.
    os.makedirs(init_dir, exist_ok=True)
    os.makedirs(assets_folder_path)
    os.makedirs(md_folder_path)

    with open(sitemap_path, 'w') as _:
        _.write('index.md\n')

    with open(conf_path, 'w') as _:
        _.write(
            json.dumps(
                {
                    'project_name': project_name,
                    'project_version': project_version,
                    'sitemap': 'sitemap.txt',
                    'index': os.path.join('markdown_files', 'index.md'),
                    'output': 'built_doc',
                    'extra_assets': ['assets']
                },
                indent=4))

    with open(os.path.join(md_folder_path, 'index.md'), 'w') as _:
        _.write('# %s\n' % project_name.capitalize())
        # Best effort: fetching the cat gif may fail (e.g. offline);
        # the generated layout is still valid without it.
        try:
            get_cat(cat_path)
            _.write("\nIt's dangerous to go alone, take this\n")
            _.write('\n![](assets/cat.gif)')
        except:  # pylint: disable=bare-except
            pass
def __license_for_page(self, page):
    """Pick the license declared in @page's metadata, or the default one."""
    try:
        short_name = page.meta['license']
    except KeyError:
        return LicenseExtension.default_license
    try:
        return ALL_LICENSES[short_name]
    except KeyError:
        error('no-such-license',
              'In %s: no such license %s' % (page.source_file, short_name))
def parse_name_from_config(self, config):
    """
    Read project name and version from @config, deriving a sanitized name.
    """
    self.project_name = config.get('project_name', None)
    if not self.project_name:
        error('invalid-config', 'No project name was provided')

    self.project_version = config.get('project_version', None)
    if not self.project_version:
        error('invalid-config', 'No project version was provided')

    # Collapse every run of non-word characters into a single dash.
    self.sanitized_name = re.sub(r'\W+', '-', self.project_name)
def __license_for_page(self, page):
    """Resolve the license to use for @page."""
    if 'license' not in page.meta:
        return LicenseExtension.default_license
    short_name = page.meta['license']
    try:
        return ALL_LICENSES[short_name]
    except KeyError:
        error('no-such-license',
              'In %s: no such license %s' % (page.source_file, short_name))
def get_extension_classes(sort, extra_extension_paths=None):
    """
    Discover all hotdoc extension classes via entry points, optionally
    sorted in topological dependency order.
    """
    all_classes = {}
    deps_map = {}
    for entry_point in pkg_resources.iter_entry_points(
            group='hotdoc.extensions', name='get_extension_classes'):
        # The C extension is skipped here explicitly.
        if entry_point.module_name == 'hotdoc_c_extension.extensions':
            continue
        try:
            activation_function = entry_point.load()
            classes = activation_function()
        # A broken third-party extension must not take hotdoc down.
        # pylint: disable=broad-except
        except Exception as exc:
            print("Failed to load %s" % entry_point.module_name, exc)
            traceback.print_exc()
            continue
        for klass in classes:
            all_classes[klass.extension_name] = klass
    if extra_extension_paths:
        for klass in __get_extra_extension_classes(extra_extension_paths):
            all_classes[klass.extension_name] = klass
    klass_list = list(all_classes.values())
    if not sort:
        return klass_list
    # Build an index-based dependency map for the topological sort:
    # only upstream dependencies constrain ordering.
    for i, klass in enumerate(klass_list):
        deps = klass.get_dependencies()
        topodeps = set()
        for dep in deps:
            if dep.dependency_name not in all_classes:
                if dep.optional:
                    continue
                else:
                    error("setup-issue",
                          "Missing dependency %s for %s" %
                          (dep.dependency_name, klass.extension_name))
            if dep.is_upstream:
                topodeps.add(klass_list.index(
                    all_classes[dep.dependency_name]))
        deps_map[i] = topodeps
    sorted_class_indices = toposort_flatten(deps_map)
    sorted_classes = [klass_list[i] for i in sorted_class_indices]
    return sorted_classes
def _resolve_placeholder(self, tree, name, include_paths):
    """Register @name as a subproject when it refers to a json config file."""
    if os.path.splitext(name)[1] != '.json':
        return None
    conf_path = inclusions.find_file(name, include_paths)
    if not conf_path:
        error(
            'invalid-config',
            '(%s) Could not find subproject config file %s' %
            (self.project.sanitized_name, name))
    self.project.add_subproject(name, conf_path)
    return PageResolutionResult(True, None, None, None)
def __init__(self, command_line_args=None, conf_file=None, defaults=None):
    """
    Constructor for `ConfigParser`.

    Args:
        command_line_args: list, a list of command line arguments that
            will override the keys defined in `conf_file`, or `None`
        conf_file: str, the path to the configuration file. If `None`,
            `ConfigParser` will look for a file named `hotdoc.json` in
            the current directory.
        defaults: dict, fallback values for keys set neither on the
            command line nor in the config file.
    """
    self.__conf_file = None
    # Bug fix: __conf_dir was only ever assigned when conf_file was
    # given, leaving it undefined otherwise (only _conf_dir had been
    # initialized — probably a typo). Keep _conf_dir for compatibility.
    self.__conf_dir = None
    self._conf_dir = None
    contents = '{}'
    if conf_file:
        self.__conf_file = os.path.abspath(conf_file)
        self.__conf_dir = os.path.dirname(self.__conf_file)
        try:
            with open(self.__conf_file, 'r') as _:
                contents = _.read()
        except IOError:
            # Missing file: fall back to an empty configuration.
            pass

    self.__invoke_dir = os.getcwd()

    try:
        self.__config = json.loads(contents)
    except ValueError as ze_error:
        error('invalid-config',
              'The provided configuration file %s is not valid json.\n'
              'The exact error was %s.\n'
              'This often happens because of missing or extra commas, '
              'but it may be something else, please fix it!\n' %
              (conf_file, str(ze_error)))

    self.__cli = command_line_args or {}
    self.__defaults = defaults or {}

    index = self.get_index()
    if index:
        self.__base_index_path = os.path.dirname(index)
    else:
        self.__base_index_path = ''
def parse_config(doc_repo, config):
    """Parse the default license and copyright-holder configuration."""
    short_name = config.get("default-license")
    if short_name is not None:
        try:
            LicenseExtension.default_license = ALL_LICENSES[short_name]
        except KeyError:
            error('no-such-license', 'Unknown license : %s' % short_name)

    holders_data = config.get("default-copyright-holders")
    if holders_data:
        try:
            holders_data = Schema(
                [base_copyright_schema]).validate(holders_data)
        except SchemaError:
            error('invalid-config',
                  'Invalid default copyright holders metadata : %s' %
                  str(holders_data))
        for holder in holders_data:
            LicenseExtension.default_copyright_holders.append(
                _copyright_holder_from_data(holder))

    LicenseExtension.authors_hold_copyright = config.get(
        "authors_hold_copyright", True)
def __license_for_page(self, page, code_samples=False):
    """
    Resolve the license for @page, or its code-sample license when
    @code_samples is True, falling back to the matching default.
    """
    key = 'code-samples-license' if code_samples else 'license'
    if key in page.meta:
        short_name = page.meta[key]
        try:
            license_ = ALL_LICENSES[short_name]
        except KeyError:
            error('no-such-license',
                  'In %s: no such license %s' % (page.source_file, short_name))
    elif code_samples:
        license_ = self.default_code_samples_license
    else:
        license_ = self.default_license
    return license_
def parse_config(doc_repo, config):
    """Parse default license and copyright holder settings from @config."""
    name = config.get("default-license")
    if name is not None:
        try:
            LicenseExtension.default_license = ALL_LICENSES[name]
        except KeyError:
            error('no-such-license', 'Unknown license : %s' % name)

    data = config.get("default-copyright-holders")
    if data:
        try:
            data = Schema([base_copyright_schema]).validate(data)
        except SchemaError:
            error(
                'invalid-config',
                'Invalid default copyright holders metadata : %s' % str(data))
        LicenseExtension.default_copyright_holders.extend(
            _copyright_holder_from_data(datum) for datum in data)

    LicenseExtension.authors_hold_copyright = config.get(
        "authors_hold_copyright", True)
def parse_config(self, config):
    """Parse the license-extension configuration from @config."""
    super(LicenseExtension, self).parse_config(config)

    def lookup_license(key):
        # Return the license named by @key, None if the key is unset.
        name = config.get(key)
        if name is None:
            return None
        try:
            return ALL_LICENSES[name]
        except KeyError:
            error('no-such-license', 'Unknown license : %s' % name)

    default = lookup_license("default_license")
    if default is not None:
        self.default_license = default

    samples_default = lookup_license("default_code_samples_license")
    if samples_default is not None:
        self.default_code_samples_license = samples_default

    data = config.get("default_copyright_holders")
    if data:
        try:
            data = Schema([BASE_COPYRIGHT_SCHEMA]).validate(data)
        except SchemaError:
            error('invalid-config',
                  'Invalid default copyright holders metadata : %s' %
                  str(data))
        for datum in data:
            years = [str(year) for year in datum.get('years')]
            self.default_copyright_holders.append(
                CopyrightHolder(datum.get('name'), datum.get('email'),
                                years, True))

    self.authors_hold_copyright = config.get(
        "authors_hold_copyright", True)
def page_from_raw_text(self, source_file, contents):
    """
    Build a Page from raw markdown @contents, extracting an optional
    yaml metadata block delimited by '---' / '...'.
    """
    raw_contents = contents
    meta = {}
    if contents.startswith('---\n'):
        split = contents.split('\n...\n', 1)
        if len(split) == 2:
            contents = split[1]
            try:
                # NOTE(review): yaml.load_all without an explicit Loader
                # can execute arbitrary python tags from the page's
                # metadata — consider yaml.safe_load_all; confirm no
                # pages rely on non-safe tags before changing.
                blocks = yaml.load_all(split[0])
                for block in blocks:
                    meta.update(block)
            except ConstructorError as exception:
                error('invalid-page-metadata',
                      '%s: Invalid metadata: \n%s' % (source_file,
                                                      str(exception)))
    ast = cmark.hotdoc_to_ast(contents, self)
    return Page(source_file, ast, meta=meta, raw_contents=raw_contents)
def load_conf_file(self, conf_file, overrides):
    """
    Load the project from a configuration file and key-value overrides.

    Args:
        conf_file: str or None, path to the config file; falls back to
            'hotdoc.json' in the current directory when present.
        overrides: dict, command-line values overriding the config file.
    """
    if conf_file is None and os.path.exists('hotdoc.json'):
        conf_file = 'hotdoc.json'
    self.__conf_file = conf_file

    if conf_file and not os.path.exists(conf_file):
        error('invalid-config',
              "No configuration file was found at %s" % conf_file)

    actual_args = {}
    defaults = {'output_format': 'html'}
    for key, value in overrides.items():
        if key in ('cmd', 'conf_file', 'dry'):
            continue
        # Only values differing from argparse defaults really override;
        # non-None defaults are still recorded as fallbacks.
        if value != self.parser.get_default(key):
            actual_args[key] = value
        if self.parser.get_default(key) is not None:
            defaults[key] = value

    self.config = ConfigParser(command_line_args=actual_args,
                               conf_file=conf_file, defaults=defaults)

    index = self.config.get_index()
    if index:
        # Bug fix: hashlib.md5 requires bytes; passing the index path
        # as str raised TypeError. Encode it first.
        hash_obj = hashlib.md5(index.encode('utf-8'))
        priv_name = 'hotdoc-private-' + hash_obj.hexdigest()
    else:
        priv_name = 'hotdoc-private'
    self.__private_folder = os.path.abspath(priv_name)
def load_config_json(conf_file):
    """Load @conf_file as json, returning {} when the file is absent."""
    try:
        with open(conf_file) as config_fd:
            try:
                json_conf = json.load(config_fd)
            except ValueError as ze_error:
                error(
                    'invalid-config',
                    'The provided configuration file %s is not valid json.\n'
                    'The exact error was %s.\n'
                    'This often happens because of missing or extra commas, '
                    'but it may be something else, please fix it!\n' %
                    (conf_file, str(ze_error)))
    except FileNotFoundError:
        # No config file at all is fine: empty configuration.
        json_conf = {}
    except IOError as _err:
        error(
            'setup-issue',
            'Passed config file %s could not be opened (%s)' %
            (conf_file, _err))
    return json_conf
def __parse_config(self):
    """
    Parse the core configuration and initialise the doc repo's state.
    """
    self.sitemap_path = self.config.get_path('sitemap')
    if self.sitemap_path is None:
        error('invalid-config', 'No sitemap was provided')

    output = self.config.get_path('output') or None
    self.output = os.path.abspath(output) if output is not None else None

    self.project_name = self.config.get('project_name', None)
    self.project_version = self.config.get('project_version', None)

    self.output_format = self.config.get('output_format')
    if self.output_format not in ["html"]:
        error('invalid-config',
              'Unsupported output format : %s' % self.output_format)

    self.__index_file = self.config.get_index()
    if self.__index_file is None:
        error('invalid-config', 'index is required')
    if not os.path.exists(self.__index_file):
        error('invalid-config',
              'The provided index "%s" does not exist' % self.__index_file)

    # The index's folder always heads the include path list.
    self.__base_doc_folder = os.path.dirname(self.__index_file)
    self.include_paths = OrderedSet([self.__base_doc_folder])
    self.include_paths |= OrderedSet(
        self.config.get_paths('include_paths'))

    self.__create_change_tracker()
    self.__setup_private_folder()
    self.__setup_database()
    self.__create_extensions(
        self.config.get_paths('extra_extension_paths'))

    if self.__conf_file:
        self.change_tracker.add_hard_dependency(self.__conf_file)
def __parse_config(self):
    """
    Parse the core configuration stored in self.config and set up the
    output, index, include paths, tracking and extensions.
    """
    config = self.config

    output = config.get_path('output') or None
    self.sitemap_path = config.get_path('sitemap')
    if self.sitemap_path is None:
        error('invalid-config', 'No sitemap was provided')
    self.output = os.path.abspath(output) if output is not None else None

    self.project_name = config.get('project_name', None)
    self.project_version = config.get('project_version', None)
    self.output_format = config.get('output_format')
    if self.output_format not in ["html"]:
        error('invalid-config',
              'Unsupported output format : %s' % self.output_format)

    self.__index_file = config.get_index()
    if self.__index_file is None:
        error('invalid-config', 'index is required')
    if not os.path.exists(self.__index_file):
        error('invalid-config',
              'The provided index "%s" does not exist' % self.__index_file)

    cmd_line_includes = config.get_paths('include_paths')
    self.__base_doc_folder = os.path.dirname(self.__index_file)
    self.include_paths = OrderedSet([self.__base_doc_folder])
    self.include_paths |= OrderedSet(cmd_line_includes)

    self.__create_change_tracker()
    self.__setup_private_folder()
    self.__setup_database()
    self.__create_extensions()

    if self.__conf_file:
        self.change_tracker.add_hard_dependency(self.__conf_file)
def __parse_pages(self, change_tracker, sitemap):
    """
    Parse the markdown pages listed in @sitemap, reusing unchanged
    pages, and return the set of symbol names no longer listed anywhere.
    """
    source_files = []
    source_map = {}
    # Resolve every sitemap entry to either a markdown file on disk or
    # an extension-provided placeholder page.
    for i, fname in enumerate(sitemap.get_all_sources().keys()):
        resolved = self.resolve_placeholder_signal(self, fname,
                                                   self.__include_paths)
        if resolved is None:
            source_file = find_md_file(fname, self.__include_paths)
            source_files.append(source_file)
            if source_file is None:
                error('no-such-subpage',
                      'No markdown file found for %s' % fname,
                      filename=sitemap.source_file, lineno=i, column=0)
            source_map[source_file] = fname
        else:
            resolved, ext_name = resolved
            if ext_name:
                self.__placeholders[fname] = ext_name
            if resolved is not True:
                source_files.append(resolved)
                source_map[resolved] = fname
            else:
                # Fully virtual page: create a generated Page once.
                if fname not in self.__all_pages:
                    page = Page(fname, None)
                    page.generated = True
                    self.__all_pages[fname] = page

    # Only re-parse files the change tracker flags as stale.
    stale, unlisted = change_tracker.get_stale_files(
        source_files, 'user-pages')
    old_user_symbols = set()
    new_user_symbols = set()
    for source_file in stale:
        pagename = source_map[source_file]
        prev_page = self.__all_pages.get(pagename)
        if prev_page:
            old_user_symbols |= prev_page.symbol_names
        page = self.__parse_page(source_file)
        new_user_symbols |= page.symbol_names
        # Symbols newly listed on this page must have their previous
        # hosting pages marked stale.
        newly_listed_symbols = OrderedSet(page.symbol_names)
        if prev_page:
            newly_listed_symbols -= prev_page.symbol_names
        self.stale_symbol_pages(newly_listed_symbols, page)
        if prev_page:
            page.subpages |= prev_page.subpages
        self.__all_pages[pagename] = page

    # Drop pages whose source file disappeared from the sitemap.
    unlisted_pagenames = set()
    for source_file in unlisted:
        prev_page = None
        rel_path = None
        for ipath in self.__include_paths:
            rel_path = os.path.relpath(source_file, ipath)
            prev_page = self.__all_pages.get(rel_path)
            if prev_page:
                break
        if not prev_page:
            continue
        old_user_symbols |= prev_page.symbol_names
        self.__all_pages.pop(rel_path)
        unlisted_pagenames.add(rel_path)

    # Refresh subpage sets from the sitemap, minus removed pages.
    for source_file in source_files:
        page = self.__all_pages[source_map[source_file]]
        page.subpages |= sitemap.get_subpages(source_map[source_file])
        page.subpages -= unlisted_pagenames

    return old_user_symbols - new_user_symbols
def __parse_pages(self, sitemap):
    """
    Parse the pages listed in @sitemap (including overrides and
    extension placeholders) and return the set of symbol names that
    are no longer listed anywhere.
    """
    change_tracker = self.app.change_tracker
    source_files = []
    source_map = {}
    placeholders = []
    # Extension-provided override pages replace sitemap sources.
    overrides = self.list_override_pages_signal(
        self, self.project.include_paths) or []
    for override in overrides:
        source_files.append(override.file)
        source_map[override.file] = override.source_file
    for i, fname in enumerate(sitemap.get_all_sources().keys()):
        resolved = self.resolve_placeholder_signal(
            self, fname, self.project.include_paths)
        if resolved is None:
            source_file = find_file(fname, self.project.include_paths)
            source_files.append(source_file)
            if source_file is None:
                error('no-such-subpage',
                      'No markdown file found for %s' % fname,
                      filename=sitemap.source_file, lineno=i, column=0)
            source_map[source_file] = fname
        else:
            resolved, ext_name = resolved
            if ext_name:
                self.__placeholders[fname] = ext_name
            if resolved is not True:
                source_files.append(resolved)
                source_map[resolved] = fname
            else:
                # Fully virtual page: create a generated Page once.
                if fname not in self.__all_pages:
                    page = Page(fname, None, '',
                                self.project.sanitized_name)
                    page.generated = True
                    self.__all_pages[fname] = page
                placeholders.append(fname)

    # Only re-parse files the change tracker flags as stale; the key is
    # per-project so subprojects track independently.
    stale, unlisted = change_tracker.get_stale_files(
        source_files, 'user-pages-%s' % self.project.sanitized_name)
    old_user_symbols = set()
    new_user_symbols = set()
    for source_file in stale:
        pagename = source_map[source_file]
        prev_page = self.__all_pages.get(pagename)
        if prev_page:
            old_user_symbols |= prev_page.symbol_names
        page = self.__parse_page(source_file)
        new_user_symbols |= page.symbol_names
        # Symbols newly listed here must invalidate their old pages.
        newly_listed_symbols = OrderedSet(page.symbol_names)
        if prev_page:
            newly_listed_symbols -= prev_page.symbol_names
        self.stale_symbol_pages(newly_listed_symbols, page)
        if prev_page:
            page.subpages |= prev_page.subpages
        self.__all_pages[pagename] = page

    # Drop pages whose source files left the sitemap.
    unlisted_pagenames = set()
    for source_file in unlisted:
        prev_page = None
        rel_path = None
        for ipath in self.project.include_paths:
            rel_path = os.path.relpath(source_file, ipath)
            prev_page = self.__all_pages.get(rel_path)
            if prev_page:
                break
        if not prev_page:
            continue
        old_user_symbols |= prev_page.symbol_names
        self.__all_pages.pop(rel_path)
        unlisted_pagenames.add(rel_path)

    def setup_subpages(pagenames, get_pagename):
        """Setup subpages for pages with names in @pagenames"""
        sitemap_pages = sitemap.get_all_sources()
        for pagename in pagenames:
            page = self.__all_pages[get_pagename(pagename)]
            subpages = sitemap_pages.get(get_pagename(pagename), [])
            page.subpages = OrderedSet(subpages) | page.subpages
            for subpage_name in page.subpages:
                if subpage_name not in unlisted_pagenames:
                    subpage = self.__all_pages[subpage_name]
                    # Pages not opting into auto-sort keep sitemap order.
                    if not subpage.meta.get('auto-sort', False):
                        subpage.pre_sorted = True
            page.subpages -= unlisted_pagenames

    setup_subpages(source_files, lambda x: source_map[x])
    setup_subpages(placeholders, lambda x: x)

    return old_user_symbols - new_user_symbols
def parse_config(doc_repo, config):
    """Record whether the devhelp output was requested in @config."""
    DevhelpExtension.activated = bool(config.get('devhelp_activate', False))
    missing_name = config.get('project_name', None) is None
    if DevhelpExtension.activated and missing_name:
        error('invalid-config',
              'To activate the devhelp extension, --project-name has to be '
              'specified.')
def parse(self, filename):
    """
    Parse a sitemap file.

    Args:
        filename: str, the path to the sitemap file.

    Returns:
        Sitemap: the generated sitemap.
    """
    with io.open(filename, 'r', encoding='utf-8') as _:
        lines = _.readlines()

    all_source_files = set()
    lineno = 0
    root = None
    index = None
    cur_level = -1
    parent_queue = []

    for line in lines:
        try:
            level, line = dedent(line)
        except IndentError as exc:
            error('bad-indent', 'Invalid indentation',
                  filename=filename, lineno=lineno, column=exc.column)

        if not line:
            # Bug fix: skipped lines must still advance lineno, or all
            # subsequent diagnostics point at the wrong sitemap line.
            lineno += 1
            continue

        source_file = dequote(line)
        if not source_file:
            lineno += 1
            continue

        if source_file in all_source_files:
            error('sitemap-duplicate', 'Filename listed twice',
                  filename=filename, lineno=lineno,
                  column=level * 8 + 1)
        all_source_files.add(source_file)

        page = OrderedDict()

        if root is not None and level == 0:
            error('sitemap-error', 'Sitemaps only support one root',
                  filename=filename, lineno=lineno, column=0)

        if root is None:
            root = page
            index = source_file
        else:
            # Pop back up to the parent matching this entry's level.
            lvl_diff = cur_level - level
            while lvl_diff >= 0:
                parent_queue.pop()
                lvl_diff -= 1
            parent_queue[-1][source_file] = page

        parent_queue.append(page)
        cur_level = level
        lineno += 1

    return Sitemap(root, filename, index)
def build(self, sitemap, extensions):
    """
    Build the page tree from @sitemap, dispatching entries to the
    extension that owns them ('-index' entries switch ownership).
    """
    ext_level = -1
    # Re-key extensions by their argument prefix for index lookup.
    extensions = {ext.argument_prefix: ext for ext in extensions.values()}
    ext_pages = {}
    ext_index = None
    extension = None
    ext_name = 'core'
    sitemap_pages = sitemap.get_all_sources()
    self.__all_pages = {}
    for name, level in sitemap:
        page = None
        # Leaving the subtree of the current extension index: flush its
        # remaining generated pages and reset to 'core'.
        if level <= ext_level:
            self.add_unordered_subpages(extension, ext_index, ext_pages)
            extension = None
            ext_level = -1
            ext_pages = {}
            ext_index = None
            ext_name = 'core'
        if extension:
            smart_key = extension.get_possible_path(name)
        else:
            smart_key = None
        if name.endswith('-index'):
            # An extension index page: hand ownership of the subtree to
            # the extension named by the prefix.
            ext_name = name[:-6]
            extension = extensions.get(ext_name)
            if extension is None:
                position = sitemap.get_position(name)
                error(
                    'index-extension-not-found',
                    'No extension named %s for index page' % ext_name,
                    filename=sitemap.source_file,
                    lineno=position[0],
                    column=position[1])
                continue
            ext_level = level
            ext_pages = extension.make_pages()
            page = ext_pages['%s-index' % ext_name]
            del ext_pages['%s-index' % ext_name]
            ext_index = page
        elif name in ext_pages:
            page = ext_pages[name]
            del ext_pages[name]
        elif smart_key in ext_pages:
            page = ext_pages[smart_key]
            del ext_pages[smart_key]
        else:
            source_file = find_file(name, self.project.include_paths)
            if source_file is None:
                position = sitemap.get_position(name)
                error(
                    'page-not-found',
                    'No markdown file found for %s' % name,
                    filename=sitemap.source_file,
                    lineno=position[0],
                    column=position[1])
            ext = os.path.splitext(name)[1]
            if ext == '.json':
                # A json entry denotes a subproject, not a markdown page.
                self.project.add_subproject(name, source_file)
                page = Page(
                    name, True, self.project.sanitized_name, 'core')
            else:
                page = self.parse_page(source_file, ext_name)
        page.extension_name = extension.extension_name if extension else 'core'
        self.__all_pages[name] = page
        subpages = sitemap_pages.get(name, [])
        page.subpages = OrderedSet(subpages) | page.subpages
        if not page.meta.get('auto-sort', False):
            page.pre_sorted = True
    if ext_index:
        # Flush the last extension subtree's remaining pages.
        self.add_unordered_subpages(extension, ext_index, ext_pages)
    self.root = self.__all_pages[sitemap.index_file]
def parse(self, filename):
    """
    Parse a sitemap file.

    Args:
        filename: str, the path to the sitemap file.

    Returns:
        Sitemap: the generated sitemap.
    """
    with io.open(filename, 'r', encoding='utf-8') as _:
        lines = _.readlines()
    all_source_files = set()
    source_map = {}
    lineno = 0
    root = None
    index = None
    cur_level = -1
    parent_queue = []
    for line in lines:
        try:
            level, line = dedent(line)
            # '#' introduces a comment line; '\#' escapes a literal '#'.
            if line.startswith('#'):
                lineno += 1
                continue
            elif line.startswith('\\#'):
                line = line[1:]
        except IndentError as exc:
            error('bad-indent', 'Invalid indentation',
                  filename=filename, lineno=lineno, column=exc.column)
        if not line:
            lineno += 1
            continue
        source_file = dequote(line)
        if not source_file:
            lineno += 1
            continue
        if source_file in all_source_files:
            error('sitemap-duplicate', 'Filename listed twice',
                  filename=filename, lineno=lineno,
                  column=level * 8 + 1)
        all_source_files.add(source_file)
        # Remember where each entry sits, for later diagnostics.
        source_map[source_file] = (lineno, level * 8 + 1)
        page = OrderedDict()
        if root is not None and level == 0:
            error('sitemap-error', 'Sitemaps only support one root',
                  filename=filename, lineno=lineno, column=0)
        if root is None:
            root = page
            index = source_file
        else:
            # Pop back up to the parent matching this entry's level.
            lvl_diff = cur_level - level
            while lvl_diff >= 0:
                parent_queue.pop()
                lvl_diff -= 1
            parent_queue[-1][source_file] = page
        parent_queue.append(page)
        cur_level = level
        lineno += 1
    return Sitemap(root, filename, index, source_map)
def __parse_pages(self, change_tracker, sitemap):
    """
    Parse the markdown pages listed in @sitemap, re-parsing only stale
    files, and return the set of symbol names no longer listed anywhere.
    """
    source_files = []
    source_map = {}
    # Resolve each sitemap entry to a markdown file on disk or to an
    # extension-provided placeholder.
    for i, fname in enumerate(sitemap.get_all_sources().keys()):
        resolved = self.resolve_placeholder_signal(
            self, fname, self.__include_paths)
        if resolved is None:
            source_file = find_md_file(fname, self.__include_paths)
            source_files.append(source_file)
            if source_file is None:
                error(
                    'no-such-subpage',
                    'No markdown file found for %s' % fname,
                    filename=sitemap.source_file, lineno=i, column=0)
            source_map[source_file] = fname
        else:
            resolved, ext_name = resolved
            if ext_name:
                self.__placeholders[fname] = ext_name
            if resolved is not True:
                source_files.append(resolved)
                source_map[resolved] = fname
            else:
                # Fully virtual page: create a generated Page once.
                if fname not in self.__all_pages:
                    page = Page(fname, None)
                    page.generated = True
                    self.__all_pages[fname] = page

    stale, unlisted = change_tracker.get_stale_files(
        source_files, 'user-pages')
    old_user_symbols = set()
    new_user_symbols = set()
    for source_file in stale:
        pagename = source_map[source_file]
        prev_page = self.__all_pages.get(pagename)
        if prev_page:
            old_user_symbols |= prev_page.symbol_names
        page = self.__parse_page(source_file)
        new_user_symbols |= page.symbol_names
        # Symbols newly listed here must invalidate their old pages.
        newly_listed_symbols = OrderedSet(page.symbol_names)
        if prev_page:
            newly_listed_symbols -= prev_page.symbol_names
        self.stale_symbol_pages(newly_listed_symbols, page)
        if prev_page:
            page.subpages |= prev_page.subpages
        self.__all_pages[pagename] = page

    # Forget pages whose source file left the sitemap.
    unlisted_pagenames = set()
    for source_file in unlisted:
        prev_page = None
        rel_path = None
        for ipath in self.__include_paths:
            rel_path = os.path.relpath(source_file, ipath)
            prev_page = self.__all_pages.get(rel_path)
            if prev_page:
                break
        if not prev_page:
            continue
        old_user_symbols |= prev_page.symbol_names
        self.__all_pages.pop(rel_path)
        unlisted_pagenames.add(rel_path)

    # Refresh subpage sets from the sitemap, minus removed pages.
    for source_file in source_files:
        page = self.__all_pages[source_map[source_file]]
        page.subpages |= sitemap.get_subpages(source_map[source_file])
        page.subpages -= unlisted_pagenames

    return old_user_symbols - new_user_symbols