Example #1
    def _connect(self, options: CombinedOptions):
        """
        Connect to PostgreSQL database using parameters from options.
        Save connection object into self._con.

        options (CombinedOptions) — CombinedOptions object with options from tag
                                   and config.
        """
        try:
            self._con = None
            self.logger.debug(
                f"Trying to connect: host={options['host']} port={options['port']}"
                f" dbname={options['dbname']}, user={options['user']} "
                f"password={options['password']}.")
            self._con = psycopg2.connect(f"host='{options['host']}' "
                                         f"port='{options['port']}' "
                                         f"dbname='{options['dbname']}' "
                                         f"user='******'user']}'"
                                         f"password='******'password']}'")
        except psycopg2.OperationalError:
            info = traceback.format_exc()
            output(
                f"\nFailed to connect to host {options['host']}. "
                'Documentation was not generated', self.quiet)
            self.logger.debug(f'Failed to connect: host={options["host"]}'
                              f' port={options["port"]}'
                              f' dbname={options["dbname"]} '
                              f'user={options["user"]} '
                              f'password={options["password"]}.\n\n{info}')
            raise psycopg2.OperationalError
    def _warning(self, msg: str, context='', error: Exception = None):
        '''
        Log warning and print to user.

        If debug mode — print also context (if specified) and error (if specified).

        msg — message which should be logged;
        context (optional) — tag context obtained with the get_tag_context function. If
                             specified — will be logged. If debug = True it
                             will also go to STDOUT.
        error (optional) — exception which was caught before the warning. If
                           specified — its traceback will be added to the log
                           (and debug output) message.
        '''
        output_message = ''
        if self.current_filename:
            output_message += f'[{self.current_filename}] '
        output_message += msg + '\n'
        log_message = output_message
        if context:
            log_message += f'Context:\n---\n{context}\n---\n'
        if error:
            tb_str = traceback.format_exception(type(error),
                                                error,
                                                error.__traceback__)
            log_message += '\n'.join(tb_str)
        if self.debug:
            output_message = log_message
        output(f'WARNING: {output_message}', self.quiet)
        self.logger.warning(log_message)
    def download_all(self):
        for file_dict in self.config['queue']:
            try:
                self.download_file(file_dict)
            except (BadConfigException, DownloadError) as e:
                if self.config['fail_fast']:
                    raise
                else:
                    self.logger.warning(f'{e}. Skipping.')
                    output(f'{e}. Skipping.', self.quiet)
    def make(self, target: str) -> str:
        with spinner(f'Making {target}', self.logger, self.quiet, self.debug):
            output('', self.quiet)  # empty line for better output
            try:
                if target == 'confluence':
                    return self._build()
                else:
                    raise ValueError(f'Confluence cannot make {target}')

            except Exception as exception:
                raise RuntimeError(f'Build failed: {exception}')
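The methods in this and the following examples report through two channels at once: the logger for the debug log, and an output() helper for the user-facing console that respects the quiet flag. The helper itself is not shown in the snippets; a minimal stand-in, with the signature assumed from the call sites output(msg, self.quiet), could look like this:

    # Hypothetical stand-in for the output() helper used throughout the examples.
    # Assumption: it simply prints the message unless quiet mode is enabled.
    def output(text: str, quiet: bool = False) -> None:
        if not quiet:
            print(text)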
Example #5
    def _get_repo_web_url(self, repo_path: Path) -> str:
        repo_web_url = self.options['protocol'] + '://'

        command = f'git remote show {self.options["remote_name"]}'

        self.logger.debug(
            f'Running the command to get repo fetch URL: {command}')

        git_remote_info = run(command,
                              cwd=repo_path,
                              shell=True,
                              check=True,
                              stdout=PIPE,
                              stderr=STDOUT)

        self.logger.debug('Processing the command output')

        git_remote_info_decoded = git_remote_info.stdout.decode(
            'utf8', errors='ignore').replace('\r\n', '\n')

        fetch_url_match = re.search(r'^  Fetch URL: (?P<url>.+)$',
                                    git_remote_info_decoded,
                                    flags=re.MULTILINE)

        if fetch_url_match:
            fetch_url = fetch_url_match.group('url')

            self.logger.debug(f'Fetch URL: {fetch_url}')

            if fetch_url.startswith('git'):
                repo_web_url += re.sub(
                    r'^git\@(?P<host>[^:]+):(?P<repo>.+)\.git$',
                    r'\g<host>/\g<repo>', fetch_url)

            elif fetch_url.startswith('http'):
                repo_web_url += re.sub(
                    r'^https?:\/\/(?P<host>[^\/]+)\/(?P<repo>.+)\.git$',
                    r'\g<host>/\g<repo>', fetch_url)

            else:
                self.logger.debug('Fetch URL protocol is not supported')

        else:
            warning_message = f'WARNING: cannot get fetch URL for the repo: {repo_path}'

            output(warning_message, self.quiet)

            self.logger.warning(warning_message)

        self.logger.debug(f'Repo Web URL: {repo_web_url}')

        return repo_web_url
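To see what the two substitutions above produce, here is a small standalone check with made-up fetch URLs (the host and repository names are purely illustrative):

    import re

    # Illustrative only: these fetch URLs are invented.
    ssh_fetch_url = 'git@example.com:team/project.git'
    http_fetch_url = 'https://example.com/team/project.git'

    # SSH-style URL: git@host:repo.git -> host/repo
    print(re.sub(r'^git\@(?P<host>[^:]+):(?P<repo>.+)\.git$',
                 r'\g<host>/\g<repo>', ssh_fetch_url))    # example.com/team/project

    # HTTP(S) URL: https://host/repo.git -> host/repo
    print(re.sub(r'^https?:\/\/(?P<host>[^\/]+)\/(?P<repo>.+)\.git$',
                 r'\g<host>/\g<repo>', http_fetch_url))   # example.com/team/project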
Example #6
    def apply(self):
        self.logger.info('Applying preprocessor')

        self.logger.debug(f'Allowed targets: {self.options["targets"]}')
        self.logger.debug(f'Current target: {self.context["target"]}')

        if not self.options['targets'] or self.context['target'] in self.options['targets']:
            if self.options['commands']:
                for command in self.options['commands']:
                    command = command.replace(
                        '${PROJECT_DIR}',
                        f'{self.project_path.absolute().as_posix()}'
                    )

                    command = command.replace(
                        '${SRC_DIR}',
                        f'{(self.project_path / self.config["src_dir"]).absolute().as_posix()}'
                    )

                    command = command.replace(
                        '${WORKING_DIR}',
                        f'{self.working_dir.absolute().as_posix()}'
                    )

                    command = command.replace(
                        '${BACKEND}',
                        f'{self.context["backend"]}'
                    )

                    command = command.replace(
                        '${TARGET}',
                        f'{self.context["target"]}'
                    )

                    try:
                        self.logger.debug(f'Running command: {command}')

                        command_output = run(command, shell=True, check=True, stdout=PIPE, stderr=STDOUT)

                        if command_output.stdout:
                            command_output_decoded = command_output.stdout.decode('utf8', errors='ignore')

                            output(command_output_decoded, self.quiet)

                            self.logger.debug(f'Output of the command: {command_output_decoded}')

                    except CalledProcessError as exception:
                        self.logger.error(str(exception))

                        raise RuntimeError(f'Failed: {exception.output.decode()}')

        self.logger.info('Preprocessor applied')
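The placeholder expansion above is plain string replacement applied before the command is run; a short sketch with invented values (the command and all paths here are not from a real config):

    # Illustrative only: the command and all values are invented.
    command = 'build_api_docs.sh --src ${SRC_DIR} --backend ${BACKEND}'

    substitutions = {
        '${SRC_DIR}': '/home/user/project/src',
        '${BACKEND}': 'mkdocs',
    }

    for placeholder, value in substitutions.items():
        command = command.replace(placeholder, value)

    print(command)
    # build_api_docs.sh --src /home/user/project/src --backend mkdocs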
Example #7
    def _to_md(self, data: dict, doc_template: str) -> str:
        try:
            template = self._env.get_template(doc_template)
            result = template.render(tables=data['tables'],
                                     functions=data['functions'],
                                     triggers=data['triggers'])
        except Exception as e:
            output(f'\nFailed to render doc template {doc_template}:',
                   self.quiet)
            info = traceback.format_exc()
            self.logger.debug(f'Failed to render doc template:\n\n{info}')
            return ''
        return result
    def _unescape(self, options: Dict[str, OptionValue], full_tag: str) -> str:
        '''Replace the ``<escaped>`` tag with the content of the corresponding file.

        :param options: Tag options (i.e. attributes)
        :param full_tag: Full source of the tag; returned unchanged if the
            saved escaped content is not found

        :returns: The content of the file that is defined
            by the ``hash`` attribute
        '''

        self.logger.debug(f'Processing the tag, options: {options}')

        saved_content_hash = options.get('hash', '')

        saved_content_file_path = self._cache_dir_path / f'{saved_content_hash}.md'

        self.logger.debug(
            f'Restoring raw content from the file: {saved_content_file_path}')

        if saved_content_file_path.exists():
            with open(saved_content_file_path,
                      encoding='utf8') as saved_content_file:
                saved_content = saved_content_file.read()

            if self.pattern.search(saved_content):
                self.logger.debug(
                    'Recursive call of the <escaped> tags processing')

                saved_content = self.unescape(saved_content)

            return saved_content

        else:
            warning_message = f'WARNING: saved escaped code not found, hash: {saved_content_hash}'

            output(warning_message, self.quiet)

            self.logger.warning(warning_message)

            return full_tag
Example #9
    def execute_queue(self):
        """
        Generate all diagrams which were scheduled in the queue. The queue is grouped by
        cmd_args so diagrams with the same settings will be processed by a single PlantUML
        instance.
        """

        self.logger.debug(
            f'Generating diagrams. Number of queues: {len(self._queue)}')
        pipe_args = [
            '-pipe', '-pipeNoStderr', '-pipedelimitor', PIPE_DELIMITER
        ]

        for args, sources, filenames in self._queue.values():
            self.logger.debug(
                f'Queue started. Number of diagrams: {len(sources)}')
            full_args = [*args, *pipe_args]
            self.logger.debug(
                f'Generating diagrams from queue, command: {" ".join(full_args)}'
            )
            p = Popen(full_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)

            input_str = '\n\n'.join(sources).encode()
            r = p.communicate(input_str)

            results = r[0].split(PIPE_DELIMITER.encode())[:-1]
            self.logger.debug(
                f'Queue processed. Number of results: {len(results)}.')

            for bytes_, dest in zip(results, filenames):
                if bytes_.strip().startswith(b'ERROR'):
                    message = f'{"*"*10}\nFailed to generate diagram {dest}:\n{bytes_.decode()}'
                    self.logger.warning(message)
                    output(message, self.quiet)
                else:
                    with open(dest, 'wb') as f:
                        f.write(bytes_.strip())
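All diagrams of one queue are sent to a single PlantUML process in pipe mode, so their images come back as one stdout stream separated by the delimiter; the split into per-diagram results works roughly like this (the delimiter value and payloads below are invented for illustration):

    # Illustrative only: the real PIPE_DELIMITER is defined by the preprocessor.
    PIPE_DELIMITER = '---DELIMITER---'

    stdout = (b'<svg>diagram 1</svg>---DELIMITER---'
              b'<svg>diagram 2</svg>---DELIMITER---')

    # The stream ends with a trailing delimiter, hence the [:-1].
    results = stdout.split(PIPE_DELIMITER.encode())[:-1]

    print(len(results))  # 2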
Example #10
    def _warning(self,
                 msg: str,
                 context: str = '',
                 error: Exception = None,
                 debug_msg: str = '') -> None:
        '''
        Log warning and print to user.

        If debug mode — print also context (if specified) and error (if specified).

        :param msg:       — message which should be logged;
        :param context:   — tag context obtained with the get_tag_context function. If
                            specified — will be logged. If debug = True it
                            will also go to STDOUT.
        :param error:     — exception which was caught before the warning. If specified —
                            its traceback will be added to the log (and debug output) message.
        :param debug_msg: — message to additionally print to stdout in debug mode.
        '''

        output_message = ''
        if self.current_filename:
            output_message += f'[{self.current_filename}] '
        output_message += msg + '\n'
        log_message = output_message
        if debug_msg:
            log_message += f'{debug_msg}\n'
        if context:
            log_message += f'Context:\n---\n{context}\n---\n'
        if error:
            tb_str = traceback.format_exception(type(error),
                                                error,
                                                error.__traceback__)
            log_message += '\n'.join(tb_str)
        if self.debug:
            output_message = log_message
        output(f'WARNING: {output_message}', self.quiet)
        self.logger.warning(log_message)
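In short, the log always receives the full message (with context, traceback, and the extra debug message if given), while the console gets only the short message unless debug mode is on. A condensed sketch of that branching, stripped of the class context:

    # Condensed illustration of the branching above; not a drop-in replacement.
    msg = 'image file not found'
    context = '<img src="missing.png">'
    debug = True

    output_message = msg + '\n'
    log_message = output_message + f'Context:\n---\n{context}\n---\n'

    if debug:
        output_message = log_message  # in debug mode the user sees the context too

    print(f'WARNING: {output_message}')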
Example #11
    def make(self, target: str) -> str:
        with spinner(f'Making {target}', self.logger, self.quiet, self.debug):
            try:
                img_dir = self._site_dir / 'img'
                shutil.rmtree(self._site_dir, ignore_errors=True)
                img_dir.mkdir(parents=True)
                source_path = self.working_dir / self._flat_src_file_name
                with open(source_path) as f:
                    source = f.read()

                processed_source = self._process_images(source, img_dir)
                with open(source_path, 'w') as f:
                    f.write(processed_source)

                try:
                    command = self._get_command(self._aglio_config,
                                                source_path,
                                                self._site_dir / "index.html")
                    self.logger.debug(f'Constructed command: {command}')

                    r = run(command,
                            shell=True,
                            check=True,
                            stdout=PIPE,
                            stderr=STDOUT)
                except CalledProcessError as e:
                    raise RuntimeError(e.output.decode('utf8',
                                                       errors='ignore'))
                command_output_decoded = r.stdout.decode('utf8',
                                                         errors='ignore')
                output(command_output_decoded, self.quiet)
                return self._site_dir

            except Exception as exception:
                err = traceback.format_exc()
                self.logger.debug(err)
                raise type(exception)(f'Build failed: {err}')
Example #12
    def apply(self):
        self.logger.info('Applying preprocessor')

        wireframe_files = []

        # fixme: hash .scene file names
        i = 0
        for markdown_file_path in self.working_dir.rglob('*.md'):
            with open(markdown_file_path, encoding='utf8') as markdown_file:
                markdown_content = markdown_file.read()

            wireframe_definitions = re.finditer(self.pattern, markdown_content)

            for wireframe_definition in wireframe_definitions:
                wireframe_file_path = self._scene_filepath(i)
                i += 1
                with open(wireframe_file_path, 'w',
                          encoding='utf8') as wireframe_file:
                    wireframe_file.write(
                        wireframe_definition.group('body').strip())
                wireframe_files.append(wireframe_file_path)

        self.logger.debug(f'Wireframe files: {wireframe_files}')

        if wireframe_files:
            self._cache_dir_path.mkdir(parents=True, exist_ok=True)

            output('Running ImagineUI CLI', self.quiet)

            input_param = " ".join(
                map(lambda x: f'--input={x}', wireframe_files))

            ver = self.options["version"]
            package_name = "imagineui-cli"
            if ver and ver != "latest":
                package_name = f'imagineui-cli@{self.options["version"]}'

            command = (f'npx {package_name} ' +
                       f'--outputDir={self._cache_dir_path} ' + input_param)

            command_output = run(command,
                                 shell=True,
                                 check=True,
                                 stdout=PIPE,
                                 stderr=STDOUT)

            if command_output.stdout:
                output(command_output.stdout.decode('utf8', errors='ignore'),
                       self.quiet)

            for markdown_file_path in self.working_dir.rglob('*.md'):
                with open(markdown_file_path,
                          encoding='utf8') as markdown_file:
                    markdown_content = markdown_file.read()

                processed_content = self.process_imagineui(markdown_content)

                if processed_content:
                    with open(markdown_file_path, 'w',
                              encoding='utf8') as markdown_file:
                        markdown_file.write(processed_content)

        self.logger.info('Preprocessor applied')
Example #13
    def process_showcommits(self, markdown_content: str, template: str,
                            markdown_file_path: Path, repo_path: Path,
                            repo_web_url: str) -> str:
        markdown_file_in_src_dir_path = (
            self.config['src_dir'] / markdown_file_path.relative_to(
                self.working_dir.resolve())).resolve()

        source_file_rel_path = markdown_file_in_src_dir_path.relative_to(
            self.project_path.resolve())
        source_file_abs_path = repo_path / source_file_rel_path

        self.logger.debug(
            f'Currently processed file path: {markdown_file_path}, ' +
            f'mapped to src dir: {markdown_file_in_src_dir_path}, ' +
            f'repo path: {repo_path}, ' +
            f'source file path relative to repo path: {source_file_rel_path}, '
            + f'source file absolute path: {source_file_abs_path}')

        if not source_file_abs_path.exists():
            warning_message = f'WARNING: file does not exist: {source_file_abs_path}'

            output(warning_message, self.quiet)

            self.logger.warning(warning_message)

            return markdown_content

        command = f'git log -m --follow --patch --date=iso -- "{source_file_abs_path}"'

        self.logger.debug(
            f'Running the command to get the file history: {command}')

        source_file_git_history = run(command,
                                      cwd=source_file_abs_path.parent,
                                      shell=True,
                                      check=True,
                                      stdout=PIPE,
                                      stderr=STDOUT)

        self.logger.debug('Processing the command output')

        source_file_git_history_decoded = source_file_git_history.stdout.decode(
            'utf8', errors='ignore').replace('\r\n', '\n')

        file_path_anchor = self._get_file_path_anchor(repo_web_url,
                                                      source_file_rel_path)
        foreword, commits_and_afterword = template.split('{{startcommits}}',
                                                         maxsplit=1)
        commits_template, afterword = commits_and_afterword.split(
            '{{endcommits}}', maxsplit=1)
        output_history = foreword

        for commit_summary in re.finditer(
                r'commit (?P<hash>[0-9a-f]{8})[0-9a-f]{32}\n' +
                r'((?!commit [0-9a-f]{40}).*\n|\n)*' +
                r'Author: (?P<author>.+)\n' + r'Date: +(?P<date>.+)\n\n' +
                r'(?P<message>((?!commit [0-9a-f]{40}|diff \-\-git .+).*\n|\n)+)'
                + r'(' +
                r'diff \-\-git .+\nindex .+\n\-{3} a\/.+\n\+{3} b\/.+\n' +
                r'(?P<diff>((?!commit [0-9a-f]{40}).+\n)+)' + r')',
                source_file_git_history_decoded):
            commit_author_match = re.match(
                r'^(?P<name>.+) \<(?P<email>\S+\@\S+)\>$',
                commit_summary.group('author'))

            if commit_author_match:
                commit_author = commit_author_match.group('name')
                commit_author_email = commit_author_match.group('email')

            else:
                commit_author = commit_summary.group('author')
                commit_author_email = ''

            commit_message = re.sub(r'^ {4}',
                                    '',
                                    commit_summary.group('message'),
                                    flags=re.MULTILINE)

            commit_diff = commit_summary.group('diff')

            if self.options['escape_html']:
                commit_message = self._escape_html(commit_message)
                commit_diff = self._escape_html(commit_diff)

            output_history += (
                commits_template
            ).replace('{{hash}}', commit_summary.group('hash')).replace(
                '{{url}}',
                f'{repo_web_url}/commit/{commit_summary.group("hash")}{file_path_anchor}'
            ).replace('{{author}}', commit_author).replace(
                '{{email}}', commit_author_email).replace(
                    '{{date}}',
                    self._format_date(commit_summary.group('date'))).replace(
                        '{{message}}',
                        commit_message).replace('{{diff}}', commit_diff)

        output_history += afterword

        if self.options['position'] == 'after_content':
            markdown_content += '\n\n' + output_history

        elif self.options['position'] == 'defined_by_tag':
            markdown_content = self.pattern.sub(output_history,
                                                markdown_content)

        return markdown_content
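The template passed to the method is expected to contain a {{startcommits}} ... {{endcommits}} block: everything before it becomes the foreword, the part between is repeated for every parsed commit, and everything after becomes the afterword. A tiny sketch of that split with an invented template:

    # Illustrative only: a minimal invented template.
    template = (
        '## File history\n'
        '{{startcommits}}'
        '* {{date}} {{author}}: {{message}}\n'
        '{{endcommits}}'
        '---\n'
    )

    foreword, commits_and_afterword = template.split('{{startcommits}}', maxsplit=1)
    commits_template, afterword = commits_and_afterword.split('{{endcommits}}', maxsplit=1)

    print(repr(foreword))          # '## File history\n'
    print(repr(commits_template))  # '* {{date}} {{author}}: {{message}}\n'
    print(repr(afterword))         # '---\n'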
Example #14
    def _archeme_generate(self, options: OptionValue, body: str or dict) -> str:
        self.logger.debug(f'Config concatenation mode: {self.options["config_concat"]}')

        if options.get('config_file', None):
            if self.options['config_concat']:
                self.logger.debug(f'Reading current config as a string from user-specified file: {options["config_file"]}')

                with open(Path(options['config_file']).resolve(), encoding='utf8') as config_file:
                    config = config_file.read()

            else:
                self.logger.debug(f'Loading current config as an object from user-specified file: {options["config_file"]}')

                with open(Path(options['config_file']).resolve()) as config_file:
                    config = load(config_file, Loader)

        else:
            self.logger.debug('Using default config for this diagram')

            config = self._config

        if self.options['config_concat']:
            if config:
                config += '\n'

            if isinstance(body, str):
                self.logger.debug(
                    'Diagram body is a string, loading the concatenation of config and body as an object'
                )

                diagram_definition = load(config + body, Loader)

            else:
                if config:
                    self.logger.debug(
                        'Diagram body is an object, dumping it into a string, concatenating with config, ' +
                        'loading concatenation result as an object'
                    )

                    body = dump(body, allow_unicode=True)
                    diagram_definition = load(config + body, Loader)

                else:
                    self.logger.debug(
                        'Diagram body is an object, no config specified, loading diagram body as an object'
                    )

                    diagram_definition = body

        else:
            if isinstance(body, str):
                self.logger.debug('Diagram body is a string, loading it as an object')

                body = load(body, Loader)

            self.logger.debug(f'Diagram definition without config: {body}')
            self.logger.debug('Merging diagram definition with config')

            diagram_definition = {**config, **body}

        self.logger.debug(f'Full diagram definition: {diagram_definition}')

        engine = diagram_definition.get('engine', 'dot')
        format = options.get('format', None) or self.options['format']

        if options.get('module_id', None):
            self.logger.debug(f'Remembering module {options["module_id"]}')

            if options['module_id'] in self._modules.keys():
                warning_message = f'WARNING: Duplicate module ID: {options["module_id"]}'
                output(warning_message, self.quiet)

                self.logger.warning(warning_message)

            if isinstance(body, str):
                body = load(body, Loader)

            self._modules[options['module_id']] = body

            self.logger.debug(f'Remembered module description: {self._modules[options["module_id"]]}')

            diagram_gv_file_path = Path(self._cache_dir_path / f'custom_{options["module_id"]}.gv').resolve()
            self._generate_gv_source(diagram_definition, diagram_gv_file_path)
            diagram_image_file_path = Path(self._cache_dir_path / f'custom_{options["module_id"]}.{format}').resolve()
            self._draw_diagram(engine, format, diagram_gv_file_path, diagram_image_file_path)

        else:
            diagram_hash = md5(str(diagram_definition).encode()).hexdigest()

            diagram_gv_file_path = Path(self._cache_dir_path / f'auto_{diagram_hash}.gv').resolve()

            if not diagram_gv_file_path.exists():
                self._generate_gv_source(diagram_definition, diagram_gv_file_path)

            else:
                self.logger.debug(f'Graphviz source found in cache: {diagram_gv_file_path}')

            diagram_image_file_path = Path(
                self._cache_dir_path / f'auto_{diagram_hash}.{format}'
            ).resolve()

            if not diagram_image_file_path.exists():
                self._draw_diagram(engine, format, diagram_gv_file_path, diagram_image_file_path)

            else:
                self.logger.debug(f'Drawn diagram found in cache: {diagram_image_file_path}')

        return f'![{options.get("caption", "")}]({diagram_image_file_path})'
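The two config_concat modes above differ only in how the per-diagram config and the diagram body are combined: either as one YAML string that is parsed in a single pass, or as two already-parsed mappings merged key by key (keys from the body win). A minimal sketch of both, with an invented config and body:

    # Illustrative only: config and body are invented; this sketch uses
    # yaml.safe_load instead of the load/Loader pair from the snippet above.
    from yaml import safe_load

    config_str = 'engine: dot\n'
    body_str = 'format: svg\nnodes:\n  - a\n  - b\n'

    # config_concat = true: concatenate the strings, then parse once
    concatenated = safe_load(config_str + '\n' + body_str)

    # config_concat = false: parse separately, then merge (body keys override config keys)
    merged = {**safe_load(config_str), **safe_load(body_str)}

    print(concatenated)  # {'engine': 'dot', 'format': 'svg', 'nodes': ['a', 'b']}
    print(merged)        # {'engine': 'dot', 'format': 'svg', 'nodes': ['a', 'b']}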
Example #15
    def _process_plantuml(self, options: CombinedOptions, body: str) -> str:
        '''Save PlantUML diagram body to .diag file, generate an image from it,
        and return the image ref.

        If the image for this diagram has already been generated, the existing version
        is used.

        :param options: Options extracted from the diagram definition
        :param body: PlantUML diagram body

        :returns: Image ref
        '''
        self.logger.debug(
            f'Processing PlantUML diagram, options: {options}, body: {body}')

        body_hash = md5(f'{body}'.encode())
        body_hash.update(str(self.options).encode())

        diagram_src_path = self._cache_path / 'plantuml' / f'{body_hash.hexdigest()}.diag'

        self.logger.debug(f'Diagram definition file path: {diagram_src_path}')

        # params = self.options.get('params', {})

        diagram_format = self._get_diagram_format(options)

        diagram_path = diagram_src_path.with_suffix(f'.{diagram_format}')

        self.logger.debug(f'Diagram image path: {diagram_path}')

        if diagram_path.exists():
            self.logger.debug('Diagram image found in cache')

            return self._get_result(diagram_path, options)

        diagram_src_path.parent.mkdir(parents=True, exist_ok=True)

        with open(diagram_src_path, 'w', encoding='utf8') as diagram_src_file:
            diagram_src_file.write(body)

            self.logger.debug(f'Diagram definition written into the file')

        try:
            command = self._get_command(options, diagram_src_path)

            self.logger.debug(f'Running the command: {command}')

            run(command, shell=True, check=True, stdout=PIPE, stderr=STDOUT)

            if diagram_path.exists():
                self.logger.debug(f'Diagram image saved')

            else:
                error_message = f'Processing of PlantUML diagram {diagram_src_path} failed: diagram image not saved'

                output(error_message, self.quiet)

                self.logger.error(error_message)

        except CalledProcessError as exception:
            self.logger.error(str(exception))

            if diagram_path.exists():
                error_diagram_path = diagram_path.parent / (
                    diagram_path.stem + '_error' + diagram_path.suffix)
                diagram_path.rename(error_diagram_path)

                error_message = (
                    f'Processing of PlantUML diagram {diagram_src_path} failed: '
                    f'{exception.output.decode()}')

                output(error_message, self.quiet)

                self.logger.error(error_message)

            else:
                raise RuntimeError(f'Failed: {exception.output.decode()}')

        return self._get_result(diagram_path, options)
Example #16
    def _process_diagram(self, kind: str, options: Dict[str, OptionValue],
                         body: str) -> str:
        '''Save diagram body to .diag file, generate an image from it with the appropriate backend,
        and return the image ref.

        If the image for this diagram has already been generated, the existing version
        is used.

        :param kind: Diagram kind: blockdiag, seqdiag, actdiag, or nwdiag
        :param options: Options extracted from the diagram definition
        :param body: Diagram body

        :returns: Image ref
        '''

        self.logger.debug(f'Processing diagram: {kind}, {options}, {body}')

        body_hash = md5(f'{body}'.encode())
        body_hash.update(str(self.options).encode())

        diagram_src_path = self._cache_path / kind / f'{body_hash.hexdigest()}.diag'

        params = self.options.get('params', {})

        diagram_format = {**params, **options}.get('format', 'png')

        diagram_path = diagram_src_path.with_suffix(f'.{diagram_format}')

        img_ref = f'![{options.get("caption", "")}]({diagram_path.absolute().as_posix()})'

        if diagram_path.exists():
            self.logger.debug(f'Diagram found in cache: {diagram_path}.')
            self.logger.debug(f'Replacing diagram definition with {img_ref}.')
            return img_ref

        diagram_src_path.parent.mkdir(parents=True, exist_ok=True)

        with open(diagram_src_path, 'w', encoding='utf8') as diagram_src_file:
            diagram_src_file.write(body)

        self.logger.debug(f'Saved diagram source to {diagram_src_path}.')

        try:
            command = self._get_command(kind, options, diagram_src_path)

            self.logger.debug(f'Running the command: {command}')

            run(command, shell=True, check=True, stdout=PIPE, stderr=STDOUT)

        except CalledProcessError as exception:
            self.logger.error(str(exception))

            if exception.output.decode().startswith('ERROR: '):
                error_message = f'Processing of diagram {diagram_src_path} failed: {exception.output.decode()}'

                output(error_message, self.quiet)

                self.logger.error(error_message)

            else:
                raise RuntimeError(f'Failed: {exception.output.decode()}')

        self.logger.debug(f'Replacing diagram definition with {img_ref}.')

        return img_ref
Example #17
    def apply(self):
        output('', self.quiet)  # empty line for better output

        self._process_tags_for_all_files(self._import_from_confluence)
        self.logger.info(f'Preprocessor applied')
    def _build(self):
        '''
        Main method. Builds confluence XHTML document from flat md source and
        uploads it into the confluence server.
        '''
        host = self.options['host']
        credentials = self._get_credentials(host)
        self.logger.debug(f'Got credentials for host {host}: login {credentials[0]}, '
                          f'password {credentials[1]}')
        self._connect(host,
                      *credentials,
                      self.options['verify_ssl'])
        result = []
        if 'id' in self.options or ('title' in self.options and 'space_key' in self.options):
            self.logger.debug('Uploading flat project to confluence')
            output(f'Building main project', self.quiet)

            flatten.Preprocessor(
                self.context,
                self.logger,
                self.quiet,
                self.debug,
                {'flat_src_file_name': self._flat_src_file_name,
                 'keep_sources': True}
            ).apply()

            unescapecode.Preprocessor(
                self.context,
                self.logger,
                self.quiet,
                self.debug,
                {}
            ).apply()

            shutil.move(self.working_dir / self._flat_src_file_name,
                        self._flat_src_file_path)

            with open(self._flat_src_file_path, encoding='utf8') as f:
                md_source = f.read()

            options = self._get_options(self.options)

            self.logger.debug(f'Options: {options}')
            uploader = PageUploader(
                self._flat_src_file_path,
                options,
                self.con,
                self._cachedir,
                self._debug_dir,
                self._attachments_dir,
                self.logger
            )
            try:
                result.append(uploader.upload(md_source))
            except HTTPError as e:
                # reraising HTTPError with meaningful message
                raise HTTPError(e.response.text, e.response)

        self.logger.debug('Searching metadata for confluence properties')

        chapters = self.config['chapters']
        meta = load_meta(chapters, self.working_dir)
        for section in meta.iter_sections():

            if not isinstance(section.data.get('confluence'), dict):
                self.logger.debug(f'No "confluence" section in {section}), skipping.')
                continue

            self.logger.debug(f'Found "confluence" section in {section}), preparing to build.')
            # getting common options from foliant.yml and merging them with meta fields
            uncommon_options = ['title', 'id', 'space_key', 'parent_id', 'attachments']
            common_options = {k: v for k, v in self.options.items()
                              if k not in uncommon_options}
            try:
                options = self._get_options(common_options,
                                            section.data['confluence'],
                                            fallback_title=section.title)
            except Exception as e:
                # output(f'Skipping section {section}, wrong params: {e}', self.quiet)
                self.logger.debug(f'Skipping section {section}, wrong params: {e}')
                continue
            self.logger.debug(f'Building {section.chapter.filename}: {section.title}')
            output(f'Building {section.title}', self.quiet)
            md_source = section.get_source()

            self.logger.debug(f'Options: {options}')
            original_file = self.project_path / section.chapter.filename
            uploader = PageUploader(
                original_file,
                options,
                self.con,
                self._cachedir,
                self._debug_dir,
                self._attachments_dir,
                self.logger
            )
            try:
                result.append(uploader.upload(md_source))
            except HTTPError as e:
                # reraising HTTPError with meaningful message
                raise HTTPError(e.response.text, e.response)
        if result:
            return '\n' + '\n'.join(result)
        else:
            return 'nothing to upload'