Code example #1
    def __critical_abort(self, code: int):
        print_idx(self.idx, "Collecting files in working directory")
        archive = shutil.make_archive(
            os.path.join(
                os.getcwd(), 'sltx-log-' + su.get_now() + '-' +
                su.sanitize_filename(self.file)), 'zip', self.__f("{out_dir}"))
        print_idx(
            self.idx, "  - Created: \"" + archive + "\" (" +
            os.path.basename(archive) + ")")

        # We have to force latexmk into thinking it has to re-run.
        # TODO: we should check if it is not the aux file, but we do this for now.
        # We do this because we need a change, and the percent sign itself gets consumed.
        # This ensures a different marker sequence every time; it will be deleted
        # on a successful run.
        if sg.configuration[sg.C_CLEAN_ON_FAILURE]:
            LOGGER.info('Running auto clean on failure.')
            sg.args.exclude_patterns = []
            sg.args.include_patterns = [self.__f("{out_dir}")]
            scmd.cleanse_caches()
        else:
            with open(self.__f("{out_dir}/{file_base_noext}.aux"), 'a') as f:
                f.write('%% sltx errormark' + str(random.random()) + " - " +
                        str(random.random()))
        # automatically analyze the created archive
        os.system('sltx analyze "' + archive + '"')
        raise rex.RecipeException(
            archive, 'Recipe for ' + str(self.idx) + ' failed with code: ' +
            str(code) + '. See logfile: \"' + archive + "\"")
Code example #2
def cmd_version():
    LOGGER.info("This is sltx, a simple latex helper-utility")
    LOGGER.info("Tex-Home: " + su.get_tex_home())
    LOGGER.info("Default config location: %s (present: %s)",
                su.get_default_conf(), str(os.path.isfile(su.get_default_conf())))
    LOGGER.info("Local config location: %s (present: %s)",
                su.get_local_conf(), str(os.path.isfile(su.get_local_conf())))
    LOGGER.info("Version: " + su.get_version())
Code example #3
def _install_dependencies_guard():
    """Cheap command line guard which will check for valid keys
    """
    if "target" not in sg.dependencies or "dependencies" not in sg.dependencies:
        LOGGER.error(
            "The dependency-file must supply a 'target' and an 'dependencies' key!"
        )
        sys.exit(1)
Code example #4
def cmd_compile():
    if sg.configuration[C_USE_DOCKER]:
        LOGGER.info("Using docker to compile (" +
                    sg.configuration[C_DOCKER_PROFILE] + ")")
        lithiecmd.compile()
    else:
        LOGGER.info("Docker was disabled, using local compilation.")
        cmd_raw_compile()
Code example #5
File: config.py Project: EagleoutIce/sltx
def assure_dir(name: str, target_path: str, create: bool):
    if not os.path.isdir(target_path):
        if create:
            LOGGER.info("> %s: %s not found. Creating...", name, target_path)
            os.makedirs(target_path)
        else:
            LOGGER.error("! Not allowed to create " + name + ". Exit")
            sys.exit(1)
Code example #6
def _file_guard():
    if hasattr(sg.args, 'files') and not sg.args.files:
        if sg.args.verbose:
            LOGGER.info("Set default files to: " +
                        str(sg.configuration[sg.C_DEFAULT_FILES]))
        sg.args.files = sg.configuration[sg.C_DEFAULT_FILES]
        if not sg.args.files:
            LOGGER.error("No files supplied")
            exit(1)
Code example #7
def f_grab_dirs(data: Tuple[str, str], target: str, path: str):
    # only choose the relative path
    if data[1] != target:
        dir_target = os.path.join(target, data[1])
    else:
        dir_target = os.path.join(data[1], os.path.relpath(data[0], path))
    Path(dir_target).parent.mkdir(parents=True, exist_ok=True)
    if sys.version_info >= (3, 8, 0):  # dirs_exist_ok is available
        shutil.copytree(data[0], dir_target, dirs_exist_ok=True)
    else:
        LOGGER.info("Python version below 3.8, falling back to distutils!")
        import distutils.dir_util as du
        du.copy_tree(data[0], dir_target)
Code example #8
File: docker_mg.py Project: EagleoutIce/sltx
    def update_img(self, profile: str):
        target = DOCKER_URL.format(**locals())
        LOGGER.info("Pulling image: %s, this may take a few minutes", target)
        for line in self.client.api.pull(target, tag='latest', stream=True):
            line = line.decode('utf-8')
            lines = line.split('\r\n')
            for subline in lines:
                if subline is None or subline.strip() == "":
                    continue
                # default values, overridden by the decoded JSON status line
                d = {'status': 'unknown', 'progress': '', 'id': ''}
                d = {**d, **json.loads(subline)}
                LOGGER.info("   {status} {progress} {id}".format(**d))
Code example #9
def cmd_dependency():
    if sg.args.deps is None or len(sg.args.deps) == 0:
        LOGGER.error("You must supply a dependency 'file'.")
        exit(1)

    for dep in sg.args.deps:
        # will extend the dict with 'new' ones
        # should work even better if sltx-source.yaml files are present in the targets
        sg.dependencies = load_dependencies_config(dep, sg.dependencies)

    assure_dirs()

    target = su.get_sltx_tex_home() if sg.args.local_path is None else sg.args.local_path
    install_dependencies(target=target)
Code example #10
def install_dependencies(target: str = su.get_sltx_tex_home()) -> None:
    """Download and unpack given dependencies to the given target directory

    Args:
        target (str, optional): The target folder. Defaults to su.get_sltx_tex_home().
    """
    _install_dependencies_guard()

    write_to_log("====Dependencies for:" + sg.dependencies["target"] + "\n")
    LOGGER.info("\nDependencies for: " + sg.dependencies["target"])
    LOGGER.info("Installing to: %s\n", target)

    _install_dependencies('0', sg.dependencies, target, first=True)
    _install_dependencies_cleanup()
Code example #11
def step_setup_sltx(document: dict) -> str:
    valid_profiles = ['tx-small', 'tx-default', 'tx-full']
    LOGGER.info(
        "Please enter the profile you want for sltx. Valid names are: " +
        str(valid_profiles))
    target_profile = prompt.get(
        "Profile [{default}]",
        default=sg.configuration[sg.C_DOCKER_PROFILE]).lower()

    setup_lines = "echo \"" + target_profile + "\" | sltx docker"

    add_step(document,
             "Setup and run sltx-install",
             _run=YamlBlock("pip install sltx\n" + setup_lines + "\n"))
    return target_profile
Code example #12
def cleanse_caches():
    # TODO: clean up .latexmkrc entries; not the whole file
    cache_dir = sg.configuration[sg.C_CACHE_DIR]
    if os.path.isdir(cache_dir):
        LOGGER.info("Cleaning all the caches... (" + cache_dir + ")")
        # avoids deleting the cache dir itself
        for root, folder_dirs, folder_files in os.walk(cache_dir):
            for name in folder_files:
                f = os.path.join(root, name)
                if not should_be_excluded(str(f)) and should_be_included(str(f)):
                    os.remove(f)
            for name in folder_dirs:
                f = os.path.join(root, name)
                if not should_be_excluded(str(f)) and should_be_included(str(f)):
                    shutil.rmtree(os.path.join(root, name))
    else:
        LOGGER.warning("No caches \"" + cache_dir +
                       "\" were found. Skipping...")
Code example #13
def cmd_raw_compile():
    # install possible deps
    for dep in sg.args.extra_dependencies:
        # will extend the dict with 'new' ones
        # should work even better if sltx-source.yaml files are present in the targets
        sg.dependencies = load_dependencies_config(dep, sg.dependencies)
    # I know a plain truthiness check is more Pythonic, but I prefer the explicit length check
    if len(sg.args.extra_dependencies) > 0:
        texmf_home = su.get_tex_home()
        LOGGER.info("Installing additional dependencies.")
        assure_dirs()
        install_dependencies(target=texmf_home)

    cooker.cook()
    # if no exception was raised, compilation succeeded
    if sg.configuration[sg.C_CLEANUP]:
        sg.args.exclude_patterns = []
        sg.args.cleanse_all = False
        sg.args.cleanse_cache = False
        cmd_cleanse()
Code example #14
def _install_dependencies_cleanup():
    """This will be run after the requested dependencies have been installed.
    """
    if sg.configuration[C_CLEANUP]:
        LOGGER.info("> Cleaning up the download directory, as set.")
        shutil.rmtree(sg.configuration[C_DOWNLOAD_DIR])

    LOGGER.info("Loaded: " + str(loaded))
    if not sg.configuration[C_RECURSIVE]:
        LOGGER.info("Recursion was disabled.")

    LOGGER.info("Dependency installation for %s completed.",
                sg.dependencies["target"])
Code example #15
def generate():
    """Generate a new github action
    """
    document = {}

    LOGGER.info("We will now generate a GitHub-Action workflow")
    target_path = prompt.get("Workflow-Path [{default}]",
                             default=".github/workflows/compile.yaml")
    assure_workflow_target(target_path)

    document['name'] = prompt.get("Workflow name")
    document['on'] = {'push': {'branches': ['master', 'main']}}
    document['jobs'] = {'build': {'runs-on': 'ubuntu-latest', 'steps': []}}

    step_checkout(document)
    step_setup_python(document)
    profile = step_setup_sltx(document)
    files = step_compile(document, profile)
    step_commit_and_push(document, files)

    LOGGER.info("Ok, I will write the file now...")
    with open(target_path, 'tw') as f:
        # Disable sorting so 'name' stays on top and insertion order is preserved
        stream = yaml.dump(document, default_flow_style=False, sort_keys=False)
        # minor formatting: add a blank line before the 'jobs:' section
        f.write(stream.replace('jobs:', '\njobs:'))
    LOGGER.info('File written to "' + target_path + '". Job completed.')
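
As an aside on the sort_keys=False choice above: PyYAML preserves the insertion order of the dict when sorting is disabled, so 'name' is emitted first. A small standalone sketch (the workflow name here is a made-up placeholder, not sltx output):

import yaml

document = {
    'name': 'compile',  # placeholder name
    'on': {'push': {'branches': ['master', 'main']}},
    'jobs': {'build': {'runs-on': 'ubuntu-latest', 'steps': []}},
}

# With the default sort_keys=True the keys would be emitted alphabetically instead.
print(yaml.dump(document, default_flow_style=False, sort_keys=False))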
Code example #16
def run(args=None):
    if args is None:
        args = sys.argv[1:]
    if len(args) < 1:
        parser.parse_args(['-h'])
    sg.args = parser.parse_args(args)

    if sg.args.log:
        log_set_file_handler()

    if not sg.args.command:
        parser.parse_args(['-h'])

    autoload_config(DEFAULT_CONFIG, 'default')
    autoload_config(LOCAL_CONFIG, 'local')

    _file_guard()

    if sg.args.threads < 0:
        if sg.args.verbose:
            LOGGER.info("Set default thread-count to: %d",
                        sg.configuration[sg.C_DEFAULT_THREADS])
        sg.args.threads = sg.configuration[sg.C_DEFAULT_THREADS]

    if sg.args.config is not None:
        sc.load_configuration(sg.args.config)

    cmd = None
    try:
        tc = sg.args.command.lower()
        key = tc if tc in sub_parser.cmds else retrieve_by_alias(tc)
        cmd = sub_parser.cmds[key]
    except KeyError:
        LOGGER.error("The supplied command: %s is unknown. Choose one of: %s",
                     sg.args.command, list(sub_parser.cmds.keys()))
        exit(1)

    cmd[0][0]()
Code example #17
def assure_workflow_target(path: str):
    if os.path.isfile(path):
        LOGGER.info("A workflow-file with the given name does already exist!")
        overwrite = prompt.get_bool(default=False)
        if not overwrite:
            LOGGER.error("Aborting...")
            exit(1)
    base_path = os.path.dirname(path)
    if base_path is None or base_path.strip() == "":
        return

    if not os.path.isdir(base_path):
        LOGGER.info("The directory %s does not exist. should it be created?",
                    base_path)
        create = prompt.get_bool(default=True)
        if not create:
            LOGGER.error("Aborting...")
            exit(1)
        os.makedirs(base_path)
Code example #18
def cmd_cleanse():
    sc.assure_dirs()
    # Delete all current log files
    # TODO: make this DRY; avoid specifying the log-file signature multiple times (see Recipe)
    LOGGER.info("Cleaning local logs...")
    clean_patterns = ['sltx-log-*.tar.gz',
                      'sltx-log-*.zip', 'sltx-drivers.log', '*.sltx-log']
    for clean_pattern in clean_patterns:
        for f in Path(".").glob(clean_pattern):
            if should_be_excluded(str(f)) or not should_be_included(str(f)):
                LOGGER.info("File " + str(f) + " excluded.")
            else:
                f.unlink()
    if sg.args.cleanse_all:
        texmf_home = su.get_sltx_tex_home()
        if os.path.isdir(texmf_home):
            LOGGER.error("Cleaning sltx-texmf-tree... (" + texmf_home + ")")
            shutil.rmtree(texmf_home)
        else:
            LOGGER.warning("The local sltx-texmf tree in \"" +
                           texmf_home + "\" was not found. Skipping...")

    if sg.args.cleanse_all or sg.args.cleanse_cache:
        cleanse_caches()
Code example #19
File: docker_mg.py Project: EagleoutIce/sltx
    def run_in_container(self, root: bool, profile: str, command: str):
        if profile.startswith(":"):
            target = profile[1:]
        else:
            target = DOCKER_URL.format(**locals())
        LOGGER.info("Launching container based on image: " + target)
        if root:
            LOGGER.warning(
                "Using root configuration. This might lead to permission errors in the future. "
                + target)
        # TODO: expand this in a better and safer way; currently only '~' is handled, which is fragile
        wd = sg.configuration[sg.C_WORKING_DIR].replace(
            os.path.expanduser('~'), '/root')
        LOGGER.info("  - Note: Working-Dir bound to: %s for %s", wd,
                    sg.configuration[sg.C_WORKING_DIR])
        LOGGER.info("  - Note: Main-Dir bound to: /root/data for " +
                    os.getcwd())
        volumes = {
            os.getcwd(): {
                'bind': '/root/data',
                'mode': 'rw'
            },
            sg.configuration[sg.C_WORKING_DIR]: {
                'bind': wd,
                'mode': 'rw'
            }
        }
        if sg.args.local_texmf:
            target_mount = "/usr/share/sltx/texmf"
            LOGGER.info("  - Note: Mounting local texmf-tree (%s) to %s",
                        su.get_tex_home(), target_mount)
            volumes[su.get_tex_home()] = {'bind': target_mount, 'mode': 'rw'}
        run = self.client.containers.run(
            target,
            command=command,
            detach=True,
            remove=False,
            working_dir='/root/data',
            tty=True,
            network_mode='bridge',
            user='******' if root else 'lithie-user',
            volumes=volumes)
        # We need a buffer in case a multibyte unicode sequence
        # is broken across chunk boundaries
        buffer = b''
        for l in run.logs(stdout=True,
                          stderr=True,
                          stream=True,
                          timestamps=True):
            try:
                LOGGER.info('\u0001' + (buffer + l).decode('utf-8'))
                buffer = b''
            except UnicodeDecodeError:
                buffer += l
        LOGGER.info("Container completed.")
        feedback = run.wait()
        run.remove()
        if 'StatusCode' in feedback and feedback['StatusCode'] != 0:
            code = feedback['StatusCode']
            LOGGER.error("Command failed with: " + str(code))
            sys.exit(code)
Code example #20
def autoload_config(path: str, name: str):
    if os.path.isfile(path):
        LOGGER.info("Auto-load %s-config: '%s'", name, path)
        sc.load_configuration(path)
Code example #21
def print_idx(idx: str, message: str, pre: str = ''):
    LOGGER.info("%s[ID %s] %s", pre, str(idx), message)
Code example #22
def _finish_runners(runners: list):
    futures.wait(runners)
    for runner in runners:
        if runner.result() is not None:
            LOGGER.info(runner.result())
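
A minimal usage sketch for _finish_runners, assuming the runners are concurrent.futures Future objects (as the futures.wait and result() calls imply); the worker function here is a made-up placeholder:

from concurrent import futures

def _work(idx: int) -> str:
    # hypothetical task; any callable returning a result (or None) works
    return "runner %d finished" % idx

with futures.ThreadPoolExecutor(max_workers=2) as pool:
    runners = [pool.submit(_work, i) for i in range(2)]
    _finish_runners(runners)  # waits for all futures, then logs each non-None result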
Code example #23
def run_bare_sltx(args: list):
    """Runs sltx
    """
    LOGGER.setLevel(logging.ERROR)
    heart.run(args)