Example #1
def create_executable_wrapper(task):
    """
    To incorporate all settings (environment, working paths, remote or local),
    we create an executable bash script which is called instead of the
    application and which sets up everything accordingly before doing the
    actual work.
    """
    shell = get_setting("shell", task=task, default="bash")
    executable_wrapper_content = [f"#!/bin/{shell}", "set -e"]

    # 1. First part: change into the working directory, if one is given
    working_dir = get_setting("working_dir",
                              task=task,
                              default=os.path.abspath(
                                  os.path.dirname(get_filename())))
    executable_wrapper_content.append(f"cd {working_dir}")

    executable_wrapper_content.append("echo 'Working in the folder:'; pwd")

    # 2. Second part of the executable wrapper: set up the environment.
    executable_wrapper_content.append("echo 'Setting up the environment'")
    # (a) If given, use the environment script
    env_setup_script = get_setting("env_script", task=task, default="")
    if env_setup_script:
        # The script will be called from the directory it lives in, so we have
        # to make sure the env_script is reachable from there (not from where
        # we are currently).
        if not os.path.isfile(map_folder(env_setup_script)):
            raise FileNotFoundError(
                f"Environment setup script {env_setup_script} does not exist.")
        executable_wrapper_content.append(f"source {env_setup_script}")

    # (b) Now override with any environment from the task or settings
    env_overrides = get_setting("env", task=task, default={})
    for key, value in env_overrides.items():
        # Escape embedded single quotes for the shell: each ' becomes '\''
        # (a raw string is needed so the backslash reaches the shell).
        value = value.replace("'", r"'\''")
        value = f"'{value}'"
        executable_wrapper_content.append(f"export {key}={value}")

    executable_wrapper_content.append("echo 'Current environment:'; env")

    # 3. Third part is to call the actual program
    command = " ".join(create_cmd_from_task(task))
    executable_wrapper_content.append("echo 'Will now execute the program'")
    executable_wrapper_content.append(f"exec {command}")

    # Now we can write the file
    executable_file_dir = get_task_file_dir(task)
    os.makedirs(executable_file_dir, exist_ok=True)

    executable_wrapper_path = os.path.join(executable_file_dir,
                                           "executable_wrapper.sh")

    with open(executable_wrapper_path, "w") as f:
        f.write("\n".join(executable_wrapper_content))

    # make wrapper executable
    st = os.stat(executable_wrapper_path)
    os.chmod(executable_wrapper_path, st.st_mode | stat.S_IEXEC)

    return executable_wrapper_path
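A minimal standalone sketch of the single-quote escaping used for the ``env`` overrides above (the variable names and the example value are made up for illustration):

# Escaping demo: every embedded ' becomes '\'' so that the value survives
# being wrapped in single quotes for the shell.
env_overrides = {"MY_VAR": "it's a 'test'"}
for key, value in env_overrides.items():
    escaped = value.replace("'", r"'\''")
    print(f"export {key}='{escaped}'")
# prints: export MY_VAR='it'\''s a '\''test'\'''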
Example #2
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        #: gbasf2 project name; must be a property/attribute, e.g. a luigi parameter.
        # Setting it via a ``settings.json`` file is not supported, to make sure users set unique project names.
        self.gbasf2_project_name = get_unique_project_name(self.task)

        #: Output file directory of the task to wrap with gbasf2, where we will
        # store the pickled basf2 path and the created steering file to execute
        # that path.
        task_file_dir = get_task_file_dir(self.task)
        os.makedirs(task_file_dir, exist_ok=True)
        #: file name in which the pickled basf2 path from ``self.task.create_path()`` will be stored
        self.pickle_file_path = os.path.join(task_file_dir,
                                             "serialized_basf2_path.pkl")
        #: file name for the steering file that executes the pickled path, which will be sent to the grid
        self.wrapper_file_path = os.path.join(task_file_dir,
                                              "steering_file_wrapper.py")

        self.log_file_dir = get_log_file_dir(self.task)
        os.makedirs(self.log_file_dir, exist_ok=True)

        self.dirac_user = get_dirac_user()
        #: Maximum number of times that each job in the project can be rescheduled until the project is declared as failed.
        self.max_retries = get_setting("gbasf2_max_retries",
                                       default=0,
                                       task=self.task)

        #: Stores the number of times each job has been rescheduled
        self.n_retries_by_job = Counter()

        #: Local storage for ``n_retries_by_job`` counter
        # so that it persists even if luigi process is killed and restarted.
        self.retries_file_path = os.path.join(self.log_file_dir,
                                              "n_retries_by_grid_job.json")
        if os.path.isfile(self.retries_file_path):
            with open(self.retries_file_path, "r") as retries_file:
                retries_from_file = json.load(retries_file)
                self.n_retries_by_job.update(retries_from_file)

        # Store the dictionary with n_jobs_by_status in an attribute to check
        # whether it changed, which is useful for printing the job status only
        # when it changes.
        self._n_jobs_by_status = ""

        # Store whether the job has already been successful in a variable
        # because there are actions we want to perform only the first time
        # that ``get_job_status`` returns a success.
        self._project_had_been_successful = False
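The constructor above only restores the retry counter from disk; a plausible write-back counterpart (the method name ``_store_retries`` is hypothetical, ``retries_file_path`` and ``n_retries_by_job`` come from the snippet above) could look like this:

    def _store_retries(self):
        # Hypothetical helper: persist the retry counter so that it survives
        # the luigi process being killed and restarted (mirrors the read-back
        # in ``__init__`` above). Assumes ``json`` is imported at module level.
        with open(self.retries_file_path, "w") as retries_file:
            json.dump(dict(self.n_retries_by_job), retries_file)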
Example #3
    def _create_htcondor_submit_file(self):
        submit_file_content = []

        # Specify where to write the logs
        log_file_dir = get_log_file_dir(self.task)
        os.makedirs(log_file_dir, exist_ok=True)

        stdout_log_file = os.path.abspath(os.path.join(log_file_dir, "stdout"))
        submit_file_content.append(f"output = {stdout_log_file}")

        stderr_log_file = os.path.abspath(os.path.join(log_file_dir, "stderr"))
        submit_file_content.append(f"error = {stderr_log_file}")

        job_log_file = os.path.abspath(os.path.join(log_file_dir, "job.log"))
        submit_file_content.append(f"log = {job_log_file}")

        # Specify the executable
        executable_file = create_executable_wrapper(self.task)
        submit_file_content.append(
            f"executable = {os.path.basename(executable_file)}")

        # Specify additional settings
        general_settings = get_setting("htcondor_settings", default=dict())
        try:
            general_settings.update(self.task.htcondor_settings)
        except AttributeError:
            pass

        transfer_files = get_setting("transfer_files",
                                     task=self.task,
                                     default=[])
        if transfer_files:
            working_dir = get_setting("working_dir",
                                      task=self.task,
                                      default="")
            if working_dir != ".":
                raise ValueError(
                    "If using transfer_files, the working_dir must be explicitly set to '.'"
                )

            general_settings.setdefault("should_transfer_files", "YES")
            general_settings.setdefault("when_to_transfer_output", "ON_EXIT")

            transfer_files = set(transfer_files)

            for transfer_file in transfer_files:
                if os.path.abspath(transfer_file) != transfer_file:
                    raise ValueError(
                        "You should only give absolute file names in transfer_files! "
                        f"{os.path.abspath(transfer_file)} != {transfer_file}")

            env_setup_script = get_setting("env_script",
                                           task=self.task,
                                           default="")
            if env_setup_script:
                # TODO: make sure to call it relatively
                transfer_files.add(os.path.abspath(env_setup_script))

            general_settings.setdefault("transfer_input_files",
                                        ",".join(transfer_files))

        job_name = get_setting("job_name", task=self.task, default=False)
        if job_name is not False:
            general_settings.setdefault("JobBatchName", job_name)

        for key, item in general_settings.items():
            submit_file_content.append(f"{key} = {item}")

        # Finally, queue one instance of the job
        submit_file_content.append("queue 1")

        # Now we can write the submit file
        output_path = get_task_file_dir(self.task)
        submit_file_path = os.path.join(output_path, "job.submit")

        os.makedirs(output_path, exist_ok=True)

        with open(submit_file_path, "w") as submit_file:
            submit_file.write("\n".join(submit_file_content))

        return submit_file_path
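A sketch of how the returned submit file might be handed to HTCondor (the method name ``_submit_job`` is hypothetical; running ``condor_submit`` from the submit file's directory matters because the executable is referenced by its base name above):

    def _submit_job(self):
        import subprocess  # module-level in real code

        submit_file_path = self._create_htcondor_submit_file()
        submit_file_dir, submit_file_name = os.path.split(submit_file_path)
        # condor_submit resolves relative paths in the submit description
        # against the directory it is invoked from, so run it from there.
        subprocess.run(["condor_submit", submit_file_name],
                       cwd=submit_file_dir,
                       check=True)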