Example #1
0
 def insert_db_job(self, row_idx, row):
     """Load a job element from the suite run database after restart.

     Args:
         row_idx: Index of the DB row; used only to emit a single
             "LOADING" log message on the first row.
         row: Database row tuple of
             (point_string, name, status, submit_num, time_submit,
             time_run, time_run_exit, batch_sys_name, batch_sys_job_id,
             platform_name).
     """
     if row_idx == 0:
         LOG.info("LOADING job data")
     (point_string, name, status, submit_num, time_submit, time_run,
      time_run_exit, batch_sys_name, batch_sys_job_id, platform_name) = row
     if status not in JOB_STATUS_SET:
         # Unknown job status in the DB row: skip it entirely.
         return
     t_id = f'{self.workflow_id}{ID_DELIM}{point_string}{ID_DELIM}{name}'
     j_id = f'{t_id}{ID_DELIM}{submit_num}'
     try:
         tdef = self.schd.config.get_taskdef(name)
         j_owner = self.schd.owner
         if platform_name:
             j_host = get_host_from_platform(get_platform(platform_name))
         else:
             # No platform recorded; fall back to the scheduler host.
             j_host = self.schd.host
         j_buf = PbJob(
             stamp=f'{j_id}@{time()}',
             id=j_id,
             submit_num=submit_num,
             state=status,
             task_proxy=t_id,
             submitted_time=time_submit,
             started_time=time_run,
             finished_time=time_run_exit,
             batch_sys_name=batch_sys_name,
             batch_sys_job_id=batch_sys_job_id,
             host=j_host,
             owner=j_owner,
             name=name,
             cycle_point=point_string,
         )
         # Add in log files.
         j_buf.job_log_dir = get_task_job_log(self.schd.suite, point_string,
                                              name, submit_num)
         # Apply any broadcast overrides to the task's runtime config
         # before reading 'extra log files' from it.
         overrides = self.schd.task_events_mgr.broadcast_mgr.get_broadcast(
             TaskID.get(name, point_string))
         if overrides:
             rtconfig = pdeepcopy(tdef.rtconfig)
             poverride(rtconfig, overrides, prepend=True)
         else:
             rtconfig = tdef.rtconfig
         j_buf.extra_logs.extend([
             os.path.expanduser(os.path.expandvars(log_file))
             for log_file in rtconfig['extra log files']
         ])
     except SuiteConfigError:
         # Lazy %-args: the message is only formatted if emitted.
         LOG.exception(
             'ignoring job %s from the suite run database\n'
             '(its task definition has probably been deleted).', j_id)
     except Exception:
         LOG.exception('could not load job %s', j_id)
     else:
         self.added[j_id] = j_buf
         self.task_jobs.setdefault(t_id, set()).add(j_id)
         self.updates_pending = True
Example #2
0
 def set_job_state(self, job_d, status):
     """Set job state."""
     point, name, sub_num = self.parse_job_item(job_d)
     j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
             f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
     # Only update jobs we know about, and only with a valid status.
     if status not in JOB_STATUS_SET:
         return
     if j_id not in self.pool and j_id not in self.added:
         return
     delta = PbJob(stamp=f'{j_id}@{time()}', state=status)
     self.updated.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
     self.updates_pending = True
Example #3
0
 def set_job_attr(self, job_d, attr_key, attr_val):
     """Set job attribute."""
     point, name, sub_num = self.parse_job_item(job_d)
     j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
             f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
     # Ignore jobs that are not in the pool or pending addition.
     if j_id not in self.pool and j_id not in self.added:
         return
     delta = PbJob(stamp=f'{j_id}@{time()}')
     setattr(delta, attr_key, attr_val)
     self.updated.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
     self.updates_pending = True
Example #4
0
 def add_job_msg(self, job_d, msg):
     """Add message to job."""
     point, name, sub_num = self.parse_job_item(job_d)
     j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
             f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
     # Check job existence before setting update (i.e orphan/simulation)
     if j_id not in self.pool and j_id not in self.added:
         return
     delta = PbJob(stamp=f'{j_id}@{time()}')
     delta.messages.append(msg)
     self.updated.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
     self.updates_pending = True
Example #5
0
 def set_job_state(self, job_d, status):
     """Set job state."""
     point, name, sub_num = self.parse_job_item(job_d)
     j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
             f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
     # Reject anything outside the known status set.
     if status not in JOB_STATUSES_ALL:
         LOG.error(f'Unable to set {j_id} state field to {status}')
         return
     delta = PbJob(stamp=f'{j_id}@{time()}', state=status)
     self.updates.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
     self.updates_pending = True
Example #6
0
 def set_job_attr(self, job_d, attr_key, attr_val):
     """Set job attribute."""
     stamp_time = time()
     point, name, sub_num = self.parse_job_item(job_d)
     j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
             f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
     try:
         # setattr raises TypeError/AttributeError on a bad key or value
         # type for the protobuf field.
         delta = PbJob(stamp=f'{j_id}@{stamp_time}')
         setattr(delta, attr_key, attr_val)
     except (TypeError, AttributeError) as exc:
         LOG.error(f'Unable to set {j_id} data field: {str(exc)}')
     else:
         self.updates.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
         self.updates_pending = True
Example #7
0
 def add_job_msg(self, job_d, msg):
     """Add message to job."""
     stamp_time = time()
     point, name, sub_num = self.parse_job_item(job_d)
     j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
             f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
     try:
         # append raises TypeError if msg is not a valid field value.
         delta = PbJob(stamp=f'{j_id}@{stamp_time}')
         delta.messages.append(msg)
     except TypeError as exc:
         LOG.error(f'Unable to append to {j_id} message field: {str(exc)}')
     else:
         self.updates.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
         self.updates_pending = True
Example #8
0
    def set_job_time(self, job_d, event_key, time_str=None):
        """Set an event time in the job pool object.

        Sets the ``<event_key>_time`` field of the job delta.
        (Note: only the '_time' field is written here, no
        '_time_string' companion field.)
        """
        point, name, sub_num = self.parse_job_item(job_d)
        j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
                f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
        # Ignore jobs that are not in the pool or pending addition.
        if j_id not in self.pool and j_id not in self.added:
            return
        delta = PbJob(stamp=f'{j_id}@{time()}')
        setattr(delta, f'{event_key}_time', time_str)
        self.updated.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
        self.updates_pending = True
Example #9
0
    def set_job_time(self, job_d, event_key, time_str=None):
        """Set an event time in the job pool object.

        Sets the ``<event_key>_time`` field of the job delta.
        (Note: only the '_time' field is written here, no
        '_time_string' companion field.)
        """
        stamp_time = time()
        point, name, sub_num = self.parse_job_item(job_d)
        j_id = (f'{self.workflow_id}{ID_DELIM}{point}'
                f'{ID_DELIM}{name}{ID_DELIM}{sub_num}')
        time_attr = f'{event_key}_time'
        try:
            # setattr raises TypeError/AttributeError for an unknown
            # event_key or a bad time_str type.
            delta = PbJob(stamp=f'{j_id}@{stamp_time}')
            setattr(delta, time_attr, time_str)
        except (TypeError, AttributeError) as exc:
            LOG.error(f'Unable to set {j_id} {time_attr} field: {str(exc)}')
        else:
            self.updates.setdefault(j_id, PbJob(id=j_id)).MergeFrom(delta)
            self.updates_pending = True
Example #10
0
    def insert_job(self, job_conf):
        """Insert job into pool."""
        sub_num = job_conf['submit_num']
        name, point_string = TaskID.split(job_conf['task_id'])
        t_id = f'{self.workflow_id}{ID_DELIM}{point_string}{ID_DELIM}{name}'
        j_id = f'{t_id}{ID_DELIM}{sub_num}'
        # Build the job element; mapping-valued config entries are
        # serialised to JSON strings.
        j_buf = PbJob(
            stamp=f'{j_id}@{time()}',
            id=j_id,
            submit_num=sub_num,
            state=JOB_STATUSES_ALL[0],
            task_proxy=t_id,
            batch_sys_name=job_conf['batch_system_name'],
            env_script=job_conf['env-script'],
            err_script=job_conf['err-script'],
            exit_script=job_conf['exit-script'],
            execution_time_limit=job_conf['execution_time_limit'],
            host=job_conf['platform']['name'],
            init_script=job_conf['init-script'],
            owner=job_conf['owner'],
            post_script=job_conf['post-script'],
            pre_script=job_conf['pre-script'],
            script=job_conf['script'],
            work_sub_dir=job_conf['work_d'],
            name=name,
            cycle_point=point_string,
            batch_sys_conf=json.dumps(job_conf['batch_system_conf']),
            directives=json.dumps(job_conf['directives']),
            environment=json.dumps(job_conf['environment']),
            param_var=json.dumps(job_conf['param_var']),
        )

        # Add in log files.
        j_buf.job_log_dir = get_task_job_log(
            self.schd.suite, point_string, name, sub_num)
        j_buf.extra_logs.extend(job_conf['logfiles'])

        self.added[j_id] = j_buf
        self.task_jobs.setdefault(t_id, set()).add(j_id)
        self.updates_pending = True
Example #11
0
 def insert_job(self, job_conf):
     """Insert job into pool."""
     stamp_time = time()
     sub_num = job_conf['submit_num']
     name, point_string = TaskID.split(job_conf['task_id'])
     t_id = f'{self.workflow_id}{ID_DELIM}{point_string}{ID_DELIM}{name}'
     j_id = f'{t_id}{ID_DELIM}{sub_num}'
     j_buf = PbJob(
         stamp=f'{j_id}@{stamp_time}',
         id=j_id,
         submit_num=sub_num,
         state=JOB_STATUSES_ALL[0],
         task_proxy=t_id,
         batch_sys_name=job_conf['batch_system_name'],
         env_script=job_conf['env-script'],
         err_script=job_conf['err-script'],
         exit_script=job_conf['exit-script'],
         execution_time_limit=job_conf['execution_time_limit'],
         host=job_conf['host'],
         init_script=job_conf['init-script'],
         job_log_dir=job_conf['job_log_dir'],
         owner=job_conf['owner'],
         post_script=job_conf['post-script'],
         pre_script=job_conf['pre-script'],
         script=job_conf['script'],
         work_sub_dir=job_conf['work_d'],
         name=name,
         cycle_point=point_string,
     )

     def as_pairs(mapping):
         # Flatten a mapping into 'key=value' strings for repeated fields.
         return [f'{key}={val}' for key, val in mapping.items()]

     j_buf.batch_sys_conf.extend(as_pairs(job_conf['batch_system_conf']))
     j_buf.directives.extend(as_pairs(job_conf['directives']))
     j_buf.environment.extend(as_pairs(job_conf['environment']))
     j_buf.param_env_tmpl.extend(as_pairs(job_conf['param_env_tmpl']))
     j_buf.param_var.extend(as_pairs(job_conf['param_var']))
     j_buf.extra_logs.extend(job_conf['logfiles'])
     self.updates[j_id] = j_buf
     self.task_jobs.setdefault(t_id, set()).add(j_id)
     self.updates_pending = True