def full_snapshot() -> Snapshot:
    real = Realization(
        status=REALIZATION_STATE_UNKNOWN,
        active=True,
        steps={
            "0": Step(
                status="",
                jobs={
                    "0": Job(
                        start_time=dt.now(),
                        end_time=dt.now(),
                        name="poly_eval",
                        status=JOB_STATE_START,
                        error="error",
                        stdout="std_out_file",
                        stderr="std_err_file",
                        data={
                            CURRENT_MEMORY_USAGE: "123",
                            MAX_MEMORY_USAGE: "312",
                        },
                    ),
                    "1": Job(
                        start_time=dt.now(),
                        end_time=dt.now(),
                        name="poly_postval",
                        status=JOB_STATE_START,
                        error="error",
                        stdout="std_out_file",
                        stderr="std_err_file",
                        data={
                            CURRENT_MEMORY_USAGE: "123",
                            MAX_MEMORY_USAGE: "312",
                        },
                    ),
                },
            )
        },
    )
    snapshot = SnapshotDict(
        status=ENSEMBLE_STATE_STARTED,
        reals={},
    )
    for i in range(0, 100):
        snapshot.reals[str(i)] = copy.deepcopy(real)

    return Snapshot(snapshot.dict())
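# Minimal usage sketch, not part of the original module: it assumes
# `full_snapshot` is registered as a pytest fixture and reuses only accessors
# that already appear in this file (see test_snapshot_merge below).
def test_full_snapshot_layout(full_snapshot):
    # Each of the 100 realizations starts with step "0" holding "poly_eval"
    # in its initial job state.
    assert full_snapshot.get_status() == ENSEMBLE_STATE_STARTED
    job = full_snapshot.get_job(real_id="0", step_id="0", job_id="0")
    assert job.name == "poly_eval"
    assert job.status == JOB_STATE_START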
def test_changes(full_snapshot):
    source_model = SnapshotModel()

    model = JobListProxyModel(None, 0, 0, 0, 0)
    model.setSourceModel(source_model)

    reporting_mode = (
        qt_api.QtTest.QAbstractItemModelTester.FailureReportingMode.Warning
    )
    tester = qt_api.QtTest.QAbstractItemModelTester(  # noqa, prevent GC
        model, reporting_mode
    )

    source_model._add_snapshot(SnapshotModel.prerender(full_snapshot), 0)

    assert (
        model.index(0, _id_to_col(ids.STATUS), QModelIndex()).data() == JOB_STATE_START
    )

    partial = PartialSnapshot(full_snapshot)
    start_time = datetime.datetime(year=2020, month=10, day=27, hour=12)
    end_time = datetime.datetime(year=2020, month=10, day=28, hour=13)
    partial.update_job(
        "0",
        "0",
        "0",
        job=Job(
            status=JOB_STATE_FAILURE,
            start_time=start_time,
            end_time=end_time,
        ),
    )
    source_model._add_partial_snapshot(SnapshotModel.prerender(partial), 0)

    assert (
        model.index(0, _id_to_col(DURATION), QModelIndex()).data() == "1 day, 1:00:00"
    )
    assert (
        model.index(0, _id_to_col(ids.STATUS), QModelIndex()).data()
        == JOB_STATE_FAILURE
    )
def _create_snapshot(self):
    reals = {}
    for real in self.get_active_reals():
        reals[str(real.get_iens())] = Realization(
            active=True,
            status=state.REALIZATION_STATE_WAITING,
        )
        for step in real.get_steps():
            reals[str(real.get_iens())].steps[str(step.get_id())] = Step(
                status=state.STEP_STATE_UNKNOWN
            )
            for job in step.get_jobs():
                reals[str(real.get_iens())].steps[str(step.get_id())].jobs[
                    str(job.get_id())
                ] = Job(
                    status=state.JOB_STATE_START,
                    data={},
                    name=job.get_name(),
                )
    top = SnapshotDict(
        reals=reals,
        status=state.ENSEMBLE_STATE_UNKNOWN,
        metadata=self.get_metadata(),
    )

    return Snapshot(top.dict())
def test_realization_job_hint(full_snapshot):
    model = SnapshotModel()
    model._add_snapshot(full_snapshot, 0)

    partial = PartialSnapshot(full_snapshot)
    partial.update_job("0", "0", "0", Job(status=JOB_STATE_RUNNING))
    model._add_partial_snapshot(partial, 0)

    first_real = model.index(0, 0, model.index(0, 0))
    colors = model.data(first_real, RealJobColorHint)
    assert colors[0].name() == QColor(*COLOR_RUNNING).name()
    assert colors[1].name() == QColor(*COLOR_PENDING).name()
def test_duration(mock_datetime, timezone, full_snapshot):
    source_model = SnapshotModel()

    model = JobListProxyModel(None, 0, 0, 0, 0)
    model.setSourceModel(source_model)

    reporting_mode = (
        qt_api.QtTest.QAbstractItemModelTester.FailureReportingMode.Warning
    )
    tester = qt_api.QtTest.QAbstractItemModelTester(  # noqa, prevent GC
        model, reporting_mode
    )

    source_model._add_snapshot(SnapshotModel.prerender(full_snapshot), 0)

    assert (
        model.index(0, _id_to_col(ids.STATUS), QModelIndex()).data() == JOB_STATE_START
    )

    partial = PartialSnapshot(full_snapshot)
    start_time = datetime.datetime(
        year=2020, month=10, day=27, hour=12, tzinfo=timezone
    )
    # mock only datetime.datetime.now()
    mock_datetime.datetime.now.return_value = datetime.datetime(
        year=2020,
        month=10,
        day=28,
        hour=13,
        minute=12,
        second=11,
        microsecond=5,  # Note that microseconds are intended to be removed
        tzinfo=timezone,
    )
    partial.update_job(
        "0",
        "0",
        "2",
        job=Job(
            status=JOB_STATE_RUNNING,
            start_time=start_time,
        ),
    )
    source_model._add_partial_snapshot(SnapshotModel.prerender(partial), 0)

    assert (
        model.index(2, _id_to_col(DURATION), QModelIndex()).data() == "1 day, 1:12:11"
    )
    mock_datetime.datetime.now.assert_called_once_with(timezone)
def test_no_cross_talk(full_snapshot):
    source_model = SnapshotModel()

    model = JobListProxyModel(None, 0, 0, 0, 0)
    model.setSourceModel(source_model)

    reporting_mode = (
        qt_api.QtTest.QAbstractItemModelTester.FailureReportingMode.Warning
    )
    qt_api.QtTest.QAbstractItemModelTester(model, reporting_mode)  # noqa, prevent GC

    source_model._add_snapshot(SnapshotModel.prerender(full_snapshot), 0)
    source_model._add_snapshot(SnapshotModel.prerender(full_snapshot), 1)

    # Test that changes to iter=1 do not bleed into iter=0
    partial = PartialSnapshot(full_snapshot)
    partial.update_job("0", "0", "0", job=Job(status=JOB_STATE_FAILURE))
    source_model._add_partial_snapshot(SnapshotModel.prerender(partial), 1)
    assert (
        model.index(0, _id_to_col(ids.STATUS), QModelIndex()).data() == JOB_STATE_START
    )

    model.set_step(1, 0, 0, 0)
    assert (
        model.index(0, _id_to_col(ids.STATUS), QModelIndex()).data()
        == JOB_STATE_FAILURE
    )
def test_snapshot_merge(snapshot):
    update_event = PartialSnapshot(snapshot)
    update_event.update_status(status="running")

    snapshot.merge_event(update_event)

    assert snapshot.get_status() == "running"

    update_event = PartialSnapshot(snapshot)
    update_event.update_job(
        real_id="1",
        step_id="0",
        job_id="0",
        job=Job(
            status="Finished",
            start_time=datetime(year=2020, month=10, day=27),
            end_time=datetime(year=2020, month=10, day=28),
            data={"memory": 1000},
        ),
    )
    update_event.update_job(
        real_id="1",
        step_id="0",
        job_id="1",
        job=Job(
            status="Running",
            start_time=datetime(year=2020, month=10, day=27),
        ),
    )
    update_event.update_job(
        real_id="9",
        step_id="0",
        job_id="0",
        job=Job(
            status="Running",
            start_time=datetime(year=2020, month=10, day=27),
        ),
    )

    snapshot.merge_event(update_event)

    assert snapshot.get_status() == "running"

    assert snapshot.get_job(real_id="1", step_id="0", job_id="0") == Job(
        status="Finished",
        start_time=datetime(year=2020, month=10, day=27),
        end_time=datetime(year=2020, month=10, day=28),
        data={"memory": 1000},
        error=None,
        name="job0",
        stderr=None,
        stdout=None,
    )
    assert snapshot.get_job(real_id="1", step_id="0", job_id="1") == Job(
        status="Running",
        start_time=datetime(year=2020, month=10, day=27),
        end_time=None,
        data={},
        error=None,
        name="job1",
        stderr=None,
        stdout=None,
    )

    assert snapshot.get_job(real_id="9", step_id="0", job_id="0").status == "Running"
    assert snapshot.get_job(real_id="9", step_id="0", job_id="0") == Job(
        status="Running",
        start_time=datetime(year=2020, month=10, day=27),
        end_time=None,
        data={},
        error=None,
        name="job0",
        stderr=None,
        stdout=None,
    )
def partial_snapshot(snapshot) -> PartialSnapshot:
    partial = PartialSnapshot(snapshot)
    partial.update_real("0", Realization(status=JOB_STATE_FINISHED))
    partial.update_job("0", "0", "0", Job(status=JOB_STATE_FINISHED))
    return partial
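# Hedged sketch, not part of the original module: it shows how the two
# fixtures are expected to interact, reusing only the merge_event/get_job
# calls from test_snapshot_merge above.
def test_partial_snapshot_merges_into_source(snapshot, partial_snapshot):
    snapshot.merge_event(partial_snapshot)
    assert (
        snapshot.get_job(real_id="0", step_id="0", job_id="0").status
        == JOB_STATE_FINISHED
    )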
def _create_partial_snapshot(
    self,
    run_context: ErtRunContext,
    detailed_progress: typing.Tuple[typing.Dict, int],
    iter_: int,
) -> typing.Optional[PartialSnapshot]:
    """Create a PartialSnapshot, or None if the sources of data were
    destroyed or had not been created yet. Both run_context and
    detailed_progress need to be aligned with the stars if job status etc.
    is to be produced. If queue_snapshot is set, the differ will not be
    used to calculate changes."""
    queue = self._iter_queue.get(iter_, None)
    if queue is None:
        logger.debug(f"no queue for {iter_}, no partial returned")
        return None
    queue_snapshot = queue.snapshot()

    snapshot = self._iter_snapshot.get(iter_, None)
    if snapshot is None:
        logger.debug(f"no snapshot for {iter_}, no partial returned")
        return None
    partial = PartialSnapshot(snapshot)

    if queue_snapshot is not None:
        for iens, change in queue_snapshot.items():
            change_enum = JobStatusType.from_string(change)
            partial.update_real(
                str(iens),
                Realization(status=queue_status_to_real_state(change_enum)),
            )

    iter_to_progress, progress_iter = detailed_progress
    if not iter_to_progress:
        logger.debug(f"partial: no detailed progress for iter:{iter_}")
        return partial
    if iter_ != progress_iter:
        logger.debug(
            f"partial: iter_to_progress iter ({progress_iter}) differed from run_context ({iter_})"
        )

    for iens, _ in _enumerate_run_context(run_context):
        if not _is_iens_active(iens, run_context):
            continue
        progress = iter_to_progress[iter_].get(iens, None)
        if not progress:
            continue
        jobs = progress[0]
        for idx, fm in enumerate(jobs):
            partial.update_job(
                str(iens),  # real_id
                "0",
                str(idx),
                Job(
                    status=_map_job_state(fm.status),
                    start_time=fm.start_time,
                    end_time=fm.end_time,
                    data={
                        CURRENT_MEMORY_USAGE: fm.current_memory_usage,
                        MAX_MEMORY_USAGE: fm.max_memory_usage,
                    },
                    stdout=fm.std_out_file,
                    stderr=fm.std_err_file,
                    error=fm.error,
                ),
            )

    return partial
def _create_snapshot_dict(
    self,
    run_context: ErtRunContext,
    detailed_progress: typing.Tuple[typing.Dict, int],
    iter_: int,
) -> typing.Optional[SnapshotDict]:
    """Create a snapshot of a run_context and detailed_progress.
    detailed_progress is expected to be a tuple of a realization_progress
    dict and an iteration number. iter_ represents the current assimilation
    cycle."""
    self._set_iter_queue(iter_, self._model._job_queue)

    snapshot = SnapshotDict(
        status=ENSEMBLE_STATE_STARTED,
        reals={},
        metadata={"iter": iter_},
    )

    forward_model = self._model.get_forward_model()

    iter_to_progress, progress_iter = detailed_progress
    if progress_iter != iter_:
        logger.debug(
            f"run_context iter ({iter_}) and detailed_progress ({progress_iter}) iter differed"
        )

    if iter_ in self._iter_queue and self._iter_queue[iter_] is not None:
        queue_snapshot = self._iter_queue[iter_].snapshot()
    else:
        queue_snapshot = None

    enumerated = 0
    for iens, run_arg in _enumerate_run_context(run_context):
        real_id = str(iens)
        enumerated += 1
        if not _is_iens_active(iens, run_context):
            continue

        status = JobStatusType.JOB_QUEUE_UNKNOWN
        if queue_snapshot is not None and iens in queue_snapshot:
            status = JobStatusType.from_string(queue_snapshot[iens])

        snapshot.reals[real_id] = Realization(
            status=queue_status_to_real_state(status),
            active=True,
            steps={},
        )

        step = Step(status="", jobs={})
        snapshot.reals[real_id].steps["0"] = step

        for index in range(0, len(forward_model)):
            ext_job = forward_model.iget_job(index)
            step.jobs[str(index)] = Job(
                name=ext_job.name(), status=JOB_STATE_START, data={}
            )

        progress = iter_to_progress[iter_].get(iens, None)
        if not progress:
            continue

        jobs = progress[0]
        for idx, fm in enumerate(jobs):
            job = step.jobs[str(idx)]
            job.start_time = fm.start_time
            job.end_time = fm.end_time
            job.name = fm.name
            job.status = _map_job_state(fm.status)
            job.error = fm.error
            job.stdout = fm.std_out_file
            job.stderr = fm.std_err_file
            job.data = {
                CURRENT_MEMORY_USAGE: fm.current_memory_usage,
                MAX_MEMORY_USAGE: fm.max_memory_usage,
            }

    if enumerated == 0:
        logger.debug("enumerated 0 items from run_context, it is gone")
        return None

    return snapshot
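# Illustration only (an assumption, not code from this module): the
# detailed_progress argument consumed by the two methods above is a
# (iter_to_progress, progress_iter) pair, where iter_to_progress maps an
# iteration number to {iens: (jobs, ...)} and every entry in `jobs` exposes
# the attributes the loops read. `_StubForwardModelStatus` is a hypothetical
# stand-in for whatever status object the queue really provides.
import datetime
from dataclasses import dataclass
from typing import Optional


@dataclass
class _StubForwardModelStatus:
    name: str
    status: str
    start_time: Optional[datetime.datetime] = None
    end_time: Optional[datetime.datetime] = None
    error: Optional[str] = None
    std_out_file: Optional[str] = None
    std_err_file: Optional[str] = None
    current_memory_usage: Optional[int] = None
    max_memory_usage: Optional[int] = None


# iter 0, realization 0, a single running forward-model job:
detailed_progress = (
    {0: {0: ([_StubForwardModelStatus(name="poly_eval", status="Running")],)}},
    0,
)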