def test_new_to_old(some_paramspecbases):
    """
    new_to_old must translate new-style 'dependencies' links into
    depends_on and 'inferences' links into inferred_from on the
    resulting old-style ParamSpecs, keeping standalones as plain specs.
    """
    base1, base2, base3, base4 = some_paramspecbases

    def _plain(base):
        # An old-style spec carrying no dependency/inference links.
        return ParamSpec(name=base.name, paramtype=base.type,
                         label=base.label, unit=base.unit)

    # Case 1: a 'dependencies' link becomes depends_on
    idps_new = InterDependencies_(dependencies={base1: (base2, base3)},
                                  standalones=(base4,))
    dependent = ParamSpec(name=base1.name, paramtype=base1.type,
                          label=base1.label, unit=base1.unit,
                          depends_on=[base2.name, base3.name])
    idps_old_expected = InterDependencies(_plain(base2), _plain(base3),
                                          dependent, _plain(base4))

    assert new_to_old(idps_new) == idps_old_expected

    # Case 2: an 'inferences' link becomes inferred_from
    idps_new = InterDependencies_(inferences={base1: (base2, base3)},
                                  standalones=(base4,))
    inferred = ParamSpec(name=base1.name, paramtype=base1.type,
                         label=base1.label, unit=base1.unit,
                         inferred_from=[base2.name, base3.name])
    idps_old_expected = InterDependencies(_plain(base2), _plain(base3),
                                          inferred, _plain(base4))

    assert new_to_old(idps_new) == idps_old_expected
def _extract_single_dataset_into_db(dataset: DataSet,
                                    target_conn: ConnectionPlus,
                                    target_exp_id: int) -> None:
    """
    NB: This function should only be called from within
    :meth:extract_runs_into_db

    Insert the given dataset into the specified database file as the latest
    run.

    Trying to insert a run already in the DB is a NOOP.

    Args:
        dataset: A dataset representing the run to be copied
        target_conn: connection to the DB. Must be atomically guarded
        target_exp_id: The exp_id of the (target DB) experiment in which to
            insert the run
    """

    # Only complete runs may be copied; an in-progress run would be
    # copied in an inconsistent state.
    if not dataset.completed:
        raise ValueError('Dataset not completed. An incomplete dataset '
                         'can not be copied. The incomplete dataset has '
                         f'GUID: {dataset.guid} and run_id: {dataset.run_id}')

    source_conn = dataset.conn

    # NOOP if a run with this GUID already exists in the target DB
    # (get_runid_from_guid returns -1 when the GUID is not found)
    run_id = get_runid_from_guid(target_conn, dataset.guid)
    if run_id != -1:
        return

    if dataset.parameters is not None:
        param_names = dataset.parameters.split(',')
    else:
        param_names = []
    # Convert the new-style interdependencies to old-style ParamSpecs and
    # re-order them to match the dataset's 'parameters' column ordering
    parspecs_dict = {
        p.name: p for p in new_to_old(dataset._interdeps).paramspecs
    }
    parspecs = [parspecs_dict[p] for p in param_names]

    metadata = dataset.metadata
    snapshot_raw = dataset.snapshot_raw

    # Create the run row in the target DB, then copy the actual results
    _, target_run_id, target_table_name = create_run(target_conn,
                                                     target_exp_id,
                                                     name=dataset.name,
                                                     guid=dataset.guid,
                                                     parameters=parspecs,
                                                     metadata=metadata)
    _populate_results_table(source_conn,
                            target_conn,
                            dataset.table_name,
                            target_table_name)
    mark_run_complete(target_conn, target_run_id)
    # Preserve the original run's timestamps rather than the copy time
    _rewrite_timestamps(target_conn,
                        target_run_id,
                        dataset.run_timestamp_raw,
                        dataset.completed_timestamp_raw)

    if snapshot_raw is not None:
        add_meta_data(target_conn, target_run_id, {'snapshot': snapshot_raw})
def _perform_start_actions(self) -> None:
    """
    Perform the actions that must take place once the run has been started
    """
    # Register each parameter (converted to its old-style form) with the
    # results table of this run
    for spec in new_to_old(self._interdeps).paramspecs:
        add_parameter(self.conn, self.table_name, spec)

    desc_json = self.description.to_json()
    update_run_description(self.conn, self.run_id, desc_json)
    set_run_timestamp(self.conn, self.run_id)
def serialize(self) -> Dict[str, Any]:
    """
    Serialize this object into a dictionary
    """
    old_interdeps: InterDependencies
    if self._old_style_deps:
        # Already in the old format; no conversion needed
        old_interdeps = cast(InterDependencies, self.interdeps)
    else:
        # New-style deps must be converted to the old format first
        new_interdeps = cast(InterDependencies_, self.interdeps)
        old_interdeps = new_to_old(new_interdeps)

    return {'interdependencies': old_interdeps.serialize()}
def test_old_to_new_and_back(some_paramspecs):
    """A round trip old -> new -> old must reproduce the original object."""
    idps_old = InterDependencies(*some_paramspecs[1].values())
    roundtripped = new_to_old(old_to_new(idps_old))

    assert roundtripped == idps_old