Exemple #1
0
 def setUpClass(cls):
     """Load the three reference .chrom projects once for the whole class."""
     filenames = [
         "demo_with_optimizerv7.chrom",
         "demo_with_general_optimizerv9.chrom",
         "demo_with_general_optimizerv10.chrom",
     ]
     tasks = []
     for fname in filenames:
         task, _ = load_object(io_data_path(fname))
         tasks.append(task)
     cls.ref_task, cls.ref_task2, cls.ref_task3 = tasks
Exemple #2
0
 def test_load_same_file_twice(self):
     """Loading one file twice yields equal-but-distinct object graphs."""
     path = io_data_path("demo_with_optimizerv8.chrom")
     first, _ = load_object(path)
     second, _ = load_object(path)
     # Same content to near machine precision, but no shared identity:
     assert_has_traits_almost_equal(first, second, eps=1e-15)
     self.assertIsNot(first, second)
     self.assertIsNot(first.project.study, second.project.study)
def assert_round_trip_to_file_identical(filepath, obj, ignore=()):
    """Save ``obj`` to ``filepath``, reload it, and assert near-equality.

    HasTraits objects are compared trait by trait; anything else is compared
    value by value. Attribute names in ``ignore`` are skipped.
    """
    save_object(filepath, obj)
    reloaded, _ = load_object(filepath)
    if isinstance(obj, HasTraits):
        comparator = assert_has_traits_almost_equal
    else:
        comparator = assert_values_almost_equal
    comparator(reloaded, obj, ignore=ignore)
Exemple #4
0
 def setUpClass(cls):
     """Load the reference v0.7.2 project once for all tests in the class."""
     # Attributes skipped when comparing against freshly built objects.
     # NOTE(review): the original comment mentioned object names and
     # 'collection_step_number' (which matches a sibling test class), not
     # the attributes listed here — presumably copy-pasted; list kept as-is.
     cls.ignore = ['simulation_diffs', 'center_point_simulation',
                   'perf_params']
     cls.reference_filename = "demo_final_statev0.7.2.chrom"
     cls.ref_task, _ = load_object(io_data_path(cls.reference_filename))
Exemple #5
0
def assert_old_file_read(filename, reference_task, ignore=(), eps=1e-9):
    """Load a legacy .chrom file and check its project against a reference.

    Also asserts that the loader flagged the file as a legacy one.
    """
    with reraise_traits_notification_exceptions():
        loaded, is_legacy = load_object(io_data_path(filename))

    assert_has_traits_almost_equal(
        loaded.project, reference_task.project, ignore=ignore, eps=eps
    )
    assert_true(is_legacy)
Exemple #6
0
 def test_run_save_bring_back(self):
     """Round-trip a simulation through disk; check numerical invariants."""
     # Inject a tiny round-off error like the ones seen in files on disk:
     self.sim.section_times[-1] -= 1e-12
     save_object(self.test_file, self.sim)
     reloaded, _ = load_object(self.test_file)
     # Looser eps here: section_times get recomputed on the reloaded
     # simulation, so exact equality can't be expected.
     assert_has_traits_almost_equal(reloaded, self.sim, eps=1e-11)
     # Strict equality (NOT assertAlmostEqual): these two numbers must be
     # completely identical for CADET2 not to CRASH!
     assert_equal(reloaded.section_times[-1],
                  reloaded.solver.user_solution_times[-1])
Exemple #7
0
 def test_write_complete_study_with_sim_group(self):
     """A study holding a SimulationGroup survives a save/load cycle."""
     sim_group = make_sample_simulation_group(cp=self.study.simulations[0])
     self.study.analysis_tools.simulation_grids.append(sim_group)
     try:
         # FIXME: SimulationGroup not fully serialized, so it can't be
         # identical => test parts of it:
         save_object(self.test_file, self.study)
         reloaded, _ = load_object(self.test_file)
         self.assertIsInstance(reloaded, Study)
         grids = reloaded.analysis_tools.simulation_grids
         self.assertEqual(len(grids), 1)
         self.assertIsInstance(grids[0], SimulationGroup)
     finally:
         # cls.study is built in setUpClass: undo the append for the
         # benefit of the other tests sharing it.
         self.study.analysis_tools.simulation_grids.pop(-1)
Exemple #8
0
def load_default_user_datasource():
    """ Load or build the default user datasource.

    If a datasource was stored in the default location, load the newest one.
    Otherwise, build a new SimpleDataSource.

    Returns
    -------
    tuple with a SimpleDataSource instance and the file (if any) that it was
    loaded from; the file is an empty string when a new datasource was built.
    """
    from kromatography.model.data_source import SimpleDataSource
    from kromatography.io.reader_writer import load_object

    # Logger defined here so that the logger can be defined after the
    # initialize_logging is called.
    logger = logging.getLogger(__name__)

    last_stored_ds_file = get_newest_app_file(extension=DS_EXT)
    if last_stored_ds_file is not None:
        try:
            ds, legacy_file = load_object(last_stored_ds_file)
            msg = "Loaded datasource from {}".format(last_stored_ds_file)
            logger.info(msg)
            if legacy_file:
                # BUG FIX: the filename placeholder was never filled in
                # before (the literal "{}" was logged).
                msg = "Datasource storage {} is a legacy file.".format(
                    last_stored_ds_file)
                logger.info(msg)

        except Exception as e:
            msg = ("Failed to load the last datasource file {}. The file "
                   "might be corrupted, and might need to be removed. Error "
                   "was {}.").format(last_stored_ds_file, e)
            logger.error(msg)
            # Fall through to the default-datasource branch below:
            last_stored_ds_file = None

    # Not doing an else, so that this is executed when exception raised:
    if last_stored_ds_file is None:
        msg = "No valid datasource file found. Loading a default one."
        logger.debug(msg)
        ds = SimpleDataSource(name="User DataSource")
        last_stored_ds_file = ""

    return ds, last_stored_ds_file
Exemple #9
0
 def test_experiment_import_settings(self):
     """ Test that experiments import settings get updated.
     """
     # Invariant across all files/experiments, so built once up front:
     expected = {"time_of_origin", "col_name_patterns", "holdup_volume"}
     filenames = [
         "std_project_serialv5.chrom",
         "demo_final_statev5.chrom",
         "demo_final_statev6.chrom",
         "demo_with_optimizerv8.chrom",
         "demo_with_general_optimizerv9.chrom",
     ]
     for fname in filenames:
         task, _ = load_object(io_data_path(fname))
         for exp in task.project.study.experiments:
             settings = exp.output.import_settings
             self.assertEqual(set(settings.keys()), expected)
             self.assertIsInstance(settings["time_of_origin"], UnitScalar)
             self.assertIsInstance(settings["holdup_volume"], UnitScalar)
Exemple #10
0
    def test_binding_transport_model_target_prod_set(self):
        """ Make sure old files' binding and transport models are updated once
            loaded, such that they have target products.
        """
        task, _ = load_object(io_data_path("demo_with_optimizerv8.chrom"))
        study = task.project.study
        prod_name = study.product.name
        # Collect every binding/transport model reachable from the study,
        # including optimal models held by binding-model optimizers
        # (single pass; the old code filtered then copied each list with a
        # redundant comprehension):
        optim_models = []
        for optim in study.analysis_tools.optimizations:
            if isinstance(optim, BindModelOptim):
                optim_models.extend(optim.optimal_models)

        all_models = (study.study_datasource.binding_models +
                      study.study_datasource.transport_models + optim_models)
        for model in all_models:
            self.assertEqual(model.target_product, prod_name)
Exemple #11
0
    def test_store_akta_settings(self):
        """Stored AKTA import settings (patterns + units) reload intact."""
        # This file was created after akta settings were added to the models
        stored_file = io_data_path('std_input_with_akta_shift.chrom')
        task, _ = load_object(stored_file)
        exp0 = task.project.study.search_experiment_by_name('Run_1')
        settings = exp0.output.import_settings
        self.assertIsInstance(settings, dict)

        custom_patterns = {
            'uv': r'(UV.*280nm)',
            'conductivity': r'(COND$)',
            'concentration': r'(CONC$)',
            'pH': r'(pH$)',
            'flow': r'(FLOW$)',
            'fraction': r'(FRACTIONS?$)',
            'log_book': r'(Log(book)?$)',
            'temperature': r'(TEMP$)',
        }
        expected = {
            "time_of_origin": UnitScalar(102.050, units="minute"),
            "col_name_patterns": custom_patterns,
            'holdup_volume': UnitScalar(0.0, units='minute')
        }
        self.assertEqual(settings, expected)
 def setUpClass(cls):
     """Load the stored study shared by every test in this class."""
     path = io_data_path("std_study_with_run_sim_from_scratch.chrom")
     cls.study, _ = load_object(path)
Exemple #13
0
 def setUpClass(cls):
     """Load the serialized v5 reference project once for the class."""
     # Ignore object names and the method's collection_step_number since
     # their defaults have changed; 'job_manager' is presumably
     # runtime-only state (not restored from file) — skip it too.
     cls.ignore = ['name', 'collection_step_number', 'job_manager']
     cls.reference_filename = "std_project_serialv5.chrom"
     cls.ref_task, _ = load_object(io_data_path(cls.reference_filename))
        Cannot recompute the costs data if we don't have access to the
        simulations run.
        """
        first_step = self.optimizer.steps[0]
        cost_functions_available = first_step.group_cost_functions != {}
        return self.no_cost_data or cost_functions_available

    @cached_property
    def _get_cost_func_weights(self):
        """Getter: the three cost-function weights as a float64 array."""
        weights = [
            self.peak_time_weight,
            self.peak_height_weight,
            self.peak_slope_weight,
        ]
        return np.array(weights, dtype=np.float64)

    def _get_no_cost_data(self):
        return self.cost_data is None or len(self.cost_data) == 0


if __name__ == "__main__":
    from kromatography.io.reader_writer import load_object

    # BUG FIX: load_object returns a (object, legacy_flag) tuple; binding
    # the whole tuple to ``task`` would break ``task.project`` below.
    task, _ = load_object("demo_with_2_types_optim.chrom")
    optimizer = task.project.study.analysis_tools.optimizations[2]
    explorer = OptimizerCostFunctionExplorer(
        optimizer=optimizer,
        show_cost_data_nd="2D",
        x_axis_param="binding_model.sma_ka[3]",
        y_axis_param="binding_model.sma_nu[3]")
    explorer.configure_traits()
Exemple #15
0
"""
# Script inputs ---------------------------------------------------------------

EXTERNAL_DATASOURCE_FILEPATH = r"<FILE PATH HERE>"

PRODUCT_TO_IMPORT = "<YOUR PRODUCT NAME HERE>"

# Script ----------------------------------------------------------------------
# NOTE: ``user_datasource`` and ``task`` are not defined here — presumably
# injected by the application's scripting environment; verify before running
# standalone.

import logging
from kromatography.io.reader_writer import load_object

logger = logging.getLogger(__name__)

logger.warning("Loading external datasource...")
# BUG FIX: load_object returns a (object, legacy_flag) tuple; binding the
# whole tuple to ``ds`` would break the get_objects_by_type call below.
ds, _ = load_object(EXTERNAL_DATASOURCE_FILEPATH)

new_prod_comps = ds.get_objects_by_type(
    "product_components", filter_by={"target_product": PRODUCT_TO_IMPORT})
msg = "Adding {} components into active datasource..."
msg = msg.format(len(new_prod_comps))
logger.warning(msg)
for new_comp in new_prod_comps:
    user_datasource.set_object_of_type("product_components", new_comp)

logger.warning("Adding new product into active datasource...")
new_prod = ds.get_object_of_type("products", PRODUCT_TO_IMPORT)
user_datasource.set_object_of_type("products", new_prod)

task.edit_object_in_central_pane(new_prod)